From 3f17eea888ad0a701049b4d67288849122c4eb3b Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 17 Jan 2022 14:21:37 +0100 Subject: [PATCH 01/82] chore: clippy --- Cargo.lock | 1 + ethers-solc/Cargo.toml | 1 + ethers-solc/tests/project.rs | 14 +++++++------- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 44169a062..b4a28fad3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1344,6 +1344,7 @@ dependencies = [ "num_cpus", "once_cell", "pretty_assertions", + "rand 0.8.4", "rayon", "regex", "semver", diff --git a/ethers-solc/Cargo.toml b/ethers-solc/Cargo.toml index 042ae8aa0..65ca316e6 100644 --- a/ethers-solc/Cargo.toml +++ b/ethers-solc/Cargo.toml @@ -51,6 +51,7 @@ getrandom = { version = "0.2", features = ["js"] } [dev-dependencies] criterion = { version = "0.3", features = ["async_tokio"] } pretty_assertions = "1.0.0" +rand = "0.8.4" tempfile = "3.3.0" tokio = { version = "1.15.0", features = ["full"] } diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index 2a9a91c83..4ed33601d 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -322,8 +322,8 @@ fn can_flatten_file() { assert!(result.is_ok()); let result = result.unwrap(); - assert!(result.find("contract Foo").is_some()); - assert!(result.find("contract Bar").is_some()); + assert!(result.contains("contract Foo")); + assert!(result.contains("contract Bar")); } #[test] @@ -340,8 +340,8 @@ fn can_flatten_file_with_external_lib() { assert!(result.is_ok()); let result = result.unwrap(); - assert!(result.find("library console").is_some()); - assert!(result.find("contract Greeter").is_some()); + assert!(result.contains("library console")); + assert!(result.contains("contract Greeter")); } #[test] @@ -356,7 +356,7 @@ fn can_flatten_file_in_dapp_sample() { assert!(result.is_ok()); let result = result.unwrap(); - assert!(result.find("contract DSTest").is_some()); - assert!(result.find("contract Dapp").is_some()); - 
assert!(result.find("contract DappTest").is_some()); + assert!(result.contains("contract DSTest")); + assert!(result.contains("contract Dapp")); + assert!(result.contains("contract DappTest")); } From d624d32d44a8f3c0b2fd2e1a115caadb231dccef Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 17 Jan 2022 23:34:53 +0100 Subject: [PATCH 02/82] refactor: rewrite compiler passes and cache --- ethers-solc/src/cache.rs | 7 +- ethers-solc/src/compile/many.rs | 40 ++++ .../src/{compile.rs => compile/mod.rs} | 48 +---- ethers-solc/src/compile/project.rs | 181 ++++++++++++++++++ ethers-solc/src/lib.rs | 18 +- ethers-solc/src/resolver.rs | 89 ++++++--- 6 files changed, 317 insertions(+), 66 deletions(-) create mode 100644 ethers-solc/src/compile/many.rs rename ethers-solc/src/{compile.rs => compile/mod.rs} (95%) create mode 100644 ethers-solc/src/compile/project.rs diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 4fd0074ed..eaee0fb22 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -59,6 +59,11 @@ impl SolFilesCache { self.format == ETHERS_FORMAT_VERSION } + /// Returns the corresponding `CacheEntry` for the file if it exists + pub fn entry(&self, file: impl AsRef) -> Option<&CacheEntry> { + self.files.get(file.as_ref()) + } + /// Reads the cache json file from the given path #[tracing::instrument(skip_all, name = "sol-files-cache::read")] pub fn read(path: impl AsRef) -> Result { @@ -189,7 +194,7 @@ impl SolFilesCache { } /// Returns true if the entry has any imports that were changed - fn has_changed_imports( + pub(crate) fn has_changed_imports( &self, path: &Path, entry: &CacheEntry, diff --git a/ethers-solc/src/compile/many.rs b/ethers-solc/src/compile/many.rs new file mode 100644 index 000000000..109c72e0a --- /dev/null +++ b/ethers-solc/src/compile/many.rs @@ -0,0 +1,40 @@ +use crate::{error::Result, CompilerInput, CompilerOutput, Solc}; + +/// The result of a `solc` process bundled with its `Solc` and `CompilerInput` 
+type CompileElement = (Result, Solc, CompilerInput); + +/// The bundled output of multiple `solc` processes. +#[derive(Debug)] +pub struct CompiledMany { + outputs: Vec, +} + +impl CompiledMany { + pub fn new(outputs: Vec) -> Self { + Self { outputs } + } + + /// Returns an iterator over all output elements + pub fn outputs(&self) -> impl Iterator { + self.outputs.iter() + } + + /// Returns an iterator over all output elements + pub fn into_outputs(self) -> impl Iterator { + self.outputs.into_iter() + } + + /// Returns all `CompilerOutput` or the first error that occurred + pub fn flattened(self) -> Result> { + self.into_iter().collect() + } +} + +impl IntoIterator for CompiledMany { + type Item = Result; + type IntoIter = std::vec::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.outputs.into_iter().map(|(res, _, _)| res).collect::>().into_iter() + } +} diff --git a/ethers-solc/src/compile.rs b/ethers-solc/src/compile/mod.rs similarity index 95% rename from ethers-solc/src/compile.rs rename to ethers-solc/src/compile/mod.rs index 153274dc6..7af60596f 100644 --- a/ethers-solc/src/compile.rs +++ b/ethers-solc/src/compile/mod.rs @@ -14,6 +14,10 @@ use std::{ str::FromStr, }; +pub mod many; +#[cfg(all(feature = "svm", feature = "async"))] +pub mod project; + /// The name of the `solc` binary on the system pub const SOLC: &str = "solc"; @@ -556,7 +560,7 @@ impl Solc { /// let outputs = Solc::compile_many([(solc1, input1), (solc2, input2)], 2).await.flattened().unwrap(); /// # } /// ``` - pub async fn compile_many(jobs: I, n: usize) -> CompiledMany + pub async fn compile_many(jobs: I, n: usize) -> crate::many::CompiledMany where I: IntoIterator, { @@ -569,42 +573,8 @@ impl Solc { .buffer_unordered(n) .collect::>() .await; - CompiledMany { outputs } - } -} - -/// The result of a `solc` process bundled with its `Solc` and `CompilerInput` -type CompileElement = (Result, Solc, CompilerInput); - -/// The output of multiple `solc` processes. 
-#[derive(Debug)] -pub struct CompiledMany { - outputs: Vec, -} - -impl CompiledMany { - /// Returns an iterator over all output elements - pub fn outputs(&self) -> impl Iterator { - self.outputs.iter() - } - - /// Returns an iterator over all output elements - pub fn into_outputs(self) -> impl Iterator { - self.outputs.into_iter() - } - - /// Returns all `CompilerOutput` or the first error that occurred - pub fn flattened(self) -> Result> { - self.into_iter().collect() - } -} - -impl IntoIterator for CompiledMany { - type Item = Result; - type IntoIter = std::vec::IntoIter>; - fn into_iter(self) -> Self::IntoIter { - self.outputs.into_iter().map(|(res, _, _)| res).collect::>().into_iter() + crate::many::CompiledMany::new(outputs) } } @@ -670,7 +640,7 @@ mod tests { #[test] fn solc_compile_works() { - let input = include_str!("../test-data/in/compiler-in-1.json"); + let input = include_str!("../../test-data/in/compiler-in-1.json"); let input: CompilerInput = serde_json::from_str(input).unwrap(); let out = solc().compile(&input).unwrap(); let other = solc().compile(&serde_json::json!(input)).unwrap(); @@ -680,7 +650,7 @@ mod tests { #[cfg(feature = "async")] #[tokio::test] async fn async_solc_compile_works() { - let input = include_str!("../test-data/in/compiler-in-1.json"); + let input = include_str!("../../test-data/in/compiler-in-1.json"); let input: CompilerInput = serde_json::from_str(input).unwrap(); let out = solc().async_compile(&input).await.unwrap(); let other = solc().async_compile(&serde_json::json!(input)).await.unwrap(); @@ -689,7 +659,7 @@ mod tests { #[cfg(feature = "async")] #[tokio::test] async fn async_solc_compile_works2() { - let input = include_str!("../test-data/in/compiler-in-2.json"); + let input = include_str!("../../test-data/in/compiler-in-2.json"); let input: CompilerInput = serde_json::from_str(input).unwrap(); let out = solc().async_compile(&input).await.unwrap(); let other = 
solc().async_compile(&serde_json::json!(input)).await.unwrap(); diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs new file mode 100644 index 000000000..0449107ef --- /dev/null +++ b/ethers-solc/src/compile/project.rs @@ -0,0 +1,181 @@ +//! Manages compiling of a `Project` + +use crate::{ + error::Result, resolver::GraphEdges, utils, ArtifactOutput, Graph, Project, ProjectPathsConfig, + SolFilesCache, Solc, SolcConfig, Source, Sources, +}; +use std::{ + collections::{hash_map, BTreeMap, HashMap}, + path::{Path, PathBuf}, +}; + +#[derive(Debug)] +pub struct ProjectCompiler<'a, T: ArtifactOutput> { + /// Contains the relationship of the source files and their imports + edges: GraphEdges, + project: &'a Project, + /// how to compile all the sources + sources: CompilerSources, +} + +impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { + pub fn new(project: &'a Project) -> Result { + Self::with_sources(project, project.paths.read_input_files()?) + } + + pub fn with_sources(project: &'a Project, sources: Sources) -> Result { + let graph = Graph::resolve_sources(&project.paths, sources)?; + // TODO this should return a type that still knows the relationships edges and nodes + let (versions, edges) = graph.into_sources_by_version(!project.auto_detect)?; + + let sources_by_version = versions.get(&project.allowed_lib_paths)?; + + let mode = if project.solc_jobs > 1 && sources_by_version.len() > 1 { + // if there are multiple different versions and we can use multiple jobs we can compile + // them in parallel + CompilerSources::Para(sources_by_version, project.solc_jobs) + } else { + CompilerSources::Sequ(sources_by_version) + }; + Ok(Self { edges, project, sources: mode }) + } + + /// Compiles all the sources + pub fn compile(self) { + let Self { edges: _, project: _, sources: _mode } = self; + + todo!() + } +} + +/// Determines how the `solc <-> sources` pairs are executed +#[derive(Debug)] +enum CompilerSources { + /// Compile all these 
sequentially + Sequ(BTreeMap), + /// Compile all these in parallel using a certain amount of jobs + Para(BTreeMap, usize), +} + +impl CompilerSources { + fn preprocess(self, _paths: &ProjectPathsConfig) -> Result> { + let cached_artifacts = BTreeMap::new(); + + Ok(Preprocessed { cached_artifacts, sources: self }) + } +} + +/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still +/// need to be compiled. +#[derive(Debug)] +struct Preprocessed { + /// all artifacts that don't need to be compiled + cached_artifacts: BTreeMap, + + sources: CompilerSources, +} + +struct Cache<'a, T: ArtifactOutput> { + /// cache file + cache: SolFilesCache, + /// all already existing artifacts + cached_artifacts: BTreeMap, + /// relationship between all the files + edges: &'a GraphEdges, + /// how to configure solc + solc_config: &'a SolcConfig, + /// project paths + paths: &'a ProjectPathsConfig, + /// all files that were filtered because they haven't changed + filtered: Sources, + /// the file hashes + content_hashes: HashMap, +} + +impl<'a, T: ArtifactOutput> Cache<'a, T> { + /// Returns only those sources that + /// - are new + /// - were changed + /// - their imports were changed + /// - their artifact is missing + fn filter(&mut self, sources: Sources) -> Sources { + self.fill_hashes(&sources); + sources.into_iter().filter_map(|(file, source)| self.needs_solc(file, source)).collect() + } + + /// Returns `Some` if the file needs to be compiled and `None` if the artifact can be reu-used + fn needs_solc(&mut self, file: PathBuf, source: Source) -> Option<(PathBuf, Source)> { + if !self.is_dirty(&file) && + self.edges.imports(&file).iter().all(|file| !self.is_dirty(file)) + { + self.filtered.insert(file, source); + None + } else { + Some((file, source)) + } + } + + /// returns `false` if the corresponding cache entry remained unchanged otherwise `true` + fn is_dirty(&self, file: &Path) -> bool { + if let Some(hash) = 
self.content_hashes.get(file) { + let cache_path = utils::source_name(file, &self.paths.root); + if let Some(entry) = self.cache.entry(&cache_path) { + if entry.content_hash.as_bytes() != hash.as_bytes() { + tracing::trace!( + "changed content hash for cached artifact \"{}\"", + file.display() + ); + return true + } + if self.solc_config != &entry.solc_config { + tracing::trace!( + "changed solc config for cached artifact \"{}\"", + file.display() + ); + return true + } + // checks whether an artifact this file depends on was removed + if entry.artifacts.iter().any(|name| !self.has_artifact(file, name)) { + tracing::trace!( + "missing linked artifacts for cached artifact \"{}\"", + file.display() + ); + return true + } + return false + } + } + true + } + + /// Adds the file's hashes to the set if not set yet + fn fill_hashes(&mut self, sources: &Sources) { + for (file, source) in sources { + if let hash_map::Entry::Vacant(entry) = self.content_hashes.entry(file.clone()) { + entry.insert(source.content_hash()); + } + } + } + + /// Returns true if the artifact for the exists + fn has_artifact(&self, file: &Path, name: &str) -> bool { + let artifact_path = self.paths.artifacts.join(T::output_file(file, name)); + self.cached_artifacts.contains_key(&artifact_path) + } +} + +/// Abstraction over configured caching which can be either non-existent or an already loaded cache +enum ArtifactsCache<'a, T: ArtifactOutput> { + Ephemeral, + Cached(Cache<'a, T>), +} + +impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { + /// Filters out those sources that don't need to be compiled + fn filter(&mut self, sources: Sources) -> Sources { + match self { + ArtifactsCache::Ephemeral => sources, + ArtifactsCache::Cached(cache) => cache.filter(sources), + } + } +} diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index d68e7cd80..a8423dab6 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -247,12 +247,25 @@ impl Project { self.compile_with_version(&solc, 
sources) } + /// Compiles a set of contracts using `svm` managed solc installs + /// + /// # Example + /// + /// ``` + /// use ethers_solc::{artifacts::Source, Project, utils}; + /// # fn demo(project: Project) { + /// let project = Project::builder().build().unwrap(); + /// let files = utils::source_files("./src"); + /// let sources = Source::read_all(files).unwrap(); + /// let output = project.svm_compile(sources).unwrap(); + /// # } + /// ``` #[cfg(all(feature = "svm", feature = "async"))] #[tracing::instrument(skip(self, sources))] pub fn svm_compile(&self, sources: Sources) -> Result> { let graph = Graph::resolve_sources(&self.paths, sources)?; let sources_by_version = - graph.into_sources_by_version(!self.auto_detect)?.get(&self.allowed_lib_paths)?; + graph.into_sources_by_version(!self.auto_detect)?.0.get(&self.allowed_lib_paths)?; // run the compilation step for each version let compiled = if self.solc_jobs > 1 && sources_by_version.len() > 1 { @@ -285,7 +298,7 @@ impl Project { Ok(compiled) } - /// Compiles all sources with their intended `Solc` version in parallel. + /// Compiles all sources with their intended `Solc` versions in parallel. /// /// This runs `Self::solc_jobs` parallel `solc` jobs at most. #[cfg(all(feature = "svm", feature = "async"))] @@ -463,6 +476,7 @@ impl Project { tracing::trace!("start reading solfiles cache for incremental compilation"); let mut cache = SolFilesCache::read(&self.paths.cache)?; cache.remove_missing_files(); + let changed_files = cache.get_changed_or_missing_artifacts_files::( sources, Some(&self.solc_config), diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index e2c8403de..3aa4f8ac5 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -38,20 +38,48 @@ use solang_parser::pt::{Import, Loc, SourceUnitPart}; use crate::{error::Result, utils, ProjectPathsConfig, Solc, Source, Sources}; -/// Represents a fully-resolved solidity dependency graph. 
Each node in the graph -/// is a file and edges represent dependencies between them. -/// See also https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files -#[derive(Debug)] -pub struct Graph { - nodes: Vec, +/// The underlying edges of the graph which only contains the raw relationship data. +/// +/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources` +/// set is determined. +#[derive(Debug, Clone)] +pub struct GraphEdges { /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]` /// is the set of outgoing edges for `nodes[0]`. edges: Vec>, /// index maps for a solidity file to an index, for fast lookup. indices: HashMap, + /// reverse of `indices` for reverse lookup + rev_indices: HashMap, /// with how many input files we started with, corresponds to `let input_files = /// nodes[..num_input_files]`. num_input_files: usize, +} + +impl GraphEdges { + /// Returns a list of nodes the given node index points to for the given kind. + pub fn imported_nodes(&self, from: usize) -> &[usize] { + &self.edges[from] + } + + /// Returns the files imported files + pub fn imports(&self, path: impl AsRef) -> HashSet<&PathBuf> { + if let Some(start) = self.indices.get(path.as_ref()).copied() { + NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect() + } else { + HashSet::new() + } + } +} + +/// Represents a fully-resolved solidity dependency graph. Each node in the graph +/// is a file and edges represent dependencies between them. 
+/// See also https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files +#[derive(Debug)] +pub struct Graph { + nodes: Vec, + /// relationship of the nodes + edges: GraphEdges, /// the root of the project this graph represents #[allow(unused)] root: PathBuf, @@ -60,12 +88,12 @@ pub struct Graph { impl Graph { /// Returns a list of nodes the given node index points to for the given kind. pub fn imported_nodes(&self, from: usize) -> &[usize] { - &self.edges[from] + self.edges.imported_nodes(from) } /// Returns all the resolved files and their index in the graph pub fn files(&self) -> &HashMap { - &self.indices + &self.edges.indices } /// Gets a node by index. @@ -80,7 +108,7 @@ impl Graph { /// /// if the `start` node id is not included in the graph pub fn node_ids(&self, start: usize) -> impl Iterator + '_ { - NodesIter::new(start, self) + NodesIter::new(start, &self.edges) } /// Same as `Self::node_ids` but returns the actual `Node` @@ -97,7 +125,7 @@ impl Graph { /// See `Self::resolve_sources` /// This won't yield any resolved library nodes pub fn input_nodes(&self) -> impl Iterator { - self.nodes.iter().take(self.num_input_files) + self.nodes.iter().take(self.edges.num_input_files) } /// Resolves a number of sources within the given config @@ -164,8 +192,13 @@ impl Graph { nodes.push(node); edges.push(resolved_imports); } - - Ok(Graph { nodes, edges, indices: index, num_input_files, root: paths.root.clone() }) + let edges = GraphEdges { + edges, + rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(), + indices: index, + num_input_files, + }; + Ok(Graph { nodes, edges, root: paths.root.clone() }) } /// Resolves the dependencies of a project's source contracts @@ -176,11 +209,12 @@ impl Graph { #[cfg(all(feature = "svm", feature = "async"))] impl Graph { - /// Returns all input files together with their appropriate version. 
+ /// Consumes the nodes of the graph and returns all input files together with their appropriate + /// version and the edges of the graph /// /// First we determine the compatible version for each input file (from sources and test folder, /// see `Self::resolve`) and then we add all resolved library imports. - pub fn into_sources_by_version(self, offline: bool) -> Result { + pub fn into_sources_by_version(self, offline: bool) -> Result<(VersionedSources, GraphEdges)> { /// insert the imports of the given node into the sources map /// There can be following graph: /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` @@ -209,7 +243,7 @@ impl Graph { } let versioned_nodes = self.get_input_node_versions(offline)?; - let Self { nodes, edges, num_input_files, .. } = self; + let Self { nodes, edges, .. } = self; let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len()); let mut all_nodes = nodes.into_iter().enumerate().collect::>(); @@ -221,11 +255,17 @@ impl Graph { // insert the input node in the sources set and remove it from the available set let node = all_nodes.remove(&idx).expect("node is preset. 
qed"); sources.insert(node.path, node.source); - insert_imports(idx, &mut all_nodes, &mut sources, &edges, num_input_files); + insert_imports( + idx, + &mut all_nodes, + &mut sources, + &edges.edges, + edges.num_input_files, + ); } versioned_sources.insert(version, sources); } - Ok(VersionedSources { inner: versioned_sources, offline }) + Ok((VersionedSources { inner: versioned_sources, offline }, edges)) } /// Writes the list of imported files into the given formatter: @@ -294,7 +334,8 @@ impl Graph { // on first error, instead gather all the errors and return a bundled error message instead let mut errors = Vec::new(); // we also don't want duplicate error diagnostic - let mut erroneous_nodes = std::collections::HashSet::with_capacity(self.num_input_files); + let mut erroneous_nodes = + std::collections::HashSet::with_capacity(self.edges.num_input_files); let all_versions = if offline { Solc::installed_versions() } else { Solc::all_versions() }; @@ -302,7 +343,7 @@ impl Graph { let mut versioned_nodes = HashMap::new(); // walking through the node's dep tree and filtering the versions along the way - for idx in 0..self.num_input_files { + for idx in 0..self.edges.num_input_files { let mut candidates = all_versions.iter().collect::>(); self.retain_compatible_versions(idx, &mut candidates); @@ -346,12 +387,12 @@ pub struct NodesIter<'a> { /// stack of nodes stack: VecDeque, visited: HashSet, - graph: &'a Graph, + graph: &'a GraphEdges, } impl<'a> NodesIter<'a> { - fn new(start: usize, graph: &'a Graph) -> Self { - Self { stack: VecDeque::from([start]), visited: Default::default(), graph } + fn new(start: usize, graph: &'a GraphEdges) -> Self { + Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph } } } @@ -595,7 +636,7 @@ mod tests { let graph = Graph::resolve(&paths).unwrap(); - assert_eq!(graph.num_input_files, 1); + assert_eq!(graph.edges.num_input_files, 1); assert_eq!(graph.files().len(), 2); assert_eq!( @@ -614,7 +655,7 @@ mod tests { let 
graph = Graph::resolve(&paths).unwrap(); - assert_eq!(graph.num_input_files, 2); + assert_eq!(graph.edges.num_input_files, 2); assert_eq!(graph.files().len(), 3); assert_eq!( graph.files().clone(), From 8e16080b3080ca1013b2117d14e36f93d45ca208 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 24 Jan 2022 21:06:20 +0100 Subject: [PATCH 03/82] feat: more work on compile pipeline --- ethers-solc/src/artifacts.rs | 11 ++ ethers-solc/src/cache.rs | 18 +-- ethers-solc/src/compile/project.rs | 208 +++++++++++++++++++++++++++-- ethers-solc/src/lib.rs | 4 +- ethers-solc/src/resolver.rs | 16 ++- 5 files changed, 233 insertions(+), 24 deletions(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index e671b9fcc..1176be91b 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -25,8 +25,12 @@ use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; /// An ordered list of files and their source pub type Sources = BTreeMap; +/// file -> [contract name] pub type Contracts = BTreeMap>; +/// file -> [(contract name + version)] +pub type VersionedContracts = BTreeMap>>; + /// Input type `solc` expects #[derive(Clone, Debug, Serialize, Deserialize)] pub struct CompilerInput { @@ -750,6 +754,13 @@ impl<'a> fmt::Display for OutputDiagnostics<'a> { } } +/// A contract and the compiler version used to compile it +#[derive(Clone, Debug, PartialEq)] +pub struct VersionedContract { + pub contract: Contract, + pub version: Version, +} + /// Represents a compiled solidity contract #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] pub struct Contract { diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 886355d40..13a41e1f5 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -393,15 +393,15 @@ impl CacheEntry { /// The disk path is the actual path where a file can be found on disk. 
/// A source name is the internal identifier and is the remaining part of the disk path starting /// with the configured source directory, (`contracts/contract.sol`) +/// +/// See also [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) #[derive(Debug, Default)] pub struct PathMap { /// all libraries to the source set while keeping track of their actual disk path /// (`contracts/contract.sol` -> `/Users/.../contracts.sol`) - pub source_name_to_path: HashMap, + pub source_unit_name_to_path: HashMap, /// inverse of `source_name_to_path` : (`/Users/.../contracts.sol` -> `contracts/contract.sol`) - pub path_to_source_name: HashMap, - /* /// All paths, source names and actual file paths - * paths: Vec */ + pub path_to_source_unit_name: HashMap, } impl PathMap { @@ -424,25 +424,25 @@ impl PathMap { .iter() .map(|(path, contracts)| { let path = PathBuf::from(path); - let file = self.source_name_to_path.get(&path).cloned().unwrap_or(path); + let file = self.source_unit_name_to_path.get(&path).cloned().unwrap_or(path); (file, contracts.keys().cloned().collect::>()) }) .collect() } pub fn extend(&mut self, other: PathMap) { - self.source_name_to_path.extend(other.source_name_to_path); - self.path_to_source_name.extend(other.path_to_source_name); + self.source_unit_name_to_path.extend(other.source_unit_name_to_path); + self.path_to_source_unit_name.extend(other.path_to_source_unit_name); } /// Returns a new map with the source names as keys pub fn set_source_names(&self, sources: Sources) -> Sources { - Self::apply_mappings(sources, &self.path_to_source_name) + Self::apply_mappings(sources, &self.path_to_source_unit_name) } /// Returns a new map with the disk paths as keys pub fn set_disk_paths(&self, sources: Sources) -> Sources { - Self::apply_mappings(sources, &self.source_name_to_path) + Self::apply_mappings(sources, &self.source_unit_name_to_path) } } diff --git a/ethers-solc/src/compile/project.rs 
b/ethers-solc/src/compile/project.rs index 0449107ef..2d5210d52 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -1,11 +1,89 @@ //! Manages compiling of a `Project` +//! +//! The compilation of a project is performed in several steps. +//! +//! First the project's dependency graph [`crate::Graph`] is constructed and all imported +//! dependencies are resolved. The graph holds all the relationships between the files and their +//! versions. From there the appropriate version set is derived +//! [`crate::Graph::into_sources_by_version()`] which need to be compiled with different +//! [`crate::Solc`] versions. +//! +//! At this point we check if we need to compile a source file or whether we can reuse an _existing_ +//! `Artifact`. We don't to compile if: +//! - caching is enabled +//! - the file is **not** dirty [`Cache::is_dirty()`] +//! - the artifact for that file exists +//! +//! This concludes the preprocessing, and we now have either +//! - only `Source` files that need to be compiled +//! - only cached `Artifacts`, compilation can be skipped. This is considered an unchanged, +//! cached project +//! - Mix of both `Source` and `Artifacts`, only the `Source` files need to be compiled, the +//! `Artifacts` can be reused. +//! +//! The final step is invoking `Solc` via the standard JSON format. +//! +//! ### Notes on [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) +//! +//! In order to be able to support reproducible builds on all platforms, the Solidity compiler has +//! to abstract away the details of the filesystem where source files are stored. Paths used in +//! imports must work the same way everywhere while the command-line interface must be able to work +//! with platform-specific paths to provide good user experience. This section aims to explain in +//! detail how Solidity reconciles these requirements. +//! +//! 
The compiler maintains an internal database (virtual filesystem or VFS for short) where each +//! source unit is assigned a unique source unit name which is an opaque and unstructured +//! identifier. When you use the import statement, you specify an import path that references a +//! source unit name. If the compiler does not find any source unit name matching the import path in +//! the VFS, it invokes the callback, which is responsible for obtaining the source code to be +//! placed under that name. +//! +//! This becomes relevant when dealing with resolved imports +//! +//! #### Relative Imports +//! +//! ```solidity +//! import "./math/math.sol"; +//! import "contracts/tokens/token.sol"; +//! ``` +//! In the above `./math/math.sol` and `contracts/tokens/token.sol` are import paths while the +//! source unit names they translate to are `contracts/math/math.sol` and +//! `contracts/tokens/token.sol` respectively. +//! +//! #### Direct Imports +//! +//! An import that does not start with `./` or `../` is a direct import. +//! +//! ```solidity +//! import "/project/lib/util.sol"; // source unit name: /project/lib/util.sol +//! import "lib/util.sol"; // source unit name: lib/util.sol +//! import "@openzeppelin/address.sol"; // source unit name: @openzeppelin/address.sol +//! import "https://example.com/token.sol"; // source unit name: https://example.com/token.sol +//! ``` +//! +//! After applying any import remappings the import path simply becomes the source unit name. +//! +//! ##### Import Remapping +//! +//! ```solidity +//! import "github.com/ethereum/dapp-bin/library/math.sol"; // source unit name: dapp-bin/library/math.sol +//! ``` +//! +//! The compiler will look for the file in the VFS under `dapp-bin/library/math.sol`. If the file is +//! not available there, the source unit name will be passed to the Host Filesystem Loader, which +//! 
will then look in `/project/dapp-bin/library/iterable_mapping.sol` use crate::{ - error::Result, resolver::GraphEdges, utils, ArtifactOutput, Graph, Project, ProjectPathsConfig, - SolFilesCache, Solc, SolcConfig, Source, Sources, + artifacts::{Error, Settings, SourceFile, VersionedContract, VersionedContracts}, + error::Result, + remappings::Remapping, + resolver::GraphEdges, + utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, PathMap, Project, + ProjectPathsConfig, SolFilesCache, Solc, SolcConfig, Source, Sources, }; +use semver::Version; use std::{ - collections::{hash_map, BTreeMap, HashMap}, + collections::{hash_map, BTreeMap, HashMap, HashSet}, path::{Path, PathBuf}, }; @@ -19,13 +97,22 @@ pub struct ProjectCompiler<'a, T: ArtifactOutput> { } impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { + /// Create a new `ProjectCompiler` to bootstrap the compilation process of the project's + /// sources. + /// + /// # Example + /// + /// ```no_run + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// ``` pub fn new(project: &'a Project) -> Result { Self::with_sources(project, project.paths.read_input_files()?) 
} pub fn with_sources(project: &'a Project, sources: Sources) -> Result { let graph = Graph::resolve_sources(&project.paths, sources)?; - // TODO this should return a type that still knows the relationships edges and nodes let (versions, edges) = graph.into_sources_by_version(!project.auto_detect)?; let sources_by_version = versions.get(&project.allowed_lib_paths)?; @@ -33,14 +120,14 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { let mode = if project.solc_jobs > 1 && sources_by_version.len() > 1 { // if there are multiple different versions and we can use multiple jobs we can compile // them in parallel - CompilerSources::Para(sources_by_version, project.solc_jobs) + CompilerSources::Parallel(sources_by_version, project.solc_jobs) } else { - CompilerSources::Sequ(sources_by_version) + CompilerSources::Sequential(sources_by_version) }; Ok(Self { edges, project, sources: mode }) } - /// Compiles all the sources + /// Compiles all the sources of the `Project` pub fn compile(self) { let Self { edges: _, project: _, sources: _mode } = self; @@ -52,19 +139,111 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { #[derive(Debug)] enum CompilerSources { /// Compile all these sequentially - Sequ(BTreeMap), + Sequential(BTreeMap), /// Compile all these in parallel using a certain amount of jobs - Para(BTreeMap, usize), + Parallel(BTreeMap, usize), } impl CompilerSources { fn preprocess(self, _paths: &ProjectPathsConfig) -> Result> { let cached_artifacts = BTreeMap::new(); - Ok(Preprocessed { cached_artifacts, sources: self }) + todo!() + } + + /// Compiles all the files with `Solc` + fn compile( + self, + settings: Settings, + remappings: Vec, + ) -> Result { + match self { + CompilerSources::Sequential(input) => compile_sequential(input, settings, remappings), + CompilerSources::Parallel(input, j) => compile_parallel(input, j, settings, remappings), + } + } +} + +fn compile_sequential( + input: BTreeMap, + settings: Settings, + remappings: Vec, +) -> Result { + 
for (solc, sources) in input { + let version = solc.version()?; + + tracing::trace!( + "compiling {} sources with solc \"{}\"", + sources.len(), + solc.as_ref().display() + ); + + let mut paths = PathMap::default(); + // replace absolute path with source name to make solc happy + // TODO use correct path + let sources = paths.set_source_names(sources); + + let input = CompilerInput::with_sources(sources) + .settings(settings.clone()) + .normalize_evm_version(&version) + .with_remappings(remappings.clone()); + + tracing::trace!("calling solc with {} sources", input.sources.len()); + let output = solc.compile(&input)?; + tracing::trace!("compiled input, output has error: {}", output.has_error()); + } + + todo!() +} + +fn compile_parallel( + input: BTreeMap, + jobs: usize, + settings: Settings, + remappings: Vec, +) -> Result { + todo!() +} + +/// The aggregated output of (multiple) compile jobs +/// +/// This is effectively a solc version aware `CompilerOutput` +#[derive(Debug, Default)] +struct AggregatedCompilerOutput { + /// all errors from all `CompilerOutput` + /// + /// this is a set so that the same error from multiple `CompilerOutput`s only appears once + pub errors: HashSet, + /// All source files + pub sources: BTreeMap, + /// All compiled contracts combined with the solc version used to compile them + pub contracts: VersionedContracts, +} + +impl AggregatedCompilerOutput { + /// adds a new `CompilerOutput` to the aggregated output + fn extend(&mut self, version: Version, output: CompilerOutput) { + self.errors.extend(compiled.errors); + self.sources.extend(compiled.sources); + + for (file_name, new_contracts) in output.contracts { + let contracts = self.contracts.entry(file_name).or_default(); + for (contract_name, contract) in new_contracts { + let versioned = contracts.entry(contract_name).or_default(); + versioned.push(VersionedContract { contract, version: version.clone() }); + } + } } } +/// Captures the `CompilerOutput` and the `Solc` version that 
produced it +#[derive(Debug)] +struct VersionCompilerOutput { + output: CompilerOutput, + solc: Solc, + version: Version, +} + /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. #[derive(Debug)] @@ -72,9 +251,18 @@ struct Preprocessed { /// all artifacts that don't need to be compiled cached_artifacts: BTreeMap, + cache: SolFilesCache, + sources: CompilerSources, } +impl Preprocessed { + /// Drives the compilation process to completion + pub fn finish(self) {} +} + +/// A helper abstraction over the [`SolFilesCache`] used to determine what files need to compiled +/// and which `Artifacts` can be reused. struct Cache<'a, T: ArtifactOutput> { /// cache file cache: SolFilesCache, diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 9b3977bc1..2e3d5e933 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -466,8 +466,8 @@ impl Project { for (import, (source, path)) in self.resolved_libraries(&sources)? 
{ // inserting with absolute path here and keep track of the source name <-> path mappings sources.insert(path.clone(), source); - paths.path_to_source_name.insert(path.clone(), import.clone()); - paths.source_name_to_path.insert(import, path); + paths.path_to_source_unit_name.insert(path.clone(), import.clone()); + paths.source_unit_name_to_path.insert(import, path); } tracing::trace!("resolved all libraries"); diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index 3aa4f8ac5..f656c6e36 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -62,9 +62,11 @@ impl GraphEdges { &self.edges[from] } - /// Returns the files imported files - pub fn imports(&self, path: impl AsRef) -> HashSet<&PathBuf> { - if let Some(start) = self.indices.get(path.as_ref()).copied() { + /// Returns all files imported by the given file + /// + /// *Note* this only returns the imports, the `file __excluded__ + pub fn imports(&self, file: impl AsRef) -> HashSet<&PathBuf> { + if let Some(start) = self.indices.get(file.as_ref()).copied() { NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect() } else { HashSet::new() @@ -97,6 +99,10 @@ impl Graph { } /// Gets a node by index. 
+ /// + /// # Panics + /// + /// if the `index` node id is not included in the graph pub fn node(&self, index: usize) -> &Node { &self.nodes[index] } @@ -128,6 +134,10 @@ impl Graph { self.nodes.iter().take(self.edges.num_input_files) } + pub fn imports(&self, path: impl AsRef) -> HashSet<&PathBuf> { + self.edges.imports(path) + } + /// Resolves a number of sources within the given config pub fn resolve_sources(paths: &ProjectPathsConfig, sources: Sources) -> Result { /// checks if the given target path was already resolved, if so it adds its id to the list From c6c75d340c1792a60abb1f13dee080ccc0586d27 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 24 Jan 2022 21:39:10 +0100 Subject: [PATCH 04/82] feat: add cache constructor --- ethers-solc/src/artifacts.rs | 6 +- ethers-solc/src/compile/project.rs | 147 +++++++++++++++++++---------- ethers-solc/src/lib.rs | 4 + 3 files changed, 105 insertions(+), 52 deletions(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index 1176be91b..ebdffcc1c 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -1520,7 +1520,7 @@ pub struct StorageType { pub number_of_bytes: String, } -#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] +#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)] #[serde(rename_all = "camelCase")] pub struct Error { #[serde(default, skip_serializing_if = "Option::is_none")] @@ -1550,7 +1550,7 @@ impl fmt::Display for Error { } } -#[derive(Clone, Debug, Eq, PartialEq)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub enum Severity { Error, Warning, @@ -1633,7 +1633,7 @@ impl<'de> Deserialize<'de> for Severity { } } -#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] +#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)] pub struct SourceLocation { pub file: String, pub start: i32, diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 2d5210d52..bb67aa900 100644 
--- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -111,25 +111,39 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { Self::with_sources(project, project.paths.read_input_files()?) } + /// Bootstraps the compilation process by resolving the dependency graph of all sources and the + /// appropriate `Solc` -> `Sources` set as well as the compile mode to use (parallel, + /// sequential) + /// + /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows + /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`]. pub fn with_sources(project: &'a Project, sources: Sources) -> Result { let graph = Graph::resolve_sources(&project.paths, sources)?; let (versions, edges) = graph.into_sources_by_version(!project.auto_detect)?; let sources_by_version = versions.get(&project.allowed_lib_paths)?; - let mode = if project.solc_jobs > 1 && sources_by_version.len() > 1 { + let sources = if project.solc_jobs > 1 && sources_by_version.len() > 1 { // if there are multiple different versions and we can use multiple jobs we can compile // them in parallel CompilerSources::Parallel(sources_by_version, project.solc_jobs) } else { CompilerSources::Sequential(sources_by_version) }; - Ok(Self { edges, project, sources: mode }) + + Ok(Self { edges, project, sources }) } - /// Compiles all the sources of the `Project` - pub fn compile(self) { - let Self { edges: _, project: _, sources: _mode } = self; + /// Compiles all the sources of the `Project` in the appropriate mode + /// + /// If caching is enabled, the sources are filtered and only _dirty_ sources are recompiled. 
+ /// + /// The output of the compile process can be a mix of reused artifacts and freshly compiled + /// `Contract`s + pub fn compile(self) -> Result<()> { + let Self { edges, project, sources } = self; + + let mut cache = ArtifactsCache::new(&project, &edges)?; todo!() } @@ -146,8 +160,6 @@ enum CompilerSources { impl CompilerSources { fn preprocess(self, _paths: &ProjectPathsConfig) -> Result> { - let cached_artifacts = BTreeMap::new(); - todo!() } @@ -155,20 +167,22 @@ impl CompilerSources { fn compile( self, settings: Settings, - remappings: Vec, + paths: &ProjectPathsConfig, ) -> Result { match self { - CompilerSources::Sequential(input) => compile_sequential(input, settings, remappings), - CompilerSources::Parallel(input, j) => compile_parallel(input, j, settings, remappings), + CompilerSources::Sequential(input) => compile_sequential(input, settings, paths), + CompilerSources::Parallel(input, j) => compile_parallel(input, j, settings, paths), } } } +/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s fn compile_sequential( input: BTreeMap, settings: Settings, - remappings: Vec, + paths: &ProjectPathsConfig, ) -> Result { + let mut aggregated = AggregatedCompilerOutput::default(); for (solc, sources) in input { let version = solc.version()?; @@ -178,42 +192,60 @@ fn compile_sequential( solc.as_ref().display() ); - let mut paths = PathMap::default(); + let mut source_unit_map = PathMap::default(); // replace absolute path with source name to make solc happy // TODO use correct path - let sources = paths.set_source_names(sources); + let sources = source_unit_map.set_source_names(sources); let input = CompilerInput::with_sources(sources) .settings(settings.clone()) .normalize_evm_version(&version) - .with_remappings(remappings.clone()); + .with_remappings(paths.remappings.clone()); - tracing::trace!("calling solc with {} sources", input.sources.len()); - let output = solc.compile(&input)?; + 
tracing::trace!("calling solc `{}` with {} sources", version, input.sources.len()); + let mut output = solc.compile(&input)?; tracing::trace!("compiled input, output has error: {}", output.has_error()); - } - todo!() + // TODO reapply the paths? + + aggregated.extend(version, output); + } + Ok(aggregated) } fn compile_parallel( input: BTreeMap, jobs: usize, settings: Settings, - remappings: Vec, + paths: &ProjectPathsConfig, ) -> Result { todo!() } +/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still +/// need to be compiled. +#[derive(Debug)] +struct Preprocessed { + /// all artifacts that don't need to be compiled + cached_artifacts: BTreeMap, + + cache: SolFilesCache, + + sources: CompilerSources, +} + +impl Preprocessed { + /// Drives the compilation process to completion + pub fn finish(self) {} +} + /// The aggregated output of (multiple) compile jobs /// /// This is effectively a solc version aware `CompilerOutput` #[derive(Debug, Default)] struct AggregatedCompilerOutput { /// all errors from all `CompilerOutput` - /// - /// this is a set so that the same error from multiple `CompilerOutput`s only appears once - pub errors: HashSet, + pub errors: Vec, /// All source files pub sources: BTreeMap, /// All compiled contracts combined with the solc version used to compile them @@ -223,8 +255,8 @@ struct AggregatedCompilerOutput { impl AggregatedCompilerOutput { /// adds a new `CompilerOutput` to the aggregated output fn extend(&mut self, version: Version, output: CompilerOutput) { - self.errors.extend(compiled.errors); - self.sources.extend(compiled.sources); + self.errors.extend(output.errors); + self.sources.extend(output.sources); for (file_name, new_contracts) in output.contracts { let contracts = self.contracts.entry(file_name).or_default(); @@ -236,31 +268,6 @@ impl AggregatedCompilerOutput { } } -/// Captures the `CompilerOutput` and the `Solc` version that produced it -#[derive(Debug)] -struct 
VersionCompilerOutput { - output: CompilerOutput, - solc: Solc, - version: Version, -} - -/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still -/// need to be compiled. -#[derive(Debug)] -struct Preprocessed { - /// all artifacts that don't need to be compiled - cached_artifacts: BTreeMap, - - cache: SolFilesCache, - - sources: CompilerSources, -} - -impl Preprocessed { - /// Drives the compilation process to completion - pub fn finish(self) {} -} - /// A helper abstraction over the [`SolFilesCache`] used to determine what files need to compiled /// and which `Artifacts` can be reused. struct Cache<'a, T: ArtifactOutput> { @@ -359,6 +366,48 @@ enum ArtifactsCache<'a, T: ArtifactOutput> { } impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { + fn new(project: &'a Project, edges: &'a GraphEdges) -> Result { + let cache = if project.cached { + // read the cache file if it already exists + let cache = if project.cache_path().exists() { + let mut cache = SolFilesCache::read(project.cache_path())?; + // TODO this should take the project dir, since we're storing surce unit ids + // starting at the project dir? 
+ cache.remove_missing_files(); + cache + } else { + SolFilesCache::default() + }; + + // read all artifacts + let cached_artifacts = if project.paths.artifacts.exists() { + tracing::trace!("reading artifacts from cache.."); + let artifacts = cache.read_artifacts::(&project.paths.artifacts)?; + tracing::trace!("read {} artifacts from cache", artifacts.len()); + artifacts + } else { + BTreeMap::default() + }; + + let cache = Cache { + cache, + cached_artifacts, + edges: &edges, + solc_config: &project.solc_config, + paths: &project.paths, + filtered: Default::default(), + content_hashes: Default::default(), + }; + + ArtifactsCache::Cached(cache) + } else { + // nothing to cache + ArtifactsCache::Ephemeral + }; + + Ok(cache) + } + /// Filters out those sources that don't need to be compiled fn filter(&mut self, sources: Sources) -> Sources { match self { diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 2e3d5e933..35c3285c9 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -121,6 +121,10 @@ impl Project { } /// Sets the maximum number of parallel `solc` processes to run simultaneously. 
+ /// + /// # Panics + /// + /// if `jobs == 0` pub fn set_solc_jobs(&mut self, jobs: usize) { assert!(jobs > 0); self.solc_jobs = jobs; From 0ae4f1a3fc8e380650f5f35fba00b13a591ef008 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 24 Jan 2022 22:03:00 +0100 Subject: [PATCH 05/82] add artifact filtering --- ethers-solc/src/compile/project.rs | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index bb67aa900..221ad6881 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -141,10 +141,12 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// The output of the compile process can be a mix of reused artifacts and freshly compiled /// `Contract`s pub fn compile(self) -> Result<()> { - let Self { edges, project, sources } = self; + let Self { edges, project, mut sources } = self; let mut cache = ArtifactsCache::new(&project, &edges)?; + sources = sources.filtered(&mut cache); + todo!() } } @@ -159,8 +161,23 @@ enum CompilerSources { } impl CompilerSources { - fn preprocess(self, _paths: &ProjectPathsConfig) -> Result> { - todo!() + /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] + fn filtered(mut self, cache: &mut ArtifactsCache) -> Self { + fn filterd_sources( + sources: BTreeMap, + cache: &mut ArtifactsCache, + ) -> BTreeMap { + sources.into_iter().map(|(solc, sources)| (solc, cache.filter(sources))).collect() + } + + match self { + CompilerSources::Sequential(s) => { + CompilerSources::Sequential(filterd_sources(s, cache)) + } + CompilerSources::Parallel(s, j) => { + CompilerSources::Parallel(filterd_sources(s, cache), j) + } + } } /// Compiles all the files with `Solc` From c7a66467aa86856ecc51bc9181bc435c3c552bbe Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 24 Jan 2022 22:19:53 +0100 Subject: [PATCH 06/82] fine tune api --- 
ethers-solc/src/compile/project.rs | 48 +++++++++++++++++++----------- ethers-solc/src/lib.rs | 5 ++++ 2 files changed, 35 insertions(+), 18 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 221ad6881..2f0210333 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -140,14 +140,24 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// /// The output of the compile process can be a mix of reused artifacts and freshly compiled /// `Contract`s - pub fn compile(self) -> Result<()> { + pub fn compile(self) -> Result> { let Self { edges, project, mut sources } = self; let mut cache = ArtifactsCache::new(&project, &edges)?; + // retain only dirty sources sources = sources.filtered(&mut cache); - todo!() + let output = sources.compile(&project.solc_config.settings, &project.paths)?; + + // TODO rebuild cache + // TODO write artifacts + + Ok(ProjectCompileOutput2 { + output, + cached_artifacts: Default::default(), + ignored_error_codes: vec![], + }) } } @@ -183,7 +193,7 @@ impl CompilerSources { /// Compiles all the files with `Solc` fn compile( self, - settings: Settings, + settings: &Settings, paths: &ProjectPathsConfig, ) -> Result { match self { @@ -196,7 +206,7 @@ impl CompilerSources { /// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s fn compile_sequential( input: BTreeMap, - settings: Settings, + settings: &Settings, paths: &ProjectPathsConfig, ) -> Result { let mut aggregated = AggregatedCompilerOutput::default(); @@ -233,7 +243,7 @@ fn compile_sequential( fn compile_parallel( input: BTreeMap, jobs: usize, - settings: Settings, + settings: &Settings, paths: &ProjectPathsConfig, ) -> Result { todo!() @@ -241,19 +251,15 @@ fn compile_parallel( /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. 
-#[derive(Debug)] -struct Preprocessed { - /// all artifacts that don't need to be compiled +#[derive(Debug, Clone, PartialEq, Default)] +pub struct ProjectCompileOutput2 { + /// contains the aggregated `CompilerOutput` + /// + /// See [`CompilerSources::compile`] + output: AggregatedCompilerOutput, + /// All artifacts that were read from cache cached_artifacts: BTreeMap, - - cache: SolFilesCache, - - sources: CompilerSources, -} - -impl Preprocessed { - /// Drives the compilation process to completion - pub fn finish(self) {} + ignored_error_codes: Vec, } /// The aggregated output of (multiple) compile jobs @@ -270,6 +276,10 @@ struct AggregatedCompilerOutput { } impl AggregatedCompilerOutput { + pub fn is_empty(&self) -> bool { + self.contracts.is_empty() + } + /// adds a new `CompilerOutput` to the aggregated output fn extend(&mut self, version: Version, output: CompilerOutput) { self.errors.extend(output.errors); @@ -393,7 +403,9 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { cache.remove_missing_files(); cache } else { - SolFilesCache::default() + SolFilesCache::builder() + .root(project.root()) + .solc_config(project.solc_config.clone()) }; // read all artifacts diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 35c3285c9..606e6b251 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -120,6 +120,11 @@ impl Project { &self.paths.cache } + /// Returns the root directory of the project + pub fn root(&self) -> &PathBuf { + &self.paths.root + } + /// Sets the maximum number of parallel `solc` processes to run simultaneously. 
/// /// # Panics From 1346bb934b571b335d4397b989a92f326f03336a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 24 Jan 2022 23:13:31 +0100 Subject: [PATCH 07/82] feat: prepare version integration --- ethers-solc/src/artifacts.rs | 2 + ethers-solc/src/cache.rs | 31 +++++--- ethers-solc/src/compile/project.rs | 112 +++++++++++++++++++---------- ethers-solc/src/lib.rs | 9 ++- ethers-solc/src/resolver.rs | 9 +-- 5 files changed, 112 insertions(+), 51 deletions(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index ebdffcc1c..2dce28a7c 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -25,6 +25,8 @@ use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; /// An ordered list of files and their source pub type Sources = BTreeMap; +pub type VersionedSources = BTreeMap; + /// file -> [contract name] pub type Contracts = BTreeMap>; diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 13a41e1f5..488cd73c5 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -3,7 +3,7 @@ use crate::{ artifacts::{Contracts, Sources}, config::SolcConfig, error::{Result, SolcError}, - utils, ArtifactOutput, ProjectPathsConfig, + utils, ArtifactOutput, ProjectPathsConfig, Source, }; use serde::{Deserialize, Serialize}; use std::{ @@ -35,6 +35,10 @@ pub struct SolFilesCache { } impl SolFilesCache { + pub fn new(files: BTreeMap) -> Self { + Self { format: ETHERS_FORMAT_VERSION.to_string(), files } + } + /// # Example /// /// Autodetect solc version and default settings @@ -322,13 +326,7 @@ impl SolFilesCacheBuilder { let mut files = BTreeMap::new(); for (file, source) in sources { - let last_modification_date = fs::metadata(&file) - .map_err(|err| SolcError::io(err, file.clone()))? - .modified() - .map_err(|err| SolcError::io(err, file.clone()))? - .duration_since(UNIX_EPOCH) - .map_err(|err| SolcError::solc(err.to_string()))? 
- .as_millis() as u64; + let last_modification_date = CacheEntry::read_last_modification_date(&file)?; let imports = utils::find_import_paths(source.as_ref()).map(|m| m.as_str().to_owned()).collect(); @@ -382,10 +380,27 @@ pub struct CacheEntry { } impl CacheEntry { + pub fn new(_file: impl AsRef, _source: &Source) -> Result { + todo!() + } + /// Returns the time pub fn last_modified(&self) -> Duration { Duration::from_millis(self.last_modification_date) } + + /// Reads the last modification date from the file's metadata + pub fn read_last_modification_date(file: impl AsRef) -> Result { + let file = file.as_ref(); + let last_modification_date = fs::metadata(file) + .map_err(|err| SolcError::io(err, file.to_path_buf()))? + .modified() + .map_err(|err| SolcError::io(err, file.to_path_buf()))? + .duration_since(UNIX_EPOCH) + .map_err(|err| SolcError::solc(err.to_string()))? + .as_millis() as u64; + Ok(last_modification_date) + } } /// A helper type to handle source name/full disk mappings diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 2f0210333..c050a0364 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -74,16 +74,18 @@ //! 
will then look in `/project/dapp-bin/library/iterable_mapping.sol` use crate::{ - artifacts::{Error, Settings, SourceFile, VersionedContract, VersionedContracts}, + artifacts::{ + Error, Settings, SourceFile, VersionedContract, VersionedContracts, VersionedSources, + }, + cache::CacheEntry, error::Result, - remappings::Remapping, resolver::GraphEdges, utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, PathMap, Project, - ProjectPathsConfig, SolFilesCache, Solc, SolcConfig, Source, Sources, + ProjectPathsConfig, SolFilesCache, SolcConfig, Source, Sources, }; use semver::Version; use std::{ - collections::{hash_map, BTreeMap, HashMap, HashSet}, + collections::{hash_map, BTreeMap, HashMap}, path::{Path, PathBuf}, }; @@ -143,7 +145,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { pub fn compile(self) -> Result> { let Self { edges, project, mut sources } = self; - let mut cache = ArtifactsCache::new(&project, &edges)?; + let mut cache = ArtifactsCache::new(project, &edges)?; // retain only dirty sources sources = sources.filtered(&mut cache); @@ -151,12 +153,13 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { let output = sources.compile(&project.solc_config.settings, &project.paths)?; // TODO rebuild cache + // TODO write artifacts Ok(ProjectCompileOutput2 { output, cached_artifacts: Default::default(), - ignored_error_codes: vec![], + ignored_error_codes: project.ignored_error_codes.clone(), }) } } @@ -165,19 +168,25 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { #[derive(Debug)] enum CompilerSources { /// Compile all these sequentially - Sequential(BTreeMap), + Sequential(VersionedSources), /// Compile all these in parallel using a certain amount of jobs - Parallel(BTreeMap, usize), + Parallel(VersionedSources, usize), } impl CompilerSources { /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] - fn filtered(mut self, cache: &mut ArtifactsCache) -> Self { + fn filtered(self, cache: &mut 
ArtifactsCache) -> Self { fn filterd_sources( - sources: BTreeMap, + sources: VersionedSources, cache: &mut ArtifactsCache, - ) -> BTreeMap { - sources.into_iter().map(|(solc, sources)| (solc, cache.filter(sources))).collect() + ) -> VersionedSources { + sources + .into_iter() + .map(|(solc, (version, sources))| { + let sources = cache.filter(sources, &version); + (solc, (version, sources)) + }) + .collect() } match self { @@ -205,21 +214,19 @@ impl CompilerSources { /// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s fn compile_sequential( - input: BTreeMap, + input: VersionedSources, settings: &Settings, paths: &ProjectPathsConfig, ) -> Result { let mut aggregated = AggregatedCompilerOutput::default(); - for (solc, sources) in input { - let version = solc.version()?; - + for (solc, (version, sources)) in input { tracing::trace!( "compiling {} sources with solc \"{}\"", sources.len(), solc.as_ref().display() ); - let mut source_unit_map = PathMap::default(); + let source_unit_map = PathMap::default(); // replace absolute path with source name to make solc happy // TODO use correct path let sources = source_unit_map.set_source_names(sources); @@ -230,7 +237,7 @@ fn compile_sequential( .with_remappings(paths.remappings.clone()); tracing::trace!("calling solc `{}` with {} sources", version, input.sources.len()); - let mut output = solc.compile(&input)?; + let output = solc.compile(&input)?; tracing::trace!("compiled input, output has error: {}", output.has_error()); // TODO reapply the paths? 
@@ -241,10 +248,10 @@ fn compile_sequential( } fn compile_parallel( - input: BTreeMap, - jobs: usize, - settings: &Settings, - paths: &ProjectPathsConfig, + _input: VersionedSources, + _jobs: usize, + _settings: &Settings, + _paths: &ProjectPathsConfig, ) -> Result { todo!() } @@ -265,8 +272,8 @@ pub struct ProjectCompileOutput2 { /// The aggregated output of (multiple) compile jobs /// /// This is effectively a solc version aware `CompilerOutput` -#[derive(Debug, Default)] -struct AggregatedCompilerOutput { +#[derive(Clone, Debug, Default, PartialEq)] +pub struct AggregatedCompilerOutput { /// all errors from all `CompilerOutput` pub errors: Vec, /// All source files @@ -298,7 +305,7 @@ impl AggregatedCompilerOutput { /// A helper abstraction over the [`SolFilesCache`] used to determine what files need to compiled /// and which `Artifacts` can be reused. struct Cache<'a, T: ArtifactOutput> { - /// cache file + /// preexisting cache file cache: SolFilesCache, /// all already existing artifacts cached_artifacts: BTreeMap, @@ -310,6 +317,8 @@ struct Cache<'a, T: ArtifactOutput> { paths: &'a ProjectPathsConfig, /// all files that were filtered because they haven't changed filtered: Sources, + /// the corresponding cache entries for all sources that were deemed to be dirty + dirty_entries: Vec<(PathBuf, CacheEntry)>, /// the file hashes content_hashes: HashMap, } @@ -320,15 +329,23 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { /// - were changed /// - their imports were changed /// - their artifact is missing - fn filter(&mut self, sources: Sources) -> Sources { + fn filter(&mut self, sources: Sources, version: &Version) -> Sources { self.fill_hashes(&sources); - sources.into_iter().filter_map(|(file, source)| self.needs_solc(file, source)).collect() + sources + .into_iter() + .filter_map(|(file, source)| self.needs_solc(file, source, version)) + .collect() } /// Returns `Some` if the file needs to be compiled and `None` if the artifact can be reu-used - fn 
needs_solc(&mut self, file: PathBuf, source: Source) -> Option<(PathBuf, Source)> { - if !self.is_dirty(&file) && - self.edges.imports(&file).iter().all(|file| !self.is_dirty(file)) + fn needs_solc( + &mut self, + file: PathBuf, + source: Source, + version: &Version, + ) -> Option<(PathBuf, Source)> { + if !self.is_dirty(&file, version) && + self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version)) { self.filtered.insert(file, source); None @@ -338,7 +355,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { } /// returns `false` if the corresponding cache entry remained unchanged otherwise `true` - fn is_dirty(&self, file: &Path) -> bool { + fn is_dirty(&self, file: &Path, _version: &Version) -> bool { if let Some(hash) = self.content_hashes.get(file) { let cache_path = utils::source_name(file, &self.paths.root); if let Some(entry) = self.cache.entry(&cache_path) { @@ -403,9 +420,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { cache.remove_missing_files(); cache } else { - SolFilesCache::builder() - .root(project.root()) - .solc_config(project.solc_config.clone()) + SolFilesCache::default() }; // read all artifacts @@ -421,10 +436,11 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { let cache = Cache { cache, cached_artifacts, - edges: &edges, + edges, solc_config: &project.solc_config, paths: &project.paths, filtered: Default::default(), + dirty_entries: vec![], content_hashes: Default::default(), }; @@ -438,10 +454,32 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { } /// Filters out those sources that don't need to be compiled - fn filter(&mut self, sources: Sources) -> Sources { + fn filter(&mut self, sources: Sources, version: &Version) -> Sources { match self { ArtifactsCache::Ephemeral => sources, - ArtifactsCache::Cached(cache) => cache.filter(sources), + ArtifactsCache::Cached(cache) => cache.filter(sources, version), + } + } + + fn finish(self) -> Result> { + match self { + ArtifactsCache::Ephemeral => 
Ok(Default::default()), + ArtifactsCache::Cached(cache) => { + let Cache { cache, cached_artifacts, dirty_entries: _, filtered, edges: _, .. } = + cache; + // rebuild the cache file with all compiled contracts (dirty sources), and filtered + // sources (clean) + + let cache_entries = cache + .files + .into_iter() + .filter(|(path, _)| filtered.contains_key(path)) + .collect(); + + let _sol_cache = SolFilesCache::new(cache_entries); + + Ok(cached_artifacts) + } } } } diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 606e6b251..1c89e4459 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -273,8 +273,13 @@ impl Project { #[tracing::instrument(skip(self, sources))] pub fn svm_compile(&self, sources: Sources) -> Result> { let graph = Graph::resolve_sources(&self.paths, sources)?; - let sources_by_version = - graph.into_sources_by_version(!self.auto_detect)?.0.get(&self.allowed_lib_paths)?; + let sources_by_version: BTreeMap<_, _> = graph + .into_sources_by_version(!self.auto_detect)? + .0 + .get(&self.allowed_lib_paths)? 
+ .into_iter() + .map(|(k, (_v, s))| (k, s)) + .collect(); // run the compilation step for each version let compiled = if self.solc_jobs > 1 && sources_by_version.len() > 1 { diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index f656c6e36..87b959efb 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -33,7 +33,7 @@ use std::{ use rayon::prelude::*; use regex::Match; -use semver::VersionReq; +use semver::{Version, VersionReq}; use solang_parser::pt::{Import, Loc, SourceUnitPart}; use crate::{error::Result, utils, ProjectPathsConfig, Solc, Source, Sources}; @@ -433,7 +433,7 @@ impl VersionedSources { pub fn get( self, allowed_lib_paths: &crate::AllowedLibPaths, - ) -> Result> { + ) -> Result> { use crate::SolcError; // we take the installer lock here to ensure installation checking is done in sync @@ -462,8 +462,9 @@ impl VersionedSources { Solc::blocking_install(version.as_ref())?; tracing::trace!("reinstalled solc: \"{}\"", version); } - sources_by_version - .insert(solc.arg("--allow-paths").arg(allowed_lib_paths.to_string()), sources); + let solc = solc.arg("--allow-paths").arg(allowed_lib_paths.to_string()); + let version = solc.version()?; + sources_by_version.insert(solc, (version, sources)); } Ok(sources_by_version) } From c5a932363c18afb44da24e210febb18fdff47532 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 24 Jan 2022 23:21:18 +0100 Subject: [PATCH 08/82] docs: more docs --- ethers-solc/src/compile/project.rs | 29 ++++++++++++++++++++++++----- ethers-solc/src/resolver.rs | 4 ++-- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index c050a0364..a899844ab 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -108,6 +108,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// use ethers_solc::Project; /// /// let project = Project::builder().build().unwrap(); + /// let 
output = project.compile().unwrap(); /// ``` pub fn new(project: &'a Project) -> Result { Self::with_sources(project, project.paths.read_input_files()?) @@ -152,13 +153,11 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { let output = sources.compile(&project.solc_config.settings, &project.paths)?; - // TODO rebuild cache - - // TODO write artifacts + let cached_artifacts = cache.finish()?; Ok(ProjectCompileOutput2 { output, - cached_artifacts: Default::default(), + cached_artifacts, ignored_error_codes: project.ignored_error_codes.clone(), }) } @@ -220,6 +219,10 @@ fn compile_sequential( ) -> Result { let mut aggregated = AggregatedCompilerOutput::default(); for (solc, (version, sources)) in input { + if sources.is_empty() { + // nothing to compile + continue + } tracing::trace!( "compiling {} sources with solc \"{}\"", sources.len(), @@ -240,7 +243,7 @@ fn compile_sequential( let output = solc.compile(&input)?; tracing::trace!("compiled input, output has error: {}", output.has_error()); - // TODO reapply the paths? 
+ // TODO reapply the paths aggregated.extend(version, output); } @@ -350,6 +353,18 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { self.filtered.insert(file, source); None } else { + // TODO create a new dirty cacheentry + // let entry = CacheEntry { + // last_modification_date: CacheEntry::read_last_modification_date(&file).unwrap(), + // content_hash: source.content_hash(), + // source_name: utils::source_name(&file, &self.paths.root).into(), + // solc_config: solc_config.clone(), + // TODO determine imports via self.edges + // imports, + // version_pragmas, + // artifacts: vec![], + // }; + Some((file, source)) } } @@ -405,6 +420,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { /// Abstraction over configured caching which can be either non-existent or an already loaded cache enum ArtifactsCache<'a, T: ArtifactOutput> { + /// Cache nothing on disk Ephemeral, Cached(Cache<'a, T>), } @@ -461,6 +477,9 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { } } + /// rebuilds a new [`SolFileCache`] and writes it to disk + /// + /// Returns all cached artifacts fn finish(self) -> Result> { match self { ArtifactsCache::Ephemeral => Ok(Default::default()), diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index 87b959efb..f3eb54c47 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -33,7 +33,7 @@ use std::{ use rayon::prelude::*; use regex::Match; -use semver::{Version, VersionReq}; +use semver::VersionReq; use solang_parser::pt::{Import, Loc, SourceUnitPart}; use crate::{error::Result, utils, ProjectPathsConfig, Solc, Source, Sources}; @@ -433,7 +433,7 @@ impl VersionedSources { pub fn get( self, allowed_lib_paths: &crate::AllowedLibPaths, - ) -> Result> { + ) -> Result> { use crate::SolcError; // we take the installer lock here to ensure installation checking is done in sync From 3ec7a61346c9845285f3d2f6f60e222b05bcb732 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 26 Jan 2022 22:26:26 +0100 Subject: [PATCH 
09/82] feat: add cacheentry2 --- ethers-solc/Cargo.toml | 2 +- ethers-solc/src/cache.rs | 27 ++++++++++ ethers-solc/src/compile/project.rs | 82 ++++++++++++++++++++++-------- ethers-solc/src/resolver.rs | 16 ++++++ 4 files changed, 105 insertions(+), 22 deletions(-) diff --git a/ethers-solc/Cargo.toml b/ethers-solc/Cargo.toml index 65ca316e6..e968f9025 100644 --- a/ethers-solc/Cargo.toml +++ b/ethers-solc/Cargo.toml @@ -17,7 +17,7 @@ keywords = ["ethereum", "web3", "solc", "solidity", "ethers"] ethers-core = { version = "^0.6.0", path = "../ethers-core", default-features = false } serde_json = "1.0.68" serde = { version = "1.0.130", features = ["derive"] } -semver = "1.0.4" +semver = { version = "1.0.4", features = ["serde"] } walkdir = "2.3.2" tokio = { version = "1.15.0", default-features = false, features = ["process", "io-util", "fs", "time"], optional = true } futures-util = { version = "^0.3", optional = true } diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 488cd73c5..c3faae675 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -5,6 +5,7 @@ use crate::{ error::{Result, SolcError}, utils, ArtifactOutput, ProjectPathsConfig, Source, }; +use semver::Version; use serde::{Deserialize, Serialize}; use std::{ collections::{BTreeMap, HashMap, HashSet}, @@ -366,6 +367,32 @@ impl SolFilesCacheBuilder { } } +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CacheEntry2 { + /// the last modification time of this file + pub last_modification_date: u64, + /// hash to identify whether the content of the file changed + pub content_hash: String, + /// identifier name see [`crate::util::source_name()`] + pub source_name: PathBuf, + /// what config was set when compiling this file + pub solc_config: SolcConfig, + /// fully resolved imports of the file + /// + /// all paths start relative from the project's root: `src/importedFile.sol` + pub imports: Vec, + /// The solidity 
version pragma + pub version_requirement: Option, + /// all artifacts produced for this file + /// + /// In theory a file can be compiled by different solc versions: + /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` + /// file `C` would be compiled twice, with `0.8.10` and `0.8.11`, producing two different + /// artifacts + pub artifacts: HashMap>, +} + #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct CacheEntry { diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index a899844ab..87e32991f 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -77,7 +77,7 @@ use crate::{ artifacts::{ Error, Settings, SourceFile, VersionedContract, VersionedContracts, VersionedSources, }, - cache::CacheEntry, + cache::{CacheEntry, CacheEntry2}, error::Result, resolver::GraphEdges, utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, PathMap, Project, @@ -85,7 +85,7 @@ use crate::{ }; use semver::Version; use std::{ - collections::{hash_map, BTreeMap, HashMap}, + collections::{hash_map, BTreeMap, HashMap, HashSet}, path::{Path, PathBuf}, }; @@ -143,16 +143,25 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// /// The output of the compile process can be a mix of reused artifacts and freshly compiled /// `Contract`s + /// + /// # Example + /// + /// ```no_run + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let output = project.compile().unwrap(); + /// ``` pub fn compile(self) -> Result> { let Self { edges, project, mut sources } = self; let mut cache = ArtifactsCache::new(project, &edges)?; - // retain only dirty sources + // retain and compile only dirty sources sources = sources.filtered(&mut cache); - let output = sources.compile(&project.solc_config.settings, &project.paths)?; + // get all cached artifacts let cached_artifacts = cache.finish()?; 
Ok(ProjectCompileOutput2 { @@ -231,7 +240,7 @@ fn compile_sequential( let source_unit_map = PathMap::default(); // replace absolute path with source name to make solc happy - // TODO use correct path + // TODO use correct source unit path let sources = source_unit_map.set_source_names(sources); let input = CompilerInput::with_sources(sources) @@ -321,12 +330,53 @@ struct Cache<'a, T: ArtifactOutput> { /// all files that were filtered because they haven't changed filtered: Sources, /// the corresponding cache entries for all sources that were deemed to be dirty - dirty_entries: Vec<(PathBuf, CacheEntry)>, + dirty_entries: HashMap)>, /// the file hashes content_hashes: HashMap, } impl<'a, T: ArtifactOutput> Cache<'a, T> { + /// Creates a new cache entry for the file + fn create_cache_entry(&self, file: &PathBuf, source: &Source) -> Result { + let imports = self + .edges + .imports(file) + .into_iter() + .map(|import| utils::source_name(import, &self.paths.root).to_path_buf()) + .collect(); + + let entry = CacheEntry2 { + last_modification_date: CacheEntry::read_last_modification_date(&file).unwrap(), + content_hash: source.content_hash(), + source_name: utils::source_name(&file, &self.paths.root).into(), + solc_config: self.solc_config.clone(), + imports, + version_requirement: self.edges.version_requirement(file).map(|v| v.to_string()), + // artifacts remain empty until we received the compiler output + artifacts: Default::default(), + }; + + Ok(entry) + } + + /// inserts a new cache entry for the given file + /// + /// If there is already an entry available for the file the given version is added to the set + fn insert_new_cache_entry( + &mut self, + file: &PathBuf, + source: &Source, + version: Version, + ) -> Result<()> { + if let Some((_, versions)) = self.dirty_entries.get_mut(file) { + versions.insert(version); + } else { + let entry = self.create_cache_entry(file, source)?; + self.dirty_entries.insert(file.clone(), (entry, HashSet::from([version]))); + } + 
Ok(()) + } + /// Returns only those sources that /// - are new /// - were changed @@ -336,12 +386,12 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { self.fill_hashes(&sources); sources .into_iter() - .filter_map(|(file, source)| self.needs_solc(file, source, version)) + .filter_map(|(file, source)| self.requires_solc(file, source, version)) .collect() } - /// Returns `Some` if the file needs to be compiled and `None` if the artifact can be reu-used - fn needs_solc( + /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reu-used + fn requires_solc( &mut self, file: PathBuf, source: Source, @@ -353,17 +403,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { self.filtered.insert(file, source); None } else { - // TODO create a new dirty cacheentry - // let entry = CacheEntry { - // last_modification_date: CacheEntry::read_last_modification_date(&file).unwrap(), - // content_hash: source.content_hash(), - // source_name: utils::source_name(&file, &self.paths.root).into(), - // solc_config: solc_config.clone(), - // TODO determine imports via self.edges - // imports, - // version_pragmas, - // artifacts: vec![], - // }; + self.insert_new_cache_entry(&file, &source, version.clone()).unwrap(); Some((file, source)) } @@ -456,7 +496,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { solc_config: &project.solc_config, paths: &project.paths, filtered: Default::default(), - dirty_entries: vec![], + dirty_entries: Default::default(), content_hashes: Default::default(), }; diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index f3eb54c47..3f07ad617 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -51,6 +51,8 @@ pub struct GraphEdges { indices: HashMap, /// reverse of `indices` for reverse lookup rev_indices: HashMap, + /// the identified version requirement of a file + versions: HashMap>, /// with how many input files we started with, corresponds to `let input_files = /// 
nodes[..num_input_files]`. num_input_files: usize, @@ -72,6 +74,15 @@ impl GraphEdges { HashSet::new() } } + + /// Returns the `VersionReq` for the given file + pub fn version_requirement(&self, file: impl AsRef) -> Option<&VersionReq> { + self.indices + .get(file.as_ref()) + .and_then(|idx| self.versions.get(idx)) + .map(|v| v.as_ref()) + .flatten() + } } /// Represents a fully-resolved solidity dependency graph. Each node in the graph @@ -207,6 +218,11 @@ impl Graph { rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(), indices: index, num_input_files, + versions: nodes + .iter() + .enumerate() + .map(|(idx, node)| (idx, node.data.version_req.clone())) + .collect(), }; Ok(Graph { nodes, edges, root: paths.root.clone() }) } From c7d75dcf95357b78ee89b77742f3a988037c339e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 26 Jan 2022 22:40:07 +0100 Subject: [PATCH 10/82] replace cacheentry types --- ethers-solc/src/cache.rs | 113 +++++++++++++---------------- ethers-solc/src/compile/project.rs | 37 ++++++---- 2 files changed, 73 insertions(+), 77 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index c3faae675..20f7092b1 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -316,60 +316,62 @@ impl SolFilesCacheBuilder { sources: Sources, cache_file: Option, ) -> Result { - let format = self.format.unwrap_or_else(|| ETHERS_FORMAT_VERSION.to_string()); - let solc_config = self.solc_config.unwrap_or_else(|| SolcConfig::builder().build()); - - let root = self - .root - .map(Ok) - .unwrap_or_else(std::env::current_dir) - .map_err(|err| SolcError::io(err, "."))?; - - let mut files = BTreeMap::new(); - for (file, source) in sources { - let last_modification_date = CacheEntry::read_last_modification_date(&file)?; - let imports = - utils::find_import_paths(source.as_ref()).map(|m| m.as_str().to_owned()).collect(); - - let version_pragmas = utils::find_version_pragma(source.as_ref()) - .map(|v| 
vec![v.as_str().to_string()]) - .unwrap_or_default(); - - let entry = CacheEntry { - last_modification_date, - content_hash: source.content_hash(), - source_name: utils::source_name(&file, &root).into(), - solc_config: solc_config.clone(), - imports, - version_pragmas, - artifacts: vec![], - }; - files.insert(file, entry); - } - - let cache = if let Some(dest) = cache_file.as_ref().filter(|dest| dest.exists()) { - // read the existing cache and extend it by the files that changed - // (if we just wrote to the cache file, we'd overwrite the existing data) - let reader = - std::io::BufReader::new(File::open(dest).map_err(|err| SolcError::io(err, dest))?); - if let Ok(mut cache) = serde_json::from_reader::<_, SolFilesCache>(reader) { - cache.files.extend(files); - cache - } else { - tracing::error!("Failed to read existing cache file {}", dest.display()); - SolFilesCache { format, files } - } - } else { - SolFilesCache { format, files } - }; - - Ok(cache) + todo!() + // let format = self.format.unwrap_or_else(|| ETHERS_FORMAT_VERSION.to_string()); + // let solc_config = self.solc_config.unwrap_or_else(|| SolcConfig::builder().build()); + // + // let root = self + // .root + // .map(Ok) + // .unwrap_or_else(std::env::current_dir) + // .map_err(|err| SolcError::io(err, "."))?; + // + // let mut files = BTreeMap::new(); + // for (file, source) in sources { + // let last_modification_date = CacheEntry::read_last_modification_date(&file)?; + // let imports = + // utils::find_import_paths(source.as_ref()).map(|m| + // m.as_str().to_owned()).collect(); + // + // let version_pragmas = utils::find_version_pragma(source.as_ref()) + // .map(|v| vec![v.as_str().to_string()]) + // .unwrap_or_default(); + // + // let entry = CacheEntry { + // last_modification_date, + // content_hash: source.content_hash(), + // source_name: utils::source_name(&file, &root).into(), + // solc_config: solc_config.clone(), + // imports, + // version_pragmas, + // artifacts: vec![], + // }; + // 
files.insert(file, entry); + // } + // + // let cache = if let Some(dest) = cache_file.as_ref().filter(|dest| dest.exists()) { + // // read the existing cache and extend it by the files that changed + // // (if we just wrote to the cache file, we'd overwrite the existing data) + // let reader = + // std::io::BufReader::new(File::open(dest).map_err(|err| SolcError::io(err, + // dest))?); if let Ok(mut cache) = serde_json::from_reader::<_, + // SolFilesCache>(reader) { cache.files.extend(files); + // cache + // } else { + // tracing::error!("Failed to read existing cache file {}", dest.display()); + // SolFilesCache { format, files } + // } + // } else { + // SolFilesCache { format, files } + // }; + // + // Ok(cache) } } #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct CacheEntry2 { +pub struct CacheEntry { /// the last modification time of this file pub last_modification_date: u64, /// hash to identify whether the content of the file changed @@ -393,19 +395,6 @@ pub struct CacheEntry2 { pub artifacts: HashMap>, } -#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CacheEntry { - /// the last modification time of this file - pub last_modification_date: u64, - pub content_hash: String, - pub source_name: PathBuf, - pub solc_config: SolcConfig, - pub imports: Vec, - pub version_pragmas: Vec, - pub artifacts: Vec, -} - impl CacheEntry { pub fn new(_file: impl AsRef, _source: &Source) -> Result { todo!() diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 87e32991f..c4a447d3d 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -77,7 +77,7 @@ use crate::{ artifacts::{ Error, Settings, SourceFile, VersionedContract, VersionedContracts, VersionedSources, }, - cache::{CacheEntry, CacheEntry2}, + cache::CacheEntry, error::Result, resolver::GraphEdges, utils, ArtifactOutput, 
CompilerInput, CompilerOutput, Graph, PathMap, Project, @@ -330,14 +330,14 @@ struct Cache<'a, T: ArtifactOutput> { /// all files that were filtered because they haven't changed filtered: Sources, /// the corresponding cache entries for all sources that were deemed to be dirty - dirty_entries: HashMap)>, + dirty_entries: HashMap)>, /// the file hashes content_hashes: HashMap, } impl<'a, T: ArtifactOutput> Cache<'a, T> { /// Creates a new cache entry for the file - fn create_cache_entry(&self, file: &PathBuf, source: &Source) -> Result { + fn create_cache_entry(&self, file: &PathBuf, source: &Source) -> Result { let imports = self .edges .imports(file) @@ -345,7 +345,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { .map(|import| utils::source_name(import, &self.paths.root).to_path_buf()) .collect(); - let entry = CacheEntry2 { + let entry = CacheEntry { last_modification_date: CacheEntry::read_last_modification_date(&file).unwrap(), content_hash: source.content_hash(), source_name: utils::source_name(&file, &self.paths.root).into(), @@ -410,7 +410,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { } /// returns `false` if the corresponding cache entry remained unchanged otherwise `true` - fn is_dirty(&self, file: &Path, _version: &Version) -> bool { + fn is_dirty(&self, file: &Path, version: &Version) -> bool { if let Some(hash) = self.content_hashes.get(file) { let cache_path = utils::source_name(file, &self.paths.root); if let Some(entry) = self.cache.entry(&cache_path) { @@ -428,14 +428,22 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { ); return true } - // checks whether an artifact this file depends on was removed - if entry.artifacts.iter().any(|name| !self.has_artifact(file, name)) { - tracing::trace!( - "missing linked artifacts for cached artifact \"{}\"", - file.display() - ); + + if let Some(artifacts) = entry.artifacts.get(version) { + // checks whether an artifact this file depends on was removed + if artifacts.iter().any(|artifact_path| 
!self.has_artifact(artifact_path)) { + tracing::trace!( + "missing linked artifacts for cached artifact \"{}\"", + file.display() + ); + return true + } + } else { + // artifact does not exist return true } + + // all things match return false } } @@ -451,10 +459,9 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { } } - /// Returns true if the artifact for the exists - fn has_artifact(&self, file: &Path, name: &str) -> bool { - let artifact_path = self.paths.artifacts.join(T::output_file(file, name)); - self.cached_artifacts.contains_key(&artifact_path) + /// Returns true if the artifact exists + fn has_artifact(&self, artifact_path: &Path) -> bool { + self.cached_artifacts.contains_key(artifact_path) } } From c52dbe710b2e440415077a0ef573291f5e629d8d Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 00:40:16 +0100 Subject: [PATCH 11/82] integrate new api --- ethers-solc/src/cache.rs | 14 ++ ethers-solc/src/compile/project.rs | 51 ++++-- ethers-solc/src/config.rs | 196 +------------------- ethers-solc/src/lib.rs | 1 + ethers-solc/src/output.rs | 282 +++++++++++++++++++++++++++++ 5 files changed, 335 insertions(+), 209 deletions(-) create mode 100644 ethers-solc/src/output.rs diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 20f7092b1..a595396cf 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -257,6 +257,20 @@ impl SolFilesCache { } Ok(artifacts) } + + pub(crate) fn retain(&mut self, files: I) + where + I: IntoIterator, + V: IntoIterator, + { + } + + pub(crate) fn extend(&mut self, files: I) + where + I: IntoIterator, + V: IntoIterator, + { + } } // async variants for read and write diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index c4a447d3d..2e59f51ce 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -85,7 +85,7 @@ use crate::{ }; use semver::Version; use std::{ - collections::{hash_map, BTreeMap, HashMap, HashSet}, + 
collections::{hash_map, hash_map::Entry, BTreeMap, HashMap, HashSet}, path::{Path, PathBuf}, }; @@ -161,6 +161,12 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { sources = sources.filtered(&mut cache); let output = sources.compile(&project.solc_config.settings, &project.paths)?; + // write all artifacts + if !project.no_artifacts { + // TOD get the artifact paths back + let artifacts = T::on_output(&output.contracts, &project.paths)?; + } + // get all cached artifacts let cached_artifacts = cache.finish()?; @@ -328,7 +334,7 @@ struct Cache<'a, T: ArtifactOutput> { /// project paths paths: &'a ProjectPathsConfig, /// all files that were filtered because they haven't changed - filtered: Sources, + filtered: HashMap)>, /// the corresponding cache entries for all sources that were deemed to be dirty dirty_entries: HashMap)>, /// the file hashes @@ -377,6 +383,18 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { Ok(()) } + /// inserts the filtered source with the fiven version + fn insert_filtered_source(&mut self, file: PathBuf, source: Source, version: Version) { + match self.filtered.entry(file) { + Entry::Occupied(mut entry) => { + entry.get_mut().1.insert(version); + } + Entry::Vacant(entry) => { + entry.insert((source, HashSet::from([version]))); + } + } + } + /// Returns only those sources that /// - are new /// - were changed @@ -398,13 +416,12 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { version: &Version, ) -> Option<(PathBuf, Source)> { if !self.is_dirty(&file, version) && - self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version)) + self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, &version)) { - self.filtered.insert(file, source); + self.insert_filtered_source(file, source, version.clone()); None } else { self.insert_new_cache_entry(&file, &source, version.clone()).unwrap(); - Some((file, source)) } } @@ -527,22 +544,20 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { /// rebuilds a new [`SolFileCache`] and writes 
it to disk /// /// Returns all cached artifacts - fn finish(self) -> Result> { + fn finish( + self, // TODO needs the artifact of the outpur + ) -> Result> { match self { ArtifactsCache::Ephemeral => Ok(Default::default()), ArtifactsCache::Cached(cache) => { - let Cache { cache, cached_artifacts, dirty_entries: _, filtered, edges: _, .. } = - cache; - // rebuild the cache file with all compiled contracts (dirty sources), and filtered - // sources (clean) - - let cache_entries = cache - .files - .into_iter() - .filter(|(path, _)| filtered.contains_key(path)) - .collect(); - - let _sol_cache = SolFilesCache::new(cache_entries); + let Cache { + mut cache, cached_artifacts, dirty_entries, filtered, edges: _, .. + } = cache; + + // keep only those files that were previously filtered (not dirty, reused) + cache.retain(filtered.iter().map(|(p, (_, v))| (p, v))); + + // TODO extend the cache with the new artifacts Ok(cached_artifacts) } diff --git a/ethers-solc/src/config.rs b/ethers-solc/src/config.rs index ba2e26dd0..cf832c836 100644 --- a/ethers-solc/src/config.rs +++ b/ethers-solc/src/config.rs @@ -1,5 +1,8 @@ use crate::{ - artifacts::{CompactContract, CompactContractRef, Contract, Settings}, + artifacts::{ + CompactContract, CompactContractRef, Contract, Settings, VersionedContract, + VersionedContracts, + }, cache::SOLIDITY_FILES_CACHE_FILENAME, error::{Result, SolcError, SolcIoError}, hh::HardhatArtifact, @@ -8,6 +11,7 @@ use crate::{ utils, CompilerOutput, Source, Sources, }; use ethers_core::{abi::Abi, types::Bytes}; +use semver::Version; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ collections::BTreeMap, @@ -442,196 +446,6 @@ impl SolcConfigBuilder { } } -pub type Artifacts = BTreeMap>; - -pub trait Artifact { - /// Returns the artifact's `Abi` and bytecode - fn into_inner(self) -> (Option, Option); - - /// Turns the artifact into a container type for abi, bytecode and deployed bytecode - fn into_compact_contract(self) -> 
CompactContract; - - /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode - fn into_parts(self) -> (Option, Option, Option); -} - -impl> Artifact for T { - fn into_inner(self) -> (Option, Option) { - let artifact = self.into_compact_contract(); - (artifact.abi, artifact.bin.and_then(|bin| bin.into_bytes())) - } - - fn into_compact_contract(self) -> CompactContract { - self.into() - } - - fn into_parts(self) -> (Option, Option, Option) { - self.into_compact_contract().into_parts() - } -} - -pub trait ArtifactOutput { - /// How Artifacts are stored - type Artifact: Artifact + DeserializeOwned; - - /// Handle the compiler output. - fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()>; - - /// Returns the file name for the contract's artifact - fn output_file_name(name: impl AsRef) -> PathBuf { - format!("{}.json", name.as_ref()).into() - } - - /// Returns the path to the contract's artifact location based on the contract's file and name - /// - /// This returns `contract.sol/contract.json` by default - fn output_file(contract_file: impl AsRef, name: impl AsRef) -> PathBuf { - let name = name.as_ref(); - contract_file - .as_ref() - .file_name() - .map(Path::new) - .map(|p| p.join(Self::output_file_name(name))) - .unwrap_or_else(|| Self::output_file_name(name)) - } - - /// The inverse of `contract_file_name` - /// - /// Expected to return the solidity contract's name derived from the file path - /// `sources/Greeter.sol` -> `Greeter` - fn contract_name(file: impl AsRef) -> Option { - file.as_ref().file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) - } - - /// Whether the corresponding artifact of the given contract file and name exists - fn output_exists( - contract_file: impl AsRef, - name: impl AsRef, - root: impl AsRef, - ) -> bool { - root.as_ref().join(Self::output_file(contract_file, name)).exists() - } - - fn read_cached_artifact(path: impl AsRef) -> Result { - let path = 
path.as_ref(); - let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?; - let file = io::BufReader::new(file); - Ok(serde_json::from_reader(file)?) - } - - /// Read the cached artifacts from disk - fn read_cached_artifacts(files: I) -> Result> - where - I: IntoIterator, - T: Into, - { - let mut artifacts = BTreeMap::default(); - for path in files.into_iter() { - let path = path.into(); - let artifact = Self::read_cached_artifact(&path)?; - artifacts.insert(path, artifact); - } - Ok(artifacts) - } - - /// Convert a contract to the artifact type - fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact; - - /// Convert the compiler output into a set of artifacts - fn output_to_artifacts(output: CompilerOutput) -> Artifacts { - output - .contracts - .into_iter() - .map(|(file, contracts)| { - let contracts = contracts - .into_iter() - .map(|(name, c)| { - let contract = Self::contract_to_artifact(&file, &name, c); - (name, contract) - }) - .collect(); - (file, contracts) - }) - .collect() - } -} - -/// An Artifacts implementation that uses a compact representation -/// -/// Creates a single json artifact with -/// ```json -/// { -/// "abi": [], -/// "bin": "...", -/// "runtime-bin": "..." 
-/// } -/// ``` -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -pub struct MinimalCombinedArtifacts; - -impl ArtifactOutput for MinimalCombinedArtifacts { - type Artifact = CompactContract; - - fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> { - fs::create_dir_all(&layout.artifacts) - .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; - for (file, contracts) in output.contracts.iter() { - for (name, contract) in contracts { - let artifact = Self::output_file(file, name); - let file = layout.artifacts.join(artifact); - if let Some(parent) = file.parent() { - fs::create_dir_all(parent).map_err(|err| { - SolcError::msg(format!( - "Failed to create artifact parent folder \"{}\": {}", - parent.display(), - err - )) - })?; - } - let min = CompactContractRef::from(contract); - fs::write(&file, serde_json::to_vec_pretty(&min)?) - .map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact { - Self::Artifact::from(contract) - } -} - -/// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also -/// supports reading hardhat artifacts if an initial attempt to deserialize an artifact failed -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -pub struct MinimalCombinedArtifactsHardhatFallback; - -impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { - type Artifact = CompactContract; - - fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> { - MinimalCombinedArtifacts::on_output(output, layout) - } - - fn read_cached_artifact(path: impl AsRef) -> Result { - let path = path.as_ref(); - let content = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?; - if let Ok(a) = serde_json::from_str(&content) { - Ok(a) - } else { - tracing::error!("Failed to deserialize compact artifact"); - tracing::trace!("Fallback to hardhat artifact 
deserialization"); - let artifact = serde_json::from_str::(&content)?; - tracing::trace!("successfully deserialized hardhat artifact"); - Ok(artifact.into_compact_contract()) - } - } - - fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact { - MinimalCombinedArtifacts::contract_to_artifact(file, name, contract) - } -} - /// Helper struct for serializing `--allow-paths` arguments to Solc /// /// From the [Solc docs](https://docs.soliditylang.org/en/v0.8.9/using-the-compiler.html#base-path-and-import-remapping): diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 1c89e4459..5ff4b1893 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -8,6 +8,7 @@ use std::collections::btree_map::Entry; pub mod cache; pub mod hh; +pub mod output; mod resolver; pub use hh::{HardhatArtifact, HardhatArtifacts}; pub use resolver::Graph; diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/output.rs new file mode 100644 index 000000000..eeeadf962 --- /dev/null +++ b/ethers-solc/src/output.rs @@ -0,0 +1,282 @@ +//! 
Output artifact handling + +use crate::{ + artifacts::{ + CompactContract, CompactContractRef, Contract, VersionedContract, VersionedContracts, + }, + error::Result, + HardhatArtifact, ProjectPathsConfig, SolcError, +}; +use ethers_core::{abi::Abi, types::Bytes}; +use semver::Version; +use serde::{de::DeserializeOwned, Serialize}; +use std::{ + collections::BTreeMap, + fs, io, + path::{Path, PathBuf}, +}; + +/// Bundled Artifacts: `file -> (contract name -> (Artifact, Version))` +pub type Artifacts = BTreeMap>>; + +pub trait Artifact { + /// Returns the artifact's `Abi` and bytecode + fn into_inner(self) -> (Option, Option); + + /// Turns the artifact into a container type for abi, bytecode and deployed bytecode + fn into_compact_contract(self) -> CompactContract; + + /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode + fn into_parts(self) -> (Option, Option, Option); +} + +impl> Artifact for T { + fn into_inner(self) -> (Option, Option) { + let artifact = self.into_compact_contract(); + (artifact.abi, artifact.bin.and_then(|bin| bin.into_bytes())) + } + + fn into_compact_contract(self) -> CompactContract { + self.into() + } + + fn into_parts(self) -> (Option, Option, Option) { + self.into_compact_contract().into_parts() + } +} + +pub trait ArtifactOutput { + /// How Artifacts are stored + type Artifact: Artifact + DeserializeOwned; + + /// Handle the compiler output. 
+ fn on_output(output: &VersionedContracts, layout: &ProjectPathsConfig) -> Result<()>; + + /// Returns the file name for the contract's artifact + /// `Greeter.0.8.11.json` + fn output_file_name(name: impl AsRef) -> PathBuf { + format!("{}.json", name.as_ref()).into() + } + + /// Returns the file name for the contract's artifact and the given version + /// `Greeter.0.8.11.json` + fn versioned_output_file_name(name: impl AsRef, version: &Version) -> PathBuf { + format!("{}.{}.{}.{}.json", name.as_ref(), version.major, version.minor, version.patch) + .into() + } + + /// Returns the path to the contract's artifact location based on the contract's file and name + /// + /// This returns `contract.sol/contract.json` by default + fn output_file(contract_file: impl AsRef, name: impl AsRef) -> PathBuf { + let name = name.as_ref(); + contract_file + .as_ref() + .file_name() + .map(Path::new) + .map(|p| p.join(Self::output_file_name(name))) + .unwrap_or_else(|| Self::output_file_name(name)) + } + + /// Returns the path to the contract's artifact location based on the contract's file, name and + /// version + /// + /// This returns `contract.sol/contract.0.8.11.json` by default + fn versioned_output_file( + contract_file: impl AsRef, + name: impl AsRef, + version: &Version, + ) -> PathBuf { + let name = name.as_ref(); + contract_file + .as_ref() + .file_name() + .map(Path::new) + .map(|p| p.join(Self::versioned_output_file_name(name, version))) + .unwrap_or_else(|| Self::versioned_output_file_name(name, version)) + } + + /// The inverse of `contract_file_name` + /// + /// Expected to return the solidity contract's name derived from the file path + /// `sources/Greeter.sol` -> `Greeter` + fn contract_name(file: impl AsRef) -> Option { + // TODO support version + file.as_ref().file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) + } + + /// Whether the corresponding artifact of the given contract file and name exists + fn output_exists( + contract_file: impl AsRef, + 
name: impl AsRef, + root: impl AsRef, + ) -> bool { + root.as_ref().join(Self::output_file(contract_file, name)).exists() + } + + fn read_cached_artifact(path: impl AsRef) -> Result { + let path = path.as_ref(); + let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?; + let file = io::BufReader::new(file); + Ok(serde_json::from_reader(file)?) + } + + /// Read the cached artifacts from disk + fn read_cached_artifacts(files: I) -> Result> + where + I: IntoIterator, + T: Into, + { + let mut artifacts = BTreeMap::default(); + for path in files.into_iter() { + let path = path.into(); + let artifact = Self::read_cached_artifact(&path)?; + artifacts.insert(path, artifact); + } + Ok(artifacts) + } + + /// Convert a contract to the artifact type + fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact; + + /// Convert a contract to the artifact type + fn contract_to_versioned_artifact( + file: &str, + name: &str, + version: &Version, + contract: Contract, + ) -> Self::Artifact { + todo!() + } + + fn versioned_contracts_to_artifacts( + file: &str, + name: &str, + contracts: Vec, + ) -> Vec<(Self::Artifact, Version)> { + todo!() + } + + /// Convert the compiler output into a set of artifacts + fn output_to_artifacts(contracts: VersionedContracts) -> Artifacts { + contracts + .into_iter() + .map(|(file, contracts)| { + let contracts = contracts + .into_iter() + .map(|(name, versioned)| { + let contracts = + Self::versioned_contracts_to_artifacts(&file, &name, versioned); + (name, contracts) + }) + .collect(); + (file, contracts) + }) + .collect() + } +} + +/// An Artifacts implementation that uses a compact representation +/// +/// Creates a single json artifact with +/// ```json +/// { +/// "abi": [], +/// "bin": "...", +/// "runtime-bin": "..." 
+/// } +/// ``` +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub struct MinimalCombinedArtifacts; + +impl ArtifactOutput for MinimalCombinedArtifacts { + type Artifact = CompactContract; + + fn on_output( + contracts: &VersionedContracts, + layout: &ProjectPathsConfig, + ) -> Result> { + fs::create_dir_all(&layout.artifacts) + .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; + let mut artifacts = Artifacts::new(); + + for (file, contracts) in contracts.iter() { + for (name, versioned_contracts) in contracts { + let mut contracts = Vec::with_capacity(versioned_contracts.len()); + + // check if the same contract compiled with multiple solc versions + for contract in versioned_contracts { + let artifact_path = if versioned_contracts.len() > 1 { + Self::versioned_output_file(file, name, &contract.version) + } else { + Self::output_file(file, name) + }; + let artifact = write_contract::( + &layout.artifacts.join(&artifact_path), + &contract.contract, + )?; + contracts.push((artifact, artifact_path)); + } + } + } + + Ok(artifacts) + } + + fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact { + Self::Artifact::from(contract) + } +} + +/// Writes the given +fn write_contract(out: &Path, contract: &Contract) -> Result +where + C: From<&Contract> + Serialize, +{ + if let Some(parent) = out.parent() { + fs::create_dir_all(parent).map_err(|err| { + SolcError::msg(format!( + "Failed to create artifact parent folder \"{}\": {}", + parent.display(), + err + )) + })?; + } + let c = C::from(contract); + fs::write(out, serde_json::to_vec_pretty(&c)?).map_err(|err| SolcError::io(err, out))?; + Ok(c) +} + +/// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also +/// supports reading hardhat artifacts if an initial attempt to deserialize an artifact failed +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +pub struct MinimalCombinedArtifactsHardhatFallback; + +impl 
ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { + type Artifact = CompactContract; + + fn on_output( + output: &VersionedContracts, + layout: &ProjectPathsConfig, + ) -> Result> { + MinimalCombinedArtifacts::on_output(output, layout) + } + + fn read_cached_artifact(path: impl AsRef) -> Result { + let path = path.as_ref(); + let content = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?; + if let Ok(a) = serde_json::from_str(&content) { + Ok(a) + } else { + tracing::error!("Failed to deserialize compact artifact"); + tracing::trace!("Fallback to hardhat artifact deserialization"); + let artifact = serde_json::from_str::(&content)?; + tracing::trace!("successfully deserialized hardhat artifact"); + Ok(artifact.into_compact_contract()) + } + } + + fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact { + MinimalCombinedArtifacts::contract_to_artifact(file, name, contract) + } +} From 20ca411cbe8f5adcde7bef48bad4bf7386e77618 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 14:56:44 +0100 Subject: [PATCH 12/82] docs: more docs --- ethers-solc/src/artifacts.rs | 18 +++++++++++----- ethers-solc/src/compile/project.rs | 1 + ethers-solc/src/output.rs | 33 +++++++++++++++++++++++++++--- 3 files changed, 44 insertions(+), 8 deletions(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index 2dce28a7c..01ac663a5 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -22,16 +22,24 @@ use crate::{ use ethers_core::abi::Address; use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; +/// solidity files are up of multiple `source units`, a solidity contract is such a `source unit`, +/// therefore a solidity file can contain multiple contracts: (1-N*) relationship. 
+/// +/// This types represents this mapping as `file name -> (contract name -> T)`, where the generic is +/// intended to represent contract specific information, like [`Contract`] itself, See [`Contracts`] +pub type FileContracts = BTreeMap>; + +/// file -> (contract name -> Contract) +pub type Contracts = FileContracts; + +/// file -> [(contract name -> Contract + solc version)] +pub type VersionedContracts = FileContracts>; + /// An ordered list of files and their source pub type Sources = BTreeMap; pub type VersionedSources = BTreeMap; -/// file -> [contract name] -pub type Contracts = BTreeMap>; - -/// file -> [(contract name + version)] -pub type VersionedContracts = BTreeMap>>; /// Input type `solc` expects #[derive(Clone, Debug, Serialize, Deserialize)] diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 2e59f51ce..281645110 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -79,6 +79,7 @@ use crate::{ }, cache::CacheEntry, error::Result, + output::ArtifactOutput, resolver::GraphEdges, utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, PathMap, Project, ProjectPathsConfig, SolFilesCache, SolcConfig, Source, Sources, diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/output.rs index eeeadf962..5e4ccecdc 100644 --- a/ethers-solc/src/output.rs +++ b/ethers-solc/src/output.rs @@ -16,9 +16,21 @@ use std::{ path::{Path, PathBuf}, }; +/// Represents a written [`crate::Contract`] artifact +#[derive(Debug, Clone)] +pub struct WrittenArtifact { + /// The Artifact that was written + pub artifact: T, + /// path to the file where the `artifact` was written to + pub file: PathBuf, + /// `solc` version that produced this artifact + pub version: Version, +} + /// Bundled Artifacts: `file -> (contract name -> (Artifact, Version))` pub type Artifacts = BTreeMap>>; +/// A trait representation for a [`crate::Contract`] artifact pub trait Artifact { /// Returns the artifact's `Abi` 
and bytecode fn into_inner(self) -> (Option, Option); @@ -45,12 +57,27 @@ impl> Artifact for T { } } +/// Handler invoked with the output of `solc` +/// +/// Implementers of this trait are expected to take care of [`crate::Contract`] to +/// [`crate::ArtifactOutput::Artifact`] conversion and how that `Artifact` type is stored on disk, +/// this includes artifact file location and naming. +/// +/// Depending on the [`crate::Project`] contracts and their compatible versions, +/// [`crate::ProjectCompiler::compile()`] may invoke different `solc` executables on the same +/// solidity file leading to multiple [`crate::CompilerOutput`]s for the same `.sol` file. +/// In addition to the `solidity file` to `contract` relationship (1-N*) +/// [`crate::VersionedContracts`] also tracks the `contract` to (`artifact` + `solc version`) +/// relationship (1-N+). pub trait ArtifactOutput { - /// How Artifacts are stored + /// Represents the artifact that will be stored for a `Contract` type Artifact: Artifact + DeserializeOwned; - /// Handle the compiler output. - fn on_output(output: &VersionedContracts, layout: &ProjectPathsConfig) -> Result<()>; + /// Handle the aggregated set of compiled contracts from the solc [`crate::CompilerOutput`]. + /// + /// This will be invoked with all aggregated contracts from (multiple) solc `CompilerOutput`. 
+ /// See [`crate::AggregatedCompilerOutput`] + fn on_output(contracts: &VersionedContracts, layout: &ProjectPathsConfig) -> Result<()>; /// Returns the file name for the contract's artifact /// `Greeter.0.8.11.json` From 117b66c908780b476b6f39f11d10840a8bf41b83 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 15:13:20 +0100 Subject: [PATCH 13/82] feat: implement new output handler --- ethers-solc/src/artifacts.rs | 7 +- ethers-solc/src/output.rs | 169 ++++++++++++++--------------------- 2 files changed, 71 insertions(+), 105 deletions(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index 01ac663a5..8e949fe6d 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -27,20 +27,19 @@ use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; /// /// This types represents this mapping as `file name -> (contract name -> T)`, where the generic is /// intended to represent contract specific information, like [`Contract`] itself, See [`Contracts`] -pub type FileContracts = BTreeMap>; +pub type FileToContractsMap = BTreeMap>; /// file -> (contract name -> Contract) -pub type Contracts = FileContracts; +pub type Contracts = FileToContractsMap; /// file -> [(contract name -> Contract + solc version)] -pub type VersionedContracts = FileContracts>; +pub type VersionedContracts = FileToContractsMap>; /// An ordered list of files and their source pub type Sources = BTreeMap; pub type VersionedSources = BTreeMap; - /// Input type `solc` expects #[derive(Clone, Debug, Serialize, Deserialize)] pub struct CompilerInput { diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/output.rs index 5e4ccecdc..9c202de1e 100644 --- a/ethers-solc/src/output.rs +++ b/ethers-solc/src/output.rs @@ -2,7 +2,8 @@ use crate::{ artifacts::{ - CompactContract, CompactContractRef, Contract, VersionedContract, VersionedContracts, + CompactContract, CompactContractRef, Contract, FileToContractsMap, VersionedContract, 
+ VersionedContracts, }, error::Result, HardhatArtifact, ProjectPathsConfig, SolcError, @@ -27,8 +28,11 @@ pub struct WrittenArtifact { pub version: Version, } +/// Represents the written Artifacts +pub type WrittenArtifacts = FileToContractsMap>>; + /// Bundled Artifacts: `file -> (contract name -> (Artifact, Version))` -pub type Artifacts = BTreeMap>>; +pub type Artifacts = FileToContractsMap>; /// A trait representation for a [`crate::Contract`] artifact pub trait Artifact { @@ -71,23 +75,56 @@ impl> Artifact for T { /// relationship (1-N+). pub trait ArtifactOutput { /// Represents the artifact that will be stored for a `Contract` - type Artifact: Artifact + DeserializeOwned; + type Artifact: Artifact + DeserializeOwned + From; /// Handle the aggregated set of compiled contracts from the solc [`crate::CompilerOutput`]. /// /// This will be invoked with all aggregated contracts from (multiple) solc `CompilerOutput`. /// See [`crate::AggregatedCompilerOutput`] - fn on_output(contracts: &VersionedContracts, layout: &ProjectPathsConfig) -> Result<()>; + fn on_output( + contracts: &VersionedContracts, + layout: &ProjectPathsConfig, + ) -> Result> { + fs::create_dir_all(&layout.artifacts) + .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; + let mut artifacts = WrittenArtifacts::new(); + + for (file, contracts) in contracts.iter() { + for (name, versioned_contracts) in contracts { + let mut contracts = Vec::with_capacity(versioned_contracts.len()); + // check if the same contract compiled with multiple solc versions + for contract in versioned_contracts { + let artifact_path = if versioned_contracts.len() > 1 { + Self::output_file_versioned(file, name, &contract.version) + } else { + Self::output_file(file, name) + }; + let artifact = write_contract::( + &layout.artifacts.join(&artifact_path), + &contract.contract, + )?; + + contracts.push(WrittenArtifact { + artifact, + file: artifact_path, + version: contract.version.clone(), + 
}); + } + } + } + + Ok(artifacts) + } /// Returns the file name for the contract's artifact - /// `Greeter.0.8.11.json` + /// `Greeter.json` fn output_file_name(name: impl AsRef) -> PathBuf { format!("{}.json", name.as_ref()).into() } /// Returns the file name for the contract's artifact and the given version /// `Greeter.0.8.11.json` - fn versioned_output_file_name(name: impl AsRef, version: &Version) -> PathBuf { + fn output_file_name_versioned(name: impl AsRef, version: &Version) -> PathBuf { format!("{}.{}.{}.{}.json", name.as_ref(), version.major, version.minor, version.patch) .into() } @@ -109,7 +146,7 @@ pub trait ArtifactOutput { /// version /// /// This returns `contract.sol/contract.0.8.11.json` by default - fn versioned_output_file( + fn output_file_versioned( contract_file: impl AsRef, name: impl AsRef, version: &Version, @@ -119,8 +156,8 @@ pub trait ArtifactOutput { .as_ref() .file_name() .map(Path::new) - .map(|p| p.join(Self::versioned_output_file_name(name, version))) - .unwrap_or_else(|| Self::versioned_output_file_name(name, version)) + .map(|p| p.join(Self::output_file_name_versioned(name, version))) + .unwrap_or_else(|| Self::output_file_name_versioned(name, version)) } /// The inverse of `contract_file_name` @@ -128,7 +165,6 @@ pub trait ArtifactOutput { /// Expected to return the solidity contract's name derived from the file path /// `sources/Greeter.sol` -> `Greeter` fn contract_name(file: impl AsRef) -> Option { - // TODO support version file.as_ref().file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) } @@ -164,42 +200,8 @@ pub trait ArtifactOutput { } /// Convert a contract to the artifact type - fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact; - - /// Convert a contract to the artifact type - fn contract_to_versioned_artifact( - file: &str, - name: &str, - version: &Version, - contract: Contract, - ) -> Self::Artifact { - todo!() - } - - fn versioned_contracts_to_artifacts( - file: &str, - 
name: &str, - contracts: Vec, - ) -> Vec<(Self::Artifact, Version)> { - todo!() - } - - /// Convert the compiler output into a set of artifacts - fn output_to_artifacts(contracts: VersionedContracts) -> Artifacts { - contracts - .into_iter() - .map(|(file, contracts)| { - let contracts = contracts - .into_iter() - .map(|(name, versioned)| { - let contracts = - Self::versioned_contracts_to_artifacts(&file, &name, versioned); - (name, contracts) - }) - .collect(); - (file, contracts) - }) - .collect() + fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact { + Self::Artifact::from(contract) } } @@ -218,60 +220,6 @@ pub struct MinimalCombinedArtifacts; impl ArtifactOutput for MinimalCombinedArtifacts { type Artifact = CompactContract; - - fn on_output( - contracts: &VersionedContracts, - layout: &ProjectPathsConfig, - ) -> Result> { - fs::create_dir_all(&layout.artifacts) - .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; - let mut artifacts = Artifacts::new(); - - for (file, contracts) in contracts.iter() { - for (name, versioned_contracts) in contracts { - let mut contracts = Vec::with_capacity(versioned_contracts.len()); - - // check if the same contract compiled with multiple solc versions - for contract in versioned_contracts { - let artifact_path = if versioned_contracts.len() > 1 { - Self::versioned_output_file(file, name, &contract.version) - } else { - Self::output_file(file, name) - }; - let artifact = write_contract::( - &layout.artifacts.join(&artifact_path), - &contract.contract, - )?; - contracts.push((artifact, artifact_path)); - } - } - } - - Ok(artifacts) - } - - fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact { - Self::Artifact::from(contract) - } -} - -/// Writes the given -fn write_contract(out: &Path, contract: &Contract) -> Result -where - C: From<&Contract> + Serialize, -{ - if let Some(parent) = out.parent() { - 
fs::create_dir_all(parent).map_err(|err| { - SolcError::msg(format!( - "Failed to create artifact parent folder \"{}\": {}", - parent.display(), - err - )) - })?; - } - let c = C::from(contract); - fs::write(out, serde_json::to_vec_pretty(&c)?).map_err(|err| SolcError::io(err, out))?; - Ok(c) } /// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also @@ -285,7 +233,7 @@ impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { fn on_output( output: &VersionedContracts, layout: &ProjectPathsConfig, - ) -> Result> { + ) -> Result> { MinimalCombinedArtifacts::on_output(output, layout) } @@ -307,3 +255,22 @@ impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { MinimalCombinedArtifacts::contract_to_artifact(file, name, contract) } } + +/// Writes the given contract to the `out` path creating all parent directories +fn write_contract(out: &Path, contract: &Contract) -> Result +where + C: From<&Contract> + Serialize, +{ + if let Some(parent) = out.parent() { + fs::create_dir_all(parent).map_err(|err| { + SolcError::msg(format!( + "Failed to create artifact parent folder \"{}\": {}", + parent.display(), + err + )) + })?; + } + let c = C::from(contract); + fs::write(out, serde_json::to_vec_pretty(&c)?).map_err(|err| SolcError::io(err, out))?; + Ok(c) +} From 0f33e5734f345d68b9d31e99030554b475230bd0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 15:23:44 +0100 Subject: [PATCH 14/82] feat: integrate cached files in new compile pipeline --- ethers-solc/src/compile/project.rs | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 281645110..afb80987e 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -79,7 +79,7 @@ use crate::{ }, cache::CacheEntry, error::Result, - output::ArtifactOutput, + output::{ArtifactOutput, WrittenArtifacts}, 
resolver::GraphEdges, utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, PathMap, Project, ProjectPathsConfig, SolFilesCache, SolcConfig, Source, Sources, @@ -163,16 +163,19 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { let output = sources.compile(&project.solc_config.settings, &project.paths)?; // write all artifacts - if !project.no_artifacts { - // TOD get the artifact paths back - let artifacts = T::on_output(&output.contracts, &project.paths)?; - } + let written_artifacts = if !project.no_artifacts { + T::on_output(&output.contracts, &project.paths)? + } else { + Default::default() + }; - // get all cached artifacts - let cached_artifacts = cache.finish()?; + // if caching was enabled, this will write to disk and get the artifacts that weren't + // compiled but reused + let cached_artifacts = cache.finish(&written_artifacts)?; Ok(ProjectCompileOutput2 { output, + written_artifacts, cached_artifacts, ignored_error_codes: project.ignored_error_codes.clone(), }) @@ -283,6 +286,8 @@ pub struct ProjectCompileOutput2 { /// /// See [`CompilerSources::compile`] output: AggregatedCompilerOutput, + /// all artifact files from `output` that were written + written_artifacts: WrittenArtifacts, /// All artifacts that were read from cache cached_artifacts: BTreeMap, ignored_error_codes: Vec, @@ -542,11 +547,14 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { } } - /// rebuilds a new [`SolFileCache`] and writes it to disk + /// Consumes the `Cache`, rebuilds the [`SolFileCache`] by merging all artifacts that were + /// filtered out in the previous step (`Cache::filtered`) and the artifacts that were just + /// written to disk `written_artifacts`. /// - /// Returns all cached artifacts + /// Returns all the _cached_ artifacts. 
fn finish( - self, // TODO needs the artifact of the outpur + self, + written_artifacts: &WrittenArtifacts, ) -> Result> { match self { ArtifactsCache::Ephemeral => Ok(Default::default()), From 6c6b5e12572d97902b106c6b11eddbc5d7e501c6 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 16:39:24 +0100 Subject: [PATCH 15/82] refactor: more cache refactor --- ethers-solc/src/cache.rs | 11 ++++++++--- ethers-solc/src/compile/project.rs | 2 +- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index a595396cf..cd430d0da 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -258,6 +258,10 @@ impl SolFilesCache { Ok(artifacts) } + /// Retains only the `CacheEntry` specified by the file + version combination. + /// + /// In other words, only keep those cache entries with the paths (keys) that the iterator yields + /// and only keep the versions in the cache entry that the version iterator yields. pub(crate) fn retain(&mut self, files: I) where I: IntoIterator, @@ -265,10 +269,11 @@ impl SolFilesCache { { } - pub(crate) fn extend(&mut self, files: I) + /// Inserts the provided cache entries, if there is an existing `CacheEntry` it will be updated + /// but versions will be merged. 
+ pub(crate) fn extend(&mut self, entries: I) where - I: IntoIterator, - V: IntoIterator, + I: IntoIterator, { } } diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index afb80987e..93fbc498d 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -466,7 +466,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { return true } - // all things match + // all things match, can be reused return false } } From d5810f380d8897054683c11db0d93812e9e4cf44 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 20:33:26 +0100 Subject: [PATCH 16/82] docs: more docs --- ethers-solc/src/compile/project.rs | 20 ++++++++++++++++++++ ethers-solc/src/output.rs | 3 +++ 2 files changed, 23 insertions(+) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 93fbc498d..601bd138d 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -342,6 +342,14 @@ struct Cache<'a, T: ArtifactOutput> { /// all files that were filtered because they haven't changed filtered: HashMap)>, /// the corresponding cache entries for all sources that were deemed to be dirty + /// + /// `CacheEntry` are grouped by their solidity file. + /// During preprocessing the `artifacts` field of a new `CacheEntry` is left blank, because in + /// order to determine the artifacts of the solidity file, the file needs to be compiled first. 
+ /// Only after the `CompilerOutput` is received and all compiled contracts are handled, see + /// [`crate::ArtifactOutput::on_output()`] all artifacts, their disk paths, are determined and + /// can be populated before the updated [`crate::SolFilesCache`] is finally written to disk, + /// see [`Cache::finish()`] dirty_entries: HashMap)>, /// the file hashes content_hashes: HashMap, @@ -566,6 +574,18 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // keep only those files that were previously filtered (not dirty, reused) cache.retain(filtered.iter().map(|(p, (_, v))| (p, v))); + // add the artifacts to the cache entries, this way we can keep a mapping from + // solidity file to its artifacts + + dirty_entries.into_iter().map(|(file, (mut entry, versions))| { + + // TODO need reshuffling of source units to actual paths + // if let Some(contracts) = written_artifacts.get(&file) { + // + // + // } + }); + // TODO extend the cache with the new artifacts Ok(cached_artifacts) diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/output.rs index 9c202de1e..d26fdb8d4 100644 --- a/ethers-solc/src/output.rs +++ b/ethers-solc/src/output.rs @@ -90,6 +90,7 @@ pub trait ArtifactOutput { let mut artifacts = WrittenArtifacts::new(); for (file, contracts) in contracts.iter() { + let mut entries = BTreeMap::new(); for (name, versioned_contracts) in contracts { let mut contracts = Vec::with_capacity(versioned_contracts.len()); // check if the same contract compiled with multiple solc versions @@ -110,7 +111,9 @@ pub trait ArtifactOutput { version: contract.version.clone(), }); } + entries.insert(name.to_string(), contracts); } + artifacts.insert(file.to_string(), entries); } Ok(artifacts) From ad82ba680987cec20343a84b6bfa72deae9d6fc0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 20:50:22 +0100 Subject: [PATCH 17/82] feat: add source name mapping --- ethers-solc/src/cache.rs | 6 ++-- ethers-solc/src/compile/project.rs | 46 
+++++++++++++++++++++--------- ethers-solc/src/lib.rs | 8 +++--- 3 files changed, 40 insertions(+), 20 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index cd430d0da..84adf9b85 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -446,7 +446,7 @@ impl CacheEntry { /// /// See also [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) #[derive(Debug, Default)] -pub struct PathMap { +pub struct SourceUnitNameMap { /// all libraries to the source set while keeping track of their actual disk path /// (`contracts/contract.sol` -> `/Users/.../contracts.sol`) pub source_unit_name_to_path: HashMap, @@ -454,7 +454,7 @@ pub struct PathMap { pub path_to_source_unit_name: HashMap, } -impl PathMap { +impl SourceUnitNameMap { fn apply_mappings(sources: Sources, mappings: &HashMap) -> Sources { sources .into_iter() @@ -480,7 +480,7 @@ impl PathMap { .collect() } - pub fn extend(&mut self, other: PathMap) { + pub fn extend(&mut self, other: SourceUnitNameMap) { self.source_unit_name_to_path.extend(other.source_unit_name_to_path); self.path_to_source_unit_name.extend(other.path_to_source_unit_name); } diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 601bd138d..55387e9cf 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -81,12 +81,12 @@ use crate::{ error::Result, output::{ArtifactOutput, WrittenArtifacts}, resolver::GraphEdges, - utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, PathMap, Project, - ProjectPathsConfig, SolFilesCache, SolcConfig, Source, Sources, + utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, Project, ProjectPathsConfig, + SolFilesCache, SolcConfig, Source, SourceUnitNameMap, Sources, }; use semver::Version; use std::{ - collections::{hash_map, hash_map::Entry, BTreeMap, HashMap, HashSet}, + collections::{btree_map::BTreeMap, hash_map, hash_map::Entry, 
BTreeMap, HashMap, HashSet}, path::{Path, PathBuf}, }; @@ -155,12 +155,19 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// ``` pub fn compile(self) -> Result> { let Self { edges, project, mut sources } = self; + // the map that keeps track of the mapping of resolved solidity file paths -> source unit + // names + let mut source_unit_map = SourceUnitNameMap::default(); let mut cache = ArtifactsCache::new(project, &edges)?; - // retain and compile only dirty sources - sources = sources.filtered(&mut cache); - let output = sources.compile(&project.solc_config.settings, &project.paths)?; + sources = sources + .filtered(&mut cache) + .set_source_unit_names(&project.paths, &mut source_unit_map); + + let mut output = sources.compile(&project.solc_config.settings, &project.paths)?; + + // TODO reapply the mappings to the contracts // write all artifacts let written_artifacts = if !project.no_artifacts { @@ -217,6 +224,26 @@ impl CompilerSources { } } + /// Sets the correct source unit names for all sources + fn set_source_unit_names( + self, + paths: &ProjectPathsConfig, + names: &mut SourceUnitNameMap, + ) -> Self { + fn set( + sources: VersionedSources, + paths: &ProjectPathsConfig, + cache: &mut SourceUnitNameMap, + ) -> VersionedSources { + todo!() + } + + match self { + CompilerSources::Sequential(s) => CompilerSources::Sequential(set(s, paths, names)), + CompilerSources::Parallel(s, j) => CompilerSources::Parallel(set(s, paths, names), j), + } + } + /// Compiles all the files with `Solc` fn compile( self, @@ -248,11 +275,6 @@ fn compile_sequential( solc.as_ref().display() ); - let source_unit_map = PathMap::default(); - // replace absolute path with source name to make solc happy - // TODO use correct source unit path - let sources = source_unit_map.set_source_names(sources); - let input = CompilerInput::with_sources(sources) .settings(settings.clone()) .normalize_evm_version(&version) @@ -262,8 +284,6 @@ fn compile_sequential( let output = 
solc.compile(&input)?; tracing::trace!("compiled input, output has error: {}", output.has_error()); - // TODO reapply the paths - aggregated.extend(version, output); } Ok(aggregated) diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 5ff4b1893..4dad819a7 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -33,7 +33,7 @@ pub mod utils; use crate::{ artifacts::Sources, - cache::PathMap, + cache::SourceUnitNameMap, error::{SolcError, SolcIoError}, }; use error::Result; @@ -324,7 +324,7 @@ impl Project { tracing::trace!("compile sources in parallel using {} solc jobs", self.solc_jobs); let mut compiled = ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone()); - let mut paths = PathMap::default(); + let mut paths = SourceUnitNameMap::default(); let mut jobs = Vec::with_capacity(sources_by_version.len()); let mut all_sources = BTreeMap::default(); @@ -475,7 +475,7 @@ impl Project { tracing::trace!("start preprocessing {} sources files", sources.len()); // keeps track of source names / disk paths - let mut paths = PathMap::default(); + let mut paths = SourceUnitNameMap::default(); tracing::trace!("start resolving libraries"); for (import, (source, path)) in self.resolved_libraries(&sources)? 
{ @@ -570,7 +570,7 @@ impl Project { enum PreprocessedJob { Unchanged(BTreeMap), - Items(Sources, PathMap, BTreeMap), + Items(Sources, SourceUnitNameMap, BTreeMap), } pub struct ProjectBuilder { From e26ede9889605fc3fd114cc35071a4f294673c23 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 21:28:18 +0100 Subject: [PATCH 18/82] feat: implement new parallel solc --- ethers-solc/src/compile/project.rs | 53 +++++++++++++++++++++++++++--- 1 file changed, 48 insertions(+), 5 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 55387e9cf..7892d9dd8 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -84,6 +84,7 @@ use crate::{ utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, Project, ProjectPathsConfig, SolFilesCache, SolcConfig, Source, SourceUnitNameMap, Sources, }; +use rayon::prelude::*; use semver::Version; use std::{ collections::{btree_map::BTreeMap, hash_map, hash_map::Entry, BTreeMap, HashMap, HashSet}, @@ -289,13 +290,46 @@ fn compile_sequential( Ok(aggregated) } +/// compiles the input set using `num_jobs` threads fn compile_parallel( - _input: VersionedSources, - _jobs: usize, - _settings: &Settings, - _paths: &ProjectPathsConfig, + input: VersionedSources, + num_jobs: usize, + settings: &Settings, + paths: &ProjectPathsConfig, ) -> Result { - todo!() + debug_assert!(num_jobs > 1); + tracing::trace!("compile sources in parallel using {} solc jobs", num_jobs); + + let mut jobs = Vec::with_capacity(input.len()); + for (solc, (version, sources)) in input { + if sources.is_empty() { + // nothing to compile + continue + } + + let job = CompilerInput::with_sources(sources) + .settings(settings.clone()) + .normalize_evm_version(&version) + .with_remappings(paths.remappings.clone()); + + jobs.push((solc, version, job)) + } + + // start a rayon threadpool that will execute all `Solc::compile()` processes + let pool = 
rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap(); + let outputs = pool.install(move || { + jobs.into_par_iter() + .map(|(solc, version, input)| { + tracing::trace!("calling solc `{}` with {} sources", version, input.sources.len()); + solc.compile(&input).map(|output| (version, output)) + }) + .collect::>>() + })?; + + let mut aggregated = AggregatedCompilerOutput::default(); + aggregated.extend_all(outputs); + + Ok(aggregated) } /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still @@ -331,6 +365,15 @@ impl AggregatedCompilerOutput { self.contracts.is_empty() } + fn extend_all(&mut self, out: I) + where + I: IntoIterator, + { + for (v, o) in out { + self.extend(v, o) + } + } + /// adds a new `CompilerOutput` to the aggregated output fn extend(&mut self, version: Version, output: CompilerOutput) { self.errors.extend(output.errors); From 270a60bb572fceb5cfbd9ae050985a30486a7182 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 21:32:46 +0100 Subject: [PATCH 19/82] refactor: do a little cleanup --- ethers-solc/src/lib.rs | 284 ++--------------------------------------- 1 file changed, 9 insertions(+), 275 deletions(-) diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 4dad819a7..e6fc29bd9 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -4,11 +4,13 @@ pub mod artifacts; pub mod sourcemap; pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion}; -use std::collections::btree_map::Entry; +use std::collections::btree_map::{BTreeMap, Entry}; pub mod cache; pub mod hh; -pub mod output; +mod output; +pub use output::*; + mod resolver; pub use hh::{HardhatArtifact, HardhatArtifacts}; pub use resolver::Graph; @@ -136,37 +138,6 @@ impl Project { self.solc_jobs = jobs; } - #[tracing::instrument(skip_all, name = "Project::write_cache_file")] - fn write_cache_file( - &self, - sources: Sources, - artifacts: Vec<(PathBuf, Vec)>, - ) -> Result<()> { - 
tracing::trace!("inserting {} sources in file cache", sources.len()); - let mut cache = SolFilesCache::builder() - .root(&self.paths.root) - .solc_config(self.solc_config.clone()) - .insert_files(sources, Some(self.paths.cache.clone()))?; - tracing::trace!("source files inserted"); - - // add the artifacts for each file to the cache entry - for (file, artifacts) in artifacts { - if let Some(entry) = cache.files.get_mut(&file) { - entry.artifacts = artifacts; - } - } - - if let Some(cache_dir) = self.paths.cache.parent() { - tracing::trace!("creating cache file parent directory \"{}\"", cache_dir.display()); - fs::create_dir_all(cache_dir).map_err(|err| SolcError::io(err, cache_dir))? - } - - tracing::trace!("writing cache file to \"{}\"", self.paths.cache.display()); - cache.write(&self.paths.cache)?; - - Ok(()) - } - /// Returns all sources found under the project's configured sources path #[tracing::instrument(skip_all, fields(name = "sources"))] pub fn sources(&self) -> Result { @@ -197,29 +168,6 @@ impl Project { println!("cargo:rerun-if-changed={}", self.paths.sources.display()) } - /// Attempts to read all unique libraries that are used as imports like "hardhat/console.sol" - fn resolved_libraries( - &self, - sources: &Sources, - ) -> Result> { - let mut libs = BTreeMap::default(); - for source in sources.values() { - for import in source.parse_imports() { - if let Some(lib) = utils::resolve_library(&self.paths.libraries, import) { - if let Entry::Vacant(entry) = libs.entry(import.into()) { - tracing::trace!( - "resolved library import \"{}\" at \"{}\"", - import, - lib.display() - ); - entry.insert((Source::read(&lib)?, lib)); - } - } - } - } - Ok(libs) - } - /// Attempts to compile the contracts found at the configured source location, see /// `ProjectPathsConfig::sources`. 
/// @@ -273,24 +221,7 @@ impl Project { #[cfg(all(feature = "svm", feature = "async"))] #[tracing::instrument(skip(self, sources))] pub fn svm_compile(&self, sources: Sources) -> Result> { - let graph = Graph::resolve_sources(&self.paths, sources)?; - let sources_by_version: BTreeMap<_, _> = graph - .into_sources_by_version(!self.auto_detect)? - .0 - .get(&self.allowed_lib_paths)? - .into_iter() - .map(|(k, (_v, s))| (k, s)) - .collect(); - - // run the compilation step for each version - let compiled = if self.solc_jobs > 1 && sources_by_version.len() > 1 { - self.compile_many(sources_by_version)? - } else { - self.compile_sources(sources_by_version)? - }; - tracing::trace!("compiled all sources"); - - Ok(compiled) + todo!() } /// Compiles all sources with their intended `Solc` version sequentially. @@ -299,89 +230,7 @@ impl Project { &self, sources_by_version: BTreeMap>, ) -> Result> { - tracing::trace!("compiling sources using a single solc job"); - let mut compiled = - ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone()); - for (solc, sources) in sources_by_version { - tracing::trace!( - "compiling {} sources with solc \"{}\"", - sources.len(), - solc.as_ref().display() - ); - compiled.extend(self.compile_with_version(&solc, sources)?); - } - Ok(compiled) - } - - /// Compiles all sources with their intended `Solc` versions in parallel. - /// - /// This runs `Self::solc_jobs` parallel `solc` jobs at most. 
- #[cfg(all(feature = "svm", feature = "async"))] - fn compile_many( - &self, - sources_by_version: BTreeMap>, - ) -> Result> { - tracing::trace!("compile sources in parallel using {} solc jobs", self.solc_jobs); - let mut compiled = - ProjectCompileOutput::with_ignored_errors(self.ignored_error_codes.clone()); - let mut paths = SourceUnitNameMap::default(); - let mut jobs = Vec::with_capacity(sources_by_version.len()); - - let mut all_sources = BTreeMap::default(); - let mut all_artifacts = Vec::with_capacity(sources_by_version.len()); - - // preprocess all sources - for (solc, sources) in sources_by_version { - match self.preprocess_sources(sources)? { - PreprocessedJob::Unchanged(artifacts) => { - compiled.extend(ProjectCompileOutput::from_unchanged(artifacts)); - } - PreprocessedJob::Items(sources, map, cached_artifacts) => { - tracing::trace!("cached artifacts: \"{:?}\"", cached_artifacts.keys()); - tracing::trace!("compile sources: \"{:?}\"", sources.keys()); - - compiled.extend_artifacts(cached_artifacts); - // replace absolute path with source name to make solc happy - let sources = map.set_source_names(sources); - paths.extend(map); - - let input = CompilerInput::with_sources(sources) - .settings(self.solc_config.settings.clone()) - .normalize_evm_version(&solc.version()?) 
- .with_remappings(self.paths.remappings.clone()); - - jobs.push((solc, input)) - } - }; - } - tracing::trace!("execute {} compile jobs in parallel", jobs.len()); - - let outputs = tokio::runtime::Runtime::new() - .unwrap() - .block_on(Solc::compile_many(jobs, self.solc_jobs)); - - for (res, _, input) in outputs.into_outputs() { - let output = res?; - if !output.has_error() { - if self.cached { - // get all contract names of the files and map them to the disk file - all_sources.extend(paths.set_disk_paths(input.sources)); - all_artifacts.extend(paths.get_artifacts(&output.contracts)); - } - - if !self.no_artifacts { - Artifacts::on_output(&output, &self.paths)?; - } - } - compiled.extend_output(output); - } - - // write the cache file - if self.cached { - self.write_cache_file(all_sources, all_artifacts)?; - } - - Ok(compiled) + todo!() } /// Compiles the given source files with the exact `Solc` executable @@ -412,117 +261,7 @@ impl Project { solc: &Solc, sources: Sources, ) -> Result> { - let (sources, paths, cached_artifacts) = match self.preprocess_sources(sources)? { - PreprocessedJob::Unchanged(artifacts) => { - return Ok(ProjectCompileOutput::from_unchanged(artifacts)) - } - PreprocessedJob::Items(a, b, c) => (a, b, c), - }; - - let version = solc.version()?; - tracing::trace!( - "compiling {} files with {}. 
Using {} cached files", - sources.len(), - version, - cached_artifacts.len() - ); - tracing::trace!("cached artifacts: \"{:?}\"", cached_artifacts.keys()); - tracing::trace!("compile sources: \"{:?}\"", sources.keys()); - - // replace absolute path with source name to make solc happy - let sources = paths.set_source_names(sources); - - let input = CompilerInput::with_sources(sources) - .settings(self.solc_config.settings.clone()) - .normalize_evm_version(&version) - .with_remappings(self.paths.remappings.clone()); - - tracing::trace!("calling solc with {} sources", input.sources.len()); - let output = solc.compile(&input)?; - tracing::trace!("compiled input, output has error: {}", output.has_error()); - - if output.has_error() { - return Ok(ProjectCompileOutput::from_compiler_output( - output, - self.ignored_error_codes.clone(), - )) - } - - if self.cached { - // get all contract names of the files and map them to the disk file - let artifacts = paths.get_artifacts(&output.contracts); - // reapply to disk paths - let sources = paths.set_disk_paths(input.sources); - // create cache file - self.write_cache_file(sources, artifacts)?; - } - - // TODO: There seems to be some type redundancy here, c.f. discussion with @mattsse - if !self.no_artifacts { - Artifacts::on_output(&output, &self.paths)?; - } - - Ok(ProjectCompileOutput::from_compiler_output_and_cache( - output, - cached_artifacts, - self.ignored_error_codes.clone(), - )) - } - - /// Preprocesses the given source files by resolving their libs and check against cache if - /// configured - fn preprocess_sources(&self, mut sources: Sources) -> Result> { - tracing::trace!("start preprocessing {} sources files", sources.len()); - - // keeps track of source names / disk paths - let mut paths = SourceUnitNameMap::default(); - - tracing::trace!("start resolving libraries"); - for (import, (source, path)) in self.resolved_libraries(&sources)? 
{ - // inserting with absolute path here and keep track of the source name <-> path mappings - sources.insert(path.clone(), source); - paths.path_to_source_unit_name.insert(path.clone(), import.clone()); - paths.source_unit_name_to_path.insert(import, path); - } - tracing::trace!("resolved all libraries"); - - // If there's a cache set, filter to only re-compile the files which were changed - let (sources, cached_artifacts) = if self.cached && self.paths.cache.exists() { - tracing::trace!("start reading solfiles cache for incremental compilation"); - let mut cache = SolFilesCache::read(&self.paths.cache)?; - cache.remove_missing_files(); - - let changed_files = cache.get_changed_or_missing_artifacts_files::( - sources, - Some(&self.solc_config), - &self.paths, - ); - tracing::trace!("detected {} changed files", changed_files.len()); - cache.remove_changed_files(&changed_files); - - let cached_artifacts = if self.paths.artifacts.exists() { - tracing::trace!("reading artifacts from cache.."); - let artifacts = cache.read_artifacts::(&self.paths.artifacts)?; - tracing::trace!("read {} artifacts from cache", artifacts.len()); - artifacts - } else { - BTreeMap::default() - }; - - // if nothing changed and all artifacts still exist - if changed_files.is_empty() { - tracing::trace!( - "unchanged source files, reusing artifacts {:?}", - cached_artifacts.keys() - ); - return Ok(PreprocessedJob::Unchanged(cached_artifacts)) - } - // There are changed files and maybe some cached files - (changed_files, cached_artifacts) - } else { - (sources, BTreeMap::default()) - }; - Ok(PreprocessedJob::Items(sources, paths, cached_artifacts)) + todo!() } /// Removes the project's artifacts and cache file @@ -568,11 +307,6 @@ impl Project { } } -enum PreprocessedJob { - Unchanged(BTreeMap), - Items(Sources, SourceUnitNameMap, BTreeMap), -} - pub struct ProjectBuilder { /// The layout of the paths: Option, @@ -834,7 +568,7 @@ impl ProjectCompileOutput { /// Get the (merged) solc compiler 
output /// ```no_run - /// use std::collections::BTreeMap; + /// use std::collections::btree_map::BTreeMap; /// use ethers_solc::artifacts::Contract; /// use ethers_solc::Project; /// @@ -946,7 +680,7 @@ impl ProjectCompileOutput { /// # Example /// /// ```no_run - /// use std::collections::BTreeMap; + /// use std::collections::btree_map::BTreeMap; /// use ethers_solc::artifacts::CompactContract; /// use ethers_solc::Project; /// From 5053823ff1ec8c514225fd77d973797f0f05a5c3 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 21:36:17 +0100 Subject: [PATCH 20/82] refactor: even more cleanup --- ethers-solc/src/cache.rs | 141 +-------------------------------------- 1 file changed, 1 insertion(+), 140 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 84adf9b85..85f466288 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -8,7 +8,7 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::{BTreeMap, HashMap, HashSet}, + collections::{btree_map::BTreeMap, BTreeMap, HashMap, HashSet}, fs::{self, File}, path::{Path, PathBuf}, time::{Duration, UNIX_EPOCH}, @@ -96,145 +96,6 @@ impl SolFilesCache { self.files.retain(|file, _| Path::new(file).exists()) } - pub fn remove_changed_files(&mut self, changed_files: &Sources) { - tracing::trace!("remove changed files from cache"); - self.files.retain(|file, _| !changed_files.contains_key(file)) - } - - /// Returns only the files that were changed from the provided sources, to save time - /// when compiling. 
- pub fn get_changed_files<'a>( - &'a self, - sources: Sources, - config: Option<&'a SolcConfig>, - ) -> Sources { - sources - .into_iter() - .filter(move |(file, source)| self.has_changed(file, source.content_hash(), config)) - .collect() - } - - /// Returns true if the given content hash or config differs from the file's - /// or the file does not exist - pub fn has_changed( - &self, - file: impl AsRef, - hash: impl AsRef<[u8]>, - config: Option<&SolcConfig>, - ) -> bool { - if let Some(entry) = self.files.get(file.as_ref()) { - if entry.content_hash.as_bytes() != hash.as_ref() { - return true - } - if let Some(config) = config { - if config != &entry.solc_config { - return true - } - } - false - } else { - true - } - } - - /// Returns only the files that were changed or are missing artifacts compared to previous - /// compiler execution, to save time when compiling. - pub fn get_changed_or_missing_artifacts_files<'a, T: ArtifactOutput>( - &'a self, - sources: Sources, - config: Option<&'a SolcConfig>, - paths: &ProjectPathsConfig, - ) -> Sources { - // all file hashes - let content_hashes: HashMap<_, _> = - sources.iter().map(|(file, source)| (file.clone(), source.content_hash())).collect(); - sources - .into_iter() - .filter(move |(file, _)| { - self.has_changed_or_missing_artifact::(file, &content_hashes, config, paths) - }) - .collect() - } - - /// Returns true if the given content hash or config differs from the file's - /// or the file does not exist or the files' artifacts are missing - pub fn has_changed_or_missing_artifact( - &self, - file: &Path, - hashes: &HashMap, - config: Option<&SolcConfig>, - paths: &ProjectPathsConfig, - ) -> bool { - let hash = hashes.get(file).unwrap().as_bytes(); - if let Some(entry) = self.files.get(file) { - if entry.content_hash.as_bytes() != hash { - tracing::trace!("changed content hash for cached artifact \"{}\"", file.display()); - return true - } - if let Some(config) = config { - if config != &entry.solc_config { - 
tracing::trace!( - "changed solc config for cached artifact \"{}\"", - file.display() - ); - return true - } - } - - // checks whether an artifact this file depends on was removed - if entry.artifacts.iter().any(|name| !T::output_exists(file, name, &paths.artifacts)) { - tracing::trace!( - "missing linked artifacts for cached artifact \"{}\"", - file.display() - ); - return true - } - - // check if any of the file's imported files changed - self.has_changed_imports(file, entry, hashes, paths, &mut HashSet::new()) - } else { - tracing::trace!("missing cached artifact for \"{}\"", file.display()); - true - } - } - - /// Returns true if the entry has any imports that were changed - pub(crate) fn has_changed_imports( - &self, - path: &Path, - entry: &CacheEntry, - hashes: &HashMap, - paths: &ProjectPathsConfig, - traversed: &mut HashSet, - ) -> bool { - let cwd = match path.parent() { - Some(inner) => inner, - None => return true, - }; - if !traversed.insert(path.to_path_buf()) { - // skip already traversed files, this prevents SO for circular imports - return false - } - - for import in entry.imports.iter() { - if let Some((import, import_path)) = paths - .resolve_import(cwd, Path::new(import.as_str())) - .ok() - .and_then(|import| self.files.get(&import).map(|e| (e, import))) - { - if let Some(hash) = hashes.get(&import_path) { - if import.content_hash == hash.as_str() && - !self.has_changed_imports(&import_path, import, hashes, paths, traversed) - { - return false - } - } - } - } - - !entry.imports.is_empty() - } - /// Checks if all artifact files exist pub fn all_artifacts_exist(&self, artifacts_root: &Path) -> bool { self.files.iter().all(|(file, entry)| { From 514a3299dd98bf41d393ba43406305bc4b28a6c7 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 27 Jan 2022 21:46:04 +0100 Subject: [PATCH 21/82] even more cleanup --- ethers-solc/src/cache.rs | 8 ++++---- ethers-solc/src/hh.rs | 26 +------------------------- ethers-solc/src/lib.rs | 6 +----- 
ethers-solc/src/output.rs | 12 +++++++----- 4 files changed, 13 insertions(+), 39 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 85f466288..e0aefb0e6 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -8,7 +8,7 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::{btree_map::BTreeMap, BTreeMap, HashMap, HashSet}, + collections::{btree_map::BTreeMap, HashMap, HashSet}, fs::{self, File}, path::{Path, PathBuf}, time::{Duration, UNIX_EPOCH}, @@ -123,10 +123,10 @@ impl SolFilesCache { /// /// In other words, only keep those cache entries with the paths (keys) that the iterator yields /// and only keep the versions in the cache entry that the version iterator yields. - pub(crate) fn retain(&mut self, files: I) + pub(crate) fn retain<'a, I, V>(&mut self, files: I) where - I: IntoIterator, - V: IntoIterator, + I: IntoIterator, + V: IntoIterator, { } diff --git a/ethers-solc/src/hh.rs b/ethers-solc/src/hh.rs index dd79f75a7..37ad2fe30 100644 --- a/ethers-solc/src/hh.rs +++ b/ethers-solc/src/hh.rs @@ -7,7 +7,7 @@ use crate::{ }; use ethers_core::abi::Abi; use serde::{Deserialize, Serialize}; -use std::{collections::BTreeMap, fs}; +use std::{collections::btree_map::BTreeMap, fs}; const HH_ARTIFACT_VERSION: &str = "hh-sol-artifact-1"; @@ -56,30 +56,6 @@ pub struct HardhatArtifacts; impl ArtifactOutput for HardhatArtifacts { type Artifact = HardhatArtifact; - fn on_output(output: &CompilerOutput, layout: &ProjectPathsConfig) -> Result<()> { - fs::create_dir_all(&layout.artifacts) - .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; - for (file, contracts) in output.contracts.iter() { - for (name, contract) in contracts { - let artifact = Self::output_file(file, name); - let artifact_file = layout.artifacts.join(artifact); - if let Some(parent) = artifact_file.parent() { - fs::create_dir_all(parent).map_err(|err| { - SolcError::msg(format!( - 
"Failed to create artifact parent folder \"{}\": {}", - parent.display(), - err - )) - })?; - } - let artifact = Self::contract_to_artifact(file, name, contract.clone()); - fs::write(&artifact_file, serde_json::to_vec_pretty(&artifact)?) - .map_err(|err| SolcError::io(err, artifact_file))? - } - } - Ok(()) - } - fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact { let (bytecode, link_references, deployed_bytecode, deployed_link_references) = if let Some(evm) = contract.evm { diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index e6fc29bd9..85d20de50 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -21,10 +21,7 @@ pub use compile::*; mod config; -pub use config::{ - AllowedLibPaths, Artifact, ArtifactOutput, MinimalCombinedArtifacts, PathStyle, - ProjectPathsConfig, SolcConfig, -}; +pub use config::{AllowedLibPaths, PathStyle, ProjectPathsConfig, SolcConfig}; pub mod remappings; @@ -41,7 +38,6 @@ use crate::{ use error::Result; use std::{ borrow::Cow, - collections::BTreeMap, convert::TryInto, fmt, fs, marker::PhantomData, diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/output.rs index d26fdb8d4..978ee519f 100644 --- a/ethers-solc/src/output.rs +++ b/ethers-solc/src/output.rs @@ -12,7 +12,7 @@ use ethers_core::{abi::Abi, types::Bytes}; use semver::Version; use serde::{de::DeserializeOwned, Serialize}; use std::{ - collections::BTreeMap, + collections::btree_map::BTreeMap, fs, io, path::{Path, PathBuf}, }; @@ -75,7 +75,7 @@ impl> Artifact for T { /// relationship (1-N+). pub trait ArtifactOutput { /// Represents the artifact that will be stored for a `Contract` - type Artifact: Artifact + DeserializeOwned + From; + type Artifact: Artifact + DeserializeOwned + Serialize; /// Handle the aggregated set of compiled contracts from the solc [`crate::CompilerOutput`]. 
/// @@ -203,9 +203,7 @@ pub trait ArtifactOutput { } /// Convert a contract to the artifact type - fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact { - Self::Artifact::from(contract) - } + fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact; } /// An Artifacts implementation that uses a compact representation @@ -223,6 +221,10 @@ pub struct MinimalCombinedArtifacts; impl ArtifactOutput for MinimalCombinedArtifacts { type Artifact = CompactContract; + + fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact { + Self::Artifact::from(contract) + } } /// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also From c4d68832ee1d831344dbe1012cf24783f2fd1e0d Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 00:05:41 +0100 Subject: [PATCH 22/82] chore: make it compile --- ethers-solc/src/cache.rs | 26 ++++++++++++++------------ ethers-solc/src/lib.rs | 37 +++++++++++++++++++------------------ ethers-solc/src/output.rs | 20 ++++++++++---------- 3 files changed, 43 insertions(+), 40 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index e0aefb0e6..09b3ea833 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -98,9 +98,10 @@ impl SolFilesCache { /// Checks if all artifact files exist pub fn all_artifacts_exist(&self, artifacts_root: &Path) -> bool { - self.files.iter().all(|(file, entry)| { - entry.artifacts.iter().all(|name| T::output_exists(file, name, artifacts_root)) - }) + // self.files.iter().all(|(file, entry)| { + // entry.artifacts.iter().all(|name| T::output_exists(file, name, artifacts_root)) + // }) + todo!() } /// Reads all cached artifacts from disk using the given ArtifactOutput handler @@ -108,15 +109,16 @@ impl SolFilesCache { &self, artifacts_root: &Path, ) -> Result> { - let mut artifacts = BTreeMap::default(); - for (file, entry) in &self.files { 
- for artifact in &entry.artifacts { - let artifact_file = artifacts_root.join(T::output_file(file, artifact)); - let artifact = T::read_cached_artifact(&artifact_file)?; - artifacts.insert(artifact_file, artifact); - } - } - Ok(artifacts) + todo!() + // let mut artifacts = BTreeMap::default(); + // for (file, entry) in &self.files { + // for artifact in &entry.artifacts { + // let artifact_file = artifacts_root.join(T::output_file(file, artifact)); + // let artifact = T::read_cached_artifact(&artifact_file)?; + // artifacts.insert(artifact_file, artifact); + // } + // } + // Ok(artifacts) } /// Retains only the `CacheEntry` specified by the file + version combination. diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 85d20de50..594ea262a 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -684,24 +684,25 @@ impl ProjectCompileOutput { /// let contracts: BTreeMap = project.compile().unwrap().into_artifacts().collect(); /// ``` pub fn into_artifacts(mut self) -> Box> { - let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| { - T::contract_name(&path).map(|name| { - (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art) - }) - }); - - let artifacts: Box> = if let Some(output) = - self.compiler_output.take() - { - Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten().map( - |(name, artifact)| { - (format!("{}:{}", T::output_file_name(&name).display(), name), artifact) - }, - ))) - } else { - Box::new(artifacts) - }; - artifacts + // let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| { + // T::contract_name(&path).map(|name| { + // (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art) + // }) + // }); + // + // let artifacts: Box> = if let Some(output) = + // self.compiler_output.take() + // { + // Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten().map( + // |(name, artifact)| { + // (format!("{}:{}", 
T::output_file_name(&name).display(), name), artifact) + // }, + // ))) + // } else { + // Box::new(artifacts) + // }; + // artifacts + todo!() } } diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/output.rs index 978ee519f..84fef2242 100644 --- a/ethers-solc/src/output.rs +++ b/ethers-solc/src/output.rs @@ -87,7 +87,7 @@ pub trait ArtifactOutput { ) -> Result> { fs::create_dir_all(&layout.artifacts) .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; - let mut artifacts = WrittenArtifacts::new(); + let mut artifacts = WrittenArtifacts::default(); for (file, contracts) in contracts.iter() { let mut entries = BTreeMap::new(); @@ -100,9 +100,13 @@ pub trait ArtifactOutput { } else { Self::output_file(file, name) }; - let artifact = write_contract::( + + let artifact = + Self::contract_to_artifact(file, name, contract.contract.clone()); + + write_contract::( &layout.artifacts.join(&artifact_path), - &contract.contract, + &artifact, )?; contracts.push(WrittenArtifact { @@ -262,10 +266,7 @@ impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { } /// Writes the given contract to the `out` path creating all parent directories -fn write_contract(out: &Path, contract: &Contract) -> Result -where - C: From<&Contract> + Serialize, -{ +fn write_contract(out: &Path, artifact: &T) -> Result<()> { if let Some(parent) = out.parent() { fs::create_dir_all(parent).map_err(|err| { SolcError::msg(format!( @@ -275,7 +276,6 @@ where )) })?; } - let c = C::from(contract); - fs::write(out, serde_json::to_vec_pretty(&c)?).map_err(|err| SolcError::io(err, out))?; - Ok(c) + fs::write(out, serde_json::to_vec_pretty(artifact)?).map_err(|err| SolcError::io(err, out))?; + Ok(()) } From dba9dbcb638bb1725182f32fcb6c90f8028954a3 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 00:07:55 +0100 Subject: [PATCH 23/82] chore: make it compile with all features --- ethers-solc/src/compile/project.rs | 18 +++++------------- 
ethers-solc/src/output.rs | 2 +- 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 7892d9dd8..da48cdd9d 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -73,21 +73,13 @@ //! not available there, the source unit name will be passed to the Host Filesystem Loader, which //! will then look in `/project/dapp-bin/library/iterable_mapping.sol` -use crate::{ - artifacts::{ - Error, Settings, SourceFile, VersionedContract, VersionedContracts, VersionedSources, - }, - cache::CacheEntry, - error::Result, - output::{ArtifactOutput, WrittenArtifacts}, - resolver::GraphEdges, - utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, Project, ProjectPathsConfig, - SolFilesCache, SolcConfig, Source, SourceUnitNameMap, Sources, -}; +use crate::{artifacts::{ + Error, Settings, SourceFile, VersionedContract, VersionedContracts, VersionedSources, +}, cache::CacheEntry, error::Result, output::{WrittenArtifacts}, resolver::GraphEdges, utils, CompilerInput, CompilerOutput, Graph, Project, ProjectPathsConfig, SolFilesCache, SolcConfig, Source, SourceUnitNameMap, Sources, ArtifactOutput}; use rayon::prelude::*; use semver::Version; use std::{ - collections::{btree_map::BTreeMap, hash_map, hash_map::Entry, BTreeMap, HashMap, HashSet}, + collections::{btree_map::BTreeMap, hash_map, hash_map::Entry, HashMap, HashSet}, path::{Path, PathBuf}, }; @@ -635,7 +627,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { } = cache; // keep only those files that were previously filtered (not dirty, reused) - cache.retain(filtered.iter().map(|(p, (_, v))| (p, v))); + cache.retain(filtered.iter().map(|(p, (_, v))| (p.as_path(), v))); // add the artifacts to the cache entries, this way we can keep a mapping from // solidity file to its artifacts diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/output.rs index 84fef2242..6f6808473 100644 --- 
a/ethers-solc/src/output.rs +++ b/ethers-solc/src/output.rs @@ -18,7 +18,7 @@ use std::{ }; /// Represents a written [`crate::Contract`] artifact -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq)] pub struct WrittenArtifact { /// The Artifact that was written pub artifact: T, From dda718aaff9f2776b45c6617b58675b7414c90c6 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 00:18:13 +0100 Subject: [PATCH 24/82] chore: clippy fix --- ethers-solc/src/cache.rs | 18 +++++++-------- ethers-solc/src/compile/project.rs | 37 +++++++++++++++++++----------- ethers-solc/src/config.rs | 16 ++++--------- ethers-solc/src/hh.rs | 5 ++-- ethers-solc/src/lib.rs | 18 +++++++-------- ethers-solc/src/output.rs | 5 +--- ethers-solc/src/resolver.rs | 3 +-- 7 files changed, 50 insertions(+), 52 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 09b3ea833..9bfe5153c 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -3,13 +3,13 @@ use crate::{ artifacts::{Contracts, Sources}, config::SolcConfig, error::{Result, SolcError}, - utils, ArtifactOutput, ProjectPathsConfig, Source, + ArtifactOutput, Source, }; use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::{btree_map::BTreeMap, HashMap, HashSet}, - fs::{self, File}, + collections::{btree_map::BTreeMap, HashMap}, + fs::{self}, path::{Path, PathBuf}, time::{Duration, UNIX_EPOCH}, }; @@ -97,7 +97,7 @@ impl SolFilesCache { } /// Checks if all artifact files exist - pub fn all_artifacts_exist(&self, artifacts_root: &Path) -> bool { + pub fn all_artifacts_exist(&self, _artifacts_root: &Path) -> bool { // self.files.iter().all(|(file, entry)| { // entry.artifacts.iter().all(|name| T::output_exists(file, name, artifacts_root)) // }) @@ -107,7 +107,7 @@ impl SolFilesCache { /// Reads all cached artifacts from disk using the given ArtifactOutput handler pub fn read_artifacts( &self, - artifacts_root: &Path, + _artifacts_root: &Path, ) -> 
Result> { todo!() // let mut artifacts = BTreeMap::default(); @@ -125,7 +125,7 @@ impl SolFilesCache { /// /// In other words, only keep those cache entries with the paths (keys) that the iterator yields /// and only keep the versions in the cache entry that the version iterator yields. - pub(crate) fn retain<'a, I, V>(&mut self, files: I) + pub fn retain<'a, I, V>(&mut self, _files: I) where I: IntoIterator, V: IntoIterator, @@ -134,7 +134,7 @@ impl SolFilesCache { /// Inserts the provided cache entries, if there is an existing `CacheEntry` it will be updated /// but versions will be merged. - pub(crate) fn extend(&mut self, entries: I) + pub fn extend(&mut self, _entries: I) where I: IntoIterator, { @@ -195,8 +195,8 @@ impl SolFilesCacheBuilder { /// If a `cache_file` path was provided it's used as base. pub fn insert_files( self, - sources: Sources, - cache_file: Option, + _sources: Sources, + _cache_file: Option, ) -> Result { todo!() // let format = self.format.unwrap_or_else(|| ETHERS_FORMAT_VERSION.to_string()); diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index da48cdd9d..e938484e2 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -73,9 +73,17 @@ //! not available there, the source unit name will be passed to the Host Filesystem Loader, which //! 
will then look in `/project/dapp-bin/library/iterable_mapping.sol` -use crate::{artifacts::{ - Error, Settings, SourceFile, VersionedContract, VersionedContracts, VersionedSources, -}, cache::CacheEntry, error::Result, output::{WrittenArtifacts}, resolver::GraphEdges, utils, CompilerInput, CompilerOutput, Graph, Project, ProjectPathsConfig, SolFilesCache, SolcConfig, Source, SourceUnitNameMap, Sources, ArtifactOutput}; +use crate::{ + artifacts::{ + Error, Settings, SourceFile, VersionedContract, VersionedContracts, VersionedSources, + }, + cache::CacheEntry, + error::Result, + output::WrittenArtifacts, + resolver::GraphEdges, + utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, Project, ProjectPathsConfig, + SolFilesCache, SolcConfig, Source, SourceUnitNameMap, Sources, +}; use rayon::prelude::*; use semver::Version; use std::{ @@ -158,7 +166,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { .filtered(&mut cache) .set_source_unit_names(&project.paths, &mut source_unit_map); - let mut output = sources.compile(&project.solc_config.settings, &project.paths)?; + let output = sources.compile(&project.solc_config.settings, &project.paths)?; // TODO reapply the mappings to the contracts @@ -224,9 +232,9 @@ impl CompilerSources { names: &mut SourceUnitNameMap, ) -> Self { fn set( - sources: VersionedSources, - paths: &ProjectPathsConfig, - cache: &mut SourceUnitNameMap, + _sources: VersionedSources, + _paths: &ProjectPathsConfig, + _cache: &mut SourceUnitNameMap, ) -> VersionedSources { todo!() } @@ -412,7 +420,7 @@ struct Cache<'a, T: ArtifactOutput> { impl<'a, T: ArtifactOutput> Cache<'a, T> { /// Creates a new cache entry for the file - fn create_cache_entry(&self, file: &PathBuf, source: &Source) -> Result { + fn create_cache_entry(&self, file: &Path, source: &Source) -> Result { let imports = self .edges .imports(file) @@ -423,7 +431,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { let entry = CacheEntry { last_modification_date: 
CacheEntry::read_last_modification_date(&file).unwrap(), content_hash: source.content_hash(), - source_name: utils::source_name(&file, &self.paths.root).into(), + source_name: utils::source_name(file, &self.paths.root).into(), solc_config: self.solc_config.clone(), imports, version_requirement: self.edges.version_requirement(file).map(|v| v.to_string()), @@ -439,7 +447,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { /// If there is already an entry available for the file the given version is added to the set fn insert_new_cache_entry( &mut self, - file: &PathBuf, + file: &Path, source: &Source, version: Version, ) -> Result<()> { @@ -447,7 +455,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { versions.insert(version); } else { let entry = self.create_cache_entry(file, source)?; - self.dirty_entries.insert(file.clone(), (entry, HashSet::from([version]))); + self.dirty_entries.insert(file.to_path_buf(), (entry, HashSet::from([version]))); } Ok(()) } @@ -485,7 +493,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { version: &Version, ) -> Option<(PathBuf, Source)> { if !self.is_dirty(&file, version) && - self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, &version)) + self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version)) { self.insert_filtered_source(file, source, version.clone()); None @@ -552,6 +560,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { } /// Abstraction over configured caching which can be either non-existent or an already loaded cache +#[allow(clippy::large_enum_variant)] enum ArtifactsCache<'a, T: ArtifactOutput> { /// Cache nothing on disk Ephemeral, @@ -617,7 +626,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { /// Returns all the _cached_ artifacts. 
fn finish( self, - written_artifacts: &WrittenArtifacts, + _written_artifacts: &WrittenArtifacts, ) -> Result> { match self { ArtifactsCache::Ephemeral => Ok(Default::default()), @@ -632,7 +641,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // add the artifacts to the cache entries, this way we can keep a mapping from // solidity file to its artifacts - dirty_entries.into_iter().map(|(file, (mut entry, versions))| { + dirty_entries.into_iter().map(|(_file, (_entry, _versions))| { // TODO need reshuffling of source units to actual paths // if let Some(contracts) = written_artifacts.get(&file) { diff --git a/ethers-solc/src/config.rs b/ethers-solc/src/config.rs index 93f9a60b3..559401f1b 100644 --- a/ethers-solc/src/config.rs +++ b/ethers-solc/src/config.rs @@ -1,23 +1,17 @@ use crate::{ - artifacts::{ - CompactContract, CompactContractRef, Contract, Settings, VersionedContract, - VersionedContracts, - }, + artifacts::Settings, cache::SOLIDITY_FILES_CACHE_FILENAME, error::{Result, SolcError, SolcIoError}, - hh::HardhatArtifact, remappings::Remapping, resolver::Graph, - utils, CompilerOutput, Source, Sources, + utils, Source, Sources, }; -use ethers_core::{abi::Abi, types::Bytes}; -use semver::Version; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; + +use serde::{Deserialize, Serialize}; use std::{ - collections::BTreeMap, convert::TryFrom, fmt::{self, Formatter}, - fs, io, + fs, path::{Component, Path, PathBuf}, }; diff --git a/ethers-solc/src/hh.rs b/ethers-solc/src/hh.rs index 37ad2fe30..9a64c80b6 100644 --- a/ethers-solc/src/hh.rs +++ b/ethers-solc/src/hh.rs @@ -2,12 +2,11 @@ use crate::{ artifacts::{BytecodeObject, CompactContract, Contract, Offsets}, - error::{Result, SolcError}, - ArtifactOutput, CompilerOutput, ProjectPathsConfig, + ArtifactOutput, }; use ethers_core::abi::Abi; use serde::{Deserialize, Serialize}; -use std::{collections::btree_map::BTreeMap, fs}; +use std::collections::btree_map::BTreeMap; const HH_ARTIFACT_VERSION: &str 
= "hh-sol-artifact-1"; diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 594ea262a..7e72f5985 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -4,7 +4,7 @@ pub mod artifacts; pub mod sourcemap; pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion}; -use std::collections::btree_map::{BTreeMap, Entry}; +use std::collections::btree_map::BTreeMap; pub mod cache; pub mod hh; @@ -39,7 +39,7 @@ use error::Result; use std::{ borrow::Cow, convert::TryInto, - fmt, fs, + fmt, marker::PhantomData, path::{Path, PathBuf}, }; @@ -215,16 +215,16 @@ impl Project { /// # } /// ``` #[cfg(all(feature = "svm", feature = "async"))] - #[tracing::instrument(skip(self, sources))] - pub fn svm_compile(&self, sources: Sources) -> Result> { + // #[tracing::instrument(skip(self, sources))] + pub fn svm_compile(&self, _sources: Sources) -> Result> { todo!() } /// Compiles all sources with their intended `Solc` version sequentially. #[cfg(all(feature = "svm", feature = "async"))] - fn compile_sources( + pub fn compile_sources( &self, - sources_by_version: BTreeMap>, + _sources_by_version: BTreeMap>, ) -> Result> { todo!() } @@ -254,8 +254,8 @@ impl Project { /// ``` pub fn compile_with_version( &self, - solc: &Solc, - sources: Sources, + _solc: &Solc, + _sources: Sources, ) -> Result> { todo!() } @@ -683,7 +683,7 @@ impl ProjectCompileOutput { /// let project = Project::builder().build().unwrap(); /// let contracts: BTreeMap = project.compile().unwrap().into_artifacts().collect(); /// ``` - pub fn into_artifacts(mut self) -> Box> { + pub fn into_artifacts(self) -> Box> { // let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| { // T::contract_name(&path).map(|name| { // (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art) diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/output.rs index 6f6808473..6e1148075 100644 --- a/ethers-solc/src/output.rs +++ b/ethers-solc/src/output.rs @@ -1,10 +1,7 @@ //! 
Output artifact handling use crate::{ - artifacts::{ - CompactContract, CompactContractRef, Contract, FileToContractsMap, VersionedContract, - VersionedContracts, - }, + artifacts::{CompactContract, Contract, FileToContractsMap, VersionedContracts}, error::Result, HardhatArtifact, ProjectPathsConfig, SolcError, }; diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index 3f07ad617..c15ec3f51 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -80,8 +80,7 @@ impl GraphEdges { self.indices .get(file.as_ref()) .and_then(|idx| self.versions.get(idx)) - .map(|v| v.as_ref()) - .flatten() + .and_then(|v| v.as_ref()) } } From 77644b94f90a97bd6aa48b39ea9a85e7479026c3 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 00:34:55 +0100 Subject: [PATCH 25/82] feat: integrate new compiler pipeline --- ethers-solc/src/compile/project.rs | 16 +++++++++++++++- ethers-solc/src/lib.rs | 25 ++++++++----------------- ethers-solc/src/resolver.rs | 8 +++++--- 3 files changed, 28 insertions(+), 21 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index e938484e2..9ce647479 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -82,7 +82,7 @@ use crate::{ output::WrittenArtifacts, resolver::GraphEdges, utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, Project, ProjectPathsConfig, - SolFilesCache, SolcConfig, Source, SourceUnitNameMap, Sources, + SolFilesCache, Solc, SolcConfig, Source, SourceUnitNameMap, Sources, }; use rayon::prelude::*; use semver::Version; @@ -139,6 +139,20 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { Ok(Self { edges, project, sources }) } + /// Compiles the sources with a pinned `Solc` instance + pub fn with_sources_and_solc( + project: &'a Project, + sources: Sources, + solc: Solc, + ) -> Result { + let version = solc.version()?; + let (sources, edges) = Graph::resolve_sources(&project.paths, 
sources)?.into_sources(); + let sources_by_version = BTreeMap::from([(solc, (version, sources))]); + let sources = CompilerSources::Sequential(sources_by_version); + + Ok(Self { edges, project, sources }) + } + /// Compiles all the sources of the `Project` in the appropriate mode /// /// If caching is enabled, the sources are filtered and only _dirty_ sources are recompiled. diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 7e72f5985..df25e7940 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -34,6 +34,7 @@ use crate::{ artifacts::Sources, cache::SourceUnitNameMap, error::{SolcError, SolcIoError}, + project::{ProjectCompileOutput2, ProjectCompiler}, }; use error::Result; use std::{ @@ -182,7 +183,7 @@ impl Project { /// # } /// ``` #[tracing::instrument(skip_all, name = "compile")] - pub fn compile(&self) -> Result> { + pub fn compile(&self) -> Result> { let sources = self.paths.read_input_files()?; tracing::trace!("found {} sources to compile: {:?}", sources.len(), sources.keys()); @@ -197,7 +198,6 @@ impl Project { solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string()); } - let sources = Graph::resolve_sources(&self.paths, sources)?.into_sources(); self.compile_with_version(&solc, sources) } @@ -216,17 +216,8 @@ impl Project { /// ``` #[cfg(all(feature = "svm", feature = "async"))] // #[tracing::instrument(skip(self, sources))] - pub fn svm_compile(&self, _sources: Sources) -> Result> { - todo!() - } - - /// Compiles all sources with their intended `Solc` version sequentially. 
- #[cfg(all(feature = "svm", feature = "async"))] - pub fn compile_sources( - &self, - _sources_by_version: BTreeMap>, - ) -> Result> { - todo!() + pub fn svm_compile(&self, sources: Sources) -> Result> { + ProjectCompiler::with_sources(self, sources)?.compile() } /// Compiles the given source files with the exact `Solc` executable @@ -254,10 +245,10 @@ impl Project { /// ``` pub fn compile_with_version( &self, - _solc: &Solc, - _sources: Sources, - ) -> Result> { - todo!() + solc: &Solc, + sources: Sources, + ) -> Result> { + ProjectCompiler::with_sources_and_solc(self, sources, solc.clone())?.compile() } /// Removes the project's artifacts and cache file diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index c15ec3f51..d25ebe6fa 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -132,9 +132,11 @@ impl Graph { self.node_ids(start).map(move |idx| self.node(idx)) } - /// Returns all files together with their paths - pub fn into_sources(self) -> Sources { - self.nodes.into_iter().map(|node| (node.path, node.source)).collect() + /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and + /// returning the `nodes` converted to `Sources` + pub fn into_sources(self) -> (Sources, GraphEdges) { + let Graph { nodes, edges, .. } = self; + (nodes.into_iter().map(|node| (node.path, node.source)).collect(), edges) } /// Returns an iterator that yields only those nodes that represent input files. 
From 741f9ee7527e43ebe81160c460f56b132410d0e9 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 00:46:47 +0100 Subject: [PATCH 26/82] docs: more docs --- ethers-solc/src/lib.rs | 36 +++++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index df25e7940..17e5a4075 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -170,8 +170,9 @@ impl Project { /// /// NOTE: this does not check if the contracts were successfully compiled, see /// `CompilerOutput::has_error` instead. + /// /// NB: If the `svm` feature is enabled, this function will automatically detect - /// solc versions across files. + /// solc versions across files, see [`Self::svm_compile()`] /// /// # Example /// @@ -203,6 +204,16 @@ impl Project { /// Compiles a set of contracts using `svm` managed solc installs /// + /// This will autodetect the appropriate `Solc` version(s) to use when compiling the provided + /// `Sources`. Solc auto-detection follows semver rules, see also + /// [`crate::resolver::Graph::get_input_node_versions()`] + /// + /// # Errors + /// + /// This returns an error if contracts in the `Sources` set are incompatible (violate semver + /// rules) with their imports, for example source contract `A(=0.8.11)` imports dependency + /// `C(<0.8.0)`, which are incompatible. + /// /// # Example /// /// ``` @@ -215,7 +226,6 @@ impl Project { /// # } /// ``` #[cfg(all(feature = "svm", feature = "async"))] - // #[tracing::instrument(skip(self, sources))] pub fn svm_compile(&self, sources: Sources) -> Result> { ProjectCompiler::with_sources(self, sources)?.compile() } @@ -254,6 +264,22 @@ impl Project { /// Removes the project's artifacts and cache file /// /// If the cache file was the only file in the folder, this also removes the empty folder. 
+ /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// # fn demo(project: Project) { + /// let project = Project::builder().build().unwrap(); + /// let _ = project.compile().unwrap(); + /// assert!(project.artifacts_path().exists()); + /// assert!(project.cache_path().exists()); + /// + /// project.cleanup(); + /// assert!(!project.artifacts_path().exists()); + /// assert!(!project.cache_path().exists()); + /// # } + /// ``` pub fn cleanup(&self) -> std::result::Result<(), SolcIoError> { tracing::trace!("clean up project"); if self.cache_path().exists() { @@ -281,13 +307,13 @@ impl Project { Ok(()) } - /// Flattens the target file into a single string suitable for verification + /// Flattens the target solidity file into a single string suitable for verification. /// /// This method uses a dependency graph to resolve imported files and substitute /// import directives with the contents of target files. It will strip the pragma - /// version directives and SDPX license identifiers from imported files. + /// version directives and SDPX license identifiers from all imported files. /// - /// NOTE: the SDPX license identifier will be removed from the imported file + /// NB: the SDPX license identifier will be removed from the imported file /// only if it is found at the beginning of the file. 
pub fn flatten(&self, target: &Path) -> Result { self.paths.flatten(target) From 587a8c349d830990c5fcad1518abb321a0705096 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 01:35:31 +0100 Subject: [PATCH 27/82] refactor: move stuff around --- .../src/{output.rs => artifact_output.rs} | 0 ethers-solc/src/compile/mod.rs | 2 + ethers-solc/src/compile/output.rs | 115 ++++++++++++++++++ ethers-solc/src/compile/project.rs | 70 +---------- ethers-solc/src/lib.rs | 11 +- ethers-solc/src/project_util.rs | 4 +- 6 files changed, 129 insertions(+), 73 deletions(-) rename ethers-solc/src/{output.rs => artifact_output.rs} (100%) create mode 100644 ethers-solc/src/compile/output.rs diff --git a/ethers-solc/src/output.rs b/ethers-solc/src/artifact_output.rs similarity index 100% rename from ethers-solc/src/output.rs rename to ethers-solc/src/artifact_output.rs diff --git a/ethers-solc/src/compile/mod.rs b/ethers-solc/src/compile/mod.rs index 03614f08d..c87f40cd6 100644 --- a/ethers-solc/src/compile/mod.rs +++ b/ethers-solc/src/compile/mod.rs @@ -16,6 +16,8 @@ use std::{ pub mod many; #[cfg(all(feature = "svm", feature = "async"))] +pub mod output; +#[cfg(all(feature = "svm", feature = "async"))] pub mod project; /// The name of the `solc` binary on the system diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs new file mode 100644 index 000000000..0edd819da --- /dev/null +++ b/ethers-solc/src/compile/output.rs @@ -0,0 +1,115 @@ +//! The output of a compiled project + +use crate::{ + artifacts::{Error, SourceFile, VersionedContract, VersionedContracts}, + ArtifactOutput, CompilerOutput, WrittenArtifacts, +}; +use semver::Version; +use std::{collections::BTreeMap, path::PathBuf}; + +/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still +/// need to be compiled. 
+#[derive(Debug, Clone, PartialEq, Default)] +pub struct ProjectCompileOutput2 { + /// contains the aggregated `CompilerOutput` + /// + /// See [`CompilerSources::compile`] + pub(crate) compiler_output: AggregatedCompilerOutput, + /// all artifact files from `output` that were written + pub(crate) written_artifacts: WrittenArtifacts, + /// All artifacts that were read from cache + pub(crate) cached_artifacts: BTreeMap, + /// errors that should be omitted + pub(crate) ignored_error_codes: Vec, +} + +impl ProjectCompileOutput2 { + /// Get the (merged) solc compiler output + /// ```no_run + /// use std::collections::btree_map::BTreeMap; + /// use ethers_solc::artifacts::Contract; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let contracts: BTreeMap = + /// project.compile().unwrap().output().contracts_into_iter().collect(); + /// ``` + pub fn output(self) -> AggregatedCompilerOutput { + self.compiler_output + } + + /// Whether this type has a compiler output + pub fn has_compiled_contracts(&self) -> bool { + self.compiler_output.is_empty() + } + + /// Whether there were errors + pub fn has_compiler_errors(&self) -> bool { + self.compiler_output.has_error() + } + + /// Whether there were warnings + pub fn has_compiler_warnings(&self) -> bool { + self.compiler_output + .as_ref() + .map(|o| o.has_warning(&self.ignored_error_codes)) + .unwrap_or_default() + } +} +/// The aggregated output of (multiple) compile jobs +/// +/// This is effectively a solc version aware `CompilerOutput` +#[derive(Clone, Debug, Default, PartialEq)] +pub struct AggregatedCompilerOutput { + /// all errors from all `CompilerOutput` + pub errors: Vec, + /// All source files + pub sources: BTreeMap, + /// All compiled contracts combined with the solc version used to compile them + pub contracts: VersionedContracts, +} + +impl AggregatedCompilerOutput { + /// Whether the output contains a compiler error + pub fn has_error(&self) -> bool { + 
self.errors.iter().any(|err| err.severity.is_error()) + } + + /// Whether the output contains a compiler warning + pub fn has_warning(&self, ignored_error_codes: &[u64]) -> bool { + self.errors.iter().any(|err| { + if err.severity.is_warning() { + err.error_code.as_ref().map_or(false, |code| !ignored_error_codes.contains(code)) + } else { + false + } + }) + } + + pub fn is_empty(&self) -> bool { + self.contracts.is_empty() + } + + pub fn extend_all(&mut self, out: I) + where + I: IntoIterator, + { + for (v, o) in out { + self.extend(v, o) + } + } + + /// adds a new `CompilerOutput` to the aggregated output + pub fn extend(&mut self, version: Version, output: CompilerOutput) { + self.errors.extend(output.errors); + self.sources.extend(output.sources); + + for (file_name, new_contracts) in output.contracts { + let contracts = self.contracts.entry(file_name).or_default(); + for (contract_name, contract) in new_contracts { + let versioned = contracts.entry(contract_name).or_default(); + versioned.push(VersionedContract { contract, version: version.clone() }); + } + } + } +} diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 9ce647479..84a12c598 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -74,15 +74,14 @@ //! 
will then look in `/project/dapp-bin/library/iterable_mapping.sol` use crate::{ - artifacts::{ - Error, Settings, SourceFile, VersionedContract, VersionedContracts, VersionedSources, - }, + artifact_output::WrittenArtifacts, + artifacts::{Settings, VersionedSources}, cache::CacheEntry, error::Result, - output::WrittenArtifacts, + output::AggregatedCompilerOutput, resolver::GraphEdges, - utils, ArtifactOutput, CompilerInput, CompilerOutput, Graph, Project, ProjectPathsConfig, - SolFilesCache, Solc, SolcConfig, Source, SourceUnitNameMap, Sources, + utils, ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput2, + ProjectPathsConfig, SolFilesCache, Solc, SolcConfig, Source, SourceUnitNameMap, Sources, }; use rayon::prelude::*; use semver::Version; @@ -196,7 +195,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { let cached_artifacts = cache.finish(&written_artifacts)?; Ok(ProjectCompileOutput2 { - output, + compiler_output: output, written_artifacts, cached_artifacts, ignored_error_codes: project.ignored_error_codes.clone(), @@ -346,63 +345,6 @@ fn compile_parallel( Ok(aggregated) } -/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still -/// need to be compiled. 
-#[derive(Debug, Clone, PartialEq, Default)] -pub struct ProjectCompileOutput2 { - /// contains the aggregated `CompilerOutput` - /// - /// See [`CompilerSources::compile`] - output: AggregatedCompilerOutput, - /// all artifact files from `output` that were written - written_artifacts: WrittenArtifacts, - /// All artifacts that were read from cache - cached_artifacts: BTreeMap, - ignored_error_codes: Vec, -} - -/// The aggregated output of (multiple) compile jobs -/// -/// This is effectively a solc version aware `CompilerOutput` -#[derive(Clone, Debug, Default, PartialEq)] -pub struct AggregatedCompilerOutput { - /// all errors from all `CompilerOutput` - pub errors: Vec, - /// All source files - pub sources: BTreeMap, - /// All compiled contracts combined with the solc version used to compile them - pub contracts: VersionedContracts, -} - -impl AggregatedCompilerOutput { - pub fn is_empty(&self) -> bool { - self.contracts.is_empty() - } - - fn extend_all(&mut self, out: I) - where - I: IntoIterator, - { - for (v, o) in out { - self.extend(v, o) - } - } - - /// adds a new `CompilerOutput` to the aggregated output - fn extend(&mut self, version: Version, output: CompilerOutput) { - self.errors.extend(output.errors); - self.sources.extend(output.sources); - - for (file_name, new_contracts) in output.contracts { - let contracts = self.contracts.entry(file_name).or_default(); - for (contract_name, contract) in new_contracts { - let versioned = contracts.entry(contract_name).or_default(); - versioned.push(VersionedContract { contract, version: version.clone() }); - } - } - } -} - /// A helper abstraction over the [`SolFilesCache`] used to determine what files need to compiled /// and which `Artifacts` can be reused. 
struct Cache<'a, T: ArtifactOutput> { diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 17e5a4075..99603abbc 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -6,25 +6,22 @@ pub mod sourcemap; pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion}; use std::collections::btree_map::BTreeMap; +mod artifact_output; pub mod cache; pub mod hh; -mod output; -pub use output::*; +pub use artifact_output::*; mod resolver; pub use hh::{HardhatArtifact, HardhatArtifacts}; pub use resolver::Graph; mod compile; - -pub use compile::*; +pub use compile::{output::ProjectCompileOutput2, *}; mod config; - pub use config::{AllowedLibPaths, PathStyle, ProjectPathsConfig, SolcConfig}; pub mod remappings; - use crate::{artifacts::Source, cache::SolFilesCache}; pub mod error; @@ -34,7 +31,7 @@ use crate::{ artifacts::Sources, cache::SourceUnitNameMap, error::{SolcError, SolcIoError}, - project::{ProjectCompileOutput2, ProjectCompiler}, + project::ProjectCompiler, }; use error::Result; use std::{ diff --git a/ethers-solc/src/project_util.rs b/ethers-solc/src/project_util.rs index aee3f2e86..a213e0324 100644 --- a/ethers-solc/src/project_util.rs +++ b/ethers-solc/src/project_util.rs @@ -4,7 +4,7 @@ use crate::{ error::{Result, SolcError}, hh::HardhatArtifacts, utils::tempdir, - ArtifactOutput, MinimalCombinedArtifacts, PathStyle, Project, ProjectCompileOutput, + ArtifactOutput, MinimalCombinedArtifacts, PathStyle, Project, ProjectCompileOutput2, ProjectPathsConfig, SolcIoError, }; use fs_extra::{dir, file}; @@ -55,7 +55,7 @@ impl TempProject { &self.inner } - pub fn compile(&self) -> Result> { + pub fn compile(&self) -> Result> { self.project().compile() } From 17a2829a364957fa12642b249ba6678b9e12dbf1 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 01:49:30 +0100 Subject: [PATCH 28/82] refactor: start deprecating output type --- ethers-solc/src/compile/mod.rs | 1 - ethers-solc/src/lib.rs | 5 ++--- 2 files changed, 2 
insertions(+), 4 deletions(-) diff --git a/ethers-solc/src/compile/mod.rs b/ethers-solc/src/compile/mod.rs index c87f40cd6..95696bd8b 100644 --- a/ethers-solc/src/compile/mod.rs +++ b/ethers-solc/src/compile/mod.rs @@ -15,7 +15,6 @@ use std::{ }; pub mod many; -#[cfg(all(feature = "svm", feature = "async"))] pub mod output; #[cfg(all(feature = "svm", feature = "async"))] pub mod project; diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 99603abbc..8f4a21453 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -31,7 +31,6 @@ use crate::{ artifacts::Sources, cache::SourceUnitNameMap, error::{SolcError, SolcIoError}, - project::ProjectCompiler, }; use error::Result; use std::{ @@ -224,7 +223,7 @@ impl Project { /// ``` #[cfg(all(feature = "svm", feature = "async"))] pub fn svm_compile(&self, sources: Sources) -> Result> { - ProjectCompiler::with_sources(self, sources)?.compile() + project::ProjectCompiler::with_sources(self, sources)?.compile() } /// Compiles the given source files with the exact `Solc` executable @@ -255,7 +254,7 @@ impl Project { solc: &Solc, sources: Sources, ) -> Result> { - ProjectCompiler::with_sources_and_solc(self, sources, solc.clone())?.compile() + project::ProjectCompiler::with_sources_and_solc(self, sources, solc.clone())?.compile() } /// Removes the project's artifacts and cache file From 850497f4e10c95c3d69e005d4475391bb1e9f4c7 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 14:47:44 +0100 Subject: [PATCH 29/82] chore: make it compile again --- ethers-solc/src/compile/mod.rs | 1 - ethers-solc/src/compile/output.rs | 5 +---- ethers-solc/src/compile/project.rs | 5 ++++- ethers-solc/src/hh.rs | 7 +++++-- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/ethers-solc/src/compile/mod.rs b/ethers-solc/src/compile/mod.rs index da3a98364..b14c0844f 100644 --- a/ethers-solc/src/compile/mod.rs +++ b/ethers-solc/src/compile/mod.rs @@ -17,7 +17,6 @@ use std::{ pub mod many; pub mod 
output; -#[cfg(all(feature = "svm", feature = "async"))] pub mod project; /// The name of the `solc` binary on the system diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 0edd819da..1693f2fa6 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -50,10 +50,7 @@ impl ProjectCompileOutput2 { /// Whether there were warnings pub fn has_compiler_warnings(&self) -> bool { - self.compiler_output - .as_ref() - .map(|o| o.has_warning(&self.ignored_error_codes)) - .unwrap_or_default() + self.compiler_output.has_warning(&self.ignored_error_codes) } } /// The aggregated output of (multiple) compile jobs diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 84a12c598..74dff17a7 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -111,6 +111,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// let project = Project::builder().build().unwrap(); /// let output = project.compile().unwrap(); /// ``` + #[cfg(all(feature = "svm", feature = "async"))] pub fn new(project: &'a Project) -> Result { Self::with_sources(project, project.paths.read_input_files()?) } @@ -121,6 +122,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`]. 
+ #[cfg(all(feature = "svm", feature = "async"))] pub fn with_sources(project: &'a Project, sources: Sources) -> Result { let graph = Graph::resolve_sources(&project.paths, sources)?; let (versions, edges) = graph.into_sources_by_version(!project.auto_detect)?; @@ -204,7 +206,8 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { } /// Determines how the `solc <-> sources` pairs are executed -#[derive(Debug)] +#[derive(Debug, Clone)] +#[allow(dead_code)] enum CompilerSources { /// Compile all these sequentially Sequential(VersionedSources), diff --git a/ethers-solc/src/hh.rs b/ethers-solc/src/hh.rs index 56176c829..d6edb6647 100644 --- a/ethers-solc/src/hh.rs +++ b/ethers-solc/src/hh.rs @@ -1,8 +1,11 @@ //! Hardhat support use crate::{ - artifacts::{BytecodeObject, CompactContract, Contract, Offsets}, - ArtifactOutput, CompactContractBytecode, + artifacts::{ + Bytecode, BytecodeObject, CompactContract, CompactContractBytecode, Contract, + ContractBytecode, DeployedBytecode, Offsets, + }, + ArtifactOutput, }; use ethers_core::abi::Abi; use serde::{Deserialize, Serialize}; From 14e020c06fb12f4c7cd930b00c1bedd8c0ea9202 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Fri, 28 Jan 2022 15:10:42 +0100 Subject: [PATCH 30/82] chore(deps): bump solc version 0.2.0 --- Cargo.lock | 2 +- Cargo.toml | 2 +- ethers-contract/Cargo.toml | 2 +- ethers-middleware/Cargo.toml | 2 +- ethers-solc/Cargo.toml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c9823b48b..09bc2909d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1369,7 +1369,7 @@ dependencies = [ [[package]] name = "ethers-solc" -version = "0.1.0" +version = "0.2.0" dependencies = [ "colored", "criterion", diff --git a/Cargo.toml b/Cargo.toml index 0d16a9f3b..0fd971765 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -88,7 +88,7 @@ ethers-core = { version = "^0.6.0", default-features = false, path = "./ethers-c ethers-providers = { version = "^0.6.0", default-features = false, 
path = "./ethers-providers" } ethers-signers = { version = "^0.6.0", default-features = false, path = "./ethers-signers" } ethers-middleware = { version = "^0.6.0", default-features = false, path = "./ethers-middleware" } -ethers-solc = { version = "^0.1.0", default-features = false, path = "./ethers-solc" } +ethers-solc = { version = "^0.2.0", default-features = false, path = "./ethers-solc" } ethers-etherscan = { version = "^0.2.0", default-features = false, path = "./ethers-etherscan" } [dev-dependencies] diff --git a/ethers-contract/Cargo.toml b/ethers-contract/Cargo.toml index dc29519bc..d3ac7f097 100644 --- a/ethers-contract/Cargo.toml +++ b/ethers-contract/Cargo.toml @@ -32,7 +32,7 @@ ethers-contract-abigen = { version = "^0.6.0", path = "ethers-contract-abigen" } ethers-contract-derive = { version = "^0.6.0", path = "ethers-contract-derive" } ethers-core = { version = "^0.6.0", path = "../ethers-core", default-features = false, features = ["eip712"]} ethers-derive-eip712 = { version = "^0.2.0", path = "../ethers-core/ethers-derive-eip712"} -ethers-solc = { version = "^0.1.0", path = "../ethers-solc", default-features = false } +ethers-solc = { version = "^0.2.0", path = "../ethers-solc", default-features = false } [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] tokio = { version = "1.5", default-features = false, features = ["macros"] } diff --git a/ethers-middleware/Cargo.toml b/ethers-middleware/Cargo.toml index ea3cae313..676059775 100644 --- a/ethers-middleware/Cargo.toml +++ b/ethers-middleware/Cargo.toml @@ -42,7 +42,7 @@ hex = { version = "0.4.3", default-features = false, features = ["std"] } rand = { version = "0.8.4", default-features = false } ethers-providers = { version = "^0.6.0", path = "../ethers-providers", default-features = false, features = ["ws", "rustls"] } once_cell = "1.8.0" -ethers-solc = { version = "^0.1.0", path = "../ethers-solc", default-features = false } +ethers-solc = { version = "^0.2.0", path = 
"../ethers-solc", default-features = false } serial_test = "0.5.1" [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] diff --git a/ethers-solc/Cargo.toml b/ethers-solc/Cargo.toml index e968f9025..c44573032 100644 --- a/ethers-solc/Cargo.toml +++ b/ethers-solc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ethers-solc" -version = "0.1.0" +version = "0.2.0" authors = ["Matthias Seitz ", "Georgios Konstantopoulos "] license = "MIT OR Apache-2.0" edition = "2018" From 38da02735213cfd406741e8935841f9bf7f03fac Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 12:52:23 +0100 Subject: [PATCH 31/82] feat: unify output types --- ethers-solc/src/artifact_output.rs | 110 ++++++++++++- ethers-solc/src/artifacts.rs | 69 +------- ethers-solc/src/compile/output.rs | 244 ++++++++++++++++++++++++++++- ethers-solc/src/lib.rs | 198 ----------------------- 4 files changed, 349 insertions(+), 272 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 78711e379..ad26e49f8 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -2,9 +2,11 @@ use crate::{ artifacts::{ - CompactContract, CompactContractBytecode, Contract, FileToContractsMap, VersionedContracts, + CompactContract, CompactContractBytecode, Contract, FileToContractsMap, VersionedContract, + VersionedContracts, }, error::Result, + output::AggregatedCompilerOutput, HardhatArtifact, ProjectPathsConfig, SolcError, }; use ethers_core::{abi::Abi, types::Bytes}; @@ -27,8 +29,82 @@ pub struct WrittenArtifact { pub version: Version, } +/// local helper type alias +type ArtifactsMap = FileToContractsMap>>; + /// Represents the written Artifacts -pub type WrittenArtifacts = FileToContractsMap>>; +#[derive(Debug, Clone, PartialEq)] +pub struct WrittenArtifacts(pub ArtifactsMap); + +impl Default for WrittenArtifacts { + fn default() -> Self { + Self(Default::default()) + } +} + +impl AsRef> for WrittenArtifacts { + fn 
as_ref(&self) -> &ArtifactsMap { + &self.0 + } +} + +impl AsMut> for WrittenArtifacts { + fn as_mut(&mut self) -> &mut ArtifactsMap { + &mut self.0 + } +} + +impl WrittenArtifacts { + /// Returns an iterator over _all_ artifacts and `` + pub fn into_artifacts>( + self, + ) -> impl Iterator { + self.0.into_values().flat_map(|contract_artifacts| { + contract_artifacts.into_iter().flat_map(|(contract_name, artifacts)| { + artifacts.into_iter().filter_map(|artifact| { + O::contract_name(&artifact.file).map(|name| { + ( + format!( + "{}:{}", + artifact.file.file_name().unwrap().to_string_lossy(), + name + ), + artifact.artifact, + ) + }) + }) + }) + }) + } + + /// Finds the first artifact `T` with a matching contract name + pub fn find(&self, contract_name: impl AsRef) -> Option<&T> { + let contract_name = contract_name.as_ref(); + self.0.iter().find_map(|(file, contracts)| { + contracts.get(contract_name).and_then(|c| c.get(0).map(|a| &a.artifact)) + }) + } + + /// Removes the first artifact `T` with a matching contract name + /// + /// *Note:* if there are multiple artifacts (contract compiled with different solc) then this + /// returns the first artifact in that set + pub fn remove(&mut self, contract_name: impl AsRef) -> Option { + let contract_name = contract_name.as_ref(); + self.0.iter_mut().find_map(|(file, contracts)| { + let mut artifact = None; + if let Some((c, mut artifacts)) = contracts.remove_entry(contract_name) { + if !artifacts.is_empty() { + artifact = Some(artifacts.remove(0).artifact); + } + if !artifacts.is_empty() { + contracts.insert(c, artifacts); + } + } + artifact + }) + } +} /// Bundled Artifacts: `file -> (contract name -> (Artifact, Version))` pub type Artifacts = FileToContractsMap>; @@ -96,7 +172,7 @@ pub trait ArtifactOutput { ) -> Result> { fs::create_dir_all(&layout.artifacts) .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; - let mut artifacts = WrittenArtifacts::default(); + let mut artifacts = 
ArtifactsMap::new(); for (file, contracts) in contracts.iter() { let mut entries = BTreeMap::new(); @@ -129,7 +205,7 @@ pub trait ArtifactOutput { artifacts.insert(file.to_string(), entries); } - Ok(artifacts) + Ok(WrittenArtifacts(artifacts)) } /// Returns the file name for the contract's artifact @@ -217,6 +293,32 @@ pub trait ArtifactOutput { /// Convert a contract to the artifact type fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact; + + /// Convert the compiler output into a set of artifacts + fn output_to_artifacts(output: AggregatedCompilerOutput) -> Artifacts { + output + .contracts + .into_iter() + .map(|(file, all_contracts)| { + let contracts = all_contracts + .into_iter() + .map(|(name, versioned_contracts)| { + let artifacts = versioned_contracts + .into_iter() + .map(|c| { + let VersionedContract { contract, version } = c; + let artifact = Self::contract_to_artifact(&file, &name, contract); + (artifact, version) + }) + .collect(); + (name, artifacts) + }) + .collect(); + + (file, contracts) + }) + .collect() + } } /// An Artifacts implementation that uses a compact representation diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index 30780470e..b9ed59b69 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -15,6 +15,7 @@ use std::{ use crate::{ compile::*, error::SolcIoError, + output::AggregatedCompilerOutput, remappings::Remapping, sourcemap::{self, SourceMap, SyntaxError}, utils, @@ -560,10 +561,6 @@ impl CompilerOutput { }) } - pub fn diagnostics<'a>(&'a self, ignored_error_codes: &'a [u64]) -> OutputDiagnostics { - OutputDiagnostics { compiler_output: self, ignored_error_codes } - } - /// Finds the _first_ contract with the given name /// /// # Example @@ -699,70 +696,6 @@ impl OutputContracts { } } -/// Helper type to implement display for solc errors -#[derive(Clone, Debug)] -pub struct OutputDiagnostics<'a> { - compiler_output: &'a CompilerOutput, - 
ignored_error_codes: &'a [u64], -} - -impl<'a> OutputDiagnostics<'a> { - /// Returns true if there is at least one error of high severity - pub fn has_error(&self) -> bool { - self.compiler_output.has_error() - } - - /// Returns true if there is at least one warning - pub fn has_warning(&self) -> bool { - self.compiler_output.has_warning(self.ignored_error_codes) - } - - fn is_test>(&self, contract_path: T) -> bool { - if contract_path.as_ref().ends_with(".t.sol") { - return true - } - - self.compiler_output.find(&contract_path).map_or(false, |contract| { - contract.abi.map_or(false, |abi| abi.functions.contains_key("IS_TEST")) - }) - } -} - -impl<'a> fmt::Display for OutputDiagnostics<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.has_error() { - f.write_str("Compiler run failed")?; - } else if self.has_warning() { - f.write_str("Compiler run successful (with warnings)")?; - } else { - f.write_str("Compiler run successful")?; - } - for err in &self.compiler_output.errors { - if err.severity.is_warning() { - let is_ignored = err.error_code.as_ref().map_or(false, |code| { - if let Some(source_location) = &err.source_location { - // we ignore spdx and contract size warnings in test - // files. if we are looking at one of these warnings - // from a test file we skip - if self.is_test(&source_location.file) && (*code == 1878 || *code == 5574) { - return true - } - } - - self.ignored_error_codes.contains(code) - }); - - if !is_ignored { - writeln!(f, "\n{}", err)?; - } - } else { - writeln!(f, "\n{}", err)?; - } - } - Ok(()) - } -} - /// A contract and the compiler version used to compile it #[derive(Clone, Debug, PartialEq)] pub struct VersionedContract { diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 1693f2fa6..6ab2636af 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -1,11 +1,15 @@ //! 
The output of a compiled project use crate::{ - artifacts::{Error, SourceFile, VersionedContract, VersionedContracts}, + artifacts::{ + CompactContractRef, Contract, Error, SourceFile, SourceFiles, VersionedContract, + VersionedContracts, + }, ArtifactOutput, CompilerOutput, WrittenArtifacts, }; use semver::Version; -use std::{collections::BTreeMap, path::PathBuf}; +use serde::{Deserialize, Serialize}; +use std::{borrow::Cow, collections::BTreeMap, fmt, path::PathBuf}; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. @@ -24,6 +28,31 @@ pub struct ProjectCompileOutput2 { } impl ProjectCompileOutput2 { + /// All artifacts together with their contract file name and name `:` + /// + /// # Example + /// + /// ```no_run + /// use std::collections::btree_map::BTreeMap; + /// use ethers_solc::artifacts::CompactContractBytecode; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let contracts: BTreeMap = project.compile().unwrap().into_artifacts().collect(); + /// ``` + // TODO add ArtifactId (filename, contract name, version?) + pub fn into_artifacts(self) -> impl Iterator { + let Self { cached_artifacts, written_artifacts, .. 
} = self; + cached_artifacts + .into_iter() + .filter_map(|(path, art)| { + T::contract_name(&path).map(|name| { + (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art) + }) + }) + .chain(written_artifacts.into_artifacts::()) + } + /// Get the (merged) solc compiler output /// ```no_run /// use std::collections::btree_map::BTreeMap; @@ -43,6 +72,11 @@ impl ProjectCompileOutput2 { self.compiler_output.is_empty() } + /// Whether this type does not contain compiled contracts + pub fn is_unchanged(&self) -> bool { + !self.has_compiled_contracts() + } + /// Whether there were errors pub fn has_compiler_errors(&self) -> bool { self.compiler_output.has_error() @@ -52,7 +86,50 @@ impl ProjectCompileOutput2 { pub fn has_compiler_warnings(&self) -> bool { self.compiler_output.has_warning(&self.ignored_error_codes) } + + /// Finds the first contract with the given name and removes it from the set + pub fn remove(&mut self, contract_name: impl AsRef) -> Option { + let contract_name = contract_name.as_ref(); + if let artifact @ Some(_) = self.written_artifacts.remove(contract_name) { + return artifact + } + let key = self + .cached_artifacts + .iter() + .find_map(|(path, _)| { + T::contract_name(path).filter(|name| name == contract_name).map(|_| path) + })? 
+ .clone(); + self.cached_artifacts.remove(&key) + } +} + +impl ProjectCompileOutput2 +where + T::Artifact: Clone, +{ + /// Finds the first contract with the given name + pub fn find(&self, contract_name: impl AsRef) -> Option<&T::Artifact> { + let contract_name = contract_name.as_ref(); + if let artifact @ Some(_) = self.written_artifacts.find(contract_name) { + return artifact + } + self.cached_artifacts.iter().find_map(|(path, art)| { + T::contract_name(path).filter(|name| name == contract_name).map(|_| art) + }) + } +} + +impl fmt::Display for ProjectCompileOutput2 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.compiler_output.is_empty() { + f.write_str("Nothing to compile") + } else { + self.compiler_output.diagnostics(&self.ignored_error_codes).fmt(f) + } + } } + /// The aggregated output of (multiple) compile jobs /// /// This is effectively a solc version aware `CompilerOutput` @@ -83,6 +160,10 @@ impl AggregatedCompilerOutput { }) } + pub fn diagnostics<'a>(&'a self, ignored_error_codes: &'a [u64]) -> OutputDiagnostics { + OutputDiagnostics { compiler_output: self, ignored_error_codes } + } + pub fn is_empty(&self) -> bool { self.contracts.is_empty() } @@ -109,4 +190,163 @@ impl AggregatedCompilerOutput { } } } + + /// Finds the _first_ contract with the given name + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::*; + /// # fn demo(project: Project) { + /// let output = project.compile().unwrap().output(); + /// let contract = output.find("Greeter").unwrap(); + /// # } + /// ``` + pub fn find(&self, contract: impl AsRef) -> Option { + let contract_name = contract.as_ref(); + self.contracts_iter().find_map(|(name, contract)| { + (name == contract_name).then(|| CompactContractRef::from(contract)) + }) + } + + /// Removes the _first_ contract with the given name from the set + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::*; 
+ /// # fn demo(project: Project) { + /// let mut output = project.compile().unwrap().output(); + /// let contract = output.remove("Greeter").unwrap(); + /// # } + /// ``` + pub fn remove(&mut self, contract: impl AsRef) -> Option { + let contract_name = contract.as_ref(); + self.contracts.values_mut().find_map(|all_contracts| { + let mut contract = None; + if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { + if !contracts.is_empty() { + contract = Some(contracts.remove(0).contract); + } + if !contracts.is_empty() { + all_contracts.insert(c, contracts); + } + } + contract + }) + } + + /// Iterate over all contracts and their names + pub fn contracts_iter(&self) -> impl Iterator { + self.contracts.values().flat_map(|c| { + c.into_iter().flat_map(|(name, c)| c.into_iter().map(move |c| (name, &c.contract))) + }) + } + + /// Iterate over all contracts and their names + pub fn contracts_into_iter(self) -> impl Iterator { + self.contracts.into_values().flat_map(|c| { + c.into_iter() + .flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract))) + }) + } + + /// Given the contract file's path and the contract's name, tries to return the contract's + /// bytecode, runtime bytecode, and abi + pub fn get(&self, path: &str, contract: &str) -> Option { + self.contracts + .get(path) + .and_then(|contracts| { + contracts.get(contract).and_then(|c| c.get(0).map(|c| &c.contract)) + }) + .map(CompactContractRef::from) + } + + /// Returns the output's source files and contracts separately, wrapped in helper types that + /// provide several helper methods + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// # fn demo(project: Project) { + /// let output = project.compile().unwrap().output(); + /// let (sources, contracts) = output.split(); + /// # } + /// ``` + pub fn split(self) -> (SourceFiles, OutputContracts) { + // (SourceFiles(self.sources), OutputContracts(self.contracts)) + todo!() + } +} + +/// A wrapper 
helper type for the `Contracts` type alias +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct OutputContracts(pub usize); + +/// Helper type to implement display for solc errors +#[derive(Clone, Debug)] +pub struct OutputDiagnostics<'a> { + compiler_output: &'a AggregatedCompilerOutput, + ignored_error_codes: &'a [u64], +} + +impl<'a> OutputDiagnostics<'a> { + /// Returns true if there is at least one error of high severity + pub fn has_error(&self) -> bool { + self.compiler_output.has_error() + } + + /// Returns true if there is at least one warning + pub fn has_warning(&self) -> bool { + self.compiler_output.has_warning(self.ignored_error_codes) + } + + /// Returns true if the contract is a expected to be a test + fn is_test>(&self, contract_path: T) -> bool { + if contract_path.as_ref().ends_with(".t.sol") { + return true + } + + self.compiler_output.find(&contract_path).map_or(false, |contract| { + contract.abi.map_or(false, |abi| abi.functions.contains_key("IS_TEST")) + }) + } +} + +impl<'a> fmt::Display for OutputDiagnostics<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.has_error() { + f.write_str("Compiler run failed")?; + } else if self.has_warning() { + f.write_str("Compiler run successful (with warnings)")?; + } else { + f.write_str("Compiler run successful")?; + } + for err in &self.compiler_output.errors { + if err.severity.is_warning() { + let is_ignored = err.error_code.as_ref().map_or(false, |code| { + if let Some(source_location) = &err.source_location { + // we ignore spdx and contract size warnings in test + // files. 
if we are looking at one of these warnings + // from a test file we skip + if self.is_test(&source_location.file) && (*code == 1878 || *code == 5574) { + return true + } + } + + self.ignored_error_codes.contains(code) + }); + + if !is_ignored { + writeln!(f, "\n{}", err)?; + } + } else { + writeln!(f, "\n{}", err)?; + } + } + Ok(()) + } } diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index bb359975c..e79700c79 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -531,204 +531,6 @@ impl Default for ProjectBuilder { } } -/// The outcome of `Project::compile` -#[derive(Debug, Clone, PartialEq, Default)] -pub struct ProjectCompileOutput { - /// If solc was invoked multiple times in `Project::compile` then this contains a merged - /// version of all `CompilerOutput`s. If solc was called only once then `compiler_output` - /// holds the `CompilerOutput` of that call. - compiler_output: Option, - /// All artifacts that were read from cache - artifacts: BTreeMap, - ignored_error_codes: Vec, -} - -impl ProjectCompileOutput { - pub fn with_ignored_errors(ignored_errors: Vec) -> Self { - Self { - compiler_output: None, - artifacts: Default::default(), - ignored_error_codes: ignored_errors, - } - } - - pub fn from_unchanged(artifacts: BTreeMap) -> Self { - Self { compiler_output: None, artifacts, ignored_error_codes: vec![] } - } - - pub fn from_compiler_output( - compiler_output: CompilerOutput, - ignored_error_codes: Vec, - ) -> Self { - Self { - compiler_output: Some(compiler_output), - artifacts: Default::default(), - ignored_error_codes, - } - } - - pub fn from_compiler_output_and_cache( - compiler_output: CompilerOutput, - cache: BTreeMap, - ignored_error_codes: Vec, - ) -> Self { - Self { compiler_output: Some(compiler_output), artifacts: cache, ignored_error_codes } - } - - /// Get the (merged) solc compiler output - /// ```no_run - /// use std::collections::btree_map::BTreeMap; - /// use ethers_solc::artifacts::Contract; - /// use 
ethers_solc::Project; - /// - /// let project = Project::builder().build().unwrap(); - /// let contracts: BTreeMap = - /// project.compile().unwrap().output().contracts_into_iter().collect(); - /// ``` - pub fn output(self) -> CompilerOutput { - self.compiler_output.unwrap_or_default() - } - - /// Combine two outputs - pub fn extend(&mut self, compiled: ProjectCompileOutput) { - let ProjectCompileOutput { compiler_output, artifacts, .. } = compiled; - self.artifacts.extend(artifacts); - if let Some(output) = compiler_output { - self.extend_output(output); - } - } - - pub fn extend_output(&mut self, compiled: CompilerOutput) { - if let Some(output) = self.compiler_output.as_mut() { - output.errors.extend(compiled.errors); - output.sources.extend(compiled.sources); - output.contracts.extend(compiled.contracts); - } else { - self.compiler_output = Some(compiled); - } - } - - pub fn extend_artifacts(&mut self, artifacts: BTreeMap) { - self.artifacts.extend(artifacts); - } - - /// Whether this type does not contain compiled contracts - pub fn is_unchanged(&self) -> bool { - !self.has_compiled_contracts() - } - - /// Whether this type has a compiler output - pub fn has_compiled_contracts(&self) -> bool { - if let Some(output) = self.compiler_output.as_ref() { - !output.contracts.is_empty() - } else { - false - } - } - - /// Whether there were errors - pub fn has_compiler_errors(&self) -> bool { - self.compiler_output.as_ref().map(|o| o.has_error()).unwrap_or_default() - } - - /// Whether there were warnings - pub fn has_compiler_warnings(&self) -> bool { - self.compiler_output - .as_ref() - .map(|o| o.has_warning(&self.ignored_error_codes)) - .unwrap_or_default() - } - - /// Finds the first contract with the given name and removes it from the set - pub fn remove(&mut self, contract_name: impl AsRef) -> Option { - let contract_name = contract_name.as_ref(); - if let Some(output) = self.compiler_output.as_mut() { - if let contract @ Some(_) = 
output.contracts.iter_mut().find_map(|(file, c)| { - c.remove(contract_name).map(|c| T::contract_to_artifact(file, contract_name, c)) - }) { - return contract - } - } - let key = self - .artifacts - .iter() - .find_map(|(path, _)| { - T::contract_name(path).filter(|name| name == contract_name).map(|_| path) - })? - .clone(); - self.artifacts.remove(&key) - } -} - -impl ProjectCompileOutput -where - T::Artifact: Clone, -{ - /// Finds the first contract with the given name - pub fn find(&self, contract_name: impl AsRef) -> Option> { - let contract_name = contract_name.as_ref(); - if let Some(output) = self.compiler_output.as_ref() { - if let contract @ Some(_) = output.contracts.iter().find_map(|(file, contracts)| { - contracts - .get(contract_name) - .map(|c| T::contract_to_artifact(file, contract_name, c.clone())) - .map(Cow::Owned) - }) { - return contract - } - } - self.artifacts.iter().find_map(|(path, art)| { - T::contract_name(path).filter(|name| name == contract_name).map(|_| Cow::Borrowed(art)) - }) - } -} - -impl ProjectCompileOutput { - /// All artifacts together with their contract file name and name `:` - /// - /// # Example - /// - /// ```no_run - /// use std::collections::btree_map::BTreeMap; - /// use ethers_solc::artifacts::CompactContractBytecode; - /// use ethers_solc::Project; - /// - /// let project = Project::builder().build().unwrap(); - /// let contracts: BTreeMap = project.compile().unwrap().into_artifacts().collect(); - /// ``` - pub fn into_artifacts(self) -> Box> { - // let artifacts = self.artifacts.into_iter().filter_map(|(path, art)| { - // T::contract_name(&path).map(|name| { - // (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art) - // }) - // }); - // - // let artifacts: Box> = if let Some(output) = - // self.compiler_output.take() - // { - // Box::new(artifacts.chain(T::output_to_artifacts(output).into_values().flatten().map( - // |(name, artifact)| { - // (format!("{}:{}", 
T::output_file_name(&name).display(), name), artifact) - // }, - // ))) - // } else { - // Box::new(artifacts) - // }; - // artifacts - todo!() - } -} - -impl fmt::Display for ProjectCompileOutput { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(output) = self.compiler_output.as_ref() { - output.diagnostics(&self.ignored_error_codes).fmt(f) - } else { - f.write_str("Nothing to compile") - } - } -} - #[cfg(test)] mod tests { #[test] From 3ac275ca64b10fcfb131e9facf47391b3ca65a3a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 12:56:47 +0100 Subject: [PATCH 32/82] cargo fix --- ethers-solc/src/artifact_output.rs | 6 +++--- ethers-solc/src/artifacts.rs | 1 - ethers-solc/src/compile/output.rs | 4 ++-- ethers-solc/src/lib.rs | 4 +--- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index ad26e49f8..2edfc6d69 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -60,7 +60,7 @@ impl WrittenArtifacts { self, ) -> impl Iterator { self.0.into_values().flat_map(|contract_artifacts| { - contract_artifacts.into_iter().flat_map(|(contract_name, artifacts)| { + contract_artifacts.into_iter().flat_map(|(_contract_name, artifacts)| { artifacts.into_iter().filter_map(|artifact| { O::contract_name(&artifact.file).map(|name| { ( @@ -80,7 +80,7 @@ impl WrittenArtifacts { /// Finds the first artifact `T` with a matching contract name pub fn find(&self, contract_name: impl AsRef) -> Option<&T> { let contract_name = contract_name.as_ref(); - self.0.iter().find_map(|(file, contracts)| { + self.0.iter().find_map(|(_file, contracts)| { contracts.get(contract_name).and_then(|c| c.get(0).map(|a| &a.artifact)) }) } @@ -91,7 +91,7 @@ impl WrittenArtifacts { /// returns the first artifact in that set pub fn remove(&mut self, contract_name: impl AsRef) -> Option { let contract_name = contract_name.as_ref(); - 
self.0.iter_mut().find_map(|(file, contracts)| { + self.0.iter_mut().find_map(|(_file, contracts)| { let mut artifact = None; if let Some((c, mut artifacts)) = contracts.remove_entry(contract_name) { if !artifacts.is_empty() { diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index b9ed59b69..de170f12a 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -15,7 +15,6 @@ use std::{ use crate::{ compile::*, error::SolcIoError, - output::AggregatedCompilerOutput, remappings::Remapping, sourcemap::{self, SourceMap, SyntaxError}, utils, diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 6ab2636af..c8f2db58a 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -9,7 +9,7 @@ use crate::{ }; use semver::Version; use serde::{Deserialize, Serialize}; -use std::{borrow::Cow, collections::BTreeMap, fmt, path::PathBuf}; +use std::{collections::BTreeMap, fmt, path::PathBuf}; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. 
@@ -241,7 +241,7 @@ impl AggregatedCompilerOutput { /// Iterate over all contracts and their names pub fn contracts_iter(&self) -> impl Iterator { self.contracts.values().flat_map(|c| { - c.into_iter().flat_map(|(name, c)| c.into_iter().map(move |c| (name, &c.contract))) + c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract))) }) } diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index e79700c79..fc8aa0074 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -4,7 +4,7 @@ pub mod artifacts; pub mod sourcemap; pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion}; -use std::collections::btree_map::BTreeMap; + mod artifact_output; pub mod cache; @@ -34,9 +34,7 @@ use crate::{ }; use error::Result; use std::{ - borrow::Cow, convert::TryInto, - fmt, marker::PhantomData, path::{Path, PathBuf}, }; From 4718eb45f911140837d41f6b3bd275b08e7f3e23 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 13:23:20 +0100 Subject: [PATCH 33/82] refactor: add contracts wrapper --- ethers-solc/src/artifact_output.rs | 8 +- ethers-solc/src/artifacts.rs | 75 --------------- ethers-solc/src/compile/contracts.rs | 132 +++++++++++++++++++++++++++ ethers-solc/src/compile/mod.rs | 1 + ethers-solc/src/compile/output.rs | 53 ++--------- ethers-solc/src/lib.rs | 1 - 6 files changed, 146 insertions(+), 124 deletions(-) create mode 100644 ethers-solc/src/compile/contracts.rs diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 2edfc6d69..845a8a03e 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -1,10 +1,8 @@ //! 
Output artifact handling use crate::{ - artifacts::{ - CompactContract, CompactContractBytecode, Contract, FileToContractsMap, VersionedContract, - VersionedContracts, - }, + artifacts::{CompactContract, CompactContractBytecode, Contract, FileToContractsMap}, + contracts::{VersionedContract, VersionedContracts}, error::Result, output::AggregatedCompilerOutput, HardhatArtifact, ProjectPathsConfig, SolcError, @@ -174,7 +172,7 @@ pub trait ArtifactOutput { .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; let mut artifacts = ArtifactsMap::new(); - for (file, contracts) in contracts.iter() { + for (file, contracts) in contracts.as_ref().iter() { let mut entries = BTreeMap::new(); for (name, versioned_contracts) in contracts { let mut contracts = Vec::with_capacity(versioned_contracts.len()); diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index de170f12a..60bebde27 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -32,9 +32,6 @@ pub type FileToContractsMap = BTreeMap>; /// file -> (contract name -> Contract) pub type Contracts = FileToContractsMap; -/// file -> [(contract name -> Contract + solc version)] -pub type VersionedContracts = FileToContractsMap>; - /// An ordered list of files and their source pub type Sources = BTreeMap; @@ -561,17 +558,6 @@ impl CompilerOutput { } /// Finds the _first_ contract with the given name - /// - /// # Example - /// - /// ``` - /// use ethers_solc::Project; - /// use ethers_solc::artifacts::*; - /// # fn demo(project: Project) { - /// let output = project.compile().unwrap().output(); - /// let contract = output.find("Greeter").unwrap(); - /// # } - /// ``` pub fn find(&self, contract: impl AsRef) -> Option { let contract_name = contract.as_ref(); self.contracts_iter().find_map(|(name, contract)| { @@ -580,17 +566,6 @@ impl CompilerOutput { } /// Finds the first contract with the given name and removes it from the set - /// - /// # Example - 
/// - /// ``` - /// use ethers_solc::Project; - /// use ethers_solc::artifacts::*; - /// # fn demo(project: Project) { - /// let mut output = project.compile().unwrap().output(); - /// let contract = output.remove("Greeter").unwrap(); - /// # } - /// ``` pub fn remove(&mut self, contract: impl AsRef) -> Option { let contract_name = contract.as_ref(); self.contracts.values_mut().find_map(|c| c.remove(contract_name)) @@ -617,16 +592,6 @@ impl CompilerOutput { /// Returns the output's source files and contracts separately, wrapped in helper types that /// provide several helper methods - /// - /// # Example - /// - /// ``` - /// use ethers_solc::Project; - /// # fn demo(project: Project) { - /// let output = project.compile().unwrap().output(); - /// let (sources, contracts) = output.split(); - /// # } - /// ``` pub fn split(self) -> (SourceFiles, OutputContracts) { (SourceFiles(self.sources), OutputContracts(self.contracts)) } @@ -638,17 +603,6 @@ pub struct OutputContracts(pub Contracts); impl OutputContracts { /// Returns an iterator over all contracts and their source names. 
- /// - /// ``` - /// use std::collections::BTreeMap; - /// use ethers_solc::{ artifacts::*, Artifact }; - /// # fn demo(contracts: OutputContracts) { - /// let contracts: BTreeMap = contracts - /// .into_contracts() - /// .map(|(k, c)| (k, c.into_compact_contract().unwrap())) - /// .collect(); - /// # } - /// ``` pub fn into_contracts(self) -> impl Iterator { self.0.into_values().flatten() } @@ -659,17 +613,6 @@ impl OutputContracts { } /// Finds the _first_ contract with the given name - /// - /// # Example - /// - /// ``` - /// use ethers_solc::Project; - /// use ethers_solc::artifacts::*; - /// # fn demo(project: Project) { - /// let output = project.compile().unwrap().output(); - /// let contract = output.find("Greeter").unwrap(); - /// # } - /// ``` pub fn find(&self, contract: impl AsRef) -> Option { let contract_name = contract.as_ref(); self.contracts_iter().find_map(|(name, contract)| { @@ -678,30 +621,12 @@ impl OutputContracts { } /// Finds the first contract with the given name and removes it from the set - /// - /// # Example - /// - /// ``` - /// use ethers_solc::Project; - /// use ethers_solc::artifacts::*; - /// # fn demo(project: Project) { - /// let (_, mut contracts) = project.compile().unwrap().output().split(); - /// let contract = contracts.remove("Greeter").unwrap(); - /// # } - /// ``` pub fn remove(&mut self, contract: impl AsRef) -> Option { let contract_name = contract.as_ref(); self.0.values_mut().find_map(|c| c.remove(contract_name)) } } -/// A contract and the compiler version used to compile it -#[derive(Clone, Debug, PartialEq)] -pub struct VersionedContract { - pub contract: Contract, - pub version: Version, -} - /// Represents a compiled solidity contract #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] pub struct Contract { diff --git a/ethers-solc/src/compile/contracts.rs b/ethers-solc/src/compile/contracts.rs new file mode 100644 index 000000000..b2703ba66 --- /dev/null +++ b/ethers-solc/src/compile/contracts.rs @@ 
-0,0 +1,132 @@ +use crate::artifacts::{CompactContractRef, Contract, FileToContractsMap}; +use semver::Version; +use serde::{Deserialize, Serialize}; +use std::collections::BTreeMap; + +/// file -> [(contract name -> Contract + solc version)] +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] +#[serde(transparent)] +pub struct VersionedContracts(pub FileToContractsMap>); + +impl VersionedContracts { + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub fn len(&self) -> usize { + self.0.len() + } + + /// Finds the _first_ contract with the given name + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::*; + /// # fn demo(project: Project) { + /// let output = project.compile().unwrap().output(); + /// let contract = output.find("Greeter").unwrap(); + /// # } + /// ``` + pub fn find(&self, contract: impl AsRef) -> Option { + let contract_name = contract.as_ref(); + self.contracts_iter().find_map(|(name, contract)| { + (name == contract_name).then(|| CompactContractRef::from(contract)) + }) + } + + /// Removes the _first_ contract with the given name from the set + /// + /// # Example + /// + /// ``` + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::*; + /// # fn demo(project: Project) { + /// let (_, mut contracts) = project.compile().unwrap().output().split(); + /// let contract = contracts.remove("Greeter").unwrap(); + /// # } + /// ``` + pub fn remove(&mut self, contract: impl AsRef) -> Option { + let contract_name = contract.as_ref(); + self.0.values_mut().find_map(|all_contracts| { + let mut contract = None; + if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { + if !contracts.is_empty() { + contract = Some(contracts.remove(0).contract); + } + if !contracts.is_empty() { + all_contracts.insert(c, contracts); + } + } + contract + }) + } + + /// Given the contract file's path and the contract's name, tries to return the contract's + /// 
bytecode, runtime bytecode, and abi + pub fn get(&self, path: &str, contract: &str) -> Option { + self.0 + .get(path) + .and_then(|contracts| { + contracts.get(contract).and_then(|c| c.get(0).map(|c| &c.contract)) + }) + .map(CompactContractRef::from) + } + + /// Iterate over all contracts and their names + pub fn contracts_iter(&self) -> impl Iterator { + self.0 + .values() + .flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract)))) + } + + /// Returns an iterator over all contracts and their source names. + /// + /// ``` + /// use std::collections::BTreeMap; + /// use ethers_solc::{ artifacts::*, Artifact }; + /// # fn demo(contracts: OutputContracts) { + /// let contracts: BTreeMap = contracts + /// .into_contracts() + /// .map(|(k, c)| (k, c.into_compact_contract().unwrap())) + /// .collect(); + /// # } + /// ``` + pub fn into_contracts(self) -> impl Iterator { + self.0.into_values().flat_map(|c| { + c.into_iter() + .flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract))) + }) + } +} + +impl AsRef>> for VersionedContracts { + fn as_ref(&self) -> &FileToContractsMap> { + &self.0 + } +} + +impl AsMut>> for VersionedContracts { + fn as_mut(&mut self) -> &mut FileToContractsMap> { + &mut self.0 + } +} + +impl IntoIterator for VersionedContracts { + type Item = (String, BTreeMap>); + type IntoIter = + std::collections::btree_map::IntoIter>>; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +/// A contract and the compiler version used to compile it +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct VersionedContract { + pub contract: Contract, + pub version: Version, +} diff --git a/ethers-solc/src/compile/mod.rs b/ethers-solc/src/compile/mod.rs index 9e05804f2..6171d2045 100644 --- a/ethers-solc/src/compile/mod.rs +++ b/ethers-solc/src/compile/mod.rs @@ -15,6 +15,7 @@ use std::{ str::FromStr, }; +pub mod contracts; pub mod many; pub mod output; pub mod project; diff 
--git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index c8f2db58a..1fe5342c1 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -1,14 +1,11 @@ //! The output of a compiled project use crate::{ - artifacts::{ - CompactContractRef, Contract, Error, SourceFile, SourceFiles, VersionedContract, - VersionedContracts, - }, + artifacts::{CompactContractRef, Contract, Error, SourceFile, SourceFiles}, + contracts::{VersionedContract, VersionedContracts}, ArtifactOutput, CompilerOutput, WrittenArtifacts, }; use semver::Version; -use serde::{Deserialize, Serialize}; use std::{collections::BTreeMap, fmt, path::PathBuf}; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still @@ -183,7 +180,7 @@ impl AggregatedCompilerOutput { self.sources.extend(output.sources); for (file_name, new_contracts) in output.contracts { - let contracts = self.contracts.entry(file_name).or_default(); + let contracts = self.contracts.as_mut().entry(file_name).or_default(); for (contract_name, contract) in new_contracts { let versioned = contracts.entry(contract_name).or_default(); versioned.push(VersionedContract { contract, version: version.clone() }); @@ -204,10 +201,7 @@ impl AggregatedCompilerOutput { /// # } /// ``` pub fn find(&self, contract: impl AsRef) -> Option { - let contract_name = contract.as_ref(); - self.contracts_iter().find_map(|(name, contract)| { - (name == contract_name).then(|| CompactContractRef::from(contract)) - }) + self.contracts.find(contract) } /// Removes the _first_ contract with the given name from the set @@ -223,45 +217,23 @@ impl AggregatedCompilerOutput { /// # } /// ``` pub fn remove(&mut self, contract: impl AsRef) -> Option { - let contract_name = contract.as_ref(); - self.contracts.values_mut().find_map(|all_contracts| { - let mut contract = None; - if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { - if !contracts.is_empty() { 
- contract = Some(contracts.remove(0).contract); - } - if !contracts.is_empty() { - all_contracts.insert(c, contracts); - } - } - contract - }) + self.contracts.remove(contract) } /// Iterate over all contracts and their names pub fn contracts_iter(&self) -> impl Iterator { - self.contracts.values().flat_map(|c| { - c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract))) - }) + self.contracts.contracts_iter() } /// Iterate over all contracts and their names pub fn contracts_into_iter(self) -> impl Iterator { - self.contracts.into_values().flat_map(|c| { - c.into_iter() - .flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract))) - }) + self.contracts.into_contracts() } /// Given the contract file's path and the contract's name, tries to return the contract's /// bytecode, runtime bytecode, and abi pub fn get(&self, path: &str, contract: &str) -> Option { - self.contracts - .get(path) - .and_then(|contracts| { - contracts.get(contract).and_then(|c| c.get(0).map(|c| &c.contract)) - }) - .map(CompactContractRef::from) + self.contracts.get(path, contract) } /// Returns the output's source files and contracts separately, wrapped in helper types that @@ -276,16 +248,11 @@ impl AggregatedCompilerOutput { /// let (sources, contracts) = output.split(); /// # } /// ``` - pub fn split(self) -> (SourceFiles, OutputContracts) { - // (SourceFiles(self.sources), OutputContracts(self.contracts)) - todo!() + pub fn split(self) -> (SourceFiles, VersionedContracts) { + (SourceFiles(self.sources), self.contracts) } } -/// A wrapper helper type for the `Contracts` type alias -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] -pub struct OutputContracts(pub usize); - /// Helper type to implement display for solc errors #[derive(Clone, Debug)] pub struct OutputDiagnostics<'a> { diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index fc8aa0074..00ce75efd 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -5,7 +5,6 
@@ pub mod sourcemap; pub use artifacts::{CompilerInput, CompilerOutput, EvmVersion}; - mod artifact_output; pub mod cache; pub mod hh; From 0e3ad2100a956d3a0165c6d654bdacf83aa389a1 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 13:45:29 +0100 Subject: [PATCH 34/82] chore: replace ProjectCompileOutput --- ethers-solc/src/compile/output.rs | 8 ++++---- ethers-solc/src/compile/project.rs | 8 ++++---- ethers-solc/src/lib.rs | 8 ++++---- ethers-solc/src/project_util.rs | 4 ++-- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 1fe5342c1..972ac8497 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -11,7 +11,7 @@ use std::{collections::BTreeMap, fmt, path::PathBuf}; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. #[derive(Debug, Clone, PartialEq, Default)] -pub struct ProjectCompileOutput2 { +pub struct ProjectCompileOutput { /// contains the aggregated `CompilerOutput` /// /// See [`CompilerSources::compile`] @@ -24,7 +24,7 @@ pub struct ProjectCompileOutput2 { pub(crate) ignored_error_codes: Vec, } -impl ProjectCompileOutput2 { +impl ProjectCompileOutput { /// All artifacts together with their contract file name and name `:` /// /// # Example @@ -101,7 +101,7 @@ impl ProjectCompileOutput2 { } } -impl ProjectCompileOutput2 +impl ProjectCompileOutput where T::Artifact: Clone, { @@ -117,7 +117,7 @@ where } } -impl fmt::Display for ProjectCompileOutput2 { +impl fmt::Display for ProjectCompileOutput { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.compiler_output.is_empty() { f.write_str("Nothing to compile") diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 74dff17a7..9562d5fc9 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -80,8 +80,8 @@ use 
crate::{ error::Result, output::AggregatedCompilerOutput, resolver::GraphEdges, - utils, ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput2, - ProjectPathsConfig, SolFilesCache, Solc, SolcConfig, Source, SourceUnitNameMap, Sources, + utils, ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, + SolFilesCache, Solc, SolcConfig, Source, SourceUnitNameMap, Sources, }; use rayon::prelude::*; use semver::Version; @@ -169,7 +169,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// let project = Project::builder().build().unwrap(); /// let output = project.compile().unwrap(); /// ``` - pub fn compile(self) -> Result> { + pub fn compile(self) -> Result> { let Self { edges, project, mut sources } = self; // the map that keeps track of the mapping of resolved solidity file paths -> source unit // names @@ -196,7 +196,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { // compiled but reused let cached_artifacts = cache.finish(&written_artifacts)?; - Ok(ProjectCompileOutput2 { + Ok(ProjectCompileOutput { compiler_output: output, written_artifacts, cached_artifacts, diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 00ce75efd..92f6e906d 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -15,7 +15,7 @@ pub use hh::{HardhatArtifact, HardhatArtifacts}; pub use resolver::Graph; mod compile; -pub use compile::{output::ProjectCompileOutput2, *}; +pub use compile::{output::ProjectCompileOutput, *}; mod config; pub use config::{AllowedLibPaths, PathStyle, ProjectPathsConfig, SolcConfig}; @@ -177,7 +177,7 @@ impl Project { /// # } /// ``` #[tracing::instrument(skip_all, name = "compile")] - pub fn compile(&self) -> Result> { + pub fn compile(&self) -> Result> { let sources = self.paths.read_input_files()?; tracing::trace!("found {} sources to compile: {:?}", sources.len(), sources.keys()); @@ -219,7 +219,7 @@ impl Project { /// # } /// ``` #[cfg(all(feature = "svm", feature = 
"async"))] - pub fn svm_compile(&self, sources: Sources) -> Result> { + pub fn svm_compile(&self, sources: Sources) -> Result> { project::ProjectCompiler::with_sources(self, sources)?.compile() } @@ -250,7 +250,7 @@ impl Project { &self, solc: &Solc, sources: Sources, - ) -> Result> { + ) -> Result> { project::ProjectCompiler::with_sources_and_solc(self, sources, solc.clone())?.compile() } diff --git a/ethers-solc/src/project_util.rs b/ethers-solc/src/project_util.rs index a213e0324..aee3f2e86 100644 --- a/ethers-solc/src/project_util.rs +++ b/ethers-solc/src/project_util.rs @@ -4,7 +4,7 @@ use crate::{ error::{Result, SolcError}, hh::HardhatArtifacts, utils::tempdir, - ArtifactOutput, MinimalCombinedArtifacts, PathStyle, Project, ProjectCompileOutput2, + ArtifactOutput, MinimalCombinedArtifacts, PathStyle, Project, ProjectCompileOutput, ProjectPathsConfig, SolcIoError, }; use fs_extra::{dir, file}; @@ -55,7 +55,7 @@ impl TempProject { &self.inner } - pub fn compile(&self) -> Result> { + pub fn compile(&self) -> Result> { self.project().compile() } From 15c7c52488b0916ad57ec29be9d5297fa9f27a47 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 14:04:24 +0100 Subject: [PATCH 35/82] docs: add more docs --- ethers-solc/src/artifact_output.rs | 3 +++ ethers-solc/src/cache.rs | 11 +++++++++++ ethers-solc/src/lib.rs | 10 +++++++++- 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 845a8a03e..148cffd2b 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -290,6 +290,9 @@ pub trait ArtifactOutput { } /// Convert a contract to the artifact type + /// + /// This is the core conversion function that takes care of converting a `Contract` into the + /// associated `Artifact` type fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact; /// Convert the compiler output into a set of artifacts diff --git 
a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 9bfe5153c..01eded501 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -105,6 +105,17 @@ impl SolFilesCache { } /// Reads all cached artifacts from disk using the given ArtifactOutput handler + /// + /// # Example + /// + /// ``` + /// use ethers_solc::cache::SolFilesCache; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let cache = SolFilesCache::read(project.cache_path()).unwrap(); + /// let artifacts = cache.read_artifacts::(project.artifacts_path()).unwrap(); + /// ``` pub fn read_artifacts( &self, _artifacts_root: &Path, diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 92f6e906d..a14d5d9a7 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -27,7 +27,7 @@ pub mod error; pub mod utils; use crate::{ - artifacts::Sources, + artifacts::{Contract, Sources}, cache::SourceUnitNameMap, error::{SolcError, SolcIoError}, }; @@ -528,6 +528,14 @@ impl Default for ProjectBuilder { } } +impl ArtifactOutput for Project { + type Artifact = Artifacts::Artifact; + + fn contract_to_artifact(file: &str, name: &str, contract: Contract) -> Self::Artifact { + Artifacts::contract_to_artifact(file, name, contract) + } +} + #[cfg(test)] mod tests { #[test] From da2d7904b07d05300b24508ed87f00d17df7730e Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 14:08:06 +0100 Subject: [PATCH 36/82] feat: add offline mode --- ethers-solc/src/compile/project.rs | 2 +- ethers-solc/src/lib.rs | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 9562d5fc9..a988dd480 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -125,7 +125,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { #[cfg(all(feature = "svm", feature = "async"))] pub fn with_sources(project: &'a 
Project, sources: Sources) -> Result { let graph = Graph::resolve_sources(&project.paths, sources)?; - let (versions, edges) = graph.into_sources_by_version(!project.auto_detect)?; + let (versions, edges) = graph.into_sources_by_version(project.offline)?; let sources_by_version = versions.get(&project.allowed_lib_paths)?; diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index a14d5d9a7..1b13d8504 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -65,6 +65,8 @@ pub struct Project { pub allowed_lib_paths: AllowedLibPaths, /// Maximum number of `solc` processes to run simultaneously. solc_jobs: usize, + /// Offline mode, if set, network access (download solc) is disallowed + offline: bool, } impl Project { @@ -326,6 +328,8 @@ pub struct ProjectBuilder no_artifacts: bool, /// Whether automatic solc version detection is enabled auto_detect: bool, + /// Use offline mode + offline: bool, artifacts: PhantomData, /// Which error codes to ignore pub ignored_error_codes: Vec, @@ -380,6 +384,21 @@ impl ProjectBuilder { self } + /// Activates offline mode + /// + /// Prevents possible network access to download/check solc installs + #[must_use] + pub fn offline(self) -> Self { + self.set_offline(true) + } + + /// Sets the offline status + #[must_use] + pub fn set_offline(mut self, offline: bool) -> Self { + self.offline = offline; + self + } + /// Disables writing artifacts to disk #[must_use] pub fn no_artifacts(self) -> Self { From d720f8782a63759fea0eee2838f4ba8d50cb6240 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 14:55:38 +0100 Subject: [PATCH 37/82] feat: more artifact helpers --- ethers-solc/src/artifact_output.rs | 210 +++++++++++++++++------ ethers-solc/src/compile/output.rs | 4 +- ethers-solc/src/compile/project.rs | 4 +- ethers-solc/src/lib.rs | 5 + 4 files changed, 130 insertions(+), 93 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 
148cffd2b..737311bb6 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -2,9 +2,8 @@ use crate::{ artifacts::{CompactContract, CompactContractBytecode, Contract, FileToContractsMap}, - contracts::{VersionedContract, VersionedContracts}, + contracts::VersionedContracts, error::Result, - output::AggregatedCompilerOutput, HardhatArtifact, ProjectPathsConfig, SolcError, }; use ethers_core::{abi::Abi, types::Bytes}; @@ -16,9 +15,9 @@ use std::{ path::{Path, PathBuf}, }; -/// Represents a written [`crate::Contract`] artifact +/// Represents an artifact file representing a [`crate::Contract`] #[derive(Debug, Clone, PartialEq)] -pub struct WrittenArtifact { +pub struct ArtifactFile { /// The Artifact that was written pub artifact: T, /// path to the file where the `artifact` was written to @@ -27,32 +26,95 @@ pub struct WrittenArtifact { pub version: Version, } +impl ArtifactFile { + /// Writes the given contract to the `out` path creating all parent directories + pub fn write(&self) -> Result<()> { + if let Some(parent) = self.file.parent() { + fs::create_dir_all(parent).map_err(|err| { + SolcError::msg(format!( + "Failed to create artifact parent folder \"{}\": {}", + parent.display(), + err + )) + })?; + } + fs::write(&self.file, serde_json::to_vec_pretty(&self.artifact)?) + .map_err(|err| SolcError::io(err, &self.file))?; + Ok(()) + } +} + +impl ArtifactFile { + /// Sets the file to `root` adjoined to `self.file`. 
+ pub fn join(&mut self, root: impl AsRef) { + self.file = root.as_ref().join(&self.file); + } + + /// Removes `base` from the artifact's path + pub fn strip_prefix(&mut self, base: impl AsRef) { + if let Ok(prefix) = self.file.strip_prefix(base) { + self.file = prefix.to_path_buf(); + } + } +} + /// local helper type alias -type ArtifactsMap = FileToContractsMap>>; +type ArtifactsMap = FileToContractsMap>>; -/// Represents the written Artifacts +/// Represents a set of Artifacts #[derive(Debug, Clone, PartialEq)] -pub struct WrittenArtifacts(pub ArtifactsMap); +pub struct Artifacts(pub ArtifactsMap); -impl Default for WrittenArtifacts { +impl Default for Artifacts { fn default() -> Self { Self(Default::default()) } } -impl AsRef> for WrittenArtifacts { +impl AsRef> for Artifacts { fn as_ref(&self) -> &ArtifactsMap { &self.0 } } -impl AsMut> for WrittenArtifacts { +impl AsMut> for Artifacts { fn as_mut(&mut self) -> &mut ArtifactsMap { &mut self.0 } } -impl WrittenArtifacts { +impl Artifacts { + /// Writes all artifacts into the given `artifacts_root` folder + pub fn write_all(&self) -> Result<()> { + for artifact in self.artifact_files() { + artifact.write()?; + } + Ok(()) + } +} + +impl Artifacts { + /// Sets the artifact files location to `root` adjoined to `self.file`. 
+ pub fn join_all(&mut self, root: impl AsRef) { + let root = root.as_ref(); + self.artifact_files_mut().for_each(|artifact| artifact.join(root)) + } + + /// Removes `base` from all artifacts + pub fn strip_prefix_all(&mut self, base: impl AsRef) { + let base = base.as_ref(); + self.artifact_files_mut().for_each(|artifact| artifact.strip_prefix(base)) + } + + /// Iterate over all artifact files + pub fn artifact_files(&self) -> impl Iterator> { + self.0.values().flat_map(|c| c.values().flat_map(|artifacts| artifacts.into_iter())) + } + /// Iterate over all artifact files + pub fn artifact_files_mut(&mut self) -> impl Iterator> { + self.0.values_mut().flat_map(|c| c.values_mut().flat_map(|artifacts| artifacts.into_iter())) + } + /// Returns an iterator over _all_ artifacts and `` pub fn into_artifacts>( self, @@ -104,8 +166,8 @@ impl WrittenArtifacts { } } -/// Bundled Artifacts: `file -> (contract name -> (Artifact, Version))` -pub type Artifacts = FileToContractsMap>; +// /// Bundled Artifacts: `file -> (contract name -> (Artifact, Version))` +// pub type Artifacts = FileToContractsMap>; /// A trait representation for a [`crate::Contract`] artifact pub trait Artifact { @@ -167,43 +229,11 @@ pub trait ArtifactOutput { fn on_output( contracts: &VersionedContracts, layout: &ProjectPathsConfig, - ) -> Result> { - fs::create_dir_all(&layout.artifacts) - .map_err(|err| SolcError::msg(format!("Failed to create artifacts dir: {}", err)))?; - let mut artifacts = ArtifactsMap::new(); - - for (file, contracts) in contracts.as_ref().iter() { - let mut entries = BTreeMap::new(); - for (name, versioned_contracts) in contracts { - let mut contracts = Vec::with_capacity(versioned_contracts.len()); - // check if the same contract compiled with multiple solc versions - for contract in versioned_contracts { - let artifact_path = if versioned_contracts.len() > 1 { - Self::output_file_versioned(file, name, &contract.version) - } else { - Self::output_file(file, name) - }; - - let 
artifact = - Self::contract_to_artifact(file, name, contract.contract.clone()); - - write_contract::( - &layout.artifacts.join(&artifact_path), - &artifact, - )?; - - contracts.push(WrittenArtifact { - artifact, - file: artifact_path, - version: contract.version.clone(), - }); - } - entries.insert(name.to_string(), contracts); - } - artifacts.insert(file.to_string(), entries); - } - - Ok(WrittenArtifacts(artifacts)) + ) -> Result> { + let mut artifacts = Self::output_to_artifacts(contracts); + artifacts.join_all(&layout.artifacts); + artifacts.write_all()?; + Ok(artifacts) } /// Returns the file name for the contract's artifact @@ -267,6 +297,13 @@ pub trait ArtifactOutput { root.as_ref().join(Self::output_file(contract_file, name)).exists() } + /// Read the artifact that's stored at the given path + /// + /// # Errors + /// + /// Returns an error if + /// - The file does not exist + /// - The file's content couldn't be deserialized into the `Artifact` type fn read_cached_artifact(path: impl AsRef) -> Result { let path = path.as_ref(); let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?; @@ -274,7 +311,9 @@ pub trait ArtifactOutput { Ok(serde_json::from_reader(file)?) 
} - /// Read the cached artifacts from disk + /// Read the cached artifacts that are located the paths the iterator yields + /// + /// See [`Self::read_cached_artifact()`] fn read_cached_artifacts(files: I) -> Result> where I: IntoIterator, @@ -296,29 +335,37 @@ pub trait ArtifactOutput { fn contract_to_artifact(_file: &str, _name: &str, contract: Contract) -> Self::Artifact; /// Convert the compiler output into a set of artifacts - fn output_to_artifacts(output: AggregatedCompilerOutput) -> Artifacts { - output - .contracts - .into_iter() - .map(|(file, all_contracts)| { - let contracts = all_contracts - .into_iter() - .map(|(name, versioned_contracts)| { - let artifacts = versioned_contracts - .into_iter() - .map(|c| { - let VersionedContract { contract, version } = c; - let artifact = Self::contract_to_artifact(&file, &name, contract); - (artifact, version) - }) - .collect(); - (name, artifacts) - }) - .collect(); + /// + /// **Note:** This does only convert, but _NOT_ write the artifacts to disk, See + /// [`Self::on_output()`] + fn output_to_artifacts(contracts: &VersionedContracts) -> Artifacts { + let mut artifacts = ArtifactsMap::new(); + for (file, contracts) in contracts.as_ref().iter() { + let mut entries = BTreeMap::new(); + for (name, versioned_contracts) in contracts { + let mut contracts = Vec::with_capacity(versioned_contracts.len()); + // check if the same contract compiled with multiple solc versions + for contract in versioned_contracts { + let artifact_path = if versioned_contracts.len() > 1 { + Self::output_file_versioned(file, name, &contract.version) + } else { + Self::output_file(file, name) + }; + let artifact = + Self::contract_to_artifact(file, name, contract.contract.clone()); - (file, contracts) - }) - .collect() + contracts.push(ArtifactFile { + artifact, + file: artifact_path, + version: contract.version.clone(), + }); + } + entries.insert(name.to_string(), contracts); + } + artifacts.insert(file.to_string(), entries); + } + + 
Artifacts(artifacts) } } @@ -354,7 +401,7 @@ impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { fn on_output( output: &VersionedContracts, layout: &ProjectPathsConfig, - ) -> Result> { + ) -> Result> { MinimalCombinedArtifacts::on_output(output, layout) } @@ -376,18 +423,3 @@ impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { MinimalCombinedArtifacts::contract_to_artifact(file, name, contract) } } - -/// Writes the given contract to the `out` path creating all parent directories -fn write_contract(out: &Path, artifact: &T) -> Result<()> { - if let Some(parent) = out.parent() { - fs::create_dir_all(parent).map_err(|err| { - SolcError::msg(format!( - "Failed to create artifact parent folder \"{}\": {}", - parent.display(), - err - )) - })?; - } - fs::write(out, serde_json::to_vec_pretty(artifact)?).map_err(|err| SolcError::io(err, out))?; - Ok(()) -} diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 972ac8497..a3e331b5a 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -3,7 +3,7 @@ use crate::{ artifacts::{CompactContractRef, Contract, Error, SourceFile, SourceFiles}, contracts::{VersionedContract, VersionedContracts}, - ArtifactOutput, CompilerOutput, WrittenArtifacts, + ArtifactOutput, Artifacts, CompilerOutput, }; use semver::Version; use std::{collections::BTreeMap, fmt, path::PathBuf}; @@ -17,7 +17,7 @@ pub struct ProjectCompileOutput { /// See [`CompilerSources::compile`] pub(crate) compiler_output: AggregatedCompilerOutput, /// all artifact files from `output` that were written - pub(crate) written_artifacts: WrittenArtifacts, + pub(crate) written_artifacts: Artifacts, /// All artifacts that were read from cache pub(crate) cached_artifacts: BTreeMap, /// errors that should be omitted diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index a988dd480..8c30d26a8 100644 --- a/ethers-solc/src/compile/project.rs +++ 
b/ethers-solc/src/compile/project.rs @@ -74,7 +74,7 @@ //! will then look in `/project/dapp-bin/library/iterable_mapping.sol` use crate::{ - artifact_output::WrittenArtifacts, + artifact_output::Artifacts, artifacts::{Settings, VersionedSources}, cache::CacheEntry, error::Result, @@ -585,7 +585,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { /// Returns all the _cached_ artifacts. fn finish( self, - _written_artifacts: &WrittenArtifacts, + _written_artifacts: &Artifacts, ) -> Result> { match self { ArtifactsCache::Ephemeral => Ok(Default::default()), diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 1b13d8504..281c19e4c 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -455,6 +455,7 @@ impl ProjectBuilder { ignored_error_codes, allowed_paths, solc_jobs, + offline, .. } = self; ProjectBuilder { @@ -464,6 +465,7 @@ impl ProjectBuilder { cached, no_artifacts, auto_detect, + offline, artifacts: PhantomData::default(), ignored_error_codes, allowed_paths, @@ -503,6 +505,7 @@ impl ProjectBuilder { ignored_error_codes, mut allowed_paths, solc_jobs, + offline, } = self; let solc = solc.unwrap_or_default(); @@ -526,6 +529,7 @@ impl ProjectBuilder { ignored_error_codes, allowed_lib_paths: allowed_paths.try_into()?, solc_jobs: solc_jobs.unwrap_or_else(::num_cpus::get), + offline, }) } } @@ -539,6 +543,7 @@ impl Default for ProjectBuilder { cached: true, no_artifacts: false, auto_detect: true, + offline: false, artifacts: PhantomData::default(), ignored_error_codes: Vec::new(), allowed_paths: vec![], From f2913a88d8218c42b382cac6343f51897dbb9ab0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 20:14:58 +0100 Subject: [PATCH 38/82] chore: cleanup cache --- ethers-solc/src/artifact_output.rs | 10 +- ethers-solc/src/cache.rs | 171 +++++++++-------------------- ethers-solc/src/compile/project.rs | 4 +- ethers-solc/src/lib.rs | 2 +- 4 files changed, 62 insertions(+), 125 deletions(-) diff --git 
a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 737311bb6..912427353 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -95,15 +95,17 @@ impl Artifacts { impl Artifacts { /// Sets the artifact files location to `root` adjoined to `self.file`. - pub fn join_all(&mut self, root: impl AsRef) { + pub fn join_all(&mut self, root: impl AsRef) -> &mut Self { let root = root.as_ref(); - self.artifact_files_mut().for_each(|artifact| artifact.join(root)) + self.artifact_files_mut().for_each(|artifact| artifact.join(root)); + self } /// Removes `base` from all artifacts - pub fn strip_prefix_all(&mut self, base: impl AsRef) { + pub fn strip_prefix_all(&mut self, base: impl AsRef) -> &mut Self { let base = base.as_ref(); - self.artifact_files_mut().for_each(|artifact| artifact.strip_prefix(base)) + self.artifact_files_mut().for_each(|artifact| artifact.strip_prefix(base)); + self } /// Iterate over all artifact files diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 01eded501..247b2008c 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -36,39 +36,21 @@ pub struct SolFilesCache { } impl SolFilesCache { + /// Create a new cache instance with the given files pub fn new(files: BTreeMap) -> Self { Self { format: ETHERS_FORMAT_VERSION.to_string(), files } } - /// # Example - /// - /// Autodetect solc version and default settings - /// - /// ```no_run - /// use ethers_solc::artifacts::Source; - /// use ethers_solc::cache::SolFilesCache; - /// let files = Source::read_all_from("./sources").unwrap(); - /// let config = SolFilesCache::builder().insert_files(files, None).unwrap(); - /// ``` - pub fn builder() -> SolFilesCacheBuilder { - SolFilesCacheBuilder::default() - } - - /// Whether this cache's format is the hardhat format identifier - pub fn is_hardhat_format(&self) -> bool { - self.format == HH_FORMAT_VERSION - } - - /// Whether this cache's format is our custom 
format identifier - pub fn is_ethers_format(&self) -> bool { - self.format == ETHERS_FORMAT_VERSION - } - /// Returns the corresponding `CacheEntry` for the file if it exists pub fn entry(&self, file: impl AsRef) -> Option<&CacheEntry> { self.files.get(file.as_ref()) } + /// Returns the corresponding `CacheEntry` for the file if it exists + pub fn entry_mut(&mut self, file: impl AsRef) -> Option<&mut CacheEntry> { + self.files.get_mut(file.as_ref()) + } + /// Reads the cache json file from the given path #[tracing::instrument(skip_all, name = "sol-files-cache::read")] pub fn read(path: impl AsRef) -> Result { @@ -91,17 +73,29 @@ impl SolFilesCache { Ok(()) } + /// Sets the artifact files location to `base` adjoined to the `CachEntries` artifacts. + pub fn join_all(&mut self, base: impl AsRef) -> &mut Self { + let base = base.as_ref(); + self.files.values_mut().for_each(|entry| entry.join(base)); + self + } + + /// Removes `base` from all artifact file paths + pub fn strip_prefix_all(&mut self, base: impl AsRef) -> &mut Self { + let base = base.as_ref(); + self.files.values_mut().for_each(|entry| entry.strip_prefix(base)); + self + } + + /// Removes all `CacheEntry` which source files are missing pub fn remove_missing_files(&mut self) { tracing::trace!("remove non existing files from cache"); - self.files.retain(|file, _| Path::new(file).exists()) + self.files.retain(|file, _| file.exists()) } /// Checks if all artifact files exist - pub fn all_artifacts_exist(&self, _artifacts_root: &Path) -> bool { - // self.files.iter().all(|(file, entry)| { - // entry.artifacts.iter().all(|name| T::output_exists(file, name, artifacts_root)) - // }) - todo!() + pub fn all_artifacts_exist(&self) -> bool { + self.files.values().all(|entry| entry.all_artifacts_exist()) } /// Reads all cached artifacts from disk using the given ArtifactOutput handler @@ -175,93 +169,6 @@ impl Default for SolFilesCache { } } -#[derive(Debug, Clone, Default)] -pub struct SolFilesCacheBuilder { - 
format: Option, - solc_config: Option, - root: Option, -} - -impl SolFilesCacheBuilder { - #[must_use] - pub fn format(mut self, format: impl Into) -> Self { - self.format = Some(format.into()); - self - } - - #[must_use] - pub fn solc_config(mut self, solc_config: SolcConfig) -> Self { - self.solc_config = Some(solc_config); - self - } - - #[must_use] - pub fn root(mut self, root: impl Into) -> Self { - self.root = Some(root.into()); - self - } - - /// Creates a new `SolFilesCache` instance - /// - /// If a `cache_file` path was provided it's used as base. - pub fn insert_files( - self, - _sources: Sources, - _cache_file: Option, - ) -> Result { - todo!() - // let format = self.format.unwrap_or_else(|| ETHERS_FORMAT_VERSION.to_string()); - // let solc_config = self.solc_config.unwrap_or_else(|| SolcConfig::builder().build()); - // - // let root = self - // .root - // .map(Ok) - // .unwrap_or_else(std::env::current_dir) - // .map_err(|err| SolcError::io(err, "."))?; - // - // let mut files = BTreeMap::new(); - // for (file, source) in sources { - // let last_modification_date = CacheEntry::read_last_modification_date(&file)?; - // let imports = - // utils::find_import_paths(source.as_ref()).map(|m| - // m.as_str().to_owned()).collect(); - // - // let version_pragmas = utils::find_version_pragma(source.as_ref()) - // .map(|v| vec![v.as_str().to_string()]) - // .unwrap_or_default(); - // - // let entry = CacheEntry { - // last_modification_date, - // content_hash: source.content_hash(), - // source_name: utils::source_name(&file, &root).into(), - // solc_config: solc_config.clone(), - // imports, - // version_pragmas, - // artifacts: vec![], - // }; - // files.insert(file, entry); - // } - // - // let cache = if let Some(dest) = cache_file.as_ref().filter(|dest| dest.exists()) { - // // read the existing cache and extend it by the files that changed - // // (if we just wrote to the cache file, we'd overwrite the existing data) - // let reader = - // 
std::io::BufReader::new(File::open(dest).map_err(|err| SolcError::io(err, - // dest))?); if let Ok(mut cache) = serde_json::from_reader::<_, - // SolFilesCache>(reader) { cache.files.extend(files); - // cache - // } else { - // tracing::error!("Failed to read existing cache file {}", dest.display()); - // SolFilesCache { format, files } - // } - // } else { - // SolFilesCache { format, files } - // }; - // - // Ok(cache) - } -} - #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct CacheEntry { @@ -310,6 +217,36 @@ impl CacheEntry { .as_millis() as u64; Ok(last_modification_date) } + + /// Iterator that yields all artifact files + pub fn artifacts(&self) -> impl Iterator { + self.artifacts.values().flat_map(|artifacts| artifacts.into_iter()) + } + + pub fn artifacts_mut(&mut self) -> impl Iterator { + self.artifacts.values_mut().flat_map(|artifacts| artifacts.into_iter()) + } + + /// Checks if all artifact files exist + pub fn all_artifacts_exist(&self) -> bool { + self.artifacts().all(|p| p.exists()) + } + + /// Sets the artifact's paths to `base` adjoined to the artifact's `path`. 
+ pub fn join(&mut self, base: impl AsRef) { + let base = base.as_ref(); + self.artifacts_mut().for_each(|p| *p = p.join(base)) + } + + /// Removes `base` from the artifact's path + pub fn strip_prefix(&mut self, base: impl AsRef) { + let base = base.as_ref(); + self.artifacts_mut().for_each(|p| { + if let Ok(prefix) = p.strip_prefix(base) { + *p = prefix.to_path_buf(); + } + }) + } } /// A helper type to handle source name/full disk mappings diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 8c30d26a8..ec5e6ed19 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -532,9 +532,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // read the cache file if it already exists let cache = if project.cache_path().exists() { let mut cache = SolFilesCache::read(project.cache_path())?; - // TODO this should take the project dir, since we're storing surce unit ids - // starting at the project dir? - cache.remove_missing_files(); + cache.join_all(project.artifacts_path()).remove_missing_files(); cache } else { SolFilesCache::default() diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 281c19e4c..9db23783f 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -66,7 +66,7 @@ pub struct Project { /// Maximum number of `solc` processes to run simultaneously. 
solc_jobs: usize, /// Offline mode, if set, network access (download solc) is disallowed - offline: bool, + pub offline: bool, } impl Project { From b6aaef5eac8fa8f6d1e9066febcce8fe92dfbccc Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 20:41:20 +0100 Subject: [PATCH 39/82] chore: streamline types --- ethers-solc/src/artifact_output.rs | 9 +++++++-- ethers-solc/src/cache.rs | 21 ++++++++------------- ethers-solc/src/compile/contracts.rs | 4 ++-- ethers-solc/src/compile/output.rs | 28 ++++++---------------------- ethers-solc/src/compile/project.rs | 23 +++++++++-------------- ethers-solc/src/lib.rs | 6 +++--- 6 files changed, 35 insertions(+), 56 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 912427353..7efb2c335 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -108,13 +108,18 @@ impl Artifacts { self } + /// Returns true if this type contains an artifact with the given path + pub fn has_artifact(&self, artifact_path: &Path) -> bool { + self.artifact_files().any(|artifact| artifact.file == artifact_path) + } + /// Iterate over all artifact files pub fn artifact_files(&self) -> impl Iterator> { - self.0.values().flat_map(|c| c.values().flat_map(|artifacts| artifacts.into_iter())) + self.0.values().flat_map(|c| c.values().flat_map(|artifacts| artifacts.iter())) } /// Iterate over all artifact files pub fn artifact_files_mut(&mut self) -> impl Iterator> { - self.0.values_mut().flat_map(|c| c.values_mut().flat_map(|artifacts| artifacts.into_iter())) + self.0.values_mut().flat_map(|c| c.values_mut().flat_map(|artifacts| artifacts.iter_mut())) } /// Returns an iterator over _all_ artifacts and `` diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 247b2008c..a394643b9 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -3,7 +3,7 @@ use crate::{ artifacts::{Contracts, Sources}, config::SolcConfig, error::{Result, 
SolcError}, - ArtifactOutput, Source, + Artifacts, Source, }; use semver::Version; use serde::{Deserialize, Serialize}; @@ -14,9 +14,6 @@ use std::{ time::{Duration, UNIX_EPOCH}, }; -/// Hardhat format version -const HH_FORMAT_VERSION: &str = "hh-sol-cache-2"; - /// ethers-rs format version /// /// `ethers-solc` uses a different format version id, but the actual format is consistent with @@ -104,16 +101,14 @@ impl SolFilesCache { /// /// ``` /// use ethers_solc::cache::SolFilesCache; - /// use ethers_solc::Project; + /// use ethers_solc::{MinimalCombinedArtifacts, Project}; /// /// let project = Project::builder().build().unwrap(); - /// let cache = SolFilesCache::read(project.cache_path()).unwrap(); - /// let artifacts = cache.read_artifacts::(project.artifacts_path()).unwrap(); + /// let mut cache = SolFilesCache::read(project.cache_path()).unwrap(); + /// cache.join_all(project.artifacts_path()); + /// let artifacts = cache.read_artifacts::().unwrap(); /// ``` - pub fn read_artifacts( - &self, - _artifacts_root: &Path, - ) -> Result> { + pub fn read_artifacts(&self) -> Result> { todo!() // let mut artifacts = BTreeMap::default(); // for (file, entry) in &self.files { @@ -220,11 +215,11 @@ impl CacheEntry { /// Iterator that yields all artifact files pub fn artifacts(&self) -> impl Iterator { - self.artifacts.values().flat_map(|artifacts| artifacts.into_iter()) + self.artifacts.values().flat_map(|artifacts| artifacts.iter()) } pub fn artifacts_mut(&mut self) -> impl Iterator { - self.artifacts.values_mut().flat_map(|artifacts| artifacts.into_iter()) + self.artifacts.values_mut().flat_map(|artifacts| artifacts.iter_mut()) } /// Checks if all artifact files exist diff --git a/ethers-solc/src/compile/contracts.rs b/ethers-solc/src/compile/contracts.rs index b2703ba66..f93034217 100644 --- a/ethers-solc/src/compile/contracts.rs +++ b/ethers-solc/src/compile/contracts.rs @@ -31,7 +31,7 @@ impl VersionedContracts { /// ``` pub fn find(&self, contract: impl AsRef) -> 
Option { let contract_name = contract.as_ref(); - self.contracts_iter().find_map(|(name, contract)| { + self.contracts().find_map(|(name, contract)| { (name == contract_name).then(|| CompactContractRef::from(contract)) }) } @@ -76,7 +76,7 @@ impl VersionedContracts { } /// Iterate over all contracts and their names - pub fn contracts_iter(&self) -> impl Iterator { + pub fn contracts(&self) -> impl Iterator { self.0 .values() .flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract)))) diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index a3e331b5a..89ce07045 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -6,7 +6,7 @@ use crate::{ ArtifactOutput, Artifacts, CompilerOutput, }; use semver::Version; -use std::{collections::BTreeMap, fmt, path::PathBuf}; +use std::{collections::BTreeMap, fmt}; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. @@ -19,7 +19,7 @@ pub struct ProjectCompileOutput { /// all artifact files from `output` that were written pub(crate) written_artifacts: Artifacts, /// All artifacts that were read from cache - pub(crate) cached_artifacts: BTreeMap, + pub(crate) cached_artifacts: Artifacts, /// errors that should be omitted pub(crate) ignored_error_codes: Vec, } @@ -40,14 +40,7 @@ impl ProjectCompileOutput { // TODO add ArtifactId (filename, contract name, version?) pub fn into_artifacts(self) -> impl Iterator { let Self { cached_artifacts, written_artifacts, .. 
} = self; - cached_artifacts - .into_iter() - .filter_map(|(path, art)| { - T::contract_name(&path).map(|name| { - (format!("{}:{}", path.file_name().unwrap().to_string_lossy(), name), art) - }) - }) - .chain(written_artifacts.into_artifacts::()) + cached_artifacts.into_artifacts::().chain(written_artifacts.into_artifacts::()) } /// Get the (merged) solc compiler output @@ -90,14 +83,7 @@ impl ProjectCompileOutput { if let artifact @ Some(_) = self.written_artifacts.remove(contract_name) { return artifact } - let key = self - .cached_artifacts - .iter() - .find_map(|(path, _)| { - T::contract_name(path).filter(|name| name == contract_name).map(|_| path) - })? - .clone(); - self.cached_artifacts.remove(&key) + self.cached_artifacts.remove(contract_name) } } @@ -111,9 +97,7 @@ where if let artifact @ Some(_) = self.written_artifacts.find(contract_name) { return artifact } - self.cached_artifacts.iter().find_map(|(path, art)| { - T::contract_name(path).filter(|name| name == contract_name).map(|_| art) - }) + self.cached_artifacts.find(contract_name) } } @@ -222,7 +206,7 @@ impl AggregatedCompilerOutput { /// Iterate over all contracts and their names pub fn contracts_iter(&self) -> impl Iterator { - self.contracts.contracts_iter() + self.contracts.contracts() } /// Iterate over all contracts and their names diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index ec5e6ed19..33cffac0d 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -354,7 +354,7 @@ struct Cache<'a, T: ArtifactOutput> { /// preexisting cache file cache: SolFilesCache, /// all already existing artifacts - cached_artifacts: BTreeMap, + cached_artifacts: Artifacts, /// relationship between all the files edges: &'a GraphEdges, /// how to configure solc @@ -484,7 +484,10 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { if let Some(artifacts) = entry.artifacts.get(version) { // checks whether an artifact this file depends on was 
removed - if artifacts.iter().any(|artifact_path| !self.has_artifact(artifact_path)) { + if artifacts + .iter() + .any(|artifact_path| !self.cached_artifacts.has_artifact(artifact_path)) + { tracing::trace!( "missing linked artifacts for cached artifact \"{}\"", file.display() @@ -511,11 +514,6 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { } } } - - /// Returns true if the artifact exists - fn has_artifact(&self, artifact_path: &Path) -> bool { - self.cached_artifacts.contains_key(artifact_path) - } } /// Abstraction over configured caching which can be either non-existent or an already loaded cache @@ -541,11 +539,11 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // read all artifacts let cached_artifacts = if project.paths.artifacts.exists() { tracing::trace!("reading artifacts from cache.."); - let artifacts = cache.read_artifacts::(&project.paths.artifacts)?; - tracing::trace!("read {} artifacts from cache", artifacts.len()); + let artifacts = cache.read_artifacts::()?; + tracing::trace!("read {} artifacts from cache", artifacts.artifact_files().count()); artifacts } else { - BTreeMap::default() + Default::default() }; let cache = Cache { @@ -581,10 +579,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { /// written to disk `written_artifacts`. /// /// Returns all the _cached_ artifacts. 
- fn finish( - self, - _written_artifacts: &Artifacts, - ) -> Result> { + fn finish(self, _written_artifacts: &Artifacts) -> Result> { match self { ArtifactsCache::Ephemeral => Ok(Default::default()), ArtifactsCache::Cached(cache) => { diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 9db23783f..dab33f5e6 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -577,7 +577,7 @@ mod tests { assert!(!compiled.has_compiler_errors()); let contracts = compiled.output().contracts; // Contracts A to F - assert_eq!(contracts.keys().count(), 5); + assert_eq!(contracts.contracts().count(), 5); } #[test] @@ -604,7 +604,7 @@ mod tests { let compiled = project.compile().unwrap(); assert!(!compiled.has_compiler_errors()); let contracts = compiled.output().contracts; - assert_eq!(contracts.keys().count(), 3); + assert_eq!(contracts.contracts().count(), 3); } #[test] @@ -623,6 +623,6 @@ mod tests { let compiled = project.compile().unwrap(); assert!(!compiled.has_compiler_errors()); let contracts = compiled.output().contracts; - assert_eq!(contracts.keys().count(), 2); + assert_eq!(contracts.contracts().count(), 2); } } From f18f97de53786e06a57e66f8a83ff3f7f8908a17 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 21:32:37 +0100 Subject: [PATCH 40/82] fix: better artifacts mapping --- ethers-solc/src/artifact_output.rs | 24 ++++++++++++++++++-- ethers-solc/src/cache.rs | 36 +++++++++++++++++++++++++----- 2 files changed, 52 insertions(+), 8 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 7efb2c335..30d83a74e 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -8,13 +8,21 @@ use crate::{ }; use ethers_core::{abi::Abi, types::Bytes}; use semver::Version; -use serde::{de::DeserializeOwned, Serialize}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ collections::btree_map::BTreeMap, fs, io, path::{Path, PathBuf}, }; 
+#[derive(Debug, Clone, PartialEq)] +pub struct ArtifactInfo { + /// path to the file where the `artifact` was written to + pub file: PathBuf, + /// `solc` version that produced this artifact + pub version: Version, +} + /// Represents an artifact file representing a [`crate::Contract`] #[derive(Debug, Clone, PartialEq)] pub struct ArtifactFile { @@ -59,7 +67,7 @@ impl ArtifactFile { } /// local helper type alias -type ArtifactsMap = FileToContractsMap>>; +pub(crate) type ArtifactsMap = FileToContractsMap>>; /// Represents a set of Artifacts #[derive(Debug, Clone, PartialEq)] @@ -108,6 +116,18 @@ impl Artifacts { self } + /// Returns all `ArtifactFile`s for the contract with the matching name + fn get_contract_artifact_files(&self, contract_name: &str) -> Option<&Vec>> { + self.0.values().find_map(|all| all.get(contract_name)) + } + + /// Returns true if this type contains an artifact with the given path for the given contract + pub fn has_contract_artifact(&self, contract_name: &str, artifact_path: &Path) -> bool { + self.get_contract_artifact_files(contract_name) + .map(|artifacts| artifacts.into_iter().any(|artifact| artifact.file == artifact_path)) + .unwrap_or_default() + } + /// Returns true if this type contains an artifact with the given path pub fn has_artifact(&self, artifact_path: &Path) -> bool { self.artifact_files().any(|artifact| artifact.file == artifact_path) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index a394643b9..7ffd1dcf3 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -3,7 +3,7 @@ use crate::{ artifacts::{Contracts, Sources}, config::SolcConfig, error::{Result, SolcError}, - Artifacts, Source, + ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Source, }; use semver::Version; use serde::{Deserialize, Serialize}; @@ -49,6 +49,16 @@ impl SolFilesCache { } /// Reads the cache json file from the given path + /// # Example + /// + /// ``` + /// use ethers_solc::cache::SolFilesCache; + /// use 
ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let mut cache = SolFilesCache::read(project.cache_path()).unwrap(); + /// cache.join_all(project.artifacts_path()); + /// ``` #[tracing::instrument(skip_all, name = "sol-files-cache::read")] pub fn read(path: impl AsRef) -> Result { let path = path.as_ref(); @@ -109,7 +119,11 @@ impl SolFilesCache { /// let artifacts = cache.read_artifacts::().unwrap(); /// ``` pub fn read_artifacts(&self) -> Result> { - todo!() + let mut artifacts = ArtifactsMap::new(); + for (file, entry) in self.files.iter() { + // let mut entries = BTreeMap::new(); + } + // let mut artifacts = BTreeMap::default(); // for (file, entry) in &self.files { // for artifact in &entry.artifacts { @@ -118,7 +132,7 @@ impl SolFilesCache { // artifacts.insert(artifact_file, artifact); // } // } - // Ok(artifacts) + Ok(Artifacts(artifacts)) } /// Retains only the `CacheEntry` specified by the file + version combination. @@ -187,7 +201,7 @@ pub struct CacheEntry { /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` /// file `C` would be compiled twice, with `0.8.10` and `0.8.11`, producing two different /// artifacts - pub artifacts: HashMap>, + pub artifacts: BTreeMap>, } impl CacheEntry { @@ -213,13 +227,23 @@ impl CacheEntry { Ok(last_modification_date) } + fn read_artifact_files(&self) -> Result>> { + for (version, files) in self.artifacts.iter() { + for file in files { + // get the contract name based on the number of versions + } + } + + todo!() + } + /// Iterator that yields all artifact files pub fn artifacts(&self) -> impl Iterator { - self.artifacts.values().flat_map(|artifacts| artifacts.iter()) + self.artifacts.values().flat_map(|artifacts| artifacts.into_iter()) } pub fn artifacts_mut(&mut self) -> impl Iterator { - self.artifacts.values_mut().flat_map(|artifacts| artifacts.iter_mut()) + self.artifacts.values_mut().flat_map(|artifacts| artifacts.into_iter()) } /// Checks if all artifact 
files exist From fdaeb5700ee9e74823ad83b6b6fb81d2c47f6222 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sat, 29 Jan 2022 23:13:22 +0100 Subject: [PATCH 41/82] chore: some cleanup --- ethers-solc/src/artifact_output.rs | 10 +++++----- ethers-solc/src/cache.rs | 12 ++++++------ 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 30d83a74e..bda8f3c0c 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -8,7 +8,7 @@ use crate::{ }; use ethers_core::{abi::Abi, types::Bytes}; use semver::Version; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde::{de::DeserializeOwned, Serialize}; use std::{ collections::btree_map::BTreeMap, fs, io, @@ -16,11 +16,11 @@ use std::{ }; #[derive(Debug, Clone, PartialEq)] -pub struct ArtifactInfo { - /// path to the file where the `artifact` was written to - pub file: PathBuf, +pub struct ArtifactsInfo { /// `solc` version that produced this artifact pub version: Version, + /// path to the file where the `artifact` was written to + pub file: PathBuf, } /// Represents an artifact file representing a [`crate::Contract`] @@ -124,7 +124,7 @@ impl Artifacts { /// Returns true if this type contains an artifact with the given path for the given contract pub fn has_contract_artifact(&self, contract_name: &str, artifact_path: &Path) -> bool { self.get_contract_artifact_files(contract_name) - .map(|artifacts| artifacts.into_iter().any(|artifact| artifact.file == artifact_path)) + .map(|artifacts| artifacts.iter().any(|artifact| artifact.file == artifact_path)) .unwrap_or_default() } diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 7ffd1dcf3..0d8e8a0d2 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -119,8 +119,8 @@ impl SolFilesCache { /// let artifacts = cache.read_artifacts::().unwrap(); /// ``` pub fn read_artifacts(&self) -> Result> { - let mut 
artifacts = ArtifactsMap::new(); - for (file, entry) in self.files.iter() { + let artifacts = ArtifactsMap::new(); + for (_file, _entry) in self.files.iter() { // let mut entries = BTreeMap::new(); } @@ -228,8 +228,8 @@ impl CacheEntry { } fn read_artifact_files(&self) -> Result>> { - for (version, files) in self.artifacts.iter() { - for file in files { + for (_version, files) in self.artifacts.iter() { + for _file in files { // get the contract name based on the number of versions } } @@ -239,11 +239,11 @@ impl CacheEntry { /// Iterator that yields all artifact files pub fn artifacts(&self) -> impl Iterator { - self.artifacts.values().flat_map(|artifacts| artifacts.into_iter()) + self.artifacts.values().flat_map(|artifacts| artifacts.iter()) } pub fn artifacts_mut(&mut self) -> impl Iterator { - self.artifacts.values_mut().flat_map(|artifacts| artifacts.into_iter()) + self.artifacts.values_mut().flat_map(|artifacts| artifacts.iter_mut()) } /// Checks if all artifact files exist From 87dc19e20896943176382aa529eb72ae527383c0 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 15:41:38 +0100 Subject: [PATCH 42/82] chore: change artifact --- ethers-solc/src/artifact_output.rs | 10 +--------- ethers-solc/src/cache.rs | 32 ++++++++++++++++++++++++++---- ethers-solc/src/compile/project.rs | 22 ++++++++------------ 3 files changed, 37 insertions(+), 27 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index bda8f3c0c..e0f7075c2 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -15,14 +15,6 @@ use std::{ path::{Path, PathBuf}, }; -#[derive(Debug, Clone, PartialEq)] -pub struct ArtifactsInfo { - /// `solc` version that produced this artifact - pub version: Version, - /// path to the file where the `artifact` was written to - pub file: PathBuf, -} - /// Represents an artifact file representing a [`crate::Contract`] #[derive(Debug, Clone, PartialEq)] pub struct 
ArtifactFile { @@ -66,7 +58,7 @@ impl ArtifactFile { } } -/// local helper type alias +/// local helper type alias `file name -> (contract name -> Vec<..>)` pub(crate) type ArtifactsMap = FileToContractsMap>>; /// Represents a set of Artifacts diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 0d8e8a0d2..ec2d81844 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -178,6 +178,11 @@ impl Default for SolFilesCache { } } +/// A `CacheEntry` in the cache file represents a solidity file +/// +/// A solidity file can contain several contracts, for every contract a separate `Artifact` is +/// emitted. so the `CacheEntry` tracks the artifacts by name. A file can be compiled with multiple +/// `solc` versions generating version specific artifacts. #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct CacheEntry { @@ -200,8 +205,11 @@ pub struct CacheEntry { /// In theory a file can be compiled by different solc versions: /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` /// file `C` would be compiled twice, with `0.8.10` and `0.8.11`, producing two different - /// artifacts - pub artifacts: BTreeMap>, + /// artifacts. + /// + /// This map tracks the artifacts by `name -> (Version -> PathBuf)`. 
+ /// This mimics the default artifacts directory structure + pub artifacts: BTreeMap>, } impl CacheEntry { @@ -237,13 +245,29 @@ impl CacheEntry { todo!() } + /// Returns `true` if the artifacts set contains the given version + pub fn contains_version(&self, version: &Version) -> bool { + self.artifacts_versions().any(|(v, _)| v == version) + } + + /// Iterator that yields all artifact files and their version + pub fn artifacts_versions(&self) -> impl Iterator { + self.artifacts.values().flat_map(|artifacts| artifacts.into_iter()) + } + + /// Iterator that yields all artifact files and their version + pub fn artifacts_for_version(&self, version: &Version) -> impl Iterator { + self.artifacts_versions().filter_map(|(ver, file)| (ver == version).then(|| file)) + } + /// Iterator that yields all artifact files pub fn artifacts(&self) -> impl Iterator { - self.artifacts.values().flat_map(|artifacts| artifacts.iter()) + self.artifacts.values().flat_map(|artifacts| artifacts.values()) } + /// Mutable iterator over all artifact files pub fn artifacts_mut(&mut self) -> impl Iterator { - self.artifacts.values_mut().flat_map(|artifacts| artifacts.iter_mut()) + self.artifacts.values_mut().flat_map(|artifacts| artifacts.values_mut()) } /// Checks if all artifact files exist diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 33cffac0d..eeb127000 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -482,23 +482,17 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { return true } - if let Some(artifacts) = entry.artifacts.get(version) { - // checks whether an artifact this file depends on was removed - if artifacts - .iter() - .any(|artifact_path| !self.cached_artifacts.has_artifact(artifact_path)) - { - tracing::trace!( - "missing linked artifacts for cached artifact \"{}\"", - file.display() - ); - return true - } - } else { - // artifact does not exist + if !entry.contains_version(version) { + 
tracing::trace!("missing linked artifacts for version \"{}\"", version); return true } + if entry.artifacts_for_version(version).any(|artifact_path| { + // artifact does not exist + !self.cached_artifacts.has_artifact(artifact_path) + }) { + return true + } // all things match, can be reused return false } From b320761794598a6346a8cbcbaf0f81f2064dab51 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 15:42:25 +0100 Subject: [PATCH 43/82] chore: add configure solc fn --- ethers-solc/src/lib.rs | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index dab33f5e6..6810f142e 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -120,6 +120,14 @@ impl Project { &self.paths.root } + /// Applies the configured settings to the given `Solc` + fn configure_solc(&self, mut solc: Solc) -> Solc { + if !self.allowed_lib_paths.0.is_empty() { + solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string()); + } + solc + } + /// Sets the maximum number of parallel `solc` processes to run simultaneously. /// /// # Panics @@ -189,10 +197,7 @@ impl Project { return self.svm_compile(sources) } - let mut solc = self.solc.clone(); - if !self.allowed_lib_paths.0.is_empty() { - solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string()); - } + let solc = self.configure_solc(self.solc.clone()); self.compile_with_version(&solc, sources) } @@ -253,7 +258,12 @@ impl Project { solc: &Solc, sources: Sources, ) -> Result> { - project::ProjectCompiler::with_sources_and_solc(self, sources, solc.clone())?.compile() + project::ProjectCompiler::with_sources_and_solc( + self, + sources, + self.configure_solc(solc.clone()), + )? 
+ .compile() } /// Removes the project's artifacts and cache file From 3fb6ea19f00a91b01b059f1430946f15c1317429 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 16:27:49 +0100 Subject: [PATCH 44/82] feat: add artifact reading --- ethers-solc/src/cache.rs | 53 +++++++++++++++--------------- ethers-solc/src/compile/project.rs | 3 +- ethers-solc/src/utils.rs | 10 ++++++ 3 files changed, 39 insertions(+), 27 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index ec2d81844..e75231130 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -3,10 +3,10 @@ use crate::{ artifacts::{Contracts, Sources}, config::SolcConfig, error::{Result, SolcError}, - ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Source, + utils, ArtifactFile, Artifacts, ArtifactsMap, Source, }; use semver::Version; -use serde::{Deserialize, Serialize}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ collections::{btree_map::BTreeMap, HashMap}, fs::{self}, @@ -63,9 +63,7 @@ impl SolFilesCache { pub fn read(path: impl AsRef) -> Result { let path = path.as_ref(); tracing::trace!("reading solfiles cache at {}", path.display()); - let file = fs::File::open(path).map_err(|err| SolcError::io(err, path))?; - let file = std::io::BufReader::new(file); - let cache: Self = serde_json::from_reader(file)?; + let cache: SolFilesCache = utils::read_json_file(path)?; tracing::trace!("read cache \"{}\" with {} entries", cache.format, cache.files.len()); Ok(cache) } @@ -118,20 +116,12 @@ impl SolFilesCache { /// cache.join_all(project.artifacts_path()); /// let artifacts = cache.read_artifacts::().unwrap(); /// ``` - pub fn read_artifacts(&self) -> Result> { - let artifacts = ArtifactsMap::new(); - for (_file, _entry) in self.files.iter() { - // let mut entries = BTreeMap::new(); + pub fn read_artifacts(&self) -> Result> { + let mut artifacts = ArtifactsMap::new(); + for (file, entry) in self.files.iter() { + let file_name = 
format!("{}", file.display()); + artifacts.insert(file_name, entry.read_artifact_files()?); } - - // let mut artifacts = BTreeMap::default(); - // for (file, entry) in &self.files { - // for artifact in &entry.artifacts { - // let artifact_file = artifacts_root.join(T::output_file(file, artifact)); - // let artifact = T::read_cached_artifact(&artifact_file)?; - // artifacts.insert(artifact_file, artifact); - // } - // } Ok(Artifacts(artifacts)) } @@ -235,14 +225,22 @@ impl CacheEntry { Ok(last_modification_date) } - fn read_artifact_files(&self) -> Result>> { - for (_version, files) in self.artifacts.iter() { - for _file in files { - // get the contract name based on the number of versions + /// Reads all artifact files associated with the `CacheEntry` + /// + /// **Note:** all artifact file paths should be absolute, see [`Self::join`] + fn read_artifact_files( + &self, + ) -> Result>>> { + let mut artifacts = BTreeMap::new(); + for (artifact_name, versioned_files) in self.artifacts.iter() { + let mut files = Vec::with_capacity(versioned_files.len()); + for (version, file) in versioned_files { + let artifact: Artifact = utils::read_json_file(file)?; + files.push(ArtifactFile { artifact, file: file.clone(), version: version.clone() }); } + artifacts.insert(artifact_name.clone(), files); } - - todo!() + Ok(artifacts) } /// Returns `true` if the artifacts set contains the given version @@ -256,8 +254,11 @@ impl CacheEntry { } /// Iterator that yields all artifact files and their version - pub fn artifacts_for_version(&self, version: &Version) -> impl Iterator { - self.artifacts_versions().filter_map(|(ver, file)| (ver == version).then(|| file)) + pub fn artifacts_for_version<'a>( + &'a self, + version: &'a Version, + ) -> impl Iterator + 'a { + self.artifacts_versions().filter_map(move |(ver, file)| (ver == version).then(|| file)) } /// Iterator that yields all artifact files diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 
eeb127000..f680dee3f 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -533,7 +533,8 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // read all artifacts let cached_artifacts = if project.paths.artifacts.exists() { tracing::trace!("reading artifacts from cache.."); - let artifacts = cache.read_artifacts::()?; + // if we failed to read the whole set of artifacts we use an empty set + let artifacts = cache.read_artifacts::().unwrap_or_default(); tracing::trace!("read {} artifacts from cache", artifacts.artifact_files().count()); artifacts } else { diff --git a/ethers-solc/src/utils.rs b/ethers-solc/src/utils.rs index f6672a5f3..6a0d34d69 100644 --- a/ethers-solc/src/utils.rs +++ b/ethers-solc/src/utils.rs @@ -6,6 +6,7 @@ use crate::{error::SolcError, SolcIoError}; use once_cell::sync::Lazy; use regex::{Match, Regex}; use semver::Version; +use serde::de::DeserializeOwned; use tiny_keccak::{Hasher, Keccak}; use walkdir::WalkDir; @@ -252,6 +253,15 @@ pub(crate) fn tempdir(name: &str) -> Result { tempfile::Builder::new().prefix(name).tempdir().map_err(|err| SolcIoError::new(err, name)) } +/// Reads the json file and deserialize it into the provided type +pub(crate) fn read_json_file(path: impl AsRef) -> Result { + let path = path.as_ref(); + let file = std::fs::File::open(path).map_err(|err| SolcError::io(err, path))?; + let file = std::io::BufReader::new(file); + let val: T = serde_json::from_reader(file)?; + Ok(val) +} + #[cfg(test)] mod tests { use super::*; From 1431584f46ce2acb36c81656d1f9f5fcf2a70190 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 16:48:57 +0100 Subject: [PATCH 45/82] feat: implement retain and extend --- ethers-solc/src/cache.rs | 61 ++++++++++++++++++++++++++++++++++------ 1 file changed, 52 insertions(+), 9 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index e75231130..99bd97940 100644 --- a/ethers-solc/src/cache.rs +++ 
b/ethers-solc/src/cache.rs @@ -3,12 +3,15 @@ use crate::{ artifacts::{Contracts, Sources}, config::SolcConfig, error::{Result, SolcError}, - utils, ArtifactFile, Artifacts, ArtifactsMap, Source, + utils, ArtifactFile, Artifacts, ArtifactsMap, }; use semver::Version; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ - collections::{btree_map::BTreeMap, HashMap}, + collections::{ + btree_map::{BTreeMap, Entry}, + HashMap, HashSet, + }, fs::{self}, path::{Path, PathBuf}, time::{Duration, UNIX_EPOCH}, @@ -129,19 +132,37 @@ impl SolFilesCache { /// /// In other words, only keep those cache entries with the paths (keys) that the iterator yields /// and only keep the versions in the cache entry that the version iterator yields. - pub fn retain<'a, I, V>(&mut self, _files: I) + pub fn retain<'a, I, V>(&mut self, files: I) where I: IntoIterator, V: IntoIterator, { + let mut files: HashMap<_, _> = files.into_iter().map(|(p, v)| (p, v)).collect(); + + self.files.retain(|file, entry| { + if let Some(versions) = files.remove(file.as_path()) { + entry.retain_versions(versions); + } + !entry.artifacts.is_empty() + }); } /// Inserts the provided cache entries, if there is an existing `CacheEntry` it will be updated /// but versions will be merged. 
- pub fn extend(&mut self, _entries: I) + pub fn extend(&mut self, entries: I) where I: IntoIterator, { + for (file, entry) in entries.into_iter() { + match self.files.entry(file) { + Entry::Vacant(e) => { + e.insert(entry); + } + Entry::Occupied(mut other) => { + other.get_mut().merge_artifacts(entry); + } + } + } } } @@ -203,11 +224,7 @@ pub struct CacheEntry { } impl CacheEntry { - pub fn new(_file: impl AsRef, _source: &Source) -> Result { - todo!() - } - - /// Returns the time + /// Returns the last modified timestamp `Duration` pub fn last_modified(&self) -> Duration { Duration::from_millis(self.last_modification_date) } @@ -243,6 +260,32 @@ impl CacheEntry { Ok(artifacts) } + /// Merges another `CacheEntries` artifacts into the existing set + fn merge_artifacts(&mut self, other: CacheEntry) { + for (name, artifacts) in other.artifacts { + match self.artifacts.entry(name) { + Entry::Vacant(entry) => { + entry.insert(artifacts); + } + Entry::Occupied(mut entry) => { + entry.get_mut().extend(artifacts.into_iter()); + } + } + } + } + + /// Retains only those artifacts that match the provided version. 
+ pub fn retain_versions<'a, I>(&mut self, versions: I) + where + I: IntoIterator, + { + let versions = versions.into_iter().collect::>(); + self.artifacts.retain(|_, artifacts| { + artifacts.retain(|version, _| versions.contains(version)); + !artifacts.is_empty() + }) + } + /// Returns `true` if the artifacts set contains the given version pub fn contains_version(&self, version: &Version) -> bool { self.artifacts_versions().any(|(v, _)| v == version) From 2b554b54d5cf458b466a603c8e5e8cdec8628ced Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 17:15:03 +0100 Subject: [PATCH 46/82] feat: add cache extending --- ethers-solc/src/artifact_output.rs | 4 ++++ ethers-solc/src/cache.rs | 15 ++++++++++++- ethers-solc/src/compile/project.rs | 35 ++++++++++++++++++++---------- 3 files changed, 41 insertions(+), 13 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index e0f7075c2..8d22707cc 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -94,6 +94,10 @@ impl Artifacts { } impl Artifacts { + pub fn into_inner(self) -> ArtifactsMap { + self.0 + } + /// Sets the artifact files location to `root` adjoined to `self.file`. pub fn join_all(&mut self, root: impl AsRef) -> &mut Self { let root = root.as_ref(); diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 99bd97940..3265ffa6f 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -149,7 +149,7 @@ impl SolFilesCache { /// Inserts the provided cache entries, if there is an existing `CacheEntry` it will be updated /// but versions will be merged. 
- pub fn extend(&mut self, entries: I) + pub fn extend(&mut self, entries: I) where I: IntoIterator, { @@ -260,6 +260,19 @@ impl CacheEntry { Ok(artifacts) } + pub(crate) fn insert_artifacts<'a, I, T: 'a>(&mut self, artifacts: I) + where + I: IntoIterator>)>, + { + for (name, artifacts) in artifacts.into_iter().filter(|(_, a)| !a.is_empty()) { + let entries: BTreeMap<_, _> = artifacts + .into_iter() + .map(|artifact| (artifact.version.clone(), artifact.file.clone())) + .collect(); + self.artifacts.insert(name.clone(), entries); + } + } + /// Merges another `CacheEntries` artifacts into the existing set fn merge_artifacts(&mut self, other: CacheEntry) { for (name, artifacts) in other.artifacts { diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index f680dee3f..d4b91a9a8 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -574,12 +574,17 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { /// written to disk `written_artifacts`. /// /// Returns all the _cached_ artifacts. - fn finish(self, _written_artifacts: &Artifacts) -> Result> { + fn finish(self, written_artifacts: &Artifacts) -> Result> { match self { ArtifactsCache::Ephemeral => Ok(Default::default()), ArtifactsCache::Cached(cache) => { let Cache { - mut cache, cached_artifacts, dirty_entries, filtered, edges: _, .. + mut cache, + cached_artifacts, + mut dirty_entries, + filtered, + edges: _, + .. 
} = cache; // keep only those files that were previously filtered (not dirty, reused) @@ -587,17 +592,23 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // add the artifacts to the cache entries, this way we can keep a mapping from // solidity file to its artifacts + // this step is necessary because the concrete artifacts are only known after solc + // was invoked and received as output, before that we merely know the the file and + // the versions so we add the artifacts on a file by file basis + for (file, artifacts) in written_artifacts.as_ref() { + let file_path = Path::new(&file); + if let Some((entry, versions)) = dirty_entries.get_mut(file_path) { + entry.insert_artifacts(artifacts.into_iter().map(|(name, artifacts)| { + let artifacts = artifacts + .into_iter() + .filter(|artifact| versions.contains(&artifact.version)) + .collect::>(); + (name, artifacts) + })); + } + } - dirty_entries.into_iter().map(|(_file, (_entry, _versions))| { - - // TODO need reshuffling of source units to actual paths - // if let Some(contracts) = written_artifacts.get(&file) { - // - // - // } - }); - - // TODO extend the cache with the new artifacts + cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry))); Ok(cached_artifacts) } From 1651d97ed6e140ed3c091f7e3afed6cdc9660e73 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 17:16:15 +0100 Subject: [PATCH 47/82] feat: write to disk --- ethers-solc/src/compile/project.rs | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index d4b91a9a8..d33438cda 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -579,12 +579,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { ArtifactsCache::Ephemeral => Ok(Default::default()), ArtifactsCache::Cached(cache) => { let Cache { - mut cache, - cached_artifacts, - mut dirty_entries, - filtered, - edges: _, - .. 
+ mut cache, cached_artifacts, mut dirty_entries, filtered, paths, .. } = cache; // keep only those files that were previously filtered (not dirty, reused) @@ -608,8 +603,12 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { } } + // add the new cache entries to the cache file cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry))); + // write to disk + cache.write(&paths.cache)?; + Ok(cached_artifacts) } } From 1757db9f6a777d30cff32f6e2d0f3b19e5bfc028 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 17:19:23 +0100 Subject: [PATCH 48/82] chore: make clippy happy --- ethers-solc/src/cache.rs | 4 ++-- ethers-solc/src/compile/project.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 3265ffa6f..d469d5e67 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -179,7 +179,7 @@ impl SolFilesCache { pub async fn async_write(&self, path: impl AsRef) -> Result<()> { let path = path.as_ref(); let content = serde_json::to_vec_pretty(self)?; - Ok(tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path))?) 
+ tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path)) } } @@ -306,7 +306,7 @@ impl CacheEntry { /// Iterator that yields all artifact files and their version pub fn artifacts_versions(&self) -> impl Iterator { - self.artifacts.values().flat_map(|artifacts| artifacts.into_iter()) + self.artifacts.values().flat_map(|artifacts| artifacts.iter()) } /// Iterator that yields all artifact files and their version diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index d33438cda..eaa15a8cf 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -593,9 +593,9 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { for (file, artifacts) in written_artifacts.as_ref() { let file_path = Path::new(&file); if let Some((entry, versions)) = dirty_entries.get_mut(file_path) { - entry.insert_artifacts(artifacts.into_iter().map(|(name, artifacts)| { + entry.insert_artifacts(artifacts.iter().map(|(name, artifacts)| { let artifacts = artifacts - .into_iter() + .iter() .filter(|artifact| versions.contains(&artifact.version)) .collect::>(); (name, artifacts) From 17d143173984eb824b3256cd57aa8648fc0d8022 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 22:25:55 +0100 Subject: [PATCH 49/82] feat: implement path mapping --- ethers-solc/src/cache.rs | 64 ++++++++++++++---------------- ethers-solc/src/compile/project.rs | 61 ++++++++++++++++++++-------- ethers-solc/src/resolver.rs | 31 +++++++++++++++ 3 files changed, 105 insertions(+), 51 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index d469d5e67..492a0b949 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -1,7 +1,8 @@ //! 
Support for compiling contracts use crate::{ - artifacts::{Contracts, Sources}, + artifacts::{Sources}, config::SolcConfig, + contracts::VersionedContracts, error::{Result, SolcError}, utils, ArtifactFile, Artifacts, ArtifactsMap, }; @@ -360,50 +361,43 @@ impl CacheEntry { pub struct SourceUnitNameMap { /// all libraries to the source set while keeping track of their actual disk path /// (`contracts/contract.sol` -> `/Users/.../contracts.sol`) - pub source_unit_name_to_path: HashMap, - /// inverse of `source_name_to_path` : (`/Users/.../contracts.sol` -> `contracts/contract.sol`) - pub path_to_source_unit_name: HashMap, + pub source_unit_name_to_absolute_path: HashMap, + // /// inverse of `source_name_to_path` : (`/Users/.../contracts.sol` -> + // `contracts/contract.sol`) pub aboslute_path_to_source_unit_name: HashMap, } impl SourceUnitNameMap { - fn apply_mappings(sources: Sources, mappings: &HashMap) -> Sources { + /// Sets the source unit names of the sources using the provided mapper + pub(crate) fn apply_source_names_with(&mut self, sources: Sources, mapper: M) -> Sources + where + M: for<'a> Fn(&Path) -> PathBuf, + { sources .into_iter() - .map(|(import, source)| { - if let Some(path) = mappings.get(&import).cloned() { - (path, source) - } else { - (import, source) - } + .map(|(file, source)| { + let source_unit_name = mapper(&file); + self.source_unit_name_to_absolute_path.insert(source_unit_name.clone(), file); + (source_unit_name, source) }) .collect() } - /// Returns all contract names of the files mapped with the disk path - pub fn get_artifacts(&self, contracts: &Contracts) -> Vec<(PathBuf, Vec)> { - contracts - .iter() - .map(|(path, contracts)| { - let path = PathBuf::from(path); - let file = self.source_unit_name_to_path.get(&path).cloned().unwrap_or(path); - (file, contracts.keys().cloned().collect::>()) + /// Reverses all previous source unit mappings + pub(crate) fn reverse(&self, contracts: VersionedContracts) -> VersionedContracts { + let 
contracts = contracts + .into_iter() + .map(|(source_unit_name, contracts)| { + if let Some(file) = + self.source_unit_name_to_absolute_path.get(Path::new(&source_unit_name)).cloned() + { + (format!("{}", file.display()), contracts) + } else { + (source_unit_name, contracts) + } }) - .collect() - } - - pub fn extend(&mut self, other: SourceUnitNameMap) { - self.source_unit_name_to_path.extend(other.source_unit_name_to_path); - self.path_to_source_unit_name.extend(other.path_to_source_unit_name); - } - - /// Returns a new map with the source names as keys - pub fn set_source_names(&self, sources: Sources) -> Sources { - Self::apply_mappings(sources, &self.path_to_source_unit_name) - } - - /// Returns a new map with the disk paths as keys - pub fn set_disk_paths(&self, sources: Sources) -> Sources { - Self::apply_mappings(sources, &self.source_unit_name_to_path) + .collect(); + VersionedContracts(contracts) } } diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index eaa15a8cf..dc5d8943c 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -69,9 +69,10 @@ //! import "github.com/ethereum/dapp-bin/library/math.sol"; // source unit name: dapp-bin/library/math.sol //! ``` //! -//! The compiler will look for the file in the VFS under `dapp-bin/library/math.sol`. If the file is -//! not available there, the source unit name will be passed to the Host Filesystem Loader, which -//! will then look in `/project/dapp-bin/library/iterable_mapping.sol` +//! If compiled with `solc github.com/ethereum/dapp-bin/=dapp-bin/` the compiler will look for the +//! file in the VFS under `dapp-bin/library/math.sol`. If the file is not available there, the +//! source unit name will be passed to the Host Filesystem Loader, which will then look in +//! 
`/project/dapp-bin/library/iterable_mapping.sol` use crate::{ artifact_output::Artifacts, @@ -177,13 +178,16 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { let mut cache = ArtifactsCache::new(project, &edges)?; // retain and compile only dirty sources - sources = sources - .filtered(&mut cache) - .set_source_unit_names(&project.paths, &mut source_unit_map); + sources = sources.filtered(&mut cache).set_source_unit_names( + &project.paths, + &edges, + &mut source_unit_map, + ); - let output = sources.compile(&project.solc_config.settings, &project.paths)?; + let mut output = sources.compile(&project.solc_config.settings, &project.paths)?; - // TODO reapply the mappings to the contracts + // reverse the applied source unit names + output.contracts = source_unit_map.reverse(output.contracts); // write all artifacts let written_artifacts = if !project.no_artifacts { @@ -242,22 +246,46 @@ impl CompilerSources { } /// Sets the correct source unit names for all sources + /// + /// This helps the compiler to find the right source in the `CompilerInput`. + /// the source unit name depends on how it is imported, + /// see [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) + /// + /// For contracts imported from the project's src directory the source unit name is the relative + /// path, starting at the project's root path. + /// + /// The source name for a resolved library import is the applied remapping, also starting + /// relatively at the project's root path. 
fn set_source_unit_names( self, paths: &ProjectPathsConfig, + edges: &GraphEdges, names: &mut SourceUnitNameMap, ) -> Self { fn set( - _sources: VersionedSources, - _paths: &ProjectPathsConfig, - _cache: &mut SourceUnitNameMap, + sources: VersionedSources, + paths: &ProjectPathsConfig, + edges: &GraphEdges, + names: &mut SourceUnitNameMap, ) -> VersionedSources { - todo!() + sources + .into_iter() + .map(|(solc, (version, sources))| { + let sources = names.apply_source_names_with(sources, |file| { + edges.get_source_unit_name(file, &paths.root) + }); + (solc, (version, sources)) + }) + .collect() } match self { - CompilerSources::Sequential(s) => CompilerSources::Sequential(set(s, paths, names)), - CompilerSources::Parallel(s, j) => CompilerSources::Parallel(set(s, paths, names), j), + CompilerSources::Sequential(s) => { + CompilerSources::Sequential(set(s, paths, edges, names)) + } + CompilerSources::Parallel(s, j) => { + CompilerSources::Parallel(set(s, paths, edges, names), j) + } } } @@ -524,6 +552,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // read the cache file if it already exists let cache = if project.cache_path().exists() { let mut cache = SolFilesCache::read(project.cache_path())?; + // TODO are relative? 
cache.join_all(project.artifacts_path()).remove_missing_files(); cache } else { @@ -588,8 +617,8 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // add the artifacts to the cache entries, this way we can keep a mapping from // solidity file to its artifacts // this step is necessary because the concrete artifacts are only known after solc - // was invoked and received as output, before that we merely know the the file and - // the versions so we add the artifacts on a file by file basis + // was invoked and received as output, before that we merely know the file and + // the versions, so we add the artifacts on a file by file basis for (file, artifacts) in written_artifacts.as_ref() { let file_path = Path::new(&file); if let Some((entry, versions)) = dirty_entries.get_mut(file_path) { diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index d25ebe6fa..c82af99c7 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -55,6 +55,9 @@ pub struct GraphEdges { versions: HashMap>, /// with how many input files we started with, corresponds to `let input_files = /// nodes[..num_input_files]`. 
+ /// + /// Combined with the `indices` this way we can determine if a file was original added to the + /// graph as input or was added as resolved import, see [`Self::is_input_file()`] num_input_files: usize, } @@ -75,6 +78,34 @@ impl GraphEdges { } } + /// Returns true if the `file` was originally included when the graph was first created and not + /// added when all `imports` were resolved + pub fn is_input_file(&self, file: impl AsRef) -> bool { + if let Some(idx) = self.indices.get(file.as_ref()).copied() { + idx < self.num_input_files + } else { + false + } + } + + /// Returns the source unit name for the `file` + /// + /// Read more about [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) + /// + /// If the `file` is an `input` file, see [`Self::is_input_file()`], then this returns the + /// relative part to that file starting from the project's root directory. So that the + /// source unit name of `/user/projects/myproject/src/Contract.sol` is `src/Contract.sol` if the + /// `myproject` dir is the project's root directory. + /// + /// If the `file` is a resolved import, then this returns the relative part after the remappings + /// are applied, also starting at the project's root directory. + /// + /// **NOTE:** All remappings are already applied when imports are resolved, therefore the source + /// unit name is always determined by [`utils::source_name()`]. 
+ pub fn get_source_unit_name(&self, file: impl AsRef, root: impl AsRef) -> PathBuf { + utils::source_name(file.as_ref(), root).to_path_buf() + } + /// Returns the `VersionReq` for the given file pub fn version_requirement(&self, file: impl AsRef) -> Option<&VersionReq> { self.indices From 8ea0eac408452f4906a41a41e22f0739f1bf6bdd Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 22:30:21 +0100 Subject: [PATCH 50/82] chore: nits --- ethers-solc/src/cache.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 492a0b949..97b316e91 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -1,6 +1,6 @@ //! Support for compiling contracts use crate::{ - artifacts::{Sources}, + artifacts::Sources, config::SolcConfig, contracts::VersionedContracts, error::{Result, SolcError}, @@ -358,13 +358,10 @@ impl CacheEntry { /// /// See also [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) #[derive(Debug, Default)] -pub struct SourceUnitNameMap { +pub(crate) struct SourceUnitNameMap { /// all libraries to the source set while keeping track of their actual disk path /// (`contracts/contract.sol` -> `/Users/.../contracts.sol`) pub source_unit_name_to_absolute_path: HashMap, - // /// inverse of `source_name_to_path` : (`/Users/.../contracts.sol` -> - // `contracts/contract.sol`) pub aboslute_path_to_source_unit_name: HashMap, } impl SourceUnitNameMap { @@ -388,8 +385,10 @@ impl SourceUnitNameMap { let contracts = contracts .into_iter() .map(|(source_unit_name, contracts)| { - if let Some(file) = - self.source_unit_name_to_absolute_path.get(Path::new(&source_unit_name)).cloned() + if let Some(file) = self + .source_unit_name_to_absolute_path + .get(Path::new(&source_unit_name)) + .cloned() { (format!("{}", file.display()), contracts) } else { From b354f7adcc4c5b40774614ff72b70883e7dba20e Mon Sep 17 00:00:00 2001 
From: Matthias Seitz Date: Sun, 30 Jan 2022 23:08:21 +0100 Subject: [PATCH 51/82] feat: introduce states --- ethers-solc/src/compile/project.rs | 149 ++++++++++++++++++++--------- 1 file changed, 104 insertions(+), 45 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index dc5d8943c..12d259413 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -171,42 +171,86 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// let output = project.compile().unwrap(); /// ``` pub fn compile(self) -> Result> { - let Self { edges, project, mut sources } = self; - // the map that keeps track of the mapping of resolved solidity file paths -> source unit - // names - let mut source_unit_map = SourceUnitNameMap::default(); - - let mut cache = ArtifactsCache::new(project, &edges)?; - // retain and compile only dirty sources - sources = sources.filtered(&mut cache).set_source_unit_names( - &project.paths, - &edges, - &mut source_unit_map, - ); + todo!() + // let Self { edges, project, mut sources } = self; + // // the map that keeps track of the mapping of resolved solidity file paths -> source unit + // // names + // let mut source_unit_map = SourceUnitNameMap::default(); + // + // let mut cache = ArtifactsCache::new(project, &edges)?; + // // retain and compile only dirty sources + // sources = sources.filtered(&mut cache).set_source_unit_names( + // &project.paths, + // &edges, + // &mut source_unit_map, + // ); + // + // let mut output = sources.compile(&project.solc_config.settings, &project.paths)?; + // + // // reverse the applied source unit names + // output.contracts = source_unit_map.reverse(output.contracts); + // + // // write all artifacts + // let written_artifacts = if !project.no_artifacts { + // T::on_output(&output.contracts, &project.paths)? 
+ // } else { + // Default::default() + // }; + // + // // if caching was enabled, this will write to disk and get the artifacts that weren't + // // compiled but reused + // let cached_artifacts = cache.finish(&written_artifacts)?; + // + // Ok(ProjectCompileOutput { + // compiler_output: output, + // written_artifacts, + // cached_artifacts, + // ignored_error_codes: project.ignored_error_codes.clone(), + // }) + } - let mut output = sources.compile(&project.solc_config.settings, &project.paths)?; + fn preprocess(self) -> Result> { + todo!() + } +} - // reverse the applied source unit names - output.contracts = source_unit_map.reverse(output.contracts); +/// A series of states that comprise the [`ProjectCompiler::compile()`] state machine +struct PreprocessedState<'a, T: ArtifactOutput> { + sources: CompilerSources, + cache: ArtifactsCache<'a, T>, + source_unit_map: SourceUnitNameMap, +} - // write all artifacts - let written_artifacts = if !project.no_artifacts { - T::on_output(&output.contracts, &project.paths)? 
- } else { - Default::default() - }; +impl<'a, T:ArtifactOutput> PreprocessedState<'a, T>{ - // if caching was enabled, this will write to disk and get the artifacts that weren't - // compiled but reused - let cached_artifacts = cache.finish(&written_artifacts)?; + fn new() -> Self { - Ok(ProjectCompileOutput { - compiler_output: output, - written_artifacts, - cached_artifacts, - ignored_error_codes: project.ignored_error_codes.clone(), - }) + todo!() } + + fn compile(self) -> Result> { + + todo!() + } +} + + +struct CompiledState<'a, T: ArtifactOutput> { + sources: CompilerSources, + cache: ArtifactsCache<'a, T>, + source_unit_map: SourceUnitNameMap, +} + +impl<'a, T:ArtifactOutput> CompiledState<'a, T>{ + + fn artifacts(self) -> Result> { + todo!() + } + +} + +struct ArtifactsState<'a, T: ArtifactOutput> { + cache: ArtifactsCache<'a, T>, } /// Determines how the `solc <-> sources` pairs are executed @@ -384,11 +428,9 @@ struct Cache<'a, T: ArtifactOutput> { /// all already existing artifacts cached_artifacts: Artifacts, /// relationship between all the files - edges: &'a GraphEdges, - /// how to configure solc - solc_config: &'a SolcConfig, - /// project paths - paths: &'a ProjectPathsConfig, + edges: GraphEdges, + /// the project + project: &'a Project, /// all files that were filtered because they haven't changed filtered: HashMap)>, /// the corresponding cache entries for all sources that were deemed to be dirty @@ -412,14 +454,14 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { .edges .imports(file) .into_iter() - .map(|import| utils::source_name(import, &self.paths.root).to_path_buf()) + .map(|import| utils::source_name(import, self.project.root()).to_path_buf()) .collect(); let entry = CacheEntry { last_modification_date: CacheEntry::read_last_modification_date(&file).unwrap(), content_hash: source.content_hash(), - source_name: utils::source_name(file, &self.paths.root).into(), - solc_config: self.solc_config.clone(), + source_name: utils::source_name(file, 
self.project.root()).into(), + solc_config: self.project.solc_config.clone(), imports, version_requirement: self.edges.version_requirement(file).map(|v| v.to_string()), // artifacts remain empty until we received the compiler output @@ -493,7 +535,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { /// returns `false` if the corresponding cache entry remained unchanged otherwise `true` fn is_dirty(&self, file: &Path, version: &Version) -> bool { if let Some(hash) = self.content_hashes.get(file) { - let cache_path = utils::source_name(file, &self.paths.root); + let cache_path = utils::source_name(file, self.project.root()); if let Some(entry) = self.cache.entry(&cache_path) { if entry.content_hash.as_bytes() != hash.as_bytes() { tracing::trace!( @@ -502,7 +544,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { ); return true } - if self.solc_config != &entry.solc_config { + if self.project.solc_config != entry.solc_config { tracing::trace!( "changed solc config for cached artifact \"{}\"", file.display() @@ -547,7 +589,7 @@ enum ArtifactsCache<'a, T: ArtifactOutput> { } impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { - fn new(project: &'a Project, edges: &'a GraphEdges) -> Result { + fn new(project: &'a Project, edges: GraphEdges) -> Result { let cache = if project.cached { // read the cache file if it already exists let cache = if project.cache_path().exists() { @@ -574,8 +616,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { cache, cached_artifacts, edges, - solc_config: &project.solc_config, - paths: &project.paths, + project, filtered: Default::default(), dirty_entries: Default::default(), content_hashes: Default::default(), @@ -608,7 +649,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { ArtifactsCache::Ephemeral => Ok(Default::default()), ArtifactsCache::Cached(cache) => { let Cache { - mut cache, cached_artifacts, mut dirty_entries, filtered, paths, .. + mut cache, cached_artifacts, mut dirty_entries, filtered, project, .. 
} = cache; // keep only those files that were previously filtered (not dirty, reused) @@ -636,10 +677,28 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry))); // write to disk - cache.write(&paths.cache)?; + cache.write(project.cache_path())?; Ok(cached_artifacts) } } } } + + +#[cfg(test)] +#[cfg(feature = "project-util")] +mod tests { + use crate::project_util::TempProject; + use super::*; + + #[test] + fn can_preprocess() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); + let project = Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap(); + + let compiler = ProjectCompiler::new(&project).unwrap(); + + + } +} \ No newline at end of file From ddc30ac7891e71aad2b6ebba9bb2fa20744ff3f7 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 23:35:20 +0100 Subject: [PATCH 52/82] feat: add compiler state machine --- ethers-solc/src/compile/project.rs | 158 +++++++++++++++++------------ 1 file changed, 91 insertions(+), 67 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 12d259413..7199e7751 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -82,7 +82,7 @@ use crate::{ output::AggregatedCompilerOutput, resolver::GraphEdges, utils, ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, - SolFilesCache, Solc, SolcConfig, Source, SourceUnitNameMap, Sources, + SolFilesCache, Solc, Source, SourceUnitNameMap, Sources, }; use rayon::prelude::*; use semver::Version; @@ -171,86 +171,99 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// let output = project.compile().unwrap(); /// ``` pub fn compile(self) -> Result> { - todo!() - // let Self { edges, project, mut sources } = self; - // // the map that keeps track of the mapping of resolved solidity file paths -> source unit - // 
// names - // let mut source_unit_map = SourceUnitNameMap::default(); - // - // let mut cache = ArtifactsCache::new(project, &edges)?; - // // retain and compile only dirty sources - // sources = sources.filtered(&mut cache).set_source_unit_names( - // &project.paths, - // &edges, - // &mut source_unit_map, - // ); - // - // let mut output = sources.compile(&project.solc_config.settings, &project.paths)?; - // - // // reverse the applied source unit names - // output.contracts = source_unit_map.reverse(output.contracts); - // - // // write all artifacts - // let written_artifacts = if !project.no_artifacts { - // T::on_output(&output.contracts, &project.paths)? - // } else { - // Default::default() - // }; - // - // // if caching was enabled, this will write to disk and get the artifacts that weren't - // // compiled but reused - // let cached_artifacts = cache.finish(&written_artifacts)?; - // - // Ok(ProjectCompileOutput { - // compiler_output: output, - // written_artifacts, - // cached_artifacts, - // ignored_error_codes: project.ignored_error_codes.clone(), - // }) + // drive the compiler statemachine to completion + self.preprocess()?.compile()?.write_artifacts()?.write_cache() } + /// Does basic preprocessing + /// - sets proper source unit names + /// - check cache fn preprocess(self) -> Result> { - todo!() + let Self { edges, project, mut sources } = self; + // the map that keeps track of the mapping of resolved solidity file paths -> source unit + // names + let mut source_unit_map = SourceUnitNameMap::default(); + + let mut cache = ArtifactsCache::new(project, edges)?; + // retain and compile only dirty sources + sources = sources.filtered(&mut cache).set_source_unit_names( + &project.paths, + cache.edges(), + &mut source_unit_map, + ); + + Ok(PreprocessedState { sources, cache, source_unit_map }) } } /// A series of states that comprise the [`ProjectCompiler::compile()`] state machine +/// +/// The main reason is to debug all states individually struct 
PreprocessedState<'a, T: ArtifactOutput> { sources: CompilerSources, cache: ArtifactsCache<'a, T>, source_unit_map: SourceUnitNameMap, } -impl<'a, T:ArtifactOutput> PreprocessedState<'a, T>{ - - fn new() -> Self { - - todo!() - } - +impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> { + /// advance to the next state by compiling all sources fn compile(self) -> Result> { + let PreprocessedState { sources, cache, source_unit_map } = self; + let mut output = + sources.compile(&cache.project().solc_config.settings, &cache.project().paths)?; - todo!() + // reverse the applied source unit names + output.contracts = source_unit_map.reverse(output.contracts); + + Ok(CompiledState { output, cache }) } } - +/// Represents the state after `solc` was successfully invoked struct CompiledState<'a, T: ArtifactOutput> { - sources: CompilerSources, + output: AggregatedCompilerOutput, cache: ArtifactsCache<'a, T>, - source_unit_map: SourceUnitNameMap, } -impl<'a, T:ArtifactOutput> CompiledState<'a, T>{ +impl<'a, T: ArtifactOutput> CompiledState<'a, T> { + /// advance to the next state by handling all artifacts + /// + /// Writes all output contracts to disk if enabled in the `Project` + fn write_artifacts(self) -> Result> { + let CompiledState { output, cache } = self; + // write all artifacts + let written_artifacts = if !cache.project().no_artifacts { + T::on_output(&output.contracts, &cache.project().paths)? 
+ } else { + Default::default() + }; - fn artifacts(self) -> Result> { - todo!() + Ok(ArtifactsState { output, cache, written_artifacts }) } - } +/// Represents the state after all artifacts were written to disk struct ArtifactsState<'a, T: ArtifactOutput> { + output: AggregatedCompilerOutput, cache: ArtifactsCache<'a, T>, + written_artifacts: Artifacts, +} + +impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> { + /// Writes the cache file + /// + /// this concludes the [`Project::compile()`] statemachine + fn write_cache(self) -> Result> { + let ArtifactsState { output, cache, written_artifacts } = self; + let ignored_error_codes = cache.project().ignored_error_codes.clone(); + let cached_artifacts = cache.finish(&written_artifacts)?; + Ok(ProjectCompileOutput { + compiler_output: output, + written_artifacts, + cached_artifacts, + ignored_error_codes, + }) + } } /// Determines how the `solc <-> sources` pairs are executed @@ -266,7 +279,7 @@ enum CompilerSources { impl CompilerSources { /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] fn filtered(self, cache: &mut ArtifactsCache) -> Self { - fn filterd_sources( + fn filtered_sources( sources: VersionedSources, cache: &mut ArtifactsCache, ) -> VersionedSources { @@ -281,10 +294,10 @@ impl CompilerSources { match self { CompilerSources::Sequential(s) => { - CompilerSources::Sequential(filterd_sources(s, cache)) + CompilerSources::Sequential(filtered_sources(s, cache)) } CompilerSources::Parallel(s, j) => { - CompilerSources::Parallel(filterd_sources(s, cache), j) + CompilerSources::Parallel(filtered_sources(s, cache), j) } } } @@ -584,7 +597,7 @@ impl<'a, T: ArtifactOutput> Cache<'a, T> { #[allow(clippy::large_enum_variant)] enum ArtifactsCache<'a, T: ArtifactOutput> { /// Cache nothing on disk - Ephemeral, + Ephemeral(GraphEdges, &'a Project), Cached(Cache<'a, T>), } @@ -594,7 +607,6 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // read the cache file if it already 
exists let cache = if project.cache_path().exists() { let mut cache = SolFilesCache::read(project.cache_path())?; - // TODO are relative? cache.join_all(project.artifacts_path()).remove_missing_files(); cache } else { @@ -625,16 +637,30 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { ArtifactsCache::Cached(cache) } else { // nothing to cache - ArtifactsCache::Ephemeral + ArtifactsCache::Ephemeral(edges, project) }; Ok(cache) } + fn edges(&self) -> &GraphEdges { + match self { + ArtifactsCache::Ephemeral(edges, _) => edges, + ArtifactsCache::Cached(cache) => &cache.edges, + } + } + + fn project(&self) -> &'a Project { + match self { + ArtifactsCache::Ephemeral(_, project) => project, + ArtifactsCache::Cached(cache) => cache.project, + } + } + /// Filters out those sources that don't need to be compiled fn filter(&mut self, sources: Sources, version: &Version) -> Sources { match self { - ArtifactsCache::Ephemeral => sources, + ArtifactsCache::Ephemeral(_, _) => sources, ArtifactsCache::Cached(cache) => cache.filter(sources, version), } } @@ -646,7 +672,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { /// Returns all the _cached_ artifacts. fn finish(self, written_artifacts: &Artifacts) -> Result> { match self { - ArtifactsCache::Ephemeral => Ok(Default::default()), + ArtifactsCache::Ephemeral(_, _) => Ok(Default::default()), ArtifactsCache::Cached(cache) => { let Cache { mut cache, cached_artifacts, mut dirty_entries, filtered, project, .. 
@@ -685,20 +711,18 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { } } - #[cfg(test)] #[cfg(feature = "project-util")] mod tests { - use crate::project_util::TempProject; use super::*; + use crate::project_util::TempProject; #[test] fn can_preprocess() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); - let project = Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap(); + let project = + Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap(); let compiler = ProjectCompiler::new(&project).unwrap(); - - } -} \ No newline at end of file +} From d782fcdab3cbc6ca43d9eb52e4dce6f10a03d4d8 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 23:43:19 +0100 Subject: [PATCH 53/82] chore: move cache types to cache mod --- ethers-solc/src/cache.rs | 292 ++++++++++++++++++++++++++++- ethers-solc/src/compile/project.rs | 282 +--------------------------- 2 files changed, 292 insertions(+), 282 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 97b316e91..27d82da3c 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -4,14 +4,15 @@ use crate::{ config::SolcConfig, contracts::VersionedContracts, error::{Result, SolcError}, - utils, ArtifactFile, Artifacts, ArtifactsMap, + resolver::GraphEdges, + utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Project, Source, }; use semver::Version; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ collections::{ btree_map::{BTreeMap, Entry}, - HashMap, HashSet, + hash_map, HashMap, HashSet, }, fs::{self}, path::{Path, PathBuf}, @@ -350,6 +351,293 @@ impl CacheEntry { } } +/// A helper abstraction over the [`SolFilesCache`] used to determine what files need to compiled +/// and which `Artifacts` can be reused. 
+pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput> { + /// preexisting cache file + cache: SolFilesCache, + /// all already existing artifacts + cached_artifacts: Artifacts, + /// relationship between all the files + edges: GraphEdges, + /// the project + project: &'a Project, + /// all files that were filtered because they haven't changed + filtered: HashMap)>, + /// the corresponding cache entries for all sources that were deemed to be dirty + /// + /// `CacheEntry` are grouped by their solidity file. + /// During preprocessing the `artifacts` field of a new `CacheEntry` is left blank, because in + /// order to determine the artifacts of the solidity file, the file needs to be compiled first. + /// Only after the `CompilerOutput` is received and all compiled contracts are handled, see + /// [`crate::ArtifactOutput::on_output()`] all artifacts, their disk paths, are determined and + /// can be populated before the updated [`crate::SolFilesCache`] is finally written to disk, + /// see [`Cache::finish()`] + dirty_entries: HashMap)>, + /// the file hashes + content_hashes: HashMap, +} + +impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { + /// Creates a new cache entry for the file + fn create_cache_entry(&self, file: &Path, source: &Source) -> Result { + let imports = self + .edges + .imports(file) + .into_iter() + .map(|import| utils::source_name(import, self.project.root()).to_path_buf()) + .collect(); + + let entry = CacheEntry { + last_modification_date: CacheEntry::read_last_modification_date(&file).unwrap(), + content_hash: source.content_hash(), + source_name: utils::source_name(file, self.project.root()).into(), + solc_config: self.project.solc_config.clone(), + imports, + version_requirement: self.edges.version_requirement(file).map(|v| v.to_string()), + // artifacts remain empty until we received the compiler output + artifacts: Default::default(), + }; + + Ok(entry) + } + + /// inserts a new cache entry for the given file + /// + /// If 
there is already an entry available for the file the given version is added to the set + fn insert_new_cache_entry( + &mut self, + file: &Path, + source: &Source, + version: Version, + ) -> Result<()> { + if let Some((_, versions)) = self.dirty_entries.get_mut(file) { + versions.insert(version); + } else { + let entry = self.create_cache_entry(file, source)?; + self.dirty_entries.insert(file.to_path_buf(), (entry, HashSet::from([version]))); + } + Ok(()) + } + + /// inserts the filtered source with the fiven version + fn insert_filtered_source(&mut self, file: PathBuf, source: Source, version: Version) { + match self.filtered.entry(file) { + hash_map::Entry::Occupied(mut entry) => { + entry.get_mut().1.insert(version); + } + hash_map::Entry::Vacant(entry) => { + entry.insert((source, HashSet::from([version]))); + } + } + } + + /// Returns only those sources that + /// - are new + /// - were changed + /// - their imports were changed + /// - their artifact is missing + fn filter(&mut self, sources: Sources, version: &Version) -> Sources { + self.fill_hashes(&sources); + sources + .into_iter() + .filter_map(|(file, source)| self.requires_solc(file, source, version)) + .collect() + } + + /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reu-used + fn requires_solc( + &mut self, + file: PathBuf, + source: Source, + version: &Version, + ) -> Option<(PathBuf, Source)> { + if !self.is_dirty(&file, version) && + self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version)) + { + self.insert_filtered_source(file, source, version.clone()); + None + } else { + self.insert_new_cache_entry(&file, &source, version.clone()).unwrap(); + Some((file, source)) + } + } + + /// returns `false` if the corresponding cache entry remained unchanged otherwise `true` + fn is_dirty(&self, file: &Path, version: &Version) -> bool { + if let Some(hash) = self.content_hashes.get(file) { + let cache_path = utils::source_name(file, 
self.project.root()); + if let Some(entry) = self.cache.entry(&cache_path) { + if entry.content_hash.as_bytes() != hash.as_bytes() { + tracing::trace!( + "changed content hash for cached artifact \"{}\"", + file.display() + ); + return true + } + if self.project.solc_config != entry.solc_config { + tracing::trace!( + "changed solc config for cached artifact \"{}\"", + file.display() + ); + return true + } + + if !entry.contains_version(version) { + tracing::trace!("missing linked artifacts for version \"{}\"", version); + return true + } + + if entry.artifacts_for_version(version).any(|artifact_path| { + // artifact does not exist + !self.cached_artifacts.has_artifact(artifact_path) + }) { + return true + } + // all things match, can be reused + return false + } + } + true + } + + /// Adds the file's hashes to the set if not set yet + fn fill_hashes(&mut self, sources: &Sources) { + for (file, source) in sources { + if let hash_map::Entry::Vacant(entry) = self.content_hashes.entry(file.clone()) { + entry.insert(source.content_hash()); + } + } + } +} + +/// Abstraction over configured caching which can be either non-existent or an already loaded cache +#[allow(clippy::large_enum_variant)] +pub(crate) enum ArtifactsCache<'a, T: ArtifactOutput> { + /// Cache nothing on disk + Ephemeral(GraphEdges, &'a Project), + /// Handles the actual cached artifacts, detects artifacts that can be reused + Cached(ArtifactsCacheInner<'a, T>), +} + +impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { + pub fn new(project: &'a Project, edges: GraphEdges) -> Result { + let cache = if project.cached { + // read the cache file if it already exists + let cache = if project.cache_path().exists() { + let mut cache = SolFilesCache::read(project.cache_path())?; + cache.join_all(project.artifacts_path()).remove_missing_files(); + cache + } else { + SolFilesCache::default() + }; + + // read all artifacts + let cached_artifacts = if project.paths.artifacts.exists() { + tracing::trace!("reading 
artifacts from cache.."); + // if we failed to read the whole set of artifacts we use an empty set + let artifacts = cache.read_artifacts::().unwrap_or_default(); + tracing::trace!("read {} artifacts from cache", artifacts.artifact_files().count()); + artifacts + } else { + Default::default() + }; + + let cache = ArtifactsCacheInner { + cache, + cached_artifacts, + edges, + project, + filtered: Default::default(), + dirty_entries: Default::default(), + content_hashes: Default::default(), + }; + + ArtifactsCache::Cached(cache) + } else { + // nothing to cache + ArtifactsCache::Ephemeral(edges, project) + }; + + Ok(cache) + } + + pub fn edges(&self) -> &GraphEdges { + match self { + ArtifactsCache::Ephemeral(edges, _) => edges, + ArtifactsCache::Cached(cache) => &cache.edges, + } + } + + pub fn project(&self) -> &'a Project { + match self { + ArtifactsCache::Ephemeral(_, project) => project, + ArtifactsCache::Cached(cache) => cache.project, + } + } + + /// Filters out those sources that don't need to be compiled + pub fn filter(&mut self, sources: Sources, version: &Version) -> Sources { + match self { + ArtifactsCache::Ephemeral(_, _) => sources, + ArtifactsCache::Cached(cache) => cache.filter(sources, version), + } + } + + /// Consumes the `Cache`, rebuilds the [`SolFileCache`] by merging all artifacts that were + /// filtered out in the previous step (`Cache::filtered`) and the artifacts that were just + /// written to disk `written_artifacts`. + /// + /// Returns all the _cached_ artifacts. + pub fn write_cache( + self, + written_artifacts: &Artifacts, + ) -> Result> { + match self { + ArtifactsCache::Ephemeral(_, _) => Ok(Default::default()), + ArtifactsCache::Cached(cache) => { + let ArtifactsCacheInner { + mut cache, + cached_artifacts, + mut dirty_entries, + filtered, + project, + .. 
+ } = cache; + + // keep only those files that were previously filtered (not dirty, reused) + cache.retain(filtered.iter().map(|(p, (_, v))| (p.as_path(), v))); + + // add the artifacts to the cache entries, this way we can keep a mapping from + // solidity file to its artifacts + // this step is necessary because the concrete artifacts are only known after solc + // was invoked and received as output, before that we merely know the file and + // the versions, so we add the artifacts on a file by file basis + for (file, artifacts) in written_artifacts.as_ref() { + let file_path = Path::new(&file); + if let Some((entry, versions)) = dirty_entries.get_mut(file_path) { + entry.insert_artifacts(artifacts.iter().map(|(name, artifacts)| { + let artifacts = artifacts + .iter() + .filter(|artifact| versions.contains(&artifact.version)) + .collect::>(); + (name, artifacts) + })); + } + } + + // add the new cache entries to the cache file + cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry))); + + // write to disk + cache.write(project.cache_path())?; + + Ok(cached_artifacts) + } + } + } +} + /// A helper type to handle source name/full disk mappings /// /// The disk path is the actual path where a file can be found on disk. 
diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 7199e7751..ca0b94397 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -77,7 +77,7 @@ use crate::{ artifact_output::Artifacts, artifacts::{Settings, VersionedSources}, - cache::CacheEntry, + cache::{ArtifactsCache, CacheEntry}, error::Result, output::AggregatedCompilerOutput, resolver::GraphEdges, @@ -256,7 +256,7 @@ impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> { fn write_cache(self) -> Result> { let ArtifactsState { output, cache, written_artifacts } = self; let ignored_error_codes = cache.project().ignored_error_codes.clone(); - let cached_artifacts = cache.finish(&written_artifacts)?; + let cached_artifacts = cache.write_cache(&written_artifacts)?; Ok(ProjectCompileOutput { compiler_output: output, written_artifacts, @@ -433,284 +433,6 @@ fn compile_parallel( Ok(aggregated) } -/// A helper abstraction over the [`SolFilesCache`] used to determine what files need to compiled -/// and which `Artifacts` can be reused. -struct Cache<'a, T: ArtifactOutput> { - /// preexisting cache file - cache: SolFilesCache, - /// all already existing artifacts - cached_artifacts: Artifacts, - /// relationship between all the files - edges: GraphEdges, - /// the project - project: &'a Project, - /// all files that were filtered because they haven't changed - filtered: HashMap)>, - /// the corresponding cache entries for all sources that were deemed to be dirty - /// - /// `CacheEntry` are grouped by their solidity file. - /// During preprocessing the `artifacts` field of a new `CacheEntry` is left blank, because in - /// order to determine the artifacts of the solidity file, the file needs to be compiled first. 
- /// Only after the `CompilerOutput` is received and all compiled contracts are handled, see - /// [`crate::ArtifactOutput::on_output()`] all artifacts, their disk paths, are determined and - /// can be populated before the updated [`crate::SolFilesCache`] is finally written to disk, - /// see [`Cache::finish()`] - dirty_entries: HashMap)>, - /// the file hashes - content_hashes: HashMap, -} - -impl<'a, T: ArtifactOutput> Cache<'a, T> { - /// Creates a new cache entry for the file - fn create_cache_entry(&self, file: &Path, source: &Source) -> Result { - let imports = self - .edges - .imports(file) - .into_iter() - .map(|import| utils::source_name(import, self.project.root()).to_path_buf()) - .collect(); - - let entry = CacheEntry { - last_modification_date: CacheEntry::read_last_modification_date(&file).unwrap(), - content_hash: source.content_hash(), - source_name: utils::source_name(file, self.project.root()).into(), - solc_config: self.project.solc_config.clone(), - imports, - version_requirement: self.edges.version_requirement(file).map(|v| v.to_string()), - // artifacts remain empty until we received the compiler output - artifacts: Default::default(), - }; - - Ok(entry) - } - - /// inserts a new cache entry for the given file - /// - /// If there is already an entry available for the file the given version is added to the set - fn insert_new_cache_entry( - &mut self, - file: &Path, - source: &Source, - version: Version, - ) -> Result<()> { - if let Some((_, versions)) = self.dirty_entries.get_mut(file) { - versions.insert(version); - } else { - let entry = self.create_cache_entry(file, source)?; - self.dirty_entries.insert(file.to_path_buf(), (entry, HashSet::from([version]))); - } - Ok(()) - } - - /// inserts the filtered source with the fiven version - fn insert_filtered_source(&mut self, file: PathBuf, source: Source, version: Version) { - match self.filtered.entry(file) { - Entry::Occupied(mut entry) => { - entry.get_mut().1.insert(version); - } - 
Entry::Vacant(entry) => { - entry.insert((source, HashSet::from([version]))); - } - } - } - - /// Returns only those sources that - /// - are new - /// - were changed - /// - their imports were changed - /// - their artifact is missing - fn filter(&mut self, sources: Sources, version: &Version) -> Sources { - self.fill_hashes(&sources); - sources - .into_iter() - .filter_map(|(file, source)| self.requires_solc(file, source, version)) - .collect() - } - - /// Returns `Some` if the file _needs_ to be compiled and `None` if the artifact can be reu-used - fn requires_solc( - &mut self, - file: PathBuf, - source: Source, - version: &Version, - ) -> Option<(PathBuf, Source)> { - if !self.is_dirty(&file, version) && - self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version)) - { - self.insert_filtered_source(file, source, version.clone()); - None - } else { - self.insert_new_cache_entry(&file, &source, version.clone()).unwrap(); - Some((file, source)) - } - } - - /// returns `false` if the corresponding cache entry remained unchanged otherwise `true` - fn is_dirty(&self, file: &Path, version: &Version) -> bool { - if let Some(hash) = self.content_hashes.get(file) { - let cache_path = utils::source_name(file, self.project.root()); - if let Some(entry) = self.cache.entry(&cache_path) { - if entry.content_hash.as_bytes() != hash.as_bytes() { - tracing::trace!( - "changed content hash for cached artifact \"{}\"", - file.display() - ); - return true - } - if self.project.solc_config != entry.solc_config { - tracing::trace!( - "changed solc config for cached artifact \"{}\"", - file.display() - ); - return true - } - - if !entry.contains_version(version) { - tracing::trace!("missing linked artifacts for version \"{}\"", version); - return true - } - - if entry.artifacts_for_version(version).any(|artifact_path| { - // artifact does not exist - !self.cached_artifacts.has_artifact(artifact_path) - }) { - return true - } - // all things match, can be reused - 
return false - } - } - true - } - - /// Adds the file's hashes to the set if not set yet - fn fill_hashes(&mut self, sources: &Sources) { - for (file, source) in sources { - if let hash_map::Entry::Vacant(entry) = self.content_hashes.entry(file.clone()) { - entry.insert(source.content_hash()); - } - } - } -} - -/// Abstraction over configured caching which can be either non-existent or an already loaded cache -#[allow(clippy::large_enum_variant)] -enum ArtifactsCache<'a, T: ArtifactOutput> { - /// Cache nothing on disk - Ephemeral(GraphEdges, &'a Project), - Cached(Cache<'a, T>), -} - -impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { - fn new(project: &'a Project, edges: GraphEdges) -> Result { - let cache = if project.cached { - // read the cache file if it already exists - let cache = if project.cache_path().exists() { - let mut cache = SolFilesCache::read(project.cache_path())?; - cache.join_all(project.artifacts_path()).remove_missing_files(); - cache - } else { - SolFilesCache::default() - }; - - // read all artifacts - let cached_artifacts = if project.paths.artifacts.exists() { - tracing::trace!("reading artifacts from cache.."); - // if we failed to read the whole set of artifacts we use an empty set - let artifacts = cache.read_artifacts::().unwrap_or_default(); - tracing::trace!("read {} artifacts from cache", artifacts.artifact_files().count()); - artifacts - } else { - Default::default() - }; - - let cache = Cache { - cache, - cached_artifacts, - edges, - project, - filtered: Default::default(), - dirty_entries: Default::default(), - content_hashes: Default::default(), - }; - - ArtifactsCache::Cached(cache) - } else { - // nothing to cache - ArtifactsCache::Ephemeral(edges, project) - }; - - Ok(cache) - } - - fn edges(&self) -> &GraphEdges { - match self { - ArtifactsCache::Ephemeral(edges, _) => edges, - ArtifactsCache::Cached(cache) => &cache.edges, - } - } - - fn project(&self) -> &'a Project { - match self { - ArtifactsCache::Ephemeral(_, 
project) => project, - ArtifactsCache::Cached(cache) => cache.project, - } - } - - /// Filters out those sources that don't need to be compiled - fn filter(&mut self, sources: Sources, version: &Version) -> Sources { - match self { - ArtifactsCache::Ephemeral(_, _) => sources, - ArtifactsCache::Cached(cache) => cache.filter(sources, version), - } - } - - /// Consumes the `Cache`, rebuilds the [`SolFileCache`] by merging all artifacts that were - /// filtered out in the previous step (`Cache::filtered`) and the artifacts that were just - /// written to disk `written_artifacts`. - /// - /// Returns all the _cached_ artifacts. - fn finish(self, written_artifacts: &Artifacts) -> Result> { - match self { - ArtifactsCache::Ephemeral(_, _) => Ok(Default::default()), - ArtifactsCache::Cached(cache) => { - let Cache { - mut cache, cached_artifacts, mut dirty_entries, filtered, project, .. - } = cache; - - // keep only those files that were previously filtered (not dirty, reused) - cache.retain(filtered.iter().map(|(p, (_, v))| (p.as_path(), v))); - - // add the artifacts to the cache entries, this way we can keep a mapping from - // solidity file to its artifacts - // this step is necessary because the concrete artifacts are only known after solc - // was invoked and received as output, before that we merely know the file and - // the versions, so we add the artifacts on a file by file basis - for (file, artifacts) in written_artifacts.as_ref() { - let file_path = Path::new(&file); - if let Some((entry, versions)) = dirty_entries.get_mut(file_path) { - entry.insert_artifacts(artifacts.iter().map(|(name, artifacts)| { - let artifacts = artifacts - .iter() - .filter(|artifact| versions.contains(&artifact.version)) - .collect::>(); - (name, artifacts) - })); - } - } - - // add the new cache entries to the cache file - cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry))); - - // write to disk - cache.write(project.cache_path())?; - - 
Ok(cached_artifacts) - } - } - } -} - #[cfg(test)] #[cfg(feature = "project-util")] mod tests { From cb45e51d8e4f8d4b47c2fc5fd5dcb8ea7db69aeb Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Sun, 30 Jan 2022 23:48:44 +0100 Subject: [PATCH 54/82] chore: make clippy happy --- ethers-solc/src/compile/project.rs | 17 +++++++---------- ethers-solc/src/lib.rs | 2 +- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index ca0b94397..a47f92d0d 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -77,19 +77,16 @@ use crate::{ artifact_output::Artifacts, artifacts::{Settings, VersionedSources}, - cache::{ArtifactsCache, CacheEntry}, + cache::ArtifactsCache, error::Result, output::AggregatedCompilerOutput, resolver::GraphEdges, - utils, ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, - SolFilesCache, Solc, Source, SourceUnitNameMap, Sources, + ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Solc, + SourceUnitNameMap, Sources, }; use rayon::prelude::*; -use semver::Version; -use std::{ - collections::{btree_map::BTreeMap, hash_map, hash_map::Entry, HashMap, HashSet}, - path::{Path, PathBuf}, -}; + +use std::collections::btree_map::BTreeMap; #[derive(Debug)] pub struct ProjectCompiler<'a, T: ArtifactOutput> { @@ -437,7 +434,7 @@ fn compile_parallel( #[cfg(feature = "project-util")] mod tests { use super::*; - use crate::project_util::TempProject; + use std::path::PathBuf; #[test] fn can_preprocess() { @@ -445,6 +442,6 @@ mod tests { let project = Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap(); - let compiler = ProjectCompiler::new(&project).unwrap(); + let _compiler = ProjectCompiler::new(&project).unwrap(); } } diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 6810f142e..8511c7dbb 100644 --- 
a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -21,7 +21,7 @@ mod config; pub use config::{AllowedLibPaths, PathStyle, ProjectPathsConfig, SolcConfig}; pub mod remappings; -use crate::{artifacts::Source, cache::SolFilesCache}; +use crate::artifacts::Source; pub mod error; pub mod utils; From 7a0f8d51ecf95de2d0b9e9ec34187d098dd893be Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 13:34:57 +0100 Subject: [PATCH 55/82] feat: add debug derives --- ethers-solc/src/artifact_output.rs | 4 ++-- ethers-solc/src/cache.rs | 2 ++ ethers-solc/src/compile/project.rs | 6 +++++- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 8d22707cc..f4782f6ee 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -11,7 +11,7 @@ use semver::Version; use serde::{de::DeserializeOwned, Serialize}; use std::{ collections::btree_map::BTreeMap, - fs, io, + fmt, fs, io, path::{Path, PathBuf}, }; @@ -243,7 +243,7 @@ where /// relationship (1-N+). pub trait ArtifactOutput { /// Represents the artifact that will be stored for a `Contract` - type Artifact: Artifact + DeserializeOwned + Serialize; + type Artifact: Artifact + DeserializeOwned + Serialize + fmt::Debug; /// Handle the aggregated set of compiled contracts from the solc [`crate::CompilerOutput`]. /// diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 27d82da3c..03165bc4a 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -353,6 +353,7 @@ impl CacheEntry { /// A helper abstraction over the [`SolFilesCache`] used to determine what files need to compiled /// and which `Artifacts` can be reused. 
+#[derive(Debug)] pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput> { /// preexisting cache file cache: SolFilesCache, @@ -513,6 +514,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { /// Abstraction over configured caching which can be either non-existent or an already loaded cache #[allow(clippy::large_enum_variant)] +#[derive(Debug)] pub(crate) enum ArtifactsCache<'a, T: ArtifactOutput> { /// Cache nothing on disk Ephemeral(GraphEdges, &'a Project), diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index a47f92d0d..dfaf84931 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -196,6 +196,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { /// A series of states that comprise the [`ProjectCompiler::compile()`] state machine /// /// The main reason is to debug all states individually +#[derive(Debug)] struct PreprocessedState<'a, T: ArtifactOutput> { sources: CompilerSources, cache: ArtifactsCache<'a, T>, @@ -217,6 +218,7 @@ impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> { } /// Represents the state after `solc` was successfully invoked +#[derive(Debug)] struct CompiledState<'a, T: ArtifactOutput> { output: AggregatedCompilerOutput, cache: ArtifactsCache<'a, T>, @@ -240,6 +242,7 @@ impl<'a, T: ArtifactOutput> CompiledState<'a, T> { } /// Represents the state after all artifacts were written to disk +#[derive(Debug)] struct ArtifactsState<'a, T: ArtifactOutput> { output: AggregatedCompilerOutput, cache: ArtifactsCache<'a, T>, @@ -442,6 +445,7 @@ mod tests { let project = Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap(); - let _compiler = ProjectCompiler::new(&project).unwrap(); + let compiler = ProjectCompiler::new(&project).unwrap(); + let prep = compiler.preprocess().unwrap(); } } From 1332582cce97f0cdaef50d18158b9cdb04e3e4ac Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 14:47:25 +0100 Subject: 
[PATCH 56/82] fix: use resolved import source unit names --- ethers-solc/src/cache.rs | 30 +++++++++++++++++++++------- ethers-solc/src/compile/contracts.rs | 5 +++++ ethers-solc/src/compile/project.rs | 14 ++++++++++++- ethers-solc/src/lib.rs | 4 ++-- ethers-solc/src/resolver.rs | 15 +++++++++----- 5 files changed, 53 insertions(+), 15 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 03165bc4a..2d0069686 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -43,6 +43,14 @@ impl SolFilesCache { Self { format: ETHERS_FORMAT_VERSION.to_string(), files } } + pub fn is_empty(&self) -> bool { + self.files.is_empty() + } + + pub fn len(&self) -> usize { + self.files.len() + } + /// Returns the corresponding `CacheEntry` for the file if it exists pub fn entry(&self, file: impl AsRef) -> Option<&CacheEntry> { self.files.get(file.as_ref()) @@ -356,15 +364,15 @@ impl CacheEntry { #[derive(Debug)] pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput> { /// preexisting cache file - cache: SolFilesCache, + pub cache: SolFilesCache, /// all already existing artifacts - cached_artifacts: Artifacts, + pub cached_artifacts: Artifacts, /// relationship between all the files - edges: GraphEdges, + pub edges: GraphEdges, /// the project - project: &'a Project, + pub project: &'a Project, /// all files that were filtered because they haven't changed - filtered: HashMap)>, + pub filtered: HashMap)>, /// the corresponding cache entries for all sources that were deemed to be dirty /// /// `CacheEntry` are grouped by their solidity file. 
@@ -374,9 +382,9 @@ pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput> { /// [`crate::ArtifactOutput::on_output()`] all artifacts, their disk paths, are determined and /// can be populated before the updated [`crate::SolFilesCache`] is finally written to disk, /// see [`Cache::finish()`] - dirty_entries: HashMap)>, + pub dirty_entries: HashMap)>, /// the file hashes - content_hashes: HashMap, + pub content_hashes: HashMap, } impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { @@ -564,6 +572,14 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { Ok(cache) } + #[cfg(test)] + pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T>> { + match self { + ArtifactsCache::Ephemeral(_, _) => None, + ArtifactsCache::Cached(cached) => Some(cached), + } + } + pub fn edges(&self) -> &GraphEdges { match self { ArtifactsCache::Ephemeral(edges, _) => edges, diff --git a/ethers-solc/src/compile/contracts.rs b/ethers-solc/src/compile/contracts.rs index f93034217..9380414bf 100644 --- a/ethers-solc/src/compile/contracts.rs +++ b/ethers-solc/src/compile/contracts.rs @@ -17,6 +17,11 @@ impl VersionedContracts { self.0.len() } + /// Returns an iterator over all files + pub fn files(&self) -> impl Iterator + '_ { + self.0.keys() + } + /// Finds the _first_ contract with the given name /// /// # Example diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index dfaf84931..878c27c96 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -128,7 +128,7 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { let sources_by_version = versions.get(&project.allowed_lib_paths)?; let sources = if project.solc_jobs > 1 && sources_by_version.len() > 1 { - // if there are multiple different versions and we can use multiple jobs we can compile + // if there are multiple different versions, and we can use multiple jobs we can compile // them in parallel CompilerSources::Parallel(sources_by_version, 
project.solc_jobs) } else { @@ -447,5 +447,17 @@ mod tests { let compiler = ProjectCompiler::new(&project).unwrap(); let prep = compiler.preprocess().unwrap(); + let cache = prep.cache.as_cached().unwrap(); + // 3 contracts + assert_eq!(cache.dirty_entries.len(), 3); + assert!(cache.filtered.is_empty()); + assert!(cache.cache.is_empty()); + assert_eq!(prep.source_unit_map.source_unit_name_to_absolute_path.len(), 3); + + let compiled = prep.compile().unwrap(); + let files = compiled.output.contracts.files().collect::>(); + assert_eq!(files.len(), 3); + + // dbg!(compiled.output.sources.keys()); } } diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 8511c7dbb..7ec948395 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -518,11 +518,11 @@ impl ProjectBuilder { offline, } = self; + let paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?; + let solc = solc.unwrap_or_default(); let solc_config = solc_config.unwrap_or_else(|| SolcConfig::builder().build()); - let paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?; - if allowed_paths.is_empty() { // allow every contract under root by default allowed_paths.push(paths.root.clone()) diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index 5bda7934f..92f289918 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -97,13 +97,18 @@ impl GraphEdges { /// source unit name of `/user/projects/myproject/src/Contract.sol` is `src/Contract.sol` if the /// `myproject` dir is the project's root directory. /// - /// If the `file` is a resolved import, then this returns the relative part after the remappings - /// are applied, also starting at the project's root directory. 
+ /// If the `file` is a resolved import, then this returns the resolved path, after remappings + /// were applied, because, solc also applies them during their VFS lookup and they are provided + /// in the `CompilerInput`'s `Settings`: see [`CompilerInput::with_remappings()`] /// - /// **NOTE:** All remappings are already applied when imports are resolved, therefore the source - /// unit name is always determined by [`utils::source_name()`]. + /// Read more about [Import Remapping](https://docs.soliditylang.org/en/develop/path-resolution.html#import-remapping) pub fn get_source_unit_name(&self, file: impl AsRef, root: impl AsRef) -> PathBuf { - utils::source_name(file.as_ref(), root).to_path_buf() + let file = file.as_ref(); + if self.is_input_file(file) { + utils::source_name(file, root).to_path_buf() + } else { + file.to_path_buf() + } } /// Returns the `VersionReq` for the given file From 0e7d08f14ccdfe1b4f5f0e5f65b792f54244cbd4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 20:22:44 +0100 Subject: [PATCH 57/82] fix: failing tests --- ethers-solc/src/cache.rs | 11 --- ethers-solc/src/compile/output.rs | 2 +- ethers-solc/src/lib.rs | 3 + .../test-data/solidity-files-cache.json | 75 ------------------- 4 files changed, 4 insertions(+), 87 deletions(-) delete mode 100644 ethers-solc/test-data/solidity-files-cache.json diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 2d0069686..c04cd529b 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -705,14 +705,3 @@ impl SourceUnitNameMap { VersionedContracts(contracts) } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_parse_solidity_files_cache() { - let input = include_str!("../test-data/solidity-files-cache.json"); - let _ = serde_json::from_str::(input).unwrap(); - } -} diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 89ce07045..85b65a0fb 100644 --- a/ethers-solc/src/compile/output.rs +++ 
b/ethers-solc/src/compile/output.rs @@ -103,7 +103,7 @@ where impl fmt::Display for ProjectCompileOutput { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.compiler_output.is_empty() { + if self.compiler_output.contracts.is_empty() && self.compiler_output.errors.is_empty() { f.write_str("Nothing to compile") } else { self.compiler_output.diagnostics(&self.ignored_error_codes).fmt(f) diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 7ec948395..a6d8313d1 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -572,6 +572,8 @@ impl ArtifactOutput for Project { #[cfg(test)] mod tests { + use crate::remappings::Remapping; + #[test] #[cfg(all(feature = "svm", feature = "async"))] fn test_build_all_versions() { @@ -627,6 +629,7 @@ mod tests { .root(&root) .sources(root.join("src")) .lib(root.join("lib")) + .remappings(Remapping::find_many(&root.join("lib"))) .build() .unwrap(); let project = Project::builder().no_artifacts().paths(paths).ephemeral().build().unwrap(); diff --git a/ethers-solc/test-data/solidity-files-cache.json b/ethers-solc/test-data/solidity-files-cache.json deleted file mode 100644 index 2572b13e1..000000000 --- a/ethers-solc/test-data/solidity-files-cache.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "_format": "hh-sol-cache-2", - "files": { - "Greeter.sol": { - "lastModificationDate": 1634246369587, - "contentHash": "483b7f4f64b06a04a24bd0af7c3bf8b7", - "sourceName": "contracts/Greeter.sol", - "solcConfig": { - "version": "0.8.4", - "settings": { - "optimizer": { - "enabled": false, - "runs": 200 - }, - "outputSelection": { - "*": { - "*": [ - "abi", - "evm.bytecode", - "evm.deployedBytecode", - "evm.methodIdentifiers" - ], - "": [ - "ast" - ] - } - } - } - }, - "imports": [ - "hardhat/console.sol" - ], - "versionPragmas": [ - "^0.8.0" - ], - "artifacts": [ - "Greeter" - ] - }, - "console.sol": { - "lastModificationDate": 1634245289287, - "contentHash": "cc4777addd464ea56fa35b1c45df0591", - "sourceName": 
"hardhat/console.sol", - "solcConfig": { - "version": "0.8.4", - "settings": { - "optimizer": { - "enabled": false, - "runs": 200 - }, - "outputSelection": { - "*": { - "*": [ - "abi", - "evm.bytecode", - "evm.deployedBytecode", - "evm.methodIdentifiers" - ], - "": [ - "ast" - ] - } - } - } - }, - "imports": [], - "versionPragmas": [ - ">=0.4.22 <0.9.0" - ], - "artifacts": [ - "console" - ] - } - } -} From 6e5a98fba523f8170ab022e87de157338da7f4d3 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 20:30:34 +0100 Subject: [PATCH 58/82] test: test multiple libs properly --- ethers-solc/src/lib.rs | 5 +++++ .../test-data/test-contract-libs/lib1/{ => bar/src}/Bar.sol | 0 .../test-data/test-contract-libs/lib2/{ => baz/src}/Baz.sol | 0 ethers-solc/test-data/test-contract-libs/src/Foo.sol | 4 ++-- 4 files changed, 7 insertions(+), 2 deletions(-) rename ethers-solc/test-data/test-contract-libs/lib1/{ => bar/src}/Bar.sol (100%) rename ethers-solc/test-data/test-contract-libs/lib2/{ => baz/src}/Baz.sol (100%) diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index a6d8313d1..0b2e062fc 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -604,6 +604,11 @@ mod tests { .sources(root.join("src")) .lib(root.join("lib1")) .lib(root.join("lib2")) + .remappings( + Remapping::find_many(&root.join("lib1")) + .into_iter() + .chain(Remapping::find_many(&root.join("lib2"))), + ) .build() .unwrap(); let project = Project::builder() diff --git a/ethers-solc/test-data/test-contract-libs/lib1/Bar.sol b/ethers-solc/test-data/test-contract-libs/lib1/bar/src/Bar.sol similarity index 100% rename from ethers-solc/test-data/test-contract-libs/lib1/Bar.sol rename to ethers-solc/test-data/test-contract-libs/lib1/bar/src/Bar.sol diff --git a/ethers-solc/test-data/test-contract-libs/lib2/Baz.sol b/ethers-solc/test-data/test-contract-libs/lib2/baz/src/Baz.sol similarity index 100% rename from ethers-solc/test-data/test-contract-libs/lib2/Baz.sol rename to 
ethers-solc/test-data/test-contract-libs/lib2/baz/src/Baz.sol diff --git a/ethers-solc/test-data/test-contract-libs/src/Foo.sol b/ethers-solc/test-data/test-contract-libs/src/Foo.sol index ab9eba943..9e96fe3ce 100644 --- a/ethers-solc/test-data/test-contract-libs/src/Foo.sol +++ b/ethers-solc/test-data/test-contract-libs/src/Foo.sol @@ -1,6 +1,6 @@ pragma solidity 0.8.6; -import "../lib1/Bar.sol"; -import "../lib2/Baz.sol"; +import "bar/Bar.sol"; +import "baz/Baz.sol"; contract Foo is Bar, Baz {} From 57ed568ea852a0ad7175ccdc7f317ff82d55be47 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 20:36:30 +0100 Subject: [PATCH 59/82] chore: make clippy happy --- ethers-solc/src/compile/project.rs | 5 +---- ethers-solc/tests/project.rs | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 878c27c96..21a9b0477 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -455,9 +455,6 @@ mod tests { assert_eq!(prep.source_unit_map.source_unit_name_to_absolute_path.len(), 3); let compiled = prep.compile().unwrap(); - let files = compiled.output.contracts.files().collect::>(); - assert_eq!(files.len(), 3); - - // dbg!(compiled.output.sources.keys()); + assert_eq!(compiled.output.contracts.files().count(), 3); } } diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index f7b362d7b..784fae530 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -376,5 +376,5 @@ fn can_flatten_file_with_duplicates() { assert_eq!(result.matches("contract Foo {").count(), 1); assert_eq!(result.matches("contract Bar {").count(), 1); assert_eq!(result.matches("contract FooBar {").count(), 1); - assert_eq!(result.matches(";").count(), 1); + assert_eq!(result.matches(';').count(), 1); } From 82dabeb85caf2d651b2b9dcaa90367df037d10f6 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 20:41:22 +0100 
Subject: [PATCH 60/82] chore: update CHANGELOG --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a92aa447c..58e131e1c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,10 @@ ### Unreleased +- Total revamp of the `Project::compile` pipeline + [#802](https://github.com/gakonst/ethers-rs/pull/802) + - Support multiple versions of compiled contracts + - Breaking: deprecate hardhat cache file compatibility, cache file now tracks artifact paths and their versions - Fix flatten replacement target location [#846](https://github.com/gakonst/ethers-rs/pull/846) - Fix duplicate files during flattening From 8e1b1754f73bed75049c5b982d45eae309979462 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 20:55:02 +0100 Subject: [PATCH 61/82] fix: doc tests --- ethers-solc/src/cache.rs | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index c04cd529b..10f33def5 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -62,15 +62,22 @@ impl SolFilesCache { } /// Reads the cache json file from the given path + /// + /// # Errors + /// + /// If the cache file does not exist + /// /// # Example /// /// ``` + /// # fn t() { /// use ethers_solc::cache::SolFilesCache; /// use ethers_solc::Project; /// /// let project = Project::builder().build().unwrap(); /// let mut cache = SolFilesCache::read(project.cache_path()).unwrap(); /// cache.join_all(project.artifacts_path()); + /// # } /// ``` #[tracing::instrument(skip_all, name = "sol-files-cache::read")] pub fn read(path: impl AsRef) -> Result { @@ -81,7 +88,7 @@ impl SolFilesCache { Ok(cache) } - /// Write the cache to json file + /// Write the cache as json file to the given path pub fn write(&self, path: impl AsRef) -> Result<()> { let path = path.as_ref(); let file = fs::File::create(path).map_err(|err| SolcError::io(err, path))?; @@ -122,12 +129,14 @@ impl 
SolFilesCache { /// /// ``` /// use ethers_solc::cache::SolFilesCache; - /// use ethers_solc::{MinimalCombinedArtifacts, Project}; - /// + /// use ethers_solc::Project; + /// use ethers_solc::artifacts::CompactContractBytecode; + /// # fn t() { /// let project = Project::builder().build().unwrap(); /// let mut cache = SolFilesCache::read(project.cache_path()).unwrap(); /// cache.join_all(project.artifacts_path()); - /// let artifacts = cache.read_artifacts::().unwrap(); + /// let artifacts = cache.read_artifacts::().unwrap(); + /// # } /// ``` pub fn read_artifacts(&self) -> Result> { let mut artifacts = ArtifactsMap::new(); From 5ed8bbbe4354a8348daa3e2397d6143423630277 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 23:34:07 +0100 Subject: [PATCH 62/82] fix: set offline mode correctly --- ethers-solc/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 0b2e062fc..299b49325 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -399,7 +399,7 @@ impl ProjectBuilder { /// Prevents network possible access to download/check solc installs #[must_use] pub fn offline(self) -> Self { - self.set_cached(false) + self.set_offline(true) } /// Sets the offline status From 705fa9bcce1dfbecf7a7e34a417acf28ee0014d7 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 23:44:44 +0100 Subject: [PATCH 63/82] chore: make it compile again --- ethers-solc/src/artifacts.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index 3f81d87f1..6f6e22f4c 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -1724,7 +1724,7 @@ pub struct SourceLocation { pub end: i32, } -#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] +#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, Hash)] pub struct SecondarySourceLocation { pub file: Option, pub start: 
Option, From bf6e5f129fed384fe100b8abb33f228a94448c6f Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 2 Feb 2022 13:44:53 +0100 Subject: [PATCH 64/82] Update ethers-solc/src/artifacts.rs Co-authored-by: Georgios Konstantopoulos --- ethers-solc/src/artifacts.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index 6f6e22f4c..029452d49 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -22,7 +22,7 @@ use crate::{ use ethers_core::abi::Address; use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; -/// solidity files are up of multiple `source units`, a solidity contract is such a `source unit`, +/// Solidity files are made up of multiple `source units`, a solidity contract is such a `source unit`, /// therefore a solidity file can contain multiple contracts: (1-N*) relationship. /// /// This types represents this mapping as `file name -> (contract name -> T)`, where the generic is From 49b9942e9056670d057b49c30aa28d76adf5846d Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Mon, 31 Jan 2022 23:54:30 +0100 Subject: [PATCH 65/82] feat: find remappings by default --- ethers-solc/src/config.rs | 9 +++++++-- ethers-solc/tests/project.rs | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/ethers-solc/src/config.rs b/ethers-solc/src/config.rs index 559401f1b..0d749a954 100644 --- a/ethers-solc/src/config.rs +++ b/ethers-solc/src/config.rs @@ -414,6 +414,9 @@ impl ProjectPathsConfigBuilder { pub fn build_with_root(self, root: impl Into) -> ProjectPathsConfig { let root = canonicalized(root); + + let libraries = self.libraries.unwrap_or_else(|| ProjectPathsConfig::find_libs(&root)); + ProjectPathsConfig { cache: self .cache @@ -423,8 +426,10 @@ impl ProjectPathsConfigBuilder { .unwrap_or_else(|| ProjectPathsConfig::find_artifacts_dir(&root)), sources: self.sources.unwrap_or_else(|| 
ProjectPathsConfig::find_source_dir(&root)), tests: self.tests.unwrap_or_else(|| root.join("tests")), - libraries: self.libraries.unwrap_or_else(|| ProjectPathsConfig::find_libs(&root)), - remappings: self.remappings.unwrap_or_default(), + remappings: self.remappings.unwrap_or_else(|| { + libraries.iter().flat_map(|lib| Remapping::find_many(lib)).collect() + }), + libraries, root, } } diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index 72e4fb711..2c8a55119 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -42,7 +42,7 @@ fn can_compile_hardhat_sample() { } #[test] -fn can_compile_dapp_sample() { +fn can_compile_dapp_sample2() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); let project = TempProject::::new(paths).unwrap(); From 83a2f094a7fc7f8fad4f8a1c29ba61da48df01ba Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 2 Feb 2022 11:55:46 +0100 Subject: [PATCH 66/82] typos --- ethers-solc/src/artifacts.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index 029452d49..278946c15 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -1744,7 +1744,7 @@ pub struct SourceFile { pub struct SourceFiles(pub BTreeMap); impl SourceFiles { - /// Returns an iterator over the the source files' ids and path + /// Returns an iterator over the source files' ids and path /// /// ``` /// use std::collections::BTreeMap; From 0a871aee9872358511bd5ff687e1c8a484afb179 Mon Sep 17 00:00:00 2001 From: rakita Date: Tue, 1 Feb 2022 11:58:45 +0100 Subject: [PATCH 67/82] add eth_syncing RPC (#848) * add eth_syncing RPC * Changelo updated * small comments * Intermediate SyncingStatus --- CHANGELOG.md | 1 + ethers-providers/src/lib.rs | 13 +++++++++++++ ethers-providers/src/provider.rs | 28 
++++++++++++++++++++++++++-- 3 files changed, 40 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d8155570a..1ae2797ec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ ### Unreleased +- Add `eth_syncing` [848](https://github.com/gakonst/ethers-rs/pull/848) - Fix overflow and possible divide-by-zero in `estimate_priority_fee` - Add BSC mainnet and testnet to the list of known chains [831](https://github.com/gakonst/ethers-rs/pull/831) diff --git a/ethers-providers/src/lib.rs b/ethers-providers/src/lib.rs index 8d9323177..632d1a43a 100644 --- a/ethers-providers/src/lib.rs +++ b/ethers-providers/src/lib.rs @@ -79,6 +79,15 @@ where } } +/// Structure used in eth_syncing RPC +#[derive(Clone, Debug)] +pub enum SyncingStatus { + /// When client is synced to highest block, eth_syncing with return string "false" + IsFalse, + /// When client is still syncing past blocks we get IsSyncing information. + IsSyncing { starting_block: U256, current_block: U256, highest_block: U256 }, +} + /// A middleware allows customizing requests send and received from an ethereum node. 
/// /// Writing a middleware is as simple as: @@ -303,6 +312,10 @@ pub trait Middleware: Sync + Send + Debug { self.inner().call(tx, block).await.map_err(FromErr::from) } + async fn syncing(&self) -> Result { + self.inner().syncing().await.map_err(FromErr::from) + } + async fn get_chainid(&self) -> Result { self.inner().get_chainid().await.map_err(FromErr::from) } diff --git a/ethers-providers/src/provider.rs b/ethers-providers/src/provider.rs index d5a9b23c2..15eae5fae 100644 --- a/ethers-providers/src/provider.rs +++ b/ethers-providers/src/provider.rs @@ -3,7 +3,7 @@ use crate::{ pubsub::{PubsubClient, SubscriptionStream}, stream::{FilterWatcher, DEFAULT_POLL_INTERVAL}, FromErr, Http as HttpProvider, JsonRpcClient, JsonRpcClientWrapper, MockProvider, - PendingTransaction, QuorumProvider, + PendingTransaction, QuorumProvider, SyncingStatus, }; #[cfg(feature = "celo")] @@ -23,7 +23,7 @@ use ethers_core::{ utils, }; use hex::FromHex; -use serde::{de::DeserializeOwned, Serialize}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; use thiserror::Error; use url::{ParseError, Url}; @@ -497,6 +497,30 @@ impl Middleware for Provider

{ self.request("eth_chainId", ()).await } + /// Return current client syncing status. If IsFalse sync is over. + async fn syncing(&self) -> Result { + #[derive(Debug, Serialize, Deserialize)] + #[serde(untagged)] + pub enum SyncingStatusIntermediate { + /// When client is synced to highest block, eth_syncing with return string "false" + IsFalse(bool), + /// When client is still syncing past blocks we get IsSyncing information. + IsSyncing { starting_block: U256, current_block: U256, highest_block: U256 }, + } + let intermediate: SyncingStatusIntermediate = self.request("eth_syncing", ()).await?; + match intermediate { + SyncingStatusIntermediate::IsFalse(false) => Ok(SyncingStatus::IsFalse), + SyncingStatusIntermediate::IsFalse(true) => Err(ProviderError::CustomError( + "eth_syncing returned `true` that is undefined value.".to_owned(), + )), + SyncingStatusIntermediate::IsSyncing { + starting_block, + current_block, + highest_block, + } => Ok(SyncingStatus::IsSyncing { starting_block, current_block, highest_block }), + } + } + /// Returns the network version. 
async fn get_net_version(&self) -> Result { self.request("net_version", ()).await From 1a26010af87b8e08cd3d5d9b85fc68f4b15a8013 Mon Sep 17 00:00:00 2001 From: wolflo <33909953+wolflo@users.noreply.github.com> Date: Tue, 1 Feb 2022 03:59:54 -0700 Subject: [PATCH 68/82] fix(core): adjust Ganache for new cli output (#851) --- ethers-core/src/utils/ganache.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ethers-core/src/utils/ganache.rs b/ethers-core/src/utils/ganache.rs index 3c4fb1d1b..dfea10174 100644 --- a/ethers-core/src/utils/ganache.rs +++ b/ethers-core/src/utils/ganache.rs @@ -183,7 +183,7 @@ impl Ganache { let mut line = String::new(); reader.read_line(&mut line).expect("Failed to read line from ganache process"); - if line.starts_with("Listening on") { + if line.contains("Listening on") { break } From 5fe8d5a2ab2d5496d8a8dd2290d58344678c6434 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 2 Feb 2022 13:56:15 +0100 Subject: [PATCH 69/82] fix: review comments --- ethers-solc/src/artifacts.rs | 4 ++-- ethers-solc/src/compile/output.rs | 1 - ethers-solc/src/resolver.rs | 2 -- ethers-solc/src/utils.rs | 3 ++- ethers-solc/tests/project.rs | 17 +++++++++-------- 5 files changed, 13 insertions(+), 14 deletions(-) diff --git a/ethers-solc/src/artifacts.rs b/ethers-solc/src/artifacts.rs index 278946c15..ccbbc0a79 100644 --- a/ethers-solc/src/artifacts.rs +++ b/ethers-solc/src/artifacts.rs @@ -22,8 +22,8 @@ use crate::{ use ethers_core::abi::Address; use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; -/// Solidity files are made up of multiple `source units`, a solidity contract is such a `source unit`, -/// therefore a solidity file can contain multiple contracts: (1-N*) relationship. +/// Solidity files are made up of multiple `source units`, a solidity contract is such a `source +/// unit`, therefore a solidity file can contain multiple contracts: (1-N*) relationship. 
/// /// This types represents this mapping as `file name -> (contract name -> T)`, where the generic is /// intended to represent contract specific information, like [`Contract`] itself, See [`Contracts`] diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 85b65a0fb..f51334813 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -37,7 +37,6 @@ impl ProjectCompileOutput { /// let project = Project::builder().build().unwrap(); /// let contracts: BTreeMap = project.compile().unwrap().into_artifacts().collect(); /// ``` - // TODO add ArtifactId (filename, contract name, version?) pub fn into_artifacts(self) -> impl Iterator { let Self { cached_artifacts, written_artifacts, .. } = self; cached_artifacts.into_artifacts::().chain(written_artifacts.into_artifacts::()) diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index 92f289918..5fe605033 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -68,8 +68,6 @@ impl GraphEdges { } /// Returns all files imported by the given file - /// - /// *Note* this only returns the imports, the `file __excluded__ pub fn imports(&self, file: impl AsRef) -> HashSet<&PathBuf> { if let Some(start) = self.indices.get(file.as_ref()).copied() { NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect() diff --git a/ethers-solc/src/utils.rs b/ethers-solc/src/utils.rs index abc1304d0..02e2e2c25 100644 --- a/ethers-solc/src/utils.rs +++ b/ethers-solc/src/utils.rs @@ -262,7 +262,8 @@ pub(crate) fn read_json_file(path: impl AsRef) -> Res Ok(val) } -/// Creates the +/// Creates the parent directory of the `file` and all its ancestors if it does not exist +/// See [`std::fs::create_dir_all()`] pub fn create_parent_dir_all(file: impl AsRef) -> Result<(), SolcError> { let file = file.as_ref(); if let Some(parent) = file.parent() { diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index 
2c8a55119..aee3f358c 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -53,14 +53,15 @@ fn can_compile_dapp_sample2() { // nothing to compile let compiled = project.compile().unwrap(); - assert!(compiled.find("Dapp").is_some()); - assert!(compiled.is_unchanged()); - - // delete artifacts - std::fs::remove_dir_all(&project.paths().artifacts).unwrap(); - let compiled = project.compile().unwrap(); - assert!(compiled.find("Dapp").is_some()); - assert!(!compiled.is_unchanged()); + println!("{}", compiled); + // assert!(compiled.find("Dapp").is_some()); + // assert!(compiled.is_unchanged()); + // + // // delete artifacts + // std::fs::remove_dir_all(&project.paths().artifacts).unwrap(); + // let compiled = project.compile().unwrap(); + // assert!(compiled.find("Dapp").is_some()); + // assert!(!compiled.is_unchanged()); } #[test] From f3a5f58ec1f7582f32f8776ccce4e609b299de25 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 2 Feb 2022 15:07:11 +0100 Subject: [PATCH 70/82] fix: cache relative path bug --- Cargo.lock | 37 ++++++++++++++++++++++++++++++ ethers-solc/Cargo.toml | 2 ++ ethers-solc/src/cache.rs | 16 ++++++++----- ethers-solc/src/compile/project.rs | 24 +++++++++++++++++++ 4 files changed, 73 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e86997f96..025ecdb5b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1056,6 +1056,19 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "env_logger" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + [[package]] name = "eth-keystore" version = "0.3.0" @@ -1374,6 +1387,7 @@ dependencies = [ "colored", "criterion", "dunce", + "env_logger", "ethers-core", "fs_extra", "futures-util", @@ -1399,6 +1413,7 @@ dependencies = [ "tiny-keccak", "tokio", "tracing", + 
"tracing-subscriber", "walkdir", ] @@ -1783,6 +1798,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + [[package]] name = "hyper" version = "0.14.16" @@ -2046,6 +2067,15 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + [[package]] name = "matches" version = "0.1.9" @@ -2832,6 +2862,9 @@ name = "regex-automata" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] [[package]] name = "regex-syntax" @@ -3831,9 +3864,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5312f325fe3588e277415f5a6cca1f4ccad0f248c4cd5a4bd33032d7286abc22" dependencies = [ "ansi_term", + "lazy_static", + "matchers", + "regex", "sharded-slab", "smallvec", "thread_local", + "tracing", "tracing-core", "tracing-log", ] diff --git a/ethers-solc/Cargo.toml b/ethers-solc/Cargo.toml index c44573032..2d7ca476a 100644 --- a/ethers-solc/Cargo.toml +++ b/ethers-solc/Cargo.toml @@ -51,6 +51,8 @@ getrandom = { version = "0.2", features = ["js"] } [dev-dependencies] criterion = { version = "0.3", features = ["async_tokio"] } pretty_assertions = "1.0.0" +env_logger = "*" +tracing-subscriber = {version = "0.3", default-features = false, features = ["env-filter", "fmt"]} rand = "0.8.4" tempfile = "3.3.0" tokio = { version = "1.15.0", features = ["full"] } diff --git a/ethers-solc/src/cache.rs 
b/ethers-solc/src/cache.rs index 10f33def5..1b9a40aa5 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -24,7 +24,7 @@ use std::{ /// `ethers-solc` uses a different format version id, but the actual format is consistent with /// hardhat This allows ethers-solc to detect if the cache file was written by hardhat or /// `ethers-solc` -const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-1"; +const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-2"; /// The file name of the default cache file pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json"; @@ -354,15 +354,15 @@ impl CacheEntry { /// Sets the artifact's paths to `base` adjoined to the artifact's `path`. pub fn join(&mut self, base: impl AsRef) { let base = base.as_ref(); - self.artifacts_mut().for_each(|p| *p = p.join(base)) + self.artifacts_mut().for_each(|p| *p = base.join(&*p)) } /// Removes `base` from the artifact's path pub fn strip_prefix(&mut self, base: impl AsRef) { let base = base.as_ref(); self.artifacts_mut().for_each(|p| { - if let Ok(prefix) = p.strip_prefix(base) { - *p = prefix.to_path_buf(); + if let Ok(rem) = p.strip_prefix(base) { + *p = rem.to_path_buf(); } }) } @@ -507,8 +507,11 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { } if entry.artifacts_for_version(version).any(|artifact_path| { - // artifact does not exist - !self.cached_artifacts.has_artifact(artifact_path) + let missing_artifact = !self.cached_artifacts.has_artifact(artifact_path); + if missing_artifact { + tracing::trace!("missing artifact \"{}\"", artifact_path.display()); + } + missing_artifact }) { return true } @@ -656,6 +659,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // add the new cache entries to the cache file cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry))); + cache.strip_prefix_all(project.artifacts_path()); // write to disk cache.write(project.cache_path())?; diff --git a/ethers-solc/src/compile/project.rs 
b/ethers-solc/src/compile/project.rs index 21a9b0477..93be83031 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -437,8 +437,15 @@ fn compile_parallel( #[cfg(feature = "project-util")] mod tests { use super::*; + use crate::{cache::SolFilesCache, project_util::TempProject, MinimalCombinedArtifacts}; use std::path::PathBuf; + fn init_tracing() { + tracing_subscriber::fmt() + .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) + .init(); + } + #[test] fn can_preprocess() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); @@ -457,4 +464,21 @@ mod tests { let compiled = prep.compile().unwrap(); assert_eq!(compiled.output.contracts.files().count(), 3); } + + #[test] + fn can_detect_cached_files() { + init_tracing(); + + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); + let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); + let project = TempProject::::new(paths).unwrap(); + + let compiled = project.compile().unwrap(); + assert!(!compiled.has_compiler_errors()); + + let inner = project.project(); + let compiler = ProjectCompiler::new(inner).unwrap(); + let prep = compiler.preprocess().unwrap(); + assert!(prep.cache.as_cached().unwrap().dirty_entries.is_empty()) + } } From 1ca1173a5c81c1a0daa746d09135b1fd1f2b6dd4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 2 Feb 2022 15:09:35 +0100 Subject: [PATCH 71/82] chore: add cache example --- .../test-data/solidity-files-cache.json | 110 ++++++++++++++++++ 1 file changed, 110 insertions(+) create mode 100644 ethers-solc/test-data/solidity-files-cache.json diff --git a/ethers-solc/test-data/solidity-files-cache.json b/ethers-solc/test-data/solidity-files-cache.json new file mode 100644 index 000000000..0d5ca6727 --- /dev/null +++ b/ethers-solc/test-data/solidity-files-cache.json @@ -0,0 +1,110 @@ +{ + "_format": "ethers-rs-sol-cache-2", + "files": { + 
"/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol": { + "lastModificationDate": 1638218667720, + "contentHash": "5d45a46528eaf8a26f0a8d93669f3148", + "sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol", + "solcConfig": { + "settings": { + "optimizer": { + "enabled": false, + "runs": 200 + }, + "outputSelection": { + "*": { + "": [ + "ast" + ], + "*": [ + "abi", + "evm.bytecode", + "evm.deployedBytecode", + "evm.methodIdentifiers" + ] + } + }, + "evmVersion": "london" + } + }, + "imports": [], + "versionRequirement": ">=0.4.23", + "artifacts": { + "DSTest": { + "0.8.11+commit.d7f03943.Darwin.appleclang": "test.sol/DSTest.json" + } + } + }, + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol": { + "lastModificationDate": 1638193396942, + "contentHash": "a41ddb3b99ae6b72b59341eabf948542", + "sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol", + "solcConfig": { + "settings": { + "optimizer": { + "enabled": false, + "runs": 200 + }, + "outputSelection": { + "*": { + "": [ + "ast" + ], + "*": [ + "abi", + "evm.bytecode", + "evm.deployedBytecode", + "evm.methodIdentifiers" + ] + } + }, + "evmVersion": "london" + } + }, + "imports": [], + "versionRequirement": ">=0.6.6", + "artifacts": { + "Dapp": { + "0.8.11+commit.d7f03943.Darwin.appleclang": "Dapp.sol/Dapp.json" + } + } + }, + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.t.sol": { + "lastModificationDate": 1638193396942, + "contentHash": "5f5038d89f69269d0734659efaa2ec52", + "sourceName": "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.t.sol", + "solcConfig": { + "settings": { + "optimizer": { + "enabled": false, + "runs": 200 + }, + "outputSelection": { + "*": { + "": [ + "ast" + ], + "*": [ + "abi", + "evm.bytecode", + "evm.deployedBytecode", + "evm.methodIdentifiers" + ] + } 
+ }, + "evmVersion": "london" + } + }, + "imports": [ + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/lib/ds-test/src/test.sol", + "/Users/Matthias/git/rust/ethers-rs/ethers-solc/test-data/dapp-sample/src/Dapp.sol" + ], + "versionRequirement": ">=0.6.6", + "artifacts": { + "DappTest": { + "0.8.11+commit.d7f03943.Darwin.appleclang": "Dapp.t.sol/DappTest.json" + } + } + } + } +} \ No newline at end of file From a524c7c716e24733fcf40db93ca8ee5cc7c37b7c Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 2 Feb 2022 16:39:16 +0100 Subject: [PATCH 72/82] chore: use absolute paths --- ethers-solc/src/cache.rs | 18 +++++++-- ethers-solc/src/compile/output.rs | 8 +++- ethers-solc/src/compile/project.rs | 2 +- ethers-solc/src/project_util.rs | 15 +++++++ ethers-solc/tests/project.rs | 63 ++++++++++++++++++------------ 5 files changed, 74 insertions(+), 32 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 1b9a40aa5..7a0aa4efc 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -91,6 +91,7 @@ impl SolFilesCache { /// Write the cache as json file to the given path pub fn write(&self, path: impl AsRef) -> Result<()> { let path = path.as_ref(); + utils::create_parent_dir_all(path)?; let file = fs::File::create(path).map_err(|err| SolcError::io(err, path))?; tracing::trace!("writing cache to json file: \"{}\"", path.display()); serde_json::to_writer_pretty(file, self)?; @@ -115,7 +116,13 @@ impl SolFilesCache { /// Removes all `CacheEntry` which source files are missing pub fn remove_missing_files(&mut self) { tracing::trace!("remove non existing files from cache"); - self.files.retain(|file, _| file.exists()) + self.files.retain(|file, _| { + let exists = file.exists(); + if !exists { + tracing::trace!("remove {} from cache", file.display()); + } + exists + }) } /// Checks if all artifact files exist @@ -470,6 +477,9 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { source: Source, 
version: &Version, ) -> Option<(PathBuf, Source)> { + dbg!(self.is_dirty(&file, version)); + dbg!(file.clone()); + if !self.is_dirty(&file, version) && self.edges.imports(&file).iter().all(|file| !self.is_dirty(file, version)) { @@ -484,8 +494,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { /// returns `false` if the corresponding cache entry remained unchanged otherwise `true` fn is_dirty(&self, file: &Path, version: &Version) -> bool { if let Some(hash) = self.content_hashes.get(file) { - let cache_path = utils::source_name(file, self.project.root()); - if let Some(entry) = self.cache.entry(&cache_path) { + if let Some(entry) = self.cache.entry(&file) { if entry.content_hash.as_bytes() != hash.as_bytes() { tracing::trace!( "changed content hash for cached artifact \"{}\"", @@ -518,6 +527,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { // all things match, can be reused return false } + tracing::trace!("Missing cache entry for {}", file.display()); } true } @@ -547,7 +557,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { let cache = if project.cached { // read the cache file if it already exists let cache = if project.cache_path().exists() { - let mut cache = SolFilesCache::read(project.cache_path())?; + let mut cache = SolFilesCache::read(project.cache_path()).unwrap_or_default(); cache.join_all(project.artifacts_path()).remove_missing_files(); cache } else { diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index f51334813..c64436016 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -63,7 +63,7 @@ impl ProjectCompileOutput { /// Whether this type does not contain compiled contracts pub fn is_unchanged(&self) -> bool { - !self.has_compiled_contracts() + self.compiler_output.is_unchanged() } /// Whether there were errors @@ -102,7 +102,7 @@ where impl fmt::Display for ProjectCompileOutput { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if 
self.compiler_output.contracts.is_empty() && self.compiler_output.errors.is_empty() { + if self.compiler_output.is_unchanged() { f.write_str("Nothing to compile") } else { self.compiler_output.diagnostics(&self.ignored_error_codes).fmt(f) @@ -148,6 +148,10 @@ impl AggregatedCompilerOutput { self.contracts.is_empty() } + pub fn is_unchanged(&self) -> bool { + self.contracts.is_empty() && self.errors.is_empty() + } + pub fn extend_all(&mut self, out: I) where I: IntoIterator, diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 93be83031..b71866bc4 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -437,7 +437,7 @@ fn compile_parallel( #[cfg(feature = "project-util")] mod tests { use super::*; - use crate::{cache::SolFilesCache, project_util::TempProject, MinimalCombinedArtifacts}; + use crate::{project_util::TempProject, MinimalCombinedArtifacts}; use std::path::PathBuf; fn init_tracing() { diff --git a/ethers-solc/src/project_util.rs b/ethers-solc/src/project_util.rs index aee3f2e86..f09ba4324 100644 --- a/ethers-solc/src/project_util.rs +++ b/ethers-solc/src/project_util.rs @@ -77,6 +77,21 @@ impl TempProject { &mut self.project_mut().paths } + /// Returns the path to the artifacts directory + pub fn artifacts_path(&self) -> &PathBuf { + &self.paths().artifacts + } + + /// Returns the path to the sources directory + pub fn sources_path(&self) -> &PathBuf { + &self.paths().sources + } + + /// Returns the path to the cache file + pub fn cache_path(&self) -> &PathBuf { + &self.paths().cache + } + /// The root path of the temporary workspace pub fn root(&self) -> &Path { self.project().paths.root.as_path() diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index aee3f358c..02c1cddec 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -1,7 +1,7 @@ //! 
project tests use std::{ - collections::HashMap, + collections::{HashMap, HashSet}, io, path::{Path, PathBuf}, str::FromStr, @@ -13,6 +13,14 @@ use ethers_solc::{ remappings::Remapping, Graph, MinimalCombinedArtifacts, Project, ProjectPathsConfig, }; +use pretty_assertions::assert_eq; + +#[allow(unused)] +fn init_tracing() { + tracing_subscriber::fmt() + .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) + .init(); +} #[test] fn can_compile_hardhat_sample() { @@ -42,7 +50,7 @@ fn can_compile_hardhat_sample() { } #[test] -fn can_compile_dapp_sample2() { +fn can_compile_dapp_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); let project = TempProject::::new(paths).unwrap(); @@ -53,15 +61,19 @@ fn can_compile_dapp_sample2() { // nothing to compile let compiled = project.compile().unwrap(); - println!("{}", compiled); - // assert!(compiled.find("Dapp").is_some()); - // assert!(compiled.is_unchanged()); - // - // // delete artifacts - // std::fs::remove_dir_all(&project.paths().artifacts).unwrap(); - // let compiled = project.compile().unwrap(); - // assert!(compiled.find("Dapp").is_some()); - // assert!(!compiled.is_unchanged()); + assert!(compiled.find("Dapp").is_some()); + assert!(compiled.is_unchanged()); + + let cache = SolFilesCache::read(project.cache_path()).unwrap(); + + // delete artifacts + std::fs::remove_dir_all(&project.paths().artifacts).unwrap(); + let compiled = project.compile().unwrap(); + assert!(compiled.find("Dapp").is_some()); + assert!(!compiled.is_unchanged()); + + let updated_cache = SolFilesCache::read(project.cache_path()).unwrap(); + assert_eq!(cache, updated_cache); } #[test] @@ -140,6 +152,7 @@ fn can_compile_dapp_detect_changes_in_libs() { #[test] fn can_compile_dapp_detect_changes_in_sources() { + init_tracing(); let project = TempProject::::dapptools().unwrap(); let src = project @@ 
-267,26 +280,26 @@ fn can_compile_dapp_sample_with_cache() { assert!(compiled.find("NewContract").is_some()); assert!(!compiled.is_unchanged()); assert_eq!( - compiled.into_artifacts().map(|(name, _)| name).collect::>(), - vec![ - "Dapp.json:Dapp", - "DappTest.json:DappTest", - "DSTest.json:DSTest", - "NewContract.json:NewContract" - ] + compiled.into_artifacts().map(|(name, _)| name).collect::>(), + HashSet::from([ + "Dapp.json:Dapp".to_string(), + "DappTest.json:DappTest".to_string(), + "DSTest.json:DSTest".to_string(), + "NewContract.json:NewContract".to_string(), + ]) ); // old cached artifact is not taken from the cache std::fs::copy(cache_testdata_dir.join("Dapp.sol"), root.join("src/Dapp.sol")).unwrap(); let compiled = project.compile().unwrap(); assert_eq!( - compiled.into_artifacts().map(|(name, _)| name).collect::>(), - vec![ - "DappTest.json:DappTest", - "NewContract.json:NewContract", - "DSTest.json:DSTest", - "Dapp.json:Dapp" - ] + compiled.into_artifacts().map(|(name, _)| name).collect::>(), + HashSet::from([ + "DappTest.json:DappTest".to_string(), + "NewContract.json:NewContract".to_string(), + "DSTest.json:DSTest".to_string(), + "Dapp.json:Dapp".to_string(), + ]) ); // deleted artifact is not taken from the cache From 0fa69c24395f13a6f8732b64533d721cb26845bd Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 2 Feb 2022 18:01:32 +0100 Subject: [PATCH 73/82] fix: remove overwritten files from cache --- ethers-solc/src/cache.rs | 44 ++++++++++++++++++++----------- ethers-solc/src/compile/output.rs | 4 +-- ethers-solc/tests/project.rs | 1 + 3 files changed, 31 insertions(+), 18 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 7a0aa4efc..7542e4306 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -168,6 +168,8 @@ impl SolFilesCache { self.files.retain(|file, entry| { if let Some(versions) = files.remove(file.as_path()) { entry.retain_versions(versions); + } else { + return false } 
!entry.artifacts.is_empty() }); @@ -405,7 +407,7 @@ pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput> { impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { /// Creates a new cache entry for the file - fn create_cache_entry(&self, file: &Path, source: &Source) -> Result { + fn create_cache_entry(&self, file: &Path, source: &Source) -> CacheEntry { let imports = self .edges .imports(file) @@ -414,7 +416,8 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { .collect(); let entry = CacheEntry { - last_modification_date: CacheEntry::read_last_modification_date(&file).unwrap(), + last_modification_date: CacheEntry::read_last_modification_date(&file) + .unwrap_or_default(), content_hash: source.content_hash(), source_name: utils::source_name(file, self.project.root()).into(), solc_config: self.project.solc_config.clone(), @@ -424,25 +427,19 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { artifacts: Default::default(), }; - Ok(entry) + entry } /// inserts a new cache entry for the given file /// /// If there is already an entry available for the file the given version is added to the set - fn insert_new_cache_entry( - &mut self, - file: &Path, - source: &Source, - version: Version, - ) -> Result<()> { + fn insert_new_cache_entry(&mut self, file: &Path, source: &Source, version: Version) { if let Some((_, versions)) = self.dirty_entries.get_mut(file) { versions.insert(version); } else { - let entry = self.create_cache_entry(file, source)?; + let entry = self.create_cache_entry(file, source); self.dirty_entries.insert(file.to_path_buf(), (entry, HashSet::from([version]))); } - Ok(()) } /// inserts the filtered source with the fiven version @@ -477,16 +474,14 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> { source: Source, version: &Version, ) -> Option<(PathBuf, Source)> { - dbg!(self.is_dirty(&file, version)); - dbg!(file.clone()); - if !self.is_dirty(&file, version) && self.edges.imports(&file).iter().all(|file| 
!self.is_dirty(file, version)) { self.insert_filtered_source(file, source, version.clone()); None } else { - self.insert_new_cache_entry(&file, &source, version.clone()).unwrap(); + self.insert_new_cache_entry(&file, &source, version.clone()); + Some((file, source)) } } @@ -638,7 +633,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { ArtifactsCache::Cached(cache) => { let ArtifactsCacheInner { mut cache, - cached_artifacts, + mut cached_artifacts, mut dirty_entries, filtered, project, @@ -664,6 +659,23 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { (name, artifacts) })); } + + // cached artifacts that were overwritten also need to be removed from the + // `cached_artifacts` set + if let Some((f, mut cached)) = cached_artifacts.0.remove_entry(file) { + cached.retain(|name, files| { + if let Some(written_files) = artifacts.get(name) { + files.retain(|f| { + written_files.iter().all(|other| other.version != f.version) + }); + return !files.is_empty() + } + false + }); + if !cached.is_empty() { + cached_artifacts.0.insert(f, cached); + } + } } // add the new cache entries to the cache file diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index c64436016..289b8d23b 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -17,9 +17,9 @@ pub struct ProjectCompileOutput { /// See [`CompilerSources::compile`] pub(crate) compiler_output: AggregatedCompilerOutput, /// all artifact files from `output` that were written - pub(crate) written_artifacts: Artifacts, + pub written_artifacts: Artifacts, /// All artifacts that were read from cache - pub(crate) cached_artifacts: Artifacts, + pub cached_artifacts: Artifacts, /// errors that should be omitted pub(crate) ignored_error_codes: Vec, } diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index 02c1cddec..369d08bdd 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -228,6 +228,7 @@ fn 
can_compile_dapp_detect_changes_in_sources() { assert!(compiled.find("DssSpellTestBase").is_some()); // ensure change is detected assert!(!compiled.is_unchanged()); + // and all recompiled artifacts are different for (p, artifact) in compiled.into_artifacts() { let other = artifacts.remove(&p).unwrap(); From e2d9d05e15b93917212578b138117f640cc93e40 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Wed, 2 Feb 2022 18:37:37 +0100 Subject: [PATCH 74/82] fix: rustfmt --- ethers-solc/src/cache.rs | 4 ++-- ethers-solc/src/compile/mod.rs | 2 +- ethers-solc/src/compile/output.rs | 4 ++-- ethers-solc/src/compile/project.rs | 5 ++--- ethers-solc/src/config.rs | 6 +++--- ethers-solc/src/lib.rs | 4 +--- ethers-solc/tests/project.rs | 1 - 7 files changed, 11 insertions(+), 15 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 7542e4306..8b1c59910 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -12,7 +12,7 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ collections::{ btree_map::{BTreeMap, Entry}, - hash_map, HashMap, HashSet, + hash_map, BTreeSet, HashMap, HashSet, }, fs::{self}, path::{Path, PathBuf}, @@ -236,7 +236,7 @@ pub struct CacheEntry { /// fully resolved imports of the file /// /// all paths start relative from the project's root: `src/importedFile.sol` - pub imports: Vec, + pub imports: BTreeSet, /// The solidity version pragma pub version_requirement: Option, /// all artifacts produced for this file diff --git a/ethers-solc/src/compile/mod.rs b/ethers-solc/src/compile/mod.rs index 6171d2045..a26f0fa7b 100644 --- a/ethers-solc/src/compile/mod.rs +++ b/ethers-solc/src/compile/mod.rs @@ -501,7 +501,7 @@ impl Solc { .stdout(Stdio::piped()) .spawn() .map_err(|err| SolcError::io(err, &self.solc))?; - let stdin = child.stdin.take().unwrap(); + let stdin = child.stdin.take().expect("Stdin exists."); serde_json::to_writer(stdin, input)?; compile_output(child.wait_with_output().map_err(|err| 
SolcError::io(err, &self.solc))?) } diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 289b8d23b..c64436016 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -17,9 +17,9 @@ pub struct ProjectCompileOutput { /// See [`CompilerSources::compile`] pub(crate) compiler_output: AggregatedCompilerOutput, /// all artifact files from `output` that were written - pub written_artifacts: Artifacts, + pub(crate) written_artifacts: Artifacts, /// All artifacts that were read from cache - pub cached_artifacts: Artifacts, + pub(crate) cached_artifacts: Artifacts, /// errors that should be omitted pub(crate) ignored_error_codes: Vec, } diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index b71866bc4..bc21b3f9d 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -124,7 +124,6 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { pub fn with_sources(project: &'a Project, sources: Sources) -> Result { let graph = Graph::resolve_sources(&project.paths, sources)?; let (versions, edges) = graph.into_sources_by_version(project.offline)?; - let sources_by_version = versions.get(&project.allowed_lib_paths)?; let sources = if project.solc_jobs > 1 && sources_by_version.len() > 1 { @@ -366,6 +365,7 @@ fn compile_sequential( paths: &ProjectPathsConfig, ) -> Result { let mut aggregated = AggregatedCompilerOutput::default(); + tracing::trace!("compiling {} jobs sequentially", input.len()); for (solc, (version, sources)) in input { if sources.is_empty() { // nothing to compile @@ -440,6 +440,7 @@ mod tests { use crate::{project_util::TempProject, MinimalCombinedArtifacts}; use std::path::PathBuf; + #[allow(unused)] fn init_tracing() { tracing_subscriber::fmt() .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) @@ -467,8 +468,6 @@ mod tests { #[test] fn can_detect_cached_files() { - init_tracing(); - let root = 
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); let project = TempProject::::new(paths).unwrap(); diff --git a/ethers-solc/src/config.rs b/ethers-solc/src/config.rs index 0d749a954..a6f2b14ec 100644 --- a/ethers-solc/src/config.rs +++ b/ethers-solc/src/config.rs @@ -426,9 +426,9 @@ impl ProjectPathsConfigBuilder { .unwrap_or_else(|| ProjectPathsConfig::find_artifacts_dir(&root)), sources: self.sources.unwrap_or_else(|| ProjectPathsConfig::find_source_dir(&root)), tests: self.tests.unwrap_or_else(|| root.join("tests")), - remappings: self.remappings.unwrap_or_else(|| { - libraries.iter().flat_map(|lib| Remapping::find_many(lib)).collect() - }), + remappings: self + .remappings + .unwrap_or_else(|| libraries.iter().flat_map(Remapping::find_many).collect()), libraries, root, } diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 299b49325..3a4578837 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -1,5 +1,3 @@ -#![doc = include_str ! 
("../README.md")] - pub mod artifacts; pub mod sourcemap; @@ -122,7 +120,7 @@ impl Project { /// Applies the configured settings to the given `Solc` fn configure_solc(&self, mut solc: Solc) -> Solc { - if !self.allowed_lib_paths.0.is_empty() { + if self.allowed_lib_paths.0.is_empty() { solc = solc.arg("--allow-paths").arg(self.allowed_lib_paths.to_string()); } solc diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index 369d08bdd..09ba9aa17 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -85,7 +85,6 @@ fn can_compile_dapp_detect_changes_in_libs() { .paths_mut() .remappings .push(Remapping::from_str(&format!("remapping={}/", remapping.display())).unwrap()); - project.project_mut().auto_detect = false; let src = project .add_source( From ff9599caf13a0c132a3542697abf8bf0ec89622a Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 3 Feb 2022 01:49:30 +0100 Subject: [PATCH 75/82] chore: more helper functions --- ethers-solc/src/artifact_output.rs | 14 ++++ ethers-solc/src/cache.rs | 96 ++++++++++++++++++++++++++-- ethers-solc/src/compile/contracts.rs | 9 +++ ethers-solc/src/error.rs | 3 + ethers-solc/src/utils.rs | 2 +- 5 files changed, 117 insertions(+), 7 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index 5512d628a..ed6d84998 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -197,6 +197,20 @@ pub trait Artifact { /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode fn into_parts(self) -> (Option, Option, Option); + + /// Same as [`Self::into_parts()`] but returns `Err` if an element is `None` + fn try_into_parts(self) -> Result<(Abi, Bytes, Bytes)> + where + Self: Sized, + { + let (abi, bytecode, deployed_bytecode) = self.into_parts(); + + Ok(( + abi.ok_or_else(|| SolcError::msg("abi missing"))?, + bytecode.ok_or_else(|| SolcError::msg("bytecode missing"))?, + 
deployed_bytecode.ok_or_else(|| SolcError::msg("deployed bytecode missing"))?, + )) + } } impl Artifact for T diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 8b1c59910..5f2f03637 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -5,7 +5,8 @@ use crate::{ contracts::VersionedContracts, error::{Result, SolcError}, resolver::GraphEdges, - utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Project, Source, + utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Project, ProjectPathsConfig, + Source, }; use semver::Version; use serde::{de::DeserializeOwned, Deserialize, Serialize}; @@ -63,6 +64,8 @@ impl SolFilesCache { /// Reads the cache json file from the given path /// + /// See also [`Self::read_joined()`] + /// /// # Errors /// /// If the cache file does not exist @@ -88,6 +91,26 @@ impl SolFilesCache { Ok(cache) } + /// Reads the cache json file from the given path and returns the cache with modified paths + /// + /// + /// # Example + /// + /// ``` + /// # fn t() { + /// use ethers_solc::cache::SolFilesCache; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let cache = SolFilesCache::read_joined(&project.paths).unwrap(); + /// # } + /// ``` + pub fn read_joined(paths: &ProjectPathsConfig) -> Result { + let mut cache = SolFilesCache::read(&paths.cache)?; + cache.join_all(&paths.artifacts); + Ok(cache) + } + /// Write the cache as json file to the given path pub fn write(&self, path: impl AsRef) -> Result<()> { let path = path.as_ref(); @@ -130,6 +153,59 @@ impl SolFilesCache { self.files.values().all(|entry| entry.all_artifacts_exist()) } + /// Returns the path to the artifact of the given `(file, contract)` pair + /// + /// # Example + /// + /// ``` + /// # fn t() { + /// use ethers_solc::cache::SolFilesCache; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let cache = 
SolFilesCache::read_joined(&project.paths).unwrap(); + /// cache.find_artifact_path("/.../src/Greeter.sol", "Greeter"); + /// # } + /// ``` + pub fn find_artifact_path( + &self, + contract_file: impl AsRef, + contract_name: impl AsRef, + ) -> Option<&PathBuf> { + let entry = self.entry(contract_file)?; + entry.find_artifact_path(contract_name) + } + + /// Finds the path to the artifact of the given `(file, contract)` pair, see + /// [`Self::find_artifact_path()`], and reads the artifact as json file + /// # Example + /// + /// ``` + /// # fn t() { + /// use ethers_solc::cache::SolFilesCache; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let cache = SolFilesCache::read_joined(&project.paths).unwrap(); + /// cache.read_artifact("/.../src/Greeter.sol", "Greeter"); + /// # } + /// ``` + pub fn read_artifact( + &self, + contract_file: impl AsRef, + contract_name: impl AsRef, + ) -> Result { + let contract_file = contract_file.as_ref(); + let contract_name = contract_name.as_ref(); + + let artifact_path = + self.find_artifact_path(contract_file, contract_name).ok_or_else(|| { + SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string()) + })?; + + utils::read_json_file(artifact_path) + } + /// Reads all cached artifacts from disk using the given ArtifactOutput handler /// /// # Example @@ -140,8 +216,7 @@ impl SolFilesCache { /// use ethers_solc::artifacts::CompactContractBytecode; /// # fn t() { /// let project = Project::builder().build().unwrap(); - /// let mut cache = SolFilesCache::read(project.cache_path()).unwrap(); - /// cache.join_all(project.artifacts_path()); + /// let cache = SolFilesCache::read_joined(&project.paths).unwrap(); /// let artifacts = cache.read_artifacts::().unwrap(); /// # } /// ``` @@ -257,6 +332,17 @@ impl CacheEntry { Duration::from_millis(self.last_modification_date) } + /// Returns the artifact path for the contract name + /// ``` + /// use 
ethers_solc::cache::CacheEntry; + /// # fn t(entry: CacheEntry) { + /// entry.find_artifact_path("Greeter"); + /// # } + /// ``` + pub fn find_artifact_path(&self, contract_name: impl AsRef) -> Option<&PathBuf> { + self.artifacts.get(contract_name.as_ref())?.iter().next().map(|(_, p)| p) + } + /// Reads the last modification date from the file's metadata pub fn read_last_modification_date(file: impl AsRef) -> Result { let file = file.as_ref(); @@ -552,9 +638,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { let cache = if project.cached { // read the cache file if it already exists let cache = if project.cache_path().exists() { - let mut cache = SolFilesCache::read(project.cache_path()).unwrap_or_default(); - cache.join_all(project.artifacts_path()).remove_missing_files(); - cache + SolFilesCache::read_joined(&project.paths).unwrap_or_default() } else { SolFilesCache::default() }; diff --git a/ethers-solc/src/compile/contracts.rs b/ethers-solc/src/compile/contracts.rs index 9380414bf..47e6db53b 100644 --- a/ethers-solc/src/compile/contracts.rs +++ b/ethers-solc/src/compile/contracts.rs @@ -87,6 +87,15 @@ impl VersionedContracts { .flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract)))) } + /// Returns an iterator over (`file`, `name`, `Contract`) + pub fn contracts_with_files(&self) -> impl Iterator { + self.0.iter().flat_map(|(file, contracts)| { + contracts + .iter() + .flat_map(move |(name, c)| c.iter().map(move |c| (file, name, &c.contract))) + }) + } + /// Returns an iterator over all contracts and their source names. 
/// /// ``` diff --git a/ethers-solc/src/error.rs b/ethers-solc/src/error.rs index 72f8d940a..2180aa9ae 100644 --- a/ethers-solc/src/error.rs +++ b/ethers-solc/src/error.rs @@ -34,6 +34,9 @@ pub enum SolcError { #[error("{0}")] Message(String), + #[error("No artifact found for `{}:{}`", .0.display(), .1)] + ArtifactNotFound(PathBuf, String), + #[cfg(feature = "project-util")] #[error(transparent)] FsExtra(#[from] fs_extra::error::Error), diff --git a/ethers-solc/src/utils.rs b/ethers-solc/src/utils.rs index 02e2e2c25..43a52cdca 100644 --- a/ethers-solc/src/utils.rs +++ b/ethers-solc/src/utils.rs @@ -254,7 +254,7 @@ pub(crate) fn tempdir(name: &str) -> Result { } /// Reads the json file and deserialize it into the provided type -pub(crate) fn read_json_file(path: impl AsRef) -> Result { +pub fn read_json_file(path: impl AsRef) -> Result { let path = path.as_ref(); let file = std::fs::File::open(path).map_err(|err| SolcError::io(err, path))?; let file = std::io::BufReader::new(file); From a73fe91201e1a8f3129088eaac8d68be9594d8ef Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 3 Feb 2022 02:02:53 +0100 Subject: [PATCH 76/82] chore: export AggregatedOutput --- ethers-solc/src/lib.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 3a4578837..33898fc8c 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -13,7 +13,10 @@ pub use hh::{HardhatArtifact, HardhatArtifacts}; pub use resolver::Graph; mod compile; -pub use compile::{output::ProjectCompileOutput, *}; +pub use compile::{ + output::{AggregatedCompilerOutput, ProjectCompileOutput}, + *, +}; mod config; pub use config::{AllowedLibPaths, PathStyle, ProjectPathsConfig, SolcConfig}; From 7b3bb577d08800c6497b0ab2ab9e5167a99d51e6 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 3 Feb 2022 13:06:44 +0100 Subject: [PATCH 77/82] feat: implement helper functions --- ethers-solc/src/artifact_output.rs | 63 
++++++++++++++++++++++++-- ethers-solc/src/cache.rs | 2 +- ethers-solc/src/compile/output.rs | 71 +++++++++++++++++++++++++++--- ethers-solc/src/compile/project.rs | 12 ++--- ethers-solc/tests/project.rs | 9 ++-- 5 files changed, 137 insertions(+), 20 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index ed6d84998..c2f7d07c7 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -57,6 +57,32 @@ pub(crate) type ArtifactsMap = FileToContractsMap>>; #[derive(Debug, Clone, PartialEq)] pub struct Artifacts(pub ArtifactsMap); +impl From> for Artifacts { + fn from(m: ArtifactsMap) -> Self { + Self(m) + } +} + +impl<'a, T> IntoIterator for &'a Artifacts { + type Item = (&'a String, &'a BTreeMap>>); + type IntoIter = + std::collections::btree_map::Iter<'a, String, BTreeMap>>>; + + fn into_iter(self) -> Self::IntoIter { + (&self.0).into_iter() + } +} + +impl IntoIterator for Artifacts { + type Item = (String, BTreeMap>>); + type IntoIter = + std::collections::btree_map::IntoIter>>>; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + impl Default for Artifacts { fn default() -> Self { Self(Default::default()) @@ -152,6 +178,40 @@ impl Artifacts { }) } + /// Returns an iterator that yields the tuple `(file, contract name, artifact)` + /// + /// **NOTE** this returns the path as is + pub fn into_artifacts_with_files(self) -> impl Iterator { + self.0.into_iter().flat_map(|(f, contract_artifacts)| { + contract_artifacts.into_iter().flat_map(move |(name, artifacts)| { + let contract_name = name.clone(); + let file = f.clone(); + artifacts + .into_iter() + .map(move |artifact| (file.clone(), contract_name.clone(), artifact.artifact)) + }) + }) + } + /// Strips the given prefix from all artifact file paths to make them relative to the given + /// `root` argument + pub fn into_stripped_file_prefixes(self, base: impl AsRef) -> Self { + let base = base.as_ref(); + let artifacts 
= self + .0 + .into_iter() + .map(|(file, c)| { + let file_path = Path::new(&file); + if let Ok(p) = file_path.strip_prefix(base) { + (p.to_string_lossy().to_string(), c) + } else { + (file, c) + } + }) + .collect(); + + Artifacts(artifacts) + } + /// Finds the first artifact `T` with a matching contract name pub fn find(&self, contract_name: impl AsRef) -> Option<&T> { let contract_name = contract_name.as_ref(); @@ -181,9 +241,6 @@ impl Artifacts { } } -// /// Bundled Artifacts: `file -> (contract name -> (Artifact, Version))` -// pub type Artifacts = FileToContractsMap>; - /// A trait representation for a [`crate::Contract`] artifact pub trait Artifact { /// Returns the artifact's `Abi` and bytecode diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 5f2f03637..aacc06e68 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -705,7 +705,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { /// Consumes the `Cache`, rebuilds the [`SolFileCache`] by merging all artifacts that were /// filtered out in the previous step (`Cache::filtered`) and the artifacts that were just - /// written to disk `written_artifacts`. + /// compiled and written to disk `written_artifacts`. /// /// Returns all the _cached_ artifacts. pub fn write_cache( diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index c64436016..6f9500624 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -6,7 +6,7 @@ use crate::{ ArtifactOutput, Artifacts, CompilerOutput, }; use semver::Version; -use std::{collections::BTreeMap, fmt}; +use std::{collections::BTreeMap, fmt, path::Path}; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. 
@@ -16,8 +16,8 @@ pub struct ProjectCompileOutput { /// /// See [`CompilerSources::compile`] pub(crate) compiler_output: AggregatedCompilerOutput, - /// all artifact files from `output` that were written - pub(crate) written_artifacts: Artifacts, + /// all artifact files from `output` that were freshly compiled and written + pub(crate) compiled_artifacts: Artifacts, /// All artifacts that were read from cache pub(crate) cached_artifacts: Artifacts, /// errors that should be omitted @@ -27,6 +27,8 @@ pub struct ProjectCompileOutput { impl ProjectCompileOutput { /// All artifacts together with their contract file name and name `:` /// + /// This returns a chained iterator of both cached and recompiled contract artifacts + /// /// # Example /// /// ```no_run @@ -38,8 +40,53 @@ impl ProjectCompileOutput { /// let contracts: BTreeMap = project.compile().unwrap().into_artifacts().collect(); /// ``` pub fn into_artifacts(self) -> impl Iterator { - let Self { cached_artifacts, written_artifacts, .. } = self; - cached_artifacts.into_artifacts::().chain(written_artifacts.into_artifacts::()) + let Self { cached_artifacts, compiled_artifacts, .. 
} = self; + cached_artifacts.into_artifacts::().chain(compiled_artifacts.into_artifacts::()) + } + + /// All artifacts together with their contract file and name as tuple `(file, contract + /// name, artifact)` + /// + /// This returns a chained iterator of both cached and recompiled contract artifacts + /// + /// # Example + /// + /// ```no_run + /// use std::collections::btree_map::BTreeMap; + /// use ethers_solc::artifacts::CompactContractBytecode; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let contracts: Vec<(String, String, CompactContractBytecode)> = project.compile().unwrap().into_artifacts_with_files().collect(); + /// ``` + /// + /// **NOTE** the `file` will be returned as is, see also [`Self::with_stripped_file_prefixes()`] + pub fn into_artifacts_with_files(self) -> impl Iterator { + let Self { cached_artifacts, compiled_artifacts, .. } = self; + cached_artifacts + .into_artifacts_with_files() + .chain(compiled_artifacts.into_artifacts_with_files()) + } + + /// Strips the given prefix from all artifact file paths to make them relative to the given + /// `root` argument + /// + /// # Example + /// + /// Make all artifact files relative tot the project's root directory + /// + /// ```no_run + /// use ethers_solc::artifacts::CompactContractBytecode; + /// use ethers_solc::Project; + /// + /// let project = Project::builder().build().unwrap(); + /// let output = project.compile().unwrap().with_stripped_file_prefixes(project.root()); + /// ``` + pub fn with_stripped_file_prefixes(mut self, base: impl AsRef) -> Self { + let base = base.as_ref(); + self.cached_artifacts = self.cached_artifacts.into_stripped_file_prefixes(base); + self.compiled_artifacts = self.compiled_artifacts.into_stripped_file_prefixes(base); + self } /// Get the (merged) solc compiler output @@ -79,11 +126,21 @@ impl ProjectCompileOutput { /// Finds the first contract with the given name and removes it from the set pub fn 
remove(&mut self, contract_name: impl AsRef) -> Option { let contract_name = contract_name.as_ref(); - if let artifact @ Some(_) = self.written_artifacts.remove(contract_name) { + if let artifact @ Some(_) = self.compiled_artifacts.remove(contract_name) { return artifact } self.cached_artifacts.remove(contract_name) } + + /// Returns the set of `Artifacts` that were cached and got reused during [`Project::compile()`] + pub fn cached_artifacts(&self) -> &Artifacts { + &self.cached_artifacts + } + + /// Returns the set of `Artifacts` that were compiled with `solc` in [`Project::compile()`] + pub fn compiled_artifacts(&self) -> &Artifacts { + &self.compiled_artifacts + } } impl ProjectCompileOutput @@ -93,7 +150,7 @@ where /// Finds the first contract with the given name pub fn find(&self, contract_name: impl AsRef) -> Option<&T::Artifact> { let contract_name = contract_name.as_ref(); - if let artifact @ Some(_) = self.written_artifacts.find(contract_name) { + if let artifact @ Some(_) = self.compiled_artifacts.find(contract_name) { return artifact } self.cached_artifacts.find(contract_name) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index bc21b3f9d..33ef0ceac 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -230,13 +230,13 @@ impl<'a, T: ArtifactOutput> CompiledState<'a, T> { fn write_artifacts(self) -> Result> { let CompiledState { output, cache } = self; // write all artifacts - let written_artifacts = if !cache.project().no_artifacts { + let compiled_artifacts = if !cache.project().no_artifacts { T::on_output(&output.contracts, &cache.project().paths)? 
} else { Default::default() }; - Ok(ArtifactsState { output, cache, written_artifacts }) + Ok(ArtifactsState { output, cache, compiled_artifacts }) } } @@ -245,7 +245,7 @@ impl<'a, T: ArtifactOutput> CompiledState<'a, T> { struct ArtifactsState<'a, T: ArtifactOutput> { output: AggregatedCompilerOutput, cache: ArtifactsCache<'a, T>, - written_artifacts: Artifacts, + compiled_artifacts: Artifacts, } impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> { @@ -253,12 +253,12 @@ impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> { /// /// this concludes the [`Project::compile()`] statemachine fn write_cache(self) -> Result> { - let ArtifactsState { output, cache, written_artifacts } = self; + let ArtifactsState { output, cache, compiled_artifacts } = self; let ignored_error_codes = cache.project().ignored_error_codes.clone(); - let cached_artifacts = cache.write_cache(&written_artifacts)?; + let cached_artifacts = cache.write_cache(&compiled_artifacts)?; Ok(ProjectCompileOutput { compiler_output: output, - written_artifacts, + compiled_artifacts, cached_artifacts, ignored_error_codes, }) diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index 09ba9aa17..725858737 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ -11,7 +11,7 @@ use ethers_solc::{ cache::{SolFilesCache, SOLIDITY_FILES_CACHE_FILENAME}, project_util::*, remappings::Remapping, - Graph, MinimalCombinedArtifacts, Project, ProjectPathsConfig, + Graph, MinimalCombinedArtifacts, Project, ProjectCompileOutput, ProjectPathsConfig, }; use pretty_assertions::assert_eq; @@ -237,6 +237,7 @@ fn can_compile_dapp_detect_changes_in_sources() { #[test] fn can_compile_dapp_sample_with_cache() { + init_tracing(); let tmp_dir = tempfile::tempdir().unwrap(); let root = tmp_dir.path(); let cache = root.join("cache").join(SOLIDITY_FILES_CACHE_FILENAME); @@ -304,8 +305,10 @@ fn can_compile_dapp_sample_with_cache() { // deleted artifact is not taken from the cache 
std::fs::remove_file(&project.paths.sources.join("Dapp.sol")).unwrap(); - let compiled = project.compile().unwrap(); - assert!(compiled.find("Dapp").is_none()); + let compiled: ProjectCompileOutput<_> = project.compile().unwrap(); + dbg!(compiled.cached_artifacts().as_ref().keys()); + dbg!(compiled.compiled_artifacts().as_ref().keys()); + // assert!(compiled.find("Dapp").is_none()); } fn copy_dir_all(src: impl AsRef, dst: impl AsRef) -> io::Result<()> { From c15b634df05fcb1e512cae8cb1cac49094f7fa36 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 3 Feb 2022 14:19:46 +0100 Subject: [PATCH 78/82] feat: even more helpers --- ethers-solc/src/artifact_output.rs | 5 ++- ethers-solc/src/cache.rs | 71 ++++++++++++++++++++++++------ ethers-solc/src/compile/output.rs | 2 +- ethers-solc/tests/project.rs | 5 +-- 4 files changed, 62 insertions(+), 21 deletions(-) diff --git a/ethers-solc/src/artifact_output.rs b/ethers-solc/src/artifact_output.rs index c2f7d07c7..899f6c0e7 100644 --- a/ethers-solc/src/artifact_output.rs +++ b/ethers-solc/src/artifact_output.rs @@ -69,7 +69,7 @@ impl<'a, T> IntoIterator for &'a Artifacts { std::collections::btree_map::Iter<'a, String, BTreeMap>>>; fn into_iter(self) -> Self::IntoIter { - (&self.0).into_iter() + self.0.iter() } } @@ -184,7 +184,7 @@ impl Artifacts { pub fn into_artifacts_with_files(self) -> impl Iterator { self.0.into_iter().flat_map(|(f, contract_artifacts)| { contract_artifacts.into_iter().flat_map(move |(name, artifacts)| { - let contract_name = name.clone(); + let contract_name = name; let file = f.clone(); artifacts .into_iter() @@ -192,6 +192,7 @@ impl Artifacts { }) }) } + /// Strips the given prefix from all artifact file paths to make them relative to the given /// `root` argument pub fn into_stripped_file_prefixes(self, base: impl AsRef) -> Self { diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index aacc06e68..0ff8e7181 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ 
-79,7 +79,7 @@ impl SolFilesCache { /// /// let project = Project::builder().build().unwrap(); /// let mut cache = SolFilesCache::read(project.cache_path()).unwrap(); - /// cache.join_all(project.artifacts_path()); + /// cache.join_artifacts_files(project.artifacts_path()); /// # } /// ``` #[tracing::instrument(skip_all, name = "sol-files-cache::read")] @@ -107,7 +107,7 @@ impl SolFilesCache { /// ``` pub fn read_joined(paths: &ProjectPathsConfig) -> Result { let mut cache = SolFilesCache::read(&paths.cache)?; - cache.join_all(&paths.artifacts); + cache.join_artifacts_files(&paths.artifacts); Ok(cache) } @@ -116,27 +116,33 @@ impl SolFilesCache { let path = path.as_ref(); utils::create_parent_dir_all(path)?; let file = fs::File::create(path).map_err(|err| SolcError::io(err, path))?; - tracing::trace!("writing cache to json file: \"{}\"", path.display()); + tracing::trace!( + "writing cache with {} entries to json file: \"{}\"", + self.files.len(), + path.display() + ); serde_json::to_writer_pretty(file, self)?; tracing::trace!("cache file located: \"{}\"", path.display()); Ok(()) } /// Sets the artifact files location to `base` adjoined to the `CachEntries` artifacts. 
- pub fn join_all(&mut self, base: impl AsRef) -> &mut Self { + pub fn join_artifacts_files(&mut self, base: impl AsRef) -> &mut Self { let base = base.as_ref(); - self.files.values_mut().for_each(|entry| entry.join(base)); + self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base)); self } /// Removes `base` from all artifact file paths - pub fn strip_prefix_all(&mut self, base: impl AsRef) -> &mut Self { + pub fn strip_artifact_files_prefixes(&mut self, base: impl AsRef) -> &mut Self { let base = base.as_ref(); - self.files.values_mut().for_each(|entry| entry.strip_prefix(base)); + self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base)); self } - /// Removes all `CacheEntry` which source files are missing + /// Removes all `CacheEntry` which source files don't exist on disk + /// + /// **NOTE:** this assumes the `files` are absolute pub fn remove_missing_files(&mut self) { tracing::trace!("remove non existing files from cache"); self.files.retain(|file, _| { @@ -153,6 +159,38 @@ impl SolFilesCache { self.files.values().all(|entry| entry.all_artifacts_exist()) } + /// Strips the given prefix from all `file` paths that identify a `CacheEntry` to make them + /// relative to the given `base` argument + /// + /// In other words this sets the keys (the file path of a solidity file) relative to the `base` + /// argument, so that the key `/Users/me/project/src/Greeter.sol` will be changed to + /// `src/Greeter.sol` if `base` is `/Users/me/project` + /// + /// # Example + /// + /// ``` + /// # fn t() { + /// use ethers_solc::cache::SolFilesCache; + /// use ethers_solc::Project; + /// let cache = SolFilesCache::read(project.cache_path()) + /// .unwrap() + /// .with_stripped_file_prefixes(project.root()); + /// let project = Project::builder().build().unwrap(); + /// cache.read_artifact("src/Greeter.sol", "Greeter").unwrap(); + /// # } + /// ``` + /// + /// **Note:** this only affects the source files, see 
[`Self::strip_artifact_files_prefixes()`] + pub fn with_stripped_file_prefixes(mut self, base: impl AsRef) -> Self { + let base = base.as_ref(); + self.files = self + .files + .into_iter() + .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e)) + .collect(); + self + } + /// Returns the path to the artifact of the given `(file, contract)` pair /// /// # Example @@ -164,7 +202,7 @@ impl SolFilesCache { /// /// let project = Project::builder().build().unwrap(); /// let cache = SolFilesCache::read_joined(&project.paths).unwrap(); - /// cache.find_artifact_path("/.../src/Greeter.sol", "Greeter"); + /// cache.find_artifact_path("/Users/git/myproject/src/Greeter.sol", "Greeter"); /// # } /// ``` pub fn find_artifact_path( @@ -187,9 +225,12 @@ impl SolFilesCache { /// /// let project = Project::builder().build().unwrap(); /// let cache = SolFilesCache::read_joined(&project.paths).unwrap(); - /// cache.read_artifact("/.../src/Greeter.sol", "Greeter"); + /// cache.read_artifact("/Users/git/myproject/src/Greeter.sol", "Greeter"); /// # } /// ``` + /// + /// **NOTE**: unless the cache's `files` keys were modified `contract_file` is expected to be + /// absolute, see [``] pub fn read_artifact( &self, contract_file: impl AsRef, @@ -447,13 +488,13 @@ impl CacheEntry { } /// Sets the artifact's paths to `base` adjoined to the artifact's `path`. 
- pub fn join(&mut self, base: impl AsRef) { + pub fn join_artifacts_files(&mut self, base: impl AsRef) { let base = base.as_ref(); self.artifacts_mut().for_each(|p| *p = base.join(&*p)) } /// Removes `base` from the artifact's path - pub fn strip_prefix(&mut self, base: impl AsRef) { + pub fn strip_artifact_files_prefixes(&mut self, base: impl AsRef) { let base = base.as_ref(); self.artifacts_mut().for_each(|p| { if let Ok(rem) = p.strip_prefix(base) { @@ -637,12 +678,14 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { pub fn new(project: &'a Project, edges: GraphEdges) -> Result { let cache = if project.cached { // read the cache file if it already exists - let cache = if project.cache_path().exists() { + let mut cache = if project.cache_path().exists() { SolFilesCache::read_joined(&project.paths).unwrap_or_default() } else { SolFilesCache::default() }; + cache.remove_missing_files(); + // read all artifacts let cached_artifacts = if project.paths.artifacts.exists() { tracing::trace!("reading artifacts from cache.."); @@ -765,7 +808,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { // add the new cache entries to the cache file cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry))); - cache.strip_prefix_all(project.artifacts_path()); + cache.strip_artifact_files_prefixes(project.artifacts_path()); // write to disk cache.write(project.cache_path())?; diff --git a/ethers-solc/src/compile/output.rs b/ethers-solc/src/compile/output.rs index 6f9500624..3cd0a9dcc 100644 --- a/ethers-solc/src/compile/output.rs +++ b/ethers-solc/src/compile/output.rs @@ -69,7 +69,7 @@ impl ProjectCompileOutput { } /// Strips the given prefix from all artifact file paths to make them relative to the given - /// `root` argument + /// `base` argument /// /// # Example /// diff --git a/ethers-solc/tests/project.rs b/ethers-solc/tests/project.rs index 725858737..137e45d1b 100644 --- a/ethers-solc/tests/project.rs +++ b/ethers-solc/tests/project.rs @@ 
-237,7 +237,6 @@ fn can_compile_dapp_detect_changes_in_sources() { #[test] fn can_compile_dapp_sample_with_cache() { - init_tracing(); let tmp_dir = tempfile::tempdir().unwrap(); let root = tmp_dir.path(); let cache = root.join("cache").join(SOLIDITY_FILES_CACHE_FILENAME); @@ -306,9 +305,7 @@ fn can_compile_dapp_sample_with_cache() { // deleted artifact is not taken from the cache std::fs::remove_file(&project.paths.sources.join("Dapp.sol")).unwrap(); let compiled: ProjectCompileOutput<_> = project.compile().unwrap(); - dbg!(compiled.cached_artifacts().as_ref().keys()); - dbg!(compiled.compiled_artifacts().as_ref().keys()); - // assert!(compiled.find("Dapp").is_none()); + assert!(compiled.find("Dapp").is_none()); } fn copy_dir_all(src: impl AsRef, dst: impl AsRef) -> io::Result<()> { From e19103cc7822fe7d80112685779a353978ab65d2 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 3 Feb 2022 16:34:44 +0100 Subject: [PATCH 79/82] fix: failing doc tests --- ethers-solc/src/cache.rs | 12 +++++++----- ethers-solc/src/compile/project.rs | 25 +++++++++++++++++++------ ethers-solc/src/resolver.rs | 12 +++++++----- 3 files changed, 33 insertions(+), 16 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 0ff8e7181..51c30fa1e 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -169,14 +169,15 @@ impl SolFilesCache { /// # Example /// /// ``` - /// # fn t() { + /// fn t() { + /// use ethers_solc::artifacts::CompactContract; /// use ethers_solc::cache::SolFilesCache; /// use ethers_solc::Project; + /// let project = Project::builder().build().unwrap(); /// let cache = SolFilesCache::read(project.cache_path()) /// .unwrap() /// .with_stripped_file_prefixes(project.root()); - /// let project = Project::builder().build().unwrap(); - /// cache.read_artifact("src/Greeter.sol", "Greeter").unwrap(); + /// cache.read_artifact::("src/Greeter.sol", "Greeter").unwrap(); /// # } /// ``` /// @@ -219,13 +220,14 @@ impl SolFilesCache { 
/// # Example /// /// ``` - /// # fn t() { + /// fn t() { /// use ethers_solc::cache::SolFilesCache; /// use ethers_solc::Project; + /// use ethers_solc::artifacts::CompactContract; /// /// let project = Project::builder().build().unwrap(); /// let cache = SolFilesCache::read_joined(&project.paths).unwrap(); - /// cache.read_artifact("/Users/git/myproject/src/Greeter.sol", "Greeter"); + /// cache.read_artifact::("/Users/git/myproject/src/Greeter.sol", "Greeter"); /// # } /// ``` /// diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 33ef0ceac..28a0dc3d9 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -372,9 +372,10 @@ fn compile_sequential( continue } tracing::trace!( - "compiling {} sources with solc \"{}\"", + "compiling {} sources with solc \"{}\" {:?}", sources.len(), - solc.as_ref().display() + solc.as_ref().display(), + solc.args ); let input = CompilerInput::with_sources(sources) @@ -382,7 +383,12 @@ fn compile_sequential( .normalize_evm_version(&version) .with_remappings(paths.remappings.clone()); - tracing::trace!("calling solc `{}` with {} sources", version, input.sources.len()); + tracing::trace!( + "calling solc `{}` with {} sources {:?}", + version, + input.sources.len(), + input.sources.keys() + ); let output = solc.compile(&input)?; tracing::trace!("compiled input, output has error: {}", output.has_error()); @@ -421,7 +427,13 @@ fn compile_parallel( let outputs = pool.install(move || { jobs.into_par_iter() .map(|(solc, version, input)| { - tracing::trace!("calling solc `{}` with {} sources", version, input.sources.len()); + tracing::trace!( + "calling solc `{}` {:?} with {} sources: {:?}", + version, + solc.args, + input.sources.len(), + input.sources.keys() + ); solc.compile(&input).map(|output| (version, output)) }) .collect::>>() @@ -442,9 +454,10 @@ mod tests { #[allow(unused)] fn init_tracing() { - tracing_subscriber::fmt() + let _ = tracing_subscriber::fmt() 
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) - .init(); + .try_init() + .ok(); } #[test] diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index 5fe605033..c111e1712 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -102,11 +102,13 @@ impl GraphEdges { /// Read more about [Import Remapping](https://docs.soliditylang.org/en/develop/path-resolution.html#import-remapping) pub fn get_source_unit_name(&self, file: impl AsRef, root: impl AsRef) -> PathBuf { let file = file.as_ref(); - if self.is_input_file(file) { - utils::source_name(file, root).to_path_buf() - } else { - file.to_path_buf() - } + // let f = if self.is_input_file(file) { + // utils::source_name(file, root).to_path_buf() + // } else { + // file.to_path_buf() + // }; + // dbg!(f.clone()); + file.to_path_buf() } /// Returns the `VersionReq` for the given file From a03a7bb94c59bd1ebf50921ac202e0f84566a4b4 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 3 Feb 2022 16:43:55 +0100 Subject: [PATCH 80/82] refactor: remove source name map tracking --- ethers-solc/src/cache.rs | 62 +--------------------------- ethers-solc/src/compile/project.rs | 66 +++--------------------------- ethers-solc/src/lib.rs | 1 - ethers-solc/src/resolver.rs | 25 ----------- 4 files changed, 7 insertions(+), 147 deletions(-) diff --git a/ethers-solc/src/cache.rs b/ethers-solc/src/cache.rs index 51c30fa1e..bf1ec42b1 100644 --- a/ethers-solc/src/cache.rs +++ b/ethers-solc/src/cache.rs @@ -2,7 +2,6 @@ use crate::{ artifacts::Sources, config::SolcConfig, - contracts::VersionedContracts, error::{Result, SolcError}, resolver::GraphEdges, utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Project, ProjectPathsConfig, @@ -177,7 +176,7 @@ impl SolFilesCache { /// let cache = SolFilesCache::read(project.cache_path()) /// .unwrap() /// .with_stripped_file_prefixes(project.root()); - /// cache.read_artifact::("src/Greeter.sol", "Greeter").unwrap(); + /// let 
artifact: CompactContract = cache.read_artifact("src/Greeter.sol", "Greeter").unwrap(); /// # } /// ``` /// @@ -227,7 +226,7 @@ impl SolFilesCache { /// /// let project = Project::builder().build().unwrap(); /// let cache = SolFilesCache::read_joined(&project.paths).unwrap(); - /// cache.read_artifact::("/Users/git/myproject/src/Greeter.sol", "Greeter"); + /// let artifact: CompactContract = cache.read_artifact("/Users/git/myproject/src/Greeter.sol", "Greeter").unwrap(); /// # } /// ``` /// @@ -726,13 +725,6 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { } } - pub fn edges(&self) -> &GraphEdges { - match self { - ArtifactsCache::Ephemeral(edges, _) => edges, - ArtifactsCache::Cached(cache) => &cache.edges, - } - } - pub fn project(&self) -> &'a Project { match self { ArtifactsCache::Ephemeral(_, project) => project, @@ -819,53 +811,3 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> { } } } - -/// A helper type to handle source name/full disk mappings -/// -/// The disk path is the actual path where a file can be found on disk. 
-/// A source name is the internal identifier and is the remaining part of the disk path starting -/// with the configured source directory, (`contracts/contract.sol`) -/// -/// See also [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) -#[derive(Debug, Default)] -pub(crate) struct SourceUnitNameMap { - /// all libraries to the source set while keeping track of their actual disk path - /// (`contracts/contract.sol` -> `/Users/.../contracts.sol`) - pub source_unit_name_to_absolute_path: HashMap, -} - -impl SourceUnitNameMap { - /// Sets the source unit names of the sources using the provided mapper - pub(crate) fn apply_source_names_with(&mut self, sources: Sources, mapper: M) -> Sources - where - M: for<'a> Fn(&Path) -> PathBuf, - { - sources - .into_iter() - .map(|(file, source)| { - let source_unit_name = mapper(&file); - self.source_unit_name_to_absolute_path.insert(source_unit_name.clone(), file); - (source_unit_name, source) - }) - .collect() - } - - /// Reverses all previous source unit mappings - pub(crate) fn reverse(&self, contracts: VersionedContracts) -> VersionedContracts { - let contracts = contracts - .into_iter() - .map(|(source_unit_name, contracts)| { - if let Some(file) = self - .source_unit_name_to_absolute_path - .get(Path::new(&source_unit_name)) - .cloned() - { - (format!("{}", file.display()), contracts) - } else { - (source_unit_name, contracts) - } - }) - .collect(); - VersionedContracts(contracts) - } -} diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 28a0dc3d9..6e58355fa 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -82,7 +82,7 @@ use crate::{ output::AggregatedCompilerOutput, resolver::GraphEdges, ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Solc, - SourceUnitNameMap, Sources, + Sources, }; use rayon::prelude::*; @@ -176,19 +176,12 @@ impl<'a, T: 
ArtifactOutput> ProjectCompiler<'a, T> { /// - check cache fn preprocess(self) -> Result> { let Self { edges, project, mut sources } = self; - // the map that keeps track of the mapping of resolved solidity file paths -> source unit - // names - let mut source_unit_map = SourceUnitNameMap::default(); let mut cache = ArtifactsCache::new(project, edges)?; // retain and compile only dirty sources - sources = sources.filtered(&mut cache).set_source_unit_names( - &project.paths, - cache.edges(), - &mut source_unit_map, - ); + sources = sources.filtered(&mut cache); - Ok(PreprocessedState { sources, cache, source_unit_map }) + Ok(PreprocessedState { sources, cache }) } } @@ -199,19 +192,15 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> { struct PreprocessedState<'a, T: ArtifactOutput> { sources: CompilerSources, cache: ArtifactsCache<'a, T>, - source_unit_map: SourceUnitNameMap, } impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> { /// advance to the next state by compiling all sources fn compile(self) -> Result> { - let PreprocessedState { sources, cache, source_unit_map } = self; - let mut output = + let PreprocessedState { sources, cache } = self; + let output = sources.compile(&cache.project().solc_config.settings, &cache.project().paths)?; - // reverse the applied source unit names - output.contracts = source_unit_map.reverse(output.contracts); - Ok(CompiledState { output, cache }) } } @@ -301,50 +290,6 @@ impl CompilerSources { } } - /// Sets the correct source unit names for all sources - /// - /// This helps the compiler to find the right source in the `CompilerInput`. - /// the source unit name depends on how it is imported, - /// see [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) - /// - /// For contracts imported from the project's src directory the source unit name is the relative - /// path, starting at the project's root path. 
- /// - /// The source name for a resolved library import is the applied remapping, also starting - /// relatively at the project's root path. - fn set_source_unit_names( - self, - paths: &ProjectPathsConfig, - edges: &GraphEdges, - names: &mut SourceUnitNameMap, - ) -> Self { - fn set( - sources: VersionedSources, - paths: &ProjectPathsConfig, - edges: &GraphEdges, - names: &mut SourceUnitNameMap, - ) -> VersionedSources { - sources - .into_iter() - .map(|(solc, (version, sources))| { - let sources = names.apply_source_names_with(sources, |file| { - edges.get_source_unit_name(file, &paths.root) - }); - (solc, (version, sources)) - }) - .collect() - } - - match self { - CompilerSources::Sequential(s) => { - CompilerSources::Sequential(set(s, paths, edges, names)) - } - CompilerSources::Parallel(s, j) => { - CompilerSources::Parallel(set(s, paths, edges, names), j) - } - } - } - /// Compiles all the files with `Solc` fn compile( self, @@ -473,7 +418,6 @@ mod tests { assert_eq!(cache.dirty_entries.len(), 3); assert!(cache.filtered.is_empty()); assert!(cache.cache.is_empty()); - assert_eq!(prep.source_unit_map.source_unit_name_to_absolute_path.len(), 3); let compiled = prep.compile().unwrap(); assert_eq!(compiled.output.contracts.files().count(), 3); diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 33898fc8c..7a7bb93a7 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -29,7 +29,6 @@ pub mod utils; use crate::{ artifacts::{Contract, Sources}, - cache::SourceUnitNameMap, error::{SolcError, SolcIoError}, }; use error::Result; diff --git a/ethers-solc/src/resolver.rs b/ethers-solc/src/resolver.rs index c111e1712..ab406477c 100644 --- a/ethers-solc/src/resolver.rs +++ b/ethers-solc/src/resolver.rs @@ -86,31 +86,6 @@ impl GraphEdges { } } - /// Returns the source unit name for the `file` - /// - /// Read more about [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) - /// - /// If the 
`file` is an `input` file, see [`Self::is_input_file()`], then this returns the - /// relative part to that file starting from the project's root directory. So that the - /// source unit name of `/user/projects/myproject/src/Contract.sol` is `src/Contract.sol` if the - /// `myproject` dir is the project's root directory. - /// - /// If the `file` is a resolved import, then this returns the resolved path, after remappings - /// were applied, because, solc also applies them during their VFS lookup and they are provided - /// in the `CompilerInput`'s `Settings`: see [`CompilerInput::with_remappings()`] - /// - /// Read more about [Import Remapping](https://docs.soliditylang.org/en/develop/path-resolution.html#import-remapping) - pub fn get_source_unit_name(&self, file: impl AsRef, root: impl AsRef) -> PathBuf { - let file = file.as_ref(); - // let f = if self.is_input_file(file) { - // utils::source_name(file, root).to_path_buf() - // } else { - // file.to_path_buf() - // }; - // dbg!(f.clone()); - file.to_path_buf() - } - /// Returns the `VersionReq` for the given file pub fn version_requirement(&self, file: impl AsRef) -> Option<&VersionReq> { self.indices From 072bbb514721df67ca5359682dc081434aa632fc Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 3 Feb 2022 18:33:01 +0100 Subject: [PATCH 81/82] fix: determine artifacts in ephemeral mode --- ethers-solc/src/compile/project.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ethers-solc/src/compile/project.rs b/ethers-solc/src/compile/project.rs index 6e58355fa..5708494d1 100644 --- a/ethers-solc/src/compile/project.rs +++ b/ethers-solc/src/compile/project.rs @@ -222,7 +222,7 @@ impl<'a, T: ArtifactOutput> CompiledState<'a, T> { let compiled_artifacts = if !cache.project().no_artifacts { T::on_output(&output.contracts, &cache.project().paths)? 
} else { - Default::default() + T::output_to_artifacts(&output.contracts) }; Ok(ArtifactsState { output, cache, compiled_artifacts }) @@ -350,7 +350,7 @@ fn compile_parallel( paths: &ProjectPathsConfig, ) -> Result { debug_assert!(num_jobs > 1); - tracing::trace!("compile sources in parallel using {} solc jobs", num_jobs); + tracing::trace!("compile sources in parallel using up to {} solc jobs", num_jobs); let mut jobs = Vec::with_capacity(input.len()); for (solc, (version, sources)) in input { From 18e8218a359160a42750bcc830b36a650d537d62 Mon Sep 17 00:00:00 2001 From: Matthias Seitz Date: Thu, 3 Feb 2022 21:55:51 +0100 Subject: [PATCH 82/82] refactor: allowed paths should not fail --- ethers-solc/src/config.rs | 60 ++++++++++++--------------------------- ethers-solc/src/lib.rs | 3 +- ethers-solc/src/utils.rs | 14 +++++++++ 3 files changed, 33 insertions(+), 44 deletions(-) diff --git a/ethers-solc/src/config.rs b/ethers-solc/src/config.rs index a6f2b14ec..26e29e683 100644 --- a/ethers-solc/src/config.rs +++ b/ethers-solc/src/config.rs @@ -9,7 +9,6 @@ use crate::{ use serde::{Deserialize, Serialize}; use std::{ - convert::TryFrom, fmt::{self, Formatter}, fs, path::{Component, Path, PathBuf}, @@ -356,27 +355,27 @@ pub struct ProjectPathsConfigBuilder { impl ProjectPathsConfigBuilder { pub fn root(mut self, root: impl Into) -> Self { - self.root = Some(canonicalized(root)); + self.root = Some(utils::canonicalized(root)); self } pub fn cache(mut self, cache: impl Into) -> Self { - self.cache = Some(canonicalized(cache)); + self.cache = Some(utils::canonicalized(cache)); self } pub fn artifacts(mut self, artifacts: impl Into) -> Self { - self.artifacts = Some(canonicalized(artifacts)); + self.artifacts = Some(utils::canonicalized(artifacts)); self } pub fn sources(mut self, sources: impl Into) -> Self { - self.sources = Some(canonicalized(sources)); + self.sources = Some(utils::canonicalized(sources)); self } pub fn tests(mut self, tests: impl Into) -> Self { - 
self.tests = Some(canonicalized(tests)); + self.tests = Some(utils::canonicalized(tests)); self } @@ -387,14 +386,14 @@ impl ProjectPathsConfigBuilder { } pub fn lib(mut self, lib: impl Into) -> Self { - self.libraries.get_or_insert_with(Vec::new).push(canonicalized(lib)); + self.libraries.get_or_insert_with(Vec::new).push(utils::canonicalized(lib)); self } pub fn libs(mut self, libs: impl IntoIterator>) -> Self { let libraries = self.libraries.get_or_insert_with(Vec::new); for lib in libs.into_iter() { - libraries.push(canonicalized(lib)); + libraries.push(utils::canonicalized(lib)); } self } @@ -413,7 +412,7 @@ impl ProjectPathsConfigBuilder { } pub fn build_with_root(self, root: impl Into) -> ProjectPathsConfig { - let root = canonicalized(root); + let root = utils::canonicalized(root); let libraries = self.libraries.unwrap_or_else(|| ProjectPathsConfig::find_libs(&root)); @@ -445,20 +444,6 @@ impl ProjectPathsConfigBuilder { } } -/// Returns the same path config but with canonicalized paths. -/// -/// This will take care of potential symbolic linked directories. -/// For example, the tempdir library is creating directories hosted under `/var/`, which in OS X -/// is a symbolic link to `/private/var/`. 
So if when we try to resolve imports and a path is -/// rooted in a symbolic directory we might end up with different paths for the same file, like -/// `private/var/.../Dapp.sol` and `/var/.../Dapp.sol` -/// -/// This canonicalizes all the paths but does not treat non existing dirs as an error -fn canonicalized(path: impl Into) -> PathBuf { - let path = path.into(); - utils::canonicalize(&path).unwrap_or(path) -} - /// The config to use when compiling the contracts #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] pub struct SolcConfig { @@ -531,19 +516,10 @@ impl fmt::Display for AllowedLibPaths { } } -impl> TryFrom> for AllowedLibPaths { - type Error = SolcIoError; - - fn try_from(libs: Vec) -> std::result::Result { - let libs = libs - .into_iter() - .map(|lib| { - let path: PathBuf = lib.into(); - let lib = utils::canonicalize(&path)?; - Ok(lib) - }) - .collect::, _>>()?; - Ok(AllowedLibPaths(libs)) +impl> From> for AllowedLibPaths { + fn from(libs: Vec) -> Self { + let libs = libs.into_iter().map(utils::canonicalized).collect(); + AllowedLibPaths(libs) } } @@ -567,13 +543,13 @@ mod tests { assert_eq!(ProjectPathsConfig::find_source_dir(root), contracts,); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).sources, - canonicalized(contracts), + utils::canonicalized(contracts), ); std::fs::File::create(&src).unwrap(); assert_eq!(ProjectPathsConfig::find_source_dir(root), src,); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).sources, - canonicalized(src), + utils::canonicalized(src), ); assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,); @@ -581,13 +557,13 @@ mod tests { assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), artifacts,); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).artifacts, - canonicalized(artifacts), + utils::canonicalized(artifacts), ); std::fs::File::create(&out).unwrap(); assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,); assert_eq!( 
ProjectPathsConfig::builder().build_with_root(&root).artifacts, - canonicalized(out), + utils::canonicalized(out), ); assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],); @@ -595,13 +571,13 @@ mod tests { assert_eq!(ProjectPathsConfig::find_libs(root), vec![node_modules.clone()],); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).libraries, - vec![canonicalized(node_modules)], + vec![utils::canonicalized(node_modules)], ); std::fs::File::create(&lib).unwrap(); assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],); assert_eq!( ProjectPathsConfig::builder().build_with_root(&root).libraries, - vec![canonicalized(lib)], + vec![utils::canonicalized(lib)], ); } } diff --git a/ethers-solc/src/lib.rs b/ethers-solc/src/lib.rs index 7a7bb93a7..b45df5500 100644 --- a/ethers-solc/src/lib.rs +++ b/ethers-solc/src/lib.rs @@ -33,7 +33,6 @@ use crate::{ }; use error::Result; use std::{ - convert::TryInto, marker::PhantomData, path::{Path, PathBuf}, }; @@ -537,7 +536,7 @@ impl ProjectBuilder { auto_detect, artifacts, ignored_error_codes, - allowed_lib_paths: allowed_paths.try_into()?, + allowed_lib_paths: allowed_paths.into(), solc_jobs: solc_jobs.unwrap_or_else(::num_cpus::get), offline, }) diff --git a/ethers-solc/src/utils.rs b/ethers-solc/src/utils.rs index 43a52cdca..cdb36af7b 100644 --- a/ethers-solc/src/utils.rs +++ b/ethers-solc/src/utils.rs @@ -83,6 +83,20 @@ pub fn canonicalize(path: impl AsRef) -> Result { dunce::canonicalize(&path).map_err(|err| SolcIoError::new(err, path)) } +/// Returns the same path config but with canonicalized paths. +/// +/// This will take care of potential symbolic linked directories. +/// For example, the tempdir library is creating directories hosted under `/var/`, which in OS X +/// is a symbolic link to `/private/var/`. 
So if when we try to resolve imports and a path is +/// rooted in a symbolic directory we might end up with different paths for the same file, like +/// `private/var/.../Dapp.sol` and `/var/.../Dapp.sol` +/// +/// This canonicalizes all the paths but does not treat non existing dirs as an error +pub fn canonicalized(path: impl Into) -> PathBuf { + let path = path.into(); + canonicalize(&path).unwrap_or(path) +} + /// Returns the path to the library if the source path is in fact determined to be a library path, /// and it exists. /// Note: this does not handle relative imports or remappings.