diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index e7a2b378542..38c20230527 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -478,9 +478,7 @@ enum LocalFingerprint { /// The `dep_info` file, when present, also lists a number of other files /// for us to look at. If any of those files are newer than this file then /// we need to recompile. - CheckDepInfo { - dep_info: PathBuf, - }, + CheckDepInfo { dep_info: PathBuf }, /// This represents a nonempty set of `rerun-if-changed` annotations printed /// out by a build script. The `output` file is a arelative file anchored at @@ -500,10 +498,7 @@ enum LocalFingerprint { /// build script. The exact env var and value are hashed here. There's no /// filesystem dependence here, and if the values are changed the hash will /// change forcing a recompile. - RerunIfEnvChanged { - var: String, - val: Option, - }, + RerunIfEnvChanged { var: String, val: Option }, } enum StaleFile { @@ -762,7 +757,7 @@ impl Fingerprint { let t = FileTime::from_system_time(SystemTime::now()); drop(filetime::set_file_times(f, t, t)); } - return mtime; + mtime }) .min(); @@ -1350,7 +1345,7 @@ fn compare_old_fingerprint( debug_assert_eq!(util::to_hex(old_fingerprint.hash()), old_fingerprint_short); let result = new_fingerprint.compare(&old_fingerprint); assert!(result.is_err()); - return result; + result } fn log_compare(unit: &Unit<'_>, compare: &CargoResult<()>) { @@ -1444,7 +1439,7 @@ where }); } - return None; + None } fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String { diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs index 53d7cfc3d97..7b749a55e79 100644 --- a/src/cargo/core/package_id_spec.rs +++ b/src/cargo/core/package_id_spec.rs @@ -5,6 +5,7 @@ use semver::Version; use serde::{de, ser}; use url::Url; +use crate::core::interning::InternedString; use crate::core::PackageId; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::{validate_package_name, ToSemver, ToUrl}; @@ -20,7 +21,7 @@ use crate::util::{validate_package_name, ToSemver, ToUrl}; /// sufficient to uniquely define a package ID. #[derive(Clone, PartialEq, Eq, Debug, Hash, Ord, PartialOrd)] pub struct PackageIdSpec { - name: String, + name: InternedString, version: Option, url: Option, } @@ -66,7 +67,7 @@ impl PackageIdSpec { }; validate_package_name(name, "pkgid", "")?; Ok(PackageIdSpec { - name: name.to_string(), + name: InternedString::new(name), version, url: None, }) @@ -86,7 +87,7 @@ impl PackageIdSpec { /// fields filled in. 
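Switching `PackageIdSpec::name` from `String` to `InternedString` makes the spec cheap to copy and compare. As a rough illustration of the interning idea (not Cargo's actual `core::interning` implementation, just a self-contained sketch with invented names; the const `Mutex::new` needs Rust 1.63+):

```rust
use std::collections::HashSet;
use std::sync::Mutex;

// A `Copy` handle to a leaked, deduplicated string. Cargo's real
// `InternedString` lives in `core::interning`; this only sketches the idea.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Interned(&'static str);

fn intern(s: &str) -> Interned {
    static CACHE: Mutex<Option<HashSet<&'static str>>> = Mutex::new(None);
    let mut guard = CACHE.lock().unwrap();
    let cache = guard.get_or_insert_with(HashSet::new);
    if let Some(&existing) = cache.get(s) {
        return Interned(existing);
    }
    // First time we see this name: leak one allocation and reuse it forever.
    let leaked: &'static str = Box::leak(s.to_string().into_boxed_str());
    cache.insert(leaked);
    Interned(leaked)
}

fn main() {
    let a = intern("serde");
    let b = intern("serde");
    // Both handles point at the same leaked allocation; copying them is a
    // pointer-sized copy, and comparing them never reallocates.
    assert_eq!(a, b);
    assert!(std::ptr::eq(a.0, b.0));
}
```

Because the handle is `Copy`, accessors such as the reworked `name()` below can return it by value instead of handing out `&str` borrows.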
pub fn from_package_id(package_id: PackageId) -> PackageIdSpec { PackageIdSpec { - name: package_id.name().to_string(), + name: package_id.name(), version: Some(package_id.version().clone()), url: Some(package_id.source_id().url().clone()), } @@ -117,19 +118,19 @@ impl PackageIdSpec { match parts.next() { Some(part) => { let version = part.to_semver()?; - (name_or_version.to_string(), Some(version)) + (InternedString::new(name_or_version), Some(version)) } None => { if name_or_version.chars().next().unwrap().is_alphabetic() { - (name_or_version.to_string(), None) + (InternedString::new(name_or_version), None) } else { let version = name_or_version.to_semver()?; - (path_name.to_string(), Some(version)) + (InternedString::new(path_name), Some(version)) } } } } - None => (path_name.to_string(), None), + None => (InternedString::new(path_name), None), } }; Ok(PackageIdSpec { @@ -139,8 +140,8 @@ impl PackageIdSpec { }) } - pub fn name(&self) -> &str { - &self.name + pub fn name(&self) -> InternedString { + self.name } pub fn version(&self) -> Option<&Version> { @@ -157,7 +158,7 @@ impl PackageIdSpec { /// Checks whether the given `PackageId` matches the `PackageIdSpec`. pub fn matches(&self, package_id: PackageId) -> bool { - if self.name() != &*package_id.name() { + if self.name() != package_id.name() { return false; } @@ -234,7 +235,7 @@ impl fmt::Display for PackageIdSpec { } else { write!(f, "{}", url)?; } - if url.path_segments().unwrap().next_back().unwrap() != self.name { + if url.path_segments().unwrap().next_back().unwrap() != &*self.name { printed_name = true; write!(f, "#{}", self.name)?; } @@ -273,6 +274,7 @@ impl<'de> de::Deserialize<'de> for PackageIdSpec { #[cfg(test)] mod tests { use super::PackageIdSpec; + use crate::core::interning::InternedString; use crate::core::{PackageId, SourceId}; use crate::util::ToSemver; use url::Url; @@ -288,7 +290,7 @@ mod tests { ok( "https://crates.io/foo#1.2.3", PackageIdSpec { - name: "foo".to_string(), + name: InternedString::new("foo"), version: Some("1.2.3".to_semver().unwrap()), url: Some(Url::parse("https://crates.io/foo").unwrap()), }, @@ -296,7 +298,7 @@ mod tests { ok( "https://crates.io/foo#bar:1.2.3", PackageIdSpec { - name: "bar".to_string(), + name: InternedString::new("bar"), version: Some("1.2.3".to_semver().unwrap()), url: Some(Url::parse("https://crates.io/foo").unwrap()), }, @@ -304,7 +306,7 @@ mod tests { ok( "crates.io/foo", PackageIdSpec { - name: "foo".to_string(), + name: InternedString::new("foo"), version: None, url: Some(Url::parse("cargo://crates.io/foo").unwrap()), }, @@ -312,7 +314,7 @@ mod tests { ok( "crates.io/foo#1.2.3", PackageIdSpec { - name: "foo".to_string(), + name: InternedString::new("foo"), version: Some("1.2.3".to_semver().unwrap()), url: Some(Url::parse("cargo://crates.io/foo").unwrap()), }, @@ -320,7 +322,7 @@ mod tests { ok( "crates.io/foo#bar", PackageIdSpec { - name: "bar".to_string(), + name: InternedString::new("bar"), version: None, url: Some(Url::parse("cargo://crates.io/foo").unwrap()), }, @@ -328,7 +330,7 @@ mod tests { ok( "crates.io/foo#bar:1.2.3", PackageIdSpec { - name: "bar".to_string(), + name: InternedString::new("bar"), version: Some("1.2.3".to_semver().unwrap()), url: Some(Url::parse("cargo://crates.io/foo").unwrap()), }, @@ -336,7 +338,7 @@ mod tests { ok( "foo", PackageIdSpec { - name: "foo".to_string(), + name: InternedString::new("foo"), version: None, url: None, }, @@ -344,7 +346,7 @@ mod tests { ok( "foo:1.2.3", PackageIdSpec { - name: "foo".to_string(), + name: 
InternedString::new("foo"), version: Some("1.2.3".to_semver().unwrap()), url: None, }, diff --git a/src/cargo/core/profiles.rs b/src/cargo/core/profiles.rs index 8cf142981c8..80b4fa6a6f1 100644 --- a/src/cargo/core/profiles.rs +++ b/src/cargo/core/profiles.rs @@ -282,7 +282,7 @@ impl ProfileMaker { let name_matches: Vec = packages .package_ids() .filter_map(|pkg_id| { - if pkg_id.name().as_str() == spec.name() { + if pkg_id.name() == spec.name() { Some(pkg_id.to_string()) } else { None @@ -292,7 +292,7 @@ impl ProfileMaker { if name_matches.is_empty() { let suggestion = packages .package_ids() - .map(|p| (lev_distance(spec.name(), &p.name()), p.name())) + .map(|p| (lev_distance(&*spec.name(), &p.name()), p.name())) .filter(|&(d, _)| d < 4) .min_by_key(|p| p.0) .map(|p| p.1); diff --git a/src/cargo/core/resolver/conflict_cache.rs b/src/cargo/core/resolver/conflict_cache.rs index e2ae54e9bd0..d652e5bbe3b 100644 --- a/src/cargo/core/resolver/conflict_cache.rs +++ b/src/cargo/core/resolver/conflict_cache.rs @@ -234,7 +234,7 @@ impl ConflictCache { /// all the `PackageId` entries are activated. pub fn contains(&self, dep: &Dependency, con: &ConflictMap) -> bool { if let Some(cst) = self.con_from_dep.get(dep) { - cst.contains(con.keys().cloned(), &con) + cst.contains(con.keys().cloned(), con) } else { false } diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index c494a073678..7071d88d1aa 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -1,4 +1,4 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::num::NonZeroU64; use std::rc::Rc; @@ -8,12 +8,12 @@ use failure::{bail, ensure}; use log::debug; use crate::core::interning::InternedString; -use crate::core::{Dependency, FeatureValue, PackageId, SourceId, Summary}; +use crate::core::{Dependency, PackageId, SourceId, Summary}; use crate::util::CargoResult; use crate::util::Graph; -use super::errors::ActivateResult; -use super::types::{ConflictMap, ConflictReason, DepInfo, Method, RegistryQueryer}; +use super::dep_cache::RegistryQueryer; +use super::types::{ConflictMap, FeaturesSet, Method}; pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use super::encode::{Metadata, WorkspaceResolve}; @@ -27,7 +27,7 @@ pub use super::resolve::Resolve; pub struct Context { pub activations: Activations, /// list the features that are activated for each package - pub resolve_features: im_rc::HashMap>>, + pub resolve_features: im_rc::HashMap, /// get the package that will be linking to a native library by its links attribute pub links: im_rc::HashMap, /// for each package the list of names it can see, @@ -77,7 +77,7 @@ impl From<&semver::Version> for SemverCompatibility { } impl PackageId { - pub fn as_activations_key(&self) -> (InternedString, SourceId, SemverCompatibility) { + pub fn as_activations_key(self) -> (InternedString, SourceId, SemverCompatibility) { (self.name(), self.source_id(), self.version().into()) } } @@ -100,7 +100,7 @@ impl Context { /// Activate this summary by inserting it into our list of known activations. /// /// Returns `true` if this summary with the given method is already activated. 
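`as_activations_key` buckets activations by `(name, source, SemverCompatibility)`, so at most one version per semver-compatible range can be active. A standalone sketch of my reading of that bucketing rule (the real `SemverCompatibility` type is defined in `context.rs`; the enum and helper here are only illustrative):

```rust
// Two versions share a bucket exactly when semver treats them as compatible,
// so the resolver will activate at most one version per bucket.
#[derive(PartialEq, Eq, Hash, Debug)]
enum CompatBucket {
    Major(u64),
    Minor(u64),
    Patch(u64),
}

fn bucket(major: u64, minor: u64, patch: u64) -> CompatBucket {
    if major != 0 {
        CompatBucket::Major(major)
    } else if minor != 0 {
        CompatBucket::Minor(minor)
    } else {
        CompatBucket::Patch(patch)
    }
}

fn main() {
    // 1.2.3 and 1.9.0 share a bucket, so only one of them may be activated.
    assert_eq!(bucket(1, 2, 3), bucket(1, 9, 0));
    // 0.3.x and 0.4.x are semver-incompatible, so both may coexist.
    assert_ne!(bucket(0, 3, 1), bucket(0, 4, 0));
}
```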
- pub fn flag_activated(&mut self, summary: &Summary, method: &Method<'_>) -> CargoResult { + pub fn flag_activated(&mut self, summary: &Summary, method: &Method) -> CargoResult { let id = summary.package_id(); let age: ContextAge = self.age(); match self.activations.entry(id.as_activations_key()) { @@ -125,7 +125,7 @@ impl Context { } } debug!("checking if {} is already activated", summary.package_id()); - let (features, use_default) = match *method { + let (features, use_default) = match method { Method::Everything | Method::Required { all_features: true, .. @@ -140,44 +140,13 @@ impl Context { let has_default_feature = summary.features().contains_key("default"); Ok(match self.resolve_features.get(&id) { Some(prev) => { - features.iter().all(|f| prev.contains(f)) + features.is_subset(prev) && (!use_default || prev.contains("default") || !has_default_feature) } None => features.is_empty() && (!use_default || !has_default_feature), }) } - pub fn build_deps( - &mut self, - registry: &mut RegistryQueryer<'_>, - parent: Option<&Summary>, - candidate: &Summary, - method: &Method<'_>, - ) -> ActivateResult> { - // First, figure out our set of dependencies based on the requested set - // of features. This also calculates what features we're going to enable - // for our own dependencies. - let deps = self.resolve_features(parent, candidate, method)?; - - // Next, transform all dependencies into a list of possible candidates - // which can satisfy that dependency. - let mut deps = deps - .into_iter() - .map(|(dep, features)| { - let candidates = registry.query(&dep)?; - Ok((dep, candidates, Rc::new(features))) - }) - .collect::>>()?; - - // Attempt to resolve dependencies with fewer candidates before trying - // dependencies with more candidates. This way if the dependency with - // only one candidate can't be resolved we don't have to do a bunch of - // work before we figure that out. - deps.sort_by_key(|&(_, ref a, _)| a.len()); - - Ok(deps) - } - /// Returns the `ContextAge` of this `Context`. /// For now we use (len of activations) as the age. /// See the `ContextAge` docs for more details. @@ -211,115 +180,6 @@ impl Context { Some(max) } - /// Returns all dependencies and the features we want from them. - pub fn resolve_features<'b>( - &mut self, - parent: Option<&Summary>, - s: &'b Summary, - method: &'b Method<'_>, - ) -> ActivateResult)>> { - let dev_deps = match *method { - Method::Everything => true, - Method::Required { dev_deps, .. } => dev_deps, - }; - - // First, filter by dev-dependencies. - let deps = s.dependencies(); - let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); - - let reqs = build_requirements(s, method)?; - let mut ret = Vec::new(); - let mut used_features = HashSet::new(); - let default_dep = (false, Vec::new()); - - // Next, collect all actually enabled dependencies and their features. - for dep in deps { - // Skip optional dependencies, but not those enabled through a - // feature - if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) { - continue; - } - // So we want this dependency. Move the features we want from - // `feature_deps` to `ret` and register ourselves as using this - // name. 
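With both the requested features and the previously recorded features stored as `BTreeSet`s, the old `features.iter().all(|f| prev.contains(f))` loop in `flag_activated` collapses into a single `is_subset` call. A quick sketch of the equivalence:

```rust
use std::collections::BTreeSet;
use std::rc::Rc;

fn main() {
    // `FeaturesSet` is an Rc<BTreeSet<_>>; with both sides as sorted sets the
    // element-by-element containment check and `is_subset` agree.
    let prev: Rc<BTreeSet<&str>> = Rc::new(["default", "serde"].into_iter().collect());
    let requested: BTreeSet<&str> = ["serde"].into_iter().collect();

    assert!(requested.is_subset(&prev));
    assert_eq!(
        requested.is_subset(&prev),
        requested.iter().all(|f| prev.contains(f))
    );
}
```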
- let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep); - used_features.insert(dep.name_in_toml()); - let always_required = !dep.is_optional() - && !s - .dependencies() - .iter() - .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); - if always_required && base.0 { - return Err(match parent { - None => failure::format_err!( - "Package `{}` does not have feature `{}`. It has a required dependency \ - with that name, but only optional dependencies can be used as features.", - s.package_id(), - dep.name_in_toml() - ) - .into(), - Some(p) => ( - p.package_id(), - ConflictReason::RequiredDependencyAsFeatures(dep.name_in_toml()), - ) - .into(), - }); - } - let mut base = base.1.clone(); - base.extend(dep.features().iter()); - for feature in base.iter() { - if feature.contains('/') { - return Err(failure::format_err!( - "feature names may not contain slashes: `{}`", - feature - ) - .into()); - } - } - ret.push((dep.clone(), base)); - } - - // Any entries in `reqs.dep` which weren't used are bugs in that the - // package does not actually have those dependencies. We classified - // them as dependencies in the first place because there is no such - // feature, either. - let remaining = reqs - .deps - .keys() - .cloned() - .filter(|s| !used_features.contains(s)) - .collect::>(); - if !remaining.is_empty() { - let features = remaining.join(", "); - return Err(match parent { - None => failure::format_err!( - "Package `{}` does not have these features: `{}`", - s.package_id(), - features - ) - .into(), - Some(p) => (p.package_id(), ConflictReason::MissingFeatures(features)).into(), - }); - } - - // Record what list of features is active for this package. - if !reqs.used.is_empty() { - let pkgid = s.package_id(); - - let set = Rc::make_mut( - self.resolve_features - .entry(pkgid) - .or_insert_with(|| Rc::new(HashSet::new())), - ); - - for feature in reqs.used { - set.insert(feature); - } - } - - Ok(ret) - } - pub fn resolve_replacements( &self, registry: &RegistryQueryer<'_>, @@ -346,136 +206,3 @@ impl Context { graph } } - -/// Takes requested features for a single package from the input `Method` and -/// recurses to find all requested features, dependencies and requested -/// dependency features in a `Requirements` object, returning it to the resolver. -fn build_requirements<'a, 'b: 'a>( - s: &'a Summary, - method: &'b Method<'_>, -) -> CargoResult> { - let mut reqs = Requirements::new(s); - - match *method { - Method::Everything - | Method::Required { - all_features: true, .. - } => { - for key in s.features().keys() { - reqs.require_feature(*key)?; - } - for dep in s.dependencies().iter().filter(|d| d.is_optional()) { - reqs.require_dependency(dep.name_in_toml()); - } - } - Method::Required { - all_features: false, - features: requested, - .. - } => { - for &f in requested.iter() { - reqs.require_value(&FeatureValue::new(f, s))?; - } - } - } - match *method { - Method::Everything - | Method::Required { - uses_default_features: true, - .. - } => { - if s.features().contains_key("default") { - reqs.require_feature(InternedString::new("default"))?; - } - } - Method::Required { - uses_default_features: false, - .. - } => {} - } - Ok(reqs) -} - -struct Requirements<'a> { - summary: &'a Summary, - // The deps map is a mapping of package name to list of features enabled. - // Each package should be enabled, and each package should have the - // specified set of features enabled. 
The boolean indicates whether this - // package was specifically requested (rather than just requesting features - // *within* this package). - deps: HashMap)>, - // The used features set is the set of features which this local package had - // enabled, which is later used when compiling to instruct the code what - // features were enabled. - used: HashSet, - visited: HashSet, -} - -impl<'r> Requirements<'r> { - fn new(summary: &Summary) -> Requirements<'_> { - Requirements { - summary, - deps: HashMap::new(), - used: HashSet::new(), - visited: HashSet::new(), - } - } - - fn require_crate_feature(&mut self, package: InternedString, feat: InternedString) { - self.used.insert(package); - self.deps - .entry(package) - .or_insert((false, Vec::new())) - .1 - .push(feat); - } - - fn seen(&mut self, feat: InternedString) -> bool { - if self.visited.insert(feat) { - self.used.insert(feat); - false - } else { - true - } - } - - fn require_dependency(&mut self, pkg: InternedString) { - if self.seen(pkg) { - return; - } - self.deps.entry(pkg).or_insert((false, Vec::new())).0 = true; - } - - fn require_feature(&mut self, feat: InternedString) -> CargoResult<()> { - if feat.is_empty() || self.seen(feat) { - return Ok(()); - } - for fv in self - .summary - .features() - .get(feat.as_str()) - .expect("must be a valid feature") - { - match *fv { - FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => failure::bail!( - "cyclic feature dependency: feature `{}` depends on itself", - feat - ), - _ => {} - } - self.require_value(fv)?; - } - Ok(()) - } - - fn require_value<'f>(&mut self, fv: &'f FeatureValue) -> CargoResult<()> { - match fv { - FeatureValue::Feature(feat) => self.require_feature(*feat)?, - FeatureValue::Crate(dep) => self.require_dependency(*dep), - FeatureValue::CrateFeature(dep, dep_feat) => { - self.require_crate_feature(*dep, *dep_feat) - } - }; - Ok(()) - } -} diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs new file mode 100644 index 00000000000..75f96b7e3d9 --- /dev/null +++ b/src/cargo/core/resolver/dep_cache.rs @@ -0,0 +1,478 @@ +//! There are 2 sources of facts for the resolver: +//! +//! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it. +//! - The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated. +//! +//! These constitute immutable facts, the soled ground truth that all other inference depends on. +//! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only +//! look up things we need to. The compromise is to cache the results as they are computed. +//! +//! This module impl that cache in all the gory details + +use std::cmp::Ordering; +use std::collections::{BTreeSet, HashMap, HashSet}; +use std::rc::Rc; + +use log::debug; + +use crate::core::interning::InternedString; +use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary}; +use crate::util::errors::CargoResult; + +use crate::core::resolver::types::{Candidate, ConflictReason, DepInfo, FeaturesSet}; +use crate::core::resolver::{ActivateResult, Method}; + +pub struct RegistryQueryer<'a> { + pub registry: &'a mut (dyn Registry + 'a), + replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: &'a HashSet, + /// If set the list of dependency candidates will be sorted by minimal + /// versions first. That allows `cargo update -Z minimal-versions` which will + /// specify minimum dependency versions to be used. 
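The module doc above is the heart of the refactor: registry and summary lookups are pure functions of their inputs, so their results can be computed once, cached, and shared behind `Rc`. A minimal sketch of that shape, with invented names standing in for the real registry types:

```rust
use std::collections::HashMap;
use std::rc::Rc;

// The query is pure, so the first answer is computed once and every later
// call hands back a cheap Rc clone of the same result.
struct QueryCache {
    cache: HashMap<String, Rc<Vec<u64>>>,
}

impl QueryCache {
    fn query(&mut self, dep: &str) -> Rc<Vec<u64>> {
        if let Some(hit) = self.cache.get(dep) {
            return Rc::clone(hit);
        }
        let computed = Rc::new(expensive_lookup(dep));
        self.cache.insert(dep.to_string(), Rc::clone(&computed));
        computed
    }
}

// Stand-in for the registry lookup; the real code asks the `Registry`
// for candidate summaries.
fn expensive_lookup(_dep: &str) -> Vec<u64> {
    vec![1, 2, 3]
}

fn main() {
    let mut cache = QueryCache { cache: HashMap::new() };
    let first = cache.query("log");
    let second = cache.query("log");
    // Same allocation both times: the second call never hit the registry.
    assert!(Rc::ptr_eq(&first, &second));
}
```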
+ minimal_versions: bool, + /// a cache of `Candidate`s that fulfil a `Dependency` + registry_cache: HashMap>>, + /// a cache of `Dependency`s that are required for a `Summary` + summary_cache: HashMap< + (Option, Summary, Method), + Rc<(HashSet, Rc>)>, + >, + /// all the cases we ended up using a supplied replacement + used_replacements: HashMap, +} + +impl<'a> RegistryQueryer<'a> { + pub fn new( + registry: &'a mut dyn Registry, + replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: &'a HashSet, + minimal_versions: bool, + ) -> Self { + RegistryQueryer { + registry, + replacements, + try_to_use, + minimal_versions, + registry_cache: HashMap::new(), + summary_cache: HashMap::new(), + used_replacements: HashMap::new(), + } + } + + pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { + self.used_replacements.get(&p).map(|&r| (p, r)) + } + + /// Queries the `registry` to return a list of candidates for `dep`. + /// + /// This method is the location where overrides are taken into account. If + /// any candidates are returned which match an override then the override is + /// applied by performing a second query for what the override should + /// return. + pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { + if let Some(out) = self.registry_cache.get(dep).cloned() { + return Ok(out); + } + + let mut ret = Vec::new(); + self.registry.query( + dep, + &mut |s| { + ret.push(Candidate { + summary: s, + replace: None, + }); + }, + false, + )?; + for candidate in ret.iter_mut() { + let summary = &candidate.summary; + + let mut potential_matches = self + .replacements + .iter() + .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); + + let &(ref spec, ref dep) = match potential_matches.next() { + None => continue, + Some(replacement) => replacement, + }; + debug!( + "found an override for {} {}", + dep.package_name(), + dep.version_req() + ); + + let mut summaries = self.registry.query_vec(dep, false)?.into_iter(); + let s = summaries.next().ok_or_else(|| { + failure::format_err!( + "no matching package for override `{}` found\n\ + location searched: {}\n\ + version required: {}", + spec, + dep.source_id(), + dep.version_req() + ) + })?; + let summaries = summaries.collect::>(); + if !summaries.is_empty() { + let bullets = summaries + .iter() + .map(|s| format!(" * {}", s.package_id())) + .collect::>(); + failure::bail!( + "the replacement specification `{}` matched \ + multiple packages:\n * {}\n{}", + spec, + s.package_id(), + bullets.join("\n") + ); + } + + // The dependency should be hard-coded to have the same name and an + // exact version requirement, so both of these assertions should + // never fail. 
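The override handling in `query` requires the replacement query to produce exactly one summary: zero matches and multiple matches are both hard errors. The same "exactly one" pattern, sketched on plain strings (the error text here is illustrative, not Cargo's exact wording):

```rust
// Take the first candidate, error if there is none, and error again if any
// further candidates remain.
fn exactly_one<I: Iterator<Item = String>>(mut matches: I) -> Result<String, String> {
    let first = matches.next().ok_or_else(|| "no matching package".to_string())?;
    let rest: Vec<String> = matches.collect();
    if !rest.is_empty() {
        return Err(format!(
            "the specification matched multiple packages:\n * {}\n * {}",
            first,
            rest.join("\n * ")
        ));
    }
    Ok(first)
}

fn main() {
    assert!(exactly_one(std::iter::empty()).is_err());
    assert_eq!(exactly_one(["a".to_string()].into_iter()).unwrap(), "a");
    assert!(exactly_one(["a".to_string(), "b".to_string()].into_iter()).is_err());
}
```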
+ assert_eq!(s.version(), summary.version()); + assert_eq!(s.name(), summary.name()); + + let replace = if s.source_id() == summary.source_id() { + debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); + None + } else { + Some(s) + }; + let matched_spec = spec.clone(); + + // Make sure no duplicates + if let Some(&(ref spec, _)) = potential_matches.next() { + failure::bail!( + "overlapping replacement specifications found:\n\n \ + * {}\n * {}\n\nboth specifications match: {}", + matched_spec, + spec, + summary.package_id() + ); + } + + for dep in summary.dependencies() { + debug!("\t{} => {}", dep.package_name(), dep.version_req()); + } + if let Some(r) = &replace { + self.used_replacements + .insert(summary.package_id(), r.package_id()); + } + + candidate.replace = replace; + } + + // When we attempt versions for a package we'll want to do so in a + // sorted fashion to pick the "best candidates" first. Currently we try + // prioritized summaries (those in `try_to_use`) and failing that we + // list everything from the maximum version to the lowest version. + ret.sort_unstable_by(|a, b| { + let a_in_previous = self.try_to_use.contains(&a.summary.package_id()); + let b_in_previous = self.try_to_use.contains(&b.summary.package_id()); + let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse(); + match previous_cmp { + Ordering::Equal => { + let cmp = a.summary.version().cmp(b.summary.version()); + if self.minimal_versions { + // Lower version ordered first. + cmp + } else { + // Higher version ordered first. + cmp.reverse() + } + } + _ => previous_cmp, + } + }); + + let out = Rc::new(ret); + + self.registry_cache.insert(dep.clone(), out.clone()); + + Ok(out) + } + + /// Find out what dependencies will be added by activating `candidate`, + /// with features described in `method`. Then look up in the `registry` + /// the candidates that will fulfil each of these dependencies, as it is the + /// next obvious question. + pub fn build_deps( + &mut self, + parent: Option, + candidate: &Summary, + method: &Method, + ) -> ActivateResult, Rc>)>> { + // if we have calculated a result before, then we can just return it, + // as it is a "pure" query of its arguments. + if let Some(out) = self + .summary_cache + .get(&(parent, candidate.clone(), method.clone())) + .cloned() + { + return Ok(out); + } + // First, figure out our set of dependencies based on the requested set + // of features. This also calculates what features we're going to enable + // for our own dependencies. + let (used_features, deps) = resolve_features(parent, candidate, method)?; + + // Next, transform all dependencies into a list of possible candidates + // which can satisfy that dependency. + let mut deps = deps + .into_iter() + .map(|(dep, features)| { + let candidates = self.query(&dep)?; + Ok((dep, candidates, features)) + }) + .collect::>>()?; + + // Attempt to resolve dependencies with fewer candidates before trying + // dependencies with more candidates. This way if the dependency with + // only one candidate can't be resolved we don't have to do a bunch of + // work before we figure that out. + deps.sort_by_key(|&(_, ref a, _)| a.len()); + + let out = Rc::new((used_features, Rc::new(deps))); + + // If we succeed we add the result to the cache so we can use it again next time. + // We dont cache the failure cases as they dont impl Clone. 
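The comparator above prefers candidates already in `try_to_use` (typically the previous lock file), then orders by version, highest first unless `-Z minimal-versions` flips the order. The same ordering, sketched on bare version numbers:

```rust
use std::cmp::Ordering;
use std::collections::HashSet;

// Previously-used versions first, then highest version first
// (or lowest first under minimal-versions).
fn sort_candidates(candidates: &mut Vec<u32>, try_to_use: &HashSet<u32>, minimal_versions: bool) {
    candidates.sort_unstable_by(|a, b| {
        let a_in_previous = try_to_use.contains(a);
        let b_in_previous = try_to_use.contains(b);
        let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse();
        match previous_cmp {
            Ordering::Equal => {
                let cmp = a.cmp(b);
                if minimal_versions { cmp } else { cmp.reverse() }
            }
            _ => previous_cmp,
        }
    });
}

fn main() {
    let previous: HashSet<u32> = [2].into_iter().collect();
    let mut candidates = vec![1, 2, 3];
    sort_candidates(&mut candidates, &previous, false);
    // The lock-file version wins, then the rest from newest to oldest.
    assert_eq!(candidates, vec![2, 3, 1]);
}
```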
+ self.summary_cache + .insert((parent, candidate.clone(), method.clone()), out.clone()); + + Ok(out) + } +} + +/// Returns the features we ended up using and +/// all dependencies and the features we want from each of them. +pub fn resolve_features<'b>( + parent: Option, + s: &'b Summary, + method: &'b Method, +) -> ActivateResult<(HashSet, Vec<(Dependency, FeaturesSet)>)> { + let dev_deps = match *method { + Method::Everything => true, + Method::Required { dev_deps, .. } => dev_deps, + }; + + // First, filter by dev-dependencies. + let deps = s.dependencies(); + let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); + + let reqs = build_requirements(s, method)?; + let mut ret = Vec::new(); + let mut used_features = HashSet::new(); + let default_dep = (false, BTreeSet::new()); + + // Next, collect all actually enabled dependencies and their features. + for dep in deps { + // Skip optional dependencies, but not those enabled through a + // feature + if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) { + continue; + } + // So we want this dependency. Move the features we want from + // `feature_deps` to `ret` and register ourselves as using this + // name. + let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep); + used_features.insert(dep.name_in_toml()); + let always_required = !dep.is_optional() + && !s + .dependencies() + .iter() + .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml()); + if always_required && base.0 { + return Err(match parent { + None => failure::format_err!( + "Package `{}` does not have feature `{}`. It has a required dependency \ + with that name, but only optional dependencies can be used as features.", + s.package_id(), + dep.name_in_toml() + ) + .into(), + Some(p) => ( + p, + ConflictReason::RequiredDependencyAsFeatures(dep.name_in_toml()), + ) + .into(), + }); + } + let mut base = base.1.clone(); + base.extend(dep.features().iter()); + for feature in base.iter() { + if feature.contains('/') { + return Err(failure::format_err!( + "feature names may not contain slashes: `{}`", + feature + ) + .into()); + } + } + ret.push((dep.clone(), Rc::new(base))); + } + + // Any entries in `reqs.dep` which weren't used are bugs in that the + // package does not actually have those dependencies. We classified + // them as dependencies in the first place because there is no such + // feature, either. + let remaining = reqs + .deps + .keys() + .cloned() + .filter(|s| !used_features.contains(s)) + .collect::>(); + if !remaining.is_empty() { + let features = remaining.join(", "); + return Err(match parent { + None => failure::format_err!( + "Package `{}` does not have these features: `{}`", + s.package_id(), + features + ) + .into(), + Some(p) => (p, ConflictReason::MissingFeatures(features)).into(), + }); + } + + Ok((reqs.into_used(), ret)) +} + +/// Takes requested features for a single package from the input `Method` and +/// recurses to find all requested features, dependencies and requested +/// dependency features in a `Requirements` object, returning it to the resolver. +fn build_requirements<'a, 'b: 'a>( + s: &'a Summary, + method: &'b Method, +) -> CargoResult> { + let mut reqs = Requirements::new(s); + + match method { + Method::Everything + | Method::Required { + all_features: true, .. 
+ } => { + for key in s.features().keys() { + reqs.require_feature(*key)?; + } + for dep in s.dependencies().iter().filter(|d| d.is_optional()) { + reqs.require_dependency(dep.name_in_toml()); + } + } + Method::Required { + all_features: false, + features: requested, + .. + } => { + for &f in requested.iter() { + reqs.require_value(&FeatureValue::new(f, s))?; + } + } + } + match *method { + Method::Everything + | Method::Required { + uses_default_features: true, + .. + } => { + if s.features().contains_key("default") { + reqs.require_feature(InternedString::new("default"))?; + } + } + Method::Required { + uses_default_features: false, + .. + } => {} + } + Ok(reqs) +} + +struct Requirements<'a> { + summary: &'a Summary, + // The deps map is a mapping of package name to list of features enabled. + // Each package should be enabled, and each package should have the + // specified set of features enabled. The boolean indicates whether this + // package was specifically requested (rather than just requesting features + // *within* this package). + deps: HashMap)>, + // The used features set is the set of features which this local package had + // enabled, which is later used when compiling to instruct the code what + // features were enabled. + used: HashSet, + visited: HashSet, +} + +impl Requirements<'_> { + fn new(summary: &Summary) -> Requirements<'_> { + Requirements { + summary, + deps: HashMap::new(), + used: HashSet::new(), + visited: HashSet::new(), + } + } + + fn into_used(self) -> HashSet { + self.used + } + + fn require_crate_feature(&mut self, package: InternedString, feat: InternedString) { + self.used.insert(package); + self.deps + .entry(package) + .or_insert((false, BTreeSet::new())) + .1 + .insert(feat); + } + + fn seen(&mut self, feat: InternedString) -> bool { + if self.visited.insert(feat) { + self.used.insert(feat); + false + } else { + true + } + } + + fn require_dependency(&mut self, pkg: InternedString) { + if self.seen(pkg) { + return; + } + self.deps.entry(pkg).or_insert((false, BTreeSet::new())).0 = true; + } + + fn require_feature(&mut self, feat: InternedString) -> CargoResult<()> { + if feat.is_empty() || self.seen(feat) { + return Ok(()); + } + for fv in self + .summary + .features() + .get(feat.as_str()) + .expect("must be a valid feature") + { + match *fv { + FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => failure::bail!( + "cyclic feature dependency: feature `{}` depends on itself", + feat + ), + _ => {} + } + self.require_value(fv)?; + } + Ok(()) + } + + fn require_value(&mut self, fv: &FeatureValue) -> CargoResult<()> { + match fv { + FeatureValue::Feature(feat) => self.require_feature(*feat)?, + FeatureValue::Crate(dep) => self.require_dependency(*dep), + FeatureValue::CrateFeature(dep, dep_feat) => { + self.require_crate_feature(*dep, *dep_feat) + } + }; + Ok(()) + } +} diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index d186883a658..a6a589da8be 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -54,7 +54,6 @@ use std::time::{Duration, Instant}; use log::{debug, trace}; -use crate::core::interning::InternedString; use crate::core::PackageIdSpec; use crate::core::{Dependency, PackageId, Registry, Summary}; use crate::util::config::Config; @@ -62,8 +61,9 @@ use crate::util::errors::CargoResult; use crate::util::profile; use self::context::{Activations, Context}; +use self::dep_cache::RegistryQueryer; use self::types::{Candidate, ConflictMap, ConflictReason, DepsFrame}; -use 
self::types::{RcVecIter, RegistryQueryer, RemainingDeps, ResolverProgress}; +use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress}; pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; pub use self::encode::{Metadata, WorkspaceResolve}; @@ -73,6 +73,7 @@ pub use self::types::Method; mod conflict_cache; mod context; +mod dep_cache; mod encode; mod errors; mod resolve; @@ -119,7 +120,7 @@ mod types; /// When we have a decision for how to implement is without breaking existing functionality /// this flag can be removed. pub fn resolve( - summaries: &[(Summary, Method<'_>)], + summaries: &[(Summary, Method)], replacements: &[(PackageIdSpec, Dependency)], registry: &mut dyn Registry, try_to_use: &HashSet, @@ -167,7 +168,7 @@ pub fn resolve( fn activate_deps_loop( mut cx: Context, registry: &mut RegistryQueryer<'_>, - summaries: &[(Summary, Method<'_>)], + summaries: &[(Summary, Method)], config: Option<&Config>, ) -> CargoResult { let mut backtrack_stack = Vec::new(); @@ -184,7 +185,7 @@ fn activate_deps_loop( summary: summary.clone(), replace: None, }; - let res = activate(&mut cx, registry, None, candidate, method); + let res = activate(&mut cx, registry, None, candidate, method.clone()); match res { Ok(Some((frame, _))) => remaining_deps.push(frame), Ok(None) => (), @@ -372,7 +373,7 @@ fn activate_deps_loop( let pid = candidate.summary.package_id(); let method = Method::Required { dev_deps: false, - features: &features, + features: Rc::clone(&features), all_features: false, uses_default_features: dep.uses_default_features(), }; @@ -383,7 +384,7 @@ fn activate_deps_loop( dep.package_name(), candidate.summary.version() ); - let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, &method); + let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, method); let successfully_activated = match res { // Success! We've now activated our `candidate` in our context @@ -595,7 +596,7 @@ fn activate( registry: &mut RegistryQueryer<'_>, parent: Option<(&Summary, &Dependency)>, candidate: Candidate, - method: &Method<'_>, + method: Method, ) -> ActivateResult> { let candidate_pid = candidate.summary.package_id(); if let Some((parent, dep)) = parent { @@ -656,11 +657,11 @@ fn activate( } } - let activated = cx.flag_activated(&candidate.summary, method)?; + let activated = cx.flag_activated(&candidate.summary, &method)?; let candidate = match candidate.replace { Some(replace) => { - if cx.flag_activated(&replace, method)? && activated { + if cx.flag_activated(&replace, &method)? && activated { return Ok(None); } trace!( @@ -680,11 +681,23 @@ fn activate( }; let now = Instant::now(); - let deps = cx.build_deps(registry, parent.map(|p| p.0), &candidate, method)?; + let (used_features, deps) = + &*registry.build_deps(parent.map(|p| p.0.package_id()), &candidate, &method)?; + + // Record what list of features is active for this package. 
+ if !used_features.is_empty() { + Rc::make_mut( + cx.resolve_features + .entry(candidate.package_id()) + .or_insert_with(Rc::default), + ) + .extend(used_features); + } + let frame = DepsFrame { parent: candidate, just_for_error_messages: false, - remaining_siblings: RcVecIter::new(Rc::new(deps)), + remaining_siblings: RcVecIter::new(Rc::clone(deps)), }; Ok(Some((frame, now.elapsed()))) } @@ -697,7 +710,7 @@ struct BacktrackFrame { remaining_candidates: RemainingCandidates, parent: Summary, dep: Dependency, - features: Rc>, + features: FeaturesSet, conflicting_activations: ConflictMap, } @@ -883,7 +896,7 @@ fn generalize_conflicting( // Thus, if all the things it can resolve to have already ben determined // to be conflicting, then we can just say that we conflict with the parent. if registry - .query(&critical_parents_dep) + .query(critical_parents_dep) .expect("an already used dep now error!?") .iter() .rev() // the last one to be tried is the least likely to be in the cache, so start with that. @@ -894,13 +907,13 @@ fn generalize_conflicting( other.summary.package_id(), backtrack_critical_reason.clone(), ); - past_conflicting_activations.contains(&dep, &con) + past_conflicting_activations.contains(dep, &con) }) { let mut con = conflicting_activations.clone(); con.remove(&backtrack_critical_id); con.insert(*critical_parent, backtrack_critical_reason); - past_conflicting_activations.insert(&dep, &con); + past_conflicting_activations.insert(dep, &con); return Some(con); } } diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index dc12cbc0566..1a5eea551ce 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -1,13 +1,11 @@ use std::cmp::Ordering; -use std::collections::{BTreeMap, HashMap, HashSet}; +use std::collections::{BTreeMap, BTreeSet}; use std::ops::Range; use std::rc::Rc; use std::time::{Duration, Instant}; -use log::debug; - use crate::core::interning::InternedString; -use crate::core::{Dependency, PackageId, PackageIdSpec, Registry, Summary}; +use crate::core::{Dependency, PackageId, Summary}; use crate::util::errors::CargoResult; use crate::util::Config; @@ -91,205 +89,36 @@ impl ResolverProgress { } } -pub struct RegistryQueryer<'a> { - pub registry: &'a mut (dyn Registry + 'a), - replacements: &'a [(PackageIdSpec, Dependency)], - try_to_use: &'a HashSet, - // If set the list of dependency candidates will be sorted by minimal - // versions first. That allows `cargo update -Z minimal-versions` which will - // specify minimum dependency versions to be used. - minimal_versions: bool, - cache: HashMap>>, - used_replacements: HashMap, -} - -impl<'a> RegistryQueryer<'a> { - pub fn new( - registry: &'a mut dyn Registry, - replacements: &'a [(PackageIdSpec, Dependency)], - try_to_use: &'a HashSet, - minimal_versions: bool, - ) -> Self { - RegistryQueryer { - registry, - replacements, - try_to_use, - minimal_versions, - cache: HashMap::new(), - used_replacements: HashMap::new(), - } - } - - pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { - self.used_replacements.get(&p).map(|&r| (p, r)) - } - - /// Queries the `registry` to return a list of candidates for `dep`. - /// - /// This method is the location where overrides are taken into account. If - /// any candidates are returned which match an override then the override is - /// applied by performing a second query for what the override should - /// return. 
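Recording the active features goes through `Rc::make_mut`, i.e. copy-on-write: the shared set is cloned only if another holder (for example a cloned `Context` kept in a backtrack frame) still points at it. A small sketch of that behaviour:

```rust
use std::collections::BTreeSet;
use std::rc::Rc;

fn main() {
    // Feature sets are shared as Rc<BTreeSet<_>> across cloned contexts.
    let mut active: Rc<BTreeSet<&str>> = Rc::default();
    let snapshot = Rc::clone(&active); // e.g. an older state kept for backtracking

    // `make_mut` clones the set here because `snapshot` still shares it,
    // then hands back a mutable reference to the private copy.
    Rc::make_mut(&mut active).extend(["serde", "derive"]);

    assert!(active.contains("serde"));
    assert!(snapshot.is_empty()); // the shared snapshot was left untouched
}
```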
- pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { - if let Some(out) = self.cache.get(dep).cloned() { - return Ok(out); - } - - let mut ret = Vec::new(); - self.registry.query( - dep, - &mut |s| { - ret.push(Candidate { - summary: s, - replace: None, - }); - }, - false, - )?; - for candidate in ret.iter_mut() { - let summary = &candidate.summary; - - let mut potential_matches = self - .replacements - .iter() - .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); - - let &(ref spec, ref dep) = match potential_matches.next() { - None => continue, - Some(replacement) => replacement, - }; - debug!( - "found an override for {} {}", - dep.package_name(), - dep.version_req() - ); - - let mut summaries = self.registry.query_vec(dep, false)?.into_iter(); - let s = summaries.next().ok_or_else(|| { - failure::format_err!( - "no matching package for override `{}` found\n\ - location searched: {}\n\ - version required: {}", - spec, - dep.source_id(), - dep.version_req() - ) - })?; - let summaries = summaries.collect::>(); - if !summaries.is_empty() { - let bullets = summaries - .iter() - .map(|s| format!(" * {}", s.package_id())) - .collect::>(); - failure::bail!( - "the replacement specification `{}` matched \ - multiple packages:\n * {}\n{}", - spec, - s.package_id(), - bullets.join("\n") - ); - } - - // The dependency should be hard-coded to have the same name and an - // exact version requirement, so both of these assertions should - // never fail. - assert_eq!(s.version(), summary.version()); - assert_eq!(s.name(), summary.name()); - - let replace = if s.source_id() == summary.source_id() { - debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); - None - } else { - Some(s) - }; - let matched_spec = spec.clone(); - - // Make sure no duplicates - if let Some(&(ref spec, _)) = potential_matches.next() { - failure::bail!( - "overlapping replacement specifications found:\n\n \ - * {}\n * {}\n\nboth specifications match: {}", - matched_spec, - spec, - summary.package_id() - ); - } - - for dep in summary.dependencies() { - debug!("\t{} => {}", dep.package_name(), dep.version_req()); - } - if let Some(r) = &replace { - if let Some(old) = self - .used_replacements - .insert(summary.package_id(), r.package_id()) - { - debug_assert_eq!( - r.package_id(), - old, - "we are inconsistent about witch replacement is used for {:?}, \ - one time we used {:?}, \ - now we are adding {:?}.", - summary.package_id(), - old, - r.package_id() - ) - } - } - - candidate.replace = replace; - } - - // When we attempt versions for a package we'll want to do so in a - // sorted fashion to pick the "best candidates" first. Currently we try - // prioritized summaries (those in `try_to_use`) and failing that we - // list everything from the maximum version to the lowest version. - ret.sort_unstable_by(|a, b| { - let a_in_previous = self.try_to_use.contains(&a.summary.package_id()); - let b_in_previous = self.try_to_use.contains(&b.summary.package_id()); - let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse(); - match previous_cmp { - Ordering::Equal => { - let cmp = a.summary.version().cmp(b.summary.version()); - if self.minimal_versions { - // Lower version ordered first. - cmp - } else { - // Higher version ordered first. 
- cmp.reverse() - } - } - _ => previous_cmp, - } - }); - - let out = Rc::new(ret); - - self.cache.insert(dep.clone(), out.clone()); - - Ok(out) - } -} - -#[derive(Clone, Copy)] -pub enum Method<'a> { +/// The preferred way to store the set of activated features for a package. +/// This is sorted so that it impls Hash, and owns its contents, +/// needed so it can be part of the key for caching in the `DepsCache`. +/// It is also cloned often as part of `Context`, hence the `RC`. +/// `im-rs::OrdSet` was slower of small sets like this, +/// but this can change with improvements to std, im, or llvm. +/// Using a consistent type for this allows us to use the highly +/// optimized comparison operators like `is_subset` at the interfaces. +pub type FeaturesSet = Rc>; + +#[derive(Clone, Eq, PartialEq, Hash)] +pub enum Method { Everything, // equivalent to Required { dev_deps: true, all_features: true, .. } Required { dev_deps: bool, - features: &'a [InternedString], + features: FeaturesSet, all_features: bool, uses_default_features: bool, }, } -impl<'r> Method<'r> { - pub fn split_features(features: &[String]) -> Vec { +impl Method { + pub fn split_features(features: &[String]) -> BTreeSet { features .iter() .flat_map(|s| s.split_whitespace()) .flat_map(|s| s.split(',')) .filter(|s| !s.is_empty()) - .map(|s| InternedString::new(s)) - .collect::>() + .map(InternedString::new) + .collect::>() } } @@ -399,10 +228,10 @@ impl RemainingDeps { } } -// Information about the dependencies for a crate, a tuple of: -// -// (dependency info, candidates, features activated) -pub type DepInfo = (Dependency, Rc>, Rc>); +/// Information about the dependencies for a crate, a tuple of: +/// +/// (dependency info, candidates, features activated) +pub type DepInfo = (Dependency, Rc>, FeaturesSet); /// All possible reasons that a package might fail to activate. /// diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index 0ec78f46c67..fb52b9179d7 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -1,6 +1,7 @@ use std::borrow::Borrow; use std::collections::{BTreeMap, HashMap}; use std::fmt::Display; +use std::hash::{Hash, Hasher}; use std::mem; use std::rc::Rc; @@ -137,6 +138,14 @@ impl PartialEq for Summary { } } +impl Eq for Summary {} + +impl Hash for Summary { + fn hash(&self, state: &mut H) { + self.inner.package_id.hash(state); + } +} + // Checks features for errors, bailing out a CargoResult:Err if invalid, // and creates FeatureValues for each feature. 
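The `FeaturesSet` alias above picks `BTreeSet` deliberately: unlike `HashSet`, a `BTreeSet` implements `Hash` (and `Ord`), so an `Rc<BTreeSet<_>>` can sit inside the `summary_cache` key, and the same need for hashable keys motivates the new `Hash`-by-`package_id` impl for `Summary`. A sketch of the cache-key angle:

```rust
use std::collections::{BTreeSet, HashMap};
use std::rc::Rc;

// BTreeSet<T: Hash> implements Hash because its iteration order is
// deterministic; std's HashSet has no Hash impl at all.
fn main() {
    type FeaturesSet = Rc<BTreeSet<&'static str>>;

    let features: FeaturesSet = Rc::new(["default", "serde"].into_iter().collect());
    let mut cache: HashMap<(&'static str, FeaturesSet), u32> = HashMap::new();
    cache.insert(("log", Rc::clone(&features)), 1);

    // Equal sets hash equally regardless of how they were built.
    let same: FeaturesSet = Rc::new(["serde", "default"].into_iter().collect());
    assert_eq!(cache.get(&("log", same)), Some(&1));
}
```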
fn build_feature_map( diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 3d1719dfc34..ffe7cef5557 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -23,6 +23,7 @@ use std::collections::{BTreeSet, HashMap, HashSet}; use std::iter::FromIterator; use std::path::PathBuf; +use std::rc::Rc; use std::sync::Arc; use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context}; @@ -299,7 +300,7 @@ pub fn compile_ws<'a>( let features = Method::split_features(features); let method = Method::Required { dev_deps: ws.require_optional_deps() || filter.need_dev_deps(build_config.mode), - features: &features, + features: Rc::new(features), all_features, uses_default_features: !no_default_features, }; diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index c48354c7199..352f3712ff8 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -461,10 +461,7 @@ fn install_one( let mut pkg_map = BTreeMap::new(); for (bin_name, opt_pkg_id) in &duplicates { let key = opt_pkg_id.map_or_else(|| "unknown".to_string(), |pkg_id| pkg_id.to_string()); - pkg_map - .entry(key) - .or_insert_with(|| Vec::new()) - .push(bin_name); + pkg_map.entry(key).or_insert_with(Vec::new).push(bin_name); } for (pkg_descr, bin_names) in &pkg_map { config.shell().status( diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 48acfeaab35..b03168f1922 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -1,4 +1,5 @@ use std::collections::HashSet; +use std::rc::Rc; use log::{debug, trace}; @@ -43,7 +44,7 @@ pub fn resolve_ws_precisely<'a>( } else { Method::Required { dev_deps: true, - features: &features, + features: Rc::new(features), all_features: false, uses_default_features: !no_default_features, } @@ -53,7 +54,7 @@ pub fn resolve_ws_precisely<'a>( pub fn resolve_ws_with_method<'a>( ws: &Workspace<'a>, - method: Method<'_>, + method: Method, specs: &[PackageIdSpec], ) -> CargoResult<(PackageSet<'a>, Resolve)> { let mut registry = PackageRegistry::new(ws.config())?; @@ -138,7 +139,7 @@ fn resolve_with_registry<'cfg>( pub fn resolve_with_previous<'cfg>( registry: &mut PackageRegistry<'cfg>, ws: &Workspace<'cfg>, - method: Method<'_>, + method: Method, previous: Option<&Resolve>, to_avoid: Option<&HashSet>, specs: &[PackageIdSpec], @@ -222,7 +223,7 @@ pub fn resolve_with_previous<'cfg>( let mut summaries = Vec::new(); if ws.config().cli_unstable().package_features { let mut members = Vec::new(); - match method { + match &method { Method::Everything => members.extend(ws.members()), Method::Required { features, @@ -241,7 +242,7 @@ pub fn resolve_with_previous<'cfg>( // of current workspace. Add all packages from workspace to get `foo` // into the resolution graph. 
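`Method::Required` now owns its features as an `Rc<BTreeSet<_>>` built by `Method::split_features`, which accepts both space- and comma-separated `--features` values and deduplicates them. The same splitting, sketched standalone with owned `String`s:

```rust
use std::collections::BTreeSet;

// Mirror of the splitting `Method::split_features` performs: feature lists
// may be space- or comma-separated and may repeat entries.
fn split_features(features: &[String]) -> BTreeSet<String> {
    features
        .iter()
        .flat_map(|s| s.split_whitespace())
        .flat_map(|s| s.split(','))
        .filter(|s| !s.is_empty())
        .map(|s| s.to_string())
        .collect()
}

fn main() {
    let cli = vec!["serde json,derive".to_string(), "derive,".to_string()];
    let set = split_features(&cli);
    // Deduplicated and in a stable (sorted) order, which is what makes the
    // resulting set usable inside a hashable `Method`.
    assert_eq!(
        set.into_iter().collect::<Vec<_>>(),
        vec!["derive", "json", "serde"]
    );
}
```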
if members.is_empty() { - if !(features.is_empty() && !all_features && uses_default_features) { + if !(features.is_empty() && !all_features && *uses_default_features) { failure::bail!("cannot specify features for packages outside of workspace"); } members.extend(ws.members()); @@ -250,7 +251,7 @@ pub fn resolve_with_previous<'cfg>( } for member in members { let summary = registry.lock(member.summary().clone()); - summaries.push((summary, method)) + summaries.push((summary, method.clone())) } } else { for member in ws.members() { @@ -281,13 +282,13 @@ pub fn resolve_with_previous<'cfg>( } => { let base = Method::Required { dev_deps, - features: &[], + features: Rc::default(), all_features, uses_default_features: true, }; let member_id = member.package_id(); match ws.current_opt() { - Some(current) if member_id == current.package_id() => method, + Some(current) if member_id == current.package_id() => method.clone(), _ => { if specs.iter().any(|spec| spec.matches(member_id)) { base diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs index 66f95d1d397..46078c5a3b0 100644 --- a/src/cargo/sources/registry/index.rs +++ b/src/cargo/sources/registry/index.rs @@ -269,10 +269,10 @@ impl<'cfg> RegistryIndex<'cfg> { yanked_whitelist: &HashSet, f: &mut dyn FnMut(Summary), ) -> CargoResult<()> { - if self.config.cli_unstable().offline { - if self.query_inner_with_online(dep, load, yanked_whitelist, f, false)? != 0 { - return Ok(()); - } + if self.config.cli_unstable().offline + && self.query_inner_with_online(dep, load, yanked_whitelist, f, false)? != 0 + { + return Ok(()); // If offline, and there are no matches, try again with online. // This is necessary for dependencies that are not used (such as // target-cfg or optional), but are not downloaded. Normally the diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs index be2baca7ae3..06e6b9d9816 100644 --- a/src/cargo/util/toml/mod.rs +++ b/src/cargo/util/toml/mod.rs @@ -1201,7 +1201,7 @@ impl TomlManifest { ); } - let mut dep = replacement.to_dependency(spec.name(), cx, None)?; + let mut dep = replacement.to_dependency(spec.name().as_str(), cx, None)?; { let version = spec.version().ok_or_else(|| { failure::format_err!(
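For the `index.rs` hunk above, the collapsed condition keeps the behaviour the comment describes: in offline mode, query the local data first and fall back to an online query only when nothing matched. A stand-in sketch of that control flow (function names and data here are invented, not the real `RegistryIndex` API):

```rust
// Try the local (offline) data first; if it has no matches, fall through to
// the online query so undownloaded dependencies still resolve.
fn query(dep: &str, offline: bool) -> Vec<&'static str> {
    if offline {
        let local = query_inner(dep, false /* online */);
        if !local.is_empty() {
            return local;
        }
        // No offline matches: retry with the online index.
    }
    query_inner(dep, true /* online */)
}

// Stand-in lookup: pretend only the online index knows about "rare-crate".
fn query_inner(dep: &str, online: bool) -> Vec<&'static str> {
    match (dep, online) {
        ("log", _) => vec!["log 0.4.8"],
        ("rare-crate", true) => vec!["rare-crate 1.0.0"],
        _ => vec![],
    }
}

fn main() {
    assert_eq!(query("log", true), vec!["log 0.4.8"]);
    assert_eq!(query("rare-crate", true), vec!["rare-crate 1.0.0"]);
}
```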