diff --git a/src/cargo/core/mod.rs b/src/cargo/core/mod.rs index b0b0895f0ef..aa2c75d1a6c 100644 --- a/src/cargo/core/mod.rs +++ b/src/cargo/core/mod.rs @@ -6,7 +6,7 @@ pub use self::package_id_spec::PackageIdSpec; pub use self::registry::Registry; pub use self::resolver::Resolve; pub use self::shell::{Shell, MultiShell, ShellConfig, Verbosity, ColorConfig}; -pub use self::source::{Source, SourceId, SourceMap, SourceSet, GitReference}; +pub use self::source::{Source, SourceId, SourceMap, GitReference}; pub use self::summary::Summary; pub mod source; diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 9aa3c86699c..c3ec0dd6183 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -1,15 +1,16 @@ +use std::cell::{Ref, RefCell}; use std::collections::HashMap; use std::fmt; use std::hash; -use std::slice; use std::path::{Path, PathBuf}; + use semver::Version; -use core::{Dependency, Manifest, PackageId, SourceId, Registry, Target, Summary, Metadata}; +use core::{Dependency, Manifest, PackageId, SourceId, Target}; +use core::{Summary, Metadata, SourceMap}; use ops; -use util::{CargoResult, graph, Config}; +use util::{CargoResult, Config, LazyCell, ChainError, internal, human}; use rustc_serialize::{Encoder,Encodable}; -use core::source::Source; /// Information about a package that is available somewhere in the file system. 
/// @@ -118,78 +119,46 @@ impl hash::Hash for Package { } } -#[derive(PartialEq,Clone,Debug)] -pub struct PackageSet { - packages: Vec<Package>, +pub struct PackageSet<'cfg> { + packages: Vec<(PackageId, LazyCell<Package>)>, + sources: RefCell<SourceMap<'cfg>>, } -impl PackageSet { - pub fn new(packages: &[Package]) -> PackageSet { - //assert!(packages.len() > 0, - // "PackageSet must be created with at least one package") - PackageSet { packages: packages.to_vec() } - } - - pub fn is_empty(&self) -> bool { - self.packages.is_empty() - } - - pub fn len(&self) -> usize { - self.packages.len() - } - - pub fn pop(&mut self) -> Package { - self.packages.pop().expect("PackageSet.pop: empty set") - } - - /// Get a package by name out of the set - pub fn get(&self, name: &str) -> &Package { - self.packages.iter().find(|pkg| name == pkg.name()) - .expect("PackageSet.get: empty set") - } - - pub fn get_all(&self, names: &[&str]) -> Vec<&Package> { - names.iter().map(|name| self.get(*name) ).collect() - } - - pub fn packages(&self) -> &[Package] { &self.packages } - - // For now, assume that the package set contains only one package with a - // given name - pub fn sort(&self) -> Option<PackageSet> { - let mut graph = graph::Graph::new(); - - for pkg in self.packages.iter() { - let deps: Vec<&str> = pkg.dependencies().iter() - .map(|dep| dep.name()) - .collect(); - - graph.add(pkg.name(), &deps); +impl<'cfg> PackageSet<'cfg> { + pub fn new(package_ids: &[PackageId], + sources: SourceMap<'cfg>) -> PackageSet<'cfg> { + PackageSet { + packages: package_ids.iter().map(|id| { + (id.clone(), LazyCell::new(None)) + }).collect(), + sources: RefCell::new(sources), } - - let pkgs = match graph.sort() { - Some(pkgs) => pkgs, - None => return None, - }; - let pkgs = pkgs.iter().map(|name| { - self.get(*name).clone() - }).collect(); - - Some(PackageSet { - packages: pkgs - }) } - pub fn iter(&self) -> slice::Iter<Package> { - self.packages.iter() + pub fn package_ids<'a>(&'a self) -> Box<Iterator<Item=&'a PackageId> + 'a> { + Box::new(self.packages.iter().map(|&(ref p, 
 _)| p)) } -} -impl Registry for PackageSet { - fn query(&mut self, name: &Dependency) -> CargoResult<Vec<Summary>> { - Ok(self.packages.iter() - .filter(|pkg| name.name() == pkg.name()) - .map(|pkg| pkg.summary().clone()) - .collect()) + pub fn get(&self, id: &PackageId) -> CargoResult<&Package> { + let slot = try!(self.packages.iter().find(|p| p.0 == *id).chain_error(|| { + internal(format!("couldn't find `{}` in package set", id)) + })); + let slot = &slot.1; + if let Some(pkg) = slot.borrow() { + return Ok(pkg) + } + let mut sources = self.sources.borrow_mut(); + let source = try!(sources.get_mut(id.source_id()).chain_error(|| { + internal(format!("couldn't find source for `{}`", id)) + })); + let pkg = try!(source.download(id).chain_error(|| { + human("unable to get packages from source") + })); + assert!(slot.fill(pkg).is_ok()); + Ok(slot.borrow().unwrap()) + } + + pub fn sources(&self) -> Ref<SourceMap<'cfg>> { + self.sources.borrow() + } } diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index fc994aeaf57..f744f4937be 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -1,7 +1,7 @@ -use std::collections::HashSet; -use std::collections::hash_map::HashMap; +use std::collections::{HashSet, HashMap}; use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package}; +use core::PackageSet; use util::{CargoResult, ChainError, Config, human, profile}; /// Source of information about a group of packages. 
@@ -85,30 +85,9 @@ impl<'cfg> PackageRegistry<'cfg> { } } - pub fn get(&mut self, package_ids: &[PackageId]) -> CargoResult> { + pub fn get(self, package_ids: &[PackageId]) -> PackageSet<'cfg> { trace!("getting packages; sources={}", self.sources.len()); - - // TODO: Only call source with package ID if the package came from the - // source - let mut ret = Vec::new(); - - for (_, source) in self.sources.sources_mut() { - try!(source.download(package_ids)); - let packages = try!(source.get(package_ids)); - - ret.extend(packages.into_iter()); - } - - // TODO: Return earlier if fail - assert!(package_ids.len() == ret.len(), - "could not get packages from registry; ids={:?}; ret={:?}", - package_ids, ret); - - Ok(ret) - } - - pub fn move_sources(self) -> SourceMap<'cfg> { - self.sources + PackageSet::new(package_ids, self.sources) } fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> { diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs index 74446f0eb68..64779af3731 100644 --- a/src/cargo/core/source.rs +++ b/src/cargo/core/source.rs @@ -9,7 +9,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use url::Url; -use core::{Summary, Package, PackageId, Registry, Dependency}; +use core::{Package, PackageId, Registry}; use sources::{PathSource, GitSource, RegistrySource}; use sources::git; use util::{human, Config, CargoResult, ToUrl}; @@ -24,13 +24,7 @@ pub trait Source: Registry { /// The download method fetches the full package for each name and /// version specified. - fn download(&mut self, packages: &[PackageId]) -> CargoResult<()>; - - /// The get method returns the Path of each specified package on the - /// local file system. It assumes that `download` was already called, - /// and that the packages are already locally available on the file - /// system. 
- fn get(&self, packages: &[PackageId]) -> CargoResult>; + fn download(&mut self, package: &PackageId) -> CargoResult; /// Generates a unique string which represents the fingerprint of the /// current state of the source. @@ -443,65 +437,6 @@ impl<'a, 'src> Iterator for SourcesMut<'a, 'src> { } } -/// List of `Source` implementors. `SourceSet` itself implements `Source`. -pub struct SourceSet<'src> { - sources: Vec> -} - -impl<'src> SourceSet<'src> { - pub fn new(sources: Vec>) -> SourceSet<'src> { - SourceSet { sources: sources } - } -} - -impl<'src> Registry for SourceSet<'src> { - fn query(&mut self, name: &Dependency) -> CargoResult> { - let mut ret = Vec::new(); - - for source in self.sources.iter_mut() { - ret.extend(try!(source.query(name)).into_iter()); - } - - Ok(ret) - } -} - -impl<'src> Source for SourceSet<'src> { - fn update(&mut self) -> CargoResult<()> { - for source in self.sources.iter_mut() { - try!(source.update()); - } - - Ok(()) - } - - fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> { - for source in self.sources.iter_mut() { - try!(source.download(packages)); - } - - Ok(()) - } - - fn get(&self, packages: &[PackageId]) -> CargoResult> { - let mut ret = Vec::new(); - - for source in self.sources.iter() { - ret.extend(try!(source.get(packages)).into_iter()); - } - - Ok(ret) - } - - fn fingerprint(&self, id: &Package) -> CargoResult { - let mut ret = String::new(); - for source in self.sources.iter() { - ret.push_str(&try!(source.fingerprint(id))[..]); - } - Ok(ret) - } -} - #[cfg(test)] mod tests { use super::{SourceId, Kind, GitReference}; diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index b14b4025de7..c4918f9c352 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -2,9 +2,8 @@ use std::default::Default; use std::fs; use std::path::Path; -use core::{Package, PackageSet, Profiles}; -use core::source::{Source, SourceMap}; -use core::registry::PackageRegistry; +use 
core::{Package, Profiles}; +use core::source::Source; use util::{CargoResult, human, ChainError, Config}; use ops::{self, Layout, Context, BuildConfig, Kind, Unit}; @@ -26,18 +25,7 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { return rm_rf(&target_dir); } - // Load the lockfile (if one's available) - let lockfile = root.root().join("Cargo.lock"); - let source_id = root.package_id().source_id(); - let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) { - Some(resolve) => resolve, - None => bail!("a Cargo.lock must exist before cleaning") - }; - - // Create a compilation context to have access to information like target - // filenames and such - let srcs = SourceMap::new(); - let pkgs = PackageSet::new(&[]); + let (resolve, packages) = try!(ops::fetch(manifest_path, opts.config)); let dest = if opts.release {"release"} else {"debug"}; let host_layout = Layout::new(opts.config, &root, None, dest); @@ -45,22 +33,16 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { Layout::new(opts.config, &root, Some(target), dest) }); - let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config, + let cx = try!(Context::new(&resolve, &packages, opts.config, host_layout, target_layout, BuildConfig::default(), root.manifest().profiles())); - let mut registry = PackageRegistry::new(opts.config); - // resolve package specs and remove the corresponding packages for spec in opts.spec { + // Translate the spec to a Package let pkgid = try!(resolve.query(spec)); - - // Translate the PackageId to a Package - let pkg = { - try!(registry.add_sources(&[pkgid.source_id().clone()])); - (try!(registry.get(&[pkgid.clone()]))).into_iter().next().unwrap() - }; + let pkg = try!(packages.get(&pkgid)); // And finally, clean everything out! 
for target in pkg.targets() { diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 8aa6b987817..a4ef08528be 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -28,7 +28,7 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use core::registry::PackageRegistry; -use core::{Source, SourceId, SourceMap, PackageSet, Package, Target}; +use core::{Source, SourceId, PackageSet, Package, Target}; use core::{Profile, TargetKind, Profiles}; use core::resolver::{Method, Resolve}; use ops::{self, BuildOutput, ExecEngine}; @@ -102,7 +102,7 @@ pub fn resolve_dependencies<'a>(root_package: &Package, source: Option>, features: Vec, no_default_features: bool) - -> CargoResult<(Vec, Resolve, SourceMap<'a>)> { + -> CargoResult<(PackageSet<'a>, Resolve)> { let override_ids = try!(source_ids_from_config(config, root_package.root())); @@ -133,10 +133,10 @@ pub fn resolve_dependencies<'a>(root_package: &Package, try!(ops::resolve_with_previous(&mut registry, root_package, method, Some(&resolve), None)); - let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, - &mut registry)); + let packages = ops::get_resolved_packages(&resolved_with_overrides, + registry); - Ok((packages, resolved_with_overrides, registry.move_sources())) + Ok((packages, resolved_with_overrides)) } pub fn compile_pkg<'a>(root_package: &Package, @@ -158,7 +158,7 @@ pub fn compile_pkg<'a>(root_package: &Package, bail!("jobs must be at least 1") } - let (packages, resolve_with_overrides, sources) = { + let (packages, resolve_with_overrides) = { try!(resolve_dependencies(root_package, config, source, features, no_default_features)) }; @@ -180,8 +180,9 @@ pub fn compile_pkg<'a>(root_package: &Package, invalid_spec.join(", ")) } - let to_builds = packages.iter().filter(|p| pkgids.contains(&p.package_id())) - .collect::>(); + let to_builds = try!(pkgids.iter().map(|id| { + packages.get(id) + }).collect::>>()); let mut general_targets = 
Vec::new(); let mut package_targets = Vec::new(); @@ -245,9 +246,8 @@ pub fn compile_pkg<'a>(root_package: &Package, } try!(ops::compile_targets(&package_targets, - &PackageSet::new(&packages), + &packages, &resolve_with_overrides, - &sources, config, build_config, root_package.manifest().profiles(), diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index 23267578fa7..0617a68bfb0 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -1,23 +1,27 @@ use std::path::Path; use core::registry::PackageRegistry; -use core::{Package, PackageId, Resolve}; +use core::{Package, PackageId, Resolve, PackageSet}; use ops; -use util::{CargoResult, Config, human, ChainError}; +use util::{CargoResult, Config}; /// Executes `cargo fetch`. -pub fn fetch(manifest_path: &Path, config: &Config) -> CargoResult<()> { +pub fn fetch<'a>(manifest_path: &Path, + config: &'a Config) + -> CargoResult<(Resolve, PackageSet<'a>)> { let package = try!(Package::for_path(manifest_path, config)); let mut registry = PackageRegistry::new(config); let resolve = try!(ops::resolve_pkg(&mut registry, &package)); - let _ = try!(get_resolved_packages(&resolve, &mut registry)); - Ok(()) + let packages = get_resolved_packages(&resolve, registry); + for id in resolve.iter() { + try!(packages.get(id)); + } + Ok((resolve, packages)) } -pub fn get_resolved_packages(resolve: &Resolve, registry: &mut PackageRegistry) - -> CargoResult> { +pub fn get_resolved_packages<'a>(resolve: &Resolve, + registry: PackageRegistry<'a>) + -> PackageSet<'a> { let ids: Vec = resolve.iter().cloned().collect(); - registry.get(&ids).chain_error(|| { - human("unable to get packages from source") - }) + registry.get(&ids) } diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index bbd12c25611..666c75aa887 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -137,9 +137,8 @@ fn select_pkg<'a, T>(mut source: T, let deps = 
try!(source.query(&dep)); match deps.iter().map(|p| p.package_id()).max() { Some(pkgid) => { - try!(source.download(&[pkgid.clone()])); - Ok((try!(source.get(&[pkgid.clone()])).remove(0), - Box::new(source))) + let pkg = try!(source.download(pkgid)); + Ok((pkg, Box::new(source))) } None => { let vers_info = vers.map(|v| format!(" with version `{}`", v)) diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index 17b9656f01f..7a037828d03 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -3,7 +3,7 @@ use std::path::Path; use rustc_serialize::{Encodable, Encoder}; use core::resolver::Resolve; -use core::{Source, Package, PackageId}; +use core::{Source, Package, PackageId, PackageSet}; use ops; use sources::PathSource; use util::config::Config; @@ -51,6 +51,10 @@ fn metadata_full(opt: OutputMetadataOptions, config: &Config) -> CargoResult, - no_default_features: bool) - -> CargoResult<(Vec, Resolve)> { +fn resolve_dependencies<'a>(manifest: &Path, + config: &'a Config, + features: Vec, + no_default_features: bool) + -> CargoResult<(PackageSet<'a>, Resolve)> { let mut source = try!(PathSource::for_path(manifest.parent().unwrap(), config)); try!(source.update()); let package = try!(source.root_package()); - let deps = try!(ops::resolve_dependencies(&package, - config, - Some(Box::new(source)), - features, - no_default_features)); - - let (packages, resolve_with_overrides, _) = deps; - - Ok((packages, resolve_with_overrides)) + ops::resolve_dependencies(&package, + config, + Some(Box::new(source)), + features, + no_default_features) } diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs index 22025b1785b..8d88ac384cc 100644 --- a/src/cargo/ops/cargo_rustc/context.rs +++ b/src/cargo/ops/cargo_rustc/context.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use regex::Regex; -use core::{SourceMap, Package, PackageId, PackageSet, Resolve, Target, Profile}; +use 
core::{Package, PackageId, PackageSet, Resolve, Target, Profile}; use core::{TargetKind, LibKind, Profiles, Metadata, Dependency}; use core::dependency::Kind as DepKind; use util::{self, CargoResult, ChainError, internal, Config, profile, Cfg, human}; @@ -14,6 +14,7 @@ use super::TargetConfig; use super::custom_build::{BuildState, BuildScripts}; use super::fingerprint::Fingerprint; use super::layout::{Layout, LayoutProxy}; +use super::links::Links; use super::{Kind, Compilation, BuildConfig}; use super::{ProcessEngine, ExecEngine}; @@ -28,8 +29,8 @@ pub struct Unit<'a> { pub struct Context<'a, 'cfg: 'a> { pub config: &'cfg Config, pub resolve: &'a Resolve, - pub sources: &'a SourceMap<'cfg>, pub compilation: Compilation<'cfg>, + pub packages: &'a PackageSet<'cfg>, pub build_state: Arc, pub build_explicit_deps: HashMap, (PathBuf, Vec)>, pub exec_engine: Arc>, @@ -37,13 +38,13 @@ pub struct Context<'a, 'cfg: 'a> { pub compiled: HashSet>, pub build_config: BuildConfig, pub build_scripts: HashMap, Arc>, + pub links: Links<'a>, host: Layout, target: Option, target_triple: String, target_info: TargetInfo, host_info: TargetInfo, - package_set: &'a PackageSet, profiles: &'a Profiles, } @@ -57,8 +58,7 @@ struct TargetInfo { impl<'a, 'cfg> Context<'a, 'cfg> { pub fn new(resolve: &'a Resolve, - sources: &'a SourceMap<'cfg>, - deps: &'a PackageSet, + packages: &'a PackageSet<'cfg>, config: &'cfg Config, host: Layout, target_layout: Option, @@ -83,13 +83,12 @@ impl<'a, 'cfg> Context<'a, 'cfg> { host: host, target: target_layout, resolve: resolve, - sources: sources, - package_set: deps, + packages: packages, config: config, target_info: target_info, host_info: host_info, compilation: Compilation::new(config), - build_state: Arc::new(BuildState::new(&build_config, deps)), + build_state: Arc::new(BuildState::new(&build_config)), build_config: build_config, exec_engine: engine, fingerprints: HashMap::new(), @@ -97,6 +96,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { compiled: 
HashSet::new(), build_scripts: HashMap::new(), build_explicit_deps: HashMap::new(), + links: Links::new(), }) } @@ -213,14 +213,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Returns the appropriate output directory for the specified package and /// target. pub fn out_dir(&self, unit: &Unit) -> PathBuf { - let out_dir = self.layout(unit.pkg, unit.kind); - if unit.target.is_custom_build() { - out_dir.build(unit.pkg) - } else if unit.target.is_example() { - out_dir.examples().to_path_buf() - } else { - out_dir.root().to_path_buf() - } + self.layout(unit.pkg, unit.kind).out_dir(unit.pkg, unit.target) } /// Return the (prefix, suffix) pair for dynamic libraries. @@ -345,7 +338,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// For a package, return all targets which are registered as dependencies /// for that package. - pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec> { + pub fn dep_targets(&self, unit: &Unit<'a>) -> CargoResult>> { if unit.profile.run_custom_build { return self.dep_run_custom_build(unit) } else if unit.profile.doc { @@ -354,7 +347,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { let id = unit.pkg.package_id(); let deps = self.resolve.deps(id).into_iter().flat_map(|a| a); - let mut ret = deps.map(|id| self.get_package(id)).filter(|dep| { + let mut ret = try!(deps.filter(|dep| { unit.pkg.dependencies().iter().filter(|d| { d.name() == dep.name() }).any(|d| { @@ -391,22 +384,27 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // actually used! 
true }) - }).filter_map(|pkg| { - pkg.targets().iter().find(|t| t.is_lib()).map(|t| { - Unit { - pkg: pkg, - target: t, - profile: self.lib_profile(id), - kind: unit.kind.for_target(t), + }).filter_map(|id| { + match self.get_package(id) { + Ok(pkg) => { + pkg.targets().iter().find(|t| t.is_lib()).map(|t| { + Ok(Unit { + pkg: pkg, + target: t, + profile: self.lib_profile(id), + kind: unit.kind.for_target(t), + }) + }) } - }) - }).collect::>(); + Err(e) => Some(Err(e)) + } + }).collect::>>()); // If this target is a build script, then what we've collected so far is // all we need. If this isn't a build script, then it depends on the // build script if there is one. if unit.target.is_custom_build() { - return ret + return Ok(ret) } ret.extend(self.dep_build_script(unit)); @@ -415,7 +413,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // didn't include `pkg` in the return values, so we need to special case // it here and see if we need to push `(pkg, pkg_lib_target)`. if unit.target.is_lib() { - return ret + return Ok(ret) } ret.extend(self.maybe_lib(unit)); @@ -431,20 +429,21 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } })); } - ret + Ok(ret) } /// Returns the dependencies needed to run a build script. /// /// The `unit` provided must represent an execution of a build script, and /// the returned set of units must all be run before `unit` is run. - pub fn dep_run_custom_build(&self, unit: &Unit<'a>) -> Vec> { + pub fn dep_run_custom_build(&self, unit: &Unit<'a>) + -> CargoResult>> { // If this build script's execution has been overridden then we don't // actually depend on anything, we've reached the end of the dependency // chain as we've got all the info we're gonna get. 
let key = (unit.pkg.package_id().clone(), unit.kind); if self.build_state.outputs.lock().unwrap().contains_key(&key) { - return Vec::new() + return Ok(Vec::new()) } // When not overridden, then the dependencies to run a build script are: @@ -460,7 +459,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { profile: &self.profiles.dev, ..*unit }; - self.dep_targets(&tmp).iter().filter_map(|unit| { + let deps = try!(self.dep_targets(&tmp)); + Ok(deps.iter().filter_map(|unit| { if !unit.target.linkable() || unit.pkg.manifest().links().is_none() { return None } @@ -469,15 +469,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> { profile: self.build_script_profile(unit.pkg.package_id()), kind: Kind::Host, // build scripts always compiled for the host ..*unit - })).collect() + })).collect()) } /// Returns the dependencies necessary to document a package - fn doc_deps(&self, unit: &Unit<'a>) -> Vec> { + fn doc_deps(&self, unit: &Unit<'a>) -> CargoResult>> { let deps = self.resolve.deps(unit.pkg.package_id()).into_iter(); - let deps = deps.flat_map(|a| a).map(|id| { - self.get_package(id) - }).filter(|dep| { + let deps = deps.flat_map(|a| a).filter(|dep| { unit.pkg.dependencies().iter().filter(|d| { d.name() == dep.name() }).any(|dep| { @@ -487,15 +485,20 @@ impl<'a, 'cfg> Context<'a, 'cfg> { _ => false, } }) - }).filter_map(|dep| { - dep.targets().iter().find(|t| t.is_lib()).map(|t| (dep, t)) + }).map(|dep| { + self.get_package(dep) }); // To document a library, we depend on dependencies actually being // built. If we're documenting *all* libraries, then we also depend on // the documentation of the library being built. 
let mut ret = Vec::new(); - for (dep, lib) in deps { + for dep in deps { + let dep = try!(dep); + let lib = match dep.targets().iter().find(|t| t.is_lib()) { + Some(lib) => lib, + None => continue, + }; ret.push(Unit { pkg: dep, target: lib, @@ -519,7 +522,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { if unit.target.is_bin() { ret.extend(self.maybe_lib(unit)); } - ret + Ok(ret) } /// If a build script is scheduled to be run for the package specified by @@ -566,10 +569,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } /// Gets a package for the given package id. - pub fn get_package(&self, id: &PackageId) -> &'a Package { - self.package_set.iter() - .find(|pkg| id == pkg.package_id()) - .expect("Should have found package") + pub fn get_package(&self, id: &PackageId) -> CargoResult<&'a Package> { + self.packages.get(id) } /// Get the user-specified linker for a particular host or target diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs index bc83ecbdc5a..929e54b932a 100644 --- a/src/cargo/ops/cargo_rustc/custom_build.rs +++ b/src/cargo/ops/cargo_rustc/custom_build.rs @@ -5,7 +5,7 @@ use std::path::{PathBuf, Path}; use std::str; use std::sync::{Mutex, Arc}; -use core::{PackageId, PackageSet}; +use core::PackageId; use util::{CargoResult, human, Human}; use util::{internal, ChainError, profile, paths}; use util::Freshness; @@ -33,6 +33,7 @@ pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>; pub struct BuildState { pub outputs: Mutex, + overrides: HashMap<(String, Kind), BuildOutput>, } #[derive(Default)] @@ -65,8 +66,7 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work, Freshness)> { let _p = profile::start(format!("build script prepare: {}/{}", unit.pkg, unit.target.name())); - let key = (unit.pkg.package_id().clone(), unit.kind); - let overridden = cx.build_state.outputs.lock().unwrap().contains_key(&key); + let overridden = cx.build_state.has_override(unit); let 
(work_dirty, work_fresh) = if overridden { (Work::new(|_| Ok(())), Work::new(|_| Ok(()))) } else { @@ -124,7 +124,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // This information will be used at build-time later on to figure out which // sorts of variables need to be discovered at that time. let lib_deps = { - cx.dep_run_custom_build(unit).iter().filter_map(|unit| { + try!(cx.dep_run_custom_build(unit)).iter().filter_map(|unit| { if unit.profile.run_custom_build { Some((unit.pkg.manifest().links().unwrap().to_string(), unit.pkg.package_id().clone())) @@ -238,34 +238,34 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) } impl BuildState { - pub fn new(config: &super::BuildConfig, - packages: &PackageSet) -> BuildState { - let mut sources = HashMap::new(); - for package in packages.iter() { - match package.manifest().links() { - Some(links) => { - sources.insert(links.to_string(), - package.package_id().clone()); - } - None => {} - } - } - let mut outputs = HashMap::new(); + pub fn new(config: &super::BuildConfig) -> BuildState { + let mut overrides = HashMap::new(); let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host)); let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target)); for ((name, output), kind) in i1.chain(i2) { - // If no package is using the library named `name`, then this is - // just an override that we ignore. 
- if let Some(id) = sources.get(name) { - outputs.insert((id.clone(), kind), output.clone()); - } + overrides.insert((name.clone(), kind), output.clone()); + } + BuildState { + outputs: Mutex::new(HashMap::new()), + overrides: overrides, } - BuildState { outputs: Mutex::new(outputs) } } fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) { self.outputs.lock().unwrap().insert((id, kind), output); } + + fn has_override(&self, unit: &Unit) -> bool { + let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind)); + match key.and_then(|k| self.overrides.get(&k)) { + Some(output) => { + self.insert(unit.pkg.package_id().clone(), unit.kind, + output.clone()); + true + } + None => false, + } + } } impl BuildOutput { @@ -372,25 +372,27 @@ impl BuildOutput { /// The given set of targets to this function is the initial set of /// targets/profiles which are being built. pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, - units: &[Unit<'b>]) { + units: &[Unit<'b>]) + -> CargoResult<()> { let mut ret = HashMap::new(); for unit in units { - build(&mut ret, cx, unit); + try!(build(&mut ret, cx, unit)); } cx.build_scripts.extend(ret.into_iter().map(|(k, v)| { (k, Arc::new(v)) })); + return Ok(()); // Recursive function to build up the map we're constructing. This function // memoizes all of its return values as it goes along. fn build<'a, 'b, 'cfg>(out: &'a mut HashMap, BuildScripts>, cx: &Context<'b, 'cfg>, unit: &Unit<'b>) - -> &'a BuildScripts { + -> CargoResult<&'a BuildScripts> { // Do a quick pre-flight check to see if we've already calculated the // set of dependencies. 
if out.contains_key(unit) { - return &out[unit] + return Ok(&out[unit]) } let mut ret = BuildScripts::default(); @@ -398,8 +400,8 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, if !unit.target.is_custom_build() && unit.pkg.has_custom_build() { add_to_link(&mut ret, unit.pkg.package_id(), unit.kind); } - for unit in cx.dep_targets(unit).iter() { - let dep_scripts = build(out, cx, unit); + for unit in try!(cx.dep_targets(unit)).iter() { + let dep_scripts = try!(build(out, cx, unit)); if unit.target.for_host() { ret.plugins.extend(dep_scripts.to_link.iter() @@ -416,7 +418,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, add_to_link(prev, &pkg, kind); } prev.plugins.extend(ret.plugins); - prev + Ok(prev) } // When adding an entry to 'to_link' we only actually push it on if the diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index fcfc6c95217..d0007f52dcd 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -315,7 +315,8 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // elsewhere. Also skip fingerprints of binaries because they don't actually // induce a recompile, they're just dependencies in the sense that they need // to be built. 
- let deps = try!(cx.dep_targets(unit).iter().filter(|u| { + let deps = try!(cx.dep_targets(unit)); + let deps = try!(deps.iter().filter(|u| { !u.target.is_custom_build() && !u.target.is_bin() }).map(|unit| { calculate(cx, unit).map(|fingerprint| { @@ -562,7 +563,8 @@ fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult> { fn pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult { let source_id = pkg.package_id().source_id(); - let source = try!(cx.sources.get(source_id).chain_error(|| { + let sources = cx.packages.sources(); + let source = try!(sources.get(source_id).chain_error(|| { internal("missing package source") })); source.fingerprint(pkg) diff --git a/src/cargo/ops/cargo_rustc/job_queue.rs b/src/cargo/ops/cargo_rustc/job_queue.rs index 30d6394c28f..ff35daac2eb 100644 --- a/src/cargo/ops/cargo_rustc/job_queue.rs +++ b/src/cargo/ops/cargo_rustc/job_queue.rs @@ -8,7 +8,7 @@ use term::color::YELLOW; use core::{PackageId, Target, Profile}; use util::{Config, DependencyQueue, Fresh, Dirty, Freshness}; -use util::{CargoResult, Dependency, profile, internal}; +use util::{CargoResult, profile, internal}; use super::{Context, Kind, Unit}; use super::job::Job; @@ -68,11 +68,16 @@ impl<'a> JobQueue<'a> { } } - pub fn enqueue(&mut self, cx: &Context<'a, 'a>, - unit: &Unit<'a>, job: Job, fresh: Freshness) { + pub fn enqueue<'cfg>(&mut self, + cx: &Context<'a, 'cfg>, + unit: &Unit<'a>, + job: Job, + fresh: Freshness) -> CargoResult<()> { let key = Key::new(unit); - self.queue.queue(cx, Fresh, key, Vec::new()).push((job, fresh)); + let deps = try!(key.dependencies(cx)); + self.queue.queue(Fresh, key, Vec::new(), &deps).push((job, fresh)); *self.counts.entry(key.pkg).or_insert(0) += 1; + Ok(()) } /// Execute all jobs necessary to build the dependency graph. 
@@ -230,17 +235,26 @@ impl<'a> JobQueue<'a> { } } -impl<'a> Dependency for Key<'a> { - type Context = Context<'a, 'a>; +impl<'a> Key<'a> { + fn new(unit: &Unit<'a>) -> Key<'a> { + Key { + pkg: unit.pkg.package_id(), + target: unit.target, + profile: unit.profile, + kind: unit.kind, + } + } - fn dependencies(&self, cx: &Context<'a, 'a>) -> Vec> { + fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) + -> CargoResult>> { let unit = Unit { - pkg: cx.get_package(self.pkg), + pkg: try!(cx.get_package(self.pkg)), target: self.target, profile: self.profile, kind: self.kind, }; - cx.dep_targets(&unit).iter().filter_map(|unit| { + let targets = try!(cx.dep_targets(&unit)); + Ok(targets.iter().filter_map(|unit| { // Binaries aren't actually needed to *compile* tests, just to run // them, so we don't include this dependency edge in the job graph. if self.target.is_test() && unit.target.is_bin() { @@ -248,18 +262,7 @@ impl<'a> Dependency for Key<'a> { } else { Some(Key::new(unit)) } - }).collect() - } -} - -impl<'a> Key<'a> { - fn new(unit: &Unit<'a>) -> Key<'a> { - Key { - pkg: unit.pkg.package_id(), - target: unit.target, - profile: unit.profile, - kind: unit.kind, - } + }).collect()) } } diff --git a/src/cargo/ops/cargo_rustc/layout.rs b/src/cargo/ops/cargo_rustc/layout.rs index 6bf8b3e153d..d25a8199d1a 100644 --- a/src/cargo/ops/cargo_rustc/layout.rs +++ b/src/cargo/ops/cargo_rustc/layout.rs @@ -49,7 +49,7 @@ use std::fs; use std::io; use std::path::{PathBuf, Path}; -use core::Package; +use core::{Package, Target}; use util::Config; use util::hex::short_hash; @@ -116,6 +116,7 @@ impl Layout { pub fn dest(&self) -> &Path { &self.root } pub fn deps(&self) -> &Path { &self.deps } pub fn examples(&self) -> &Path { &self.examples } + pub fn root(&self) -> &Path { &self.root } pub fn fingerprint(&self, package: &Package) -> PathBuf { self.fingerprint.join(&self.pkg_dir(package)) @@ -154,4 +155,14 @@ impl<'a> LayoutProxy<'a> { pub fn build_out(&self, pkg: &Package) -> PathBuf { 
self.root.build_out(pkg) } pub fn proxy(&self) -> &'a Layout { self.root } + + pub fn out_dir(&self, pkg: &Package, target: &Target) -> PathBuf { + if target.is_custom_build() { + self.build(pkg) + } else if target.is_example() { + self.examples().to_path_buf() + } else { + self.root().to_path_buf() + } + } } diff --git a/src/cargo/ops/cargo_rustc/links.rs b/src/cargo/ops/cargo_rustc/links.rs index e2a7ad4a822..779cd13739b 100644 --- a/src/cargo/ops/cargo_rustc/links.rs +++ b/src/cargo/ops/cargo_rustc/links.rs @@ -1,38 +1,49 @@ -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; -use core::{PackageId, PackageSet}; +use core::PackageId; use util::CargoResult; +use super::Unit; -// Validate that there are no duplicated native libraries among packages and -// that all packages with `links` also have a build script. -pub fn validate(deps: &PackageSet) -> CargoResult<()> { - let mut map: HashMap<_, &PackageId> = HashMap::new(); +pub struct Links<'a> { + validated: HashSet<&'a PackageId>, + links: HashMap, +} + +impl<'a> Links<'a> { + pub fn new() -> Links<'a> { + Links { + validated: HashSet::new(), + links: HashMap::new(), + } + } - for dep in deps.iter() { - let lib = match dep.manifest().links() { + pub fn validate(&mut self, unit: &Unit<'a>) -> CargoResult<()> { + if !self.validated.insert(unit.pkg.package_id()) { + return Ok(()) + } + let lib = match unit.pkg.manifest().links() { Some(lib) => lib, - None => continue, + None => return Ok(()), }; - if let Some(prev) = map.get(&lib) { - let dep = dep.package_id(); - if prev.name() == dep.name() && prev.source_id() == dep.source_id() { + if let Some(prev) = self.links.get(lib) { + let pkg = unit.pkg.package_id(); + if prev.name() == pkg.name() && prev.source_id() == pkg.source_id() { bail!("native library `{}` is being linked to by more \ than one version of the same package, but it can \ only be linked once; try updating or pinning your \ dependencies to ensure that this package only shows \ - 
up once\n\n {}\n {}", lib, prev, dep) + up once\n\n {}\n {}", lib, prev, pkg) } else { bail!("native library `{}` is being linked to by more than \ one package, and can only be linked to by one \ - package\n\n {}\n {}", lib, prev, dep) + package\n\n {}\n {}", lib, prev, pkg) } } - if !dep.manifest().targets().iter().any(|t| t.is_custom_build()) { + if !unit.pkg.manifest().targets().iter().any(|t| t.is_custom_build()) { bail!("package `{}` specifies that it links to `{}` but does not \ - have a custom build script", dep.package_id(), lib) + have a custom build script", unit.pkg.package_id(), lib) } - map.insert(lib, dep.package_id()); + self.links.insert(lib.to_string(), unit.pkg.package_id()); + Ok(()) } - - Ok(()) } diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index f76ba09e5c9..b71da0fcc43 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -6,7 +6,7 @@ use std::io::prelude::*; use std::path::{self, PathBuf}; use std::sync::Arc; -use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve}; +use core::{Package, PackageId, PackageSet, Target, Resolve}; use core::{Profile, Profiles}; use util::{self, CargoResult, human}; use util::{Config, internal, ChainError, profile, join_paths}; @@ -51,14 +51,13 @@ pub struct TargetConfig { pub overrides: HashMap, } -pub type PackagesToBuild<'a> = [(&'a Package,Vec<(&'a Target,&'a Profile)>)]; +pub type PackagesToBuild<'a> = [(&'a Package, Vec<(&'a Target,&'a Profile)>)]; // Returns a mapping of the root package plus its immediate dependencies to // where the compiled libraries are all located. 
pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>, - deps: &'a PackageSet, + packages: &'a PackageSet<'cfg>, resolve: &'a Resolve, - sources: &'a SourceMap<'cfg>, config: &'cfg Config, build_config: BuildConfig, profiles: &'a Profiles) @@ -78,23 +77,22 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>, } }) }).collect::>(); - try!(links::validate(deps)); let dest = if build_config.release {"release"} else {"debug"}; - let root = deps.iter().find(|p| p.package_id() == resolve.root()).unwrap(); + let root = try!(packages.get(resolve.root())); let host_layout = Layout::new(config, root, None, &dest); let target_layout = build_config.requested_target.as_ref().map(|target| { layout::Layout::new(config, root, Some(&target), &dest) }); - let mut cx = try!(Context::new(resolve, sources, deps, config, + let mut cx = try!(Context::new(resolve, packages, config, host_layout, target_layout, build_config, profiles)); let mut queue = JobQueue::new(&cx); try!(cx.prepare(root)); - custom_build::build_map(&mut cx, &units); + try!(custom_build::build_map(&mut cx, &units)); for unit in units.iter() { // Build up a list of pending jobs, each of which represent @@ -131,7 +129,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>, if !unit.target.is_lib() { continue } // Include immediate lib deps as well - for unit in cx.dep_targets(unit).iter() { + for unit in try!(cx.dep_targets(unit)).iter() { let pkgid = unit.pkg.package_id(); if !unit.target.is_lib() { continue } if unit.profile.doc { continue } @@ -178,6 +176,7 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); try!(fingerprint::prepare_init(cx, unit)); + try!(cx.links.validate(unit)); let (dirty, fresh, freshness) = if unit.profile.run_custom_build { try!(custom_build::prepare(cx, unit)) @@ -192,11 +191,11 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, let 
dirty = work.then(dirty); (dirty, fresh, freshness) }; - jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness); + try!(jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)); drop(p); // Be sure to compile all dependencies of this target as well. - for unit in cx.dep_targets(unit).iter() { + for unit in try!(cx.dep_targets(unit)).iter() { try!(compile(cx, jobs, unit)); } Ok(()) @@ -371,12 +370,14 @@ fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult { .cwd(cx.config.cwd()) .arg("--crate-name").arg(&unit.target.crate_name()); - let mut doc_dir = cx.config.target_dir(cx.get_package(cx.resolve.root())); if let Some(target) = cx.requested_target() { rustdoc.arg("--target").arg(target); - doc_dir.push(target); } - doc_dir.push("doc"); + + // the "root" directory ends in 'debug' or 'release', and we want it to end + // in 'doc' instead + let doc_dir = cx.layout(unit.pkg, unit.kind).proxy().root(); + let doc_dir = doc_dir.parent().unwrap().join("doc"); // Create the documentation directory ahead of time as rustdoc currently has // a bug where concurrent invocations will race to create this directory if @@ -561,7 +562,7 @@ fn build_deps_args(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit) cmd.env("OUT_DIR", &layout.build_out(unit.pkg)); } - for unit in cx.dep_targets(unit).iter() { + for unit in try!(cx.dep_targets(unit)).iter() { if unit.target.linkable() { try!(link_to(cmd, cx, unit)); } diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index 4a66b1c3159..0cdb02f25d3 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -187,16 +187,12 @@ impl<'cfg> Source for GitSource<'cfg> { self.path_source.as_mut().unwrap().update() } - fn download(&mut self, _: &[PackageId]) -> CargoResult<()> { - // TODO: assert! 
that the PackageId is contained by the source - Ok(()) - } - - fn get(&self, ids: &[PackageId]) -> CargoResult> { - trace!("getting packages for package ids `{:?}` from `{:?}`", ids, - self.remote); - self.path_source.as_ref().expect("BUG: update() must be called \ - before get()").get(ids) + fn download(&mut self, id: &PackageId) -> CargoResult { + trace!("getting packages for package id `{}` from `{:?}`", id, + self.remote); + self.path_source.as_mut() + .expect("BUG: update() must be called before get()") + .download(id) } fn fingerprint(&self, _pkg: &Package) -> CargoResult { diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 206a9f0a400..26373c7ba7e 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -302,18 +302,13 @@ impl<'cfg> Source for PathSource<'cfg> { Ok(()) } - fn download(&mut self, _: &[PackageId]) -> CargoResult<()>{ - // TODO: assert! that the PackageId is contained by the source - Ok(()) - } - - fn get(&self, ids: &[PackageId]) -> CargoResult> { - trace!("getting packages; ids={:?}", ids); + fn download(&mut self, id: &PackageId) -> CargoResult { + trace!("getting packages; id={}", id); - Ok(self.packages.iter() - .filter(|pkg| ids.iter().any(|id| pkg.package_id() == id)) - .cloned() - .collect()) + let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id); + pkg.cloned().ok_or_else(|| { + internal(format!("failed to find {} in path source", id)) + }) } fn fingerprint(&self, pkg: &Package) -> CargoResult { diff --git a/src/cargo/sources/registry.rs b/src/cargo/sources/registry.rs index 7e0ad0558e3..1c68475e057 100644 --- a/src/cargo/sources/registry.rs +++ b/src/cargo/sources/registry.rs @@ -187,7 +187,6 @@ pub struct RegistrySource<'cfg> { src_path: PathBuf, config: &'cfg Config, handle: Option, - sources: HashMap>, hashes: HashMap<(String, String), String>, // (name, vers) => cksum cache: HashMap>, updated: bool, @@ -239,7 +238,6 @@ impl<'cfg> RegistrySource<'cfg> { config: config, source_id: 
source_id.clone(), handle: None, - sources: HashMap::new(), hashes: HashMap::new(), cache: HashMap::new(), updated: false, @@ -537,37 +535,24 @@ impl<'cfg> Source for RegistrySource<'cfg> { Ok(()) } - fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> { + fn download(&mut self, package: &PackageId) -> CargoResult { let config = try!(self.config()); let url = try!(config.dl.to_url().map_err(internal)); - for package in packages.iter() { - if self.source_id != *package.source_id() { continue } - if self.sources.contains_key(package) { continue } - - let mut url = url.clone(); - url.path_mut().unwrap().push(package.name().to_string()); - url.path_mut().unwrap().push(package.version().to_string()); - url.path_mut().unwrap().push("download".to_string()); - let path = try!(self.download_package(package, &url).chain_error(|| { - internal(format!("failed to download package `{}` from {}", - package, url)) - })); - let path = try!(self.unpack_package(package, path).chain_error(|| { - internal(format!("failed to unpack package `{}`", package)) - })); - let mut src = PathSource::new(&path, &self.source_id, self.config); - try!(src.update()); - self.sources.insert(package.clone(), src); - } - Ok(()) - } + let mut url = url.clone(); + url.path_mut().unwrap().push(package.name().to_string()); + url.path_mut().unwrap().push(package.version().to_string()); + url.path_mut().unwrap().push("download".to_string()); + let path = try!(self.download_package(package, &url).chain_error(|| { + internal(format!("failed to download package `{}` from {}", + package, url)) + })); + let path = try!(self.unpack_package(package, path).chain_error(|| { + internal(format!("failed to unpack package `{}`", package)) + })); - fn get(&self, packages: &[PackageId]) -> CargoResult> { - let mut ret = Vec::new(); - for src in self.sources.values() { - ret.extend(try!(src.get(packages)).into_iter()); - } - Ok(ret) + let mut src = PathSource::new(&path, &self.source_id, self.config); + 
try!(src.update()); + src.download(package) } fn fingerprint(&self, pkg: &Package) -> CargoResult { diff --git a/src/cargo/util/dependency_queue.rs b/src/cargo/util/dependency_queue.rs index 9db5d103c59..5165adc249a 100644 --- a/src/cargo/util/dependency_queue.rs +++ b/src/cargo/util/dependency_queue.rs @@ -46,19 +46,13 @@ pub enum Freshness { Dirty, } -/// A trait for discovering the dependencies of a piece of data. -pub trait Dependency: Hash + Eq + Clone { - type Context; - fn dependencies(&self, cx: &Self::Context) -> Vec; -} - impl Freshness { pub fn combine(&self, other: Freshness) -> Freshness { match *self { Fresh => other, Dirty => Dirty } } } -impl DependencyQueue { +impl DependencyQueue { /// Creates a new dependency queue with 0 packages. pub fn new() -> DependencyQueue { DependencyQueue { @@ -73,8 +67,11 @@ impl DependencyQueue { /// /// It is assumed that any dependencies of this package will eventually also /// be added to the dependency queue. - pub fn queue(&mut self, cx: &K::Context, fresh: Freshness, - key: K, value: V) -> &mut V { + pub fn queue(&mut self, + fresh: Freshness, + key: K, + value: V, + dependencies: &[K]) -> &mut V { let slot = match self.dep_map.entry(key.clone()) { Occupied(v) => return &mut v.into_mut().1, Vacant(v) => v, @@ -85,9 +82,10 @@ impl DependencyQueue { } let mut my_dependencies = HashSet::new(); - for dep in key.dependencies(cx).into_iter() { + for dep in dependencies { assert!(my_dependencies.insert(dep.clone())); - let rev = self.reverse_dep_map.entry(dep).or_insert(HashSet::new()); + let rev = self.reverse_dep_map.entry(dep.clone()) + .or_insert(HashSet::new()); assert!(rev.insert(key.clone())); } &mut slot.insert((my_dependencies, value)).1 diff --git a/src/cargo/util/lazy_cell.rs b/src/cargo/util/lazy_cell.rs new file mode 100644 index 00000000000..a13eaf31ba0 --- /dev/null +++ b/src/cargo/util/lazy_cell.rs @@ -0,0 +1,55 @@ +//! A lazily fill Cell, but with frozen contents. +//! +//! 
With a `RefCell`, the inner contents cannot be borrowed for the lifetime of +//! the entire object, but only of the borrows returned. A `LazyCell` is a +//! variation on `RefCell` which allows borrows tied to the lifetime of the +//! outer object. +//! +//! The limitation of a `LazyCell` is that after initialized, it can never be +//! modified. + +use std::cell::UnsafeCell; + +pub struct LazyCell { + inner: UnsafeCell>, +} + +impl LazyCell { + /// Creates a new empty lazy cell. + pub fn new(init: Option) -> LazyCell { + LazyCell { inner: UnsafeCell::new(init) } + } + + /// Put a value into this cell. + /// + /// This function will fail if the cell has already been filled. + pub fn fill(&self, t: T) -> Result<(), T> { + unsafe { + let slot = self.inner.get(); + if (*slot).is_none() { + *slot = Some(t); + Ok(()) + } else { + Err(t) + } + } + } + + /// Borrows the contents of this lazy cell for the duration of the cell + /// itself. + /// + /// This function will return `Some` if the cell has been previously + /// initialized, and `None` if it has not yet been initialized. + pub fn borrow(&self) -> Option<&T> { + unsafe { + (*self.inner.get()).as_ref() + } + } + + /// Consumes this `LazyCell`, returning the underlying value. 
+ pub fn into_inner(self) -> Option { + unsafe { + self.inner.into_inner() + } + } +} diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs index e2f2cb971c2..da0f63a6a01 100644 --- a/src/cargo/util/mod.rs +++ b/src/cargo/util/mod.rs @@ -1,6 +1,5 @@ pub use self::cfg::{Cfg, CfgExpr}; pub use self::config::Config; -pub use self::dependency_queue::Dependency; pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness}; pub use self::errors::{CargoResult, CargoError, ChainError, CliResult}; pub use self::errors::{CliError, ProcessError, CargoTestError}; @@ -8,6 +7,7 @@ pub use self::errors::{Human, caused_human}; pub use self::errors::{process_error, internal_error, internal, human}; pub use self::graph::Graph; pub use self::hex::{to_hex, short_hash, hash_u64}; +pub use self::lazy_cell::LazyCell; pub use self::lev_distance::{lev_distance}; pub use self::paths::{join_paths, path2bytes, bytes2path, dylib_path}; pub use self::paths::{normalize_path, dylib_path_envvar, without_prefix}; @@ -37,3 +37,4 @@ mod rustc; mod sha256; mod shell_escape; mod vcs; +mod lazy_cell; diff --git a/tests/test_cargo_registry.rs b/tests/test_cargo_registry.rs index 793235edece..bbc531978ad 100644 --- a/tests/test_cargo_registry.rs +++ b/tests/test_cargo_registry.rs @@ -567,6 +567,7 @@ test!(login_with_no_cargo_dir { }); test!(bad_license_file { + Package::new("foo", "1.0.0").publish(); let p = project("all") .file("Cargo.toml", r#" [project] @@ -957,21 +958,52 @@ test!(update_same_prefix_oh_my_how_was_this_a_bug { execs().with_status(0)); }); -test!(use_semver { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] +test!(use_semver { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] - [dependencies] - foo = "1.2.3-alpha.0" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); + [dependencies] + foo = "1.2.3-alpha.0" + "#) + .file("src/main.rs", 
"fn main() {}"); + p.build(); - Package::new("foo", "1.2.3-alpha.0").publish(); + Package::new("foo", "1.2.3-alpha.0").publish(); assert_that(p.cargo("build"), execs().with_status(0)); }); + +test!(only_download_relevant { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [target.foo.dependencies] + foo = "*" + [dev-dependencies] + bar = "*" + [dependencies] + baz = "*" + "#) + .file("src/main.rs", "fn main() {}"); + p.build(); + + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stdout(&format!("\ +{updating} registry `[..]` +{downloading} baz v0.1.0 ([..]) +{compiling} baz v0.1.0 ([..]) +{compiling} bar v0.5.0 ([..]) +", downloading = DOWNLOADING, compiling = COMPILING, updating = UPDATING))); +});