From 6694fdb6775deaf88db3cf0cc7c8b706bb61870a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20Kr=C3=BCger?= Date: Fri, 1 May 2020 00:48:38 +0200 Subject: [PATCH 01/24] clippy fixes --- crates/cargo-test-support/src/registry.rs | 2 +- src/bin/cargo/commands/tree.rs | 2 +- .../core/compiler/build_context/target_info.rs | 2 +- src/cargo/core/compiler/compile_kind.rs | 2 +- src/cargo/core/compiler/standard_lib.rs | 4 ++-- src/cargo/core/compiler/unit.rs | 2 +- src/cargo/core/compiler/unit_graph.rs | 2 +- src/cargo/core/manifest.rs | 14 +++++++------- src/cargo/core/registry.rs | 2 +- src/cargo/core/resolver/conflict_cache.rs | 2 +- src/cargo/core/resolver/encode.rs | 6 +++--- src/cargo/lib.rs | 2 +- src/cargo/ops/cargo_fetch.rs | 2 +- src/cargo/ops/cargo_output_metadata.rs | 2 +- src/cargo/ops/registry.rs | 2 +- tests/testsuite/multitarget.rs | 2 +- 16 files changed, 25 insertions(+), 25 deletions(-) diff --git a/crates/cargo-test-support/src/registry.rs b/crates/cargo-test-support/src/registry.rs index cf6cff8d2c2..6d3db585a34 100644 --- a/crates/cargo-test-support/src/registry.rs +++ b/crates/cargo-test-support/src/registry.rs @@ -442,7 +442,7 @@ impl Package { } else { registry_path.join(&file) }; - let prev = fs::read_to_string(&dst).unwrap_or(String::new()); + let prev = fs::read_to_string(&dst).unwrap_or_default(); t!(fs::create_dir_all(dst.parent().unwrap())); t!(fs::write(&dst, prev + &line[..] + "\n")); diff --git a/src/bin/cargo/commands/tree.rs b/src/bin/cargo/commands/tree.rs index b98ee5f7aad..9cfff98fe48 100644 --- a/src/bin/cargo/commands/tree.rs +++ b/src/bin/cargo/commands/tree.rs @@ -206,7 +206,7 @@ fn parse_edge_kinds(config: &Config, args: &ArgMatches<'_>) -> CargoResult 1 && !config.cli_unstable().multitarget { bail!("specifying multiple `--target` flags requires `-Zmultitarget`") } - if targets.len() != 0 { + if !targets.is_empty() { return Ok(targets .iter() .map(|value| Ok(CompileKind::Target(CompileTarget::new(value)?))) diff --git a/src/cargo/core/compiler/standard_lib.rs b/src/cargo/core/compiler/standard_lib.rs index 0ae0b651546..c1c7f875c65 100644 --- a/src/cargo/core/compiler/standard_lib.rs +++ b/src/cargo/core/compiler/standard_lib.rs @@ -161,7 +161,7 @@ pub fn generate_std_roots( let features = std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev); for kind in kinds { - let list = ret.entry(*kind).or_insert(Vec::new()); + let list = ret.entry(*kind).or_insert_with(Vec::new); list.push(interner.intern( pkg, lib, @@ -173,7 +173,7 @@ pub fn generate_std_roots( )); } } - return Ok(ret); + Ok(ret) } fn detect_sysroot_src_path(target_data: &RustcTargetData) -> CargoResult { diff --git a/src/cargo/core/compiler/unit.rs b/src/cargo/core/compiler/unit.rs index ebbba387983..84565a6517e 100644 --- a/src/cargo/core/compiler/unit.rs +++ b/src/cargo/core/compiler/unit.rs @@ -204,6 +204,6 @@ impl UnitInterner { } let item = Rc::new(item.clone()); me.cache.insert(item.clone()); - return item; + item } } diff --git a/src/cargo/core/compiler/unit_graph.rs b/src/cargo/core/compiler/unit_graph.rs index 2db9015058e..350c040abad 100644 --- a/src/cargo/core/compiler/unit_graph.rs +++ b/src/cargo/core/compiler/unit_graph.rs @@ -113,6 +113,6 @@ pub fn emit_serialized_unit_graph(root_units: &[Unit], unit_graph: &UnitGraph) - let stdout = std::io::stdout(); let mut lock = stdout.lock(); serde_json::to_writer(&mut lock, &s)?; - write!(lock, "\n")?; + writeln!(lock)?; Ok(()) } diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index 
c5c15a4a8b0..646708e5901 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -669,7 +669,7 @@ impl Target { .set_name(name) .set_doctest(true) .set_doc(true); - return target; + target } pub fn bin_target( @@ -684,7 +684,7 @@ impl Target { .set_name(name) .set_required_features(required_features) .set_doc(true); - return target; + target } /// Builds a `Target` corresponding to the `build = "build.rs"` entry. @@ -696,7 +696,7 @@ impl Target { .set_for_host(true) .set_benched(false) .set_tested(false); - return target; + target } pub fn metabuild_target(name: &str) -> Target { @@ -707,7 +707,7 @@ impl Target { .set_for_host(true) .set_benched(false) .set_tested(false); - return target; + target } pub fn example_target( @@ -733,7 +733,7 @@ impl Target { .set_required_features(required_features) .set_tested(false) .set_benched(false); - return target; + target } pub fn test_target( @@ -748,7 +748,7 @@ impl Target { .set_name(name) .set_required_features(required_features) .set_benched(false); - return target; + target } pub fn bench_target( @@ -763,7 +763,7 @@ impl Target { .set_name(name) .set_required_features(required_features) .set_tested(false); - return target; + target } pub fn name(&self) -> &str { diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 5b45a4bc501..8a05b4510d6 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -622,7 +622,7 @@ fn lock( // Lock the summary's ID if possible let summary = match pair { - Some((precise, _)) => summary.override_id(precise.clone()), + Some((precise, _)) => summary.override_id(*precise), None => summary, }; summary.map_dependencies(|dep| { diff --git a/src/cargo/core/resolver/conflict_cache.rs b/src/cargo/core/resolver/conflict_cache.rs index c59e4117810..e0e64b96a5d 100644 --- a/src/cargo/core/resolver/conflict_cache.rs +++ b/src/cargo/core/resolver/conflict_cache.rs @@ -213,7 +213,7 @@ impl ConflictCache { for c in con.keys() { self.dep_from_pid - .entry(c.clone()) + .entry(*c) .or_insert_with(HashSet::new) .insert(dep.clone()); } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 5fb4631952f..4d711498235 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -260,7 +260,7 @@ impl EncodableResolve { let mut g = Graph::new(); for &(ref id, _) in live_pkgs.values() { - g.add(id.clone()); + g.add(*id); } for &(ref id, pkg) in live_pkgs.values() { @@ -271,7 +271,7 @@ impl EncodableResolve { for edge in deps.iter() { if let Some(to_depend_on) = lookup_id(edge) { - g.link(id.clone(), to_depend_on); + g.link(*id, to_depend_on); } } } @@ -282,7 +282,7 @@ impl EncodableResolve { if let Some(ref replace) = pkg.replace { assert!(pkg.dependencies.is_none()); if let Some(replace_id) = lookup_id(replace) { - replacements.insert(id.clone(), replace_id); + replacements.insert(*id, replace_id); } } } diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index a5c94f73fa8..b386ee7f558 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -144,7 +144,7 @@ pub fn display_error(err: &Error, shell: &mut Shell) { /// and context. 
pub fn display_warning_with_error(warning: &str, err: &Error, shell: &mut Shell) { drop(shell.warn(warning)); - drop(writeln!(shell.err(), "")); + drop(writeln!(shell.err())); _display_error(err, shell, false); } diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index a21c9443eb0..1e0d855d0d1 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -39,7 +39,7 @@ pub fn fetch<'a>( deps.iter().any(|d| { // If no target was specified then all dependencies are // fetched. - if options.targets.len() == 0 { + if options.targets.is_empty() { return true; } diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index d10a6a4f3c9..e6261d3a927 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -176,7 +176,7 @@ fn build_resolve_graph_r( let deps: Vec = resolve .deps(pkg_id) .filter(|(_dep_id, deps)| { - if requested_kinds == &[CompileKind::Host] { + if requested_kinds == [CompileKind::Host] { true } else { requested_kinds.iter().any(|kind| { diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 4d47bc45342..570e6f522bd 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -378,7 +378,7 @@ fn registry( token: token_config, index: index_config, } = registry_configuration(config, registry.clone())?; - let opt_index = index_config.as_ref().or(index.as_ref()); + let opt_index = index_config.as_ref().or_else(|| index.as_ref()); let sid = get_source_id(config, opt_index, registry.as_ref())?; if !sid.is_remote_registry() { bail!( diff --git a/tests/testsuite/multitarget.rs b/tests/testsuite/multitarget.rs index 1235642cb56..a72203c5779 100644 --- a/tests/testsuite/multitarget.rs +++ b/tests/testsuite/multitarget.rs @@ -35,7 +35,7 @@ fn simple_build() { .masquerade_as_nightly_cargo() .run(); - assert!(p.target_bin(&t1, "foo").is_file()); + assert!(p.target_bin(t1, "foo").is_file()); assert!(p.target_bin(&t2, "foo").is_file()); } From e94facea1544112562d3b2aacefd996dabb6fa7f Mon Sep 17 00:00:00 2001 From: Rain Date: Fri, 1 May 2020 17:02:44 -0700 Subject: [PATCH 02/24] features: allow activated_features_unverified to communicate not-present I'm currently writing [`cargo-guppy`](https://github.com/facebookincubator/cargo-guppy), a toolkit for analyzing Rust dependency graphs. As part of that I'm writing some tests to compare guppy to `cargo`, to ensure that it produces the same results for the subset of analyses `cargo` can do. While writing tests I found it useful to distinguish between packages that weren't present at all and packages that were activated but with no features. This commit adds that functionality to the `_unverified` method. 
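For illustration, here is a minimal sketch of how a caller can now tell the two cases apart with the reworked API (`resolved_features`, `pkg_id`, and `features_for` are assumed to already be in scope; this snippet is not part of the patch itself):

    match resolved_features.activated_features_unverified(pkg_id, features_for) {
        // The package is not activated at all for this pkg_id/features_for pair.
        None => println!("{} is not present in the feature resolution", pkg_id),
        // The package is activated, but with no features enabled.
        Some(features) if features.is_empty() => {
            println!("{} is activated with no features", pkg_id)
        }
        // The package is activated with one or more features.
        Some(features) => println!("{} is activated with features {:?}", pkg_id, features),
    }
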
--- src/cargo/core/resolver/features.rs | 20 +++++++++----------- src/cargo/ops/cargo_clean.rs | 5 +++-- src/cargo/ops/cargo_compile.rs | 5 ++++- 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/src/cargo/core/resolver/features.rs b/src/cargo/core/resolver/features.rs index acbdec424b3..cea01d113a4 100644 --- a/src/cargo/core/resolver/features.rs +++ b/src/cargo/core/resolver/features.rs @@ -208,36 +208,34 @@ impl ResolvedFeatures { pkg_id: PackageId, features_for: FeaturesFor, ) -> Vec { - self.activated_features_int(pkg_id, features_for, true) + self.activated_features_int(pkg_id, features_for) + .expect("activated_features for invalid package") } - /// Variant of `activated_features` that returns an empty Vec if this is + /// Variant of `activated_features` that returns `None` if this is /// not a valid pkg_id/is_build combination. Used in places which do /// not know which packages are activated (like `cargo clean`). pub fn activated_features_unverified( &self, pkg_id: PackageId, features_for: FeaturesFor, - ) -> Vec { - self.activated_features_int(pkg_id, features_for, false) + ) -> Option> { + self.activated_features_int(pkg_id, features_for).ok() } fn activated_features_int( &self, pkg_id: PackageId, features_for: FeaturesFor, - verify: bool, - ) -> Vec { + ) -> CargoResult> { if let Some(legacy) = &self.legacy { - legacy.get(&pkg_id).map_or_else(Vec::new, |v| v.clone()) + Ok(legacy.get(&pkg_id).map_or_else(Vec::new, |v| v.clone())) } else { let is_build = self.opts.decouple_host_deps && features_for == FeaturesFor::HostDep; if let Some(fs) = self.activated_features.get(&(pkg_id, is_build)) { - fs.iter().cloned().collect() - } else if verify { - panic!("features did not find {:?} {:?}", pkg_id, is_build) + Ok(fs.iter().cloned().collect()) } else { - Vec::new() + anyhow::bail!("features did not find {:?} {:?}", pkg_id, is_build) } } } diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index beb9b03cd39..0a4166940e3 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -129,8 +129,9 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { // Use unverified here since this is being more // exhaustive than what is actually needed. 
let features_for = unit_for.map_to_features_for(); - let features = - features.activated_features_unverified(pkg.package_id(), features_for); + let features = features + .activated_features_unverified(pkg.package_id(), features_for) + .unwrap_or_default(); units.push(interner.intern( pkg, target, profile, *kind, *mode, features, /*is_std*/ false, )); diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 29e3abf018e..762da60b02b 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -978,7 +978,10 @@ pub fn resolve_all_features( .proc_macro(); for dep in deps { let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build()); - for feature in resolved_features.activated_features_unverified(dep_id, features_for) { + for feature in resolved_features + .activated_features_unverified(dep_id, features_for) + .unwrap_or_default() + { features.insert(format!("{}/{}", dep.name_in_toml(), feature)); } } From 4fd483507a3e1471635a178a0eeddb7752b301bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kornel=20Lesin=CC=81ski?= Date: Mon, 27 Apr 2020 20:17:36 +0100 Subject: [PATCH 03/24] Hint git-fetch-with-cli on git errors --- src/cargo/sources/git/utils.rs | 39 ++++++++++++++-- tests/testsuite/git_auth.rs | 82 ++++++++++++++++++++++++++++++++++ 2 files changed, 118 insertions(+), 3 deletions(-) diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs index 08efd065b4e..1d28d36ccfa 100644 --- a/src/cargo/sources/git/utils.rs +++ b/src/cargo/sources/git/utils.rs @@ -4,7 +4,7 @@ use crate::util::paths; use crate::util::process_builder::process; use crate::util::{network, Config, IntoUrl, Progress}; use curl::easy::{Easy, List}; -use git2::{self, ObjectType}; +use git2::{self, ErrorClass, ObjectType}; use log::{debug, info}; use serde::ser; use serde::Serialize; @@ -97,10 +97,43 @@ impl GitRemote { reference: &GitReference, cargo_config: &Config, ) -> CargoResult<(GitDatabase, GitRevision)> { + let format_error = |e: anyhow::Error, operation| { + let may_be_libgit_fault = e + .chain() + .filter_map(|e| e.downcast_ref::()) + .any(|e| match e.class() { + ErrorClass::Net + | ErrorClass::Ssl + | ErrorClass::Submodule + | ErrorClass::FetchHead + | ErrorClass::Ssh + | ErrorClass::Callback + | ErrorClass::Http => true, + _ => false, + }); + let uses_cli = cargo_config + .net_config() + .ok() + .and_then(|config| config.git_fetch_with_cli) + .unwrap_or(false); + let msg = if !uses_cli && may_be_libgit_fault { + format!( + r"failed to {} into: {} + If your environment requires git authentication or proxying, try enabling `git-fetch-with-cli` + https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli", + operation, + into.display() + ) + } else { + format!("failed to {} into: {}", operation, into.display()) + }; + e.context(msg) + }; + let mut repo_and_rev = None; if let Ok(mut repo) = git2::Repository::open(into) { self.fetch_into(&mut repo, cargo_config) - .chain_err(|| format!("failed to fetch into {}", into.display()))?; + .map_err(|e| format_error(e, "fetch"))?; if let Ok(rev) = reference.resolve(&repo) { repo_and_rev = Some((repo, rev)); } @@ -110,7 +143,7 @@ impl GitRemote { None => { let repo = self .clone_into(into, cargo_config) - .chain_err(|| format!("failed to clone into: {}", into.display()))?; + .map_err(|e| format_error(e, "clone"))?; let rev = reference.resolve(&repo)?; (repo, rev) } diff --git a/tests/testsuite/git_auth.rs b/tests/testsuite/git_auth.rs index 258014cf653..5aea1a9c5c1 100644 --- 
a/tests/testsuite/git_auth.rs +++ b/tests/testsuite/git_auth.rs @@ -265,3 +265,85 @@ Caused by: .run(); t.join().ok().unwrap(); } + +#[cargo_test] +fn net_err_suggests_fetch_with_cli() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + foo = { git = "ssh://needs-proxy.invalid/git" } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[UPDATING] git repository `ssh://needs-proxy.invalid/git` +warning: spurious network error[..] +warning: spurious network error[..] +[ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]` + +Caused by: + failed to load source for dependency `foo` + +Caused by: + Unable to update ssh://needs-proxy.invalid/git + +Caused by: + failed to clone into: [..] + If your environment requires git authentication or proxying, try enabling `git-fetch-with-cli` + https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli + +Caused by: + failed to resolve address for needs-proxy.invalid[..] +", + ) + .run(); + + p.change_file( + ".cargo/config", + " + [net] + git-fetch-with-cli = true + ", + ); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[UPDATING] git repository `ssh://needs-proxy.invalid/git` +[RUNNING] `git fetch[..] +[ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]` + +Caused by: + failed to load source for dependency `foo` + +Caused by: + Unable to update ssh://needs-proxy.invalid/git + +Caused by: + failed to fetch into: [..] + +Caused by: + [..]process didn't exit successfully[..] +--- stderr +ssh: Could not resolve hostname[..] +fatal: [..] + +Please make sure you have the correct access rights +and the repository exists. +[..]", + ) + .run(); +} From 1840d1631d225495228c387ae13932a8cb3c5f1a Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Mon, 4 May 2020 10:53:18 -0700 Subject: [PATCH 04/24] Rename bitcode-in-rlib flag to embed-bitcode This flag changed names in nightly, so let's rename it here in Cargo to get our CI passing and enable the same wins for avoiding bitcode. --- src/cargo/core/compiler/build_context/target_info.rs | 12 ++++++------ src/cargo/core/compiler/mod.rs | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index 28d9589e1ef..8258f4d8853 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -41,7 +41,7 @@ pub struct TargetInfo { /// Extra flags to pass to `rustdoc`, see `env_args`. pub rustdocflags: Vec, /// Remove this when it hits stable (1.44) - pub supports_bitcode_in_rlib: Option, + pub supports_embed_bitcode: Option, } /// Kind of each file generated by a Unit, part of `FileType`. 
@@ -111,10 +111,10 @@ impl TargetInfo { .args(&rustflags) .env_remove("RUSTC_LOG"); - let mut bitcode_in_rlib_test = process.clone(); - bitcode_in_rlib_test.arg("-Cbitcode-in-rlib"); - let supports_bitcode_in_rlib = match kind { - CompileKind::Host => Some(rustc.cached_output(&bitcode_in_rlib_test).is_ok()), + let mut embed_bitcode_test = process.clone(); + embed_bitcode_test.arg("-Cembed-bitcode"); + let supports_embed_bitcode = match kind { + CompileKind::Host => Some(rustc.cached_output(&embed_bitcode_test).is_ok()), _ => None, }; @@ -202,7 +202,7 @@ impl TargetInfo { "RUSTDOCFLAGS", )?, cfg, - supports_bitcode_in_rlib, + supports_embed_bitcode, }) } diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index cce5301cd48..c5b09373e07 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -815,10 +815,10 @@ fn build_base_args( .bcx .target_data .info(CompileKind::Host) - .supports_bitcode_in_rlib + .supports_embed_bitcode .unwrap() { - cmd.arg("-Cbitcode-in-rlib=no"); + cmd.arg("-Cembed-bitcode=no"); } } } From e97b36ab336440ddc641b23eaa84d49f70d0bc8c Mon Sep 17 00:00:00 2001 From: Bastian Kauschke Date: Mon, 4 May 2020 21:12:02 +0200 Subject: [PATCH 05/24] double negation --- src/cargo/util/toml/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs index b0295a7245e..77e0dbb8385 100644 --- a/src/cargo/util/toml/mod.rs +++ b/src/cargo/util/toml/mod.rs @@ -78,7 +78,7 @@ fn do_read_manifest( let (mut manifest, paths) = TomlManifest::to_real_manifest(&manifest, source_id, package_root, config)?; add_unused(manifest.warnings_mut()); - if !manifest.targets().iter().any(|t| !t.is_custom_build()) { + if manifest.targets().iter().all(|t| t.is_custom_build()) { bail!( "no targets specified in the manifest\n \ either src/lib.rs, src/main.rs, a [lib] section, or \ From e2219254695b290ff1ef9d8c2015939af9a9ab1f Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Thu, 30 Apr 2020 12:24:50 -0700 Subject: [PATCH 06/24] Don't force rustc to do codegen for LTO builds This commit updates Cargo's implementation of LTO builds to do less work and hopefully be faster when doing a cold build. Additionaly this should save space on disk! The general idea is that the compiler does not need object files if it's only going to perform LTO with some artifacts. In this case all rustc needs to do is load bitcode from dependencies. This means that if you're doing an LTO build generating object code for intermediate dependencies is just wasted time! Here Cargo is updated with more intrusive knowledge about LTO. Cargo will now analyze the dependency graph to figure out which crates are being compiled with LTO, and then it will figure out which dependencies only need to have bitcode in them. Pure-bitcode artifacts are emitted with the `-Clinker-plugin-lto` flag. Some artifacts are still used in multiple scenarios (such as those shared between build scripts and final artifacts), so those are not compiled with `-Clinker-plugin-lto` since the linker is not guaranteed to know how to perform LTO. This functionality was recently implemented in rust-lang/rust#71528 where rustc is now capable of reading bitcode from `-Clinker-plugin-lto` rlibs. Previously rustc would only read its own format of bitcode, but this has now been extended! This support is now on nightly, hence this PR. 
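As a rough illustration of the analysis described above (a simplified paraphrase of the new `lto.rs` module in the diff below, not the code itself), the per-unit decision made while walking the unit graph down from the roots is approximately:

    // Simplified sketch. The real implementation also reconciles conflicting
    // answers for shared dependencies by switching them to embedding bitcode
    // in object files, so the same rlib can serve LTO and non-LTO consumers.
    enum Lto {
        Run,          // this unit performs LTO itself (`-C lto`)
        OnlyBitcode,  // only bitcode is needed (`-Clinker-plugin-lto`)
        EmbedBitcode, // both object code and bitcode are kept (rustc's default)
        None,         // no bitcode is needed (`-Cembed-bitcode=no`)
    }

    fn decide(for_host: bool, can_lto: bool, profile_lto: bool, parent_needs_bitcode: bool) -> Lto {
        if for_host {
            Lto::None // build scripts, proc macros, and plugins never take part in LTO
        } else if can_lto && profile_lto {
            Lto::Run // binaries, cdylibs, staticlibs, etc. run LTO themselves
        } else if parent_needs_bitcode {
            Lto::OnlyBitcode // rlib dependencies of an LTO root only need bitcode
        } else {
            Lto::None
        }
    }
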
--- .../compiler/build_context/target_info.rs | 2 +- src/cargo/core/compiler/context/mod.rs | 8 + src/cargo/core/compiler/fingerprint.rs | 8 +- src/cargo/core/compiler/lto.rs | 116 ++++++++ src/cargo/core/compiler/mod.rs | 41 +-- tests/testsuite/lto.rs | 254 ++++++++++++++++++ 6 files changed, 410 insertions(+), 19 deletions(-) create mode 100644 src/cargo/core/compiler/lto.rs diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index 8d9274c0bda..17ea6c6411e 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -40,7 +40,7 @@ pub struct TargetInfo { pub rustflags: Vec, /// Extra flags to pass to `rustdoc`, see `env_args`. pub rustdocflags: Vec, - /// Remove this when it hits stable (1.44) + /// Remove this when it hits stable (1.45) pub supports_embed_bitcode: Option, } diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 0855fc09ce7..ae5c1382947 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -16,6 +16,7 @@ use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts}; use super::fingerprint::Fingerprint; use super::job_queue::JobQueue; use super::layout::Layout; +use super::lto::Lto; use super::unit_graph::UnitDep; use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor}; @@ -72,6 +73,11 @@ pub struct Context<'a, 'cfg> { /// jobserver clients for each Unit (which eventually becomes a rustc /// process). pub rustc_clients: HashMap, + + /// Map of the LTO-status of each unit. This indicates what sort of + /// compilation is happening (only object, only bitcode, both, etc), and is + /// precalculated early on. + pub lto: HashMap, } impl<'a, 'cfg> Context<'a, 'cfg> { @@ -111,6 +117,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { rmeta_required: HashSet::new(), rustc_clients: HashMap::new(), pipelining, + lto: HashMap::new(), }) } @@ -123,6 +130,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { self.prepare_units()?; self.prepare()?; custom_build::build_map(&mut self)?; + super::lto::generate(&mut self)?; self.check_collistions()?; for unit in &self.bcx.roots { diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index 89263cc7c9b..598abdec8e1 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -71,6 +71,7 @@ //! -C incremental=… flag | ✓ | //! mtime of sources | ✓[^3] | //! RUSTFLAGS/RUSTDOCFLAGS | ✓ | +//! LTO flags | ✓ | //! is_std | | ✓ //! //! [^1]: Build script and bin dependencies are not included. @@ -1241,7 +1242,12 @@ fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult), + + /// This rustc invocation only needs to produce bitcode, there's no need to + /// produce object files, so we can pass `-Clinker-plugin-lto` + OnlyBitcode, + + /// This rustc invocation needs to embed bitcode in object files. This means + /// that object files may be used for a normal link, and the crate may be + /// loaded for LTO later, so both are required. + EmbedBitcode, + + /// Nothing related to LTO is required of this compilation. 
+ None, +} + +pub fn generate(cx: &mut Context<'_, '_>) -> CargoResult<()> { + let mut map = HashMap::new(); + for unit in cx.bcx.roots.iter() { + calculate(cx, &mut map, unit, false)?; + } + cx.lto = map; + Ok(()) +} + +fn calculate( + cx: &Context<'_, '_>, + map: &mut HashMap, + unit: &Unit, + require_bitcode: bool, +) -> CargoResult<()> { + let (lto, require_bitcode_for_deps) = if unit.target.for_host() { + // Disable LTO for host builds since we only really want to perform LTO + // for the final binary, and LTO on plugins/build scripts/proc macros is + // largely not desired. + (Lto::None, false) + } else if unit.target.can_lto() { + // Otherwise if this target can perform LTO then we're going to read the + // LTO value out of the profile. + assert!(!require_bitcode); // can't depend on binaries/staticlib/etc + match unit.profile.lto { + profiles::Lto::Named(s) => match s.as_str() { + "n" | "no" | "off" => (Lto::Run(Some(s)), false), + _ => (Lto::Run(Some(s)), true), + }, + profiles::Lto::Bool(true) => (Lto::Run(None), true), + profiles::Lto::Bool(false) => (Lto::None, false), + } + } else if require_bitcode { + // Otherwise we're a dependency of something, an rlib. This means that + // if our parent required bitcode of some kind then we need to generate + // bitcode. + (Lto::OnlyBitcode, true) + } else { + (Lto::None, false) + }; + + match map.entry(unit.clone()) { + // If we haven't seen this unit before then insert our value and keep + // going. + Entry::Vacant(v) => { + v.insert(lto); + } + + Entry::Occupied(mut v) => { + let result = match (lto, v.get()) { + // Targets which execute LTO cannot be depended on, so these + // units should only show up once in the dependency graph, so we + // should never hit this case. + (Lto::Run(_), _) | (_, Lto::Run(_)) => { + unreachable!("lto-able targets shouldn't show up twice") + } + + // If we calculated the same thing as before then we can bail + // out quickly. + (Lto::OnlyBitcode, Lto::OnlyBitcode) | (Lto::None, Lto::None) => return Ok(()), + + // This is where the trickiness happens. This unit needs + // bitcode and the previously calculated value for this unit + // says it didn't need bitcode (or vice versa). This means that + // we're a shared dependency between some targets which require + // LTO and some which don't. This means that instead of being + // either only-objects or only-bitcode we have to embed both in + // rlibs (used for different compilations), so we switch to + // embedding bitcode. + (Lto::OnlyBitcode, Lto::None) + | (Lto::OnlyBitcode, Lto::EmbedBitcode) + | (Lto::None, Lto::OnlyBitcode) + | (Lto::None, Lto::EmbedBitcode) => Lto::EmbedBitcode, + + // Currently this variant is never calculated above, so no need + // to handle this case. + (Lto::EmbedBitcode, _) => unreachable!(), + }; + // No need to recurse if we calculated the same value as before. 
+ if result == *v.get() { + return Ok(()); + } + v.insert(result); + } + } + + for dep in cx.unit_deps(unit) { + calculate(cx, map, &dep.unit, require_bitcode_for_deps)?; + } + Ok(()) +} diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index c5b09373e07..dd2dbea39ea 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -10,6 +10,7 @@ mod job; mod job_queue; mod layout; mod links; +mod lto; mod output_depinfo; pub mod standard_lib; mod timings; @@ -42,7 +43,7 @@ use self::output_depinfo::output_depinfo; use self::unit_graph::UnitDep; pub use crate::core::compiler::unit::{Unit, UnitInterner}; use crate::core::manifest::TargetSourcePath; -use crate::core::profiles::{Lto, PanicStrategy, Profile}; +use crate::core::profiles::{PanicStrategy, Profile}; use crate::core::{Edition, Feature, InternedString, PackageId, Target}; use crate::util::errors::{self, CargoResult, CargoResultExt, ProcessError, VerboseError}; use crate::util::machine_message::Message; @@ -740,7 +741,6 @@ fn build_base_args( let bcx = cx.bcx; let Profile { ref opt_level, - ref lto, codegen_units, debuginfo, debug_assertions, @@ -793,24 +793,31 @@ fn build_base_args( cmd.arg("-C").arg(format!("panic={}", panic)); } - // Disable LTO for host builds as prefer_dynamic and it are mutually - // exclusive. - let lto_possible = unit.target.can_lto() && !unit.target.for_host(); - match lto { - Lto::Bool(true) => { - if lto_possible { - cmd.args(&["-C", "lto"]); - } + match cx.lto[&unit] { + lto::Lto::Run(None) => { + cmd.arg("-C").arg("lto"); + } + lto::Lto::Run(Some(s)) => { + cmd.arg("-C").arg(format!("lto={}", s)); } - Lto::Named(s) => { - if lto_possible { - cmd.arg("-C").arg(format!("lto={}", s)); + lto::Lto::EmbedBitcode => {} // this is rustc's default + lto::Lto::OnlyBitcode => { + // Note that this compiler flag, like the one below, is just an + // optimization in terms of build time. If we don't pass it then + // both object code and bitcode will show up. This is lagely just + // compat until the feature lands on stable and we can remove the + // conditional branch. + if cx + .bcx + .target_data + .info(CompileKind::Host) + .supports_embed_bitcode + .unwrap() + { + cmd.arg("-Clinker-plugin-lto"); } } - // If LTO isn't being enabled then there's no need for bitcode to be - // present in the intermediate artifacts, so shave off some build time - // by removing it. 
- Lto::Bool(false) => { + lto::Lto::None => { if cx .bcx .target_data diff --git a/tests/testsuite/lto.rs b/tests/testsuite/lto.rs index 4d77b2186e2..a671e27e04d 100644 --- a/tests/testsuite/lto.rs +++ b/tests/testsuite/lto.rs @@ -3,6 +3,10 @@ use cargo_test_support::registry::Package; #[cargo_test] fn with_deps() { + if !cargo_test_support::is_nightly() { + return; + } + Package::new("bar", "0.0.1").publish(); let p = project() @@ -22,5 +26,255 @@ fn with_deps() { ) .file("src/main.rs", "extern crate bar; fn main() {}") .build(); + p.cargo("build -v --release") + .with_stderr_contains("[..]`rustc[..]--crate-name bar[..]-Clinker-plugin-lto[..]`") + .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") + .run(); +} + +#[cargo_test] +fn shared_deps() { + if !cargo_test_support::is_nightly() { + return; + } + + Package::new("bar", "0.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.0.0" + + [dependencies] + bar = "*" + + [build-dependencies] + bar = "*" + + [profile.release] + lto = true + "#, + ) + .file("build.rs", "extern crate bar; fn main() {}") + .file("src/main.rs", "extern crate bar; fn main() {}") + .build(); + p.cargo("build -v --release") + .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") + .run(); +} + +#[cargo_test] +fn build_dep_not_ltod() { + if !cargo_test_support::is_nightly() { + return; + } + + Package::new("bar", "0.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.0.0" + + [build-dependencies] + bar = "*" + + [profile.release] + lto = true + "#, + ) + .file("build.rs", "extern crate bar; fn main() {}") + .file("src/main.rs", "fn main() {}") + .build(); + p.cargo("build -v --release") + .with_stderr_contains("[..]`rustc[..]--crate-name bar[..]-Cembed-bitcode=no[..]`") + .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") + .run(); +} + +#[cargo_test] +fn complicated() { + if !cargo_test_support::is_nightly() { + return; + } + + Package::new("dep-shared", "0.0.1") + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("dep-normal2", "0.0.1") + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("dep-normal", "0.0.1") + .dep("dep-shared", "*") + .dep("dep-normal2", "*") + .file( + "src/lib.rs", + " + pub fn foo() { + dep_shared::foo(); + dep_normal2::foo(); + } + ", + ) + .publish(); + Package::new("dep-build2", "0.0.1") + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("dep-build", "0.0.1") + .dep("dep-shared", "*") + .dep("dep-build2", "*") + .file( + "src/lib.rs", + " + pub fn foo() { + dep_shared::foo(); + dep_build2::foo(); + } + ", + ) + .publish(); + Package::new("dep-proc-macro2", "0.0.1") + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("dep-proc-macro", "0.0.1") + .proc_macro(true) + .dep("dep-shared", "*") + .dep("dep-proc-macro2", "*") + .file( + "src/lib.rs", + " + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_attribute] + pub fn foo(_: TokenStream, a: TokenStream) -> TokenStream { + dep_shared::foo(); + dep_proc_macro2::foo(); + a + } + ", + ) + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.0.0" + + [lib] + crate-type = ['cdylib', 'staticlib'] + + [dependencies] + dep-normal = "*" + dep-proc-macro = "*" + + [build-dependencies] + dep-build = "*" + + [profile.release] + lto = true + "#, + ) + .file("build.rs", "fn 
main() { dep_build::foo() }") + .file( + "src/main.rs", + "#[dep_proc_macro::foo] fn main() { dep_normal::foo() }", + ) + .file( + "src/lib.rs", + "#[dep_proc_macro::foo] pub fn foo() { dep_normal::foo() }", + ) + .build(); + p.cargo("build -v --release") + // normal deps and their transitive dependencies do not need object + // code, so they should have linker-plugin-lto specified + .with_stderr_contains("[..]`rustc[..]--crate-name dep_normal2 [..]-Clinker-plugin-lto[..]`") + .with_stderr_contains("[..]`rustc[..]--crate-name dep_normal [..]-Clinker-plugin-lto[..]`") + // build dependencies and their transitive deps don't need any bitcode, + // so embedding should be turned off + .with_stderr_contains("[..]`rustc[..]--crate-name dep_build2 [..]-Cembed-bitcode=no[..]`") + .with_stderr_contains("[..]`rustc[..]--crate-name dep_build [..]-Cembed-bitcode=no[..]`") + .with_stderr_contains( + "[..]`rustc[..]--crate-name build_script_build [..]-Cembed-bitcode=no[..]`", + ) + // proc macro deps are the same as build deps here + .with_stderr_contains( + "[..]`rustc[..]--crate-name dep_proc_macro2 [..]-Cembed-bitcode=no[..]`", + ) + .with_stderr_contains( + "[..]`rustc[..]--crate-name dep_proc_macro [..]-Cembed-bitcode=no[..]`", + ) + .with_stderr_contains("[..]`rustc[..]--crate-name test [..]--crate-type bin[..]-C lto[..]`") + .with_stderr_contains( + "[..]`rustc[..]--crate-name test [..]--crate-type cdylib[..]-C lto[..]`", + ) + .with_stderr_contains("[..]`rustc[..]--crate-name dep_shared [..]`") + .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C lto[..]") + .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-Clinker-plugin-lto[..]") + .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-Cembed-bitcode[..]") + .run(); +} + +#[cargo_test] +fn off_in_manifest_works() { + if !cargo_test_support::is_nightly() { + return; + } + + Package::new("bar", "0.0.1") + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.0.0" + + [dependencies] + bar = "*" + + [profile.release] + lto = "off" + "#, + ) + .file("src/main.rs", "fn main() { bar::foo() }") + .build(); + p.cargo("build -v --release").run(); +} + +#[cargo_test] +fn between_builds() { + if !cargo_test_support::is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.0.0" + + [profile.release] + lto = true + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file("src/main.rs", "fn main() { test::foo() }") + .build(); + p.cargo("build -v --release --lib").run(); p.cargo("build -v --release").run(); } From 7a06be149c114bffb7c65732eca09f3060afd96f Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sun, 26 Apr 2020 13:20:44 -0700 Subject: [PATCH 07/24] Add CrateType to replace LibKind. 
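For reference, the `CrateType` enum that the following hunks switch to has roughly this shape (reconstructed from how it is used below, not copied from its actual definition; the `Other` fallback variant is an assumption):

    // Sketch of the variants exercised in this patch; `as_str()` yields the
    // string that is passed to rustc's `--crate-type` flag.
    enum CrateType {
        Bin,
        Lib,
        Rlib,
        Dylib,
        Cdylib,
        Staticlib,
        ProcMacro,
        Other(String), // assumed fallback for crate types Cargo does not know about
    }
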
--- .../compiler/build_context/target_info.rs | 40 +++--- src/cargo/core/compiler/compilation.rs | 2 +- .../compiler/context/compilation_files.rs | 25 ++-- src/cargo/core/compiler/custom_build.rs | 2 +- src/cargo/core/compiler/mod.rs | 14 ++- src/cargo/core/compiler/unit.rs | 8 +- src/cargo/core/compiler/unit_dependencies.rs | 4 +- src/cargo/core/manifest.rs | 118 ++++-------------- src/cargo/core/mod.rs | 2 +- src/cargo/ops/cargo_compile.rs | 4 +- src/cargo/util/toml/mod.rs | 2 +- src/cargo/util/toml/targets.rs | 13 +- 12 files changed, 89 insertions(+), 145 deletions(-) diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index 17ea6c6411e..698a9e4080f 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -1,4 +1,4 @@ -use crate::core::compiler::{BuildOutput, CompileKind, CompileTarget}; +use crate::core::compiler::{BuildOutput, CompileKind, CompileTarget, CrateType}; use crate::core::{Dependency, TargetKind, Workspace}; use crate::util::config::{Config, StringList, TargetConfig}; use crate::util::{CargoResult, CargoResultExt, ProcessBuilder, Rustc}; @@ -25,7 +25,7 @@ pub struct TargetInfo { /// `Some((prefix, suffix))`, for example `libcargo.so` would be /// `Some(("lib", ".so")). The value is `None` if the crate type is not /// supported. - crate_types: RefCell>>, + crate_types: RefCell>>, /// `cfg` information extracted from `rustc --print=cfg`. cfg: Vec, /// Path to the sysroot. @@ -123,10 +123,16 @@ impl TargetInfo { } let crate_type_process = process.clone(); - const KNOWN_CRATE_TYPES: &[&str] = - &["bin", "rlib", "dylib", "cdylib", "staticlib", "proc-macro"]; + const KNOWN_CRATE_TYPES: &[CrateType] = &[ + CrateType::Bin, + CrateType::Rlib, + CrateType::Dylib, + CrateType::Cdylib, + CrateType::Staticlib, + CrateType::ProcMacro, + ]; for crate_type in KNOWN_CRATE_TYPES.iter() { - process.arg("--crate-type").arg(crate_type); + process.arg("--crate-type").arg(crate_type.as_str()); } process.arg("--print=sysroot"); @@ -140,7 +146,7 @@ impl TargetInfo { let mut map = HashMap::new(); for crate_type in KNOWN_CRATE_TYPES { let out = parse_crate_type(crate_type, &process, &output, &error, &mut lines)?; - map.insert(crate_type.to_string(), out); + map.insert(crate_type.clone(), out); } let line = match lines.next() { @@ -228,13 +234,19 @@ impl TargetInfo { /// Returns `None` if the target does not support the given crate type. pub fn file_types( &self, - crate_type: &str, + crate_type: &CrateType, flavor: FileFlavor, kind: &TargetKind, target_triple: &str, ) -> CargoResult>> { + let crate_type = if *crate_type == CrateType::Lib { + CrateType::Rlib + } else { + crate_type.clone() + }; + let mut crate_types = self.crate_types.borrow_mut(); - let entry = crate_types.entry(crate_type.to_string()); + let entry = crate_types.entry(crate_type.clone()); let crate_type_info = match entry { Entry::Occupied(o) => &*o.into_mut(), Entry::Vacant(v) => { @@ -255,7 +267,7 @@ impl TargetInfo { // See rust-lang/cargo#4500. 
if target_triple.ends_with("-windows-msvc") - && crate_type.ends_with("dylib") + && (crate_type == CrateType::Dylib || crate_type == CrateType::Cdylib) && suffix == ".dll" { ret.push(FileType { @@ -265,7 +277,7 @@ impl TargetInfo { should_replace_hyphens: false, }) } else if target_triple.ends_with("windows-gnu") - && crate_type.ends_with("dylib") + && (crate_type == CrateType::Dylib || crate_type == CrateType::Cdylib) && suffix == ".dll" { // LD can link DLL directly, but LLD requires the import library. @@ -278,7 +290,7 @@ impl TargetInfo { } // See rust-lang/cargo#4535. - if target_triple.starts_with("wasm32-") && crate_type == "bin" && suffix == ".js" { + if target_triple.starts_with("wasm32-") && crate_type == CrateType::Bin && suffix == ".js" { ret.push(FileType { suffix: ".wasm".to_string(), prefix: prefix.clone(), @@ -319,10 +331,10 @@ impl TargetInfo { Ok(Some(ret)) } - fn discover_crate_type(&self, crate_type: &str) -> CargoResult> { + fn discover_crate_type(&self, crate_type: &CrateType) -> CargoResult> { let mut process = self.crate_type_process.clone(); - process.arg("--crate-type").arg(crate_type); + process.arg("--crate-type").arg(crate_type.as_str()); let output = process.exec_with_output().chain_err(|| { format!( @@ -353,7 +365,7 @@ impl TargetInfo { /// This function can not handle more than one file per type (with wasm32-unknown-emscripten, there /// are two files for bin (`.wasm` and `.js`)). fn parse_crate_type( - crate_type: &str, + crate_type: &CrateType, cmd: &ProcessBuilder, output: &str, error: &str, diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs index efcaff1355e..7324618ff45 100644 --- a/src/cargo/core/compiler/compilation.rs +++ b/src/cargo/core/compiler/compilation.rs @@ -167,7 +167,7 @@ impl<'cfg> Compilation<'cfg> { } for crate_type in unit.target.rustc_crate_types() { - p.arg("--crate-type").arg(crate_type); + p.arg("--crate-type").arg(crate_type.as_str()); } Ok(p) diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index 9fc2f4117a4..b2c48d4d7cd 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -9,7 +9,7 @@ use lazycell::LazyCell; use log::info; use super::{BuildContext, CompileKind, Context, FileFlavor, Layout}; -use crate::core::compiler::{CompileMode, CompileTarget, Unit}; +use crate::core::compiler::{CompileMode, CompileTarget, CrateType, Unit}; use crate::core::{Target, TargetKind, Workspace}; use crate::util::{self, CargoResult}; @@ -274,7 +274,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { let info = bcx.target_data.info(kind); let file_types = info .file_types( - "bin", + &CrateType::Bin, FileFlavor::Normal, &TargetKind::Bin, bcx.target_data.short_name(&kind), @@ -416,12 +416,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { let info = bcx.target_data.info(unit.kind); let file_stem = self.file_stem(unit); - let mut add = |crate_type: &str, flavor: FileFlavor| -> CargoResult<()> { - let crate_type = if crate_type == "lib" { - "rlib" - } else { - crate_type - }; + let mut add = |crate_type: &CrateType, flavor: FileFlavor| -> CargoResult<()> { let file_types = info.file_types( crate_type, flavor, @@ -465,22 +460,22 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { } Ok(()) }; - match *unit.target.kind() { + match unit.target.kind() { TargetKind::Bin | TargetKind::CustomBuild | TargetKind::ExampleBin | TargetKind::Bench | TargetKind::Test => { - 
add("bin", FileFlavor::Normal)?; + add(&CrateType::Bin, FileFlavor::Normal)?; } TargetKind::Lib(..) | TargetKind::ExampleLib(..) if unit.mode.is_any_test() => { - add("bin", FileFlavor::Normal)?; + add(&CrateType::Bin, FileFlavor::Normal)?; } - TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => { - for kind in kinds { + TargetKind::ExampleLib(crate_types) | TargetKind::Lib(crate_types) => { + for crate_type in crate_types { add( - kind.crate_type(), - if kind.linkable() { + crate_type, + if crate_type.is_linkable() { FileFlavor::Linkable { rmeta: false } } else { FileFlavor::Normal diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 38af4e7d2a6..6b642392018 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -738,7 +738,7 @@ pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { if dep_unit.target.for_host() { ret.plugins.extend(dep_scripts.to_link.iter().cloned()); - } else if dep_unit.target.linkable() { + } else if dep_unit.target.is_linkable() { for &(pkg, metadata) in dep_scripts.to_link.iter() { add_to_link(&mut ret, pkg, metadata); } diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index dd2dbea39ea..a24aaf0ab9b 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -4,6 +4,7 @@ mod build_plan; mod compilation; mod compile_kind; mod context; +mod crate_type; mod custom_build; mod fingerprint; mod job; @@ -35,6 +36,7 @@ use self::build_plan::BuildPlan; pub use self::compilation::{Compilation, Doctest}; pub use self::compile_kind::{CompileKind, CompileTarget}; pub use self::context::{Context, Metadata}; +pub use self::crate_type::CrateType; pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; pub use self::job::Freshness; use self::job::{Job, Work}; @@ -532,7 +534,7 @@ where fn prepare_rustc( cx: &mut Context<'_, '_>, - crate_types: &[&str], + crate_types: &[CrateType], unit: &Unit, ) -> CargoResult { let is_primary = cx.is_primary_package(unit); @@ -734,7 +736,7 @@ fn build_base_args( cx: &mut Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit, - crate_types: &[&str], + crate_types: &[CrateType], ) -> CargoResult<()> { assert!(!unit.mode.is_run_custom_build()); @@ -764,7 +766,7 @@ fn build_base_args( if !test { for crate_type in crate_types.iter() { - cmd.arg("--crate-type").arg(crate_type); + cmd.arg("--crate-type").arg(crate_type.as_str()); } } @@ -780,7 +782,7 @@ fn build_base_args( } let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) - || (crate_types.contains(&"dylib") && bcx.ws.members().any(|p| *p != unit.pkg)); + || (crate_types.contains(&CrateType::Dylib) && bcx.ws.members().any(|p| *p != unit.pkg)); if prefer_dynamic { cmd.arg("-C").arg("prefer-dynamic"); } @@ -984,7 +986,7 @@ fn build_deps_args( // error in the future (see PR #4797). 
if !deps .iter() - .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.linkable()) + .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_linkable()) { if let Some(dep) = deps .iter() @@ -1088,7 +1090,7 @@ pub fn extern_args( }; for dep in deps { - if dep.unit.target.linkable() && !dep.unit.mode.is_doc() { + if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() { link_to(dep, dep.extern_crate_name, dep.noprelude)?; } } diff --git a/src/cargo/core/compiler/unit.rs b/src/cargo/core/compiler/unit.rs index 84565a6517e..a7f80321d41 100644 --- a/src/cargo/core/compiler/unit.rs +++ b/src/cargo/core/compiler/unit.rs @@ -1,5 +1,5 @@ -use crate::core::compiler::{CompileKind, CompileMode}; -use crate::core::manifest::{LibKind, Target, TargetKind}; +use crate::core::compiler::{CompileKind, CompileMode, CrateType}; +use crate::core::manifest::{Target, TargetKind}; use crate::core::{profiles::Profile, InternedString, Package}; use crate::util::hex::short_hash; use crate::util::Config; @@ -178,9 +178,9 @@ impl UnitInterner { // // At some point in the future, it would be nice to have a // first-class way of overriding or specifying crate-types. - (true, TargetKind::Lib(crate_types)) if crate_types.contains(&LibKind::Dylib) => { + (true, TargetKind::Lib(crate_types)) if crate_types.contains(&CrateType::Dylib) => { let mut new_target = Target::clone(target); - new_target.set_kind(TargetKind::Lib(vec![LibKind::Rlib])); + new_target.set_kind(TargetKind::Lib(vec![CrateType::Rlib])); new_target } _ => target.clone(), diff --git a/src/cargo/core/compiler/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs index 57cb6c68d06..da2ee2660cf 100644 --- a/src/cargo/core/compiler/unit_dependencies.rs +++ b/src/cargo/core/compiler/unit_dependencies.rs @@ -476,7 +476,7 @@ fn maybe_lib( unit.pkg .targets() .iter() - .find(|t| t.linkable()) + .find(|t| t.is_linkable()) .map(|t| { let mode = check_or_build_mode(unit.mode, t); new_unit_dep( @@ -681,7 +681,7 @@ fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph) { // Only deps with `links`. .filter(|other| { other.unit.pkg != unit.pkg - && other.unit.target.linkable() + && other.unit.target.is_linkable() && other.unit.pkg.manifest().links().is_some() }) // Get the RunCustomBuild for other lib. diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index 646708e5901..6e96cdf4c3d 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -10,6 +10,7 @@ use serde::ser; use serde::Serialize; use url::Url; +use crate::core::compiler::CrateType; use crate::core::interning::InternedString; use crate::core::resolver::ResolveBehavior; use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; @@ -96,73 +97,13 @@ pub struct ManifestMetadata { pub links: Option, } -#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub enum LibKind { - Lib, - Rlib, - Dylib, - ProcMacro, - Other(String), -} - -impl LibKind { - /// Returns the argument suitable for `--crate-type` to pass to rustc. - pub fn crate_type(&self) -> &str { - match *self { - LibKind::Lib => "lib", - LibKind::Rlib => "rlib", - LibKind::Dylib => "dylib", - LibKind::ProcMacro => "proc-macro", - LibKind::Other(ref s) => s, - } - } - - pub fn linkable(&self) -> bool { - match *self { - LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true, - LibKind::Other(..) 
=> false, - } - } - - pub fn requires_upstream_objects(&self) -> bool { - match *self { - // "lib" == "rlib" and is a compilation that doesn't actually - // require upstream object files to exist, only upstream metadata - // files. As a result, it doesn't require upstream artifacts - LibKind::Lib | LibKind::Rlib => false, - - // Everything else, however, is some form of "linkable output" or - // something that requires upstream object files. - _ => true, - } - } -} - -impl fmt::Debug for LibKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.crate_type().fmt(f) - } -} - -impl<'a> From<&'a String> for LibKind { - fn from(string: &'a String) -> Self { - match string.as_ref() { - "lib" => LibKind::Lib, - "rlib" => LibKind::Rlib, - "dylib" => LibKind::Dylib, - "proc-macro" => LibKind::ProcMacro, - s => LibKind::Other(s.to_string()), - } - } -} - #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum TargetKind { - Lib(Vec), + Lib(Vec), Bin, Test, Bench, - ExampleLib(Vec), + ExampleLib(Vec), ExampleBin, CustomBuild, } @@ -173,8 +114,8 @@ impl ser::Serialize for TargetKind { S: ser::Serializer, { use self::TargetKind::*; - match *self { - Lib(ref kinds) => s.collect_seq(kinds.iter().map(LibKind::crate_type)), + match self { + Lib(kinds) => s.collect_seq(kinds.iter().map(|t| t.to_string())), Bin => ["bin"].serialize(s), ExampleBin | ExampleLib(_) => ["example"].serialize(s), Test => ["test"].serialize(s), @@ -303,7 +244,7 @@ struct SerializedTarget<'a> { kind: &'a TargetKind, /// Corresponds to `--crate-type` compiler attribute. /// See https://doc.rust-lang.org/reference/linkage.html - crate_types: Vec<&'a str>, + crate_types: Vec, name: &'a str, src_path: Option<&'a PathBuf>, edition: &'a str, @@ -659,7 +600,7 @@ impl Target { pub fn lib_target( name: &str, - crate_targets: Vec, + crate_targets: Vec, src_path: PathBuf, edition: Edition, ) -> Target { @@ -712,15 +653,12 @@ impl Target { pub fn example_target( name: &str, - crate_targets: Vec, + crate_targets: Vec, src_path: PathBuf, required_features: Option>, edition: Edition, ) -> Target { - let kind = if crate_targets.is_empty() - || crate_targets - .iter() - .all(|t| *t == LibKind::Other("bin".into())) + let kind = if crate_targets.is_empty() || crate_targets.iter().all(|t| *t == CrateType::Bin) { TargetKind::ExampleBin } else { @@ -812,9 +750,9 @@ impl Target { pub fn doctestable(&self) -> bool { match self.kind() { - TargetKind::Lib(ref kinds) => kinds - .iter() - .any(|k| *k == LibKind::Rlib || *k == LibKind::Lib || *k == LibKind::ProcMacro), + TargetKind::Lib(ref kinds) => kinds.iter().any(|k| { + *k == CrateType::Rlib || *k == CrateType::Lib || *k == CrateType::ProcMacro + }), _ => false, } } @@ -836,29 +774,25 @@ impl Target { pub fn is_dylib(&self) -> bool { match self.kind() { - TargetKind::Lib(libs) => libs.iter().any(|l| *l == LibKind::Dylib), + TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Dylib), _ => false, } } pub fn is_cdylib(&self) -> bool { - let libs = match self.kind() { - TargetKind::Lib(libs) => libs, - _ => return false, - }; - libs.iter().any(|l| match *l { - LibKind::Other(ref s) => s == "cdylib", + match self.kind() { + TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Cdylib), _ => false, - }) + } } /// Returns whether this target produces an artifact which can be linked /// into a Rust crate. /// /// This only returns true for certain kinds of libraries. 
- pub fn linkable(&self) -> bool { + pub fn is_linkable(&self) -> bool { match self.kind() { - TargetKind::Lib(kinds) => kinds.iter().any(|k| k.linkable()), + TargetKind::Lib(kinds) => kinds.iter().any(|k| k.is_linkable()), _ => false, } } @@ -900,25 +834,23 @@ impl Target { } /// Returns the arguments suitable for `--crate-type` to pass to rustc. - pub fn rustc_crate_types(&self) -> Vec<&str> { + pub fn rustc_crate_types(&self) -> Vec { match self.kind() { - TargetKind::Lib(ref kinds) | TargetKind::ExampleLib(ref kinds) => { - kinds.iter().map(LibKind::crate_type).collect() - } + TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => kinds.clone(), TargetKind::CustomBuild | TargetKind::Bench | TargetKind::Test | TargetKind::ExampleBin - | TargetKind::Bin => vec!["bin"], + | TargetKind::Bin => vec![CrateType::Bin], } } pub fn can_lto(&self) -> bool { match self.kind() { - TargetKind::Lib(ref v) => { - !v.contains(&LibKind::Rlib) - && !v.contains(&LibKind::Dylib) - && !v.contains(&LibKind::Lib) + TargetKind::Lib(v) => { + !v.contains(&CrateType::Rlib) + && !v.contains(&CrateType::Dylib) + && !v.contains(&CrateType::Lib) } _ => true, } diff --git a/src/cargo/core/mod.rs b/src/cargo/core/mod.rs index b73c546493b..3d664d153b7 100644 --- a/src/cargo/core/mod.rs +++ b/src/cargo/core/mod.rs @@ -5,7 +5,7 @@ pub use self::features::{ pub use self::features::{CliUnstable, Edition, Feature, Features}; pub use self::interning::InternedString; pub use self::manifest::{EitherManifest, VirtualManifest}; -pub use self::manifest::{LibKind, Manifest, Target, TargetKind}; +pub use self::manifest::{Manifest, Target, TargetKind}; pub use self::package::{Package, PackageSet}; pub use self::package_id::PackageId; pub use self::package_id_spec::PackageIdSpec; diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 762da60b02b..24ce6f901bb 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -832,9 +832,11 @@ fn generate_targets( for proposal in filter_targets(packages, Target::is_lib, false, mode) { let Proposal { target, pkg, .. 
} = proposal; if mode.is_doc_test() && !target.doctestable() { + let types = target.rustc_crate_types(); + let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect(); ws.config().shell().warn(format!( "doc tests are not supported for crate type(s) `{}` in package `{}`", - target.rustc_crate_types().join(", "), + types_str.join(", "), pkg.name() ))?; } else { diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs index 77e0dbb8385..c8cceecde87 100644 --- a/src/cargo/util/toml/mod.rs +++ b/src/cargo/util/toml/mod.rs @@ -14,7 +14,7 @@ use serde::{Deserialize, Serialize}; use url::Url; use crate::core::dependency::DepKind; -use crate::core::manifest::{LibKind, ManifestMetadata, TargetSourcePath, Warnings}; +use crate::core::manifest::{ManifestMetadata, TargetSourcePath, Warnings}; use crate::core::resolver::ResolveBehavior; use crate::core::{Dependency, InternedString, Manifest, PackageId, Summary, Target}; use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest, Workspace}; diff --git a/src/cargo/util/toml/targets.rs b/src/cargo/util/toml/targets.rs index fee2b6691c0..b383adee102 100644 --- a/src/cargo/util/toml/targets.rs +++ b/src/cargo/util/toml/targets.rs @@ -15,9 +15,10 @@ use std::fs::{self, DirEntry}; use std::path::{Path, PathBuf}; use super::{ - LibKind, PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, - TomlExampleTarget, TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget, + PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, TomlExampleTarget, + TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget, }; +use crate::core::compiler::CrateType; use crate::core::{Edition, Feature, Features, Target}; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::restricted_names; @@ -222,15 +223,15 @@ fn clean_lib( if kinds.len() > 1 { anyhow::bail!("cannot mix `proc-macro` crate type with others"); } - vec![LibKind::ProcMacro] + vec![CrateType::ProcMacro] } (_, Some(true), Some(true)) => { anyhow::bail!("`lib.plugin` and `lib.proc-macro` cannot both be `true`") } (Some(kinds), _, _) => kinds.iter().map(|s| s.into()).collect(), - (None, Some(true), _) => vec![LibKind::Dylib], - (None, _, Some(true)) => vec![LibKind::ProcMacro], - (None, _, _) => vec![LibKind::Lib], + (None, Some(true), _) => vec![CrateType::Dylib], + (None, _, Some(true)) => vec![CrateType::ProcMacro], + (None, _, _) => vec![CrateType::Lib], }; let mut target = Target::lib_target(&lib.name(), crate_types, path, edition); From eac3b66bd457c6beb4295b077871300e290b2448 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Tue, 5 May 2020 14:20:56 -0700 Subject: [PATCH 08/24] Rework how Cargo computes the rustc file outputs. 
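To make the filename handling below concrete, a hypothetical example (not taken from the patch) of what the two new `FileType` helpers produce for a binary target named `web-stuff`, assuming a `file_type` and `target` in scope, a metadata hash of `1f2a3b4c`, and an empty prefix/suffix as for executables on Linux:

    // output_filename(): the artifact name rustc writes into the deps directory;
    // it always uses the crate name, so hyphens become underscores.
    assert_eq!(file_type.output_filename(&target, Some("1f2a3b4c")), "web_stuff-1f2a3b4c");
    // uplift_filename(): the name Cargo uses when copying the artifact up to the
    // destination directory; binaries keep their original, hyphenated name.
    assert_eq!(file_type.uplift_filename(&target), "web-stuff");
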
--- src/cargo/core/compiler/build_context/mod.rs | 2 +- .../compiler/build_context/target_info.rs | 241 ++++++++--- .../compiler/context/compilation_files.rs | 383 +++++++----------- src/cargo/core/compiler/context/mod.rs | 4 +- src/cargo/core/compiler/crate_type.rs | 97 +++++ src/cargo/core/compiler/fingerprint.rs | 18 +- src/cargo/core/compiler/layout.rs | 7 +- src/cargo/core/compiler/mod.rs | 54 +-- src/cargo/core/compiler/output_depinfo.rs | 2 +- src/cargo/core/manifest.rs | 29 +- src/cargo/ops/mod.rs | 2 +- tests/testsuite/bench.rs | 2 +- tests/testsuite/build_plan.rs | 4 +- tests/testsuite/cache_messages.rs | 20 + tests/testsuite/dep_info.rs | 16 +- tests/testsuite/metabuild.rs | 6 +- tests/testsuite/out_dir.rs | 4 +- tests/testsuite/test.rs | 2 +- 18 files changed, 516 insertions(+), 377 deletions(-) create mode 100644 src/cargo/core/compiler/crate_type.rs diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 8dac81cffd2..ac7d6eca295 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -10,7 +10,7 @@ use std::collections::HashMap; use std::path::PathBuf; mod target_info; -pub use self::target_info::{FileFlavor, RustcTargetData, TargetInfo}; +pub use self::target_info::{FileFlavor, FileType, RustcTargetData, TargetInfo}; /// The build context, containing all information about a build task. /// diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index 698a9e4080f..d3bab3365bf 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -1,5 +1,5 @@ -use crate::core::compiler::{BuildOutput, CompileKind, CompileTarget, CrateType}; -use crate::core::{Dependency, TargetKind, Workspace}; +use crate::core::compiler::{BuildOutput, CompileKind, CompileMode, CompileTarget, CrateType}; +use crate::core::{Dependency, Target, TargetKind, Workspace}; use crate::util::config::{Config, StringList, TargetConfig}; use crate::util::{CargoResult, CargoResultExt, ProcessBuilder, Rustc}; use cargo_platform::{Cfg, CfgExpr}; @@ -49,41 +49,73 @@ pub struct TargetInfo { pub enum FileFlavor { /// Not a special file type. Normal, - /// Like `Normal`, but not directly executable + /// Like `Normal`, but not directly executable. + /// For example, a `.wasm` file paired with the "normal" `.js` file. Auxiliary, /// Something you can link against (e.g., a library). - Linkable { rmeta: bool }, + Linkable, + /// An `.rmeta` Rust metadata file. + Rmeta, /// Piece of external debug information (e.g., `.dSYM`/`.pdb` file). DebugInfo, } /// Type of each file generated by a Unit. +#[derive(Debug)] pub struct FileType { /// The kind of file. pub flavor: FileFlavor, + /// The crate-type that generates this file. + /// + /// `None` for things that aren't associated with a specific crate type, + /// for example `rmeta` files. + pub crate_type: Option, /// The suffix for the file (for example, `.rlib`). /// This is an empty string for executables on Unix-like platforms. suffix: String, /// The prefix for the file (for example, `lib`). /// This is an empty string for things like executables. prefix: String, - /// Flag to convert hyphen to underscore. - /// - /// wasm bin targets will generate two files in deps such as - /// "web-stuff.js" and "web_stuff.wasm". Note the different usages of "-" - /// and "_". 
This flag indicates that the stem "web-stuff" should be - /// converted to "web_stuff". + /// Flag to convert hyphen to underscore when uplifting. should_replace_hyphens: bool, } impl FileType { - pub fn filename(&self, stem: &str) -> String { - let stem = if self.should_replace_hyphens { - stem.replace("-", "_") + /// The filename for this FileType crated by rustc. + pub fn output_filename(&self, target: &Target, metadata: Option<&str>) -> String { + match metadata { + Some(metadata) => format!( + "{}{}-{}{}", + self.prefix, + target.crate_name(), + metadata, + self.suffix + ), + None => format!("{}{}{}", self.prefix, target.crate_name(), self.suffix), + } + } + + /// The filename for this FileType that Cargo should use when "uplifting" + /// it to the destination directory. + pub fn uplift_filename(&self, target: &Target) -> String { + let name = if self.should_replace_hyphens { + target.crate_name() } else { - stem.to_string() + target.name().to_string() }; - format!("{}{}{}", self.prefix, stem, self.suffix) + format!("{}{}{}", self.prefix, name, self.suffix) + } + + /// Creates a new instance representing a `.rmeta` file. + pub fn new_rmeta() -> FileType { + // Note that even binaries use the `lib` prefix. + FileType { + flavor: FileFlavor::Rmeta, + crate_type: None, + suffix: ".rmeta".to_string(), + prefix: "lib".to_string(), + should_replace_hyphens: true, + } } } @@ -232,11 +264,10 @@ impl TargetInfo { /// Returns the list of file types generated by the given crate type. /// /// Returns `None` if the target does not support the given crate type. - pub fn file_types( + fn file_types( &self, crate_type: &CrateType, flavor: FileFlavor, - kind: &TargetKind, target_triple: &str, ) -> CargoResult>> { let crate_type = if *crate_type == CrateType::Lib { @@ -262,58 +293,95 @@ impl TargetInfo { suffix: suffix.clone(), prefix: prefix.clone(), flavor, - should_replace_hyphens: false, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: crate_type != CrateType::Bin, }]; - // See rust-lang/cargo#4500. - if target_triple.ends_with("-windows-msvc") - && (crate_type == CrateType::Dylib || crate_type == CrateType::Cdylib) - && suffix == ".dll" - { - ret.push(FileType { - suffix: ".dll.lib".to_string(), - prefix: prefix.clone(), - flavor: FileFlavor::Normal, - should_replace_hyphens: false, - }) - } else if target_triple.ends_with("windows-gnu") - && (crate_type == CrateType::Dylib || crate_type == CrateType::Cdylib) - && suffix == ".dll" - { - // LD can link DLL directly, but LLD requires the import library. - ret.push(FileType { - suffix: ".dll.a".to_string(), - prefix: "lib".to_string(), - flavor: FileFlavor::Normal, - should_replace_hyphens: false, - }) + // Window shared library import/export files. + if crate_type.is_dynamic() { + if target_triple.ends_with("-windows-msvc") { + assert!(suffix == ".dll"); + // See https://docs.microsoft.com/en-us/cpp/build/reference/working-with-import-libraries-and-export-files + // for more information about DLL import/export files. 
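+ // Illustrative example with a hypothetical crate: a `cdylib` target named
+ // `foo-bar` built for `x86_64-pc-windows-msvc` yields `foo_bar.dll` plus
+ // the import/export files registered below, i.e. `foo_bar.dll.lib` and
+ // `foo_bar.dll.exp` (the `.exp` file may be absent with some linkers, as
+ // noted below).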
+ ret.push(FileType { + suffix: ".dll.lib".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::Auxiliary, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: true, + }); + // NOTE: lld does not produce these + ret.push(FileType { + suffix: ".dll.exp".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::Auxiliary, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: true, + }); + } else if target_triple.ends_with("windows-gnu") { + assert!(suffix == ".dll"); + // See https://cygwin.com/cygwin-ug-net/dll.html for more + // information about GNU import libraries. + // LD can link DLL directly, but LLD requires the import library. + ret.push(FileType { + suffix: ".dll.a".to_string(), + prefix: "lib".to_string(), + flavor: FileFlavor::Auxiliary, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: true, + }) + } } - // See rust-lang/cargo#4535. if target_triple.starts_with("wasm32-") && crate_type == CrateType::Bin && suffix == ".js" { + // emscripten binaries generate a .js file, which loads a .wasm + // file. ret.push(FileType { suffix: ".wasm".to_string(), prefix: prefix.clone(), flavor: FileFlavor::Auxiliary, + crate_type: Some(crate_type.clone()), + // Name `foo-bar` will generate a `foo_bar.js` and + // `foo_bar.wasm`. Cargo will translate the underscore and + // copy `foo_bar.js` to `foo-bar.js`. However, the wasm + // filename is embedded in the .js file with an underscore, so + // it should not contain hyphens. should_replace_hyphens: true, - }) + }); + // And a map file for debugging. This is only emitted with debug=2 + // (-g4 for emcc). + ret.push(FileType { + suffix: ".wasm.map".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::DebugInfo, + crate_type: Some(crate_type.clone()), + should_replace_hyphens: true, + }); } - // See rust-lang/cargo#4490, rust-lang/cargo#4960. - // Only uplift debuginfo for binaries. - // - Tests are run directly from `target/debug/deps/` with the - // metadata hash still in the filename. - // - Examples are only uplifted for apple because the symbol file - // needs to match the executable file name to be found (i.e., it - // needs to remove the hash in the filename). On Windows, the path - // to the .pdb with the hash is embedded in the executable. + // Handle separate debug files. let is_apple = target_triple.contains("-apple-"); - if *kind == TargetKind::Bin || (*kind == TargetKind::ExampleBin && is_apple) { + if matches!( + crate_type, + CrateType::Bin | CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro + ) { if is_apple { + let suffix = if crate_type == CrateType::Bin { + ".dSYM".to_string() + } else { + ".dylib.dSYM".to_string() + }; ret.push(FileType { - suffix: ".dSYM".to_string(), + suffix, prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, + crate_type: Some(crate_type.clone()), + // macOS tools like lldb use all sorts of magic to locate + // dSYM files. See https://lldb.llvm.org/use/symbols.html + // for some details. It seems like a `.dSYM` located next + // to the executable with the same name is one method. The + // dSYM should have the same hyphens as the executable for + // the names to match. should_replace_hyphens: false, }) } else if target_triple.ends_with("-msvc") { @@ -321,8 +389,13 @@ impl TargetInfo { suffix: ".pdb".to_string(), prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, - // rustc calls the linker with underscores, and the - // filename is embedded in the executable. 
+ crate_type: Some(crate_type.clone()), + // The absolute path to the pdb file is embedded in the + // executable. If the exe/pdb pair is moved to another + // machine, then debuggers will look in the same directory + // of the exe with the original pdb filename. Since the + // original name contains underscores, they need to be + // preserved. should_replace_hyphens: true, }) } @@ -353,6 +426,62 @@ impl TargetInfo { &mut output.lines(), )?) } + + /// Returns all the file types generated by rustc for the given mode/target_kind. + /// + /// The first value is a Vec of file types generated, the second value is + /// a list of CrateTypes that are not supported by the given target. + pub fn rustc_outputs( + &self, + mode: CompileMode, + target_kind: &TargetKind, + target_triple: &str, + ) -> CargoResult<(Vec, Vec)> { + match mode { + CompileMode::Build => self.calc_rustc_outputs(target_kind, target_triple), + CompileMode::Test | CompileMode::Bench => { + match self.file_types(&CrateType::Bin, FileFlavor::Normal, target_triple)? { + Some(fts) => Ok((fts, Vec::new())), + None => Ok((Vec::new(), vec![CrateType::Bin])), + } + } + CompileMode::Check { .. } => Ok((vec![FileType::new_rmeta()], Vec::new())), + CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::RunCustomBuild => { + panic!("asked for rustc output for non-rustc mode") + } + } + } + + fn calc_rustc_outputs( + &self, + target_kind: &TargetKind, + target_triple: &str, + ) -> CargoResult<(Vec, Vec)> { + let mut unsupported = Vec::new(); + let mut result = Vec::new(); + let crate_types = target_kind.rustc_crate_types(); + for crate_type in &crate_types { + let flavor = if crate_type.is_linkable() { + FileFlavor::Linkable + } else { + FileFlavor::Normal + }; + let file_types = self.file_types(&crate_type, flavor, target_triple)?; + match file_types { + Some(types) => { + result.extend(types); + } + None => { + unsupported.push(crate_type.clone()); + } + } + } + if !result.is_empty() && !crate_types.iter().any(|ct| ct.requires_upstream_objects()) { + // Only add rmeta if pipelining. + result.push(FileType::new_rmeta()); + } + Ok((result, unsupported)) + } } /// Takes rustc output (using specialized command line args), and calculates the file prefix and @@ -537,9 +666,7 @@ pub struct RustcTargetData { host_info: TargetInfo, /// Build information for targets that we're building for. This will be - /// empty if the `--target` flag is not passed, and currently also only ever - /// has at most one entry, but eventually we'd like to support multi-target - /// builds with Cargo. + /// empty if the `--target` flag is not passed. target_config: HashMap, target_info: HashMap, } diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index b2c48d4d7cd..09a8bf0587b 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -9,7 +9,7 @@ use lazycell::LazyCell; use log::info; use super::{BuildContext, CompileKind, Context, FileFlavor, Layout}; -use crate::core::compiler::{CompileMode, CompileTarget, CrateType, Unit}; +use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit}; use crate::core::{Target, TargetKind, Workspace}; use crate::util::{self, CargoResult}; @@ -85,10 +85,7 @@ pub struct CompilationFiles<'a, 'cfg> { roots: Vec, ws: &'a Workspace<'cfg>, /// Metadata hash to use for each unit. 
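// Illustrative note on the pipelining rmeta added by `calc_rustc_outputs`
// above: a target whose crate types are only `lib`/`rlib` also gets a
// `libfoo_bar.rmeta` output (hypothetical name), while `bin`, `staticlib`,
// and `cdylib` targets require upstream objects and therefore get no rmeta
// entry from that path.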
- /// - /// `None` if the unit should not use a metadata data hash (like rustdoc, - /// or some dylibs). - metas: HashMap>, + metas: HashMap, /// For each Unit, a list all files produced. outputs: HashMap>>>, } @@ -151,15 +148,18 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { } } - /// Gets the metadata for a target in a specific profile. - /// We build to the path `"{filename}-{target_metadata}"`. - /// We use a linking step to link/copy to a predictable filename - /// like `target/debug/libfoo.{a,so,rlib}` and such. + /// Gets the metadata for the given unit. + /// + /// See module docs for more details. /// - /// Returns `None` if the unit should not use a metadata data hash (like + /// Returns `None` if the unit should not use a metadata hash (like /// rustdoc, or some dylibs). - pub fn metadata(&self, unit: &Unit) -> Option { - self.metas[unit] + pub fn metadata(&self, bcx: &BuildContext<'_, '_>, unit: &Unit) -> Option { + if should_use_metadata(bcx, unit) { + Some(self.metas[unit]) + } else { + None + } } /// Gets the short hash based only on the `PackageId`. @@ -169,8 +169,8 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { util::short_hash(&hashable) } - /// Returns the appropriate output directory for the specified package and - /// target. + /// Returns the directory where the artifacts for the given unit are + /// initially created. pub fn out_dir(&self, unit: &Unit) -> PathBuf { if unit.mode.is_doc() { self.layout(unit.kind).doc().to_path_buf() @@ -191,12 +191,11 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { } /// Directory name to use for a package in the form `NAME-HASH`. + /// + /// The hash is unique per Unit. pub fn pkg_dir(&self, unit: &Unit) -> String { let name = unit.pkg.package_id().name(); - match self.metas[unit] { - Some(ref meta) => format!("{}-{}", name, meta), - None => format!("{}-{}", name, self.target_short_hash(unit)), - } + format!("{}-{}", name, self.metas[unit]) } /// Returns the root of the build output tree for the host @@ -252,14 +251,6 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { self.build_script_run_dir(unit).join("out") } - /// Returns the file stem for a given target/profile combo (with metadata). - pub fn file_stem(&self, unit: &Unit) -> String { - match self.metas[unit] { - Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata), - None => self.bin_stem(unit), - } - } - /// Returns the path to the executable binary for the given bin target. /// /// This should only to be used when a `Unit` is not available. @@ -272,13 +263,12 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { assert!(target.is_bin()); let dest = self.layout(kind).dest(); let info = bcx.target_data.info(kind); - let file_types = info - .file_types( - &CrateType::Bin, - FileFlavor::Normal, + let (file_types, _) = info + .rustc_outputs( + CompileMode::Build, &TargetKind::Bin, bcx.target_data.short_name(&kind), - )? + ) .expect("target must support `bin`"); let file_type = file_types @@ -286,10 +276,12 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { .find(|file_type| file_type.flavor == FileFlavor::Normal) .expect("target must support `bin`"); - Ok(dest.join(file_type.filename(target.name()))) + Ok(dest.join(file_type.uplift_filename(target))) } /// Returns the filenames that the given unit will generate. + /// + /// Note: It is not guaranteed that all of the files will be generated. 
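+ // Illustration with hypothetical values: a unit of package `foo-bar` might
+ // use the pkg_dir `foo-bar-1f2a3b4c5d6e7f80` for its `.fingerprint/` and
+ // `build/` subdirectories, while `metadata()` can still return `None` for
+ // outputs that must keep a stable, hash-free filename (see
+ // `should_use_metadata` below).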
pub(super) fn outputs( &self, unit: &Unit, @@ -300,57 +292,50 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { .map(Arc::clone) } - /// Returns the bin filename for a given target, without extension and metadata. - fn bin_stem(&self, unit: &Unit) -> String { - if unit.target.allows_dashes() { - unit.target.name().to_string() - } else { - unit.target.crate_name() + /// Returns the path where the output for the given unit and FileType + /// should be uplifted to. + /// + /// Returns `None` if the unit shouldn't be uplifted (for example, a + /// dependent rlib). + fn uplift_to(&self, unit: &Unit, file_type: &FileType, from_path: &Path) -> Option { + // Tests, check, doc, etc. should not be uplifted. + if unit.mode != CompileMode::Build || file_type.flavor == FileFlavor::Rmeta { + return None; + } + // Only uplift: + // - Binaries: The user always wants to see these, even if they are + // implicitly built (for example for integration tests). + // - dylibs: This ensures that the dynamic linker pulls in all the + // latest copies (even if the dylib was built from a previous cargo + // build). There are complex reasons for this, see #8139, #6167, #6162. + // - Things directly requested from the command-line (the "roots"). + // This one is a little questionable for rlibs (see #6131), but is + // historically how Cargo has operated. This is primarily useful to + // give the user access to staticlibs and cdylibs. + if !unit.target.is_bin() + && !unit.target.is_custom_build() + && file_type.crate_type != Some(CrateType::Dylib) + && !self.roots.contains(unit) + { + return None; } - } - /// Returns a tuple `(hard_link_dir, filename_stem)` for the primary - /// output file for the given unit. - /// - /// `hard_link_dir` is the directory where the file should be hard-linked - /// ("uplifted") to. For example, `/path/to/project/target`. - /// - /// `filename_stem` is the base filename without an extension. - /// - /// This function returns it in two parts so the caller can add - /// prefix/suffix to filename separately. - /// - /// Returns an `Option` because in some cases we don't want to link - /// (eg a dependent lib). - fn link_stem(&self, unit: &Unit) -> Option<(PathBuf, String)> { - let out_dir = self.out_dir(unit); - let bin_stem = self.bin_stem(unit); // Stem without metadata. - let file_stem = self.file_stem(unit); // Stem with metadata. - - // We currently only lift files up from the `deps` directory. If - // it was compiled into something like `example/` or `doc/` then - // we don't want to link it up. - if out_dir.ends_with("deps") { - // Don't lift up library dependencies. - if unit.target.is_bin() || self.roots.contains(unit) || unit.target.is_dylib() { - Some(( - out_dir.parent().unwrap().to_owned(), - if unit.mode.is_any_test() { - file_stem - } else { - bin_stem - }, - )) - } else { - None - } - } else if bin_stem == file_stem { - None - } else if out_dir.ends_with("examples") || out_dir.parent().unwrap().ends_with("build") { - Some((out_dir, bin_stem)) + let filename = file_type.uplift_filename(&unit.target); + let uplift_path = if unit.target.is_example() { + // Examples live in their own little world. 
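+ // Illustration (hypothetical workspace): `cargo build` on a bin `foo`
+ // uplifts `foo[.exe]` from `deps/` into `target/debug/`, a dependency's
+ // rlib stays in `deps/`, and dylibs are uplifted even as plain
+ // dependencies so the dynamic linker sees the latest copy. Example
+ // targets go to `target/debug/examples/`: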
+ self.layout(unit.kind).examples().join(filename) + } else if unit.target.is_custom_build() { + self.build_script_dir(unit).join(filename) } else { - None + self.layout(unit.kind).dest().join(filename) + }; + if from_path == uplift_path { + // This can happen with things like examples that reside in the + // same directory, do not have a metadata hash (like on Windows), + // and do not have hyphens. + return None; } + Some(uplift_path) } fn calc_outputs( @@ -359,18 +344,6 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult>> { let ret = match unit.mode { - CompileMode::Check { .. } => { - // This may be confusing. rustc outputs a file named `lib*.rmeta` - // for both libraries and binaries. - let file_stem = self.file_stem(unit); - let path = self.out_dir(unit).join(format!("lib{}.rmeta", file_stem)); - vec![OutputFile { - path, - hardlink: None, - export_path: None, - flavor: FileFlavor::Linkable { rmeta: false }, - }] - } CompileMode::Doc { .. } => { let path = self .out_dir(unit) @@ -394,131 +367,77 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { // but Cargo does not know about that. vec![] } - CompileMode::Test | CompileMode::Build | CompileMode::Bench => { - self.calc_outputs_rustc(unit, bcx)? - } + CompileMode::Test + | CompileMode::Build + | CompileMode::Bench + | CompileMode::Check { .. } => self.calc_outputs_rustc(unit, bcx)?, }; info!("Target filenames: {:?}", ret); Ok(Arc::new(ret)) } + /// Computes the actual, full pathnames for all the files generated by rustc. + /// + /// The `OutputFile` also contains the paths where those files should be + /// "uplifted" to. fn calc_outputs_rustc( &self, unit: &Unit, bcx: &BuildContext<'a, 'cfg>, ) -> CargoResult> { - let mut ret = Vec::new(); - let mut unsupported = Vec::new(); - let out_dir = self.out_dir(unit); - let link_stem = self.link_stem(unit); + let info = bcx.target_data.info(unit.kind); - let file_stem = self.file_stem(unit); - - let mut add = |crate_type: &CrateType, flavor: FileFlavor| -> CargoResult<()> { - let file_types = info.file_types( - crate_type, - flavor, - unit.target.kind(), - bcx.target_data.short_name(&unit.kind), - )?; - - match file_types { - Some(types) => { - for file_type in types { - let path = out_dir.join(file_type.filename(&file_stem)); - // Don't create hardlink for tests - let hardlink = if unit.mode.is_any_test() { - None - } else { - link_stem - .as_ref() - .map(|&(ref ld, ref ls)| ld.join(file_type.filename(ls))) - }; - let export_path = if unit.target.is_custom_build() { - None - } else { - self.export_dir.as_ref().and_then(|export_dir| { - hardlink - .as_ref() - .map(|hardlink| export_dir.join(hardlink.file_name().unwrap())) - }) - }; - ret.push(OutputFile { - path, - hardlink, - export_path, - flavor: file_type.flavor, - }); - } - } - // Not supported; don't worry about it. - None => { - unsupported.push(crate_type.to_string()); - } - } - Ok(()) - }; - match unit.target.kind() { - TargetKind::Bin - | TargetKind::CustomBuild - | TargetKind::ExampleBin - | TargetKind::Bench - | TargetKind::Test => { - add(&CrateType::Bin, FileFlavor::Normal)?; - } - TargetKind::Lib(..) | TargetKind::ExampleLib(..) 
if unit.mode.is_any_test() => { - add(&CrateType::Bin, FileFlavor::Normal)?; - } - TargetKind::ExampleLib(crate_types) | TargetKind::Lib(crate_types) => { - for crate_type in crate_types { - add( - crate_type, - if crate_type.is_linkable() { - FileFlavor::Linkable { rmeta: false } - } else { - FileFlavor::Normal - }, - )?; - } - let path = out_dir.join(format!("lib{}.rmeta", file_stem)); - if !unit.requires_upstream_objects() { - ret.push(OutputFile { - path, - hardlink: None, - export_path: None, - flavor: FileFlavor::Linkable { rmeta: true }, - }); - } - } - } - if ret.is_empty() { + let triple = bcx.target_data.short_name(&unit.kind); + let (file_types, unsupported) = + info.rustc_outputs(unit.mode, unit.target.kind(), triple)?; + if file_types.is_empty() { if !unsupported.is_empty() { + let unsupported_strs: Vec<_> = unsupported.iter().map(|ct| ct.as_str()).collect(); anyhow::bail!( "cannot produce {} for `{}` as the target `{}` \ does not support these crate types", - unsupported.join(", "), + unsupported_strs.join(", "), unit.pkg, - bcx.target_data.short_name(&unit.kind), + triple, ) } anyhow::bail!( "cannot compile `{}` as the target `{}` does not \ support any of the output crate types", unit.pkg, - bcx.target_data.short_name(&unit.kind), + triple, ); } - Ok(ret) + + // Convert FileType to OutputFile. + let mut outputs = Vec::new(); + for file_type in file_types { + let meta = self.metadata(bcx, unit).map(|m| m.to_string()); + let path = out_dir.join(file_type.output_filename(&unit.target, meta.as_deref())); + let hardlink = self.uplift_to(unit, &file_type, &path); + let export_path = if unit.target.is_custom_build() { + None + } else { + self.export_dir.as_ref().and_then(|export_dir| { + hardlink + .as_ref() + .map(|hardlink| export_dir.join(hardlink.file_name().unwrap())) + }) + }; + outputs.push(OutputFile { + path, + hardlink, + export_path, + flavor: file_type.flavor, + }); + } + Ok(outputs) } } -fn metadata_of( - unit: &Unit, - cx: &Context<'_, '_>, - metas: &mut HashMap>, -) -> Option { +fn metadata_of(unit: &Unit, cx: &Context<'_, '_>, metas: &mut HashMap) -> Metadata { if !metas.contains_key(unit) { let meta = compute_metadata(unit, cx, metas); metas.insert(unit.clone(), meta); @@ -532,48 +451,9 @@ fn metadata_of( fn compute_metadata( unit: &Unit, cx: &Context<'_, '_>, - metas: &mut HashMap>, -) -> Option { - if unit.mode.is_doc_test() { - // Doc tests do not have metadata. - return None; - } - // No metadata for dylibs because of a couple issues: - // - macOS encodes the dylib name in the executable, - // - Windows rustc multiple files of which we can't easily link all of them. - // - // No metadata for bin because of an issue: - // - wasm32 rustc/emcc encodes the `.wasm` name in the `.js` (rust-lang/cargo#4535). - // - msvc: The path to the PDB is embedded in the executable, and we don't - // want the PDB path to include the hash in it. - // - // Two exceptions: - // 1) Upstream dependencies (we aren't exporting + need to resolve name conflict), - // 2) `__CARGO_DEFAULT_LIB_METADATA` env var. - // - // Note, however, that the compiler's build system at least wants - // path dependencies (eg libstd) to have hashes in filenames. To account for - // that we have an extra hack here which reads the - // `__CARGO_DEFAULT_LIB_METADATA` environment variable and creates a - // hash in the filename if that's present. - // - // This environment variable should not be relied on! It's - // just here for rustbuild. We need a more principled method - // doing this eventually. 
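+ // The hash computed by `compute_metadata` is what later becomes
+ // `-C metadata=<hash>` and `-C extra-filename=-<hash>` on the rustc
+ // command line (see `build_base_args`), which is how multiple versions of
+ // the same crate can coexist in `deps/`. Illustrative invocation with a
+ // hypothetical hash:
+ //     rustc --crate-name foo_bar ... -C metadata=1f2a3b4c5d6e7f80 -C extra-filename=-1f2a3b4c5d6e7f80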
+ metas: &mut HashMap, +) -> Metadata { let bcx = &cx.bcx; - let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA"); - let short_name = bcx.target_data.short_name(&unit.kind); - if !(unit.mode.is_any_test() || unit.mode.is_check()) - && (unit.target.is_dylib() - || unit.target.is_cdylib() - || (unit.target.is_executable() && short_name.starts_with("wasm32-")) - || (unit.target.is_executable() && short_name.contains("msvc"))) - && unit.pkg.package_id().source_id().is_path() - && __cargo_default_lib_metadata.is_err() - { - return None; - } - let mut hasher = SipHasher::new(); // This is a generic version number that can be changed to make @@ -633,7 +513,7 @@ fn compute_metadata( // Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present. // This should be the release channel, to get a different hash for each channel. - if let Ok(ref channel) = __cargo_default_lib_metadata { + if let Ok(ref channel) = env::var("__CARGO_DEFAULT_LIB_METADATA") { channel.hash(&mut hasher); } @@ -646,7 +526,7 @@ fn compute_metadata( // with user dependencies. unit.is_std.hash(&mut hasher); - Some(Metadata(hasher.finish())) + Metadata(hasher.finish()) } fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut SipHasher) { @@ -680,3 +560,46 @@ fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut SipHasher) { // the future when cranelift sees more use, and people want to switch // between different backends without recompiling. } + +/// Returns whether or not this unit should use a metadata hash. +fn should_use_metadata(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool { + if unit.mode.is_doc_test() { + // Doc tests do not have metadata. + return false; + } + if unit.mode.is_any_test() || unit.mode.is_check() { + // These always use metadata. + return true; + } + // No metadata in these cases: + // + // - dylibs: + // - macOS encodes the dylib name in the executable, so it can't be renamed. + // - TODO: Are there other good reasons? If not, maybe this should be macos specific? + // - Windows MSVC executables: The path to the PDB is embedded in the + // executable, and we don't want the PDB path to include the hash in it. + // - wasm32 executables: When using emscripten, the path to the .wasm file + // is embedded in the .js file, so we don't want the hash in there. + // TODO: Is this necessary for wasm32-unknown-unknown? + // + // This is only done for local packages, as we don't expect to export + // dependencies. + // + // The __CARGO_DEFAULT_LIB_METADATA env var is used to override this to + // force metadata in the hash. This is only used for building libstd. For + // example, if libstd is placed in a common location, we don't want a file + // named /usr/lib/libstd.so which could conflict with other rustc + // installs. TODO: Is this still a realistic concern? 
+ // See https://github.com/rust-lang/cargo/issues/3005 + let short_name = bcx.target_data.short_name(&unit.kind); + if (unit.target.is_dylib() + || unit.target.is_cdylib() + || (unit.target.is_executable() && short_name.starts_with("wasm32-")) + || (unit.target.is_executable() && short_name.contains("msvc"))) + && unit.pkg.package_id().source_id().is_path() + && env::var("__CARGO_DEFAULT_LIB_METADATA").is_err() + { + return false; + } + true +} diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index ae5c1382947..318dc8655b4 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -262,7 +262,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Returns the executable for the specified unit (if any). pub fn get_executable(&mut self, unit: &Unit) -> CargoResult> { for output in self.outputs(unit)?.iter() { - if output.flavor == FileFlavor::DebugInfo { + if output.flavor != FileFlavor::Normal { continue; } @@ -377,7 +377,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata { assert!(unit.mode.is_run_custom_build()); self.files() - .metadata(unit) + .metadata(self.bcx, unit) .expect("build script should always have hash") } diff --git a/src/cargo/core/compiler/crate_type.rs b/src/cargo/core/compiler/crate_type.rs new file mode 100644 index 00000000000..fd0126cbe83 --- /dev/null +++ b/src/cargo/core/compiler/crate_type.rs @@ -0,0 +1,97 @@ +use std::fmt; + +#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum CrateType { + Bin, + Lib, + Rlib, + Dylib, + Cdylib, + Staticlib, + ProcMacro, + Other(String), +} + +impl CrateType { + pub fn as_str(&self) -> &str { + match self { + CrateType::Bin => "bin", + CrateType::Lib => "lib", + CrateType::Rlib => "rlib", + CrateType::Dylib => "dylib", + CrateType::Cdylib => "cdylib", + CrateType::Staticlib => "staticlib", + CrateType::ProcMacro => "proc-macro", + CrateType::Other(s) => s, + } + } + + pub fn is_linkable(&self) -> bool { + match self { + CrateType::Lib | CrateType::Rlib | CrateType::Dylib | CrateType::ProcMacro => true, + CrateType::Bin | CrateType::Cdylib | CrateType::Staticlib | CrateType::Other(..) => { + false + } + } + } + + pub fn is_dynamic(&self) -> bool { + match self { + CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => true, + CrateType::Lib + | CrateType::Rlib + | CrateType::Bin + | CrateType::Staticlib + | CrateType::Other(..) => false, + } + } + + pub fn requires_upstream_objects(&self) -> bool { + match self { + // "lib" == "rlib" and is a compilation that doesn't actually + // require upstream object files to exist, only upstream metadata + // files. As a result, it doesn't require upstream artifacts + CrateType::Lib | CrateType::Rlib => false, + + // Everything else, however, is some form of "linkable output" or + // something that requires upstream object files. 
+ _ => true, + } + } +} + +impl fmt::Display for CrateType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_str().fmt(f) + } +} + +impl<'a> From<&'a String> for CrateType { + fn from(s: &'a String) -> Self { + match s.as_str() { + "bin" => CrateType::Bin, + "lib" => CrateType::Lib, + "rlib" => CrateType::Rlib, + "dylib" => CrateType::Dylib, + "cdylib" => CrateType::Cdylib, + "staticlib" => CrateType::Staticlib, + "procmacro" => CrateType::ProcMacro, + _ => CrateType::Other(s.clone()), + } + } +} + +impl fmt::Debug for CrateType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.to_string().fmt(f) + } +} + +impl serde::Serialize for CrateType { + fn serialize(&self, s: S) -> Result + where + S: serde::ser::Serializer, + { + self.to_string().serialize(s) + } +} diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index 598abdec8e1..eaf88e2ee5c 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -104,8 +104,9 @@ //! - A "dep-info" file which contains a list of source filenames for the //! target. See below for details. //! - An `invoked.timestamp` file whose filesystem mtime is updated every time -//! the Unit is built. This is an experimental feature used for cleaning -//! unused artifacts. +//! the Unit is built. This is used for capturing the time when the build +//! starts, to detect if files are changed in the middle of the build. See +//! below for more details. //! //! Note that some units are a little different. A Unit for *running* a build //! script or for `rustdoc` does not have a dep-info file (it's not @@ -352,7 +353,7 @@ pub fn prepare_target(cx: &mut Context<'_, '_>, unit: &Unit, force: bool) -> Car )); let bcx = cx.bcx; let new = cx.files().fingerprint_dir(unit); - let loc = new.join(&filename(cx, unit)); + let loc = new.join(&filename(unit)); debug!("fingerprint at: {}", loc.display()); @@ -1523,7 +1524,7 @@ pub fn prepare_init(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> { pub fn dep_info_loc(cx: &mut Context<'_, '_>, unit: &Unit) -> PathBuf { cx.files() .fingerprint_dir(unit) - .join(&format!("dep-{}", filename(cx, unit))) + .join(&format!("dep-{}", filename(unit))) } /// Returns an absolute path that target directory. @@ -1679,11 +1680,8 @@ where None } -fn filename(cx: &mut Context<'_, '_>, unit: &Unit) -> String { - // file_stem includes metadata hash. Thus we have a different - // fingerprint for every metadata hash version. This works because - // even if the package is fresh, we'll still link the fresh target - let file_stem = cx.files().file_stem(unit); +/// Returns the filename used for the fingerprint file. +fn filename(unit: &Unit) -> String { let kind = unit.target.kind().description(); let flavor = if unit.mode.is_any_test() { "test-" @@ -1694,7 +1692,7 @@ fn filename(cx: &mut Context<'_, '_>, unit: &Unit) -> String { } else { "" }; - format!("{}{}-{}", flavor, kind, file_stem) + format!("{}{}-{}", flavor, kind, unit.target.name()) } #[repr(u8)] diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index a896d5be38b..c9f8fcedff0 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -26,16 +26,17 @@ //! # packages //! .fingerprint/ //! # Each package is in a separate directory. +//! # Note that different target kinds have different filename prefixes. //! $pkgname-$META/ //! # Set of source filenames for this package. -//! dep-lib-$pkgname-$META +//! 
dep-lib-$targetname //! # Timestamp when this package was last built. //! invoked.timestamp //! # The fingerprint hash. -//! lib-$pkgname-$META +//! lib-$targetname //! # Detailed information used for logging the reason why //! # something is being recompiled. -//! lib-$pkgname-$META.json +//! lib-$targetname.json //! # The console output from the compiler. This is cached //! # so that warnings can be redisplayed for "fresh" units. //! output diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index a24aaf0ab9b..ef404c5e989 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -31,7 +31,7 @@ use lazycell::LazyCell; use log::debug; pub use self::build_config::{BuildConfig, CompileMode, MessageFormat}; -pub use self::build_context::{BuildContext, FileFlavor, RustcTargetData, TargetInfo}; +pub use self::build_context::{BuildContext, FileFlavor, FileType, RustcTargetData, TargetInfo}; use self::build_plan::BuildPlan; pub use self::compilation::{Compilation, Doctest}; pub use self::compile_kind::{CompileKind, CompileTarget}; @@ -192,17 +192,12 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> Car // don't pass the `-l` flags. let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); let pass_cdylib_link_args = unit.target.is_cdylib(); - let do_rename = unit.target.allows_dashes() && !unit.mode.is_any_test(); - let real_name = unit.target.name().to_string(); - let crate_name = unit.target.crate_name(); - // Rely on `target_filenames` iterator as source of truth rather than rederiving filestem. - let rustc_dep_info_loc = if do_rename && cx.files().metadata(unit).is_none() { - root.join(&crate_name) - } else { - root.join(&cx.files().file_stem(unit)) - } - .with_extension("d"); + let dep_info_name = match cx.files().metadata(cx.bcx, unit) { + Some(metadata) => format!("{}-{}.d", unit.target.crate_name(), metadata), + None => format!("{}.d", unit.target.crate_name()), + }; + let rustc_dep_info_loc = root.join(dep_info_name); let dep_info_loc = fingerprint::dep_info_loc(cx, unit); rustc.args(cx.bcx.rustflags_args(unit)); @@ -294,20 +289,6 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> Car .chain_err(|| format!("could not compile `{}`.", name))?; } - if do_rename && real_name != crate_name { - let dst = &outputs[0].path; - let src = dst.with_file_name( - dst.file_name() - .unwrap() - .to_str() - .unwrap() - .replace(&real_name, &crate_name), - ); - if src.exists() && src.file_name() != dst.file_name() { - fs::rename(&src, &dst).chain_err(|| format!("could not rename crate {:?}", src))?; - } - } - if rustc_dep_info_loc.exists() { fingerprint::translate_dep_info( &rustc_dep_info_loc, @@ -888,7 +869,7 @@ fn build_base_args( cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } - match cx.files().metadata(unit) { + match cx.files().metadata(cx.bcx, unit) { Some(m) => { cmd.arg("-C").arg(&format!("metadata={}", m)); cmd.arg("-C").arg(&format!("extra-filename=-{}", m)); @@ -1069,19 +1050,18 @@ pub fn extern_args( }; let outputs = cx.outputs(&dep.unit)?; - let mut outputs = outputs.iter().filter_map(|output| match output.flavor { - FileFlavor::Linkable { rmeta } => Some((output, rmeta)), - _ => None, - }); - - if cx.only_requires_rmeta(unit, &dep.unit) { - let (output, _rmeta) = outputs - .find(|(_output, rmeta)| *rmeta) - .expect("failed to find rlib dep for pipelined dep"); + + if cx.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() { + // Example: rlib dependency 
for an rlib, rmeta is all that is required. + let output = outputs + .iter() + .find(|output| output.flavor == FileFlavor::Rmeta) + .expect("failed to find rmeta dep for pipelined dep"); pass(&output.path); } else { - for (output, rmeta) in outputs { - if !rmeta { + // Example: a bin needs `rlib` for dependencies, it cannot use rmeta. + for output in outputs.iter() { + if output.flavor == FileFlavor::Linkable { pass(&output.path); } } diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs index 284cb338cd5..3a9fac2c6d4 100644 --- a/src/cargo/core/compiler/output_depinfo.rs +++ b/src/cargo/core/compiler/output_depinfo.rs @@ -132,7 +132,7 @@ pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> for output in cx .outputs(unit)? .iter() - .filter(|o| o.flavor != FileFlavor::DebugInfo) + .filter(|o| !matches!(o.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary)) { if let Some(ref link_dst) = output.hardlink { let output_path = link_dst.with_extension("d"); diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index 6e96cdf4c3d..994603aca57 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -164,6 +164,18 @@ impl TargetKind { _ => true, } } + + /// Returns the arguments suitable for `--crate-type` to pass to rustc. + pub fn rustc_crate_types(&self) -> Vec { + match self { + TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => kinds.clone(), + TargetKind::CustomBuild + | TargetKind::Bench + | TargetKind::Test + | TargetKind::ExampleBin + | TargetKind::Bin => vec![CrateType::Bin], + } + } } /// Information about a binary, a library, an example, etc. that is part of the @@ -757,14 +769,6 @@ impl Target { } } - /// Whether or not this target allows dashes in its filename. - /// - /// Rustc will always emit filenames with underscores, and Cargo will - /// rename them back to dashes if this is true. - pub fn allows_dashes(&self) -> bool { - self.is_bin() || self.is_example() || self.is_custom_build() - } - pub fn is_lib(&self) -> bool { match self.kind() { TargetKind::Lib(_) => true, @@ -835,14 +839,7 @@ impl Target { /// Returns the arguments suitable for `--crate-type` to pass to rustc. 
pub fn rustc_crate_types(&self) -> Vec { - match self.kind() { - TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => kinds.clone(), - TargetKind::CustomBuild - | TargetKind::Bench - | TargetKind::Test - | TargetKind::ExampleBin - | TargetKind::Bin => vec![CrateType::Bin], - } + self.kind().rustc_crate_types() } pub fn can_lto(&self) -> bool { diff --git a/src/cargo/ops/mod.rs b/src/cargo/ops/mod.rs index eed36c7867f..dc45fc41dcb 100644 --- a/src/cargo/ops/mod.rs +++ b/src/cargo/ops/mod.rs @@ -1,6 +1,6 @@ pub use self::cargo_clean::{clean, CleanOptions}; pub use self::cargo_compile::{ - compile, compile_with_exec, compile_ws, resolve_all_features, CompileOptions, + compile, compile_with_exec, compile_ws, create_bcx, resolve_all_features, CompileOptions, }; pub use self::cargo_compile::{CompileFilter, FilterRule, LibRule, Packages}; pub use self::cargo_doc::{doc, DocOptions}; diff --git a/tests/testsuite/bench.rs b/tests/testsuite/bench.rs index 225d5c9625e..b4d7de19aa8 100644 --- a/tests/testsuite/bench.rs +++ b/tests/testsuite/bench.rs @@ -1614,7 +1614,7 @@ fn json_artifact_includes_executable_for_benchmark() { { "executable": "[..]/foo/target/release/deps/benchmark-[..][EXE]", "features": [], - "filenames": [ "[..]/foo/target/release/deps/benchmark-[..][EXE]" ], + "filenames": "{...}", "fresh": false, "package_id": "foo 0.0.1 ([..])", "profile": "{...}", diff --git a/tests/testsuite/build_plan.rs b/tests/testsuite/build_plan.rs index 092ff90e637..a6e8eaac8dd 100644 --- a/tests/testsuite/build_plan.rs +++ b/tests/testsuite/build_plan.rs @@ -154,9 +154,7 @@ fn cargo_build_plan_build_script() { "env": "{...}", "kind": null, "links": "{...}", - "outputs": [ - "[..]/foo/target/debug/build/[..]/build_script_build-[..]" - ], + "outputs": "{...}", "package_name": "foo", "package_version": "0.5.0", "program": "rustc", diff --git a/tests/testsuite/cache_messages.rs b/tests/testsuite/cache_messages.rs index 0503306b943..feaeff7e62a 100644 --- a/tests/testsuite/cache_messages.rs +++ b/tests/testsuite/cache_messages.rs @@ -491,3 +491,23 @@ fn rustc_workspace_wrapper() { .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") .run(); } + +#[cargo_test] +fn wacky_hashless_fingerprint() { + // On Windows, executables don't have hashes. This checks for a bad + // assumption that caused bad caching. + let p = project() + .file("src/bin/a.rs", "fn main() { let unused = 1; }") + .file("src/bin/b.rs", "fn main() {}") + .build(); + p.cargo("build --bin b") + .with_stderr_does_not_contain("[..]unused[..]") + .run(); + p.cargo("build --bin a") + .with_stderr_contains("[..]unused[..]") + .run(); + // This should not pick up the cache from `a`. 
+ p.cargo("build --bin b") + .with_stderr_does_not_contain("[..]unused[..]") + .run(); +} diff --git a/tests/testsuite/dep_info.rs b/tests/testsuite/dep_info.rs index 886ef862cc4..4223fb31198 100644 --- a/tests/testsuite/dep_info.rs +++ b/tests/testsuite/dep_info.rs @@ -272,13 +272,13 @@ fn relative_depinfo_paths_ws() { assert_deps_contains( &p, - "target/debug/.fingerprint/pm-*/dep-lib-pm-*", + "target/debug/.fingerprint/pm-*/dep-lib-pm", &[(1, "src/lib.rs"), (2, "debug/deps/libpmdep-*.rlib")], ); assert_deps_contains( &p, - &format!("target/{}/debug/.fingerprint/foo-*/dep-bin-foo*", host), + &format!("target/{}/debug/.fingerprint/foo-*/dep-bin-foo", host), &[ (1, "src/main.rs"), ( @@ -296,7 +296,7 @@ fn relative_depinfo_paths_ws() { assert_deps_contains( &p, - "target/debug/.fingerprint/foo-*/dep-build-script-build_script_build-*", + "target/debug/.fingerprint/foo-*/dep-build-script-build-script-build", &[(1, "build.rs"), (2, "debug/deps/libbdep-*.rlib")], ); @@ -400,13 +400,13 @@ fn relative_depinfo_paths_no_ws() { assert_deps_contains( &p, - "target/debug/.fingerprint/pm-*/dep-lib-pm-*", + "target/debug/.fingerprint/pm-*/dep-lib-pm", &[(1, "src/lib.rs"), (2, "debug/deps/libpmdep-*.rlib")], ); assert_deps_contains( &p, - "target/debug/.fingerprint/foo-*/dep-bin-foo*", + "target/debug/.fingerprint/foo-*/dep-bin-foo", &[ (1, "src/main.rs"), ( @@ -424,7 +424,7 @@ fn relative_depinfo_paths_no_ws() { assert_deps_contains( &p, - "target/debug/.fingerprint/foo-*/dep-build-script-build_script_build-*", + "target/debug/.fingerprint/foo-*/dep-build-script-build-script-build", &[(1, "build.rs"), (2, "debug/deps/libbdep-*.rlib")], ); @@ -461,7 +461,7 @@ fn reg_dep_source_not_tracked() { assert_deps( &p, - "target/debug/.fingerprint/regdep-*/dep-lib-regdep-*", + "target/debug/.fingerprint/regdep-*/dep-lib-regdep", |info_path, entries| { for (kind, path) in entries { if *kind == 1 { @@ -513,7 +513,7 @@ fn canonical_path() { assert_deps_contains( &p, - "target/debug/.fingerprint/foo-*/dep-lib-foo-*", + "target/debug/.fingerprint/foo-*/dep-lib-foo", &[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rmeta")], ); } diff --git a/tests/testsuite/metabuild.rs b/tests/testsuite/metabuild.rs index ece3c7aece8..77f4843aa3a 100644 --- a/tests/testsuite/metabuild.rs +++ b/tests/testsuite/metabuild.rs @@ -497,7 +497,7 @@ fn metabuild_build_plan() { "compile_mode": "build", "kind": null, "deps": [0, 1], - "outputs": ["[..]/target/debug/build/foo-[..]/metabuild_foo-[..][EXE]"], + "outputs": "{...}", "links": "{...}", "program": "rustc", "args": "{...}", @@ -686,9 +686,7 @@ fn metabuild_json_artifact() { { "executable": null, "features": [], - "filenames": [ - "[..]/foo/target/debug/build/foo-[..]/metabuild-foo[EXE]" - ], + "filenames": "{...}", "fresh": false, "package_id": "foo [..]", "profile": "{...}", diff --git a/tests/testsuite/out_dir.rs b/tests/testsuite/out_dir.rs index 15b26f61980..91365137c9a 100644 --- a/tests/testsuite/out_dir.rs +++ b/tests/testsuite/out_dir.rs @@ -90,8 +90,8 @@ fn dynamic_library_with_debug() { check_dir_contents( &p.root().join("out"), &["libfoo.so"], - &["libfoo.dylib"], - &["foo.dll", "foo.dll.lib"], + &["libfoo.dylib", "libfoo.dylib.dSYM"], + &["foo.dll", "foo.dll.exp", "foo.dll.lib", "foo.pdb"], &["foo.dll", "libfoo.dll.a"], ); } diff --git a/tests/testsuite/test.rs b/tests/testsuite/test.rs index 7f1561e3372..68fb5bc8182 100644 --- a/tests/testsuite/test.rs +++ b/tests/testsuite/test.rs @@ -3360,7 +3360,7 @@ fn json_artifact_includes_test_flag() { "name":"foo", 
"src_path":"[..]lib.rs" }, - "filenames":["[..]/foo-[..]"], + "filenames":"{...}", "fresh": false } From a8997cbc0f68427d5d00560594b48ad6f4bd683b Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Tue, 5 May 2020 14:22:06 -0700 Subject: [PATCH 09/24] Implement new `clean -p` using globs. --- src/cargo/core/compiler/mod.rs | 1 + src/cargo/ops/cargo_clean.rs | 274 ++++++++++++++++----------------- tests/testsuite/clean.rs | 168 +++++++++++++++++++- 3 files changed, 301 insertions(+), 142 deletions(-) diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index ef404c5e989..315ec0bc644 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -41,6 +41,7 @@ pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; pub use self::job::Freshness; use self::job::{Job, Work}; use self::job_queue::{JobQueue, JobState}; +pub(crate) use self::layout::Layout; use self::output_depinfo::output_depinfo; use self::unit_graph::UnitDep; pub use crate::core::compiler::unit::{Unit, UnitInterner}; diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index 0a4166940e3..b9761dbeff0 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -1,22 +1,12 @@ -use crate::core::InternedString; -use std::collections::HashMap; -use std::fs; -use std::path::Path; - -use crate::core::compiler::unit_dependencies; -use crate::core::compiler::BuildContext; -use crate::core::compiler::{ - BuildConfig, CompileKind, CompileMode, Context, RustcTargetData, UnitInterner, -}; -use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::HasDevUnits; -use crate::core::resolver::ResolveOpts; -use crate::core::{PackageIdSpec, Workspace}; +use crate::core::compiler::{CompileKind, CompileMode, Layout, RustcTargetData}; +use crate::core::profiles::Profiles; +use crate::core::{InternedString, PackageIdSpec, Workspace}; use crate::ops; -use crate::ops::resolve::WorkspaceResolve; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::paths; use crate::util::Config; +use std::fs; +use std::path::Path; pub struct CleanOptions<'a> { pub config: &'a Config, @@ -61,135 +51,130 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { if opts.spec.is_empty() { return rm_rf(&target_dir.into_path_unlocked(), config); } - let mut build_config = BuildConfig::new(config, Some(1), &opts.targets, CompileMode::Build)?; - build_config.requested_profile = opts.requested_profile; - let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; - // Resolve for default features. In the future, `cargo clean` should be rewritten - // so that it doesn't need to guess filename hashes. - let resolve_opts = ResolveOpts::new( - /*dev_deps*/ true, - &[], - /*all features*/ false, - /*default*/ true, - ); - let specs = opts - .spec - .iter() - .map(|spec| PackageIdSpec::parse(spec)) - .collect::>>()?; - let ws_resolve = ops::resolve_ws_with_opts( - ws, - &target_data, - &build_config.requested_kinds, - &resolve_opts, - &specs, - HasDevUnits::Yes, - )?; - let WorkspaceResolve { - pkg_set, - targeted_resolve: resolve, - resolved_features: features, - .. 
- } = ws_resolve; - - let interner = UnitInterner::new(); - let mut units = Vec::new(); - - for spec in opts.spec.iter() { - // Translate the spec to a Package - let pkgid = resolve.query(spec)?; - let pkg = pkg_set.get_one(pkgid)?; - - // Generate all relevant `Unit` targets for this package - for target in pkg.targets() { - for kind in build_config - .requested_kinds - .iter() - .chain(Some(&CompileKind::Host)) - { - for mode in CompileMode::all_modes() { - for unit_for in UnitFor::all_values() { - let profile = if mode.is_run_custom_build() { - profiles.get_profile_run_custom_build(&profiles.get_profile( - pkg.package_id(), - ws.is_member(pkg), - /*is_local*/ true, - *unit_for, - CompileMode::Build, - )) - } else { - profiles.get_profile( - pkg.package_id(), - ws.is_member(pkg), - /*is_local*/ true, - *unit_for, - *mode, - ) - }; - // Use unverified here since this is being more - // exhaustive than what is actually needed. - let features_for = unit_for.map_to_features_for(); - let features = features - .activated_features_unverified(pkg.package_id(), features_for) - .unwrap_or_default(); - units.push(interner.intern( - pkg, target, profile, *kind, *mode, features, /*is_std*/ false, - )); - } - } - } + + // Clean specific packages. + let requested_kinds = CompileKind::from_requested_targets(config, &opts.targets)?; + let target_data = RustcTargetData::new(ws, &requested_kinds)?; + let (pkg_set, resolve) = ops::resolve_ws(ws)?; + let prof_dir_name = profiles.get_dir_name(); + let host_layout = Layout::new(ws, None, &prof_dir_name)?; + // Convert requested kinds to a Vec of layouts. + let target_layouts: Vec<(CompileKind, Layout)> = requested_kinds + .into_iter() + .filter_map(|kind| match kind { + CompileKind::Target(target) => match Layout::new(ws, Some(target), &prof_dir_name) { + Ok(layout) => Some(Ok((kind, layout))), + Err(e) => Some(Err(e)), + }, + CompileKind::Host => None, + }) + .collect::>()?; + // A Vec of layouts. This is a little convoluted because there can only be + // one host_layout. + let layouts = if opts.targets.is_empty() { + vec![(CompileKind::Host, &host_layout)] + } else { + target_layouts + .iter() + .map(|(kind, layout)| (*kind, layout)) + .collect() + }; + // Create a Vec that also includes the host for things that need to clean both. + let layouts_with_host: Vec<(CompileKind, &Layout)> = + std::iter::once((CompileKind::Host, &host_layout)) + .chain(layouts.iter().map(|(k, l)| (*k, *l))) + .collect(); + + // Cleaning individual rustdoc crates is currently not supported. + // For example, the search index would need to be rebuilt to fully + // remove it (otherwise you're left with lots of broken links). + // Doc tests produce no output. + + // Get Packages for the specified specs. + let mut packages = Vec::new(); + for spec_str in opts.spec.iter() { + // Translate the spec to a Package. 
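+ // Note (illustrative): a bare `-p foo-bar` matches every version of the
+ // package in the resolve graph, while qualified specs such as
+ // `-p foo-bar:0.1.0` parse fine but only trigger the warnings below,
+ // since the glob patterns used later are keyed on the package name alone: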
+ let spec = PackageIdSpec::parse(spec_str)?; + if spec.version().is_some() { + config.shell().warn(&format!( + "version qualifier in `-p {}` is ignored, \ + cleaning all versions of `{}` found", + spec_str, + spec.name() + ))?; + } + if spec.url().is_some() { + config.shell().warn(&format!( + "url qualifier in `-p {}` ignored, \ + cleaning all versions of `{}` found", + spec_str, + spec.name() + ))?; } + let matches: Vec<_> = resolve.iter().filter(|id| spec.matches(*id)).collect(); + if matches.is_empty() { + anyhow::bail!("package ID specification `{}` matched no packages", spec); + } + packages.extend(pkg_set.get_many(matches)?); } - let unit_graph = unit_dependencies::build_unit_dependencies( - ws, - &pkg_set, - &resolve, - &features, - None, - &units, - &Default::default(), - build_config.mode, - &target_data, - &profiles, - &interner, - )?; - let extra_args = HashMap::new(); - let bcx = BuildContext::new( - ws, - pkg_set, - &build_config, - profiles, - extra_args, - target_data, - units, - unit_graph, - )?; - let mut cx = Context::new(&bcx)?; - cx.prepare_units()?; - - for unit in &bcx.roots { - if unit.mode.is_doc() || unit.mode.is_doc_test() { - // Cleaning individual rustdoc crates is currently not supported. - // For example, the search index would need to be rebuilt to fully - // remove it (otherwise you're left with lots of broken links). - // Doc tests produce no output. - continue; + for pkg in packages { + let pkg_dir = format!("{}-*", pkg.name()); + + // Clean fingerprints. + for (_, layout) in &layouts_with_host { + rm_rf_glob(&layout.fingerprint().join(&pkg_dir), config)?; } - rm_rf(&cx.files().fingerprint_dir(unit), config)?; - if unit.target.is_custom_build() { - if unit.mode.is_run_custom_build() { - rm_rf(&cx.files().build_script_out_dir(unit), config)?; - } else { - rm_rf(&cx.files().build_script_dir(unit), config)?; + + for target in pkg.targets() { + if target.is_custom_build() { + // Get both the build_script_build and the output directory. + for (_, layout) in &layouts_with_host { + rm_rf_glob(&layout.build().join(&pkg_dir), config)?; + } + continue; } - continue; - } + let crate_name = target.crate_name(); + for &mode in &[ + CompileMode::Build, + CompileMode::Test, + CompileMode::Check { test: false }, + ] { + for (compile_kind, layout) in &layouts { + let triple = target_data.short_name(compile_kind); + + let (file_types, _unsupported) = target_data + .info(*compile_kind) + .rustc_outputs(mode, target.kind(), triple)?; + let (dir, uplift_dir) = if target.is_example() { + (layout.examples(), layout.examples()) + } else { + (layout.deps(), layout.dest()) + }; + for file_type in file_types { + // Some files include a hash in the filename, some don't. + let hashed_name = file_type.output_filename(target, Some("*")); + let unhashed_name = file_type.output_filename(target, None); + rm_rf_glob(&dir.join(&hashed_name), config)?; + rm_rf(&dir.join(&unhashed_name), config)?; + // Remove dep-info file generated by rustc. It is not tracked in + // file_types. It does not have a prefix. + let hashed_dep_info = dir.join(format!("{}-*.d", crate_name)); + let unhashed_dep_info = dir.join(format!("{}.d", crate_name)); + rm_rf_glob(&hashed_dep_info, config)?; + rm_rf(&unhashed_dep_info, config)?; - for output in cx.outputs(unit)?.iter() { - rm_rf(&output.path, config)?; - if let Some(ref dst) = output.hardlink { - rm_rf(dst, config)?; + // Remove the uplifted copy. 
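+ // Putting the globs together for a hypothetical package `foo-bar` with an
+ // rlib target: the patterns above remove e.g. `deps/libfoo_bar-*.rlib`,
+ // `deps/libfoo_bar.rlib`, `deps/foo_bar-*.d`, and `deps/foo_bar.d`; next
+ // comes the hash-free uplifted copy in the destination directory
+ // (e.g. `target/debug/libfoo_bar.rlib`):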
+ let uplifted_path = uplift_dir.join(file_type.uplift_filename(target)); + rm_rf(&uplifted_path, config)?; + // Dep-info generated by Cargo itself. + let dep_info = uplifted_path.with_extension("d"); + rm_rf(&dep_info, config)?; + } + // TODO: what to do about build_script_build? + let incremental = layout.incremental().join(format!("{}-*", crate_name)); + rm_rf_glob(&incremental, config)?; + } } } } @@ -197,8 +182,19 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { Ok(()) } +fn rm_rf_glob(pattern: &Path, config: &Config) -> CargoResult<()> { + // TODO: Display utf8 warning to user? Or switch to globset? + let pattern = pattern + .to_str() + .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?; + for path in glob::glob(pattern)? { + rm_rf(&path?, config)?; + } + Ok(()) +} + fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> { - let m = fs::metadata(path); + let m = fs::symlink_metadata(path); if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { config .shell() diff --git a/tests/testsuite/clean.rs b/tests/testsuite/clean.rs index 32f3b29b539..673dc4969a1 100644 --- a/tests/testsuite/clean.rs +++ b/tests/testsuite/clean.rs @@ -1,9 +1,10 @@ //! Tests for the `cargo clean` command. -use std::env; - +use cargo_test_support::paths::CargoPathExt; use cargo_test_support::registry::Package; -use cargo_test_support::{basic_bin_manifest, basic_manifest, git, main_file, project}; +use cargo_test_support::{basic_bin_manifest, basic_manifest, git, main_file, project, rustc_host}; +use std::env; +use std::path::Path; #[cargo_test] fn cargo_clean_simple() { @@ -291,6 +292,7 @@ fn clean_verbose() { [REMOVING] [..] [REMOVING] [..] [REMOVING] [..] +[REMOVING] [..] ", ) .run(); @@ -319,3 +321,163 @@ fn clean_remove_rlib_rmeta() { assert!(!p.target_debug_dir().join("libfoo.rlib").exists()); assert!(!rmeta.exists()); } + +#[cargo_test] +fn package_cleans_all_the_things() { + // -p cleans everything + // Use dashes everywhere to make sure dash/underscore stuff is handled. + for crate_type in &["rlib", "dylib", "cdylib", "staticlib", "proc-macro"] { + // Try each crate type individually since the behavior changes when + // they are combined. 
+ let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo-bar" + version = "0.1.0" + + [lib] + crate-type = ["{}"] + "#, + crate_type + ), + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build").run(); + p.cargo("clean -p foo-bar").run(); + assert_all_clean(&p.build_dir()); + } + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo-bar" + version = "0.1.0" + edition = "2018" + + [lib] + crate-type = ["rlib", "dylib", "staticlib"] + + [[example]] + name = "foo-ex-rlib" + crate-type = ["rlib"] + test = true + + [[example]] + name = "foo-ex-cdylib" + crate-type = ["cdylib"] + test = true + + [[example]] + name = "foo-ex-bin" + test = true + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("src/bin/other-main.rs", "fn main() {}") + .file("examples/foo-ex-rlib.rs", "") + .file("examples/foo-ex-cdylib.rs", "") + .file("examples/foo-ex-bin.rs", "fn main() {}") + .file("tests/foo-test.rs", "") + .file("benches/foo-bench.rs", "") + .file("build.rs", "fn main() {}") + .build(); + + p.cargo("build --all-targets") + .env("CARGO_INCREMENTAL", "1") + .run(); + p.cargo("test --all-targets") + .env("CARGO_INCREMENTAL", "1") + .run(); + p.cargo("check --all-targets") + .env("CARGO_INCREMENTAL", "1") + .run(); + p.cargo("clean -p foo-bar").run(); + assert_all_clean(&p.build_dir()); + + // Try some targets. + p.cargo("build --all-targets --target") + .arg(rustc_host()) + .run(); + p.cargo("clean -p foo-bar --target").arg(rustc_host()).run(); + assert_all_clean(&p.build_dir()); +} + +// Ensures that all files for the package have been deleted. +fn assert_all_clean(build_dir: &Path) { + let walker = walkdir::WalkDir::new(build_dir).into_iter(); + for entry in walker.filter_entry(|e| { + let path = e.path(); + // This is a known limitation, clean can't differentiate between + // the different build scripts from different packages. 
+ !(path + .file_name() + .unwrap() + .to_str() + .unwrap() + .starts_with("build_script_build") + && path + .parent() + .unwrap() + .file_name() + .unwrap() + .to_str() + .unwrap() + == "incremental") + }) { + let entry = entry.unwrap(); + let path = entry.path(); + if let ".rustc_info.json" | ".cargo-lock" = path.file_name().unwrap().to_str().unwrap() { + continue; + } + if path.is_symlink() || path.is_file() { + panic!("{:?} was not cleaned", path); + } + } +} + +#[cargo_test] +fn clean_spec_multiple() { + // clean -p foo where foo matches multiple versions + Package::new("bar", "1.0.0").publish(); + Package::new("bar", "2.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar1 = {version="1.0", package="bar"} + bar2 = {version="2.0", package="bar"} + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("clean -p bar:1.0.0") + .with_stderr( + "warning: version qualifier in `-p bar:1.0.0` is ignored, \ + cleaning all versions of `bar` found", + ) + .run(); + let mut walker = walkdir::WalkDir::new(p.build_dir()) + .into_iter() + .filter_map(|e| e.ok()) + .filter(|e| { + let n = e.file_name().to_str().unwrap(); + n.starts_with("bar") || n.starts_with("libbar") + }); + if let Some(e) = walker.next() { + panic!("{:?} was not cleaned", e.path()); + } +} From 5babb2aea8ad69196b4587868d1f3fddb864414b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kornel=20Lesin=CC=81ski?= Date: Wed, 6 May 2020 14:56:11 +0100 Subject: [PATCH 10/24] Avoid testing git-specific error messages #8166 --- tests/testsuite/git_auth.rs | 27 ++------------------------- 1 file changed, 2 insertions(+), 25 deletions(-) diff --git a/tests/testsuite/git_auth.rs b/tests/testsuite/git_auth.rs index 5aea1a9c5c1..8fa920d0f6a 100644 --- a/tests/testsuite/git_auth.rs +++ b/tests/testsuite/git_auth.rs @@ -320,30 +320,7 @@ Caused by: p.cargo("build -v") .with_status(101) - .with_stderr( - "\ -[UPDATING] git repository `ssh://needs-proxy.invalid/git` -[RUNNING] `git fetch[..] -[ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]` - -Caused by: - failed to load source for dependency `foo` - -Caused by: - Unable to update ssh://needs-proxy.invalid/git - -Caused by: - failed to fetch into: [..] - -Caused by: - [..]process didn't exit successfully[..] ---- stderr -ssh: Could not resolve hostname[..] -fatal: [..] - -Please make sure you have the correct access rights -and the repository exists. -[..]", - ) + .with_stderr_contains("[..]Unable to update[..]") + .with_stderr_does_not_contain("[..]try enabling `git-fetch-with-cli`[..]") .run(); } From bd5f563856295e455f943207809ff85adf0d1401 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Wed, 6 May 2020 09:36:07 -0700 Subject: [PATCH 11/24] clean -p: call `get_many` once. --- src/cargo/ops/cargo_clean.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index b9761dbeff0..a8b39c4b5b9 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -91,7 +91,7 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { // Doc tests produce no output. // Get Packages for the specified specs. - let mut packages = Vec::new(); + let mut pkg_ids = Vec::new(); for spec_str in opts.spec.iter() { // Translate the spec to a Package. 
let spec = PackageIdSpec::parse(spec_str)?; @@ -115,8 +115,9 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { if matches.is_empty() { anyhow::bail!("package ID specification `{}` matched no packages", spec); } - packages.extend(pkg_set.get_many(matches)?); + pkg_ids.extend(matches); } + let packages = pkg_set.get_many(pkg_ids)?; for pkg in packages { let pkg_dir = format!("{}-*", pkg.name()); From 7438770be8d1d02b6569d22ee5e38b4b63eea66b Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Wed, 6 May 2020 12:06:39 -0700 Subject: [PATCH 12/24] Revert always computing filename Metadata. When a unit does not have Metadata, the computation of fingerprints depends on reusing the same fingerprint file to detect if the output needs to be rebuilt. The previous change that put each unit's fingerprint into a separate directory was wrong, and broke change detection in this case (for example, executables on MSVC). Instead, use a different approach to deal with compiler output caching by using the same naming convention as the fingerprint file itself. --- .../compiler/context/compilation_files.rs | 59 ++++++++++++++----- src/cargo/core/compiler/context/mod.rs | 2 +- src/cargo/core/compiler/fingerprint.rs | 27 ++------- src/cargo/core/compiler/layout.rs | 2 +- src/cargo/core/compiler/mod.rs | 4 +- tests/testsuite/cache_messages.rs | 27 +++++++-- 6 files changed, 73 insertions(+), 48 deletions(-) diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index 09a8bf0587b..aef7e69dc28 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -85,7 +85,7 @@ pub struct CompilationFiles<'a, 'cfg> { roots: Vec, ws: &'a Workspace<'cfg>, /// Metadata hash to use for each unit. - metas: HashMap, + metas: HashMap>, /// For each Unit, a list all files produced. outputs: HashMap>>>, } @@ -154,12 +154,8 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { /// /// Returns `None` if the unit should not use a metadata hash (like /// rustdoc, or some dylibs). - pub fn metadata(&self, bcx: &BuildContext<'_, '_>, unit: &Unit) -> Option { - if should_use_metadata(bcx, unit) { - Some(self.metas[unit]) - } else { - None - } + pub fn metadata(&self, unit: &Unit) -> Option { + self.metas[unit] } /// Gets the short hash based only on the `PackageId`. @@ -192,10 +188,14 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { /// Directory name to use for a package in the form `NAME-HASH`. /// - /// The hash is unique per Unit. - pub fn pkg_dir(&self, unit: &Unit) -> String { + /// Note that some units may share the same directory, so care should be + /// taken in those cases! + fn pkg_dir(&self, unit: &Unit) -> String { let name = unit.pkg.package_id().name(); - format!("{}-{}", name, self.metas[unit]) + match self.metas[unit] { + Some(ref meta) => format!("{}-{}", name, meta), + None => format!("{}-{}", name, self.target_short_hash(unit)), + } } /// Returns the root of the build output tree for the host @@ -220,9 +220,29 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { self.layout(unit.kind).fingerprint().join(dir) } + /// Returns the path for a file in the fingerprint directory. + /// + /// The "prefix" should be something to distinguish the file from other + /// files in the fingerprint directory. 
+ pub fn fingerprint_file_path(&self, unit: &Unit, prefix: &str) -> PathBuf { + // Different targets need to be distinguished in the + let kind = unit.target.kind().description(); + let flavor = if unit.mode.is_any_test() { + "test-" + } else if unit.mode.is_doc() { + "doc-" + } else if unit.mode.is_run_custom_build() { + "run-" + } else { + "" + }; + let name = format!("{}{}{}-{}", prefix, flavor, kind, unit.target.name()); + self.fingerprint_dir(unit).join(name) + } + /// Path where compiler output is cached. pub fn message_cache_path(&self, unit: &Unit) -> PathBuf { - self.fingerprint_dir(unit).join("output") + self.fingerprint_file_path(unit, "output-") } /// Returns the directory where a compiled build script is stored. @@ -414,7 +434,7 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { // Convert FileType to OutputFile. let mut outputs = Vec::new(); for file_type in file_types { - let meta = self.metadata(bcx, unit).map(|m| m.to_string()); + let meta = self.metadata(unit).map(|m| m.to_string()); let path = out_dir.join(file_type.output_filename(&unit.target, meta.as_deref())); let hardlink = self.uplift_to(unit, &file_type, &path); let export_path = if unit.target.is_custom_build() { @@ -437,7 +457,11 @@ impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { } } -fn metadata_of(unit: &Unit, cx: &Context<'_, '_>, metas: &mut HashMap) -> Metadata { +fn metadata_of( + unit: &Unit, + cx: &Context<'_, '_>, + metas: &mut HashMap>, +) -> Option { if !metas.contains_key(unit) { let meta = compute_metadata(unit, cx, metas); metas.insert(unit.clone(), meta); @@ -451,9 +475,12 @@ fn metadata_of(unit: &Unit, cx: &Context<'_, '_>, metas: &mut HashMap, - metas: &mut HashMap, -) -> Metadata { + metas: &mut HashMap>, +) -> Option { let bcx = &cx.bcx; + if !should_use_metadata(bcx, unit) { + return None; + } let mut hasher = SipHasher::new(); // This is a generic version number that can be changed to make @@ -526,7 +553,7 @@ fn compute_metadata( // with user dependencies. unit.is_std.hash(&mut hasher); - Metadata(hasher.finish()) + Some(Metadata(hasher.finish())) } fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut SipHasher) { diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 318dc8655b4..60f2b8af34d 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -377,7 +377,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata { assert!(unit.mode.is_run_custom_build()); self.files() - .metadata(self.bcx, unit) + .metadata(unit) .expect("build script should always have hash") } diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index eaf88e2ee5c..145a622bb1f 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -43,8 +43,9 @@ //! The `Metadata` hash is a hash added to the output filenames to isolate //! each unit. See the documentation in the `compilation_files` module for //! more details. NOTE: Not all output files are isolated via filename hashes -//! (like dylibs), but the fingerprint directory always has the `Metadata` -//! hash in its directory name. +//! (like dylibs). The fingerprint directory uses a hash, but sometimes units +//! share the same fingerprint directory (when they don't have Metadata) so +//! care should be taken to handle this! //! //! Fingerprints and Metadata are similar, and track some of the same things. //! 
The Metadata contains information that is required to keep Units separate. @@ -352,8 +353,7 @@ pub fn prepare_target(cx: &mut Context<'_, '_>, unit: &Unit, force: bool) -> Car unit.target.name() )); let bcx = cx.bcx; - let new = cx.files().fingerprint_dir(unit); - let loc = new.join(&filename(unit)); + let loc = cx.files().fingerprint_file_path(unit, ""); debug!("fingerprint at: {}", loc.display()); @@ -1522,9 +1522,7 @@ pub fn prepare_init(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> { /// Returns the location that the dep-info file will show up at for the `unit` /// specified. pub fn dep_info_loc(cx: &mut Context<'_, '_>, unit: &Unit) -> PathBuf { - cx.files() - .fingerprint_dir(unit) - .join(&format!("dep-{}", filename(unit))) + cx.files().fingerprint_file_path(unit, "dep-") } /// Returns an absolute path that target directory. @@ -1680,21 +1678,6 @@ where None } -/// Returns the filename used for the fingerprint file. -fn filename(unit: &Unit) -> String { - let kind = unit.target.kind().description(); - let flavor = if unit.mode.is_any_test() { - "test-" - } else if unit.mode.is_doc() { - "doc-" - } else if unit.mode.is_run_custom_build() { - "run-" - } else { - "" - }; - format!("{}{}-{}", flavor, kind, unit.target.name()) -} - #[repr(u8)] enum DepInfoPathType { // src/, e.g. src/lib.rs diff --git a/src/cargo/core/compiler/layout.rs b/src/cargo/core/compiler/layout.rs index c9f8fcedff0..53c615fbe1d 100644 --- a/src/cargo/core/compiler/layout.rs +++ b/src/cargo/core/compiler/layout.rs @@ -39,7 +39,7 @@ //! lib-$targetname.json //! # The console output from the compiler. This is cached //! # so that warnings can be redisplayed for "fresh" units. -//! output +//! output-lib-$targetname //! //! # This is the root directory for all rustc artifacts except build //! # scripts, examples, and test and bench executables. Almost every diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 315ec0bc644..93922ea7e0e 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -194,7 +194,7 @@ fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> Car let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); let pass_cdylib_link_args = unit.target.is_cdylib(); - let dep_info_name = match cx.files().metadata(cx.bcx, unit) { + let dep_info_name = match cx.files().metadata(unit) { Some(metadata) => format!("{}-{}.d", unit.target.crate_name(), metadata), None => format!("{}.d", unit.target.crate_name()), }; @@ -870,7 +870,7 @@ fn build_base_args( cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } - match cx.files().metadata(cx.bcx, unit) { + match cx.files().metadata(unit) { Some(m) => { cmd.arg("-C").arg(&format!("metadata={}", m)); cmd.arg("-C").arg(&format!("extra-filename=-{}", m)); diff --git a/tests/testsuite/cache_messages.rs b/tests/testsuite/cache_messages.rs index feaeff7e62a..1df15b22764 100644 --- a/tests/testsuite/cache_messages.rs +++ b/tests/testsuite/cache_messages.rs @@ -194,7 +194,7 @@ fn clears_cache_after_fix() { // Fill the cache. 
p.cargo("check").with_stderr_contains("[..]asdf[..]").run(); let cpath = p - .glob("target/debug/.fingerprint/foo-*/output") + .glob("target/debug/.fingerprint/foo-*/output-*") .next() .unwrap() .unwrap(); @@ -215,7 +215,10 @@ fn clears_cache_after_fix() { ", ) .run(); - assert_eq!(p.glob("target/debug/.fingerprint/foo-*/output").count(), 0); + assert_eq!( + p.glob("target/debug/.fingerprint/foo-*/output-*").count(), + 0 + ); // And again, check the cache is correct. p.cargo("check") @@ -253,7 +256,10 @@ fn rustdoc() { let rustdoc_stderr = as_str(&rustdoc_output.stderr); assert!(rustdoc_stderr.contains("private")); assert!(rustdoc_stderr.contains("\x1b[")); - assert_eq!(p.glob("target/debug/.fingerprint/foo-*/output").count(), 1); + assert_eq!( + p.glob("target/debug/.fingerprint/foo-*/output-*").count(), + 1 + ); // Check the cached output. let rustdoc_output = p @@ -331,14 +337,23 @@ fn doesnt_create_extra_files() { p.cargo("build").run(); - assert_eq!(p.glob("target/debug/.fingerprint/foo-*/output").count(), 0); - assert_eq!(p.glob("target/debug/.fingerprint/dep-*/output").count(), 0); + assert_eq!( + p.glob("target/debug/.fingerprint/foo-*/output-*").count(), + 0 + ); + assert_eq!( + p.glob("target/debug/.fingerprint/dep-*/output-*").count(), + 0 + ); if is_coarse_mtime() { sleep_ms(1000); } p.change_file("src/lib.rs", "fn unused() {}"); p.cargo("build").run(); - assert_eq!(p.glob("target/debug/.fingerprint/foo-*/output").count(), 1); + assert_eq!( + p.glob("target/debug/.fingerprint/foo-*/output-*").count(), + 1 + ); } #[cargo_test] From b3616c08a450d3ea1d036f94bd583bf73219f54d Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Thu, 7 May 2020 19:05:05 -0700 Subject: [PATCH 13/24] Try to remove secrets from http.debug. --- src/cargo/ops/registry.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 570e6f522bd..a20a22c15d1 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -556,7 +556,12 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult< }; match str::from_utf8(data) { Ok(s) => { - for line in s.lines() { + for mut line in s.lines() { + if line.starts_with("Authorization:") { + line = "Authorization: [REDACTED]"; + } else if line[..line.len().min(10)].eq_ignore_ascii_case("set-cookie") { + line = "set-cookie: [REDACTED]"; + } log!(level, "http-debug: {} {}", prefix, line); } } From 0f9442ffc2091fc17df4827c2bede4c36374d038 Mon Sep 17 00:00:00 2001 From: Hanif Bin Ariffin Date: Fri, 8 May 2020 05:13:49 -0400 Subject: [PATCH 14/24] Updated comments in resolve.rs to reflect actual data strcture used. --- src/cargo/core/resolver/resolve.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index 2d442c0214a..030f956cdb2 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -16,14 +16,14 @@ use std::fmt; /// for each package. pub struct Resolve { /// A graph, whose vertices are packages and edges are dependency specifications - /// from `Cargo.toml`. We need a `Vec` here because the same package + /// from `Cargo.toml`. We need a `HashSet` here because the same package /// might be present in both `[dependencies]` and `[build-dependencies]`. graph: Graph>, /// Replacements from the `[replace]` table. replacements: HashMap, /// Inverted version of `replacements`. 
reverse_replacements: HashMap, - /// An empty `HashSet` to avoid creating a new `HashSet` for every package + /// An empty `Vec` to avoid creating a new `Vec` for every package /// that does not have any features, and to avoid using `Option` to /// simplify the API. empty_features: Vec, From 2a9722592f98acbed9fa45ce1f185fcc2323761c Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 8 May 2020 08:23:31 -0700 Subject: [PATCH 15/24] Update assertions in LTO calculations Turns out a case I thought didn't happen can indeed happen. Units may depend on other units which are LTO-able because integration tests can depend on binaries. Handle that case internally and remove a few panics. Closes #8223 --- src/cargo/core/compiler/lto.rs | 17 +++++----- tests/testsuite/lto.rs | 58 ++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+), 8 deletions(-) diff --git a/src/cargo/core/compiler/lto.rs b/src/cargo/core/compiler/lto.rs index 0ea29353d29..c808520d39b 100644 --- a/src/cargo/core/compiler/lto.rs +++ b/src/cargo/core/compiler/lto.rs @@ -45,8 +45,10 @@ fn calculate( (Lto::None, false) } else if unit.target.can_lto() { // Otherwise if this target can perform LTO then we're going to read the - // LTO value out of the profile. - assert!(!require_bitcode); // can't depend on binaries/staticlib/etc + // LTO value out of the profile. Note that we ignore `require_bitcode` + // here because if a unit depends on another unit than can LTO this + // isn't a rustc-level dependency but rather a Cargo-level dependency. + // For example this is an integration test depending on a binary. match unit.profile.lto { profiles::Lto::Named(s) => match s.as_str() { "n" | "no" | "off" => (Lto::Run(Some(s)), false), @@ -73,12 +75,11 @@ fn calculate( Entry::Occupied(mut v) => { let result = match (lto, v.get()) { - // Targets which execute LTO cannot be depended on, so these - // units should only show up once in the dependency graph, so we - // should never hit this case. - (Lto::Run(_), _) | (_, Lto::Run(_)) => { - unreachable!("lto-able targets shouldn't show up twice") - } + // Once we're running LTO we keep running LTO. We should always + // calculate the same thing here each iteration because if we + // see this twice then it means, for example, two unit tests + // depend on a binary, which is normal. + (Lto::Run(s), _) | (_, &Lto::Run(s)) => Lto::Run(s), // If we calculated the same thing as before then we can bail // out quickly. 
diff --git a/tests/testsuite/lto.rs b/tests/testsuite/lto.rs index a671e27e04d..f5e2fb5bd68 100644 --- a/tests/testsuite/lto.rs +++ b/tests/testsuite/lto.rs @@ -278,3 +278,61 @@ fn between_builds() { p.cargo("build -v --release --lib").run(); p.cargo("build -v --release").run(); } + +#[cargo_test] +fn test_all() { + if !cargo_test_support::is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + + [profile.release] + lto = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("tests/a.rs", "") + .file("tests/b.rs", "") + .build(); + p.cargo("test --release -v") + .with_stderr_contains("[RUNNING] `rustc[..]--crate-name foo[..]-C lto[..]") + .run(); +} + +#[cargo_test] +fn test_all_and_bench() { + if !cargo_test_support::is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + + [profile.release] + lto = true + [profile.bench] + lto = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("tests/a.rs", "") + .file("tests/b.rs", "") + .build(); + p.cargo("test --release -v") + .with_stderr_contains("[RUNNING] `rustc[..]--crate-name a[..]-C lto[..]") + .with_stderr_contains("[RUNNING] `rustc[..]--crate-name b[..]-C lto[..]") + .with_stderr_contains("[RUNNING] `rustc[..]--crate-name foo[..]-C lto[..]") + .run(); +} From ee648ea310f8a539f8eef3fea59b3e6431e9dd91 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Fri, 8 May 2020 10:18:45 -0700 Subject: [PATCH 16/24] Document that bench is unstable in the man page. --- src/doc/man/cargo-bench.adoc | 10 ++++++++++ src/doc/man/generated/cargo-bench.html | 14 ++++++++++++++ src/etc/man/cargo-bench.1 | 20 ++++++++++++++++++-- 3 files changed, 42 insertions(+), 2 deletions(-) diff --git a/src/doc/man/cargo-bench.adoc b/src/doc/man/cargo-bench.adoc index c65c66c9ea9..79acc6f2674 100644 --- a/src/doc/man/cargo-bench.adoc +++ b/src/doc/man/cargo-bench.adoc @@ -36,6 +36,16 @@ The libtest harness may be disabled by setting `harness = false` in the target manifest settings, in which case your code will need to provide its own `main` function to handle running benchmarks. + +> **Note**: The +> link:https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html[`#[bench\]` attribute] +> is currently unstable and only available on the +> link:https://doc.rust-lang.org/book/appendix-07-nightly-rust.html[nightly channel]. +> There are some packages available on +> link:https://crates.io/keywords/benchmark[crates.io] that may help with +> running benchmarks on the stable channel, such as +> link:https://crates.io/crates/criterion[Criterion]. + == OPTIONS === Benchmark Options diff --git a/src/doc/man/generated/cargo-bench.html b/src/doc/man/generated/cargo-bench.html index bdba100732a..d97adce558f 100644 --- a/src/doc/man/generated/cargo-bench.html +++ b/src/doc/man/generated/cargo-bench.html @@ -42,6 +42,20 @@

 [HTML markup elided: this hunk adds the same admonition to the DESCRIPTION
 section of the generated cargo-bench.html, after the paragraph ending
 "...provide its own main function to handle running benchmarks.":
 "Note: The #[bench] attribute is currently unstable and only available on
 the nightly channel. There are some packages available on crates.io that
 may help with running benchmarks on the stable channel, such as Criterion."]
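Not part of this patch: the note above points users at Criterion for running
benchmarks on the stable channel. As a minimal sketch (assuming `criterion` is
added as a dev-dependency and the `[[bench]]` target sets `harness = false`,
as the surrounding docs describe; the `fibonacci` function and the bench name
are purely illustrative), such a benchmark looks roughly like:

    // benches/fib.rs: hypothetical example, not taken from this patch.
    use criterion::{black_box, criterion_group, criterion_main, Criterion};

    // Illustrative function under test.
    fn fibonacci(n: u64) -> u64 {
        match n {
            0 | 1 => 1,
            _ => fibonacci(n - 1) + fibonacci(n - 2),
        }
    }

    fn bench_fib(c: &mut Criterion) {
        // `black_box` keeps the input from being constant-folded away.
        c.bench_function("fib 20", |b| b.iter(|| fibonacci(black_box(20))));
    }

    criterion_group!(benches, bench_fib);
    criterion_main!(benches);

Unlike `#[bench]`, this runs on stable because Criterion supplies its own
`main` through `criterion_main!` instead of relying on the unstable libtest
harness.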
diff --git a/src/etc/man/cargo-bench.1 b/src/etc/man/cargo-bench.1 index 708d2066a8b..442f164d893 100644 --- a/src/etc/man/cargo-bench.1 +++ b/src/etc/man/cargo-bench.1 @@ -2,12 +2,12 @@ .\" Title: cargo-bench .\" Author: [see the "AUTHOR(S)" section] .\" Generator: Asciidoctor 2.0.10 -.\" Date: 2020-02-06 +.\" Date: 2020-05-08 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" -.TH "CARGO\-BENCH" "1" "2020-02-06" "\ \&" "\ \&" +.TH "CARGO\-BENCH" "1" "2020-05-08" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 @@ -59,6 +59,22 @@ the test harness to tell it to run only benchmarks. The libtest harness may be disabled by setting \fBharness = false\fP in the target manifest settings, in which case your code will need to provide its own \fBmain\fP function to handle running benchmarks. +.RS 3 +.ll -.6i +.sp +\fBNote\fP: The +\c +.URL "https://doc.rust\-lang.org/nightly/unstable\-book/library\-features/test.html" "\fB#[bench]\fP attribute" +is currently unstable and only available on the +.URL "https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html" "nightly channel" "." +There are some packages available on +.URL "https://crates.io/keywords/benchmark" "crates.io" " " +that may help with +running benchmarks on the stable channel, such as +.URL "https://crates.io/crates/criterion" "Criterion" "." +.br +.RE +.ll .SH "OPTIONS" .SS "Benchmark Options" .sp From 63ffc6ae883f366ae008148c8e6fe93bbbd3a3b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20Kr=C3=BCger?= Date: Fri, 8 May 2020 23:16:30 +0200 Subject: [PATCH 17/24] more clippy fixes --- src/cargo/core/compiler/build_context/target_info.rs | 6 +++--- src/cargo/core/compiler/mod.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index d3bab3365bf..ab3dc1c26a9 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -375,7 +375,7 @@ impl TargetInfo { suffix, prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, - crate_type: Some(crate_type.clone()), + crate_type: Some(crate_type), // macOS tools like lldb use all sorts of magic to locate // dSYM files. See https://lldb.llvm.org/use/symbols.html // for some details. It seems like a `.dSYM` located next @@ -389,7 +389,7 @@ impl TargetInfo { suffix: ".pdb".to_string(), prefix: prefix.clone(), flavor: FileFlavor::DebugInfo, - crate_type: Some(crate_type.clone()), + crate_type: Some(crate_type), // The absolute path to the pdb file is embedded in the // executable. 
If the exe/pdb pair is moved to another // machine, then debuggers will look in the same directory @@ -466,7 +466,7 @@ impl TargetInfo { } else { FileFlavor::Normal }; - let file_types = self.file_types(&crate_type, flavor, target_triple)?; + let file_types = self.file_types(crate_type, flavor, target_triple)?; match file_types { Some(types) => { result.extend(types); diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 93922ea7e0e..aaa7c7ef591 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -777,7 +777,7 @@ fn build_base_args( cmd.arg("-C").arg(format!("panic={}", panic)); } - match cx.lto[&unit] { + match cx.lto[unit] { lto::Lto::Run(None) => { cmd.arg("-C").arg("lto"); } From 67b10f745d10c1de90c307a31910a0bb2fe533f8 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Mon, 11 May 2020 13:45:18 -0700 Subject: [PATCH 18/24] Move SipHasher to an isolated module. --- .../compiler/context/compilation_files.rs | 8 +++---- src/cargo/core/compiler/context/mod.rs | 1 - src/cargo/util/hasher.rs | 24 +++++++++++++++++++ src/cargo/util/hex.rs | 9 ++++--- src/cargo/util/mod.rs | 2 ++ src/cargo/util/rustc.rs | 10 ++++---- 6 files changed, 38 insertions(+), 16 deletions(-) create mode 100644 src/cargo/util/hasher.rs diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index aef7e69dc28..04d75fe0ac5 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; use std::env; use std::fmt; -use std::hash::{Hash, Hasher, SipHasher}; +use std::hash::{Hash, Hasher}; use std::path::{Path, PathBuf}; use std::sync::Arc; @@ -11,7 +11,7 @@ use log::info; use super::{BuildContext, CompileKind, Context, FileFlavor, Layout}; use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit}; use crate::core::{Target, TargetKind, Workspace}; -use crate::util::{self, CargoResult}; +use crate::util::{self, CargoResult, StableHasher}; /// The `Metadata` is a hash used to make unique file names for each unit in a /// build. It is also use for symbol mangling. @@ -481,7 +481,7 @@ fn compute_metadata( if !should_use_metadata(bcx, unit) { return None; } - let mut hasher = SipHasher::new(); + let mut hasher = StableHasher::new(); // This is a generic version number that can be changed to make // backwards-incompatible changes to any file structures in the output @@ -556,7 +556,7 @@ fn compute_metadata( Some(Metadata(hasher.finish())) } -fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut SipHasher) { +fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { let vers = &bcx.rustc().version; if vers.pre.is_empty() || bcx.config.cli_unstable().separate_nightlies { // For stable, keep the artifacts separate. This helps if someone is diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index 60f2b8af34d..fbba329c2e1 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -1,4 +1,3 @@ -#![allow(deprecated)] use std::collections::{BTreeSet, HashMap, HashSet}; use std::path::PathBuf; use std::sync::{Arc, Mutex}; diff --git a/src/cargo/util/hasher.rs b/src/cargo/util/hasher.rs new file mode 100644 index 00000000000..01e15ae2c04 --- /dev/null +++ b/src/cargo/util/hasher.rs @@ -0,0 +1,24 @@ +//! Implementation of a hasher that produces the same values across releases. 
+//! +//! The hasher should be fast and have a low chance of collisions (but is not +//! sufficient for cryptographic purposes). +#![allow(deprecated)] + +use std::hash::{Hasher, SipHasher}; + +pub struct StableHasher(SipHasher); + +impl StableHasher { + pub fn new() -> StableHasher { + StableHasher(SipHasher::new()) + } +} + +impl Hasher for StableHasher { + fn finish(&self) -> u64 { + self.0.finish() + } + fn write(&mut self, bytes: &[u8]) { + self.0.write(bytes) + } +} diff --git a/src/cargo/util/hex.rs b/src/cargo/util/hex.rs index baeb34781ec..0263e7a2d9a 100644 --- a/src/cargo/util/hex.rs +++ b/src/cargo/util/hex.rs @@ -1,7 +1,6 @@ -#![allow(deprecated)] - +use super::StableHasher; use std::fs::File; -use std::hash::{Hash, Hasher, SipHasher}; +use std::hash::{Hash, Hasher}; use std::io::Read; pub fn to_hex(num: u64) -> String { @@ -18,13 +17,13 @@ pub fn to_hex(num: u64) -> String { } pub fn hash_u64(hashable: H) -> u64 { - let mut hasher = SipHasher::new(); + let mut hasher = StableHasher::new(); hashable.hash(&mut hasher); hasher.finish() } pub fn hash_u64_file(mut file: &File) -> std::io::Result { - let mut hasher = SipHasher::new_with_keys(0, 0); + let mut hasher = StableHasher::new(); let mut buf = [0; 64 * 1024]; loop { let n = file.read(&mut buf)?; diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs index 45e44ba61cf..7f2ba2697e4 100644 --- a/src/cargo/util/mod.rs +++ b/src/cargo/util/mod.rs @@ -9,6 +9,7 @@ pub use self::errors::{CargoResult, CargoResultExt, CliResult, Test}; pub use self::errors::{CargoTestError, CliError, ProcessError}; pub use self::flock::{FileLock, Filesystem}; pub use self::graph::Graph; +pub use self::hasher::StableHasher; pub use self::hex::{hash_u64, short_hash, to_hex}; pub use self::into_url::IntoUrl; pub use self::into_url_with_base::IntoUrlWithBase; @@ -39,6 +40,7 @@ pub mod diagnostic_server; pub mod errors; mod flock; pub mod graph; +mod hasher; pub mod hex; pub mod important_paths; pub mod into_url; diff --git a/src/cargo/util/rustc.rs b/src/cargo/util/rustc.rs index eb2176b7b9e..2ab08fec6cc 100644 --- a/src/cargo/util/rustc.rs +++ b/src/cargo/util/rustc.rs @@ -1,8 +1,6 @@ -#![allow(deprecated)] // for SipHasher - use std::collections::hash_map::{Entry, HashMap}; use std::env; -use std::hash::{Hash, Hasher, SipHasher}; +use std::hash::{Hash, Hasher}; use std::path::{Path, PathBuf}; use std::sync::Mutex; @@ -11,7 +9,7 @@ use serde::{Deserialize, Serialize}; use crate::core::InternedString; use crate::util::paths; -use crate::util::{self, profile, CargoResult, CargoResultExt, ProcessBuilder}; +use crate::util::{self, profile, CargoResult, CargoResultExt, ProcessBuilder, StableHasher}; /// Information on the `rustc` executable #[derive(Debug)] @@ -222,7 +220,7 @@ impl Drop for Cache { } fn rustc_fingerprint(path: &Path, rustup_rustc: &Path) -> CargoResult { - let mut hasher = SipHasher::new(); + let mut hasher = StableHasher::new(); let path = paths::resolve_executable(path)?; path.hash(&mut hasher); @@ -266,7 +264,7 @@ fn rustc_fingerprint(path: &Path, rustup_rustc: &Path) -> CargoResult { } fn process_fingerprint(cmd: &ProcessBuilder) -> u64 { - let mut hasher = SipHasher::new(); + let mut hasher = StableHasher::new(); cmd.get_args().hash(&mut hasher); let mut env = cmd.get_envs().iter().collect::>(); env.sort_unstable(); From ce86e866e9802a961d43a7b38d52464ea711f752 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Mon, 11 May 2020 13:07:00 -0700 Subject: [PATCH 19/24] Add context to some fs errors. 
--- src/cargo/core/compiler/mod.rs | 4 ++-- src/cargo/core/compiler/output_depinfo.rs | 3 +-- src/cargo/core/compiler/timings.rs | 21 +++++++++++++---- src/cargo/ops/cargo_install.rs | 4 +--- src/cargo/ops/cargo_new.rs | 9 ++++---- src/cargo/ops/fix.rs | 8 +++---- src/cargo/ops/vendor.rs | 3 +-- src/cargo/sources/git/utils.rs | 3 +-- src/cargo/sources/registry/local.rs | 2 +- src/cargo/sources/registry/mod.rs | 3 ++- src/cargo/sources/registry/remote.rs | 5 ++-- src/cargo/util/config/mod.rs | 6 +++-- src/cargo/util/flock.rs | 3 +-- src/cargo/util/paths.rs | 28 +++++++++++++++++++++-- src/cargo/util/sha256.rs | 4 ++-- 15 files changed, 69 insertions(+), 37 deletions(-) diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index aaa7c7ef591..8a22e4aa050 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -1144,7 +1144,7 @@ fn on_stderr_line( // Check if caching is enabled. if let Some((path, cell)) = &mut options.cache_cell { // Cache the output, which will be replayed later when Fresh. - let f = cell.try_borrow_mut_with(|| File::create(path))?; + let f = cell.try_borrow_mut_with(|| paths::create(path))?; debug_assert!(!line.contains('\n')); f.write_all(line.as_bytes())?; f.write_all(&[b'\n'])?; @@ -1332,7 +1332,7 @@ fn replay_output_cache( // We sometimes have gigabytes of output from the compiler, so avoid // loading it all into memory at once, as that can cause OOM where // otherwise there would be none. - let file = fs::File::open(&path)?; + let file = paths::open(&path)?; let mut reader = std::io::BufReader::new(file); let mut line = String::new(); loop { diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs index 3a9fac2c6d4..d017a81f700 100644 --- a/src/cargo/core/compiler/output_depinfo.rs +++ b/src/cargo/core/compiler/output_depinfo.rs @@ -23,7 +23,6 @@ //! be detected via changes to `Cargo.lock`. 
use std::collections::{BTreeSet, HashSet}; -use std::fs::File; use std::io::{BufWriter, Write}; use std::path::{Path, PathBuf}; @@ -148,7 +147,7 @@ pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> } // Otherwise write it all out - let mut outfile = BufWriter::new(File::create(output_path)?); + let mut outfile = BufWriter::new(paths::create(output_path)?); write!(outfile, "{}:", target_fn)?; for dep in &deps { write!(outfile, " {}", dep)?; diff --git a/src/cargo/core/compiler/timings.rs b/src/cargo/core/compiler/timings.rs index 4704eb12813..20b4c7f600b 100644 --- a/src/cargo/core/compiler/timings.rs +++ b/src/cargo/core/compiler/timings.rs @@ -10,7 +10,6 @@ use crate::util::cpu::State; use crate::util::machine_message::{self, Message}; use crate::util::{paths, CargoResult, Config}; use std::collections::HashMap; -use std::fs::File; use std::io::{BufWriter, Write}; use std::time::{Duration, Instant, SystemTime}; @@ -122,6 +121,17 @@ impl<'cfg> Timings<'cfg> { .collect(); let start_str = humantime::format_rfc3339_seconds(SystemTime::now()).to_string(); let profile = bcx.build_config.requested_profile.to_string(); + let last_cpu_state = if enabled { + match State::current() { + Ok(state) => Some(state), + Err(e) => { + log::info!("failed to get CPU state, CPU tracking disabled: {:?}", e); + None + } + } + } else { + None + }; Timings { config: bcx.config, @@ -138,7 +148,7 @@ impl<'cfg> Timings<'cfg> { unit_times: Vec::new(), active: HashMap::new(), concurrency: Vec::new(), - last_cpu_state: if enabled { State::current().ok() } else { None }, + last_cpu_state, last_cpu_recording: Instant::now(), cpu_usage: Vec::new(), } @@ -287,7 +297,10 @@ impl<'cfg> Timings<'cfg> { } let current = match State::current() { Ok(s) => s, - Err(_) => return, + Err(e) => { + log::info!("failed to get CPU state: {:?}", e); + return; + } }; let pct_idle = current.idle_since(prev); *prev = current; @@ -323,7 +336,7 @@ impl<'cfg> Timings<'cfg> { let duration = d_as_f64(self.start.elapsed()); let timestamp = self.start_str.replace(&['-', ':'][..], ""); let filename = format!("cargo-timing-{}.html", timestamp); - let mut f = BufWriter::new(File::create(&filename)?); + let mut f = BufWriter::new(paths::create(&filename)?); let roots: Vec<&str> = self .root_targets .iter() diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index fd343f85465..0406a56063d 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -374,9 +374,7 @@ fn install_one( if !source_id.is_path() && fs::rename(src, &dst).is_ok() { continue; } - fs::copy(src, &dst).chain_err(|| { - format_err!("failed to copy `{}` to `{}`", src.display(), dst.display()) - })?; + paths::copy(src, &dst)?; } let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index bb6262c5f2e..fbd2a1637b3 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -9,7 +9,6 @@ use serde::Deserialize; use std::collections::BTreeMap; use std::env; use std::fmt; -use std::fs; use std::io::{BufRead, BufReader, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::Command; @@ -562,10 +561,10 @@ fn write_ignore_file( VersionControl::NoVcs => return Ok("".to_string()), }; - let ignore: String = match fs::File::open(&fp_ignore) { - Err(why) => match why.kind() { - ErrorKind::NotFound => list.format_new(vcs), - _ => return Err(anyhow::format_err!("{}", why)), + let ignore: String = match paths::open(&fp_ignore) { 
+ Err(err) => match err.downcast_ref::() { + Some(io_err) if io_err.kind() == ErrorKind::NotFound => list.format_new(vcs), + _ => return Err(err), }, Ok(file) => list.format_existing(BufReader::new(file), vcs), }; diff --git a/src/cargo/ops/fix.rs b/src/cargo/ops/fix.rs index 1983e9387a8..b5702cb8afb 100644 --- a/src/cargo/ops/fix.rs +++ b/src/cargo/ops/fix.rs @@ -41,7 +41,6 @@ use std::collections::{BTreeSet, HashMap, HashSet}; use std::env; use std::ffi::OsString; -use std::fs; use std::path::{Path, PathBuf}; use std::process::{self, Command, ExitStatus}; use std::str; @@ -55,7 +54,7 @@ use crate::core::Workspace; use crate::ops::{self, CompileOptions}; use crate::util::diagnostic_server::{Message, RustfixDiagnosticServer}; use crate::util::errors::CargoResult; -use crate::util::{self, Config, ProcessBuilder}; +use crate::util::{self, paths, Config, ProcessBuilder}; use crate::util::{existing_vcs_repo, LockServer, LockServerClient}; const FIX_ENV: &str = "__CARGO_FIX_PLZ"; @@ -256,8 +255,7 @@ pub fn fix_maybe_exec_rustc() -> CargoResult { if !output.status.success() { if env::var_os(BROKEN_CODE_ENV).is_none() { for (path, file) in fixes.files.iter() { - fs::write(path, &file.original_code) - .with_context(|| format!("failed to write file `{}`", path))?; + paths::write(path, &file.original_code)?; } } log_failed_fix(&output.stderr)?; @@ -517,7 +515,7 @@ fn rustfix_and_fix( } } let new_code = fixed.finish()?; - fs::write(&file, new_code).with_context(|| format!("failed to write file `{}`", file))?; + paths::write(&file, new_code)?; } Ok(()) diff --git a/src/cargo/ops/vendor.rs b/src/cargo/ops/vendor.rs index b3edd243c29..77792333461 100644 --- a/src/cargo/ops/vendor.rs +++ b/src/cargo/ops/vendor.rs @@ -330,8 +330,7 @@ fn cp_sources( paths::create_dir_all(dst.parent().unwrap())?; - fs::copy(&p, &dst) - .chain_err(|| format!("failed to copy `{}` to `{}`", p.display(), dst.display()))?; + paths::copy(&p, &dst)?; let cksum = Sha256::new().update_path(dst)?.finish_hex(); cksums.insert(relative.to_str().unwrap().replace("\\", "/"), cksum); } diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs index 1d28d36ccfa..f8190a2ddae 100644 --- a/src/cargo/sources/git/utils.rs +++ b/src/cargo/sources/git/utils.rs @@ -10,7 +10,6 @@ use serde::ser; use serde::Serialize; use std::env; use std::fmt; -use std::fs::File; use std::path::{Path, PathBuf}; use std::process::Command; use url::Url; @@ -363,7 +362,7 @@ impl<'a> GitCheckout<'a> { info!("reset {} to {}", self.repo.path().display(), self.revision); let object = self.repo.find_object(self.revision.0, None)?; reset(&self.repo, &object, config)?; - File::create(ok_file)?; + paths::create(ok_file)?; Ok(()) } diff --git a/src/cargo/sources/registry/local.rs b/src/cargo/sources/registry/local.rs index c3ded744a96..d9c1221b04c 100644 --- a/src/cargo/sources/registry/local.rs +++ b/src/cargo/sources/registry/local.rs @@ -85,7 +85,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> { // crate files here never change in that we're not the one writing them, // so it's not our responsibility to synchronize access to them. let path = self.root.join(&crate_file).into_path_unlocked(); - let mut crate_file = File::open(&path)?; + let mut crate_file = paths::open(&path)?; // If we've already got an unpacked version of this crate, then skip the // checksum below as it is in theory already verified. 
diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs index 4c340026cbf..d49b46318e9 100644 --- a/src/cargo/sources/registry/mod.rs +++ b/src/cargo/sources/registry/mod.rs @@ -468,7 +468,8 @@ impl<'cfg> RegistrySource<'cfg> { .create(true) .read(true) .write(true) - .open(&path)?; + .open(&path) + .chain_err(|| format!("failed to open `{}`", path.display()))?; let gz = GzDecoder::new(tarball); let mut tar = Archive::new(gz); diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs index 52bb4ca7e4f..6dc4db00cc0 100644 --- a/src/cargo/sources/registry/remote.rs +++ b/src/cargo/sources/registry/remote.rs @@ -225,7 +225,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { // Create a dummy file to record the mtime for when we updated the // index. - File::create(&path.join(LAST_UPDATED_FILE))?; + paths::create(&path.join(LAST_UPDATED_FILE))?; Ok(()) } @@ -283,7 +283,8 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { .create(true) .read(true) .write(true) - .open(&path)?; + .open(&path) + .chain_err(|| format!("failed to open `{}`", path.display()))?; let meta = dst.metadata()?; if meta.len() > 0 { return Ok(dst); diff --git a/src/cargo/util/config/mod.rs b/src/cargo/util/config/mod.rs index 260d011c851..51cba89f797 100644 --- a/src/cargo/util/config/mod.rs +++ b/src/cargo/util/config/mod.rs @@ -1622,9 +1622,11 @@ pub fn save_credentials(cfg: &Config, token: String, registry: Option) - let contents = toml.to_string(); file.seek(SeekFrom::Start(0))?; - file.write_all(contents.as_bytes())?; + file.write_all(contents.as_bytes()) + .chain_err(|| format!("failed to write to `{}`", file.path().display()))?; file.file().set_len(contents.len() as u64)?; - set_permissions(file.file(), 0o600)?; + set_permissions(file.file(), 0o600) + .chain_err(|| format!("failed to set permissions of `{}`", file.path().display()))?; return Ok(()); diff --git a/src/cargo/util/flock.rs b/src/cargo/util/flock.rs index d31fa2ba8cd..2d732bb7ae3 100644 --- a/src/cargo/util/flock.rs +++ b/src/cargo/util/flock.rs @@ -148,8 +148,7 @@ impl Filesystem { /// Handles errors where other Cargo processes are also attempting to /// concurrently create this directory. pub fn create_dir(&self) -> CargoResult<()> { - paths::create_dir_all(&self.root)?; - Ok(()) + paths::create_dir_all(&self.root) } /// Returns an adaptor that can be used to print the path of this diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs index 7b907128e13..c5a09fdae05 100644 --- a/src/cargo/util/paths.rs +++ b/src/cargo/util/paths.rs @@ -1,6 +1,6 @@ use std::env; use std::ffi::{OsStr, OsString}; -use std::fs::{self, OpenOptions}; +use std::fs::{self, File, OpenOptions}; use std::io; use std::io::prelude::*; use std::iter; @@ -162,6 +162,18 @@ pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> { Ok(()) } +/// Creates a new file. +pub fn create>(path: P) -> CargoResult { + let path = path.as_ref(); + File::create(path).chain_err(|| format!("failed to create file `{}`", path.display())) +} + +/// Opens an existing file. 
+pub fn open>(path: P) -> CargoResult { + let path = path.as_ref(); + File::open(path).chain_err(|| format!("failed to open file `{}`", path.display())) +} + pub fn mtime(path: &Path) -> CargoResult { let meta = fs::metadata(path).chain_err(|| format!("failed to stat `{}`", path.display()))?; Ok(FileTime::from_last_modification_time(&meta)) @@ -265,7 +277,11 @@ pub fn remove_dir_all>(p: P) -> CargoResult<()> { } fn _remove_dir_all(p: &Path) -> CargoResult<()> { - if p.symlink_metadata()?.file_type().is_symlink() { + if p.symlink_metadata() + .chain_err(|| format!("could not get metadata for `{}` to remove", p.display()))? + .file_type() + .is_symlink() + { return remove_file(p); } let entries = p @@ -394,6 +410,14 @@ fn _link_or_copy(src: &Path, dst: &Path) -> CargoResult<()> { Ok(()) } +/// Copies a file from one location to another. +pub fn copy, Q: AsRef>(from: P, to: Q) -> CargoResult { + let from = from.as_ref(); + let to = to.as_ref(); + fs::copy(from, to) + .chain_err(|| format!("failed to copy `{}` to `{}`", from.display(), to.display())) +} + /// Changes the filesystem mtime (and atime if possible) for the given file. /// /// This intentionally does not return an error, as this is sometimes not diff --git a/src/cargo/util/sha256.rs b/src/cargo/util/sha256.rs index f1b80594cfb..36bb2cef6d7 100644 --- a/src/cargo/util/sha256.rs +++ b/src/cargo/util/sha256.rs @@ -1,4 +1,4 @@ -use crate::util::{CargoResult, CargoResultExt}; +use crate::util::{paths, CargoResult, CargoResultExt}; use crypto_hash::{Algorithm, Hasher}; use std::fs::File; use std::io::{self, Read, Write}; @@ -30,7 +30,7 @@ impl Sha256 { pub fn update_path>(&mut self, path: P) -> CargoResult<&mut Sha256> { let path = path.as_ref(); - let file = File::open(path)?; + let file = paths::open(path)?; self.update_file(&file) .chain_err(|| format!("failed to read `{}`", path.display()))?; Ok(self) From 09084a365f612192fa026754b448f5b0be231890 Mon Sep 17 00:00:00 2001 From: Jan-Erik Rediger Date: Tue, 12 May 2020 16:45:55 +0200 Subject: [PATCH 20/24] Expand error message to explain that a string was found With `opt-level = "3"` this previously said: must be an integer, `z`, or `s`, but found: 3 for ... The error message doesn't make that super clear. This should now be a bit more clear. 
Fixes #8234 --- src/cargo/util/toml/mod.rs | 2 +- tests/testsuite/config.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs index c8cceecde87..c7858deffb1 100644 --- a/src/cargo/util/toml/mod.rs +++ b/src/cargo/util/toml/mod.rs @@ -321,7 +321,7 @@ impl<'de> de::Deserialize<'de> for TomlOptLevel { } else { Err(E::custom(format!( "must be an integer, `z`, or `s`, \ - but found: {}", + but found the string: \"{}\"", value ))) } diff --git a/tests/testsuite/config.rs b/tests/testsuite/config.rs index 46dbca69d50..4cbea32f54a 100644 --- a/tests/testsuite/config.rs +++ b/tests/testsuite/config.rs @@ -574,7 +574,7 @@ opt-level = 'foo' error in [..]/.cargo/config: could not load config key `profile.dev.opt-level` Caused by: - must be an integer, `z`, or `s`, but found: foo", + must be an integer, `z`, or `s`, but found the string: \"foo\"", ); let config = ConfigBuilder::new() @@ -587,7 +587,7 @@ Caused by: error in environment variable `CARGO_PROFILE_DEV_OPT_LEVEL`: could not load config key `profile.dev.opt-level` Caused by: - must be an integer, `z`, or `s`, but found: asdf", + must be an integer, `z`, or `s`, but found the string: \"asdf\"", ); } From 7274307af4eecd221c152ad9affd0a7eeaffab44 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Tue, 12 May 2020 08:03:35 -0700 Subject: [PATCH 21/24] Ignore broken console output in some situations. --- src/bin/cargo/cli.rs | 21 ++-- src/bin/cargo/commands/locate_project.rs | 4 +- src/bin/cargo/commands/metadata.rs | 4 +- src/bin/cargo/commands/pkgid.rs | 2 +- src/bin/cargo/commands/read_manifest.rs | 4 +- src/bin/cargo/commands/tree.rs | 2 +- src/bin/cargo/commands/verify_project.rs | 14 +-- src/bin/cargo/commands/version.rs | 7 +- src/cargo/core/compiler/job_queue.rs | 4 +- src/cargo/core/compiler/mod.rs | 4 +- src/cargo/core/compiler/timings.rs | 2 +- src/cargo/core/compiler/unit_graph.rs | 2 +- src/cargo/core/manifest.rs | 2 +- src/cargo/core/resolver/features.rs | 8 +- src/cargo/core/shell.rs | 129 ++++++++++++++--------- src/cargo/lib.rs | 6 -- src/cargo/ops/cargo_install.rs | 6 +- src/cargo/ops/cargo_package.rs | 4 +- src/cargo/ops/registry.rs | 25 +++-- src/cargo/ops/tree/mod.rs | 31 ++++-- src/cargo/ops/vendor.rs | 19 ++-- src/cargo/util/config/mod.rs | 42 ++++++++ tests/testsuite/cargo_command.rs | 23 ++++ tests/testsuite/owner.rs | 13 ++- tests/testsuite/search.rs | 57 ++++++++-- 25 files changed, 294 insertions(+), 141 deletions(-) diff --git a/src/bin/cargo/cli.rs b/src/bin/cargo/cli.rs index 7fc1da57a86..23ed0ca53a3 100644 --- a/src/bin/cargo/cli.rs +++ b/src/bin/cargo/cli.rs @@ -1,5 +1,5 @@ use cargo::core::features; -use cargo::{self, CliResult, Config}; +use cargo::{self, drop_print, drop_println, CliResult, Config}; use clap::{AppSettings, Arg, ArgMatches}; use super::commands; @@ -25,7 +25,8 @@ pub fn main(config: &mut Config) -> CliResult { }; if args.value_of("unstable-features") == Some("help") { - println!( + drop_println!( + config, " Available unstable (nightly-only) flags: @@ -40,7 +41,8 @@ Available unstable (nightly-only) flags: Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" ); if !features::nightly_features_allowed() { - println!( + drop_println!( + config, "\nUnstable flags are only available on the nightly channel \ of Cargo, but this is the `{}` channel.\n\ {}", @@ -48,7 +50,8 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" features::SEE_CHANNELS ); } - println!( + drop_println!( + config, "\nSee 
https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ for more information about these flags." ); @@ -58,7 +61,7 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" let is_verbose = args.occurrences_of("verbose") > 0; if args.is_present("version") { let version = get_version_string(is_verbose); - print!("{}", version); + drop_print!(config, "{}", version); return Ok(()); } @@ -69,19 +72,19 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" } if args.is_present("list") { - println!("Installed Commands:"); + drop_println!(config, "Installed Commands:"); for command in list_commands(config) { match command { CommandInfo::BuiltIn { name, about } => { let summary = about.unwrap_or_default(); let summary = summary.lines().next().unwrap_or(&summary); // display only the first line - println!(" {:<20} {}", name, summary) + drop_println!(config, " {:<20} {}", name, summary); } CommandInfo::External { name, path } => { if is_verbose { - println!(" {:<20} {}", name, path.display()) + drop_println!(config, " {:<20} {}", name, path.display()); } else { - println!(" {}", name) + drop_println!(config, " {}", name); } } } diff --git a/src/bin/cargo/commands/locate_project.rs b/src/bin/cargo/commands/locate_project.rs index df0c424aa4a..5897de108b3 100644 --- a/src/bin/cargo/commands/locate_project.rs +++ b/src/bin/cargo/commands/locate_project.rs @@ -1,6 +1,4 @@ use crate::command_prelude::*; - -use cargo::print_json; use serde::Serialize; pub fn cli() -> App { @@ -30,6 +28,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let location = ProjectLocation { root }; - print_json(&location); + config.shell().print_json(&location); Ok(()) } diff --git a/src/bin/cargo/commands/metadata.rs b/src/bin/cargo/commands/metadata.rs index 1130f074e8f..616df735379 100644 --- a/src/bin/cargo/commands/metadata.rs +++ b/src/bin/cargo/commands/metadata.rs @@ -1,7 +1,5 @@ use crate::command_prelude::*; - use cargo::ops::{self, OutputMetadataOptions}; -use cargo::print_json; pub fn cli() -> App { subcommand("metadata") @@ -54,6 +52,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { }; let result = ops::output_metadata(&ws, &options)?; - print_json(&result); + config.shell().print_json(&result); Ok(()) } diff --git a/src/bin/cargo/commands/pkgid.rs b/src/bin/cargo/commands/pkgid.rs index 57be0d11877..453c95a184d 100644 --- a/src/bin/cargo/commands/pkgid.rs +++ b/src/bin/cargo/commands/pkgid.rs @@ -37,6 +37,6 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let ws = args.workspace(config)?; let spec = args.value_of("spec").or_else(|| args.value_of("package")); let spec = ops::pkgid(&ws, spec)?; - println!("{}", spec); + cargo::drop_println!(config, "{}", spec); Ok(()) } diff --git a/src/bin/cargo/commands/read_manifest.rs b/src/bin/cargo/commands/read_manifest.rs index fe2528b18aa..96cba1e082a 100644 --- a/src/bin/cargo/commands/read_manifest.rs +++ b/src/bin/cargo/commands/read_manifest.rs @@ -1,7 +1,5 @@ use crate::command_prelude::*; -use cargo::print_json; - pub fn cli() -> App { subcommand("read-manifest") .about( @@ -17,6 +15,6 @@ Deprecated, use `cargo metadata --no-deps` instead.\ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let ws = args.workspace(config)?; - print_json(&ws.current()?); + config.shell().print_json(&ws.current()?); Ok(()) } diff --git a/src/bin/cargo/commands/tree.rs b/src/bin/cargo/commands/tree.rs index 9cfff98fe48..7eeac6cb8e7 100644 --- a/src/bin/cargo/commands/tree.rs +++ 
b/src/bin/cargo/commands/tree.rs @@ -102,7 +102,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { if args.is_present("version") { let verbose = args.occurrences_of("verbose") > 0; let version = cli::get_version_string(verbose); - print!("{}", version); + cargo::drop_print!(config, "{}", version); return Ok(()); } let prefix = if args.is_present("no-indent") { diff --git a/src/bin/cargo/commands/verify_project.rs b/src/bin/cargo/commands/verify_project.rs index fe2b42aebed..ea5ac117803 100644 --- a/src/bin/cargo/commands/verify_project.rs +++ b/src/bin/cargo/commands/verify_project.rs @@ -3,8 +3,6 @@ use crate::command_prelude::*; use std::collections::HashMap; use std::process; -use cargo::print_json; - pub fn cli() -> App { subcommand("verify-project") .about("Check correctness of crate manifest") @@ -13,19 +11,15 @@ pub fn cli() -> App { } pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - fn fail(reason: &str, value: &str) -> ! { + if let Err(e) = args.workspace(config) { let mut h = HashMap::new(); - h.insert(reason.to_string(), value.to_string()); - print_json(&h); + h.insert("invalid".to_string(), e.to_string()); + config.shell().print_json(&h); process::exit(1) } - if let Err(e) = args.workspace(config) { - fail("invalid", &e.to_string()) - } - let mut h = HashMap::new(); h.insert("success".to_string(), "true".to_string()); - print_json(&h); + config.shell().print_json(&h); Ok(()) } diff --git a/src/bin/cargo/commands/version.rs b/src/bin/cargo/commands/version.rs index 81c6838e7ab..73172826150 100644 --- a/src/bin/cargo/commands/version.rs +++ b/src/bin/cargo/commands/version.rs @@ -1,6 +1,5 @@ -use crate::command_prelude::*; - use crate::cli; +use crate::command_prelude::*; pub fn cli() -> App { subcommand("version") @@ -8,9 +7,9 @@ pub fn cli() -> App { .arg(opt("quiet", "No output printed to stdout").short("q")) } -pub fn exec(_config: &mut Config, args: &ArgMatches<'_>) -> CliResult { +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { let verbose = args.occurrences_of("verbose") > 0; let version = cli::get_version_string(verbose); - print!("{}", version); + cargo::drop_print!(config, "{}", version); Ok(()) } diff --git a/src/cargo/core/compiler/job_queue.rs b/src/cargo/core/compiler/job_queue.rs index 7a103b13d21..a1f2bba16d3 100644 --- a/src/cargo/core/compiler/job_queue.rs +++ b/src/cargo/core/compiler/job_queue.rs @@ -500,7 +500,7 @@ impl<'cfg> DrainState<'cfg> { plan.update(&module_name, &cmd, &filenames)?; } Message::Stdout(out) => { - cx.bcx.config.shell().stdout_println(out); + writeln!(cx.bcx.config.shell().out(), "{}", out)?; } Message::Stderr(err) => { let mut shell = cx.bcx.config.shell(); @@ -700,7 +700,7 @@ impl<'cfg> DrainState<'cfg> { success: error.is_none(), } .to_json_string(); - cx.bcx.config.shell().stdout_println(msg); + writeln!(cx.bcx.config.shell().out(), "{}", msg)?; } if let Some(e) = error { diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 8a22e4aa050..f662495a1a1 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -146,7 +146,7 @@ fn compile<'cfg>( &unit.target, cx.files().message_cache_path(unit), cx.bcx.build_config.message_format, - cx.bcx.config.shell().supports_color(), + cx.bcx.config.shell().err_supports_color(), ) } else { Work::noop() @@ -1109,7 +1109,7 @@ struct OutputOptions { impl OutputOptions { fn new(cx: &Context<'_, '_>, unit: &Unit) -> OutputOptions { let look_for_metadata_directive = 
cx.rmeta_required(unit); - let color = cx.bcx.config.shell().supports_color(); + let color = cx.bcx.config.shell().err_supports_color(); let path = cx.files().message_cache_path(unit); // Remove old cache, ignore ENOENT, which is the common case. drop(fs::remove_file(&path)); diff --git a/src/cargo/core/compiler/timings.rs b/src/cargo/core/compiler/timings.rs index 20b4c7f600b..8302475defd 100644 --- a/src/cargo/core/compiler/timings.rs +++ b/src/cargo/core/compiler/timings.rs @@ -245,7 +245,7 @@ impl<'cfg> Timings<'cfg> { rmeta_time: unit_time.rmeta_time, } .to_json_string(); - self.config.shell().stdout_println(msg); + crate::drop_println!(self.config, "{}", msg); } self.unit_times.push(unit_time); } diff --git a/src/cargo/core/compiler/unit_graph.rs b/src/cargo/core/compiler/unit_graph.rs index 350c040abad..d242f6b0497 100644 --- a/src/cargo/core/compiler/unit_graph.rs +++ b/src/cargo/core/compiler/unit_graph.rs @@ -113,6 +113,6 @@ pub fn emit_serialized_unit_graph(root_units: &[Unit], unit_graph: &UnitGraph) - let stdout = std::io::stdout(); let mut lock = stdout.lock(); serde_json::to_writer(&mut lock, &s)?; - writeln!(lock)?; + drop(writeln!(lock)); Ok(()) } diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index 994603aca57..221b769295b 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -500,7 +500,7 @@ impl Manifest { pub fn print_teapot(&self, config: &Config) { if let Some(teapot) = self.im_a_teapot { if config.cli_unstable().print_im_a_teapot { - println!("im-a-teapot = {}", teapot); + crate::drop_println!(config, "im-a-teapot = {}", teapot); } } } diff --git a/src/cargo/core/resolver/features.rs b/src/cargo/core/resolver/features.rs index cea01d113a4..8f71007f610 100644 --- a/src/cargo/core/resolver/features.rs +++ b/src/cargo/core/resolver/features.rs @@ -573,9 +573,13 @@ impl<'a, 'cfg> FeatureResolver<'a, 'cfg> { for ((pkg_id, dep_kind), features) in &self.activated_features { let r_features = self.resolve.features(*pkg_id); if !r_features.iter().eq(features.iter()) { - eprintln!( + crate::drop_eprintln!( + self.ws.config(), "{}/{:?} features mismatch\nresolve: {:?}\nnew: {:?}\n", - pkg_id, dep_kind, r_features, features + pkg_id, + dep_kind, + r_features, + features ); found = true; } diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index eae8a70cc8d..a3e72b5261e 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -14,13 +14,13 @@ pub enum Verbosity { Quiet, } -/// An abstraction around a `Write`able object that remembers preferences for output verbosity and -/// color. +/// An abstraction around console output that remembers preferences for output +/// verbosity and color. pub struct Shell { - /// the `Write`able object, either with or without color support (represented by different enum - /// variants) - err: ShellOut, - /// How verbose messages should be + /// Wrapper around stdout/stderr. This helps with supporting sending + /// output to a memory buffer which is useful for tests. + output: ShellOut, + /// How verbose messages should be. verbosity: Verbosity, /// Flag that indicates the current line needs to be cleared before /// printing. Used when a progress bar is currently displayed. 
@@ -29,7 +29,7 @@ pub struct Shell { impl fmt::Debug for Shell { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.err { + match self.output { ShellOut::Write(_) => f .debug_struct("Shell") .field("verbosity", &self.verbosity) @@ -49,8 +49,9 @@ enum ShellOut { Write(Box), /// Color-enabled stdio, with information on whether color should be used Stream { - stream: StandardStream, - tty: bool, + stdout: StandardStream, + stderr: StandardStream, + stderr_tty: bool, color_choice: ColorChoice, }, } @@ -70,11 +71,13 @@ impl Shell { /// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose /// output. pub fn new() -> Shell { + let auto = ColorChoice::CargoAuto.to_termcolor_color_choice(); Shell { - err: ShellOut::Stream { - stream: StandardStream::stderr(ColorChoice::CargoAuto.to_termcolor_color_choice()), + output: ShellOut::Stream { + stdout: StandardStream::stdout(auto), + stderr: StandardStream::stderr(auto), color_choice: ColorChoice::CargoAuto, - tty: atty::is(atty::Stream::Stderr), + stderr_tty: atty::is(atty::Stream::Stderr), }, verbosity: Verbosity::Verbose, needs_clear: false, @@ -84,7 +87,7 @@ impl Shell { /// Creates a shell from a plain writable object, with no color, and max verbosity. pub fn from_write(out: Box) -> Shell { Shell { - err: ShellOut::Write(out), + output: ShellOut::Write(out), verbosity: Verbosity::Verbose, needs_clear: false, } @@ -105,18 +108,12 @@ impl Shell { if self.needs_clear { self.err_erase_line(); } - self.err.print(status, message, color, justified) + self.output + .message_stderr(status, message, color, justified) } } } - pub fn stdout_println(&mut self, message: impl fmt::Display) { - if self.needs_clear { - self.err_erase_line(); - } - println!("{}", message); - } - /// Sets whether the next print should clear the current line. pub fn set_needs_clear(&mut self, needs_clear: bool) { self.needs_clear = needs_clear; @@ -129,31 +126,44 @@ impl Shell { /// Returns the width of the terminal in spaces, if any. pub fn err_width(&self) -> Option { - match self.err { - ShellOut::Stream { tty: true, .. } => imp::stderr_width(), + match self.output { + ShellOut::Stream { + stderr_tty: true, .. + } => imp::stderr_width(), _ => None, } } /// Returns `true` if stderr is a tty. pub fn is_err_tty(&self) -> bool { - match self.err { - ShellOut::Stream { tty, .. } => tty, + match self.output { + ShellOut::Stream { stderr_tty, .. } => stderr_tty, _ => false, } } - /// Gets a reference to the underlying writer. + /// Gets a reference to the underlying stdout writer. + pub fn out(&mut self) -> &mut dyn Write { + if self.needs_clear { + self.err_erase_line(); + } + self.output.stdout() + } + + /// Gets a reference to the underlying stderr writer. pub fn err(&mut self) -> &mut dyn Write { if self.needs_clear { self.err_erase_line(); } - self.err.as_write() + self.output.stderr() } /// Erase from cursor to end of line. pub fn err_erase_line(&mut self) { - if let ShellOut::Stream { tty: true, .. } = self.err { + if let ShellOut::Stream { + stderr_tty: true, .. + } = self.output + { imp::err_erase_line(self); self.needs_clear = false; } @@ -216,7 +226,8 @@ impl Shell { if self.needs_clear { self.err_erase_line(); } - self.err.print(&"error", Some(&message), Red, false) + self.output + .message_stderr(&"error", Some(&message), Red, false) } /// Prints an amber 'warning' message. @@ -245,10 +256,11 @@ impl Shell { /// Updates the color choice (always, never, or auto) from a string.. 
pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> { if let ShellOut::Stream { - ref mut stream, + ref mut stdout, + ref mut stderr, ref mut color_choice, .. - } = self.err + } = self.output { let cfg = match color { Some("always") => ColorChoice::Always, @@ -263,7 +275,9 @@ impl Shell { ), }; *color_choice = cfg; - *stream = StandardStream::stderr(cfg.to_termcolor_color_choice()); + let choice = cfg.to_termcolor_color_choice(); + *stdout = StandardStream::stdout(choice); + *stderr = StandardStream::stderr(choice); } Ok(()) } @@ -273,17 +287,17 @@ impl Shell { /// If we are not using a color stream, this will always return `Never`, even if the color /// choice has been set to something else. pub fn color_choice(&self) -> ColorChoice { - match self.err { + match self.output { ShellOut::Stream { color_choice, .. } => color_choice, ShellOut::Write(_) => ColorChoice::Never, } } /// Whether the shell supports color. - pub fn supports_color(&self) -> bool { - match &self.err { + pub fn err_supports_color(&self) -> bool { + match &self.output { ShellOut::Write(_) => false, - ShellOut::Stream { stream, .. } => stream.supports_color(), + ShellOut::Stream { stderr, .. } => stderr.supports_color(), } } @@ -302,6 +316,11 @@ impl Shell { self.err().write_all(message)?; Ok(()) } + + pub fn print_json(&mut self, obj: &T) { + let encoded = serde_json::to_string(&obj).unwrap(); + drop(writeln!(self.out(), "{}", encoded)); + } } impl Default for Shell { @@ -314,7 +333,7 @@ impl ShellOut { /// Prints out a message with a status. The status comes first, and is bold plus the given /// color. The status can be justified, in which case the max width that will right align is /// 12 chars. - fn print( + fn message_stderr( &mut self, status: &dyn fmt::Display, message: Option<&dyn fmt::Display>, @@ -322,20 +341,20 @@ impl ShellOut { justified: bool, ) -> CargoResult<()> { match *self { - ShellOut::Stream { ref mut stream, .. } => { - stream.reset()?; - stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?; + ShellOut::Stream { ref mut stderr, .. } => { + stderr.reset()?; + stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?; if justified { - write!(stream, "{:>12}", status)?; + write!(stderr, "{:>12}", status)?; } else { - write!(stream, "{}", status)?; - stream.set_color(ColorSpec::new().set_bold(true))?; - write!(stream, ":")?; + write!(stderr, "{}", status)?; + stderr.set_color(ColorSpec::new().set_bold(true))?; + write!(stderr, ":")?; } - stream.reset()?; + stderr.reset()?; match message { - Some(message) => writeln!(stream, " {}", message)?, - None => write!(stream, " ")?, + Some(message) => writeln!(stderr, " {}", message)?, + None => write!(stderr, " ")?, } } ShellOut::Write(ref mut w) => { @@ -353,10 +372,18 @@ impl ShellOut { Ok(()) } - /// Gets this object as a `io::Write`. - fn as_write(&mut self) -> &mut dyn Write { + /// Gets stdout as a `io::Write`. + fn stdout(&mut self) -> &mut dyn Write { + match *self { + ShellOut::Stream { ref mut stdout, .. } => stdout, + ShellOut::Write(ref mut w) => w, + } + } + + /// Gets stderr as a `io::Write`. + fn stderr(&mut self) -> &mut dyn Write { match *self { - ShellOut::Stream { ref mut stream, .. } => stream, + ShellOut::Stream { ref mut stderr, .. } => stderr, ShellOut::Write(ref mut w) => w, } } @@ -404,7 +431,7 @@ mod imp { // This is the "EL - Erase in Line" sequence. It clears from the cursor // to the end of line. 
// https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences - let _ = shell.err.as_write().write_all(b"\x1B[K"); + let _ = shell.output.stderr().write_all(b"\x1B[K"); } } diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index b386ee7f558..19cbd49c101 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -34,7 +34,6 @@ use crate::core::shell::Verbosity::Verbose; use crate::core::Shell; use anyhow::Error; use log::debug; -use serde::ser; use std::fmt; pub use crate::util::errors::{InternalError, VerboseError}; @@ -93,11 +92,6 @@ impl fmt::Display for VersionInfo { } } -pub fn print_json(obj: &T) { - let encoded = serde_json::to_string(&obj).unwrap(); - println!("{}", encoded); -} - pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! { debug!("exit_with_error; err={:?}", err); if let Some(ref err) = err.error { diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index 0406a56063d..e450f82f3de 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -9,11 +9,11 @@ use tempfile::Builder as TempFileBuilder; use crate::core::compiler::Freshness; use crate::core::compiler::{CompileKind, DefaultExecutor, Executor}; use crate::core::{Edition, Package, PackageId, Source, SourceId, Workspace}; -use crate::ops; use crate::ops::common_for_install_and_uninstall::*; use crate::sources::{GitSource, SourceConfigMap}; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::{paths, Config, Filesystem}; +use crate::{drop_println, ops}; struct Transaction { bins: Vec, @@ -531,9 +531,9 @@ pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { let root = resolve_root(dst, config)?; let tracker = InstallTracker::load(config, &root)?; for (k, v) in tracker.all_installed_bins() { - println!("{}:", k); + drop_println!(config, "{}:", k); for bin in v { - println!(" {}", bin); + drop_println!(config, " {}", bin); } } Ok(()) diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index b84e6b3caf1..4b59afdae1e 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -15,12 +15,12 @@ use tar::{Archive, Builder, EntryType, Header}; use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; use crate::core::{Feature, Shell, Verbosity, Workspace}; use crate::core::{Package, PackageId, PackageSet, Resolve, Source, SourceId}; -use crate::ops; use crate::sources::PathSource; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::paths; use crate::util::toml::TomlManifest; use crate::util::{self, restricted_names, Config, FileLock}; +use crate::{drop_println, ops}; pub struct PackageOpts<'cfg> { pub config: &'cfg Config, @@ -102,7 +102,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult