rustbuild: Fix recompilation of stage0 tools dir #41227

Merged (1 commit) on Apr 13, 2017
33 changes: 24 additions & 9 deletions src/bootstrap/compile.rs
@@ -275,6 +275,7 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
cargo.env("CFG_DEFAULT_AR", s);
}
build.run(&mut cargo);
update_mtime(build, &librustc_stamp(build, compiler, target));
}

/// Same as `std_link`, only for librustc
@@ -305,6 +306,12 @@ fn libtest_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
build.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp")
}

/// Cargo's output path for librustc in a given stage, compiled by a particular
/// compiler for the specified target.
fn librustc_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
build.cargo_out(compiler, Mode::Librustc, target).join(".librustc.stamp")
}

fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
let out = output(Command::new(compiler)
.arg(format!("-print-file-name={}", file)));
@@ -407,6 +414,23 @@ fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
}
}

/// Clears the tool build directory if it is out of date.
///
/// Tools are linked against the libraries produced for `mode` (libstd,
/// libtest, or librustc); if that library's stamp is newer than the tool
/// output directory for `stage`/`target`, the directory is cleared so the
/// tools are rebuilt against the fresh libraries.
pub fn maybe_clean_tools(build: &Build, stage: u32, target: &str, mode: Mode) {
let compiler = Compiler::new(stage, &build.config.build);

let stamp = match mode {
Mode::Libstd => libstd_stamp(build, &compiler, target),
Mode::Libtest => libtest_stamp(build, &compiler, target),
Mode::Librustc => librustc_stamp(build, &compiler, target),
_ => panic!(),
};
let out_dir = build.cargo_out(&compiler, Mode::Tool, target);
build.clear_if_dirty(&out_dir, &stamp);
}

/// Build a tool in `src/tools`
///
/// This will build the specified tool with the specified `host` compiler in
@@ -416,15 +440,6 @@ pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) {

let compiler = Compiler::new(stage, &build.config.build);

// FIXME: need to clear out previous tool and ideally deps, may require
// isolating output directories or require a pseudo shim step to
// clear out all the info.
//
// Maybe when libstd is compiled it should clear out the rustc of the
// corresponding stage?
// let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
// build.clear_if_dirty(&out_dir, &libstd_stamp(build, stage, &host, target));

let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
let mut dir = build.src.join(tool);
if !dir.exists() {
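The mechanism above is stamp-based: `update_mtime` touches `.librustc.stamp` whenever librustc is rebuilt, and `maybe_clean_tools` hands that stamp plus the `Mode::Tool` output directory to `clear_if_dirty`. The real `clear_if_dirty` lives elsewhere in rustbuild; the snippet below is only a minimal sketch of the idea, with a hypothetical `.last-clean` marker file and demo paths standing in for rustbuild's actual bookkeeping.

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Sketch of the `clear_if_dirty` idea (not rustbuild's real implementation):
/// wipe `dir` whenever `stamp` is newer than the marker left behind the last
/// time `dir` was known to be clean.
fn clear_if_dirty_sketch(dir: &Path, stamp: &Path) -> io::Result<()> {
    let marker = dir.join(".last-clean"); // hypothetical marker file
    let dirty = match fs::metadata(&marker) {
        Ok(meta) => fs::metadata(stamp)?.modified()? > meta.modified()?,
        Err(_) => true, // never cleaned before: assume dirty
    };
    if dirty {
        let _ = fs::remove_dir_all(dir); // ignore "not found"
        fs::create_dir_all(dir)?;
        fs::File::create(&marker)?; // record "clean as of now"
    }
    Ok(())
}

fn main() -> io::Result<()> {
    // Hypothetical demo layout; rustbuild's real paths come from `cargo_out`.
    fs::create_dir_all("demo/stage0-tools")?;
    fs::File::create("demo/.librustc.stamp")?;
    clear_if_dirty_sketch(Path::new("demo/stage0-tools"),
                          Path::new("demo/.librustc.stamp"))
}
```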
176 changes: 143 additions & 33 deletions src/bootstrap/step.rs
@@ -26,7 +26,7 @@
//! along with the actual implementation elsewhere. You can find more comments
//! about how to define rules themselves below.

use std::collections::{BTreeMap, HashSet};
use std::collections::{BTreeMap, HashSet, HashMap};
use std::mem;

use check::{self, TestKind};
@@ -533,34 +533,44 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
//
// Tools used during the build system but not shipped
rules.build("tool-rustbook", "src/tools/rustbook")
.dep(|s| s.name("librustc"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("librustc-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "rustbook"));
rules.build("tool-error-index", "src/tools/error_index_generator")
.dep(|s| s.name("librustc"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("librustc-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "error_index_generator"));
rules.build("tool-tidy", "src/tools/tidy")
.dep(|s| s.name("libstd"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "tidy"));
rules.build("tool-linkchecker", "src/tools/linkchecker")
.dep(|s| s.name("libstd"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "linkchecker"));
rules.build("tool-cargotest", "src/tools/cargotest")
.dep(|s| s.name("libstd"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "cargotest"));
rules.build("tool-compiletest", "src/tools/compiletest")
.dep(|s| s.name("libtest"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libtest-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "compiletest"));
rules.build("tool-build-manifest", "src/tools/build-manifest")
.dep(|s| s.name("libstd"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
rules.build("tool-qemu-test-server", "src/tools/qemu-test-server")
.dep(|s| s.name("libstd"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-server"));
rules.build("tool-qemu-test-client", "src/tools/qemu-test-client")
.dep(|s| s.name("libstd"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-client"));
rules.build("tool-cargo", "cargo")
.dep(|s| s.name("libstd"))
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.dep(|s| s.stage(0).host(s.target).name("openssl"))
.dep(move |s| {
// Cargo depends on procedural macros, which requires a full host
@@ -572,7 +582,8 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
.run(move |s| compile::tool(build, s.stage, s.target, "cargo"));
rules.build("tool-rls", "rls")
.host(true)
.dep(|s| s.name("librustc"))
.dep(|s| s.name("librustc-tool"))
.dep(|s| s.stage(0).host(s.target).name("openssl"))
.dep(move |s| {
// rls, like cargo, uses procedural macros
s.name("librustc-link")
@@ -581,6 +592,25 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
})
.run(move |s| compile::tool(build, s.stage, s.target, "rls"));

// "pseudo rule" which represents completely cleaning out the tools dir in
// one stage. This needs to happen whenever a dependency changes (e.g.
// libstd, libtest, librustc) and all of the tool compilations above will
// be sequenced after this rule.
rules.build("maybe-clean-tools", "path/to/nowhere")
.after("librustc-tool")
.after("libtest-tool")
.after("libstd-tool");

rules.build("librustc-tool", "path/to/nowhere")
.dep(|s| s.name("librustc"))
.run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Librustc));
rules.build("libtest-tool", "path/to/nowhere")
.dep(|s| s.name("libtest"))
.run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libtest));
rules.build("libstd-tool", "path/to/nowhere")
.dep(|s| s.name("libstd"))
.run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libstd));

// ========================================================================
// Documentation targets
rules.doc("doc-book", "src/doc/book")
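The `.after(...)` calls on the `maybe-clean-tools` pseudo rule above declare order-only constraints: they never pull `librustc-tool`, `libtest-tool`, or `libstd-tool` into the build, but whenever one of those rules is already in the dependency graph, the pseudo rule (and therefore every tool compilation that depends on it) is sequenced after it. A minimal sketch of that conditional-edge decision, using plain strings in place of rustbuild's `Step` and `Rule` types:

```rust
use std::collections::{HashMap, HashSet};

/// Order-only edges: `node` is ordered after each entry of `after`, but an
/// edge is only drawn when that entry is already present in the graph, so a
/// rule that never made it into the graph (e.g. librustc in a libstd-only
/// build) is simply ignored.
fn add_order_only_edges(
    node: &str,
    after: &[&str],
    nodes: &HashMap<&str, usize>,
    edges: &mut HashMap<usize, HashSet<usize>>,
) {
    let from = match nodes.get(node) {
        Some(&idx) => idx,
        None => return,
    };
    for name in after {
        if let Some(&to) = nodes.get(*name) {
            edges.entry(from).or_insert_with(HashSet::new).insert(to);
        }
    }
}

fn main() {
    // Toy graph for a libstd-only tool build; librustc-tool and libtest-tool
    // are absent.
    let mut nodes = HashMap::new();
    nodes.insert("maybe-clean-tools", 0usize);
    nodes.insert("libstd-tool", 1);
    nodes.insert("tool-tidy", 2);
    let mut edges: HashMap<usize, HashSet<usize>> = HashMap::new();

    add_order_only_edges(
        "maybe-clean-tools",
        &["librustc-tool", "libtest-tool", "libstd-tool"],
        &nodes,
        &mut edges,
    );

    // Only the edge to libstd-tool was drawn; the other two names were ignored.
    println!("{:?}", edges); // {0: {1}}
}
```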
@@ -828,6 +858,11 @@ struct Rule<'a> {
/// Whether this rule is only for the build triple, not anything in hosts or
/// targets.
only_build: bool,

/// A list of "order only" dependencies. This rule does not actually
/// depend on these rules, but if they show up in the dependency graph then
/// this rule must be executed after all these rules.
after: Vec<&'a str>,
}

#[derive(PartialEq)]
@@ -851,6 +886,7 @@ impl<'a> Rule<'a> {
host: false,
only_host_build: false,
only_build: false,
after: Vec::new(),
}
}
}
@@ -870,6 +906,11 @@ impl<'a, 'b> RuleBuilder<'a, 'b> {
self
}

fn after(&mut self, step: &'a str) -> &mut Self {
self.rule.after.push(step);
self
}

fn run<F>(&mut self, f: F) -> &mut Self
where F: Fn(&Step<'a>) + 'a,
{
@@ -1153,31 +1194,52 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
/// From the top level targets `steps` generate a topological ordering of
/// all steps needed to run those steps.
fn expand(&self, steps: &[Step<'a>]) -> Vec<Step<'a>> {
// First up build a graph of steps and their dependencies. The `nodes`
// map is a map from step to a unique number. The `edges` map is a
// map from these unique numbers to a list of other numbers,
// representing dependencies.
let mut nodes = HashMap::new();
nodes.insert(Step::noop(), 0);
let mut edges = HashMap::new();
edges.insert(0, HashSet::new());
for step in steps {
self.build_graph(step.clone(), &mut nodes, &mut edges);
}

// Now that we've built up the actual dependency graph, draw additional
// edges to satisfy each rule's `after` dependencies.
self.satisfy_after_deps(&nodes, &mut edges);

// And finally, perform a topological sort to return a list of steps to
// execute.
let mut order = Vec::new();
let mut added = HashSet::new();
added.insert(Step::noop());
for step in steps.iter().cloned() {
self.fill(step, &mut order, &mut added);
let mut visited = HashSet::new();
visited.insert(0);
let idx_to_node = nodes.iter().map(|p| (*p.1, p.0)).collect::<HashMap<_, _>>();
for idx in nodes.values() {
self.topo_sort(*idx, &idx_to_node, &edges, &mut visited, &mut order);
}
return order
}

/// Performs topological sort of dependencies rooted at the `step`
/// specified, pushing all results onto the `order` vector provided.
/// Builds the dependency graph rooted at `step`.
///
/// In other words, when this method returns, the `order` vector will
/// contain a list of steps which if executed in order will eventually
/// complete the `step` specified as well.
///
/// The `added` set specified here is the set of steps that are already
/// present in `order` (and hence don't need to be added again).
fn fill(&self,
step: Step<'a>,
order: &mut Vec<Step<'a>>,
added: &mut HashSet<Step<'a>>) {
if !added.insert(step.clone()) {
return
/// The `nodes` and `edges` maps are filled out according to the rule
/// described by `step.name`.
fn build_graph(&self,
step: Step<'a>,
nodes: &mut HashMap<Step<'a>, usize>,
edges: &mut HashMap<usize, HashSet<usize>>) -> usize {
use std::collections::hash_map::Entry;

let idx = nodes.len();
match nodes.entry(step.clone()) {
Entry::Vacant(e) => { e.insert(idx); }
Entry::Occupied(e) => return *e.get(),
}

let mut deps = Vec::new();
for dep in self.rules[step.name].deps.iter() {
let dep = dep(&step);
if dep.name.starts_with("default:") {
@@ -1189,13 +1251,61 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
let host = self.build.config.host.iter().any(|h| h == dep.target);
let rules = self.rules.values().filter(|r| r.default);
for rule in rules.filter(|r| r.kind == kind && (!r.host || host)) {
self.fill(dep.name(rule.name), order, added);
deps.push(self.build_graph(dep.name(rule.name), nodes, edges));
}
} else {
self.fill(dep, order, added);
deps.push(self.build_graph(dep, nodes, edges));
}
}

edges.entry(idx).or_insert(HashSet::new()).extend(deps);
return idx
}

/// Given a dependency graph with a finished list of `nodes`, fill out more
/// dependency `edges`.
///
/// This is the step which satisfies all of the `after` dependencies listed
/// in `Rule` above.
fn satisfy_after_deps(&self,
nodes: &HashMap<Step<'a>, usize>,
edges: &mut HashMap<usize, HashSet<usize>>) {
// Reverse map from the name of a step to the node indices that it
// appears at.
let mut name_to_idx = HashMap::new();
for (step, &idx) in nodes {
name_to_idx.entry(step.name).or_insert(Vec::new()).push(idx);
}

for (step, idx) in nodes {
if *step == Step::noop() {
continue
}
for after in self.rules[step.name].after.iter() {
// This is the critical piece of an `after` dependency: if the
// dependency isn't actually in our graph then no edge is drawn;
// we only draw edges to nodes that are already present.
if let Some(idxs) = name_to_idx.get(after) {
edges.get_mut(idx).unwrap()
.extend(idxs.iter().cloned());
}
}
}
order.push(step);
}

fn topo_sort(&self,
cur: usize,
nodes: &HashMap<usize, &Step<'a>>,
edges: &HashMap<usize, HashSet<usize>>,
visited: &mut HashSet<usize>,
order: &mut Vec<Step<'a>>) {
if !visited.insert(cur) {
return
}
for dep in edges[&cur].iter() {
self.topo_sort(*dep, nodes, edges, visited, order);
}
order.push(nodes[&cur].clone());
}
}

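Taken together, the new `expand` works in three phases: `build_graph` assigns every `Step` an index and records hard dependency edges, `satisfy_after_deps` adds the order-only edges, and `topo_sort` emits a depth-first postorder so each step lands after its dependencies. Below is a compact sketch of that shape on a toy graph; the names and indices are illustrative and none of rustbuild's real types are used.

```rust
use std::collections::{HashMap, HashSet};

// Depth-first postorder walk mirroring the shape of `topo_sort` above: a node
// is pushed onto `order` only after all of its dependencies have been pushed.
// Like the original, this sketch assumes the graph is acyclic.
fn topo_sort(
    cur: usize,
    edges: &HashMap<usize, HashSet<usize>>,
    visited: &mut HashSet<usize>,
    order: &mut Vec<usize>,
) {
    if !visited.insert(cur) {
        return;
    }
    if let Some(deps) = edges.get(&cur) {
        for &dep in deps {
            topo_sort(dep, edges, visited, order);
        }
    }
    order.push(cur);
}

fn main() {
    // Toy version of the three phases in `expand`, with plain names standing
    // in for rustbuild's `Step` type; indices play the role of the `nodes` map.
    let names = ["libstd", "libstd-tool", "maybe-clean-tools", "tool-tidy"];

    // Phase 1: hard dependency edges (edges[x] = what x depends on).
    let mut edges: HashMap<usize, HashSet<usize>> = HashMap::new();
    edges.insert(1, [0usize].iter().cloned().collect()); // libstd-tool -> libstd
    edges.insert(3, [2usize, 1].iter().cloned().collect()); // tool-tidy -> barrier, libstd-tool

    // Phase 2: order-only edge, drawn only because libstd-tool is in the graph.
    edges.entry(2).or_insert_with(HashSet::new).insert(1);

    // Phase 3: depth-first topological sort starting from every node.
    let mut order = Vec::new();
    let mut visited = HashSet::new();
    for idx in 0..names.len() {
        topo_sort(idx, &edges, &mut visited, &mut order);
    }

    // Dependencies always come first:
    // libstd, libstd-tool, maybe-clean-tools, tool-tidy
    for idx in order {
        println!("{}", names[idx]);
    }
}
```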