Commit 8c7c0b4

auto merge of #9034 : catamorphism/rust/rustpkg-workcache, r=metajack
r? @metajack or @brson - This pull request makes rustpkg use the workcache library to avoid recompilation.
2 parents af259a6 + a8194ed · commit 8c7c0b4

15 files changed: +792 -538 lines

src/libextra/workcache.rs

+146 -20
@@ -12,17 +12,15 @@
 
 use digest::Digest;
 use json;
+use json::ToJson;
 use sha1::Sha1;
 use serialize::{Encoder, Encodable, Decoder, Decodable};
 use arc::{Arc,RWArc};
 use treemap::TreeMap;
-
 use std::cell::Cell;
 use std::comm::{PortOne, oneshot};
 use std::either::{Either, Left, Right};
-use std::io;
-use std::run;
-use std::task;
+use std::{io, os, task};
 
 /**
  *
@@ -107,11 +105,27 @@ impl WorkKey {
     }
 }
 
+// FIXME #8883: The key should be a WorkKey and not a ~str.
+// This is working around some JSON weirdness.
+#[deriving(Clone, Eq, Encodable, Decodable)]
+struct WorkMap(TreeMap<~str, KindMap>);
+
 #[deriving(Clone, Eq, Encodable, Decodable)]
-struct WorkMap(TreeMap<WorkKey, ~str>);
+struct KindMap(TreeMap<~str, ~str>);
 
 impl WorkMap {
     fn new() -> WorkMap { WorkMap(TreeMap::new()) }
+
+    fn insert_work_key(&mut self, k: WorkKey, val: ~str) {
+        let WorkKey { kind, name } = k;
+        match self.find_mut(&name) {
+            Some(&KindMap(ref mut m)) => { m.insert(kind, val); return; }
+            None => ()
+        }
+        let mut new_map = TreeMap::new();
+        new_map.insert(kind, val);
+        self.insert(name, KindMap(new_map));
+    }
 }
 
 struct Database {
@@ -123,11 +137,15 @@ struct Database {
 impl Database {
 
     pub fn new(p: Path) -> Database {
-        Database {
+        let mut rslt = Database {
             db_filename: p,
             db_cache: TreeMap::new(),
             db_dirty: false
+        };
+        if os::path_exists(&rslt.db_filename) {
+            rslt.load();
         }
+        rslt
     }
 
     pub fn prepare(&self,
@@ -154,6 +172,41 @@ impl Database {
         self.db_cache.insert(k,v);
         self.db_dirty = true
     }
+
+    // FIXME #4330: This should have &mut self and should set self.db_dirty to false.
+    fn save(&self) {
+        let f = io::file_writer(&self.db_filename, [io::Create, io::Truncate]).unwrap();
+        self.db_cache.to_json().to_pretty_writer(f);
+    }
+
+    fn load(&mut self) {
+        assert!(!self.db_dirty);
+        assert!(os::path_exists(&self.db_filename));
+        let f = io::file_reader(&self.db_filename);
+        match f {
+            Err(e) => fail!("Couldn't load workcache database %s: %s",
+                            self.db_filename.to_str(), e.to_str()),
+            Ok(r) =>
+                match json::from_reader(r) {
+                    Err(e) => fail!("Couldn't parse workcache database (from file %s): %s",
+                                    self.db_filename.to_str(), e.to_str()),
+                    Ok(r) => {
+                        let mut decoder = json::Decoder(r);
+                        self.db_cache = Decodable::decode(&mut decoder);
+                    }
+                }
+        }
+    }
+}
+
+// FIXME #4330: use &mut self here
+#[unsafe_destructor]
+impl Drop for Database {
+    fn drop(&self) {
+        if self.db_dirty {
+            self.save();
+        }
+    }
 }
 
 struct Logger {
@@ -172,12 +225,20 @@ impl Logger {
     }
 }
 
+type FreshnessMap = TreeMap<~str,extern fn(&str,&str)->bool>;
+
 #[deriving(Clone)]
 struct Context {
     db: RWArc<Database>,
     logger: RWArc<Logger>,
     cfg: Arc<json::Object>,
-    freshness: Arc<TreeMap<~str,extern fn(&str,&str)->bool>>
+    /// Map from kinds (source, exe, url, etc.) to a freshness function.
+    /// The freshness function takes a name (e.g. file path) and value
+    /// (e.g. hash of file contents) and determines whether it's up-to-date.
+    /// For example, in the file case, this would read the file off disk,
+    /// hash it, and return the result of comparing the given hash and the
+    /// read hash for equality.
+    freshness: Arc<FreshnessMap>
 }
 
 struct Prep<'self> {
@@ -205,6 +266,7 @@ fn json_encode<T:Encodable<json::Encoder>>(t: &T) -> ~str {
 
 // FIXME(#5121)
 fn json_decode<T:Decodable<json::Decoder>>(s: &str) -> T {
+    debug!("json decoding: %s", s);
     do io::with_str_reader(s) |rdr| {
         let j = json::from_reader(rdr).unwrap();
         let mut decoder = json::Decoder(j);
@@ -230,11 +292,18 @@ impl Context {
     pub fn new(db: RWArc<Database>,
               lg: RWArc<Logger>,
               cfg: Arc<json::Object>) -> Context {
+        Context::new_with_freshness(db, lg, cfg, Arc::new(TreeMap::new()))
+    }
+
+    pub fn new_with_freshness(db: RWArc<Database>,
+                              lg: RWArc<Logger>,
+                              cfg: Arc<json::Object>,
+                              freshness: Arc<FreshnessMap>) -> Context {
         Context {
             db: db,
             logger: lg,
             cfg: cfg,
-            freshness: Arc::new(TreeMap::new())
+            freshness: freshness
         }
     }
 
@@ -249,6 +318,36 @@ impl Context {
 
 }
 
+impl Exec {
+    pub fn discover_input(&mut self,
+                          dependency_kind: &str,
+                          dependency_name: &str,
+                          dependency_val: &str) {
+        debug!("Discovering input %s %s %s", dependency_kind, dependency_name, dependency_val);
+        self.discovered_inputs.insert_work_key(WorkKey::new(dependency_kind, dependency_name),
+                                               dependency_val.to_owned());
+    }
+    pub fn discover_output(&mut self,
+                           dependency_kind: &str,
+                           dependency_name: &str,
+                           dependency_val: &str) {
+        debug!("Discovering output %s %s %s", dependency_kind, dependency_name, dependency_val);
+        self.discovered_outputs.insert_work_key(WorkKey::new(dependency_kind, dependency_name),
+                                                dependency_val.to_owned());
+    }
+
+    // returns pairs of (kind, name)
+    pub fn lookup_discovered_inputs(&self) -> ~[(~str, ~str)] {
+        let mut rs = ~[];
+        for (k, v) in self.discovered_inputs.iter() {
+            for (k1, _) in v.iter() {
+                rs.push((k1.clone(), k.clone()));
+            }
+        }
+        rs
+    }
+}
+
 impl<'self> Prep<'self> {
     fn new(ctxt: &'self Context, fn_name: &'self str) -> Prep<'self> {
         Prep {
@@ -257,18 +356,30 @@ impl<'self> Prep<'self> {
             declared_inputs: WorkMap::new()
         }
     }
+
+    pub fn lookup_declared_inputs(&self) -> ~[~str] {
+        let mut rs = ~[];
+        for (_, v) in self.declared_inputs.iter() {
+            for (inp, _) in v.iter() {
+                rs.push(inp.clone());
+            }
+        }
+        rs
+    }
 }
 
 impl<'self> Prep<'self> {
-    fn declare_input(&mut self, kind:&str, name:&str, val:&str) {
-        self.declared_inputs.insert(WorkKey::new(kind, name),
+    pub fn declare_input(&mut self, kind: &str, name: &str, val: &str) {
+        debug!("Declaring input %s %s %s", kind, name, val);
+        self.declared_inputs.insert_work_key(WorkKey::new(kind, name),
                                              val.to_owned());
    }
 
    fn is_fresh(&self, cat: &str, kind: &str,
                name: &str, val: &str) -> bool {
        let k = kind.to_owned();
        let f = self.ctxt.freshness.get().find(&k);
+       debug!("freshness for: %s/%s/%s/%s", cat, kind, name, val)
        let fresh = match f {
            None => fail!("missing freshness-function for '%s'", kind),
            Some(f) => (*f)(name, val)
@@ -286,27 +397,31 @@ impl<'self> Prep<'self> {
     }
 
     fn all_fresh(&self, cat: &str, map: &WorkMap) -> bool {
-        for (k, v) in map.iter() {
-            if ! self.is_fresh(cat, k.kind, k.name, *v) {
-                return false;
+        for (k_name, kindmap) in map.iter() {
+            for (k_kind, v) in kindmap.iter() {
+                if ! self.is_fresh(cat, *k_kind, *k_name, *v) {
+                    return false;
+                }
             }
         }
         return true;
     }
 
-    fn exec<T:Send +
+    pub fn exec<T:Send +
        Encodable<json::Encoder> +
        Decodable<json::Decoder>>(
-            &'self self, blk: ~fn(&Exec) -> T) -> T {
+            &'self self, blk: ~fn(&mut Exec) -> T) -> T {
        self.exec_work(blk).unwrap()
    }
 
    fn exec_work<T:Send +
        Encodable<json::Encoder> +
        Decodable<json::Decoder>>( // FIXME(#5121)
-            &'self self, blk: ~fn(&Exec) -> T) -> Work<'self, T> {
+            &'self self, blk: ~fn(&mut Exec) -> T) -> Work<'self, T> {
        let mut bo = Some(blk);
 
+       debug!("exec_work: looking up %s and %?", self.fn_name,
+              self.declared_inputs);
        let cached = do self.ctxt.db.read |db| {
            db.prepare(self.fn_name, &self.declared_inputs)
        };
@@ -316,21 +431,26 @@ impl<'self> Prep<'self> {
            if self.all_fresh("declared input",&self.declared_inputs) &&
               self.all_fresh("discovered input", disc_in) &&
               self.all_fresh("discovered output", disc_out) => {
+                debug!("Cache hit!");
+                debug!("Trying to decode: %? / %? / %?",
+                       disc_in, disc_out, *res);
                Left(json_decode(*res))
            }
 
            _ => {
+                debug!("Cache miss!");
                let (port, chan) = oneshot();
                let blk = bo.take_unwrap();
                let chan = Cell::new(chan);
 
+                // What happens if the task fails?
                do task::spawn {
-                    let exe = Exec {
+                    let mut exe = Exec {
                        discovered_inputs: WorkMap::new(),
                        discovered_outputs: WorkMap::new(),
                    };
                    let chan = chan.take();
-                    let v = blk(&exe);
+                    let v = blk(&mut exe);
                    chan.send((exe, v));
                }
                Right(port)
@@ -371,17 +491,23 @@ impl<'self, T:Send +
 }
 
 
-//#[test]
+#[test]
 fn test() {
     use std::io::WriterUtil;
+    use std::{os, run};
 
     let pth = Path("foo.c");
     {
         let r = io::file_writer(&pth, [io::Create]);
         r.unwrap().write_str("int main() { return 0; }");
     }
 
-    let cx = Context::new(RWArc::new(Database::new(Path("db.json"))),
+    let db_path = os::self_exe_path().expect("workcache::test failed").pop().push("db.json");
+    if os::path_exists(&db_path) {
+        os::remove_file(&db_path);
+    }
+
+    let cx = Context::new(RWArc::new(Database::new(db_path)),
                          RWArc::new(Logger::new()),
                          Arc::new(TreeMap::new()));
 
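For orientation, the sketch below (not part of the commit) shows how a caller such as rustpkg might drive the API these hunks reshape: build a per-kind freshness table and hand it to Context::new_with_freshness, declare inputs on a Prep, and report discovered outputs from inside exec so they reach the database through insert_work_key. It is written in the Rust dialect of the time; digest_file and file_is_fresh are illustrative helpers, and the cx.prep call that hands out a Prep sits outside the hunks shown above, so treat those names as assumptions rather than code from this changeset.

// Sketch only -- mirrors the workcache API touched by this diff; not code from the commit.
extern mod extra;

use extra::workcache::{Context, Database, Logger};
use extra::arc::{Arc, RWArc};
use extra::treemap::TreeMap;
use extra::sha1::Sha1;
use extra::digest::Digest;
use std::{io, os};

// Assumed helper: hash a file's contents so the digest can serve as a workcache value.
fn digest_file(path: &Path) -> ~str {
    let mut sha = Sha1::new();
    sha.input_str(io::read_whole_file_str(path).unwrap());
    sha.result_str()
}

// Assumed freshness function for the "file" kind: a recorded input is fresh when the
// file still exists and still hashes to the value stored in the database.
fn file_is_fresh(name: &str, val: &str) -> bool {
    let path = Path(name);
    os::path_exists(&path) && digest_file(&path) == val.to_owned()
}

fn main() {
    // Register one freshness function per input kind (here only "file"),
    // using the same shape as the FreshnessMap alias added in this diff.
    let mut freshness: TreeMap<~str, extern fn(&str,&str)->bool> = TreeMap::new();
    freshness.insert(~"file", file_is_fresh);

    let cx = Context::new_with_freshness(RWArc::new(Database::new(Path("db.json"))),
                                         RWArc::new(Logger::new()),
                                         Arc::new(TreeMap::new()),   // cfg
                                         Arc::new(freshness));

    // `prep` is assumed to hand out a Prep for the named unit of work; it is not
    // visible in the hunks above because this commit leaves it unchanged.
    let mut prep = cx.prep("build foo.o");
    prep.declare_input("file", "foo.c", digest_file(&Path("foo.c")));
    let _object = do prep.exec |exec| {
        // ...run the real build step here, then record what it produced...
        exec.discover_output("file", "foo.o", digest_file(&Path("foo.o")));
        ~"foo.o"
    };
    // On a later run with an unchanged foo.c, exec_work finds the entry in db.json,
    // the freshness checks pass, and the cached result is decoded instead of rebuilt.
}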