From 8cbc02238da9f36245d61e1d77ecd214eeed2e6e Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Mon, 13 Nov 2017 15:13:44 +0100 Subject: [PATCH 01/21] incr.comp.: Include header when loading cache files in order to get the same byte offsets as when saving. --- src/librustc/ty/maps/on_disk_cache.rs | 4 +-- .../persist/file_format.rs | 18 ++++++++----- src/librustc_incremental/persist/load.rs | 26 +++++++++++-------- 3 files changed, 28 insertions(+), 20 deletions(-) diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 24ce8fb299598..a301b0ce6a7de 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -58,10 +58,10 @@ impl<'sess> OnDiskCache<'sess> { /// so far) will eagerly deserialize the complete cache. Once we are /// dealing with larger amounts of data (i.e. cached query results), /// deserialization will need to happen lazily. - pub fn new(sess: &'sess Session, data: &[u8]) -> OnDiskCache<'sess> { + pub fn new(sess: &'sess Session, data: &[u8], start_pos: usize) -> OnDiskCache<'sess> { debug_assert!(sess.opts.incremental.is_some()); - let mut decoder = opaque::Decoder::new(&data[..], 0); + let mut decoder = opaque::Decoder::new(&data[..], start_pos); let header = Header::decode(&mut decoder).unwrap(); let prev_diagnostics: FxHashMap<_, _> = { diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs index 7d1400b6b95a5..7d27b842a68a7 100644 --- a/src/librustc_incremental/persist/file_format.rs +++ b/src/librustc_incremental/persist/file_format.rs @@ -53,19 +53,25 @@ pub fn write_file_header(stream: &mut W) -> io::Result<()> { /// Reads the contents of a file with a file header as defined in this module. /// -/// - Returns `Ok(Some(data))` if the file existed and was generated by a +/// - Returns `Ok(Some(data, pos))` if the file existed and was generated by a /// compatible compiler version. `data` is the entire contents of the file -/// *after* the header. +/// and `pos` points to the first byte after the header. /// - Returns `Ok(None)` if the file did not exist or was generated by an /// incompatible version of the compiler. /// - Returns `Err(..)` if some kind of IO error occurred while reading the /// file. 
-pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> { +pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<(Vec<u8>, usize)>> { if !path.exists() { return Ok(None); } let mut file = File::open(path)?; + let file_size = file.metadata()?.len() as usize; + + let mut data = Vec::with_capacity(file_size); + file.read_to_end(&mut data)?; + + let mut file = io::Cursor::new(data); // Check FILE_MAGIC { @@ -107,10 +113,8 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> { } } - let mut data = vec![]; - file.read_to_end(&mut data)?; - - Ok(Some(data)) + let post_header_start_pos = file.position() as usize; + Ok(Some((file.into_inner(), post_header_start_pos))) } fn report_format_mismatch(sess: &Session, file: &Path, message: &str) { diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs index 158e9f2677a72..624a9ed930ad0 100644 --- a/src/librustc_incremental/persist/load.rs +++ b/src/librustc_incremental/persist/load.rs @@ -42,9 +42,9 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { } let work_products_path = work_products_path(tcx.sess); - if let Some(work_products_data) = load_data(tcx.sess, &work_products_path) { + if let Some((work_products_data, start_pos)) = load_data(tcx.sess, &work_products_path) { // Decode the list of work_products - let mut work_product_decoder = Decoder::new(&work_products_data[..], 0); + let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos); let work_products: Vec<SerializedWorkProduct> = RustcDecodable::decode(&mut work_product_decoder).unwrap_or_else(|e| { let msg = format!("Error decoding `work-products` from incremental \ @@ -77,9 +77,9 @@ pub fn dep_graph_tcx_init<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { } } -fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> { +fn load_data(sess: &Session, path: &Path) -> Option<(Vec<u8>, usize)> { match file_format::read_file(sess, path) { - Ok(Some(data)) => return Some(data), + Ok(Some(data_and_pos)) => return Some(data_and_pos), Ok(None) => { // The file either didn't exist or was produced by an incompatible // compiler version. Neither is an error. 
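The idea behind this patch, shown in isolation: read the whole cache file, header included, and hand back the buffer together with the offset of the first post-header byte, so that absolute positions recorded while writing the file are still valid indices when decoding it later. The following is a minimal standalone sketch of that pattern, not rustc's actual code; the magic bytes, version constant, and function name are made up for illustration.

    use std::fs::File;
    use std::io::{self, Read};
    use std::path::Path;

    // Illustrative stand-ins for FILE_MAGIC and HEADER_FORMAT_VERSION.
    const MAGIC: &[u8; 4] = b"RSIC";
    const VERSION: u16 = 1;

    // Read the whole file, header included, and return the buffer plus the
    // offset of the first post-header byte. Because the header stays in the
    // buffer, any position recorded while writing the file is also a valid
    // index into this buffer once decoding starts at that offset.
    fn read_file_with_header(path: &Path) -> io::Result<Option<(Vec<u8>, usize)>> {
        if !path.exists() {
            return Ok(None);
        }
        let mut data = Vec::new();
        File::open(path)?.read_to_end(&mut data)?;

        // Files not produced by a compatible writer yield `None`, not an error.
        if data.len() < 6 || !data.starts_with(MAGIC) {
            return Ok(None);
        }
        if u16::from_le_bytes([data[4], data[5]]) != VERSION {
            return Ok(None);
        }

        // In this toy layout the header is exactly 6 bytes long.
        Ok(Some((data, 6)))
    }

Keeping the header in the returned buffer, rather than stripping it, is what makes a later `Decoder::new(&data, start_pos)` line up with the byte offsets the encoder recorded at save time.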
@@ -126,8 +126,8 @@ pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap { debug!("load_prev_metadata_hashes() - File: {}", file_path.display()); - let data = match file_format::read_file(tcx.sess, &file_path) { - Ok(Some(data)) => data, + let (data, start_pos) = match file_format::read_file(tcx.sess, &file_path) { + Ok(Some(data_and_pos)) => data_and_pos, Ok(None) => { debug!("load_prev_metadata_hashes() - File produced by incompatible \ compiler version: {}", file_path.display()); @@ -141,7 +141,7 @@ pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap { }; debug!("load_prev_metadata_hashes() - Decoding hashes"); - let mut decoder = Decoder::new(&data, 0); + let mut decoder = Decoder::new(&data, start_pos); let _ = Svh::decode(&mut decoder).unwrap(); let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap(); @@ -171,8 +171,8 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph { return empty } - if let Some(bytes) = load_data(sess, &dep_graph_path(sess)) { - let mut decoder = Decoder::new(&bytes, 0); + if let Some((bytes, start_pos)) = load_data(sess, &dep_graph_path(sess)) { + let mut decoder = Decoder::new(&bytes, start_pos); let prev_commandline_args_hash = u64::decode(&mut decoder) .expect("Error reading commandline arg hash from cached dep-graph"); @@ -184,6 +184,10 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph { // We can't reuse the cache, purge it. debug!("load_dep_graph_new: differing commandline arg hashes"); + delete_all_session_dir_contents(sess) + .expect("Failed to delete invalidated incr. comp. session \ + directory contents."); + // No need to do any further work return empty } @@ -202,8 +206,8 @@ pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess return OnDiskCache::new_empty(sess.codemap()); } - if let Some(bytes) = load_data(sess, &query_cache_path(sess)) { - OnDiskCache::new(sess, &bytes[..]) + if let Some((bytes, start_pos)) = load_data(sess, &query_cache_path(sess)) { + OnDiskCache::new(sess, &bytes[..], start_pos) } else { OnDiskCache::new_empty(sess.codemap()) } From c08e03ac46d408a19df480ff5ef7cca4abffdc15 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Mon, 13 Nov 2017 15:25:09 +0100 Subject: [PATCH 02/21] incr.comp.: Add position() method to TyEncoder. --- src/librustc/ty/codec.rs | 12 ++++++++++-- src/librustc_metadata/decoder.rs | 7 +++++++ 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs index 1c793920bf2e4..164aac303af80 100644 --- a/src/librustc/ty/codec.rs +++ b/src/librustc/ty/codec.rs @@ -19,7 +19,7 @@ use hir::def_id::{DefId, CrateNum}; use middle::const_val::ByteArray; use rustc_data_structures::fx::FxHashMap; -use rustc_serialize::{Decodable, Decoder, Encoder, Encodable}; +use rustc_serialize::{Decodable, Decoder, Encoder, Encodable, opaque}; use std::hash::Hash; use std::intrinsics; use ty::{self, Ty, TyCtxt}; @@ -53,6 +53,13 @@ pub trait TyEncoder: Encoder { fn position(&self) -> usize; } +impl<'buf> TyEncoder for opaque::Encoder<'buf> { + #[inline] + fn position(&self) -> usize { + self.position() + } +} + /// Encode the given value or a previously cached shorthand. 
pub fn encode_with_shorthand(encoder: &mut E, value: &T, @@ -113,6 +120,8 @@ pub trait TyDecoder<'a, 'tcx: 'a>: Decoder { fn peek_byte(&self) -> u8; + fn position(&self) -> usize; + fn cached_ty_for_shorthand(&mut self, shorthand: usize, or_insert_with: F) @@ -142,7 +151,6 @@ pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> 'tcx: 'a, { // Handle shorthands first, if we have an usize > 0x80. - // if self.opaque.data[self.opaque.position()] & 0x80 != 0 { if decoder.positioned_at_shorthand() { let pos = decoder.read_usize()?; assert!(pos >= SHORTHAND_OFFSET); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index e63037f4da1ef..5fee7173c87e7 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -217,14 +217,21 @@ impl<'doc, 'tcx> Decoder for DecodeContext<'doc, 'tcx> { impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> { + #[inline] fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx.expect("missing TyCtxt in DecodeContext") } + #[inline] fn peek_byte(&self) -> u8 { self.opaque.data[self.opaque.position()] } + #[inline] + fn position(&self) -> usize { + self.opaque.position() + } + fn cached_ty_for_shorthand(&mut self, shorthand: usize, or_insert_with: F) From bc96d9d8c9d7c476cfbbb758b1fde6826ae65980 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Mon, 13 Nov 2017 15:46:46 +0100 Subject: [PATCH 03/21] incr.comp.: Implement UseSpecializedXXcodable for DefIndex and DefId. --- src/librustc/hir/def_id.rs | 43 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index 428f154c1b66a..58a9ea0641043 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -11,7 +11,7 @@ use ty; use rustc_data_structures::indexed_vec::Idx; -use serialize::{self, Encoder, Decoder}; +use serialize::{self, Encoder, Decoder, Decodable, Encodable}; use std::fmt; use std::u32; @@ -146,6 +146,20 @@ impl DefIndex { } } +impl serialize::UseSpecializedEncodable for DefIndex { + #[inline] + fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_u32(self.0) + } +} + +impl serialize::UseSpecializedDecodable for DefIndex { + #[inline] + fn default_decode(d: &mut D) -> Result { + d.read_u32().map(DefIndex) + } +} + #[derive(Copy, Clone, Eq, PartialEq, Hash)] pub enum DefIndexAddressSpace { Low = 0, @@ -188,7 +202,6 @@ impl fmt::Debug for DefId { } } - impl DefId { /// Make a local `DefId` with the given index. pub fn local(index: DefIndex) -> DefId { @@ -199,3 +212,29 @@ impl DefId { self.krate == LOCAL_CRATE } } + +impl serialize::UseSpecializedEncodable for DefId { + #[inline] + fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { + let DefId { + krate, + index, + } = *self; + + krate.encode(s)?; + index.encode(s) + } +} + +impl serialize::UseSpecializedDecodable for DefId { + #[inline] + fn default_decode(d: &mut D) -> Result { + let krate = CrateNum::decode(d)?; + let index = DefIndex::decode(d)?; + + Ok(DefId { + krate, + index + }) + } +} From 9ac102645fd6413ae460b34ec4c06eec25979355 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Mon, 13 Nov 2017 15:48:52 +0100 Subject: [PATCH 04/21] incr.comp.: Properly use ty::codec::decode_cnum() in rustc_metadata::decoder. 
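The helper being reused here encapsulates a small but easy-to-get-wrong step: a crate number is serialized as a plain u32, and on the way back in it has to be remapped into the current session through whatever table the concrete decoder carries (the metadata decoder's cnum_map in the hunk below, the cache's own crate mapping in later patches). A rough sketch of that shape, with simplified stand-in types rather than rustc's, for orientation:

    // Simplified stand-in for rustc's CrateNum.
    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct CrateNum(u32);

    trait CnumDecoder {
        // Read the next u32 from the underlying byte stream.
        fn read_u32(&mut self) -> Result<u32, String>;
        // Hook each decoder supplies: look the encoded crate number up in
        // whatever remapping table this particular decoder owns.
        fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum;
    }

    // The shared helper: read the raw number, then remap it into the
    // current session before handing it to the caller.
    fn decode_cnum<D: CnumDecoder>(d: &mut D) -> Result<CrateNum, String> {
        let raw = CrateNum(d.read_u32()?);
        Ok(d.map_encoded_cnum_to_current(raw))
    }

Putting the remapping behind a trait method is what lets one decode path serve both crate metadata and the incremental cache.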
--- src/librustc_metadata/decoder.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 5fee7173c87e7..83d0a1fdfdb7b 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -295,12 +295,7 @@ impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { fn specialized_decode(&mut self) -> Result { - let cnum = CrateNum::from_u32(u32::decode(self)?); - if cnum == LOCAL_CRATE { - Ok(self.cdata().cnum) - } else { - Ok(self.cdata().cnum_map.borrow()[cnum]) - } + ty_codec::decode_cnum(self) } } From 3bd333c988d479c11d5da18463d4c9f020f4f56e Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Mon, 13 Nov 2017 16:35:51 +0100 Subject: [PATCH 05/21] incr.comp.: Add CacheEncoder for encoding query results into the incr.comp. cache. --- src/librustc/ty/context.rs | 4 +- src/librustc/ty/maps/on_disk_cache.rs | 133 ++++++++++++++++++++++---- 2 files changed, 118 insertions(+), 19 deletions(-) diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 37c4346a7dc93..a5ca699fd5d91 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1311,9 +1311,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn serialize_query_result_cache(self, encoder: &mut E) -> Result<(), E::Error> - where E: ::rustc_serialize::Encoder + where E: ty::codec::TyEncoder { - self.on_disk_query_result_cache.serialize(encoder) + self.on_disk_query_result_cache.serialize(self, encoder) } } diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index a301b0ce6a7de..8d65fd50ee534 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -13,7 +13,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::indexed_vec::Idx; use errors::Diagnostic; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, - SpecializedDecoder}; + SpecializedDecoder, SpecializedEncoder}; use session::Session; use std::borrow::Cow; use std::cell::RefCell; @@ -21,6 +21,9 @@ use std::collections::BTreeMap; use std::mem; use syntax::codemap::{CodeMap, StableFilemapId}; use syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP}; +use ty; +use ty::codec::{self as ty_codec}; +use ty::context::TyCtxt; /// `OnDiskCache` provides an interface to incr. comp. data cached from the /// previous compilation session. This data will eventually include the results @@ -46,11 +49,7 @@ struct Header { prev_filemap_starts: BTreeMap, } -// This type is used only for (de-)serialization. -#[derive(RustcEncodable, RustcDecodable)] -struct Body { - diagnostics: Vec<(SerializedDepNodeIndex, Vec)>, -} +type EncodedPrevDiagnostics = Vec<(SerializedDepNodeIndex, Vec)>; impl<'sess> OnDiskCache<'sess> { /// Create a new OnDiskCache instance from the serialized data in `data`. 
@@ -64,14 +63,21 @@ impl<'sess> OnDiskCache<'sess> { let mut decoder = opaque::Decoder::new(&data[..], start_pos); let header = Header::decode(&mut decoder).unwrap(); - let prev_diagnostics: FxHashMap<_, _> = { + let prev_diagnostics = { let mut decoder = CacheDecoder { opaque: decoder, codemap: sess.codemap(), prev_filemap_starts: &header.prev_filemap_starts, }; - let body = Body::decode(&mut decoder).unwrap(); - body.diagnostics.into_iter().collect() + + let prev_diagnostics: FxHashMap<_, _> = { + let diagnostics = EncodedPrevDiagnostics::decode(&mut decoder) + .expect("Error while trying to decode prev. diagnostics \ + from incr. comp. cache."); + diagnostics.into_iter().collect() + }; + + prev_diagnostics }; OnDiskCache { @@ -91,11 +97,21 @@ impl<'sess> OnDiskCache<'sess> { } } - pub fn serialize<'a, 'tcx, E>(&self, - encoder: &mut E) - -> Result<(), E::Error> - where E: Encoder - { + pub fn serialize<'a, 'gcx, 'lcx, E>(&self, + tcx: TyCtxt<'a, 'gcx, 'lcx>, + encoder: &mut E) + -> Result<(), E::Error> + where E: ty_codec::TyEncoder + { + // Serializing the DepGraph should not modify it: + let _in_ignore = tcx.dep_graph.in_ignore(); + + let mut encoder = CacheEncoder { + encoder, + type_shorthands: FxHashMap(), + predicate_shorthands: FxHashMap(), + }; + let prev_filemap_starts: BTreeMap<_, _> = self .codemap .files() @@ -103,16 +119,16 @@ impl<'sess> OnDiskCache<'sess> { .map(|fm| (fm.start_pos, StableFilemapId::new(fm))) .collect(); - Header { prev_filemap_starts }.encode(encoder)?; + Header { prev_filemap_starts }.encode(&mut encoder)?; - let diagnostics: Vec<(SerializedDepNodeIndex, Vec)> = + let diagnostics: EncodedPrevDiagnostics = self.current_diagnostics .borrow() .iter() .map(|(k, v)| (SerializedDepNodeIndex::new(k.index()), v.clone())) .collect(); - Body { diagnostics }.encode(encoder)?; + diagnostics.encode(&mut encoder)?; Ok(()) } @@ -152,6 +168,9 @@ impl<'sess> OnDiskCache<'sess> { } } + +//- DECODING ------------------------------------------------------------------- + /// A decoder that can read the incr. comp. cache. It is similar to the one /// we use for crate metadata decoding in that it can rebase spans and /// eventually will also handle things that contain `Ty` instances. @@ -229,3 +248,83 @@ impl<'a> SpecializedDecoder for CacheDecoder<'a> { Ok(DUMMY_SP) } } + + +//- ENCODING ------------------------------------------------------------------- + +struct CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + encoder: &'enc mut E, + type_shorthands: FxHashMap, usize>, + predicate_shorthands: FxHashMap, usize>, +} + +impl<'enc, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn position(&self) -> usize { + self.encoder.position() + } +} + +impl<'enc, 'tcx, E> SpecializedEncoder> for CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, ty: &ty::Ty<'tcx>) -> Result<(), Self::Error> { + ty_codec::encode_with_shorthand(self, ty, + |encoder| &mut encoder.type_shorthands) + } +} + +impl<'enc, 'tcx, E> SpecializedEncoder> + for CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, + predicates: &ty::GenericPredicates<'tcx>) + -> Result<(), Self::Error> { + ty_codec::encode_predicates(self, predicates, + |encoder| &mut encoder.predicate_shorthands) + } +} + +macro_rules! 
encoder_methods { + ($($name:ident($ty:ty);)*) => { + $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> { + self.encoder.$name(value) + })* + } +} + +impl<'enc, 'tcx, E> Encoder for CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + type Error = E::Error; + + fn emit_nil(&mut self) -> Result<(), Self::Error> { + Ok(()) + } + + encoder_methods! { + emit_usize(usize); + emit_u128(u128); + emit_u64(u64); + emit_u32(u32); + emit_u16(u16); + emit_u8(u8); + + emit_isize(isize); + emit_i128(i128); + emit_i64(i64); + emit_i32(i32); + emit_i16(i16); + emit_i8(i8); + + emit_bool(bool); + emit_f64(f64); + emit_f32(f32); + emit_char(char); + emit_str(&str); + } +} From 15db1652f88dc8c20c5cb157978a1ac3ec447b9e Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Nov 2017 12:03:57 +0100 Subject: [PATCH 06/21] incr.comp.: Implement TyDecoder for on_disk_cache::CacheDecoder. --- src/librustc/hir/def_id.rs | 4 + src/librustc/hir/mod.rs | 29 ++- src/librustc/ty/maps/on_disk_cache.rs | 243 ++++++++++++++++++++++++-- 3 files changed, 263 insertions(+), 13 deletions(-) diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index 58a9ea0641043..b2eefca7fe232 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -32,6 +32,10 @@ newtype_index!(CrateNum /// A CrateNum value that indicates that something is wrong. const INVALID_CRATE = u32::MAX - 1, + + /// A special CrateNum that we use for the tcx.rcache when decoding from + /// the incr. comp. cache. + const RESERVED_FOR_INCR_COMP_CACHE = u32::MAX - 2, }); impl CrateNum { diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index c9b1d70e7b60d..3bc2736586f45 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -45,6 +45,7 @@ use ty::AdtKind; use rustc_data_structures::indexed_vec; +use serialize::{self, Encoder, Encodable, Decoder, Decodable}; use std::collections::BTreeMap; use std::fmt; @@ -85,13 +86,37 @@ pub mod svh; /// the local_id part of the HirId changing, which is a very useful property in /// incremental compilation where we have to persist things through changes to /// the code base. -#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug, - RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] pub struct HirId { pub owner: DefIndex, pub local_id: ItemLocalId, } +impl serialize::UseSpecializedEncodable for HirId { + fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { + let HirId { + owner, + local_id, + } = *self; + + owner.encode(s)?; + local_id.encode(s) + } +} + +impl serialize::UseSpecializedDecodable for HirId { + fn default_decode(d: &mut D) -> Result { + let owner = DefIndex::decode(d)?; + let local_id = ItemLocalId::decode(d)?; + + Ok(HirId { + owner, + local_id + }) + } +} + + /// An `ItemLocalId` uniquely identifies something within a given "item-like", /// that is within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no /// guarantee that the numerical value of a given `ItemLocalId` corresponds to diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 8d65fd50ee534..20bddb1560357 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -9,21 +9,29 @@ // except according to those terms. 
use dep_graph::{DepNodeIndex, SerializedDepNodeIndex}; -use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::indexed_vec::Idx; use errors::Diagnostic; +use hir; +use hir::def_id::{CrateNum, DefIndex, DefId, RESERVED_FOR_INCR_COMP_CACHE, + LOCAL_CRATE}; +use hir::map::definitions::{Definitions, DefPathTable}; +use middle::const_val::ByteArray; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, - SpecializedDecoder, SpecializedEncoder}; + SpecializedDecoder, SpecializedEncoder, + UseSpecializedDecodable}; use session::Session; use std::borrow::Cow; use std::cell::RefCell; use std::collections::BTreeMap; use std::mem; +use syntax::ast::NodeId; use syntax::codemap::{CodeMap, StableFilemapId}; use syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP}; use ty; -use ty::codec::{self as ty_codec}; +use ty::codec::{self as ty_codec, TyDecoder}; use ty::context::TyCtxt; +use ty::subst::Substs; /// `OnDiskCache` provides an interface to incr. comp. data cached from the /// previous compilation session. This data will eventually include the results @@ -65,9 +73,12 @@ impl<'sess> OnDiskCache<'sess> { let prev_diagnostics = { let mut decoder = CacheDecoder { + tcx: None, opaque: decoder, codemap: sess.codemap(), prev_filemap_starts: &header.prev_filemap_starts, + cnum_map: &IndexVec::new(), + prev_def_path_tables: &Vec::new(), }; let prev_diagnostics: FxHashMap<_, _> = { @@ -110,6 +121,7 @@ impl<'sess> OnDiskCache<'sess> { encoder, type_shorthands: FxHashMap(), predicate_shorthands: FxHashMap(), + definitions: tcx.hir.definitions(), }; let prev_filemap_starts: BTreeMap<_, _> = self @@ -174,13 +186,16 @@ impl<'sess> OnDiskCache<'sess> { /// A decoder that can read the incr. comp. cache. It is similar to the one /// we use for crate metadata decoding in that it can rebase spans and /// eventually will also handle things that contain `Ty` instances. -struct CacheDecoder<'a> { - opaque: opaque::Decoder<'a>, - codemap: &'a CodeMap, - prev_filemap_starts: &'a BTreeMap, +struct CacheDecoder<'a, 'tcx: 'a, 'x> { + tcx: Option>, + opaque: opaque::Decoder<'x>, + codemap: &'x CodeMap, + prev_filemap_starts: &'x BTreeMap, + cnum_map: &'x IndexVec>, + prev_def_path_tables: &'x Vec, } -impl<'a> CacheDecoder<'a> { +impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { fn find_filemap_prev_bytepos(&self, prev_bytepos: BytePos) -> Option<(BytePos, StableFilemapId)> { @@ -200,7 +215,7 @@ macro_rules! decoder_methods { } } -impl<'sess> Decoder for CacheDecoder<'sess> { +impl<'a, 'tcx, 'x> Decoder for CacheDecoder<'a, 'tcx, 'x> { type Error = String; decoder_methods! 
{ @@ -232,7 +247,65 @@ impl<'sess> Decoder for CacheDecoder<'sess> { } } -impl<'a> SpecializedDecoder for CacheDecoder<'a> { +impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, 'x> { + + #[inline] + fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { + self.tcx.expect("missing TyCtxt in CacheDecoder") + } + + #[inline] + fn position(&self) -> usize { + self.opaque.position() + } + + #[inline] + fn peek_byte(&self) -> u8 { + self.opaque.data[self.opaque.position()] + } + + fn cached_ty_for_shorthand(&mut self, + shorthand: usize, + or_insert_with: F) + -> Result, Self::Error> + where F: FnOnce(&mut Self) -> Result, Self::Error> + { + let tcx = self.tcx(); + + let cache_key = ty::CReaderCacheKey { + cnum: RESERVED_FOR_INCR_COMP_CACHE, + pos: shorthand, + }; + + if let Some(&ty) = tcx.rcache.borrow().get(&cache_key) { + return Ok(ty); + } + + let ty = or_insert_with(self)?; + tcx.rcache.borrow_mut().insert(cache_key, ty); + Ok(ty) + } + + fn with_position(&mut self, pos: usize, f: F) -> R + where F: FnOnce(&mut Self) -> R + { + debug_assert!(pos < self.opaque.data.len()); + + let new_opaque = opaque::Decoder::new(self.opaque.data, pos); + let old_opaque = mem::replace(&mut self.opaque, new_opaque); + let r = f(self); + self.opaque = old_opaque; + r + } + + fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum { + self.cnum_map[cnum].unwrap_or_else(|| { + bug!("Could not find new CrateNum for {:?}", cnum) + }) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { fn specialized_decode(&mut self) -> Result { let lo = BytePos::decode(self)?; let hi = BytePos::decode(self)?; @@ -249,6 +322,142 @@ impl<'a> SpecializedDecoder for CacheDecoder<'a> { } } +impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result { + let cnum = CrateNum::from_u32(u32::decode(self)?); + let mapped = self.map_encoded_cnum_to_current(cnum); + Ok(mapped) + } +} + +// This impl makes sure that we get a runtime error when we try decode a +// DefIndex that is not contained in a DefId. Such a case would be problematic +// because we would not know how to transform the DefIndex to the current +// context. +impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result { + bug!("Trying to decode DefIndex outside the context of a DefId") + } +} + +// Both the CrateNum and the DefIndex of a DefId can change in between two +// compilation sessions. We use the DefPathHash, which is stable across +// sessions, to map the old DefId to the new one. +impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result { + // Decode the unmapped CrateNum + let prev_cnum = CrateNum::default_decode(self)?; + + // Decode the unmapped DefIndex + let def_index = DefIndex::default_decode(self)?; + + // Unmapped CrateNum and DefIndex are valid keys for the *cached* + // DefPathTables, so we use them to look up the DefPathHash. + let def_path_hash = self.prev_def_path_tables[prev_cnum.index()] + .def_path_hash(def_index); + + // Using the DefPathHash, we can lookup the new DefId + Ok(self.tcx().def_path_hash_to_def_id.as_ref().unwrap()[&def_path_hash]) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result { + // Decode the unmapped DefIndex of the HirId. 
+ let def_index = DefIndex::default_decode(self)?; + + // Use the unmapped DefIndex to look up the DefPathHash in the cached + // DefPathTable. For HirIds we know that we always have to look in the + // *local* DefPathTable. + let def_path_hash = self.prev_def_path_tables[LOCAL_CRATE.index()] + .def_path_hash(def_index); + + // Use the DefPathHash to map to the current DefId. + let def_id = self.tcx() + .def_path_hash_to_def_id + .as_ref() + .unwrap()[&def_path_hash]; + + // The ItemLocalId needs no remapping. + let local_id = hir::ItemLocalId::decode(self)?; + + // Reconstruct the HirId and look up the corresponding NodeId in the + // context of the current session. + Ok(hir::HirId { + owner: def_id.index, + local_id + }) + } +} + +// NodeIds are not stable across compilation sessions, so we store them in their +// HirId representation. This allows use to map them to the current NodeId. +impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result { + let hir_id = hir::HirId::decode(self)?; + Ok(self.tcx().hir.hir_to_node_id(hir_id)) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder> for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + ty_codec::decode_ty(self) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder> +for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + ty_codec::decode_predicates(self) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx Substs<'tcx>> for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> { + ty_codec::decode_substs(self) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder> for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + ty_codec::decode_region(self) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx ty::Slice>> +for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice>, Self::Error> { + ty_codec::decode_ty_slice(self) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx ty::AdtDef> for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> { + ty_codec::decode_adt_def(self) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx ty::Slice>> + for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) + -> Result<&'tcx ty::Slice>, Self::Error> { + ty_codec::decode_existential_predicate_slice(self) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder> for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + ty_codec::decode_byte_array(self) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx ty::Const<'tcx>> +for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::Const<'tcx>, Self::Error> { + ty_codec::decode_const(self) + } +} + //- ENCODING ------------------------------------------------------------------- @@ -258,6 +467,7 @@ struct CacheEncoder<'enc, 'tcx, E> encoder: &'enc mut E, type_shorthands: FxHashMap, usize>, predicate_shorthands: FxHashMap, usize>, + definitions: &'enc Definitions, } impl<'enc, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'tcx, E> @@ -289,6 +499,17 @@ impl<'enc, 'tcx, E> SpecializedEncoder> } } +// NodeIds are not stable across compilation sessions, so we store them in their +// HirId representation. This allows use to map them to the current NodeId. 
+impl<'enc, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, node_id: &NodeId) -> Result<(), Self::Error> { + let hir_id = self.definitions.node_to_hir_id(*node_id); + hir_id.encode(self) + } +} + macro_rules! encoder_methods { ($($name:ident($ty:ty);)*) => { $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> { From bedb44cca89541bc5e03b029a7b6682b632fd10d Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Nov 2017 14:07:12 +0100 Subject: [PATCH 07/21] incr.comp.: Allow for mapping from prev-session-CrateNums to current-session-CrateNums in OnDiskCaches. --- src/librustc/ty/context.rs | 2 +- src/librustc/ty/maps/on_disk_cache.rs | 77 +++++++++++++++++++++++++-- 2 files changed, 73 insertions(+), 6 deletions(-) diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index a5ca699fd5d91..512860a02b056 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1313,7 +1313,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { -> Result<(), E::Error> where E: ty::codec::TyEncoder { - self.on_disk_query_result_cache.serialize(self, encoder) + self.on_disk_query_result_cache.serialize(self, self.cstore, encoder) } } diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 20bddb1560357..c2303c4a2391d 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -15,12 +15,13 @@ use hir::def_id::{CrateNum, DefIndex, DefId, RESERVED_FOR_INCR_COMP_CACHE, LOCAL_CRATE}; use hir::map::definitions::{Definitions, DefPathTable}; use middle::const_val::ByteArray; +use middle::cstore::CrateStore; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, SpecializedDecoder, SpecializedEncoder, UseSpecializedDecodable}; -use session::Session; +use session::{CrateDisambiguator, Session}; use std::borrow::Cow; use std::cell::RefCell; use std::collections::BTreeMap; @@ -45,8 +46,10 @@ pub struct OnDiskCache<'sess> { // compilation session. current_diagnostics: RefCell>>, - // This will eventually be needed for creating Decoders that can rebase - // spans. 
+ + prev_cnums: Vec<(u32, String, CrateDisambiguator)>, + cnum_map: RefCell>>>, + _prev_filemap_starts: BTreeMap, codemap: &'sess CodeMap, } @@ -55,6 +58,7 @@ pub struct OnDiskCache<'sess> { #[derive(RustcEncodable, RustcDecodable)] struct Header { prev_filemap_starts: BTreeMap, + prev_cnums: Vec<(u32, String, CrateDisambiguator)>, } type EncodedPrevDiagnostics = Vec<(SerializedDepNodeIndex, Vec)>; @@ -94,6 +98,8 @@ impl<'sess> OnDiskCache<'sess> { OnDiskCache { prev_diagnostics, _prev_filemap_starts: header.prev_filemap_starts, + prev_cnums: header.prev_cnums, + cnum_map: RefCell::new(None), codemap: sess.codemap(), current_diagnostics: RefCell::new(FxHashMap()), } @@ -103,6 +109,8 @@ impl<'sess> OnDiskCache<'sess> { OnDiskCache { prev_diagnostics: FxHashMap(), _prev_filemap_starts: BTreeMap::new(), + prev_cnums: vec![], + cnum_map: RefCell::new(None), codemap, current_diagnostics: RefCell::new(FxHashMap()), } @@ -110,6 +118,7 @@ impl<'sess> OnDiskCache<'sess> { pub fn serialize<'a, 'gcx, 'lcx, E>(&self, tcx: TyCtxt<'a, 'gcx, 'lcx>, + cstore: &CrateStore, encoder: &mut E) -> Result<(), E::Error> where E: ty_codec::TyEncoder @@ -124,6 +133,8 @@ impl<'sess> OnDiskCache<'sess> { definitions: tcx.hir.definitions(), }; + + // Encode the file header let prev_filemap_starts: BTreeMap<_, _> = self .codemap .files() @@ -131,8 +142,21 @@ impl<'sess> OnDiskCache<'sess> { .map(|fm| (fm.start_pos, StableFilemapId::new(fm))) .collect(); - Header { prev_filemap_starts }.encode(&mut encoder)?; + let sorted_cnums = sorted_cnums_including_local_crate(cstore); + + let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| { + let crate_name = tcx.original_crate_name(cnum).as_str().to_string(); + let crate_disambiguator = tcx.crate_disambiguator(cnum); + (cnum.as_u32(), crate_name, crate_disambiguator) + }).collect(); + Header { + prev_filemap_starts, + prev_cnums, + }.encode(&mut encoder)?; + + + // Encode Diagnostics let diagnostics: EncodedPrevDiagnostics = self.current_diagnostics .borrow() @@ -142,7 +166,16 @@ impl<'sess> OnDiskCache<'sess> { diagnostics.encode(&mut encoder)?; - Ok(()) + return Ok(()); + + fn sorted_cnums_including_local_crate(cstore: &CrateStore) -> Vec { + let mut cnums = vec![LOCAL_CRATE]; + cnums.extend_from_slice(&cstore.crates_untracked()[..]); + cnums.sort_unstable(); + // Just to be sure... + cnums.dedup(); + cnums + } } /// Load a diagnostic emitted during the previous compilation session. @@ -178,6 +211,40 @@ impl<'sess> OnDiskCache<'sess> { x.extend(diagnostics.into_iter()); } + + // This function builds mapping from previous-session-CrateNum to + // current-session-CrateNum. There might be CrateNums from the previous + // Session that don't occur in the current one. For these, the mapping + // maps to None. 
+ fn compute_cnum_map(tcx: TyCtxt, + prev_cnums: &[(u32, String, CrateDisambiguator)]) + -> IndexVec> + { + let _in_ignore = tcx.dep_graph.in_ignore(); + + let current_cnums = tcx.all_crate_nums(LOCAL_CRATE).iter().map(|&cnum| { + let crate_name = tcx.original_crate_name(cnum) + .as_str() + .to_string(); + let crate_disambiguator = tcx.crate_disambiguator(cnum); + ((crate_name, crate_disambiguator), cnum) + }).collect::>(); + + let map_size = prev_cnums.iter() + .map(|&(cnum, ..)| cnum) + .max() + .unwrap_or(0) + 1; + let mut map = IndexVec::new(); + map.resize(map_size as usize, None); + + for &(prev_cnum, ref crate_name, crate_disambiguator) in prev_cnums { + let key = (crate_name.clone(), crate_disambiguator); + map[CrateNum::from_u32(prev_cnum)] = current_cnums.get(&key).cloned(); + } + + map[LOCAL_CRATE] = Some(LOCAL_CRATE); + map + } } From de0317e267812563e87cd0a03f9e5d486c0bdae8 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Nov 2017 14:40:56 +0100 Subject: [PATCH 08/21] incr.comp.: Encode DefPathTables for reconstructing DefIds. --- src/librustc/ty/maps/on_disk_cache.rs | 32 +++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index c2303c4a2391d..7431d6dd497b8 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -49,6 +49,7 @@ pub struct OnDiskCache<'sess> { prev_cnums: Vec<(u32, String, CrateDisambiguator)>, cnum_map: RefCell>>>, + prev_def_path_tables: Vec, _prev_filemap_starts: BTreeMap, codemap: &'sess CodeMap, @@ -73,9 +74,12 @@ impl<'sess> OnDiskCache<'sess> { debug_assert!(sess.opts.incremental.is_some()); let mut decoder = opaque::Decoder::new(&data[..], start_pos); + + + // Decode the header let header = Header::decode(&mut decoder).unwrap(); - let prev_diagnostics = { + let (prev_diagnostics, prev_def_path_tables) = { let mut decoder = CacheDecoder { tcx: None, opaque: decoder, @@ -85,6 +89,7 @@ impl<'sess> OnDiskCache<'sess> { prev_def_path_tables: &Vec::new(), }; + // Decode Diagnostics let prev_diagnostics: FxHashMap<_, _> = { let diagnostics = EncodedPrevDiagnostics::decode(&mut decoder) .expect("Error while trying to decode prev. 
diagnostics \ @@ -92,7 +97,12 @@ impl<'sess> OnDiskCache<'sess> { diagnostics.into_iter().collect() }; - prev_diagnostics + // Decode DefPathTables + let prev_def_path_tables: Vec = + Decodable::decode(&mut decoder) + .expect("Error while trying to decode cached DefPathTables"); + + (prev_diagnostics, prev_def_path_tables) }; OnDiskCache { @@ -100,6 +110,7 @@ impl<'sess> OnDiskCache<'sess> { _prev_filemap_starts: header.prev_filemap_starts, prev_cnums: header.prev_cnums, cnum_map: RefCell::new(None), + prev_def_path_tables, codemap: sess.codemap(), current_diagnostics: RefCell::new(FxHashMap()), } @@ -111,6 +122,7 @@ impl<'sess> OnDiskCache<'sess> { _prev_filemap_starts: BTreeMap::new(), prev_cnums: vec![], cnum_map: RefCell::new(None), + prev_def_path_tables: Vec::new(), codemap, current_diagnostics: RefCell::new(FxHashMap()), } @@ -166,6 +178,22 @@ impl<'sess> OnDiskCache<'sess> { diagnostics.encode(&mut encoder)?; + + // Encode all DefPathTables + let upstream_def_path_tables = tcx.all_crate_nums(LOCAL_CRATE) + .iter() + .map(|&cnum| (cnum, cstore.def_path_table(cnum))) + .collect::>(); + let def_path_tables: Vec<&DefPathTable> = sorted_cnums.into_iter().map(|cnum| { + if cnum == LOCAL_CRATE { + tcx.hir.definitions().def_path_table() + } else { + &*upstream_def_path_tables[&cnum] + } + }).collect(); + + def_path_tables.encode(&mut encoder)?; + return Ok(()); fn sorted_cnums_including_local_crate(cstore: &CrateStore) -> Vec { From 2087d5ebfa06a32b4142f3cebbe4add0e62a9588 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Nov 2017 14:50:03 +0100 Subject: [PATCH 09/21] incr.comp.: Do some verification on data decoded from incr. comp. cache. --- src/librustc/ty/maps/on_disk_cache.rs | 68 ++++++++++++++++++++++++--- 1 file changed, 62 insertions(+), 6 deletions(-) diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 7431d6dd497b8..121b81111f217 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -34,6 +34,11 @@ use ty::codec::{self as ty_codec, TyDecoder}; use ty::context::TyCtxt; use ty::subst::Substs; +// Some magic values used for verifying that encoding and decoding. These are +// basically random numbers. +const PREV_DIAGNOSTICS_TAG: u64 = 0x1234_5678_A1A1_A1A1; +const DEF_PATH_TABLE_TAG: u64 = 0x1234_5678_B2B2_B2B2; + /// `OnDiskCache` provides an interface to incr. comp. data cached from the /// previous compilation session. This data will eventually include the results /// of a few selected queries (like `typeck_tables_of` and `mir_optimized`) and @@ -91,15 +96,17 @@ impl<'sess> OnDiskCache<'sess> { // Decode Diagnostics let prev_diagnostics: FxHashMap<_, _> = { - let diagnostics = EncodedPrevDiagnostics::decode(&mut decoder) - .expect("Error while trying to decode prev. diagnostics \ - from incr. comp. cache."); + let diagnostics: EncodedPrevDiagnostics = + decode_tagged(&mut decoder, PREV_DIAGNOSTICS_TAG) + .expect("Error while trying to decode previous session \ + diagnostics from incr. comp. 
cache."); + diagnostics.into_iter().collect() }; // Decode DefPathTables let prev_def_path_tables: Vec = - Decodable::decode(&mut decoder) + decode_tagged(&mut decoder, DEF_PATH_TABLE_TAG) .expect("Error while trying to decode cached DefPathTables"); (prev_diagnostics, prev_def_path_tables) @@ -176,7 +183,7 @@ impl<'sess> OnDiskCache<'sess> { .map(|(k, v)| (SerializedDepNodeIndex::new(k.index()), v.clone())) .collect(); - diagnostics.encode(&mut encoder)?; + encoder.encode_tagged(PREV_DIAGNOSTICS_TAG, &diagnostics)?; // Encode all DefPathTables @@ -192,7 +199,7 @@ impl<'sess> OnDiskCache<'sess> { } }).collect(); - def_path_tables.encode(&mut encoder)?; + encoder.encode_tagged(DEF_PATH_TABLE_TAG, &def_path_tables)?; return Ok(()); @@ -342,6 +349,30 @@ impl<'a, 'tcx, 'x> Decoder for CacheDecoder<'a, 'tcx, 'x> { } } +// Decode something that was encoded with encode_tagged() and verify that the +// tag matches and the correct amount of bytes was read. +fn decode_tagged<'a, 'tcx, D, T, V>(decoder: &mut D, + expected_tag: T) + -> Result + where T: Decodable + Eq + ::std::fmt::Debug, + V: Decodable, + D: Decoder + ty_codec::TyDecoder<'a, 'tcx>, + 'tcx: 'a, +{ + let start_pos = decoder.position(); + + let actual_tag = T::decode(decoder)?; + assert_eq!(actual_tag, expected_tag); + let value = V::decode(decoder)?; + let end_pos = decoder.position(); + + let expected_len: u64 = Decodable::decode(decoder)?; + assert_eq!((end_pos - start_pos) as u64, expected_len); + + Ok(value) +} + + impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, 'x> { #[inline] @@ -565,6 +596,30 @@ struct CacheEncoder<'enc, 'tcx, E> definitions: &'enc Definitions, } +impl<'enc, 'tcx, E> CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + /// Encode something with additional information that allows to do some + /// sanity checks when decoding the data again. This method will first + /// encode the specified tag, then the given value, then the number of + /// bytes taken up by tag and value. On decoding, we can then verify that + /// we get the expected tag and read the expected number of bytes. + fn encode_tagged(&mut self, + tag: T, + value: &V) + -> Result<(), E::Error> + { + use ty::codec::TyEncoder; + let start_pos = self.position(); + + tag.encode(self)?; + value.encode(self)?; + + let end_pos = self.position(); + ((end_pos - start_pos) as u64).encode(self) + } +} + impl<'enc, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { @@ -644,3 +699,4 @@ impl<'enc, 'tcx, E> Encoder for CacheEncoder<'enc, 'tcx, E> emit_str(&str); } } + From 4bfab89aa258f25ceb8cc4973127fceaa5367ec3 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Nov 2017 16:15:45 +0100 Subject: [PATCH 10/21] incr.comp.: Store the query result index which records where query results can be found in the cached. 
--- src/librustc/ty/maps/on_disk_cache.rs | 152 ++++++++++++++++++++--- src/librustc_incremental/persist/load.rs | 2 +- 2 files changed, 134 insertions(+), 20 deletions(-) diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 121b81111f217..6b0cc426b52aa 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -20,7 +20,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, SpecializedDecoder, SpecializedEncoder, - UseSpecializedDecodable}; + UseSpecializedDecodable, UseSpecializedEncodable}; use session::{CrateDisambiguator, Session}; use std::borrow::Cow; use std::cell::RefCell; @@ -30,7 +30,7 @@ use syntax::ast::NodeId; use syntax::codemap::{CodeMap, StableFilemapId}; use syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP}; use ty; -use ty::codec::{self as ty_codec, TyDecoder}; +use ty::codec::{self as ty_codec, TyDecoder, TyEncoder}; use ty::context::TyCtxt; use ty::subst::Substs; @@ -38,12 +38,17 @@ use ty::subst::Substs; // basically random numbers. const PREV_DIAGNOSTICS_TAG: u64 = 0x1234_5678_A1A1_A1A1; const DEF_PATH_TABLE_TAG: u64 = 0x1234_5678_B2B2_B2B2; +const QUERY_RESULT_INDEX_TAG: u64 = 0x1234_5678_C3C3_C3C3; /// `OnDiskCache` provides an interface to incr. comp. data cached from the /// previous compilation session. This data will eventually include the results /// of a few selected queries (like `typeck_tables_of` and `mir_optimized`) and /// any diagnostics that have been emitted during a query. pub struct OnDiskCache<'sess> { + + // The complete cache data in serialized form. + serialized_data: Vec, + // The diagnostics emitted during the previous compilation session. prev_diagnostics: FxHashMap>, @@ -56,8 +61,12 @@ pub struct OnDiskCache<'sess> { cnum_map: RefCell>>>, prev_def_path_tables: Vec, - _prev_filemap_starts: BTreeMap, + prev_filemap_starts: BTreeMap, codemap: &'sess CodeMap, + + // A map from dep-node to the position of the cached query result in + // `serialized_data`. + query_result_index: FxHashMap, } // This type is used only for (de-)serialization. @@ -68,26 +77,25 @@ struct Header { } type EncodedPrevDiagnostics = Vec<(SerializedDepNodeIndex, Vec)>; +type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, usize)>; impl<'sess> OnDiskCache<'sess> { /// Create a new OnDiskCache instance from the serialized data in `data`. - /// Note that the current implementation (which only deals with diagnostics - /// so far) will eagerly deserialize the complete cache. Once we are - /// dealing with larger amounts of data (i.e. cached query results), - /// deserialization will need to happen lazily. - pub fn new(sess: &'sess Session, data: &[u8], start_pos: usize) -> OnDiskCache<'sess> { + pub fn new(sess: &'sess Session, data: Vec, start_pos: usize) -> OnDiskCache<'sess> { debug_assert!(sess.opts.incremental.is_some()); - let mut decoder = opaque::Decoder::new(&data[..], start_pos); - - // Decode the header - let header = Header::decode(&mut decoder).unwrap(); + let (header, post_header_pos) = { + let mut decoder = opaque::Decoder::new(&data[..], start_pos); + let header = Header::decode(&mut decoder) + .expect("Error while trying to decode incr. comp. 
cache header."); + (header, decoder.position()) + }; - let (prev_diagnostics, prev_def_path_tables) = { + let (prev_diagnostics, prev_def_path_tables, query_result_index) = { let mut decoder = CacheDecoder { tcx: None, - opaque: decoder, + opaque: opaque::Decoder::new(&data[..], post_header_pos), codemap: sess.codemap(), prev_filemap_starts: &header.prev_filemap_starts, cnum_map: &IndexVec::new(), @@ -100,38 +108,56 @@ impl<'sess> OnDiskCache<'sess> { decode_tagged(&mut decoder, PREV_DIAGNOSTICS_TAG) .expect("Error while trying to decode previous session \ diagnostics from incr. comp. cache."); - diagnostics.into_iter().collect() }; // Decode DefPathTables let prev_def_path_tables: Vec = decode_tagged(&mut decoder, DEF_PATH_TABLE_TAG) - .expect("Error while trying to decode cached DefPathTables"); + .expect("Error while trying to decode cached DefPathTables."); + + // Decode the *position* of the query result index + let query_result_index_pos = { + let pos_pos = data.len() - IntEncodedWithFixedSize::ENCODED_SIZE; + decoder.with_position(pos_pos, |decoder| { + IntEncodedWithFixedSize::decode(decoder) + }).expect("Error while trying to decode query result index position.") + .0 as usize + }; - (prev_diagnostics, prev_def_path_tables) + // Decode the query result index itself + let query_result_index: EncodedQueryResultIndex = + decoder.with_position(query_result_index_pos, |decoder| { + decode_tagged(decoder, QUERY_RESULT_INDEX_TAG) + }).expect("Error while trying to decode query result index."); + + (prev_diagnostics, prev_def_path_tables, query_result_index) }; OnDiskCache { + serialized_data: data, prev_diagnostics, - _prev_filemap_starts: header.prev_filemap_starts, + prev_filemap_starts: header.prev_filemap_starts, prev_cnums: header.prev_cnums, cnum_map: RefCell::new(None), prev_def_path_tables, codemap: sess.codemap(), current_diagnostics: RefCell::new(FxHashMap()), + query_result_index: query_result_index.into_iter().collect(), } } pub fn new_empty(codemap: &'sess CodeMap) -> OnDiskCache<'sess> { OnDiskCache { + serialized_data: Vec::new(), prev_diagnostics: FxHashMap(), - _prev_filemap_starts: BTreeMap::new(), + prev_filemap_starts: BTreeMap::new(), prev_cnums: vec![], cnum_map: RefCell::new(None), prev_def_path_tables: Vec::new(), codemap, current_diagnostics: RefCell::new(FxHashMap()), + query_result_index: FxHashMap(), } } @@ -201,6 +227,20 @@ impl<'sess> OnDiskCache<'sess> { encoder.encode_tagged(DEF_PATH_TABLE_TAG, &def_path_tables)?; + + // Encode query results + let query_result_index = EncodedQueryResultIndex::new(); + // ... we don't encode anything yet, actually + + + // Encode query result index + let query_result_index_pos = encoder.position() as u64; + encoder.encode_tagged(QUERY_RESULT_INDEX_TAG, &query_result_index)?; + + // Encode the position of the query result index as the last 8 bytes of + // file so we know where to look for it. 
+ IntEncodedWithFixedSize(query_result_index_pos).encode(&mut encoder)?; + return Ok(()); fn sorted_cnums_including_local_crate(cstore: &CrateStore) -> Vec { @@ -231,6 +271,38 @@ impl<'sess> OnDiskCache<'sess> { debug_assert!(prev.is_none()); } + pub fn load_query_result<'a, 'tcx, T>(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + dep_node_index: SerializedDepNodeIndex) + -> T + where T: Decodable + { + let pos = self.query_result_index[&dep_node_index]; + + let mut cnum_map = self.cnum_map.borrow_mut(); + if cnum_map.is_none() { + *cnum_map = Some(Self::compute_cnum_map(tcx, &self.prev_cnums[..])); + } + + let mut decoder = CacheDecoder { + tcx: Some(tcx), + opaque: opaque::Decoder::new(&self.serialized_data[..], pos), + codemap: self.codemap, + prev_filemap_starts: &self.prev_filemap_starts, + cnum_map: cnum_map.as_ref().unwrap(), + prev_def_path_tables: &self.prev_def_path_tables, + }; + + match decode_tagged(&mut decoder, dep_node_index) { + Ok(value) => { + value + } + Err(e) => { + bug!("Could not decode cached query result: {}", e) + } + } + } + /// Store a diagnostic emitted during computation of an anonymous query. /// Since many anonymous queries can share the same `DepNode`, we aggregate /// them -- as opposed to regular queries where we assume that there is a @@ -700,3 +772,45 @@ impl<'enc, 'tcx, E> Encoder for CacheEncoder<'enc, 'tcx, E> } } +// An integer that will always encode to 8 bytes. +struct IntEncodedWithFixedSize(u64); + +impl IntEncodedWithFixedSize { + pub const ENCODED_SIZE: usize = 8; +} + +impl UseSpecializedEncodable for IntEncodedWithFixedSize {} +impl UseSpecializedDecodable for IntEncodedWithFixedSize {} + +impl<'enc, 'tcx, E> SpecializedEncoder +for CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> { + let start_pos = self.position(); + for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE { + ((x.0 >> i * 8) as u8).encode(self)?; + } + let end_pos = self.position(); + assert_eq!((end_pos - start_pos), IntEncodedWithFixedSize::ENCODED_SIZE); + Ok(()) + } +} + +impl<'a, 'tcx, 'x> SpecializedDecoder +for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result { + let mut value: u64 = 0; + let start_pos = self.position(); + + for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE { + let byte: u8 = Decodable::decode(self)?; + value |= (byte as u64) << (i * 8); + } + + let end_pos = self.position(); + assert_eq!((end_pos - start_pos), IntEncodedWithFixedSize::ENCODED_SIZE); + + Ok(IntEncodedWithFixedSize(value)) + } +} diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs index 624a9ed930ad0..44111e8af0945 100644 --- a/src/librustc_incremental/persist/load.rs +++ b/src/librustc_incremental/persist/load.rs @@ -207,7 +207,7 @@ pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess } if let Some((bytes, start_pos)) = load_data(sess, &query_cache_path(sess)) { - OnDiskCache::new(sess, &bytes[..], start_pos) + OnDiskCache::new(sess, bytes, start_pos) } else { OnDiskCache::new_empty(sess.codemap()) } From 0b1438307e9357904ecb5603023414e554dbeb26 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Nov 2017 17:00:44 +0100 Subject: [PATCH 11/21] incr.comp.: Add 'tcx to QueryDescription. 
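The change itself is mechanical: the lifetime parameter moves from the individual impls onto the trait, so that later patches can add methods whose signatures mention 'tcx types. Reduced to a toy example (names simplified, and without the `default fn` specialization that config.rs uses):

    trait QueryConfig {
        type Key;
        type Value;
    }

    // Before this patch the trait was `trait QueryDescription: QueryConfig`
    // and only the impls carried a lifetime. Moving 'tcx onto the trait lets
    // methods added later take or return types borrowed for 'tcx.
    trait QueryDescription<'tcx>: QueryConfig {
        fn describe(key: &Self::Key) -> String;
    }

    struct TypeOfQuery;

    impl QueryConfig for TypeOfQuery {
        type Key = String;
        type Value = ();
    }

    impl<'tcx> QueryDescription<'tcx> for TypeOfQuery {
        fn describe(key: &String) -> String {
            format!("processing `{}`", key)
        }
    }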
--- src/librustc/ty/maps/config.rs | 168 +++++++++++++++---------------- src/librustc/ty/maps/plumbing.rs | 10 +- 2 files changed, 89 insertions(+), 89 deletions(-) diff --git a/src/librustc/ty/maps/config.rs b/src/librustc/ty/maps/config.rs index deaafd1efed45..2d979dbe7804a 100644 --- a/src/librustc/ty/maps/config.rs +++ b/src/librustc/ty/maps/config.rs @@ -23,11 +23,11 @@ pub trait QueryConfig { type Value; } -pub(super) trait QueryDescription: QueryConfig { +pub(super) trait QueryDescription<'tcx>: QueryConfig { fn describe(tcx: TyCtxt, key: Self::Key) -> String; } -impl> QueryDescription for M { +impl<'tcx, M: QueryConfig> QueryDescription<'tcx> for M { default fn describe(tcx: TyCtxt, def_id: DefId) -> String { if !tcx.sess.verbose() { format!("processing `{}`", tcx.item_path_str(def_id)) @@ -38,50 +38,50 @@ impl> QueryDescription for M { } } -impl<'tcx> QueryDescription for queries::is_copy_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_copy_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing whether `{}` is `Copy`", env.value) } } -impl<'tcx> QueryDescription for queries::is_sized_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_sized_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing whether `{}` is `Sized`", env.value) } } -impl<'tcx> QueryDescription for queries::is_freeze_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_freeze_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing whether `{}` is freeze", env.value) } } -impl<'tcx> QueryDescription for queries::needs_drop_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::needs_drop_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing whether `{}` needs drop", env.value) } } -impl<'tcx> QueryDescription for queries::layout_raw<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::layout_raw<'tcx> { fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String { format!("computing layout of `{}`", env.value) } } -impl<'tcx> QueryDescription for queries::super_predicates_of<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::super_predicates_of<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("computing the supertraits of `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::erase_regions_ty<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::erase_regions_ty<'tcx> { fn describe(_tcx: TyCtxt, ty: Ty<'tcx>) -> String { format!("erasing regions from `{:?}`", ty) } } -impl<'tcx> QueryDescription for queries::type_param_predicates<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::type_param_predicates<'tcx> { fn describe(tcx: TyCtxt, (_, def_id): (DefId, DefId)) -> String { let id = tcx.hir.as_local_node_id(def_id).unwrap(); format!("computing the bounds for type parameter `{}`", @@ -89,451 +89,451 @@ impl<'tcx> QueryDescription for queries::type_param_predicates<'tcx> { } } -impl<'tcx> QueryDescription for queries::coherent_trait<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::coherent_trait<'tcx> { fn describe(tcx: TyCtxt, (_, def_id): (CrateNum, DefId)) -> String { format!("coherence checking all impls of trait `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::crate_inherent_impls<'tcx> { +impl<'tcx> QueryDescription<'tcx> for 
queries::crate_inherent_impls<'tcx> { fn describe(_: TyCtxt, k: CrateNum) -> String { format!("all inherent impls defined in crate `{:?}`", k) } } -impl<'tcx> QueryDescription for queries::crate_inherent_impls_overlap_check<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::crate_inherent_impls_overlap_check<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("check for overlap between inherent impls defined in this crate") } } -impl<'tcx> QueryDescription for queries::crate_variances<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::crate_variances<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("computing the variances for items in this crate") } } -impl<'tcx> QueryDescription for queries::mir_shims<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::mir_shims<'tcx> { fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String { format!("generating MIR shim for `{}`", tcx.item_path_str(def.def_id())) } } -impl<'tcx> QueryDescription for queries::privacy_access_levels<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::privacy_access_levels<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("privacy access levels") } } -impl<'tcx> QueryDescription for queries::typeck_item_bodies<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::typeck_item_bodies<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("type-checking all item bodies") } } -impl<'tcx> QueryDescription for queries::reachable_set<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::reachable_set<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("reachability") } } -impl<'tcx> QueryDescription for queries::const_eval<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::const_eval<'tcx> { fn describe(tcx: TyCtxt, key: ty::ParamEnvAnd<'tcx, (DefId, &'tcx Substs<'tcx>)>) -> String { format!("const-evaluating `{}`", tcx.item_path_str(key.value.0)) } } -impl<'tcx> QueryDescription for queries::mir_keys<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::mir_keys<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { format!("getting a list of all mir_keys") } } -impl<'tcx> QueryDescription for queries::symbol_name<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::symbol_name<'tcx> { fn describe(_tcx: TyCtxt, instance: ty::Instance<'tcx>) -> String { format!("computing the symbol for `{}`", instance) } } -impl<'tcx> QueryDescription for queries::describe_def<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::describe_def<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("describe_def") } } -impl<'tcx> QueryDescription for queries::def_span<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::def_span<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("def_span") } } -impl<'tcx> QueryDescription for queries::lookup_stability<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::lookup_stability<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("stability") } } -impl<'tcx> QueryDescription for queries::lookup_deprecation_entry<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::lookup_deprecation_entry<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("deprecation") } } -impl<'tcx> QueryDescription for queries::item_attrs<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::item_attrs<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("item_attrs") } } -impl<'tcx> QueryDescription for queries::is_exported_symbol<'tcx> { +impl<'tcx> QueryDescription<'tcx> for 
queries::is_exported_symbol<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("is_exported_symbol") } } -impl<'tcx> QueryDescription for queries::fn_arg_names<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::fn_arg_names<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("fn_arg_names") } } -impl<'tcx> QueryDescription for queries::impl_parent<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::impl_parent<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("impl_parent") } } -impl<'tcx> QueryDescription for queries::trait_of_item<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::trait_of_item<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { bug!("trait_of_item") } } -impl<'tcx> QueryDescription for queries::item_body_nested_bodies<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::item_body_nested_bodies<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("nested item bodies of `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::const_is_rvalue_promotable_to_static<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::const_is_rvalue_promotable_to_static<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("const checking if rvalue is promotable to static `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::rvalue_promotable_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::rvalue_promotable_map<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("checking which parts of `{}` are promotable to static", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::is_mir_available<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_mir_available<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("checking if item is mir available: `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::trans_fulfill_obligation<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::trans_fulfill_obligation<'tcx> { fn describe(tcx: TyCtxt, key: (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>)) -> String { format!("checking if `{}` fulfills its obligations", tcx.item_path_str(key.1.def_id())) } } -impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::trait_impls_of<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("trait impls of `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_object_safe<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("determine object safety of trait `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::is_const_fn<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_const_fn<'tcx> { fn describe(tcx: TyCtxt, def_id: DefId) -> String { format!("checking if item is const fn: `{}`", tcx.item_path_str(def_id)) } } -impl<'tcx> QueryDescription for queries::dylib_dependency_formats<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::dylib_dependency_formats<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { "dylib dependency formats of crate".to_string() } } -impl<'tcx> QueryDescription for queries::is_panic_runtime<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_panic_runtime<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { "checking if the crate is_panic_runtime".to_string() } } -impl<'tcx> QueryDescription for 
queries::is_compiler_builtins<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_compiler_builtins<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { "checking if the crate is_compiler_builtins".to_string() } } -impl<'tcx> QueryDescription for queries::has_global_allocator<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::has_global_allocator<'tcx> { fn describe(_: TyCtxt, _: CrateNum) -> String { "checking if the crate has_global_allocator".to_string() } } -impl<'tcx> QueryDescription for queries::extern_crate<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::extern_crate<'tcx> { fn describe(_: TyCtxt, _: DefId) -> String { "getting crate's ExternCrateData".to_string() } } -impl<'tcx> QueryDescription for queries::lint_levels<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::lint_levels<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("computing the lint levels for items in this crate") } } -impl<'tcx> QueryDescription for queries::specializes<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::specializes<'tcx> { fn describe(_tcx: TyCtxt, _: (DefId, DefId)) -> String { format!("computing whether impls specialize one another") } } -impl<'tcx> QueryDescription for queries::in_scope_traits_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::in_scope_traits_map<'tcx> { fn describe(_tcx: TyCtxt, _: DefIndex) -> String { format!("traits in scope at a block") } } -impl<'tcx> QueryDescription for queries::is_no_builtins<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_no_builtins<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("test whether a crate has #![no_builtins]") } } -impl<'tcx> QueryDescription for queries::panic_strategy<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::panic_strategy<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("query a crate's configured panic strategy") } } -impl<'tcx> QueryDescription for queries::is_profiler_runtime<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_profiler_runtime<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("query a crate is #![profiler_runtime]") } } -impl<'tcx> QueryDescription for queries::is_sanitizer_runtime<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_sanitizer_runtime<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("query a crate is #![sanitizer_runtime]") } } -impl<'tcx> QueryDescription for queries::exported_symbol_ids<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::exported_symbol_ids<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the exported symbols of a crate") } } -impl<'tcx> QueryDescription for queries::native_libraries<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::native_libraries<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the native libraries of a linked crate") } } -impl<'tcx> QueryDescription for queries::plugin_registrar_fn<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::plugin_registrar_fn<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the plugin registrar for a crate") } } -impl<'tcx> QueryDescription for queries::derive_registrar_fn<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::derive_registrar_fn<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the derive registrar for a crate") } } -impl<'tcx> QueryDescription for queries::crate_disambiguator<'tcx> { +impl<'tcx> 
QueryDescription<'tcx> for queries::crate_disambiguator<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the disambiguator a crate") } } -impl<'tcx> QueryDescription for queries::crate_hash<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::crate_hash<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the hash a crate") } } -impl<'tcx> QueryDescription for queries::original_crate_name<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::original_crate_name<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up the original name a crate") } } -impl<'tcx> QueryDescription for queries::implementations_of_trait<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::implementations_of_trait<'tcx> { fn describe(_tcx: TyCtxt, _: (CrateNum, DefId)) -> String { format!("looking up implementations of a trait in a crate") } } -impl<'tcx> QueryDescription for queries::all_trait_implementations<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::all_trait_implementations<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up all (?) trait implementations") } } -impl<'tcx> QueryDescription for queries::link_args<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::link_args<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up link arguments for a crate") } } -impl<'tcx> QueryDescription for queries::named_region_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::named_region_map<'tcx> { fn describe(_tcx: TyCtxt, _: DefIndex) -> String { format!("looking up a named region") } } -impl<'tcx> QueryDescription for queries::is_late_bound_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::is_late_bound_map<'tcx> { fn describe(_tcx: TyCtxt, _: DefIndex) -> String { format!("testing if a region is late boudn") } } -impl<'tcx> QueryDescription for queries::object_lifetime_defaults_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::object_lifetime_defaults_map<'tcx> { fn describe(_tcx: TyCtxt, _: DefIndex) -> String { format!("looking up lifetime defaults for a region") } } -impl<'tcx> QueryDescription for queries::dep_kind<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::dep_kind<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("fetching what a dependency looks like") } } -impl<'tcx> QueryDescription for queries::crate_name<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::crate_name<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("fetching what a crate is named") } } -impl<'tcx> QueryDescription for queries::get_lang_items<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::get_lang_items<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("calculating the lang items map") } } -impl<'tcx> QueryDescription for queries::defined_lang_items<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::defined_lang_items<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("calculating the lang items defined in a crate") } } -impl<'tcx> QueryDescription for queries::missing_lang_items<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::missing_lang_items<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("calculating the missing lang items in a crate") } } -impl<'tcx> QueryDescription for queries::visible_parent_map<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::visible_parent_map<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { 
format!("calculating the visible parent map") } } -impl<'tcx> QueryDescription for queries::missing_extern_crate_item<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::missing_extern_crate_item<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("seeing if we're missing an `extern crate` item for this crate") } } -impl<'tcx> QueryDescription for queries::used_crate_source<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::used_crate_source<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking at the source for a crate") } } -impl<'tcx> QueryDescription for queries::postorder_cnums<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::postorder_cnums<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("generating a postorder list of CrateNums") } } -impl<'tcx> QueryDescription for queries::maybe_unused_extern_crates<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::maybe_unused_extern_crates<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("looking up all possibly unused extern crates") } } -impl<'tcx> QueryDescription for queries::stability_index<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::stability_index<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("calculating the stability index for the local crate") } } -impl<'tcx> QueryDescription for queries::all_crate_nums<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::all_crate_nums<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("fetching all foreign CrateNum instances") } } -impl<'tcx> QueryDescription for queries::exported_symbols<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::exported_symbols<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("exported_symbols") } } -impl<'tcx> QueryDescription for queries::collect_and_partition_translation_items<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::collect_and_partition_translation_items<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("collect_and_partition_translation_items") } } -impl<'tcx> QueryDescription for queries::codegen_unit<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::codegen_unit<'tcx> { fn describe(_tcx: TyCtxt, _: InternedString) -> String { format!("codegen_unit") } } -impl<'tcx> QueryDescription for queries::compile_codegen_unit<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::compile_codegen_unit<'tcx> { fn describe(_tcx: TyCtxt, _: InternedString) -> String { format!("compile_codegen_unit") } } -impl<'tcx> QueryDescription for queries::output_filenames<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::output_filenames<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("output_filenames") } } -impl<'tcx> QueryDescription for queries::has_clone_closures<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::has_clone_closures<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("seeing if the crate has enabled `Clone` closures") } } -impl<'tcx> QueryDescription for queries::vtable_methods<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::vtable_methods<'tcx> { fn describe(tcx: TyCtxt, key: ty::PolyTraitRef<'tcx> ) -> String { format!("finding all methods for trait {}", tcx.item_path_str(key.def_id())) } } -impl<'tcx> QueryDescription for queries::has_copy_closures<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::has_copy_closures<'tcx> { fn describe(_tcx: TyCtxt, _: CrateNum) -> String { format!("seeing if the crate 
has enabled `Copy` closures") } } -impl<'tcx> QueryDescription for queries::fully_normalize_monormophic_ty<'tcx> { +impl<'tcx> QueryDescription<'tcx> for queries::fully_normalize_monormophic_ty<'tcx> { fn describe(_tcx: TyCtxt, _: Ty) -> String { format!("normalizing types") } diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index f5e1f384d60ea..1096d306a130e 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -25,8 +25,8 @@ use std::marker::PhantomData; use std::mem; use syntax_pos::Span; -pub(super) struct QueryMap { - phantom: PhantomData, +pub(super) struct QueryMap<'tcx, D: QueryDescription<'tcx>> { + phantom: PhantomData<(D, &'tcx ())>, pub(super) map: FxHashMap>, } @@ -46,8 +46,8 @@ impl QueryValue { } } -impl QueryMap { - pub(super) fn new() -> QueryMap { +impl<'tcx, M: QueryDescription<'tcx>> QueryMap<'tcx, M> { + pub(super) fn new() -> QueryMap<'tcx, M> { QueryMap { phantom: PhantomData, map: FxHashMap(), @@ -547,7 +547,7 @@ macro_rules! define_map_struct { pub struct Maps<$tcx> { providers: IndexVec>, query_stack: RefCell)>>, - $($(#[$attr])* $name: RefCell>>,)* + $($(#[$attr])* $name: RefCell>>,)* } }; } From 2c1aeddf272d9370f32dc13025bfb802270efd10 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Nov 2017 19:52:49 +0100 Subject: [PATCH 12/21] incr.comp.: Cache TypeckTables and add -Zincremental-queries flag. --- src/librustc/dep_graph/graph.rs | 6 +++++ src/librustc/dep_graph/prev.rs | 5 ++++ src/librustc/hir/def_id.rs | 2 +- src/librustc/session/config.rs | 2 ++ src/librustc/ty/maps/config.rs | 27 +++++++++++++++++++ src/librustc/ty/maps/on_disk_cache.rs | 38 +++++++++++++++++++++++---- src/librustc/ty/maps/plumbing.rs | 30 +++++++++++++-------- src/tools/compiletest/src/runtest.rs | 3 ++- 8 files changed, 95 insertions(+), 18 deletions(-) diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index 97ac1b256124d..c9205f67f661f 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -327,6 +327,7 @@ impl DepGraph { } } + #[inline] pub fn fingerprint_of(&self, dep_node: &DepNode) -> Fingerprint { match self.fingerprints.borrow().get(dep_node) { Some(&fingerprint) => fingerprint, @@ -340,6 +341,11 @@ impl DepGraph { self.data.as_ref().unwrap().previous.fingerprint_of(dep_node) } + #[inline] + pub fn prev_dep_node_index_of(&self, dep_node: &DepNode) -> SerializedDepNodeIndex { + self.data.as_ref().unwrap().previous.node_to_index(dep_node) + } + /// Indicates that a previous work product exists for `v`. This is /// invoked during initial start-up based on what nodes are clean /// (and what files exist in the incr. directory). diff --git a/src/librustc/dep_graph/prev.rs b/src/librustc/dep_graph/prev.rs index 17001bbb0c38a..6c43b5c5ff197 100644 --- a/src/librustc/dep_graph/prev.rs +++ b/src/librustc/dep_graph/prev.rs @@ -44,6 +44,11 @@ impl PreviousDepGraph { self.data.nodes[dep_node_index].0 } + #[inline] + pub fn node_to_index(&self, dep_node: &DepNode) -> SerializedDepNodeIndex { + self.index[dep_node] + } + #[inline] pub fn fingerprint_of(&self, dep_node: &DepNode) -> Option { self.index diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index b2eefca7fe232..31730960a3458 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -184,7 +184,7 @@ impl DefIndexAddressSpace { /// A DefId identifies a particular *definition*, by combining a crate /// index and a def index. 
-#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, RustcDecodable, Hash, Copy)] +#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, Hash, Copy)] pub struct DefId { pub krate: CrateNum, pub index: DefIndex, diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index ffb8144e07e5c..2857d50fc87bc 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -1042,6 +1042,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "enable incremental compilation (experimental)"), incremental_cc: bool = (false, parse_bool, [UNTRACKED], "enable cross-crate incremental compilation (even more experimental)"), + incremental_queries: bool = (false, parse_bool, [UNTRACKED], + "enable incremental compilation support for queries (experimental)"), incremental_info: bool = (false, parse_bool, [UNTRACKED], "print high-level information about incremental reuse (or the lack thereof)"), incremental_dump_hash: bool = (false, parse_bool, [UNTRACKED], diff --git a/src/librustc/ty/maps/config.rs b/src/librustc/ty/maps/config.rs index 2d979dbe7804a..066b80cefa4b5 100644 --- a/src/librustc/ty/maps/config.rs +++ b/src/librustc/ty/maps/config.rs @@ -8,6 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use dep_graph::SerializedDepNodeIndex; use hir::def_id::{CrateNum, DefId, DefIndex}; use ty::{self, Ty, TyCtxt}; use ty::maps::queries; @@ -25,6 +26,16 @@ pub trait QueryConfig { pub(super) trait QueryDescription<'tcx>: QueryConfig { fn describe(tcx: TyCtxt, key: Self::Key) -> String; + + fn cache_on_disk(_: Self::Key) -> bool { + false + } + + fn load_from_disk<'a>(_: TyCtxt<'a, 'tcx, 'tcx>, + _: SerializedDepNodeIndex) + -> Self::Value { + bug!("QueryDescription::load_from_disk() called for unsupport query.") + } } impl<'tcx, M: QueryConfig> QueryDescription<'tcx> for M { @@ -538,3 +549,19 @@ impl<'tcx> QueryDescription<'tcx> for queries::fully_normalize_monormophic_ty<'t format!("normalizing types") } } + +impl<'tcx> QueryDescription<'tcx> for queries::typeck_tables_of<'tcx> { + #[inline] + fn cache_on_disk(def_id: Self::Key) -> bool { + def_id.is_local() + } + + fn load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex) + -> Self::Value { + let typeck_tables: ty::TypeckTables<'tcx> = tcx.on_disk_query_result_cache + .load_query_result(tcx, id); + tcx.alloc_tables(typeck_tables) + } +} + diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 6b0cc426b52aa..d325d1437fc34 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -32,6 +32,7 @@ use syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP}; use ty; use ty::codec::{self as ty_codec, TyDecoder, TyEncoder}; use ty::context::TyCtxt; +use ty::maps::config::QueryDescription; use ty::subst::Substs; // Some magic values used for verifying that encoding and decoding. These are @@ -229,9 +230,22 @@ impl<'sess> OnDiskCache<'sess> { // Encode query results - let query_result_index = EncodedQueryResultIndex::new(); - // ... 
we don't encode anything yet, actually + let mut query_result_index = EncodedQueryResultIndex::new(); + // Encode TypeckTables + for (def_id, entry) in tcx.maps.typeck_tables_of.borrow().map.iter() { + if ty::maps::queries::typeck_tables_of::cache_on_disk(*def_id) { + let dep_node = SerializedDepNodeIndex::new(entry.index.index()); + + // Record position of the cache entry + query_result_index.push((dep_node, encoder.position())); + + // Encode the type check tables with the SerializedDepNodeIndex + // as tag. + let typeck_tables: &ty::TypeckTables<'gcx> = &entry.value; + encoder.encode_tagged(dep_node, typeck_tables)?; + } + } // Encode query result index let query_result_index_pos = encoder.position() as u64; @@ -522,9 +536,7 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { fn specialized_decode(&mut self) -> Result { - let cnum = CrateNum::from_u32(u32::decode(self)?); - let mapped = self.map_encoded_cnum_to_current(cnum); - Ok(mapped) + ty_codec::decode_cnum(self) } } @@ -576,6 +588,8 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> .as_ref() .unwrap()[&def_path_hash]; + debug_assert!(def_id.is_local()); + // The ItemLocalId needs no remapping. let local_id = hir::ItemLocalId::decode(self)?; @@ -721,6 +735,20 @@ impl<'enc, 'tcx, E> SpecializedEncoder> } } +impl<'enc, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, id: &hir::HirId) -> Result<(), Self::Error> { + let hir::HirId { + owner, + local_id, + } = *id; + + owner.encode(self)?; + local_id.encode(self) + } +} + // NodeIds are not stable across compilation sessions, so we store them in their // HirId representation. This allows use to map them to the current NodeId. impl<'enc, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'tcx, E> diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index 1096d306a130e..2f8f724edad1f 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -379,18 +379,26 @@ macro_rules! define_maps { { debug_assert!(tcx.dep_graph.is_green(dep_node_index)); - // We don't do any caching yet, so recompute. - // The diagnostics for this query have already been promoted to - // the current session during try_mark_green(), so we can ignore - // them here. - let (result, _) = tcx.cycle_check(span, Query::$name(key), || { - tcx.sess.diagnostic().track_diagnostics(|| { - // The dep-graph for this computation is already in place - tcx.dep_graph.with_ignore(|| { - Self::compute_result(tcx, key) + let result = if tcx.sess.opts.debugging_opts.incremental_queries && + Self::cache_on_disk(key) { + let prev_dep_node_index = + tcx.dep_graph.prev_dep_node_index_of(dep_node); + Self::load_from_disk(tcx.global_tcx(), prev_dep_node_index) + } else { + let (result, _ ) = tcx.cycle_check(span, Query::$name(key), || { + // The diagnostics for this query have already been + // promoted to the current session during + // try_mark_green(), so we can ignore them here. + tcx.sess.diagnostic().track_diagnostics(|| { + // The dep-graph for this computation is already in + // place + tcx.dep_graph.with_ignore(|| { + Self::compute_result(tcx, key) + }) }) - }) - })?; + })?; + result + }; // If -Zincremental-verify-ich is specified, re-hash results from // the cache and make sure that they have the expected fingerprint. 
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index 80ca0afe72b50..8d94039c594f8 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -1387,6 +1387,7 @@ actual:\n\ if let Some(ref incremental_dir) = self.props.incremental_dir { rustc.args(&["-Z", &format!("incremental={}", incremental_dir.display())]); rustc.args(&["-Z", "incremental-verify-ich"]); + rustc.args(&["-Z", "incremental-queries"]); } match self.config.mode { @@ -2614,4 +2615,4 @@ fn read2_abbreviated(mut child: Child) -> io::Result { stdout: stdout.into_bytes(), stderr: stderr.into_bytes(), }) -} \ No newline at end of file +} From 279b6df1f2355b0b9af5993f38fd9524d74c320d Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Wed, 15 Nov 2017 14:18:00 +0100 Subject: [PATCH 13/21] incr.comp.: Refactor query cache serialization to be more re-usable. --- src/librustc/ty/context.rs | 2 +- src/librustc/ty/maps/on_disk_cache.rs | 53 ++++++++++++++++++--------- src/librustc/ty/maps/plumbing.rs | 14 ++++++- 3 files changed, 49 insertions(+), 20 deletions(-) diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 512860a02b056..1395f37722e2d 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1313,7 +1313,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { -> Result<(), E::Error> where E: ty::codec::TyEncoder { - self.on_disk_query_result_cache.serialize(self, self.cstore, encoder) + self.on_disk_query_result_cache.serialize(self.global_tcx(), self.cstore, encoder) } } diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index d325d1437fc34..98727b5d10d1d 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -32,7 +32,6 @@ use syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP}; use ty; use ty::codec::{self as ty_codec, TyDecoder, TyEncoder}; use ty::context::TyCtxt; -use ty::maps::config::QueryDescription; use ty::subst::Substs; // Some magic values used for verifying that encoding and decoding. These are @@ -162,11 +161,11 @@ impl<'sess> OnDiskCache<'sess> { } } - pub fn serialize<'a, 'gcx, 'lcx, E>(&self, - tcx: TyCtxt<'a, 'gcx, 'lcx>, - cstore: &CrateStore, - encoder: &mut E) - -> Result<(), E::Error> + pub fn serialize<'a, 'tcx, E>(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + cstore: &CrateStore, + encoder: &mut E) + -> Result<(), E::Error> where E: ty_codec::TyEncoder { // Serializing the DepGraph should not modify it: @@ -232,19 +231,13 @@ impl<'sess> OnDiskCache<'sess> { // Encode query results let mut query_result_index = EncodedQueryResultIndex::new(); - // Encode TypeckTables - for (def_id, entry) in tcx.maps.typeck_tables_of.borrow().map.iter() { - if ty::maps::queries::typeck_tables_of::cache_on_disk(*def_id) { - let dep_node = SerializedDepNodeIndex::new(entry.index.index()); + { + use ty::maps::queries::*; + let enc = &mut encoder; + let qri = &mut query_result_index; - // Record position of the cache entry - query_result_index.push((dep_node, encoder.position())); - - // Encode the type check tables with the SerializedDepNodeIndex - // as tag. 
- let typeck_tables: &ty::TypeckTables<'gcx> = &entry.value; - encoder.encode_tagged(dep_node, typeck_tables)?; - } + // Encode TypeckTables + encode_query_results::(tcx, enc, qri)?; } // Encode query result index @@ -842,3 +835,27 @@ for CacheDecoder<'a, 'tcx, 'x> { Ok(IntEncodedWithFixedSize(value)) } } + +fn encode_query_results<'x, 'a, 'tcx, Q, E>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + encoder: &mut CacheEncoder<'x, 'tcx, E>, + query_result_index: &mut EncodedQueryResultIndex) + -> Result<(), E::Error> + where Q: super::plumbing::GetCacheInternal<'tcx>, + E: 'x + TyEncoder, + Q::Value: Encodable, +{ + for (key, entry) in Q::get_cache_internal(tcx).map.iter() { + if Q::cache_on_disk(key.clone()) { + let dep_node = SerializedDepNodeIndex::new(entry.index.index()); + + // Record position of the cache entry + query_result_index.push((dep_node, encoder.position())); + + // Encode the type check tables with the SerializedDepNodeIndex + // as tag. + encoder.encode_tagged(dep_node, &entry.value)?; + } + } + + Ok(()) +} diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index 2f8f724edad1f..1ca8fc6eb480f 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -20,7 +20,7 @@ use ty::maps::config::QueryDescription; use ty::item_path; use rustc_data_structures::fx::{FxHashMap}; -use std::cell::RefMut; +use std::cell::{Ref, RefMut}; use std::marker::PhantomData; use std::mem; use syntax_pos::Span; @@ -55,6 +55,11 @@ impl<'tcx, M: QueryDescription<'tcx>> QueryMap<'tcx, M> { } } +pub(super) trait GetCacheInternal<'tcx>: QueryDescription<'tcx> + Sized { + fn get_cache_internal<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Ref<'a, QueryMap<'tcx, Self>>; +} + pub(super) struct CycleError<'a, 'tcx: 'a> { span: Span, cycle: RefMut<'a, [(Span, Query<'tcx>)]>, @@ -242,6 +247,13 @@ macro_rules! define_maps { type Value = $V; } + impl<$tcx> GetCacheInternal<$tcx> for queries::$name<$tcx> { + fn get_cache_internal<'a>(tcx: TyCtxt<'a, $tcx, $tcx>) + -> ::std::cell::Ref<'a, QueryMap<$tcx, Self>> { + tcx.maps.$name.borrow() + } + } + impl<'a, $tcx, 'lcx> queries::$name<$tcx> { #[allow(unused)] From 13582c6b58bc42fee3d94808b281f34a25fc237a Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Wed, 15 Nov 2017 15:21:19 +0100 Subject: [PATCH 14/21] incr.comp.: Add missing [input] annotation for DepNode::MaybeUnusedExternCrates --- src/librustc/dep_graph/dep_node.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index b391b353632aa..523a244c8361b 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -618,7 +618,7 @@ define_dep_nodes!( <'tcx> [input] Freevars(DefId), [input] MaybeUnusedTraitImport(DefId), - [] MaybeUnusedExternCrates, + [input] MaybeUnusedExternCrates, [] StabilityIndex, [input] AllCrateNums, [] ExportedSymbols(CrateNum), From 2f50e626e1e0ccd1a5eff407c92e87c987f405e6 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Wed, 15 Nov 2017 15:31:59 +0100 Subject: [PATCH 15/21] incr.comp.: Only save and load query result cache when -Zincremental-queries is specified. 
--- src/librustc_incremental/persist/load.rs | 3 ++- src/librustc_incremental/persist/save.rs | 12 +++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs index 44111e8af0945..e4bc6b7339efc 100644 --- a/src/librustc_incremental/persist/load.rs +++ b/src/librustc_incremental/persist/load.rs @@ -202,7 +202,8 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph { } pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess> { - if sess.opts.incremental.is_none() { + if sess.opts.incremental.is_none() || + !sess.opts.debugging_opts.incremental_queries { return OnDiskCache::new_empty(sess.codemap()); } diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs index 711550c27d16f..63038f1b93a3c 100644 --- a/src/librustc_incremental/persist/save.rs +++ b/src/librustc_incremental/persist/save.rs @@ -69,11 +69,13 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, |e| encode_query_cache(tcx, e)); }); - time(sess.time_passes(), "persist dep-graph", || { - save_in(sess, - dep_graph_path(sess), - |e| encode_dep_graph(tcx, e)); - }); + if tcx.sess.opts.debugging_opts.incremental_queries { + time(sess.time_passes(), "persist dep-graph", || { + save_in(sess, + dep_graph_path(sess), + |e| encode_dep_graph(tcx, e)); + }); + } dirty_clean::check_dirty_clean_annotations(tcx); dirty_clean::check_dirty_clean_metadata(tcx, From 24e54ddefadd3be0fa32f033f9b744cac6a11af6 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Thu, 16 Nov 2017 14:04:01 +0100 Subject: [PATCH 16/21] Introduce LocalDefId which provides a type-level guarantee that the DefId is from the local crate. --- src/librustc/hir/def_id.rs | 40 ++++++++++++++++++++++- src/librustc/hir/map/mod.rs | 12 ++++++- src/librustc/ich/impls_hir.rs | 20 +++++++++++- src/librustc/middle/expr_use_visitor.rs | 4 +-- src/librustc/middle/mem_categorization.rs | 10 +++--- src/librustc/ty/context.rs | 2 +- src/librustc/ty/mod.rs | 4 +-- src/librustc_borrowck/borrowck/mod.rs | 10 +++--- src/librustc_mir/build/mod.rs | 6 ++-- src/librustc_mir/hair/cx/expr.rs | 5 +-- src/librustc_typeck/check/upvar.rs | 16 ++++----- 11 files changed, 98 insertions(+), 31 deletions(-) diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index 31730960a3458..f34022993de23 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -92,6 +92,7 @@ impl serialize::UseSpecializedDecodable for CrateNum { /// don't have to care about these ranges. newtype_index!(DefIndex { + ENCODABLE = custom DEBUG_FORMAT = custom, /// The start of the "high" range of DefIndexes. @@ -208,13 +209,20 @@ impl fmt::Debug for DefId { impl DefId { /// Make a local `DefId` with the given index. 
+ #[inline] pub fn local(index: DefIndex) -> DefId { DefId { krate: LOCAL_CRATE, index: index } } - pub fn is_local(&self) -> bool { + #[inline] + pub fn is_local(self) -> bool { self.krate == LOCAL_CRATE } + + #[inline] + pub fn to_local(self) -> LocalDefId { + LocalDefId::from_def_id(self) + } } impl serialize::UseSpecializedEncodable for DefId { @@ -242,3 +250,33 @@ impl serialize::UseSpecializedDecodable for DefId { }) } } + + +#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct LocalDefId(DefIndex); + +impl LocalDefId { + + #[inline] + pub fn from_def_id(def_id: DefId) -> LocalDefId { + assert!(def_id.is_local()); + LocalDefId(def_id.index) + } + + #[inline] + pub fn to_def_id(self) -> DefId { + DefId { + krate: LOCAL_CRATE, + index: self.0 + } + } +} + +impl fmt::Debug for LocalDefId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.to_def_id().fmt(f) + } +} + +impl serialize::UseSpecializedEncodable for LocalDefId {} +impl serialize::UseSpecializedDecodable for LocalDefId {} diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 453d30dde7595..d0a7ac392014e 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -17,7 +17,7 @@ pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData, use dep_graph::{DepGraph, DepNode, DepKind, DepNodeIndex}; -use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndexAddressSpace}; +use hir::def_id::{CRATE_DEF_INDEX, DefId, LocalDefId, DefIndexAddressSpace}; use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId, CRATE_NODE_ID}; @@ -359,6 +359,16 @@ impl<'hir> Map<'hir> { self.definitions.as_local_node_id(DefId::local(def_index)).unwrap() } + #[inline] + pub fn local_def_id_to_hir_id(&self, def_id: LocalDefId) -> HirId { + self.definitions.def_index_to_hir_id(def_id.to_def_id().index) + } + + #[inline] + pub fn local_def_id_to_node_id(&self, def_id: LocalDefId) -> NodeId { + self.definitions.as_local_node_id(def_id.to_def_id()).unwrap() + } + fn entry_count(&self) -> usize { self.map.len() } diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs index 6b78cd473be8f..11ac2bf83be3e 100644 --- a/src/librustc/ich/impls_hir.rs +++ b/src/librustc/ich/impls_hir.rs @@ -13,7 +13,7 @@ use hir; use hir::map::DefPathHash; -use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX}; +use hir::def_id::{DefId, LocalDefId, CrateNum, CRATE_DEF_INDEX}; use ich::{StableHashingContext, NodeIdHashingMode}; use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher, StableHasherResult}; @@ -38,6 +38,24 @@ impl<'gcx> ToStableHashKey> for DefId { } } +impl<'gcx> HashStable> for LocalDefId { + #[inline] + fn hash_stable(&self, + hcx: &mut StableHashingContext<'gcx>, + hasher: &mut StableHasher) { + hcx.def_path_hash(self.to_def_id()).hash_stable(hcx, hasher); + } +} + +impl<'gcx> ToStableHashKey> for LocalDefId { + type KeyType = DefPathHash; + + #[inline] + fn to_stable_hash_key(&self, hcx: &StableHashingContext<'gcx>) -> DefPathHash { + hcx.def_path_hash(self.to_def_id()) + } +} + impl<'gcx> HashStable> for CrateNum { #[inline] fn hash_stable(&self, diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 0383d5ca68232..9018b9fe590b2 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -20,7 +20,7 @@ use self::TrackMatchMode::*; use self::OverloadedCallType::*; use hir::def::Def; -use hir::def_id::{DefId}; +use hir::def_id::DefId; use infer::InferCtxt; 
use middle::mem_categorization as mc; use middle::region; @@ -915,7 +915,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { let closure_def_id = self.tcx().hir.local_def_id(closure_expr.id); let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: closure_def_id.index + closure_expr_id: closure_def_id.to_local(), }; let upvar_capture = self.mc.tables.upvar_capture(upvar_id); let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id, diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index fc10406c8cebd..6ea8595533991 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -70,7 +70,7 @@ pub use self::Note::*; use self::Aliasability::*; use middle::region; -use hir::def_id::{DefId, DefIndex}; +use hir::def_id::{DefId, LocalDefId}; use hir::map as hir_map; use infer::InferCtxt; use hir::def::{Def, CtorKind}; @@ -191,7 +191,7 @@ pub type cmt<'tcx> = Rc>; pub enum ImmutabilityBlame<'tcx> { ImmLocal(ast::NodeId), - ClosureEnv(DefIndex), + ClosureEnv(LocalDefId), LocalDeref(ast::NodeId), AdtFieldDeref(&'tcx ty::AdtDef, &'tcx ty::FieldDef) } @@ -758,11 +758,11 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { } }; - let closure_expr_def_index = self.tcx.hir.local_def_id(fn_node_id).index; + let closure_expr_def_id = self.tcx.hir.local_def_id(fn_node_id); let var_hir_id = self.tcx.hir.node_to_hir_id(var_id); let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: closure_expr_def_index + closure_expr_id: closure_expr_def_id.to_local(), }; let var_ty = self.node_ty(var_hir_id)?; @@ -837,7 +837,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { // The environment of a closure is guaranteed to // outlive any bindings introduced in the body of the // closure itself. 
- scope: DefId::local(upvar_id.closure_expr_id), + scope: upvar_id.closure_expr_id.to_def_id(), bound_region: ty::BrEnv })); diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 1395f37722e2d..9daf152dc4216 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -769,7 +769,7 @@ impl<'gcx> HashStable> for TypeckTables<'gcx> { }; let closure_def_id = DefId { krate: local_id_root.krate, - index: closure_expr_id, + index: closure_expr_id.to_def_id().index, }; (hcx.def_path_hash(var_owner_def_id), var_id.local_id, diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 0deababd21829..cf5255dca61f8 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -17,7 +17,7 @@ pub use self::fold::TypeFoldable; use hir::{map as hir_map, FreevarMap, TraitMap}; use hir::def::{Def, CtorKind, ExportMap}; -use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE}; +use hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use hir::map::DefPathData; use ich::StableHashingContext; use middle::const_val::ConstVal; @@ -573,7 +573,7 @@ impl Slice { #[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct UpvarId { pub var_id: hir::HirId, - pub closure_expr_id: DefIndex, + pub closure_expr_id: LocalDefId, } #[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable, Copy)] diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 6be07878487b9..7b09e45fe96e3 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -29,7 +29,7 @@ use rustc::middle::dataflow::BitwiseOperator; use rustc::middle::dataflow::DataFlowOperator; use rustc::middle::dataflow::KillFrom; use rustc::middle::borrowck::BorrowCheckResult; -use rustc::hir::def_id::{DefId, DefIndex}; +use rustc::hir::def_id::{DefId, LocalDefId}; use rustc::middle::expr_use_visitor as euv; use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization::Categorization; @@ -376,9 +376,9 @@ pub enum LoanPathElem<'tcx> { LpInterior(Option, InteriorKind), } -fn closure_to_block(closure_id: DefIndex, +fn closure_to_block(closure_id: LocalDefId, tcx: TyCtxt) -> ast::NodeId { - let closure_id = tcx.hir.def_index_to_node_id(closure_id); + let closure_id = tcx.hir.local_def_id_to_node_id(closure_id); match tcx.hir.get(closure_id) { hir_map::NodeExpr(expr) => match expr.node { hir::ExprClosure(.., body_id, _, _) => { @@ -1101,7 +1101,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } else { "consider changing this closure to take self by mutable reference" }; - let node_id = self.tcx.hir.def_index_to_node_id(id); + let node_id = self.tcx.hir.local_def_id_to_node_id(id); let help_span = self.tcx.hir.span(node_id); self.cannot_act_on_capture_in_sharable_fn(span, prefix, @@ -1297,7 +1297,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { }; if kind == ty::ClosureKind::Fn { let closure_node_id = - self.tcx.hir.def_index_to_node_id(upvar_id.closure_expr_id); + self.tcx.hir.local_def_id_to_node_id(upvar_id.closure_expr_id); db.span_help(self.tcx.hir.span(closure_node_id), "consider changing this closure to take \ self by mutable reference"); diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 2073d49530061..7d624b5c9cf5b 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -13,7 +13,7 @@ use build; use hair::cx::Cx; use hair::LintLevel; use rustc::hir; -use rustc::hir::def_id::DefId; 
+use rustc::hir::def_id::{DefId, LocalDefId}; use rustc::middle::region; use rustc::mir::*; use rustc::mir::transform::MirSource; @@ -414,10 +414,10 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, freevars.iter().map(|fv| { let var_id = fv.var_id(); let var_hir_id = tcx.hir.node_to_hir_id(var_id); - let closure_expr_id = tcx.hir.local_def_id(fn_id).index; + let closure_expr_id = tcx.hir.local_def_id(fn_id); let capture = hir.tables().upvar_capture(ty::UpvarId { var_id: var_hir_id, - closure_expr_id, + closure_expr_id: LocalDefId::from_def_id(closure_expr_id), }); let by_ref = match capture { ty::UpvarCapture::ByValue => false, diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs index f5a53e2aa8eed..798928e7ae7a5 100644 --- a/src/librustc_mir/hair/cx/expr.rs +++ b/src/librustc_mir/hair/cx/expr.rs @@ -20,6 +20,7 @@ use rustc::ty::{self, AdtKind, VariantDef, Ty}; use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow}; use rustc::ty::cast::CastKind as TyCastKind; use rustc::hir; +use rustc::hir::def_id::LocalDefId; impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr { type Output = Expr<'tcx>; @@ -783,7 +784,7 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // point we need an implicit deref let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: closure_def_id.index, + closure_expr_id: LocalDefId::from_def_id(closure_def_id), }; match cx.tables().upvar_capture(upvar_id) { ty::UpvarCapture::ByValue => field_kind, @@ -897,7 +898,7 @@ fn capture_freevar<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, let var_hir_id = cx.tcx.hir.node_to_hir_id(freevar.var_id()); let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: cx.tcx.hir.local_def_id(closure_expr.id).index, + closure_expr_id: cx.tcx.hir.local_def_id(closure_expr.id).to_local(), }; let upvar_capture = cx.tables().upvar_capture(upvar_id); let temp_lifetime = cx.region_scope_tree.temporary_scope(closure_expr.hir_id.local_id); diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs index d179b390a2918..07ed0f5b89003 100644 --- a/src/librustc_typeck/check/upvar.rs +++ b/src/librustc_typeck/check/upvar.rs @@ -50,7 +50,7 @@ use rustc::infer::UpvarRegion; use syntax::ast; use syntax_pos::Span; use rustc::hir; -use rustc::hir::def_id::DefIndex; +use rustc::hir::def_id::LocalDefId; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; use rustc::util::nodemap::FxHashMap; @@ -128,7 +128,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { for freevar in freevars { let upvar_id = ty::UpvarId { var_id: self.tcx.hir.node_to_hir_id(freevar.var_id()), - closure_expr_id: closure_def_id.index, + closure_expr_id: LocalDefId::from_def_id(closure_def_id), }; debug!("seed upvar_id {:?}", upvar_id); @@ -167,7 +167,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Write the adjusted values back into the main tables. if infer_kind { if let Some(kind) = delegate.adjust_closure_kinds - .remove(&closure_def_id.index) { + .remove(&closure_def_id.to_local()) { self.tables .borrow_mut() .closure_kinds_mut() @@ -231,7 +231,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // This may change if abstract return types of some sort are // implemented. 
let tcx = self.tcx; - let closure_def_index = tcx.hir.local_def_id(closure_id).index; + let closure_def_index = tcx.hir.local_def_id(closure_id); tcx.with_freevars(closure_id, |freevars| { freevars.iter().map(|freevar| { @@ -240,7 +240,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let freevar_ty = self.node_ty(var_hir_id); let upvar_id = ty::UpvarId { var_id: var_hir_id, - closure_expr_id: closure_def_index, + closure_expr_id: LocalDefId::from_def_id(closure_def_index), }; let capture = self.tables.borrow().upvar_capture(upvar_id); @@ -263,7 +263,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { struct InferBorrowKind<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, - adjust_closure_kinds: FxHashMap)>, + adjust_closure_kinds: FxHashMap)>, adjust_upvar_captures: ty::UpvarCaptureMap<'tcx>, } @@ -481,7 +481,7 @@ impl<'a, 'gcx, 'tcx> InferBorrowKind<'a, 'gcx, 'tcx> { } fn adjust_closure_kind(&mut self, - closure_id: DefIndex, + closure_id: LocalDefId, new_kind: ty::ClosureKind, upvar_span: Span, var_name: ast::Name) { @@ -490,7 +490,7 @@ impl<'a, 'gcx, 'tcx> InferBorrowKind<'a, 'gcx, 'tcx> { let closure_kind = self.adjust_closure_kinds.get(&closure_id).cloned() .or_else(|| { - let closure_id = self.fcx.tcx.hir.def_index_to_hir_id(closure_id); + let closure_id = self.fcx.tcx.hir.local_def_id_to_hir_id(closure_id); self.fcx.tables.borrow().closure_kinds().get(closure_id).cloned() }); From 2f44ef282e7495c17c3771732e37621e7377d33e Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Thu, 16 Nov 2017 14:09:44 +0100 Subject: [PATCH 17/21] incr.comp.: Encode DefIds as DefPathHashes instead of recomputing those during deserialization. --- src/librustc/ty/maps/on_disk_cache.rs | 138 ++++++++++++-------------- 1 file changed, 65 insertions(+), 73 deletions(-) diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 98727b5d10d1d..209b89fc03fa2 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -11,9 +11,9 @@ use dep_graph::{DepNodeIndex, SerializedDepNodeIndex}; use errors::Diagnostic; use hir; -use hir::def_id::{CrateNum, DefIndex, DefId, RESERVED_FOR_INCR_COMP_CACHE, - LOCAL_CRATE}; -use hir::map::definitions::{Definitions, DefPathTable}; +use hir::def_id::{CrateNum, DefIndex, DefId, LocalDefId, + RESERVED_FOR_INCR_COMP_CACHE, LOCAL_CRATE}; +use hir::map::definitions::DefPathHash; use middle::const_val::ByteArray; use middle::cstore::CrateStore; use rustc_data_structures::fx::FxHashMap; @@ -37,7 +37,6 @@ use ty::subst::Substs; // Some magic values used for verifying that encoding and decoding. These are // basically random numbers. const PREV_DIAGNOSTICS_TAG: u64 = 0x1234_5678_A1A1_A1A1; -const DEF_PATH_TABLE_TAG: u64 = 0x1234_5678_B2B2_B2B2; const QUERY_RESULT_INDEX_TAG: u64 = 0x1234_5678_C3C3_C3C3; /// `OnDiskCache` provides an interface to incr. comp. data cached from the @@ -56,10 +55,8 @@ pub struct OnDiskCache<'sess> { // compilation session. 
current_diagnostics: RefCell>>, - prev_cnums: Vec<(u32, String, CrateDisambiguator)>, cnum_map: RefCell>>>, - prev_def_path_tables: Vec, prev_filemap_starts: BTreeMap, codemap: &'sess CodeMap, @@ -92,14 +89,13 @@ impl<'sess> OnDiskCache<'sess> { (header, decoder.position()) }; - let (prev_diagnostics, prev_def_path_tables, query_result_index) = { + let (prev_diagnostics, query_result_index) = { let mut decoder = CacheDecoder { tcx: None, opaque: opaque::Decoder::new(&data[..], post_header_pos), codemap: sess.codemap(), prev_filemap_starts: &header.prev_filemap_starts, cnum_map: &IndexVec::new(), - prev_def_path_tables: &Vec::new(), }; // Decode Diagnostics @@ -111,11 +107,6 @@ impl<'sess> OnDiskCache<'sess> { diagnostics.into_iter().collect() }; - // Decode DefPathTables - let prev_def_path_tables: Vec = - decode_tagged(&mut decoder, DEF_PATH_TABLE_TAG) - .expect("Error while trying to decode cached DefPathTables."); - // Decode the *position* of the query result index let query_result_index_pos = { let pos_pos = data.len() - IntEncodedWithFixedSize::ENCODED_SIZE; @@ -131,7 +122,7 @@ impl<'sess> OnDiskCache<'sess> { decode_tagged(decoder, QUERY_RESULT_INDEX_TAG) }).expect("Error while trying to decode query result index."); - (prev_diagnostics, prev_def_path_tables, query_result_index) + (prev_diagnostics, query_result_index) }; OnDiskCache { @@ -140,7 +131,6 @@ impl<'sess> OnDiskCache<'sess> { prev_filemap_starts: header.prev_filemap_starts, prev_cnums: header.prev_cnums, cnum_map: RefCell::new(None), - prev_def_path_tables, codemap: sess.codemap(), current_diagnostics: RefCell::new(FxHashMap()), query_result_index: query_result_index.into_iter().collect(), @@ -154,7 +144,6 @@ impl<'sess> OnDiskCache<'sess> { prev_filemap_starts: BTreeMap::new(), prev_cnums: vec![], cnum_map: RefCell::new(None), - prev_def_path_tables: Vec::new(), codemap, current_diagnostics: RefCell::new(FxHashMap()), query_result_index: FxHashMap(), @@ -172,10 +161,10 @@ impl<'sess> OnDiskCache<'sess> { let _in_ignore = tcx.dep_graph.in_ignore(); let mut encoder = CacheEncoder { + tcx, encoder, type_shorthands: FxHashMap(), predicate_shorthands: FxHashMap(), - definitions: tcx.hir.definitions(), }; @@ -212,22 +201,6 @@ impl<'sess> OnDiskCache<'sess> { encoder.encode_tagged(PREV_DIAGNOSTICS_TAG, &diagnostics)?; - // Encode all DefPathTables - let upstream_def_path_tables = tcx.all_crate_nums(LOCAL_CRATE) - .iter() - .map(|&cnum| (cnum, cstore.def_path_table(cnum))) - .collect::>(); - let def_path_tables: Vec<&DefPathTable> = sorted_cnums.into_iter().map(|cnum| { - if cnum == LOCAL_CRATE { - tcx.hir.definitions().def_path_table() - } else { - &*upstream_def_path_tables[&cnum] - } - }).collect(); - - encoder.encode_tagged(DEF_PATH_TABLE_TAG, &def_path_tables)?; - - // Encode query results let mut query_result_index = EncodedQueryResultIndex::new(); @@ -297,7 +270,6 @@ impl<'sess> OnDiskCache<'sess> { codemap: self.codemap, prev_filemap_starts: &self.prev_filemap_starts, cnum_map: cnum_map.as_ref().unwrap(), - prev_def_path_tables: &self.prev_def_path_tables, }; match decode_tagged(&mut decoder, dep_node_index) { @@ -373,7 +345,6 @@ struct CacheDecoder<'a, 'tcx: 'a, 'x> { codemap: &'x CodeMap, prev_filemap_starts: &'x BTreeMap, cnum_map: &'x IndexVec>, - prev_def_path_tables: &'x Vec, } impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { @@ -548,32 +519,24 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { // sessions, to map the old DefId to the new one. 
impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { fn specialized_decode(&mut self) -> Result { - // Decode the unmapped CrateNum - let prev_cnum = CrateNum::default_decode(self)?; - - // Decode the unmapped DefIndex - let def_index = DefIndex::default_decode(self)?; - - // Unmapped CrateNum and DefIndex are valid keys for the *cached* - // DefPathTables, so we use them to look up the DefPathHash. - let def_path_hash = self.prev_def_path_tables[prev_cnum.index()] - .def_path_hash(def_index); + // Load the DefPathHash which is was we encoded the DefId as. + let def_path_hash = DefPathHash::decode(self)?; // Using the DefPathHash, we can lookup the new DefId Ok(self.tcx().def_path_hash_to_def_id.as_ref().unwrap()[&def_path_hash]) } } +impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { + fn specialized_decode(&mut self) -> Result { + Ok(LocalDefId::from_def_id(DefId::decode(self)?)) + } +} + impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { fn specialized_decode(&mut self) -> Result { - // Decode the unmapped DefIndex of the HirId. - let def_index = DefIndex::default_decode(self)?; - - // Use the unmapped DefIndex to look up the DefPathHash in the cached - // DefPathTable. For HirIds we know that we always have to look in the - // *local* DefPathTable. - let def_path_hash = self.prev_def_path_tables[LOCAL_CRATE.index()] - .def_path_hash(def_index); + // Load the DefPathHash which is was we encoded the DefIndex as. + let def_path_hash = DefPathHash::decode(self)?; // Use the DefPathHash to map to the current DefId. let def_id = self.tcx() @@ -666,16 +629,17 @@ for CacheDecoder<'a, 'tcx, 'x> { //- ENCODING ------------------------------------------------------------------- -struct CacheEncoder<'enc, 'tcx, E> - where E: 'enc + ty_codec::TyEncoder +struct CacheEncoder<'enc, 'a, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder, + 'tcx: 'a, { + tcx: TyCtxt<'a, 'tcx, 'tcx>, encoder: &'enc mut E, type_shorthands: FxHashMap, usize>, predicate_shorthands: FxHashMap, usize>, - definitions: &'enc Definitions, } -impl<'enc, 'tcx, E> CacheEncoder<'enc, 'tcx, E> +impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { /// Encode something with additional information that allows to do some @@ -699,7 +663,7 @@ impl<'enc, 'tcx, E> CacheEncoder<'enc, 'tcx, E> } } -impl<'enc, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'tcx, E> +impl<'enc, 'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { fn position(&self) -> usize { @@ -707,7 +671,7 @@ impl<'enc, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'tcx, E> } } -impl<'enc, 'tcx, E> SpecializedEncoder> for CacheEncoder<'enc, 'tcx, E> +impl<'enc, 'a, 'tcx, E> SpecializedEncoder> for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { fn specialized_encode(&mut self, ty: &ty::Ty<'tcx>) -> Result<(), Self::Error> { @@ -716,8 +680,8 @@ impl<'enc, 'tcx, E> SpecializedEncoder> for CacheEncoder<'enc, 'tcx } } -impl<'enc, 'tcx, E> SpecializedEncoder> - for CacheEncoder<'enc, 'tcx, E> +impl<'enc, 'a, 'tcx, E> SpecializedEncoder> + for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { fn specialized_encode(&mut self, @@ -728,7 +692,7 @@ impl<'enc, 'tcx, E> SpecializedEncoder> } } -impl<'enc, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'tcx, E> +impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { fn 
specialized_encode(&mut self, id: &hir::HirId) -> Result<(), Self::Error> { @@ -737,18 +701,46 @@ impl<'enc, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'tcx, local_id, } = *id; - owner.encode(self)?; + let def_path_hash = self.tcx.hir.definitions().def_path_hash(owner); + + def_path_hash.encode(self)?; local_id.encode(self) } } + +impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, id: &DefId) -> Result<(), Self::Error> { + let def_path_hash = self.tcx.def_path_hash(*id); + def_path_hash.encode(self) + } +} + +impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, id: &LocalDefId) -> Result<(), Self::Error> { + id.to_def_id().encode(self) + } +} + +impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + fn specialized_encode(&mut self, _: &DefIndex) -> Result<(), Self::Error> { + bug!("Encoding DefIndex without context.") + } +} + // NodeIds are not stable across compilation sessions, so we store them in their // HirId representation. This allows use to map them to the current NodeId. -impl<'enc, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'tcx, E> +impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { fn specialized_encode(&mut self, node_id: &NodeId) -> Result<(), Self::Error> { - let hir_id = self.definitions.node_to_hir_id(*node_id); + let hir_id = self.tcx.hir.node_to_hir_id(*node_id); hir_id.encode(self) } } @@ -761,7 +753,7 @@ macro_rules! encoder_methods { } } -impl<'enc, 'tcx, E> Encoder for CacheEncoder<'enc, 'tcx, E> +impl<'enc, 'a, 'tcx, E> Encoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { type Error = E::Error; @@ -803,8 +795,8 @@ impl IntEncodedWithFixedSize { impl UseSpecializedEncodable for IntEncodedWithFixedSize {} impl UseSpecializedDecodable for IntEncodedWithFixedSize {} -impl<'enc, 'tcx, E> SpecializedEncoder -for CacheEncoder<'enc, 'tcx, E> +impl<'enc, 'a, 'tcx, E> SpecializedEncoder +for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> { @@ -836,12 +828,12 @@ for CacheDecoder<'a, 'tcx, 'x> { } } -fn encode_query_results<'x, 'a, 'tcx, Q, E>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - encoder: &mut CacheEncoder<'x, 'tcx, E>, - query_result_index: &mut EncodedQueryResultIndex) - -> Result<(), E::Error> +fn encode_query_results<'enc, 'a, 'tcx, Q, E>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + encoder: &mut CacheEncoder<'enc, 'a, 'tcx, E>, + query_result_index: &mut EncodedQueryResultIndex) + -> Result<(), E::Error> where Q: super::plumbing::GetCacheInternal<'tcx>, - E: 'x + TyEncoder, + E: 'enc + TyEncoder, Q::Value: Encodable, { for (key, entry) in Q::get_cache_internal(tcx).map.iter() { From 723028f3086b4d5a8eec95962ac5728af4e271b4 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Thu, 16 Nov 2017 15:26:00 +0100 Subject: [PATCH 18/21] incr.comp.: Remove some code duplication around TyDecoder by factoring things into a macro. 
--- src/librustc/ty/codec.rs | 137 ++++++++++++++++++++++++++ src/librustc/ty/maps/on_disk_cache.rs | 111 +-------------------- src/librustc/ty/mod.rs | 1 + src/librustc_metadata/decoder.rs | 115 +-------------------- 4 files changed, 142 insertions(+), 222 deletions(-) diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs index 164aac303af80..4739ea34e6fc1 100644 --- a/src/librustc/ty/codec.rs +++ b/src/librustc/ty/codec.rs @@ -138,6 +138,7 @@ pub trait TyDecoder<'a, 'tcx: 'a>: Decoder { } } +#[inline] pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result where D: TyDecoder<'a, 'tcx>, 'tcx: 'a, @@ -146,6 +147,7 @@ pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result Ok(decoder.map_encoded_cnum_to_current(cnum)) } +#[inline] pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> where D: TyDecoder<'a, 'tcx>, 'tcx: 'a, @@ -165,6 +167,7 @@ pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> } } +#[inline] pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -188,6 +191,7 @@ pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D) }) } +#[inline] pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx Substs<'tcx>, D::Error> where D: TyDecoder<'a, 'tcx>, 'tcx: 'a, @@ -197,6 +201,7 @@ pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx Substs<'tcx>, Ok(tcx.mk_substs((0..len).map(|_| Decodable::decode(decoder)))?) } +#[inline] pub fn decode_region<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> where D: TyDecoder<'a, 'tcx>, 'tcx: 'a, @@ -204,6 +209,7 @@ pub fn decode_region<'a, 'tcx, D>(decoder: &mut D) -> Result, D Ok(decoder.tcx().mk_region(Decodable::decode(decoder)?)) } +#[inline] pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::Slice>, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -213,6 +219,7 @@ pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D) Ok(decoder.tcx().mk_type_list((0..len).map(|_| Decodable::decode(decoder)))?) } +#[inline] pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::AdtDef, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -222,6 +229,7 @@ pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D) Ok(decoder.tcx().adt_def(def_id)) } +#[inline] pub fn decode_existential_predicate_slice<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::Slice>, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -232,6 +240,7 @@ pub fn decode_existential_predicate_slice<'a, 'tcx, D>(decoder: &mut D) .mk_existential_predicates((0..len).map(|_| Decodable::decode(decoder)))?) } +#[inline] pub fn decode_byte_array<'a, 'tcx, D>(decoder: &mut D) -> Result, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -242,6 +251,7 @@ pub fn decode_byte_array<'a, 'tcx, D>(decoder: &mut D) }) } +#[inline] pub fn decode_const<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx ty::Const<'tcx>, D::Error> where D: TyDecoder<'a, 'tcx>, @@ -249,3 +259,130 @@ pub fn decode_const<'a, 'tcx, D>(decoder: &mut D) { Ok(decoder.tcx().mk_const(Decodable::decode(decoder)?)) } + +#[macro_export] +macro_rules! __impl_decoder_methods { + ($($name:ident -> $ty:ty;)*) => { + $(fn $name(&mut self) -> Result<$ty, Self::Error> { + self.opaque.$name() + })* + } +} + +#[macro_export] +macro_rules! 
implement_ty_decoder { + ($DecoderName:ident <$($typaram:tt),*>) => { + mod __ty_decoder_impl { + use super::$DecoderName; + use $crate::ty; + use $crate::ty::codec::*; + use $crate::ty::subst::Substs; + use $crate::hir::def_id::{CrateNum}; + use $crate::middle::const_val::ByteArray; + use rustc_serialize::{Decoder, SpecializedDecoder}; + use std::borrow::Cow; + + impl<$($typaram ),*> Decoder for $DecoderName<$($typaram),*> { + type Error = String; + + __impl_decoder_methods! { + read_nil -> (); + + read_u128 -> u128; + read_u64 -> u64; + read_u32 -> u32; + read_u16 -> u16; + read_u8 -> u8; + read_usize -> usize; + + read_i128 -> i128; + read_i64 -> i64; + read_i32 -> i32; + read_i16 -> i16; + read_i8 -> i8; + read_isize -> isize; + + read_bool -> bool; + read_f64 -> f64; + read_f32 -> f32; + read_char -> char; + read_str -> Cow; + } + + fn error(&mut self, err: &str) -> Self::Error { + self.opaque.error(err) + } + } + + // FIXME(#36588) These impls are horribly unsound as they allow + // the caller to pick any lifetime for 'tcx, including 'static, + // by using the unspecialized proxies to them. + + impl<$($typaram),*> SpecializedDecoder for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result { + decode_cnum(self) + } + } + + impl<$($typaram),*> SpecializedDecoder> for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + decode_ty(self) + } + } + + impl<$($typaram),*> SpecializedDecoder> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + decode_predicates(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx Substs<'tcx>> for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> { + decode_substs(self) + } + } + + impl<$($typaram),*> SpecializedDecoder> for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + decode_region(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx ty::Slice>> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice>, Self::Error> { + decode_ty_slice(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx ty::AdtDef> for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> { + decode_adt_def(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx ty::Slice>> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) + -> Result<&'tcx ty::Slice>, Self::Error> { + decode_existential_predicate_slice(self) + } + } + + impl<$($typaram),*> SpecializedDecoder> for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + decode_byte_array(self) + } + } + + impl<$($typaram),*> SpecializedDecoder<&'tcx $crate::ty::Const<'tcx>> + for $DecoderName<$($typaram),*> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::Const<'tcx>, Self::Error> { + decode_const(self) + } + } + } + } +} + diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 209b89fc03fa2..8c234e632e9fb 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -14,7 +14,6 @@ use hir; use hir::def_id::{CrateNum, DefIndex, DefId, LocalDefId, RESERVED_FOR_INCR_COMP_CACHE, LOCAL_CRATE}; use hir::map::definitions::DefPathHash; -use middle::const_val::ByteArray; use middle::cstore::CrateStore; use rustc_data_structures::fx::FxHashMap; use 
rustc_data_structures::indexed_vec::{IndexVec, Idx}; @@ -22,7 +21,6 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque, SpecializedDecoder, SpecializedEncoder, UseSpecializedDecodable, UseSpecializedEncodable}; use session::{CrateDisambiguator, Session}; -use std::borrow::Cow; use std::cell::RefCell; use std::collections::BTreeMap; use std::mem; @@ -32,7 +30,6 @@ use syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP}; use ty; use ty::codec::{self as ty_codec, TyDecoder, TyEncoder}; use ty::context::TyCtxt; -use ty::subst::Substs; // Some magic values used for verifying that encoding and decoding. These are // basically random numbers. @@ -359,46 +356,6 @@ impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> { } } -macro_rules! decoder_methods { - ($($name:ident -> $ty:ty;)*) => { - $(fn $name(&mut self) -> Result<$ty, Self::Error> { - self.opaque.$name() - })* - } -} - -impl<'a, 'tcx, 'x> Decoder for CacheDecoder<'a, 'tcx, 'x> { - type Error = String; - - decoder_methods! { - read_nil -> (); - - read_u128 -> u128; - read_u64 -> u64; - read_u32 -> u32; - read_u16 -> u16; - read_u8 -> u8; - read_usize -> usize; - - read_i128 -> i128; - read_i64 -> i64; - read_i32 -> i32; - read_i16 -> i16; - read_i8 -> i8; - read_isize -> isize; - - read_bool -> bool; - read_f64 -> f64; - read_f32 -> f32; - read_char -> char; - read_str -> Cow; - } - - fn error(&mut self, err: &str) -> Self::Error { - self.opaque.error(err) - } -} - // Decode something that was encoded with encode_tagged() and verify that the // tag matches and the correct amount of bytes was read. fn decode_tagged<'a, 'tcx, D, T, V>(decoder: &mut D, @@ -481,6 +438,8 @@ impl<'a, 'tcx: 'a, 'x> ty_codec::TyDecoder<'a, 'tcx> for CacheDecoder<'a, 'tcx, } } +implement_ty_decoder!( CacheDecoder<'a, 'tcx, 'x> ); + impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { fn specialized_decode(&mut self) -> Result { let lo = BytePos::decode(self)?; @@ -498,12 +457,6 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { } } -impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result { - ty_codec::decode_cnum(self) - } -} - // This impl makes sure that we get a runtime error when we try decode a // DefIndex that is not contained in a DefId. 
Such a case would be problematic // because we would not know how to transform the DefIndex to the current @@ -567,66 +520,6 @@ impl<'a, 'tcx, 'x> SpecializedDecoder for CacheDecoder<'a, 'tcx, 'x> { } } -impl<'a, 'tcx, 'x> SpecializedDecoder> for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_ty(self) - } -} - -impl<'a, 'tcx, 'x> SpecializedDecoder> -for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_predicates(self) - } -} - -impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx Substs<'tcx>> for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> { - ty_codec::decode_substs(self) - } -} - -impl<'a, 'tcx, 'x> SpecializedDecoder> for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_region(self) - } -} - -impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx ty::Slice>> -for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice>, Self::Error> { - ty_codec::decode_ty_slice(self) - } -} - -impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx ty::AdtDef> for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> { - ty_codec::decode_adt_def(self) - } -} - -impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx ty::Slice>> - for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) - -> Result<&'tcx ty::Slice>, Self::Error> { - ty_codec::decode_existential_predicate_slice(self) - } -} - -impl<'a, 'tcx, 'x> SpecializedDecoder> for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_byte_array(self) - } -} - -impl<'a, 'tcx, 'x> SpecializedDecoder<&'tcx ty::Const<'tcx>> -for CacheDecoder<'a, 'tcx, 'x> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::Const<'tcx>, Self::Error> { - ty_codec::decode_const(self) - } -} - - //- ENCODING ------------------------------------------------------------------- struct CacheEncoder<'enc, 'a, 'tcx, E> diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index cf5255dca61f8..abf2a1b0c00a4 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -89,6 +89,7 @@ pub use self::maps::queries; pub mod adjustment; pub mod binding; pub mod cast; +#[macro_use] pub mod codec; pub mod error; mod erase_regions; diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 83d0a1fdfdb7b..ddc8b6abfacce 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -15,8 +15,6 @@ use schema::*; use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash}; use rustc::hir; - -use rustc::middle::const_val::ByteArray; use rustc::middle::cstore::{LinkagePreference, ExternConstBody, ExternBodyNestedBodies}; use rustc::hir::def::{self, Def, CtorKind}; @@ -25,19 +23,15 @@ use rustc::ich::Fingerprint; use rustc::middle::lang_items; use rustc::session::Session; use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::codec::{self as ty_codec, TyDecoder}; -use rustc::ty::subst::Substs; +use rustc::ty::codec::TyDecoder; use rustc::util::nodemap::DefIdSet; - use rustc::mir::Mir; -use std::borrow::Cow; use std::cell::Ref; use std::collections::BTreeMap; use std::io; use std::mem; use std::rc::Rc; -use std::str; use std::u32; use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque}; @@ -174,47 +168,6 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> { } } -macro_rules! 
decoder_methods { - ($($name:ident -> $ty:ty;)*) => { - $(fn $name(&mut self) -> Result<$ty, Self::Error> { - self.opaque.$name() - })* - } -} - -impl<'doc, 'tcx> Decoder for DecodeContext<'doc, 'tcx> { - type Error = as Decoder>::Error; - - decoder_methods! { - read_nil -> (); - - read_u128 -> u128; - read_u64 -> u64; - read_u32 -> u32; - read_u16 -> u16; - read_u8 -> u8; - read_usize -> usize; - - read_i128 -> i128; - read_i64 -> i64; - read_i32 -> i32; - read_i16 -> i16; - read_i8 -> i8; - read_isize -> isize; - - read_bool -> bool; - read_f64 -> f64; - read_f32 -> f32; - read_char -> char; - read_str -> Cow; - } - - fn error(&mut self, err: &str) -> Self::Error { - self.opaque.error(err) - } -} - - impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> { #[inline] @@ -293,12 +246,6 @@ impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { } } -impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result { - ty_codec::decode_cnum(self) - } -} - impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { fn specialized_decode(&mut self) -> Result { let lo = BytePos::decode(self)?; @@ -359,65 +306,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { } } -// FIXME(#36588) These impls are horribly unsound as they allow -// the caller to pick any lifetime for 'tcx, including 'static, -// by using the unspecialized proxies to them. - -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_ty(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_predicates(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx Substs<'tcx>> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> { - ty_codec::decode_substs(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_region(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice>> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice>, Self::Error> { - ty_codec::decode_ty_slice(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::AdtDef> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> { - ty_codec::decode_adt_def(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice>> - for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) - -> Result<&'tcx ty::Slice>, Self::Error> { - ty_codec::decode_existential_predicate_slice(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result, Self::Error> { - ty_codec::decode_byte_array(self) - } -} - -impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Const<'tcx>> for DecodeContext<'a, 'tcx> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::Const<'tcx>, Self::Error> { - ty_codec::decode_const(self) - } -} +implement_ty_decoder!( DecodeContext<'a, 'tcx> ); impl<'a, 'tcx> MetadataBlob { pub fn is_compatible(&self) -> bool { From cb1ff24425c37835b5d746a22d68047ff2cb807b Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Thu, 16 Nov 2017 17:13:39 +0100 Subject: [PATCH 19/21] incr.comp.: Remove default serialization implementations for things in rustc::hir::def_id so that we get an 
ICE instead of silently doing the wrong thing. --- src/librustc/hir/def_id.rs | 58 +++--------------------- src/librustc/middle/cstore.rs | 4 +- src/librustc/session/config.rs | 2 +- src/librustc/ty/maps/on_disk_cache.rs | 16 +++++++ src/librustc_incremental/persist/data.rs | 3 +- src/librustc_incremental/persist/save.rs | 6 +-- src/librustc_metadata/decoder.rs | 21 +++++++++ src/librustc_metadata/encoder.rs | 31 ++++++++++++- src/librustc_metadata/index_builder.rs | 2 +- 9 files changed, 81 insertions(+), 62 deletions(-) diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index f34022993de23..05d021ac6d266 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -11,8 +11,7 @@ use ty; use rustc_data_structures::indexed_vec::Idx; -use serialize::{self, Encoder, Decoder, Decodable, Encodable}; - +use serialize; use std::fmt; use std::u32; @@ -65,17 +64,8 @@ impl fmt::Display for CrateNum { } } -impl serialize::UseSpecializedEncodable for CrateNum { - fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_u32(self.0) - } -} - -impl serialize::UseSpecializedDecodable for CrateNum { - fn default_decode(d: &mut D) -> Result { - d.read_u32().map(CrateNum) - } -} +impl serialize::UseSpecializedEncodable for CrateNum {} +impl serialize::UseSpecializedDecodable for CrateNum {} /// A DefIndex is an index into the hir-map for a crate, identifying a /// particular definition. It should really be considered an interned @@ -151,19 +141,8 @@ impl DefIndex { } } -impl serialize::UseSpecializedEncodable for DefIndex { - #[inline] - fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_u32(self.0) - } -} - -impl serialize::UseSpecializedDecodable for DefIndex { - #[inline] - fn default_decode(d: &mut D) -> Result { - d.read_u32().map(DefIndex) - } -} +impl serialize::UseSpecializedEncodable for DefIndex {} +impl serialize::UseSpecializedDecodable for DefIndex {} #[derive(Copy, Clone, Eq, PartialEq, Hash)] pub enum DefIndexAddressSpace { @@ -225,31 +204,8 @@ impl DefId { } } -impl serialize::UseSpecializedEncodable for DefId { - #[inline] - fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { - let DefId { - krate, - index, - } = *self; - - krate.encode(s)?; - index.encode(s) - } -} - -impl serialize::UseSpecializedDecodable for DefId { - #[inline] - fn default_decode(d: &mut D) -> Result { - let krate = CrateNum::decode(d)?; - let index = DefIndex::decode(d)?; - - Ok(DefId { - krate, - index - }) - } -} +impl serialize::UseSpecializedEncodable for DefId {} +impl serialize::UseSpecializedDecodable for DefId {} #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index 628538b41c5d8..5d7141949e389 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -24,7 +24,7 @@ use hir; use hir::def; -use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; +use hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use hir::map as hir_map; use hir::map::definitions::{Definitions, DefKey, DefPathTable}; use hir::svh::Svh; @@ -180,7 +180,7 @@ impl EncodedMetadata { /// upstream crate. 
#[derive(Debug, RustcEncodable, RustcDecodable, Copy, Clone)] pub struct EncodedMetadataHash { - pub def_index: DefIndex, + pub def_index: u32, pub hash: ich::Fingerprint, } diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 2857d50fc87bc..b7abcc03132cb 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -1042,7 +1042,7 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "enable incremental compilation (experimental)"), incremental_cc: bool = (false, parse_bool, [UNTRACKED], "enable cross-crate incremental compilation (even more experimental)"), - incremental_queries: bool = (false, parse_bool, [UNTRACKED], + incremental_queries: bool = (true, parse_bool, [UNTRACKED], "enable incremental compilation support for queries (experimental)"), incremental_info: bool = (false, parse_bool, [UNTRACKED], "print high-level information about incremental reuse (or the lack thereof)"), diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 8c234e632e9fb..53ca9b3851d5e 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -559,14 +559,25 @@ impl<'enc, 'a, 'tcx, E> CacheEncoder<'enc, 'a, 'tcx, E> impl<'enc, 'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { + #[inline] fn position(&self) -> usize { self.encoder.position() } } +impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> + where E: 'enc + ty_codec::TyEncoder +{ + #[inline] + fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> { + self.emit_u32(cnum.as_u32()) + } +} + impl<'enc, 'a, 'tcx, E> SpecializedEncoder> for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { + #[inline] fn specialized_encode(&mut self, ty: &ty::Ty<'tcx>) -> Result<(), Self::Error> { ty_codec::encode_with_shorthand(self, ty, |encoder| &mut encoder.type_shorthands) @@ -577,6 +588,7 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder> for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { + #[inline] fn specialized_encode(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> Result<(), Self::Error> { @@ -588,6 +600,7 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder> impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { + #[inline] fn specialized_encode(&mut self, id: &hir::HirId) -> Result<(), Self::Error> { let hir::HirId { owner, @@ -605,6 +618,7 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { + #[inline] fn specialized_encode(&mut self, id: &DefId) -> Result<(), Self::Error> { let def_path_hash = self.tcx.def_path_hash(*id); def_path_hash.encode(self) @@ -614,6 +628,7 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tc impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { + #[inline] fn specialized_encode(&mut self, id: &LocalDefId) -> Result<(), Self::Error> { id.to_def_id().encode(self) } @@ -632,6 +647,7 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, impl<'enc, 'a, 'tcx, E> SpecializedEncoder for CacheEncoder<'enc, 'a, 'tcx, E> where E: 'enc + ty_codec::TyEncoder { + #[inline] fn specialized_encode(&mut self, node_id: &NodeId) -> Result<(), Self::Error> { 
let hir_id = self.tcx.hir.node_to_hir_id(*node_id); hir_id.encode(self) diff --git a/src/librustc_incremental/persist/data.rs b/src/librustc_incremental/persist/data.rs index fc417851b8897..08f9dba2ba162 100644 --- a/src/librustc_incremental/persist/data.rs +++ b/src/librustc_incremental/persist/data.rs @@ -11,7 +11,6 @@ //! The data that we will serialize and deserialize. use rustc::dep_graph::{WorkProduct, WorkProductId}; -use rustc::hir::def_id::DefIndex; use rustc::hir::map::DefPathHash; use rustc::middle::cstore::EncodedMetadataHash; use rustc_data_structures::fx::FxHashMap; @@ -58,5 +57,5 @@ pub struct SerializedMetadataHashes { /// is only populated if -Z query-dep-graph is specified. It will be /// empty otherwise. Importing crates are perfectly happy with just having /// the DefIndex. - pub index_map: FxHashMap + pub index_map: FxHashMap } diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs index 63038f1b93a3c..b6dabf99be7d7 100644 --- a/src/librustc_incremental/persist/save.rs +++ b/src/librustc_incremental/persist/save.rs @@ -9,7 +9,7 @@ // except according to those terms. use rustc::dep_graph::{DepGraph, DepKind}; -use rustc::hir::def_id::DefId; +use rustc::hir::def_id::{DefId, DefIndex}; use rustc::hir::svh::Svh; use rustc::ich::Fingerprint; use rustc::middle::cstore::EncodedMetadataHashes; @@ -270,11 +270,11 @@ fn encode_metadata_hashes(tcx: TyCtxt, if tcx.sess.opts.debugging_opts.query_dep_graph { for serialized_hash in &serialized_hashes.entry_hashes { - let def_id = DefId::local(serialized_hash.def_index); + let def_id = DefId::local(DefIndex::from_u32(serialized_hash.def_index)); // Store entry in the index_map let def_path_hash = tcx.def_path_hash(def_id); - serialized_hashes.index_map.insert(def_id.index, def_path_hash); + serialized_hashes.index_map.insert(def_id.index.as_u32(), def_path_hash); // Record hash in current_metadata_hashes current_metadata_hashes.insert(def_id, serialized_hash.hash); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index ddc8b6abfacce..0dd1b9e500c08 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -246,6 +246,27 @@ impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { } } + +impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { + #[inline] + fn specialized_decode(&mut self) -> Result { + let krate = CrateNum::decode(self)?; + let index = DefIndex::decode(self)?; + + Ok(DefId { + krate, + index, + }) + } +} + +impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { + #[inline] + fn specialized_decode(&mut self) -> Result { + Ok(DefIndex::from_u32(self.read_u32()?)) + } +} + impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { fn specialized_decode(&mut self) -> Result { let lo = BytePos::decode(self)?; diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index d5eee14bf506b..19d0de7334650 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -116,6 +116,33 @@ impl<'a, 'tcx, T> SpecializedEncoder> for EncodeContext<'a, 'tcx> { } } +impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { + #[inline] + fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> { + self.emit_u32(cnum.as_u32()) + } +} + +impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { + #[inline] + fn specialized_encode(&mut self, def_id: &DefId) -> Result<(), Self::Error> { + let DefId { + krate, + index, + } = *def_id; 
+ + krate.encode(self)?; + index.encode(self) + } +} + +impl<'a, 'tcx> SpecializedEncoder for EncodeContext<'a, 'tcx> { + #[inline] + fn specialized_encode(&mut self, def_index: &DefIndex) -> Result<(), Self::Error> { + self.emit_u32(def_index.as_u32()) + } +} + impl<'a, 'tcx> SpecializedEncoder> for EncodeContext<'a, 'tcx> { fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> { ty_codec::encode_with_shorthand(self, ty, |ecx| &mut ecx.type_shorthands) @@ -213,7 +240,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { if let Some(fingerprint) = fingerprint { this.metadata_hashes.hashes.push(EncodedMetadataHash { - def_index, + def_index: def_index.as_u32(), hash: fingerprint, }) } @@ -395,7 +422,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let total_bytes = self.position(); self.metadata_hashes.hashes.push(EncodedMetadataHash { - def_index: global_metadata_def_index(GlobalMetaDataKind::Krate), + def_index: global_metadata_def_index(GlobalMetaDataKind::Krate).as_u32(), hash: Fingerprint::from_smaller_hash(link_meta.crate_hash.as_u64()) }); diff --git a/src/librustc_metadata/index_builder.rs b/src/librustc_metadata/index_builder.rs index 1d2b6cc33d46a..46706bba96d6d 100644 --- a/src/librustc_metadata/index_builder.rs +++ b/src/librustc_metadata/index_builder.rs @@ -136,7 +136,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { let (fingerprint, ecx) = entry_builder.finish(); if let Some(hash) = fingerprint { ecx.metadata_hashes.hashes.push(EncodedMetadataHash { - def_index: id.index, + def_index: id.index.as_u32(), hash, }); } From 4c4f7a3189c837805e203decacb2f43d0866aac6 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Thu, 16 Nov 2017 17:42:39 +0100 Subject: [PATCH 20/21] Fix some tidy errors in ty::codec. --- src/librustc/ty/codec.rs | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs index 4739ea34e6fc1..fbb14f39ade34 100644 --- a/src/librustc/ty/codec.rs +++ b/src/librustc/ty/codec.rs @@ -318,13 +318,15 @@ macro_rules! implement_ty_decoder { // the caller to pick any lifetime for 'tcx, including 'static, // by using the unspecialized proxies to them. - impl<$($typaram),*> SpecializedDecoder for $DecoderName<$($typaram),*> { + impl<$($typaram),*> SpecializedDecoder + for $DecoderName<$($typaram),*> { fn specialized_decode(&mut self) -> Result { decode_cnum(self) } } - impl<$($typaram),*> SpecializedDecoder> for $DecoderName<$($typaram),*> { + impl<$($typaram),*> SpecializedDecoder> + for $DecoderName<$($typaram),*> { fn specialized_decode(&mut self) -> Result, Self::Error> { decode_ty(self) } @@ -332,18 +334,21 @@ macro_rules! implement_ty_decoder { impl<$($typaram),*> SpecializedDecoder> for $DecoderName<$($typaram),*> { - fn specialized_decode(&mut self) -> Result, Self::Error> { + fn specialized_decode(&mut self) + -> Result, Self::Error> { decode_predicates(self) } } - impl<$($typaram),*> SpecializedDecoder<&'tcx Substs<'tcx>> for $DecoderName<$($typaram),*> { + impl<$($typaram),*> SpecializedDecoder<&'tcx Substs<'tcx>> + for $DecoderName<$($typaram),*> { fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> { decode_substs(self) } } - impl<$($typaram),*> SpecializedDecoder> for $DecoderName<$($typaram),*> { + impl<$($typaram),*> SpecializedDecoder> + for $DecoderName<$($typaram),*> { fn specialized_decode(&mut self) -> Result, Self::Error> { decode_region(self) } @@ -351,12 +356,14 @@ macro_rules! 
implement_ty_decoder { impl<$($typaram),*> SpecializedDecoder<&'tcx ty::Slice>> for $DecoderName<$($typaram),*> { - fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice>, Self::Error> { + fn specialized_decode(&mut self) + -> Result<&'tcx ty::Slice>, Self::Error> { decode_ty_slice(self) } } - impl<$($typaram),*> SpecializedDecoder<&'tcx ty::AdtDef> for $DecoderName<$($typaram),*> { + impl<$($typaram),*> SpecializedDecoder<&'tcx ty::AdtDef> + for $DecoderName<$($typaram),*> { fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> { decode_adt_def(self) } @@ -370,7 +377,8 @@ macro_rules! implement_ty_decoder { } } - impl<$($typaram),*> SpecializedDecoder> for $DecoderName<$($typaram),*> { + impl<$($typaram),*> SpecializedDecoder> + for $DecoderName<$($typaram),*> { fn specialized_decode(&mut self) -> Result, Self::Error> { decode_byte_array(self) } From 0a1f6dd8a8c27caf913a16eb0a1afeaa8183b983 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Thu, 16 Nov 2017 18:07:49 +0100 Subject: [PATCH 21/21] Add doc comment for LocalDefId. --- src/librustc/hir/def_id.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index 05d021ac6d266..f6fcff37ca53e 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -198,7 +198,7 @@ impl DefId { self.krate == LOCAL_CRATE } - #[inline] + #[inline] pub fn to_local(self) -> LocalDefId { LocalDefId::from_def_id(self) } @@ -207,7 +207,12 @@ impl DefId { impl serialize::UseSpecializedEncodable for DefId {} impl serialize::UseSpecializedDecodable for DefId {} - +/// A LocalDefId is equivalent to a DefId with `krate == LOCAL_CRATE`. Since +/// we encode this information in the type, we can ensure at compile time that +/// no DefIds from upstream crates get thrown into the mix. There are quite a +/// few cases where we know that only DefIds from the local crate are expected +/// and a DefId from a different crate would signify a bug somewhere. This +/// is when LocalDefId comes in handy. #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct LocalDefId(DefIndex);
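
A minimal standalone sketch of the invariant LocalDefId captures: a newtype that can only be constructed from an id belonging to the local crate, so local-only code paths can demand it in their signatures. CrateId, ItemId, and LocalItemId below are hypothetical stand-ins for CrateNum, DefId, and LocalDefId; they are illustrative only.

    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    struct CrateId(u32);

    // Plays the role of LOCAL_CRATE.
    const LOCAL_CRATE_ID: CrateId = CrateId(0);

    // Plays the role of DefId: a crate plus an index within that crate.
    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    struct ItemId {
        krate: CrateId,
        index: u32,
    }

    // Plays the role of LocalDefId: only constructible from a local-crate id,
    // so foreign ids are ruled out where only local ones make sense.
    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    struct LocalItemId(u32);

    impl LocalItemId {
        fn from_item_id(id: ItemId) -> LocalItemId {
            // Mirrors the spirit of LocalDefId::from_def_id: fail loudly
            // instead of silently accepting an id from another crate.
            assert_eq!(id.krate, LOCAL_CRATE_ID, "expected an id from the local crate");
            LocalItemId(id.index)
        }

        fn to_item_id(self) -> ItemId {
            ItemId { krate: LOCAL_CRATE_ID, index: self.0 }
        }
    }

    fn main() {
        let local = ItemId { krate: LOCAL_CRATE_ID, index: 7 };
        let wrapped = LocalItemId::from_item_id(local);
        assert_eq!(wrapped.to_item_id(), local);
        println!("round-tripped {:?}", wrapped);
    }

Requiring the wrapper type in a function signature turns "this must be a local id" from a runtime convention into a compile-time guarantee, which is the case the doc comment above describes.
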