perform less decoding if it has the same syntax context #129827

Open · wants to merge 1 commit into base: master
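Judging from the title and the rename in the diff below, the serialized hygiene tables move from `syntax_contexts` / `SyntaxContextData` to `syntax_context_keys` / `SyntaxContextKey` in both crate metadata and the incremental on-disk cache: the entry stored per context is its interning key rather than its full data, which lets the decoding side reuse an already-interned context with the same key instead of decoding it again. A minimal sketch of that interning idea with stand-in types; the exact shape of `SyntaxContextKey` (assumed here to be parent context, outer expansion, and transparency) and the real `HygieneData` interner live in `rustc_span::hygiene` and may differ.

// A minimal sketch of the interning idea, not the real rustc_span code.
// Assumption: a syntax context is uniquely identified by (parent context,
// outer expansion, transparency), and HygieneData keeps an interner keyed on that.
use std::collections::HashMap;

type ExpnId = u32;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum Transparency {
    Transparent,
    SemiTransparent,
    Opaque,
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct SyntaxContext(u32);

// Assumed key shape; the real type is defined in rustc_span::hygiene.
type SyntaxContextKey = (SyntaxContext, ExpnId, Transparency);

struct HygieneData {
    // A context already created for this key can be handed back directly,
    // so decoding the same key twice does not allocate a second context.
    syntax_context_map: HashMap<SyntaxContextKey, SyntaxContext>,
    next_id: u32,
}

impl HygieneData {
    fn intern(&mut self, key: SyntaxContextKey) -> SyntaxContext {
        if let Some(&ctxt) = self.syntax_context_map.get(&key) {
            return ctxt; // same key: no further decoding needed
        }
        let ctxt = SyntaxContext(self.next_id);
        self.next_id += 1;
        self.syntax_context_map.insert(key, ctxt);
        ctxt
    }
}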
4 changes: 2 additions & 2 deletions compiler/rustc_metadata/src/rmeta/decoder.rs
@@ -469,10 +469,10 @@ impl<'a, 'tcx> SpanDecoder for DecodeContext<'a, 'tcx> {

let cname = cdata.root.name();
rustc_span::hygiene::decode_syntax_context(self, &cdata.hygiene_context, |_, id| {
debug!("SpecializedDecoder<SyntaxContext>: decoding {}", id);
debug!("SpecializedDecoder<SyntaxContextKey>: decoding {}", id);
cdata
.root
- .syntax_contexts
+ .syntax_context_keys
.get(cdata, id)
.unwrap_or_else(|| panic!("Missing SyntaxContext {id:?} for crate {cname:?}"))
.decode((cdata, sess))
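The only functional change in decoder.rs is that the per-id callback now reads a `SyntaxContextKey` out of the renamed `syntax_context_keys` table instead of a full `SyntaxContextData`. A small sketch of that lookup shape, with a stand-in table type in place of the real `LazyTable` plumbing and the assumed key triple from the sketch above:

// Stand-in for the lazily decoded per-crate table, indexed by the serialized id.
struct KeyTable {
    keys: Vec<Option<(u32, u32, u8)>>, // assumed (parent ctxt, expn, transparency) triples
}

impl KeyTable {
    // Mirrors `syntax_context_keys.get(cdata, id).unwrap_or_else(...)` from the diff:
    // a referenced id with no entry means corrupt metadata, so it panics loudly.
    fn decode_key(&self, id: u32, cname: &str) -> (u32, u32, u8) {
        self.keys
            .get(id as usize)
            .copied()
            .flatten()
            .unwrap_or_else(|| panic!("Missing SyntaxContext {id:?} for crate {cname:?}"))
    }
}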
17 changes: 9 additions & 8 deletions compiler/rustc_metadata/src/rmeta/encoder.rs
@@ -685,7 +685,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
// the incremental cache. If this causes us to deserialize a `Span`, then we may load
// additional `SyntaxContext`s into the global `HygieneData`. Therefore, we need to encode
// the hygiene data last to ensure that we encode any `SyntaxContext`s that might be used.
- let (syntax_contexts, expn_data, expn_hashes) = stat!("hygiene", || self.encode_hygiene());
+ let (syntax_context_keys, expn_data, expn_hashes) =
+     stat!("hygiene", || self.encode_hygiene());

let def_path_hash_map = stat!("def-path-hash-map", || self.encode_def_path_hash_map());

@@ -738,7 +739,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
exported_symbols,
interpret_alloc_index,
tables,
- syntax_contexts,
+ syntax_context_keys,
expn_data,
expn_hashes,
def_path_hash_map,
@@ -1810,15 +1811,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
self.lazy_array(foreign_modules.iter().map(|(_, m)| m).cloned())
}

- fn encode_hygiene(&mut self) -> (SyntaxContextTable, ExpnDataTable, ExpnHashTable) {
- let mut syntax_contexts: TableBuilder<_, _> = Default::default();
+ fn encode_hygiene(&mut self) -> (SyntaxContextKeyTable, ExpnDataTable, ExpnHashTable) {
+ let mut syntax_context_keys: TableBuilder<_, _> = Default::default();
let mut expn_data_table: TableBuilder<_, _> = Default::default();
let mut expn_hash_table: TableBuilder<_, _> = Default::default();

self.hygiene_ctxt.encode(
&mut (&mut *self, &mut syntax_contexts, &mut expn_data_table, &mut expn_hash_table),
|(this, syntax_contexts, _, _), index, ctxt_data| {
syntax_contexts.set_some(index, this.lazy(ctxt_data));
&mut (&mut *self, &mut syntax_context_keys, &mut expn_data_table, &mut expn_hash_table),
|(this, syntax_context_keys, _, _), index, ctxt_data| {
syntax_context_keys.set_some(index, this.lazy(ctxt_data));
},
|(this, _, expn_data_table, expn_hash_table), index, expn_data, hash| {
if let Some(index) = index.as_local() {
@@ -1829,7 +1830,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
);

(
- syntax_contexts.encode(&mut self.opaque),
+ syntax_context_keys.encode(&mut self.opaque),
expn_data_table.encode(&mut self.opaque),
expn_hash_table.encode(&mut self.opaque),
)
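On the encoding side the callback mechanism stays the same: `HygieneEncodeContext::encode` appears to walk every syntax context and expansion referenced while encoding the crate and hands each one to a closure together with its index, and the closure records one row in a table builder. A simplified, self-contained sketch of that shape; the real closure stores a lazy byte position via `this.lazy(ctxt_data)` rather than the key itself:

// Stand-in for rustc_metadata's TableBuilder: one optional row per index.
struct TableBuilder<V> {
    rows: Vec<Option<V>>,
}

impl<V> TableBuilder<V> {
    fn set_some(&mut self, index: u32, value: V) {
        let index = index as usize;
        if self.rows.len() <= index {
            self.rows.resize_with(index + 1, || None);
        }
        self.rows[index] = Some(value);
    }
}

// Sketch of the per-context callback from the diff: each (index, key) pair the
// hygiene encode context reports becomes one row of `syntax_context_keys`.
fn encode_hygiene_keys(collected: &[(u32, (u32, u32, u8))]) -> TableBuilder<(u32, u32, u8)> {
    let mut syntax_context_keys: TableBuilder<(u32, u32, u8)> = TableBuilder { rows: Vec::new() };
    for &(index, key) in collected {
        syntax_context_keys.set_some(index, key);
    }
    syntax_context_keys
}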
6 changes: 3 additions & 3 deletions compiler/rustc_metadata/src/rmeta/mod.rs
@@ -34,7 +34,7 @@ use rustc_serialize::opaque::FileEncoder;
use rustc_session::config::SymbolManglingVersion;
use rustc_session::cstore::{CrateDepKind, ForeignModule, LinkagePreference, NativeLib};
use rustc_span::edition::Edition;
- use rustc_span::hygiene::{ExpnIndex, MacroKind, SyntaxContextData};
+ use rustc_span::hygiene::{ExpnIndex, MacroKind, SyntaxContextKey};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{self, ExpnData, ExpnHash, ExpnId, Span};
use rustc_target::abi::{FieldIdx, VariantIdx};
@@ -193,7 +193,7 @@ enum LazyState {
Previous(NonZero<usize>),
}

- type SyntaxContextTable = LazyTable<u32, Option<LazyValue<SyntaxContextData>>>;
+ type SyntaxContextKeyTable = LazyTable<u32, Option<LazyValue<SyntaxContextKey>>>;
type ExpnDataTable = LazyTable<ExpnIndex, Option<LazyValue<ExpnData>>>;
type ExpnHashTable = LazyTable<ExpnIndex, Option<LazyValue<ExpnHash>>>;

@@ -276,7 +276,7 @@ pub(crate) struct CrateRoot {

exported_symbols: LazyArray<(ExportedSymbol<'static>, SymbolExportInfo)>,

- syntax_contexts: SyntaxContextTable,
+ syntax_context_keys: SyntaxContextKeyTable,
expn_data: ExpnDataTable,
expn_hashes: ExpnHashTable,

29 changes: 14 additions & 15 deletions compiler/rustc_middle/src/query/on_disk_cache.rs
@@ -20,7 +20,7 @@ use rustc_serialize::opaque::{FileEncodeResult, FileEncoder, IntEncodedWithFixed
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_session::Session;
use rustc_span::hygiene::{
- ExpnId, HygieneDecodeContext, HygieneEncodeContext, SyntaxContext, SyntaxContextData,
+ ExpnId, HygieneDecodeContext, HygieneEncodeContext, SyntaxContext, SyntaxContextKey,
};
use rustc_span::source_map::SourceMap;
use rustc_span::{
@@ -36,7 +36,7 @@ const TAG_FULL_SPAN: u8 = 0;
const TAG_PARTIAL_SPAN: u8 = 1;
const TAG_RELATIVE_SPAN: u8 = 2;

- const TAG_SYNTAX_CONTEXT: u8 = 0;
+ const TAG_SYNTAX_CONTEXT_KEY: u8 = 0;
const TAG_EXPN_DATA: u8 = 1;

// Tags for encoding Symbol's
@@ -77,7 +77,7 @@ pub struct OnDiskCache<'sess> {
// to represent the fact that we are storing *encoded* ids. When we decode
// a `SyntaxContext`, a new id will be allocated from the global `HygieneData`,
// which will almost certainly be different than the serialized id.
- syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
+ syntax_context_keys: FxHashMap<u32, AbsoluteBytePos>,
// A map from the `DefPathHash` of an `ExpnId` to the position
// of their associated `ExpnData`. Ideally, we would store a `DefId`,
// but we need to decode this before we've constructed a `TyCtxt` (which
@@ -108,7 +108,7 @@ struct Footer {
// without measurable overhead. This permits larger const allocations without ICEing.
interpret_alloc_index: Vec<u64>,
// See `OnDiskCache.syntax_contexts`
- syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
+ syntax_context_keys: FxHashMap<u32, AbsoluteBytePos>,
// See `OnDiskCache.expn_data`
expn_data: UnhashMap<ExpnHash, AbsoluteBytePos>,
foreign_expn_data: UnhashMap<ExpnHash, u32>,
@@ -179,7 +179,7 @@ impl<'sess> OnDiskCache<'sess> {
query_result_index: footer.query_result_index.into_iter().collect(),
prev_side_effects_index: footer.side_effects_index.into_iter().collect(),
alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index),
- syntax_contexts: footer.syntax_contexts,
+ syntax_context_keys: footer.syntax_context_keys,
expn_data: footer.expn_data,
foreign_expn_data: footer.foreign_expn_data,
hygiene_context: Default::default(),
@@ -196,7 +196,7 @@ impl<'sess> OnDiskCache<'sess> {
query_result_index: Default::default(),
prev_side_effects_index: Default::default(),
alloc_decoding_state: AllocDecodingState::new(Vec::new()),
- syntax_contexts: FxHashMap::default(),
+ syntax_context_keys: FxHashMap::default(),
expn_data: UnhashMap::default(),
foreign_expn_data: UnhashMap::default(),
hygiene_context: Default::default(),
@@ -301,7 +301,7 @@ impl<'sess> OnDiskCache<'sess> {
interpret_alloc_index
};

- let mut syntax_contexts = FxHashMap::default();
+ let mut syntax_context_keys = FxHashMap::default();
let mut expn_data = UnhashMap::default();
let mut foreign_expn_data = UnhashMap::default();

@@ -312,8 +312,8 @@ impl<'sess> OnDiskCache<'sess> {
&mut encoder,
|encoder, index, ctxt_data| {
let pos = AbsoluteBytePos::new(encoder.position());
- encoder.encode_tagged(TAG_SYNTAX_CONTEXT, ctxt_data);
- syntax_contexts.insert(index, pos);
+ encoder.encode_tagged(TAG_SYNTAX_CONTEXT_KEY, &ctxt_data);
+ syntax_context_keys.insert(index, pos);
},
|encoder, expn_id, data, hash| {
if expn_id.krate == LOCAL_CRATE {
@@ -335,7 +335,7 @@ impl<'sess> OnDiskCache<'sess> {
query_result_index,
side_effects_index,
interpret_alloc_index,
- syntax_contexts,
+ syntax_context_keys,
expn_data,
foreign_expn_data,
},
@@ -442,7 +442,7 @@ impl<'sess> OnDiskCache<'sess> {
file_index_to_file: &self.file_index_to_file,
file_index_to_stable_id: &self.file_index_to_stable_id,
alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
syntax_contexts: &self.syntax_contexts,
syntax_context_keys: &self.syntax_context_keys,
expn_data: &self.expn_data,
foreign_expn_data: &self.foreign_expn_data,
hygiene_context: &self.hygiene_context,
@@ -463,7 +463,7 @@ pub struct CacheDecoder<'a, 'tcx> {
file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, EncodedSourceFileId>,
alloc_decoding_session: AllocDecodingSession<'a>,
syntax_contexts: &'a FxHashMap<u32, AbsoluteBytePos>,
syntax_context_keys: &'a FxHashMap<u32, AbsoluteBytePos>,
expn_data: &'a UnhashMap<ExpnHash, AbsoluteBytePos>,
foreign_expn_data: &'a UnhashMap<ExpnHash, u32>,
hygiene_context: &'a HygieneDecodeContext,
@@ -584,13 +584,12 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Vec<u8> {

impl<'a, 'tcx> SpanDecoder for CacheDecoder<'a, 'tcx> {
fn decode_syntax_context(&mut self) -> SyntaxContext {
- let syntax_contexts = self.syntax_contexts;
rustc_span::hygiene::decode_syntax_context(self, self.hygiene_context, |this, id| {
// This closure is invoked if we haven't already decoded the data for the `SyntaxContext` we are deserializing.
// We look up the position of the associated `SyntaxData` and decode it.
- let pos = syntax_contexts.get(&id).unwrap();
+ let pos = this.syntax_context_keys.get(&id).unwrap();
this.with_position(pos.to_usize(), |decoder| {
- let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT);
+ let data: SyntaxContextKey = decode_tagged(decoder, TAG_SYNTAX_CONTEXT_KEY);
data
})
})
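The incremental cache follows the same pattern: the footer maps each serialized context index to the absolute byte position of its tagged `SyntaxContextKey`, and the decode closure is only invoked for indices that have not already been decoded in this session, at which point it jumps to that position. A small sketch of that jump-to-position lookup, with stand-in types for the cache blob and the assumed key triple (the real code goes through `decode_tagged(decoder, TAG_SYNTAX_CONTEXT_KEY)`):

use std::collections::HashMap;

const TAG_SYNTAX_CONTEXT_KEY: u8 = 0;

// Stand-in for the serialized on-disk cache.
struct CacheBlob {
    bytes: Vec<u8>,
}

// Placeholder decode: checks the tag at the given position; the real decoder
// then reads the SyntaxContextKey that follows the tag.
fn decode_key_at(blob: &CacheBlob, pos: usize) -> (u32, u32, u8) {
    assert_eq!(blob.bytes[pos], TAG_SYNTAX_CONTEXT_KEY, "unexpected tag in cache");
    (0, 0, 0)
}

fn lookup_syntax_context_key(
    blob: &CacheBlob,
    syntax_context_keys: &HashMap<u32, usize>, // serialized index -> AbsoluteBytePos
    id: u32,
) -> (u32, u32, u8) {
    // Mirrors `this.syntax_context_keys.get(&id).unwrap()` followed by
    // `this.with_position(pos.to_usize(), ...)` in the diff above.
    let pos = *syntax_context_keys
        .get(&id)
        .expect("SyntaxContextKey position missing from cache footer");
    decode_key_at(blob, pos)
}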
1 change: 1 addition & 0 deletions compiler/rustc_middle/src/ty/parameterized.rs
@@ -106,6 +106,7 @@ trivially_parameterized_over_tcx! {
rustc_span::Span,
rustc_span::Symbol,
rustc_span::def_id::DefPathHash,
+ rustc_span::hygiene::SyntaxContextKey,
rustc_span::hygiene::SyntaxContextData,
rustc_span::symbol::Ident,
rustc_type_ir::Variance,