|
| 1 | +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT |
| 2 | +// file at the top-level directory of this distribution and at |
| 3 | +// http://rust-lang.org/COPYRIGHT. |
| 4 | +// |
| 5 | +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or |
| 6 | +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license |
| 7 | +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
| 8 | +// option. This file may not be copied, modified, or distributed |
| 9 | +// except according to those terms. |
| 10 | + |
| 11 | +use hir; |
| 12 | +use hir::def_id::DefId; |
| 13 | +use ich::{self, CachingCodemapView, DefPathHashes}; |
| 14 | +use session::config::DebugInfoLevel::NoDebugInfo; |
| 15 | +use ty; |
| 16 | + |
| 17 | +use std::hash as std_hash; |
| 18 | + |
| 19 | +use syntax::ast; |
| 20 | +use syntax::attr; |
| 21 | +use syntax::ext::hygiene::SyntaxContext; |
| 22 | +use syntax::symbol::Symbol; |
| 23 | +use syntax_pos::Span; |
| 24 | + |
| 25 | +use rustc_data_structures::stable_hasher::{HashStable, StableHasher, |
| 26 | + StableHasherResult}; |
| 27 | +use rustc_data_structures::accumulate_vec::AccumulateVec; |
| 28 | + |
| 29 | +/// This is the context state available during incr. comp. hashing. It contains |
| 30 | +/// enough information to transform DefIds and HirIds into stable DefPaths (i.e. |
| 31 | +/// a reference to the TyCtxt) and it holds a few caches for speeding up various |
| 32 | +/// things (e.g. each DefId/DefPath is only hashed once). |
| 33 | +pub struct StableHashingContext<'a, 'tcx: 'a> { |
| 34 | + tcx: ty::TyCtxt<'a, 'tcx, 'tcx>, |
| 35 | + def_path_hashes: DefPathHashes<'a, 'tcx>, |
| 36 | + codemap: CachingCodemapView<'tcx>, |
| 37 | + hash_spans: bool, |
| 38 | + hash_bodies: bool, |
| 39 | + overflow_checks_enabled: bool, |
| 40 | + node_id_hashing_mode: NodeIdHashingMode, |
| 41 | + // A sorted array of symbol keys for fast lookup. |
| 42 | + ignored_attr_names: Vec<Symbol>, |
| 43 | +} |
| 44 | + |
/// Selects how an `ast::NodeId` contributes to a stable hash.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum NodeIdHashingMode {
    /// Contribute nothing (only valid for ids with no trait-map entry).
    Ignore,
    /// Hash the stable `HirId` corresponding to the node.
    HashDefPath,
    /// Hash the (sorted) def-path hashes of the traits in scope at the node.
    HashTraitsInScope,
}
| 51 | + |
| 52 | +impl<'a, 'tcx: 'a> StableHashingContext<'a, 'tcx> { |
| 53 | + |
| 54 | + pub fn new(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>) -> Self { |
| 55 | + let hash_spans_initial = tcx.sess.opts.debuginfo != NoDebugInfo; |
| 56 | + let check_overflow_initial = tcx.sess.overflow_checks(); |
| 57 | + |
| 58 | + let mut ignored_attr_names: Vec<_> = ich::IGNORED_ATTRIBUTES |
| 59 | + .iter() |
| 60 | + .map(|&s| Symbol::intern(s)) |
| 61 | + .collect(); |
| 62 | + |
| 63 | + ignored_attr_names.sort(); |
| 64 | + |
| 65 | + StableHashingContext { |
| 66 | + tcx: tcx, |
| 67 | + def_path_hashes: DefPathHashes::new(tcx), |
| 68 | + codemap: CachingCodemapView::new(tcx), |
| 69 | + hash_spans: hash_spans_initial, |
| 70 | + hash_bodies: true, |
| 71 | + overflow_checks_enabled: check_overflow_initial, |
| 72 | + node_id_hashing_mode: NodeIdHashingMode::HashDefPath, |
| 73 | + ignored_attr_names: ignored_attr_names, |
| 74 | + } |
| 75 | + } |
| 76 | + |
| 77 | + #[inline] |
| 78 | + pub fn while_hashing_hir_bodies<F: FnOnce(&mut Self)>(&mut self, |
| 79 | + hash_bodies: bool, |
| 80 | + f: F) { |
| 81 | + let prev_hash_bodies = self.hash_bodies; |
| 82 | + self.hash_bodies = hash_bodies; |
| 83 | + f(self); |
| 84 | + self.hash_bodies = prev_hash_bodies; |
| 85 | + } |
| 86 | + |
| 87 | + #[inline] |
| 88 | + pub fn while_hashing_spans<F: FnOnce(&mut Self)>(&mut self, |
| 89 | + hash_spans: bool, |
| 90 | + f: F) { |
| 91 | + let prev_hash_spans = self.hash_spans; |
| 92 | + self.hash_spans = hash_spans; |
| 93 | + f(self); |
| 94 | + self.hash_spans = prev_hash_spans; |
| 95 | + } |
| 96 | + |
| 97 | + #[inline] |
| 98 | + pub fn with_node_id_hashing_mode<F: FnOnce(&mut Self)>(&mut self, |
| 99 | + mode: NodeIdHashingMode, |
| 100 | + f: F) { |
| 101 | + let prev = self.node_id_hashing_mode; |
| 102 | + self.node_id_hashing_mode = mode; |
| 103 | + f(self); |
| 104 | + self.node_id_hashing_mode = prev; |
| 105 | + } |
| 106 | + |
| 107 | + #[inline] |
| 108 | + pub fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx> { |
| 109 | + self.tcx |
| 110 | + } |
| 111 | + |
| 112 | + #[inline] |
| 113 | + pub fn def_path_hash(&mut self, def_id: DefId) -> u64 { |
| 114 | + self.def_path_hashes.hash(def_id) |
| 115 | + } |
| 116 | + |
| 117 | + #[inline] |
| 118 | + pub fn hash_spans(&self) -> bool { |
| 119 | + self.hash_spans |
| 120 | + } |
| 121 | + |
| 122 | + #[inline] |
| 123 | + pub fn hash_bodies(&self) -> bool { |
| 124 | + self.hash_bodies |
| 125 | + } |
| 126 | + |
| 127 | + #[inline] |
| 128 | + pub fn codemap(&mut self) -> &mut CachingCodemapView<'tcx> { |
| 129 | + &mut self.codemap |
| 130 | + } |
| 131 | + |
| 132 | + #[inline] |
| 133 | + pub fn is_ignored_attr(&self, name: Symbol) -> bool { |
| 134 | + self.ignored_attr_names.binary_search(&name).is_ok() |
| 135 | + } |
| 136 | + |
| 137 | + pub fn hash_hir_item_like<F: FnOnce(&mut Self)>(&mut self, |
| 138 | + item_attrs: &[ast::Attribute], |
| 139 | + f: F) { |
| 140 | + let prev_overflow_checks = self.overflow_checks_enabled; |
| 141 | + if attr::contains_name(item_attrs, "rustc_inherit_overflow_checks") { |
| 142 | + self.overflow_checks_enabled = true; |
| 143 | + } |
| 144 | + let prev_hash_node_ids = self.node_id_hashing_mode; |
| 145 | + self.node_id_hashing_mode = NodeIdHashingMode::Ignore; |
| 146 | + |
| 147 | + f(self); |
| 148 | + |
| 149 | + self.node_id_hashing_mode = prev_hash_node_ids; |
| 150 | + self.overflow_checks_enabled = prev_overflow_checks; |
| 151 | + } |
| 152 | + |
| 153 | + #[inline] |
| 154 | + pub fn binop_can_panic_at_runtime(&self, binop: hir::BinOp_) -> bool |
| 155 | + { |
| 156 | + match binop { |
| 157 | + hir::BiAdd | |
| 158 | + hir::BiSub | |
| 159 | + hir::BiMul => self.overflow_checks_enabled, |
| 160 | + |
| 161 | + hir::BiDiv | |
| 162 | + hir::BiRem => true, |
| 163 | + |
| 164 | + hir::BiAnd | |
| 165 | + hir::BiOr | |
| 166 | + hir::BiBitXor | |
| 167 | + hir::BiBitAnd | |
| 168 | + hir::BiBitOr | |
| 169 | + hir::BiShl | |
| 170 | + hir::BiShr | |
| 171 | + hir::BiEq | |
| 172 | + hir::BiLt | |
| 173 | + hir::BiLe | |
| 174 | + hir::BiNe | |
| 175 | + hir::BiGe | |
| 176 | + hir::BiGt => false |
| 177 | + } |
| 178 | + } |
| 179 | + |
| 180 | + #[inline] |
| 181 | + pub fn unop_can_panic_at_runtime(&self, unop: hir::UnOp) -> bool |
| 182 | + { |
| 183 | + match unop { |
| 184 | + hir::UnDeref | |
| 185 | + hir::UnNot => false, |
| 186 | + hir::UnNeg => self.overflow_checks_enabled, |
| 187 | + } |
| 188 | + } |
| 189 | +} |
| 190 | + |
| 191 | + |
| 192 | +impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for ast::NodeId { |
| 193 | + fn hash_stable<W: StableHasherResult>(&self, |
| 194 | + hcx: &mut StableHashingContext<'a, 'tcx>, |
| 195 | + hasher: &mut StableHasher<W>) { |
| 196 | + match hcx.node_id_hashing_mode { |
| 197 | + NodeIdHashingMode::Ignore => { |
| 198 | + // Most NodeIds in the HIR can be ignored, but if there is a |
| 199 | + // corresponding entry in the `trait_map` we need to hash that. |
| 200 | + // Make sure we don't ignore too much by checking that there is |
| 201 | + // no entry in a debug_assert!(). |
| 202 | + debug_assert!(hcx.tcx.trait_map.get(self).is_none()); |
| 203 | + } |
| 204 | + NodeIdHashingMode::HashDefPath => { |
| 205 | + hcx.tcx.hir.definitions().node_to_hir_id(*self).hash_stable(hcx, hasher); |
| 206 | + } |
| 207 | + NodeIdHashingMode::HashTraitsInScope => { |
| 208 | + if let Some(traits) = hcx.tcx.trait_map.get(self) { |
| 209 | + // The ordering of the candidates is not fixed. So we hash |
| 210 | + // the def-ids and then sort them and hash the collection. |
| 211 | + let mut candidates: AccumulateVec<[_; 8]> = |
| 212 | + traits.iter() |
| 213 | + .map(|&hir::TraitCandidate { def_id, import_id: _ }| { |
| 214 | + hcx.def_path_hash(def_id) |
| 215 | + }) |
| 216 | + .collect(); |
| 217 | + if traits.len() > 1 { |
| 218 | + candidates.sort(); |
| 219 | + } |
| 220 | + candidates.hash_stable(hcx, hasher); |
| 221 | + } |
| 222 | + } |
| 223 | + } |
| 224 | + } |
| 225 | +} |
| 226 | + |
| 227 | +impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for Span { |
| 228 | + |
| 229 | + // Hash a span in a stable way. We can't directly hash the span's BytePos |
| 230 | + // fields (that would be similar to hashing pointers, since those are just |
| 231 | + // offsets into the CodeMap). Instead, we hash the (file name, line, column) |
| 232 | + // triple, which stays the same even if the containing FileMap has moved |
| 233 | + // within the CodeMap. |
| 234 | + // Also note that we are hashing byte offsets for the column, not unicode |
| 235 | + // codepoint offsets. For the purpose of the hash that's sufficient. |
| 236 | + // Also, hashing filenames is expensive so we avoid doing it twice when the |
| 237 | + // span starts and ends in the same file, which is almost always the case. |
| 238 | + fn hash_stable<W: StableHasherResult>(&self, |
| 239 | + hcx: &mut StableHashingContext<'a, 'tcx>, |
| 240 | + hasher: &mut StableHasher<W>) { |
| 241 | + use syntax_pos::Pos; |
| 242 | + |
| 243 | + if !hcx.hash_spans { |
| 244 | + return |
| 245 | + } |
| 246 | + |
| 247 | + // If this is not an empty or invalid span, we want to hash the last |
| 248 | + // position that belongs to it, as opposed to hashing the first |
| 249 | + // position past it. |
| 250 | + let span_hi = if self.hi > self.lo { |
| 251 | + // We might end up in the middle of a multibyte character here, |
| 252 | + // but that's OK, since we are not trying to decode anything at |
| 253 | + // this position. |
| 254 | + self.hi - ::syntax_pos::BytePos(1) |
| 255 | + } else { |
| 256 | + self.hi |
| 257 | + }; |
| 258 | + |
| 259 | + { |
| 260 | + let loc1 = hcx.codemap().byte_pos_to_line_and_col(self.lo); |
| 261 | + let loc1 = loc1.as_ref() |
| 262 | + .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize())) |
| 263 | + .unwrap_or(("???", 0, 0)); |
| 264 | + |
| 265 | + let loc2 = hcx.codemap().byte_pos_to_line_and_col(span_hi); |
| 266 | + let loc2 = loc2.as_ref() |
| 267 | + .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize())) |
| 268 | + .unwrap_or(("???", 0, 0)); |
| 269 | + |
| 270 | + if loc1.0 == loc2.0 { |
| 271 | + std_hash::Hash::hash(&0u8, hasher); |
| 272 | + |
| 273 | + std_hash::Hash::hash(loc1.0, hasher); |
| 274 | + std_hash::Hash::hash(&loc1.1, hasher); |
| 275 | + std_hash::Hash::hash(&loc1.2, hasher); |
| 276 | + |
| 277 | + // Do not hash the file name twice |
| 278 | + std_hash::Hash::hash(&loc2.1, hasher); |
| 279 | + std_hash::Hash::hash(&loc2.2, hasher); |
| 280 | + } else { |
| 281 | + std_hash::Hash::hash(&1u8, hasher); |
| 282 | + |
| 283 | + std_hash::Hash::hash(loc1.0, hasher); |
| 284 | + std_hash::Hash::hash(&loc1.1, hasher); |
| 285 | + std_hash::Hash::hash(&loc1.2, hasher); |
| 286 | + |
| 287 | + std_hash::Hash::hash(loc2.0, hasher); |
| 288 | + std_hash::Hash::hash(&loc2.1, hasher); |
| 289 | + std_hash::Hash::hash(&loc2.2, hasher); |
| 290 | + } |
| 291 | + } |
| 292 | + |
| 293 | + if self.ctxt == SyntaxContext::empty() { |
| 294 | + 0u8.hash_stable(hcx, hasher); |
| 295 | + } else { |
| 296 | + 1u8.hash_stable(hcx, hasher); |
| 297 | + self.source_callsite().hash_stable(hcx, hasher); |
| 298 | + } |
| 299 | + } |
| 300 | +} |
0 commit comments