Commit af25253

Auto merge of #123556 - Mark-Simulacrum:drop-unused-sharding, r=Nadrieril
Remove sharding for VecCache. This sharding is never used (per the comment in the code). If we re-add sharding at some point in the future, it is cheap to restore, but for now there is no need for the extra complexity.
2 parents: 087ae97 + 668b318
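
The "never used" claim comes from the FIXME removed below: the shard was selected from the high bits of a 64-bit hash, but VecCache passed `key.index() as u64` directly as that hash, and query key indices are small, so the high bits are zero and every key landed in shard 0. A minimal, self-contained sketch of why that happens (this is not the rustc_data_structures implementation; the shard-selection formula and `SHARD_BITS = 5` are assumptions for illustration):

// Hypothetical stand-in for a sharded map's shard selection: the shard index
// is derived from the *top* SHARD_BITS of the 64-bit hash value.
const SHARD_BITS: u32 = 5; // assumed shard count of 32
const SHARDS: usize = 1 << SHARD_BITS;

fn shard_index_by_hash(hash: u64) -> usize {
    ((hash >> (64 - SHARD_BITS)) as usize) % SHARDS
}

fn main() {
    // VecCache used `key.index() as u64` as the "hash". Indices are small,
    // so the high bits are zero and shard 0 is always picked.
    for index in [0u64, 1, 17, 4096, 10_000_000] {
        assert_eq!(shard_index_by_hash(index), 0);
    }
    println!("every small index selects shard 0");
}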

File tree: 1 file changed, +6 -13 lines

  • compiler/rustc_query_system/src/query/caches.rs
compiler/rustc_query_system/src/query/caches.rs (+6 -13)
@@ -101,7 +101,7 @@ where
 }
 
 pub struct VecCache<K: Idx, V> {
-    cache: Sharded<IndexVec<K, Option<(V, DepNodeIndex)>>>,
+    cache: Lock<IndexVec<K, Option<(V, DepNodeIndex)>>>,
 }
 
 impl<K: Idx, V> Default for VecCache<K, V> {
@@ -120,24 +120,20 @@ where
 
     #[inline(always)]
     fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> {
-        // FIXME: lock_shard_by_hash will use high bits which are usually zero in the index() passed
-        // here. This makes sharding essentially useless, always selecting the zero'th shard.
-        let lock = self.cache.lock_shard_by_hash(key.index() as u64);
+        let lock = self.cache.lock();
         if let Some(Some(value)) = lock.get(*key) { Some(*value) } else { None }
     }
 
     #[inline]
     fn complete(&self, key: K, value: V, index: DepNodeIndex) {
-        let mut lock = self.cache.lock_shard_by_hash(key.index() as u64);
+        let mut lock = self.cache.lock();
         lock.insert(key, (value, index));
     }
 
     fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
-        for shard in self.cache.lock_shards() {
-            for (k, v) in shard.iter_enumerated() {
-                if let Some(v) = v {
-                    f(&k, &v.0, v.1);
-                }
+        for (k, v) in self.cache.lock().iter_enumerated() {
+            if let Some(v) = v {
+                f(&k, &v.0, v.1);
             }
         }
     }
@@ -149,9 +145,6 @@ pub struct DefIdCache<V> {
     ///
     /// The second element of the tuple is the set of keys actually present in the IndexVec, used
     /// for faster iteration in `iter()`.
-    // FIXME: This may want to be sharded, like VecCache. However *how* to shard an IndexVec isn't
-    // super clear; VecCache is effectively not sharded today (see FIXME there). For now just omit
-    // that complexity here.
     local: Lock<(IndexVec<DefIndex, Option<(V, DepNodeIndex)>>, Vec<DefIndex>)>,
     foreign: DefaultCache<DefId, V>,
 }
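
For readers outside rustc, the resulting shape of VecCache after this change is simply an index-keyed vector behind a single lock. A rough standalone sketch of that pattern, using std::sync::Mutex in place of rustc's Lock and a plain usize key in place of the Idx/IndexVec machinery (all names here are illustrative, not rustc APIs):

use std::sync::Mutex;

/// Illustrative single-lock cache keyed by a dense index, mirroring the
/// post-change VecCache shape (not the actual rustc type).
struct IndexCache<V> {
    cache: Mutex<Vec<Option<V>>>,
}

impl<V: Clone> IndexCache<V> {
    fn new() -> Self {
        IndexCache { cache: Mutex::new(Vec::new()) }
    }

    /// Return the cached value for `key`, if any.
    fn lookup(&self, key: usize) -> Option<V> {
        let lock = self.cache.lock().unwrap();
        lock.get(key).and_then(|slot| slot.clone())
    }

    /// Record a completed value, growing the vector as needed.
    fn complete(&self, key: usize, value: V) {
        let mut lock = self.cache.lock().unwrap();
        if lock.len() <= key {
            lock.resize(key + 1, None);
        }
        lock[key] = Some(value);
    }

    /// Visit every present entry, as the diff's `iter` does after the change.
    fn iter(&self, f: &mut dyn FnMut(usize, &V)) {
        for (k, slot) in self.cache.lock().unwrap().iter().enumerate() {
            if let Some(v) = slot {
                f(k, v);
            }
        }
    }
}

fn main() {
    let cache = IndexCache::new();
    cache.complete(3, "three");
    assert_eq!(cache.lookup(3), Some("three"));
    assert_eq!(cache.lookup(7), None);
    cache.iter(&mut |k, v| println!("{k}: {v}"));
}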
