
Commit abaca54

Auto merge of rust-lang#122432 - oli-obk:validate_before_interning, r=<try>

Validate before interning

based on rust-lang#122397

fixes rust-lang#122398

r? `@RalfJung`

There are more cleanups that can be done afterwards, but I think they may be unnecessary to make this PR useful on its own.

2 parents 13abc0a + c99636f · commit abaca54

25 files changed: +416 -575 lines

compiler/rustc_const_eval/messages.ftl (-1)

@@ -49,7 +49,6 @@ const_eval_dangling_int_pointer =
 const_eval_dangling_null_pointer =
     {$bad_pointer_message}: null pointer is a dangling pointer (it has no provenance)

-const_eval_dangling_ptr_in_final = encountered dangling pointer in final value of {const_eval_intern_kind}
 const_eval_dead_local =
     accessing a dead local variable
 const_eval_dealloc_immutable =

compiler/rustc_const_eval/src/const_eval/eval_queries.rs (+7 -5)

@@ -16,12 +16,12 @@ use super::{CanAccessMutGlobal, CompileTimeEvalContext, CompileTimeInterpreter};
 use crate::const_eval::CheckAlignment;
 use crate::errors;
 use crate::errors::ConstEvalError;
-use crate::interpret::eval_nullary_intrinsic;
 use crate::interpret::{
     create_static_alloc, intern_const_alloc_recursive, CtfeValidationMode, GlobalId, Immediate,
     InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
     StackPopCleanup,
 };
+use crate::interpret::{eval_nullary_intrinsic, patch_mutability_of_allocs};

 // Returns a pointer to where the result lives
 #[instrument(level = "trace", skip(ecx, body))]
@@ -81,11 +81,13 @@ fn eval_body_using_ecx<'mir, 'tcx, R: InterpretationResult<'tcx>>(
     // The main interpreter loop.
     while ecx.step()? {}

-    // Intern the result
-    intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
+    patch_mutability_of_allocs(ecx, intern_kind, &ret)?;

     // Since evaluation had no errors, validate the resulting constant.
-    const_validate_mplace(&ecx, &ret, cid)?;
+    const_validate_mplace(ecx, &ret, cid)?;
+
+    // Intern the result
+    intern_const_alloc_recursive(ecx, intern_kind, &ret)?;

     Ok(R::make_result(ret, ecx))
 }
@@ -401,7 +403,7 @@ fn const_validate_mplace<'mir, 'tcx>(
             CtfeValidationMode::Const { allow_immutable_unsafe_cell: !inner }
         }
     };
-    ecx.const_validate_operand(&mplace.into(), path, &mut ref_tracking, mode)
+    ecx.const_validate_operand(mplace, path, &mut ref_tracking, mode)
         // Instead of just reporting the `InterpError` via the usual machinery, we give a more targetted
         // error about the validation failure.
         .map_err(|error| report_validation_error(&ecx, error, alloc_id))?;
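
The hunks above reorder the tail of `eval_body_using_ecx`: allocation mutability is patched first, the result is validated while its allocations are still in the interpreter's memory, and only then is everything interned. As a rough illustration of why that order matters, here is a minimal, self-contained Rust sketch; `Interp`, `Alloc`, and the two maps are hypothetical stand-ins for the interpreter state, not rustc types.

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Debug)]
enum Mutability {
    Not,
    Mut,
}

#[derive(Debug)]
struct Alloc {
    bytes: Vec<u8>,
    mutability: Mutability,
}

#[derive(Default)]
struct Interp {
    local: HashMap<u64, Alloc>,  // allocations still owned by the evaluator
    global: HashMap<u64, Alloc>, // "interned" allocations (stand-in for the tcx arena)
}

impl Interp {
    // Phase 1: freeze the mutability flags; nothing is moved yet.
    fn patch_mutability_of_allocs(&mut self, root: u64, m: Mutability) {
        if let Some(a) = self.local.get_mut(&root) {
            a.mutability = m;
        }
    }

    // Phase 2: validation still finds the allocation in `local`, so it can walk
    // the value and report precise errors before anything is interned.
    fn validate(&self, root: u64) -> Result<(), String> {
        self.local.get(&root).map(|_| ()).ok_or_else(|| "dangling root".to_string())
    }

    // Phase 3: interning moves the allocation out of `local`; afterwards the
    // evaluator can no longer inspect or modify it.
    fn intern(&mut self, root: u64) {
        if let Some(a) = self.local.remove(&root) {
            self.global.insert(root, a);
        }
    }
}

fn main() {
    let mut ecx = Interp::default();
    ecx.local.insert(0, Alloc { bytes: vec![42], mutability: Mutability::Mut });
    ecx.patch_mutability_of_allocs(0, Mutability::Not);
    ecx.validate(0).expect("validation runs while the allocation is still local");
    ecx.intern(0);
    assert_eq!(ecx.global[&0].mutability, Mutability::Not);
}

In the sketch, interning removes the allocation from the evaluator's local map, which is why validation has to run before it; this mirrors the ordering introduced in the diff above.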

compiler/rustc_const_eval/src/const_eval/valtrees.rs (+3 -1)

@@ -10,11 +10,11 @@ use super::machine::CompileTimeEvalContext;
 use super::{ValTreeCreationError, ValTreeCreationResult, VALTREE_MAX_NODES};
 use crate::const_eval::CanAccessMutGlobal;
 use crate::errors::MaxNumNodesInConstErr;
-use crate::interpret::MPlaceTy;
 use crate::interpret::{
     intern_const_alloc_recursive, ImmTy, Immediate, InternKind, MemPlaceMeta, MemoryKind, PlaceTy,
     Projectable, Scalar,
 };
+use crate::interpret::{patch_mutability_of_allocs, MPlaceTy};

 #[instrument(skip(ecx), level = "debug")]
 fn branches<'tcx>(
@@ -323,6 +323,7 @@ pub fn valtree_to_const_value<'tcx>(

     valtree_into_mplace(&mut ecx, &place, valtree);
     dump_place(&ecx, &place);
+    patch_mutability_of_allocs(&mut ecx, InternKind::Constant, &place).unwrap();
     intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &place).unwrap();

     op_to_const(&ecx, &place.into(), /* for diagnostics */ false)
@@ -359,6 +360,7 @@ fn valtree_to_ref<'tcx>(

     valtree_into_mplace(ecx, &pointee_place, valtree);
     dump_place(ecx, &pointee_place);
+    patch_mutability_of_allocs(ecx, InternKind::Constant, &pointee_place).unwrap();
     intern_const_alloc_recursive(ecx, InternKind::Constant, &pointee_place).unwrap();

     pointee_place.to_ref(&ecx.tcx)

compiler/rustc_const_eval/src/errors.rs (-8)

@@ -17,14 +17,6 @@ use rustc_target::abi::{Size, WrappingRange};

 use crate::interpret::InternKind;

-#[derive(Diagnostic)]
-#[diag(const_eval_dangling_ptr_in_final)]
-pub(crate) struct DanglingPtrInFinal {
-    #[primary_span]
-    pub span: Span,
-    pub kind: InternKind,
-}
-
 #[derive(LintDiagnostic)]
 #[diag(const_eval_mutable_ptr_in_final)]
 pub(crate) struct MutablePtrInFinal {

compiler/rustc_const_eval/src/interpret/intern.rs (+68 -47)

@@ -27,7 +27,7 @@ use rustc_span::sym;

 use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy};
 use crate::const_eval;
-use crate::errors::{DanglingPtrInFinal, MutablePtrInFinal};
+use crate::errors::MutablePtrInFinal;

 pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
     'mir,
@@ -62,26 +62,13 @@ impl HasStaticRootDefId for const_eval::CompileTimeInterpreter<'_, '_> {
 fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
     alloc_id: AllocId,
-    mutability: Mutability,
 ) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
     trace!("intern_shallow {:?}", alloc_id);
     // remove allocation
     // FIXME(#120456) - is `swap_remove` correct?
-    let Some((_kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
+    let Some((_kind, alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
         return Err(());
     };
-    // Set allocation mutability as appropriate. This is used by LLVM to put things into
-    // read-only memory, and also by Miri when evaluating other globals that
-    // access this one.
-    match mutability {
-        Mutability::Not => {
-            alloc.mutability = Mutability::Not;
-        }
-        Mutability::Mut => {
-            // This must be already mutable, we won't "un-freeze" allocations ever.
-            assert_eq!(alloc.mutability, Mutability::Mut);
-        }
-    }
     // link the alloc id to the actual allocation
     let alloc = ecx.tcx.mk_const_alloc(alloc);
     if let Some(static_id) = ecx.machine.static_def_id() {
@@ -123,14 +110,9 @@ pub enum InternKind {
     Promoted,
 }

-/// Intern `ret` and everything it references.
-///
-/// This *cannot raise an interpreter error*. Doing so is left to validation, which
-/// tracks where in the value we are and thus can show much better error messages.
-///
-/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
-#[instrument(level = "debug", skip(ecx))]
-pub fn intern_const_alloc_recursive<
+/// Now that evaluation is finished, and we are not going to modify allocations anymore,
+/// recursively mark all allocations as immutable if the item kind calls for it (const/promoted/immut static).
+pub fn patch_mutability_of_allocs<
     'mir,
     'tcx: 'mir,
     M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
@@ -141,12 +123,12 @@ pub fn intern_const_alloc_recursive<
 ) -> Result<(), ErrorGuaranteed> {
     // We are interning recursively, and for mutability we are distinguishing the "root" allocation
     // that we are starting in, and all other allocations that we are encountering recursively.
-    let (base_mutability, inner_mutability, is_static) = match intern_kind {
+    let (base_mutability, inner_mutability) = match intern_kind {
         InternKind::Constant | InternKind::Promoted => {
             // Completely immutable. Interning anything mutably here can only lead to unsoundness,
             // since all consts are conceptually independent values but share the same underlying
             // memory.
-            (Mutability::Not, Mutability::Not, false)
+            (Mutability::Not, Mutability::Not)
         }
         InternKind::Static(Mutability::Not) => {
             (
@@ -159,30 +141,79 @@ pub fn intern_const_alloc_recursive<
                 // Inner allocations are never mutable. They can only arise via the "tail
                 // expression" / "outer scope" rule, and we treat them consistently with `const`.
                 Mutability::Not,
-                true,
             )
         }
         InternKind::Static(Mutability::Mut) => {
             // Just make everything mutable. We accept code like
             // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
-            (Mutability::Mut, Mutability::Mut, true)
+            (Mutability::Mut, Mutability::Mut)
         }
     };
+    let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
+    let mut todo: Vec<_> = {
+        let base_alloc = &mut ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap().1;
+        base_alloc.mutability = base_mutability;
+        base_alloc.provenance().ptrs().iter().copied().collect()
+    };
+    let mut seen = FxHashSet::default();
+    seen.insert(base_alloc_id);
+    while let Some((_, prov)) = todo.pop() {
+        if !seen.insert(prov.alloc_id()) {
+            // Already processed
+            continue;
+        }
+        let Some((_, alloc)) = &mut ecx.memory.alloc_map.get_mut(&prov.alloc_id()) else {
+            continue;
+        };
+        // We always intern with `inner_mutability`, and furthermore we ensured above that if
+        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
+        // interned memory -- justifying that we can indeed intern immutably. However this also
+        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
+        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
+        // we'd have to somehow check that they are *all* immutable before deciding that this
+        // allocation can be made immutable. In the future we could consider analyzing all
+        // pointers before deciding which allocations can be made immutable; but for now we are
+        // okay with losing some potential for immutability here. This can anyway only affect
+        // `static mut`.
+        alloc.mutability = inner_mutability;
+        todo.extend(alloc.provenance().ptrs().iter().copied());
+    }
+    Ok(())
+}
+
+/// Intern `ret` and everything it references.
+///
+/// This *cannot raise an interpreter error*. Doing so is left to validation, which
+/// tracks where in the value we are and thus can show much better error messages.
+///
+/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
+#[instrument(level = "debug", skip(ecx))]
+pub fn intern_const_alloc_recursive<
+    'mir,
+    'tcx: 'mir,
+    M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
+>(
+    ecx: &mut InterpCx<'mir, 'tcx, M>,
+    intern_kind: InternKind,
+    ret: &MPlaceTy<'tcx>,
+) -> Result<(), ErrorGuaranteed> {
+    let (inner_mutability, is_static) = match intern_kind {
+        InternKind::Constant | InternKind::Promoted => (Mutability::Not, false),
+        InternKind::Static(mutability) => (mutability, true),
+    };

     // Intern the base allocation, and initialize todo list for recursive interning.
     let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
-    trace!(?base_alloc_id, ?base_mutability);
+    trace!(?base_alloc_id);
     // First we intern the base allocation, as it requires a different mutability.
     // This gives us the initial set of nested allocations, which will then all be processed
     // recursively in the loop below.
     let mut todo: Vec<_> = if is_static {
         // Do not steal the root allocation, we need it later to create the return value of `eval_static_initializer`.
-        // But still change its mutability to match the requested one.
-        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
-        alloc.1.mutability = base_mutability;
+        let alloc = ecx.memory.alloc_map.get(&base_alloc_id).unwrap();
         alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
     } else {
-        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect()
+        intern_shallow(ecx, base_alloc_id).unwrap().collect()
     };
     // We need to distinguish "has just been interned" from "was already in `tcx`",
     // so we track this in a separate set.
@@ -248,19 +279,7 @@ pub fn intern_const_alloc_recursive<
             continue;
         }
         just_interned.insert(alloc_id);
-        // We always intern with `inner_mutability`, and furthermore we ensured above that if
-        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
-        // interned memory -- justifying that we can indeed intern immutably. However this also
-        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
-        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
-        // we'd have to somehow check that they are *all* immutable before deciding that this
-        // allocation can be made immutable. In the future we could consider analyzing all
-        // pointers before deciding which allocations can be made immutable; but for now we are
-        // okay with losing some potential for immutability here. This can anyway only affect
-        // `static mut`.
-        todo.extend(intern_shallow(ecx, alloc_id, inner_mutability).map_err(|()| {
-            ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
-        })?);
+        todo.extend(intern_shallow(ecx, alloc_id).unwrap());
     }
     if found_bad_mutable_pointer {
         let err_diag = MutablePtrInFinal { span: ecx.tcx.span, kind: intern_kind };
@@ -291,7 +310,8 @@ pub fn intern_const_alloc_for_constprop<
         return Ok(());
     }
     // Move allocation to `tcx`.
-    for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
+    ecx.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
+    for _ in intern_shallow(ecx, alloc_id).map_err(|()| err_ub!(DeadLocal))? {
         // We are not doing recursive interning, so we don't currently support provenance.
         // (If this assertion ever triggers, we should just implement a
         // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
@@ -318,7 +338,8 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
         let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
-        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
+        self.memory.alloc_map.get_mut(&alloc_id).unwrap().1.mutability = Mutability::Not;
+        for prov in intern_shallow(self, alloc_id).unwrap() {
            // We are not doing recursive interning, so we don't currently support provenance.
            // (If this assertion ever triggers, we should just implement a
            // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
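
The heart of the new `patch_mutability_of_allocs` above is a worklist walk over the provenance graph: seed the todo list from the root allocation, then mark every reachable allocation with the inner mutability exactly once. A simplified, self-contained sketch of that traversal, using std collections and made-up `Alloc`/`AllocId` stand-ins instead of the interpreter's `alloc_map`, could look like this:

use std::collections::{HashMap, HashSet};

type AllocId = u64;

#[derive(Clone, Copy, PartialEq, Debug)]
enum Mutability {
    Not,
    Mut,
}

struct Alloc {
    mutability: Mutability,
    // allocation ids this allocation points to (a stand-in for its provenance)
    pointers: Vec<AllocId>,
}

fn patch_mutability(
    allocs: &mut HashMap<AllocId, Alloc>,
    root: AllocId,
    base_mutability: Mutability,
    inner_mutability: Mutability,
) {
    // The root allocation gets the base mutability and seeds the todo list.
    let mut todo: Vec<AllocId> = {
        let base = allocs.get_mut(&root).expect("root allocation must exist");
        base.mutability = base_mutability;
        base.pointers.clone()
    };
    let mut seen = HashSet::new();
    seen.insert(root);
    while let Some(id) = todo.pop() {
        if !seen.insert(id) {
            // already processed
            continue;
        }
        // Allocations not in the local map (e.g. already interned elsewhere)
        // are skipped, mirroring the `else { continue }` in the diff above.
        let Some(alloc) = allocs.get_mut(&id) else { continue };
        alloc.mutability = inner_mutability;
        todo.extend(alloc.pointers.iter().copied());
    }
}

fn main() {
    let mut allocs = HashMap::new();
    allocs.insert(0, Alloc { mutability: Mutability::Mut, pointers: vec![1, 2] });
    allocs.insert(1, Alloc { mutability: Mutability::Mut, pointers: vec![2] });
    allocs.insert(2, Alloc { mutability: Mutability::Mut, pointers: vec![] });
    patch_mutability(&mut allocs, 0, Mutability::Not, Mutability::Not);
    assert!(allocs.values().all(|a| a.mutability == Mutability::Not));
}

Because mutability is now fixed in this separate pass, `intern_shallow` no longer needs a `mutability` parameter, and interning itself can run after validation.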

compiler/rustc_const_eval/src/interpret/mod.rs (+2 -1)

@@ -22,7 +22,8 @@ pub use rustc_middle::mir::interpret::*; // have all the `interpret` symbols in

 pub use self::eval_context::{format_interp_error, Frame, FrameInfo, InterpCx, StackPopCleanup};
 pub use self::intern::{
-    intern_const_alloc_for_constprop, intern_const_alloc_recursive, HasStaticRootDefId, InternKind,
+    intern_const_alloc_for_constprop, intern_const_alloc_recursive, patch_mutability_of_allocs,
+    HasStaticRootDefId, InternKind,
 };
 pub use self::machine::{compile_time_machine, AllocMap, Machine, MayLeak, StackPopJump};
 pub use self::memory::{AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
