Unrolled build for #132801
Rollup merge of #132801 - RalfJung:alloc-mutability, r=oli-obk

interpret: get_alloc_info: also return mutability

This will be needed for rust-lang/miri#3971

This then turned into a larger refactor where we introduce a new type for the `get_alloc_info` return data, and we move some code to methods on `GlobalAlloc` to avoid duplicating it between the validity check and `get_alloc_info`.
rust-timer authored Nov 10, 2024
2 parents 7660aed + 4a54ec8 commit ef45597
Showing 11 changed files with 190 additions and 166 deletions.
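
For orientation, here is a minimal standalone sketch of the interface change described in the commit message: `get_alloc_info` used to return a `(Size, Align, AllocKind)` tuple and now returns an `AllocInfo` struct that additionally carries mutability. The `Size`, `Align`, `AllocKind`, and `Mutability` types below are simplified stand-ins, not the rustc_abi/rustc_ast definitions; the real method is `InterpCx::get_alloc_info` in memory.rs below.

```rust
// Standalone sketch of the interface change only; not the rustc code itself.
#![allow(dead_code)]

#[derive(Copy, Clone, Debug, PartialEq)]
struct Size(u64);
#[derive(Copy, Clone, Debug, PartialEq)]
struct Align(u64);
#[derive(Copy, Clone, Debug, PartialEq)]
enum AllocKind { LiveData, Function, VTable, Dead }
#[derive(Copy, Clone, Debug, PartialEq)]
enum Mutability { Not, Mut }

/// New return type: a named struct that also carries mutability.
#[derive(Copy, Clone, Debug, PartialEq)]
struct AllocInfo { size: Size, align: Align, kind: AllocKind, mutbl: Mutability }

// Old shape: an anonymous tuple, so every caller destructured it and
// discarded the fields it did not need.
fn get_alloc_info_old() -> (Size, Align, AllocKind) {
    (Size(8), Align(8), AllocKind::LiveData)
}

// New shape: callers pick out only the fields they care about.
fn get_alloc_info_new() -> AllocInfo {
    AllocInfo { size: Size(8), align: Align(8), kind: AllocKind::LiveData, mutbl: Mutability::Not }
}

fn main() {
    let (size, _align, _kind) = get_alloc_info_old(); // before
    let info = get_alloc_info_new(); // after
    assert_eq!(size, info.size);
    println!("kind = {:?}, mutbl = {:?}", info.kind, info.mutbl);
}
```
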
5 changes: 3 additions & 2 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -472,8 +472,9 @@ fn report_validation_error<'tcx>(
backtrace.print_backtrace();

let bytes = ecx.print_alloc_bytes_for_diagnostics(alloc_id);
let (size, align, _) = ecx.get_alloc_info(alloc_id);
let raw_bytes = errors::RawBytesNote { size: size.bytes(), align: align.bytes(), bytes };
let info = ecx.get_alloc_info(alloc_id);
let raw_bytes =
errors::RawBytesNote { size: info.size.bytes(), align: info.align.bytes(), bytes };

crate::const_eval::report(
*ecx.tcx,
132 changes: 57 additions & 75 deletions compiler/rustc_const_eval/src/interpret/memory.rs
@@ -14,10 +14,9 @@ use std::{fmt, mem, ptr};
use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_hir::def::DefKind;
use rustc_middle::bug;
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, ParamEnv, Ty, TyCtxt};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use tracing::{debug, instrument, trace};

use super::{
@@ -72,6 +71,21 @@ pub enum AllocKind {
Dead,
}

/// Metadata about an `AllocId`.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
pub size: Size,
pub align: Align,
pub kind: AllocKind,
pub mutbl: Mutability,
}

impl AllocInfo {
fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
Self { size, align, kind, mutbl }
}
}

/// The value of a function pointer.
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
@@ -524,17 +538,22 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
match self.ptr_try_get_alloc_id(ptr, 0) {
Err(addr) => is_offset_misaligned(addr, align),
Ok((alloc_id, offset, _prov)) => {
let (_size, alloc_align, kind) = self.get_alloc_info(alloc_id);
if let Some(misalign) =
M::alignment_check(self, alloc_id, alloc_align, kind, offset, align)
{
let alloc_info = self.get_alloc_info(alloc_id);
if let Some(misalign) = M::alignment_check(
self,
alloc_id,
alloc_info.align,
alloc_info.kind,
offset,
align,
) {
Some(misalign)
} else if M::Provenance::OFFSET_IS_ADDR {
is_offset_misaligned(ptr.addr().bytes(), align)
} else {
// Check allocation alignment and offset alignment.
if alloc_align.bytes() < align.bytes() {
Some(Misalignment { has: alloc_align, required: align })
if alloc_info.align.bytes() < align.bytes() {
Some(Misalignment { has: alloc_info.align, required: align })
} else {
is_offset_misaligned(offset.bytes(), align)
}
@@ -818,82 +837,45 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {

/// Obtain the size and alignment of an allocation, even if that allocation has
/// been deallocated.
pub fn get_alloc_info(&self, id: AllocId) -> (Size, Align, AllocKind) {
pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
// # Regular allocations
// Don't use `self.get_raw` here as that will
// a) cause cycles in case `id` refers to a static
// b) duplicate a global's allocation in miri
if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
return (alloc.size(), alloc.align, AllocKind::LiveData);
return AllocInfo::new(
alloc.size(),
alloc.align,
AllocKind::LiveData,
alloc.mutability,
);
}

// # Function pointers
// (both global from `alloc_map` and local from `extra_fn_ptr_map`)
if self.get_fn_alloc(id).is_some() {
return (Size::ZERO, Align::ONE, AllocKind::Function);
return AllocInfo::new(Size::ZERO, Align::ONE, AllocKind::Function, Mutability::Not);
}

// # Statics
// Can't do this in the match argument, we may get cycle errors since the lock would
// be held throughout the match.
match self.tcx.try_get_global_alloc(id) {
Some(GlobalAlloc::Static(def_id)) => {
// Thread-local statics do not have a constant address. They *must* be accessed via
// `ThreadLocalRef`; we can never have a pointer to them as a regular constant value.
assert!(!self.tcx.is_thread_local_static(def_id));

let DefKind::Static { nested, .. } = self.tcx.def_kind(def_id) else {
bug!("GlobalAlloc::Static is not a static")
};

let (size, align) = if nested {
// Nested anonymous statics are untyped, so let's get their
// size and alignment from the allocation itself. This always
// succeeds, as the query is fed at DefId creation time, so no
// evaluation actually occurs.
let alloc = self.tcx.eval_static_initializer(def_id).unwrap();
(alloc.0.size(), alloc.0.align)
} else {
// Use size and align of the type for everything else. We need
// to do that to
// * avoid cycle errors in case of self-referential statics,
// * be able to get information on extern statics.
let ty = self
.tcx
.type_of(def_id)
.no_bound_vars()
.expect("statics should not have generic parameters");
let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
assert!(layout.is_sized());
(layout.size, layout.align.abi)
};
(size, align, AllocKind::LiveData)
}
Some(GlobalAlloc::Memory(alloc)) => {
// Need to duplicate the logic here, because the global allocations have
// different associated types than the interpreter-local ones.
let alloc = alloc.inner();
(alloc.size(), alloc.align, AllocKind::LiveData)
}
Some(GlobalAlloc::Function { .. }) => {
bug!("We already checked function pointers above")
}
Some(GlobalAlloc::VTable(..)) => {
// No data to be accessed here. But vtables are pointer-aligned.
return (Size::ZERO, self.tcx.data_layout.pointer_align.abi, AllocKind::VTable);
}
// The rest must be dead.
None => {
// Deallocated pointers are allowed, we should be able to find
// them in the map.
let (size, align) = *self
.memory
.dead_alloc_map
.get(&id)
.expect("deallocated pointers should all be recorded in `dead_alloc_map`");
(size, align, AllocKind::Dead)
}
// # Global allocations
if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
let (size, align) = global_alloc.size_and_align(*self.tcx, self.param_env);
let mutbl = global_alloc.mutability(*self.tcx, self.param_env);
let kind = match global_alloc {
GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
GlobalAlloc::VTable { .. } => AllocKind::VTable,
};
return AllocInfo::new(size, align, kind, mutbl);
}

// # Dead pointers
let (size, align) = *self
.memory
.dead_alloc_map
.get(&id)
.expect("deallocated pointers should all be recorded in `dead_alloc_map`");
AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
}

/// Obtain the size and alignment of a *live* allocation.
@@ -902,11 +884,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
id: AllocId,
msg: CheckInAllocMsg,
) -> InterpResult<'tcx, (Size, Align)> {
let (size, align, kind) = self.get_alloc_info(id);
if matches!(kind, AllocKind::Dead) {
let info = self.get_alloc_info(id);
if matches!(info.kind, AllocKind::Dead) {
throw_ub!(PointerUseAfterFree(id, msg))
}
interp_ok((size, align))
interp_ok((info.size, info.align))
}

fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
@@ -1458,7 +1440,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
let ptr = scalar.to_pointer(self)?;
match self.ptr_try_get_alloc_id(ptr, 0) {
Ok((alloc_id, offset, _)) => {
let (size, _align, _kind) = self.get_alloc_info(alloc_id);
let size = self.get_alloc_info(alloc_id).size;
// If the pointer is out-of-bounds, it may be null.
// Note that one-past-the-end (offset == size) is still inbounds, and never null.
offset > size
2 changes: 1 addition & 1 deletion compiler/rustc_const_eval/src/interpret/mod.rs
@@ -31,7 +31,7 @@ pub use self::intern::{
};
pub(crate) use self::intrinsics::eval_nullary_intrinsic;
pub use self::machine::{AllocMap, Machine, MayLeak, ReturnAction, compile_time_machine};
pub use self::memory::{AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
pub use self::memory::{AllocInfo, AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
use self::operand::Operand;
pub use self::operand::{ImmTy, Immediate, OpTy};
pub use self::place::{MPlaceTy, MemPlaceMeta, PlaceTy, Writeable};
91 changes: 26 additions & 65 deletions compiler/rustc_const_eval/src/interpret/validity.rs
@@ -31,8 +31,8 @@ use tracing::trace;

use super::machine::AllocMap;
use super::{
AllocId, AllocKind, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult,
MPlaceTy, Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
format_interp_error,
};

@@ -557,9 +557,20 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
if let Ok((alloc_id, _offset, _prov)) =
self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
{
if let Some(GlobalAlloc::Static(did)) =
self.ecx.tcx.try_get_global_alloc(alloc_id)
{
// Everything should be already interned.
let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
assert!(self.ecx.memory.alloc_map.get(alloc_id).is_none());
// We can't have *any* references to non-existing allocations in const-eval
// as the rest of rustc isn't happy with them... so we throw an error, even
// though for zero-sized references this isn't really UB.
// A potential future alternative would be to resurrect this as a zero-sized allocation
// (which codegen will then compile to an aligned dummy pointer anyway).
throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
};
let (size, _align) =
global_alloc.size_and_align(*self.ecx.tcx, self.ecx.param_env);

if let GlobalAlloc::Static(did) = global_alloc {
let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
bug!()
};
@@ -593,17 +604,6 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
}
}

// Dangling and Mutability check.
let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
if alloc_kind == AllocKind::Dead {
// This can happen for zero-sized references. We can't have *any* references to
// non-existing allocations in const-eval though, interning rejects them all as
// the rest of rustc isn't happy with them... so we throw an error, even though
// this isn't really UB.
// A potential future alternative would be to resurrect this as a zero-sized allocation
// (which codegen will then compile to an aligned dummy pointer anyway).
throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
}
// If this allocation has size zero, there is no actual mutability here.
if size != Size::ZERO {
// Determine whether this pointer expects to be pointing to something mutable.
Expand All @@ -618,7 +618,8 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
}
};
// Determine what it actually points to.
let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
let alloc_actual_mutbl =
global_alloc.mutability(*self.ecx.tcx, self.ecx.param_env);
// Mutable pointer to immutable memory is no good.
if ptr_expected_mutbl == Mutability::Mut
&& alloc_actual_mutbl == Mutability::Not
@@ -842,9 +843,16 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
}

fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
debug_assert!(self.ctfe_mode.is_some());
if let Some(mplace) = val.as_mplace_or_local().left() {
if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
mutability(self.ecx, alloc_id).is_mut()
let tcx = *self.ecx.tcx;
// Everything must be already interned.
let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.param_env);
if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
assert_eq!(alloc.mutability, mutbl);
}
mutbl.is_mut()
} else {
// No memory at all.
false
@@ -1016,53 +1024,6 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
}
}

/// Returns whether the allocation is mutable, and whether it's actually a static.
/// For "root" statics we look at the type to account for interior
/// mutability; for nested statics we have no type and directly use the annotated mutability.
fn mutability<'tcx>(ecx: &InterpCx<'tcx, impl Machine<'tcx>>, alloc_id: AllocId) -> Mutability {
// Let's see what kind of memory this points to.
// We're not using `try_global_alloc` since dangling pointers have already been handled.
match ecx.tcx.global_alloc(alloc_id) {
GlobalAlloc::Static(did) => {
let DefKind::Static { safety: _, mutability, nested } = ecx.tcx.def_kind(did) else {
bug!()
};
if nested {
assert!(
ecx.memory.alloc_map.get(alloc_id).is_none(),
"allocations of nested statics are already interned: {alloc_id:?}, {did:?}"
);
// Nested statics in a `static` are never interior mutable,
// so just use the declared mutability.
mutability
} else {
let mutability = match mutability {
Mutability::Not
if !ecx
.tcx
.type_of(did)
.no_bound_vars()
.expect("statics should not have generic parameters")
.is_freeze(*ecx.tcx, ty::ParamEnv::reveal_all()) =>
{
Mutability::Mut
}
_ => mutability,
};
if let Some((_, alloc)) = ecx.memory.alloc_map.get(alloc_id) {
assert_eq!(alloc.mutability, mutability);
}
mutability
}
}
GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
GlobalAlloc::Function { .. } | GlobalAlloc::VTable(..) => {
// These are immutable, we better don't allow mutable pointers here.
Mutability::Not
}
}
}

impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
type V = PlaceTy<'tcx, M::Provenance>;

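The `mutability` helper deleted above, together with the static size/align logic deleted from memory.rs, is replaced by calls to `GlobalAlloc::mutability` and `GlobalAlloc::size_and_align`. Those methods are presumably added in rustc_middle, where `GlobalAlloc` is defined, one of the changed files not expanded on this page. Below is a rough standalone model of that consolidation, with stub types in place of `TyCtxt`, `ParamEnv`, and the real `GlobalAlloc`; it illustrates the pattern of sharing the logic between the validity check and `get_alloc_info`, not the actual implementation.

```rust
// Standalone model only: all types here are stubs for illustration.
#![allow(dead_code)]

#[derive(Copy, Clone, Debug, PartialEq)]
enum Mutability { Not, Mut }

struct StaticInfo {
    declared_mut: Mutability,
    nested: bool,
    interior_mutable: bool, // e.g. the static's type contains an UnsafeCell
}

enum GlobalAlloc {
    Static(StaticInfo),
    Memory { mutability: Mutability },
    Function,
    VTable,
}

impl GlobalAlloc {
    // Shared logic that both the validity check and `get_alloc_info` now call
    // instead of duplicating it (mirrors the `mutability` helper deleted above).
    fn mutability(&self) -> Mutability {
        match self {
            GlobalAlloc::Static(s) if s.nested => {
                // Nested statics are never interior mutable; use the declared mutability.
                s.declared_mut
            }
            GlobalAlloc::Static(s) => {
                if s.declared_mut == Mutability::Not && s.interior_mutable {
                    // A non-`mut` root static whose type is not `Freeze` is still mutable memory.
                    Mutability::Mut
                } else {
                    s.declared_mut
                }
            }
            GlobalAlloc::Memory { mutability } => *mutability,
            // Function pointers and vtables are always immutable.
            GlobalAlloc::Function | GlobalAlloc::VTable => Mutability::Not,
        }
    }
}

fn main() {
    let root_static = GlobalAlloc::Static(StaticInfo {
        declared_mut: Mutability::Not,
        nested: false,
        interior_mutable: true,
    });
    assert_eq!(root_static.mutability(), Mutability::Mut);
}
```
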
(Diffs for the remaining changed files are not expanded on this page.)
