
Commit ef45597

Unrolled build for #132801
Rollup merge of #132801 - RalfJung:alloc-mutability, r=oli-obk

interpret: get_alloc_info: also return mutability

This will be needed for rust-lang/miri#3971. This then turned into a larger refactor where we introduce a new type for the `get_alloc_info` return data, and we move some code to methods on `GlobalAlloc` to avoid duplicating it between the validity check and `get_alloc_info`.
2 parents 7660aed + 4a54ec8 commit ef45597

File tree

11 files changed: +190 −166 lines changed
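The heart of the change is visible in the first two diffs: `get_alloc_info` used to return a `(Size, Align, AllocKind)` tuple and now returns an `AllocInfo` struct that additionally carries the allocation's mutability. A minimal caller-side sketch of the difference, using the field names introduced in the memory.rs diff below:

    // Before this commit: positional tuple, no mutability available.
    let (size, align, kind) = ecx.get_alloc_info(alloc_id);

    // After this commit: named fields, plus the new `mutbl` field.
    let info = ecx.get_alloc_info(alloc_id);
    let (size, align, kind, mutbl) = (info.size, info.align, info.kind, info.mutbl);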

Diff for: compiler/rustc_const_eval/src/const_eval/eval_queries.rs

+3 −2

@@ -472,8 +472,9 @@ fn report_validation_error<'tcx>(
     backtrace.print_backtrace();
 
     let bytes = ecx.print_alloc_bytes_for_diagnostics(alloc_id);
-    let (size, align, _) = ecx.get_alloc_info(alloc_id);
-    let raw_bytes = errors::RawBytesNote { size: size.bytes(), align: align.bytes(), bytes };
+    let info = ecx.get_alloc_info(alloc_id);
+    let raw_bytes =
+        errors::RawBytesNote { size: info.size.bytes(), align: info.align.bytes(), bytes };
 
     crate::const_eval::report(
         *ecx.tcx,

Diff for: compiler/rustc_const_eval/src/interpret/memory.rs

+57 −75

@@ -14,10 +14,9 @@ use std::{fmt, mem, ptr};
 use rustc_abi::{Align, HasDataLayout, Size};
 use rustc_ast::Mutability;
 use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
-use rustc_hir::def::DefKind;
 use rustc_middle::bug;
 use rustc_middle::mir::display_allocation;
-use rustc_middle::ty::{self, Instance, ParamEnv, Ty, TyCtxt};
+use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
 use tracing::{debug, instrument, trace};
 
 use super::{
@@ -72,6 +71,21 @@ pub enum AllocKind {
     Dead,
 }
 
+/// Metadata about an `AllocId`.
+#[derive(Copy, Clone, PartialEq, Debug)]
+pub struct AllocInfo {
+    pub size: Size,
+    pub align: Align,
+    pub kind: AllocKind,
+    pub mutbl: Mutability,
+}
+
+impl AllocInfo {
+    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
+        Self { size, align, kind, mutbl }
+    }
+}
+
 /// The value of a function pointer.
 #[derive(Debug, Copy, Clone)]
 pub enum FnVal<'tcx, Other> {
@@ -524,17 +538,22 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         match self.ptr_try_get_alloc_id(ptr, 0) {
             Err(addr) => is_offset_misaligned(addr, align),
             Ok((alloc_id, offset, _prov)) => {
-                let (_size, alloc_align, kind) = self.get_alloc_info(alloc_id);
-                if let Some(misalign) =
-                    M::alignment_check(self, alloc_id, alloc_align, kind, offset, align)
-                {
+                let alloc_info = self.get_alloc_info(alloc_id);
+                if let Some(misalign) = M::alignment_check(
+                    self,
+                    alloc_id,
+                    alloc_info.align,
+                    alloc_info.kind,
+                    offset,
+                    align,
+                ) {
                     Some(misalign)
                 } else if M::Provenance::OFFSET_IS_ADDR {
                     is_offset_misaligned(ptr.addr().bytes(), align)
                 } else {
                     // Check allocation alignment and offset alignment.
-                    if alloc_align.bytes() < align.bytes() {
-                        Some(Misalignment { has: alloc_align, required: align })
+                    if alloc_info.align.bytes() < align.bytes() {
+                        Some(Misalignment { has: alloc_info.align, required: align })
                     } else {
                         is_offset_misaligned(offset.bytes(), align)
                     }
@@ -818,82 +837,45 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
 
     /// Obtain the size and alignment of an allocation, even if that allocation has
     /// been deallocated.
-    pub fn get_alloc_info(&self, id: AllocId) -> (Size, Align, AllocKind) {
+    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
         // # Regular allocations
         // Don't use `self.get_raw` here as that will
         // a) cause cycles in case `id` refers to a static
         // b) duplicate a global's allocation in miri
         if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
-            return (alloc.size(), alloc.align, AllocKind::LiveData);
+            return AllocInfo::new(
+                alloc.size(),
+                alloc.align,
+                AllocKind::LiveData,
+                alloc.mutability,
+            );
         }
 
         // # Function pointers
         // (both global from `alloc_map` and local from `extra_fn_ptr_map`)
         if self.get_fn_alloc(id).is_some() {
-            return (Size::ZERO, Align::ONE, AllocKind::Function);
+            return AllocInfo::new(Size::ZERO, Align::ONE, AllocKind::Function, Mutability::Not);
         }
 
-        // # Statics
-        // Can't do this in the match argument, we may get cycle errors since the lock would
-        // be held throughout the match.
-        match self.tcx.try_get_global_alloc(id) {
-            Some(GlobalAlloc::Static(def_id)) => {
-                // Thread-local statics do not have a constant address. They *must* be accessed via
-                // `ThreadLocalRef`; we can never have a pointer to them as a regular constant value.
-                assert!(!self.tcx.is_thread_local_static(def_id));
-
-                let DefKind::Static { nested, .. } = self.tcx.def_kind(def_id) else {
-                    bug!("GlobalAlloc::Static is not a static")
-                };
-
-                let (size, align) = if nested {
-                    // Nested anonymous statics are untyped, so let's get their
-                    // size and alignment from the allocation itself. This always
-                    // succeeds, as the query is fed at DefId creation time, so no
-                    // evaluation actually occurs.
-                    let alloc = self.tcx.eval_static_initializer(def_id).unwrap();
-                    (alloc.0.size(), alloc.0.align)
-                } else {
-                    // Use size and align of the type for everything else. We need
-                    // to do that to
-                    // * avoid cycle errors in case of self-referential statics,
-                    // * be able to get information on extern statics.
-                    let ty = self
-                        .tcx
-                        .type_of(def_id)
-                        .no_bound_vars()
-                        .expect("statics should not have generic parameters");
-                    let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
-                    assert!(layout.is_sized());
-                    (layout.size, layout.align.abi)
-                };
-                (size, align, AllocKind::LiveData)
-            }
-            Some(GlobalAlloc::Memory(alloc)) => {
-                // Need to duplicate the logic here, because the global allocations have
-                // different associated types than the interpreter-local ones.
-                let alloc = alloc.inner();
-                (alloc.size(), alloc.align, AllocKind::LiveData)
-            }
-            Some(GlobalAlloc::Function { .. }) => {
-                bug!("We already checked function pointers above")
-            }
-            Some(GlobalAlloc::VTable(..)) => {
-                // No data to be accessed here. But vtables are pointer-aligned.
-                return (Size::ZERO, self.tcx.data_layout.pointer_align.abi, AllocKind::VTable);
-            }
-            // The rest must be dead.
-            None => {
-                // Deallocated pointers are allowed, we should be able to find
-                // them in the map.
-                let (size, align) = *self
-                    .memory
-                    .dead_alloc_map
-                    .get(&id)
-                    .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
-                (size, align, AllocKind::Dead)
-            }
+        // # Global allocations
+        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
+            let (size, align) = global_alloc.size_and_align(*self.tcx, self.param_env);
+            let mutbl = global_alloc.mutability(*self.tcx, self.param_env);
+            let kind = match global_alloc {
+                GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
+                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
+                GlobalAlloc::VTable { .. } => AllocKind::VTable,
+            };
+            return AllocInfo::new(size, align, kind, mutbl);
         }
+
+        // # Dead pointers
+        let (size, align) = *self
+            .memory
+            .dead_alloc_map
+            .get(&id)
+            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
+        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
     }
 
     /// Obtain the size and alignment of a *live* allocation.
@@ -902,11 +884,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         id: AllocId,
         msg: CheckInAllocMsg,
     ) -> InterpResult<'tcx, (Size, Align)> {
-        let (size, align, kind) = self.get_alloc_info(id);
-        if matches!(kind, AllocKind::Dead) {
+        let info = self.get_alloc_info(id);
+        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
-        interp_ok((size, align))
+        interp_ok((info.size, info.align))
     }
 
     fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
@@ -1458,7 +1440,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         let ptr = scalar.to_pointer(self)?;
         match self.ptr_try_get_alloc_id(ptr, 0) {
             Ok((alloc_id, offset, _)) => {
-                let (size, _align, _kind) = self.get_alloc_info(alloc_id);
+                let size = self.get_alloc_info(alloc_id).size;
                 // If the pointer is out-of-bounds, it may be null.
                 // Note that one-past-the-end (offset == size) is still inbounds, and never null.
                 offset > size
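The new `mutbl` field is what rust-lang/miri#3971 is after. A hypothetical consumer (not part of this diff) holding an `InterpCx` named `ecx` could use it roughly like this; the check itself is an illustration, not code from this commit:

    // Sketch: refuse to write through a pointer into immutable memory.
    let info = ecx.get_alloc_info(alloc_id);
    if info.kind == AllocKind::LiveData && info.mutbl == Mutability::Not {
        // report an error here instead of mutating read-only data
    }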

Diff for: compiler/rustc_const_eval/src/interpret/mod.rs

+1 −1

@@ -31,7 +31,7 @@ pub use self::intern::{
 };
 pub(crate) use self::intrinsics::eval_nullary_intrinsic;
 pub use self::machine::{AllocMap, Machine, MayLeak, ReturnAction, compile_time_machine};
-pub use self::memory::{AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
+pub use self::memory::{AllocInfo, AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
 use self::operand::Operand;
 pub use self::operand::{ImmTy, Immediate, OpTy};
 pub use self::place::{MPlaceTy, MemPlaceMeta, PlaceTy, Writeable};
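Since `AllocInfo` is now re-exported from the `interpret` module, downstream users of the crate (such as Miri) can presumably name it directly:

    use rustc_const_eval::interpret::AllocInfo;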

Diff for: compiler/rustc_const_eval/src/interpret/validity.rs

+26 −65

@@ -31,8 +31,8 @@ use tracing::trace;
 
 use super::machine::AllocMap;
 use super::{
-    AllocId, AllocKind, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult,
-    MPlaceTy, Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
+    AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
+    Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
     format_interp_error,
 };
 
@@ -557,9 +557,20 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
                 if let Ok((alloc_id, _offset, _prov)) =
                     self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
                 {
-                    if let Some(GlobalAlloc::Static(did)) =
-                        self.ecx.tcx.try_get_global_alloc(alloc_id)
-                    {
+                    // Everything should be already interned.
+                    let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
+                        assert!(self.ecx.memory.alloc_map.get(alloc_id).is_none());
+                        // We can't have *any* references to non-existing allocations in const-eval
+                        // as the rest of rustc isn't happy with them... so we throw an error, even
+                        // though for zero-sized references this isn't really UB.
+                        // A potential future alternative would be to resurrect this as a zero-sized allocation
+                        // (which codegen will then compile to an aligned dummy pointer anyway).
+                        throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
+                    };
+                    let (size, _align) =
+                        global_alloc.size_and_align(*self.ecx.tcx, self.ecx.param_env);
+
+                    if let GlobalAlloc::Static(did) = global_alloc {
                         let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
                             bug!()
                         };
@@ -593,17 +604,6 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
                        }
                    }
 
-                    // Dangling and Mutability check.
-                    let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
-                    if alloc_kind == AllocKind::Dead {
-                        // This can happen for zero-sized references. We can't have *any* references to
-                        // non-existing allocations in const-eval though, interning rejects them all as
-                        // the rest of rustc isn't happy with them... so we throw an error, even though
-                        // this isn't really UB.
-                        // A potential future alternative would be to resurrect this as a zero-sized allocation
-                        // (which codegen will then compile to an aligned dummy pointer anyway).
-                        throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
-                    }
                     // If this allocation has size zero, there is no actual mutability here.
                     if size != Size::ZERO {
                         // Determine whether this pointer expects to be pointing to something mutable.
@@ -618,7 +618,8 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
                            }
                        };
                        // Determine what it actually points to.
-                        let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
+                        let alloc_actual_mutbl =
+                            global_alloc.mutability(*self.ecx.tcx, self.ecx.param_env);
                        // Mutable pointer to immutable memory is no good.
                        if ptr_expected_mutbl == Mutability::Mut
                            && alloc_actual_mutbl == Mutability::Not
@@ -842,9 +843,16 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
     }
 
     fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
+        debug_assert!(self.ctfe_mode.is_some());
         if let Some(mplace) = val.as_mplace_or_local().left() {
             if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
-                mutability(self.ecx, alloc_id).is_mut()
+                let tcx = *self.ecx.tcx;
+                // Everything must be already interned.
+                let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.param_env);
+                if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
+                    assert_eq!(alloc.mutability, mutbl);
+                }
+                mutbl.is_mut()
             } else {
                 // No memory at all.
                 false
@@ -1016,53 +1024,6 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
     }
 }
 
-/// Returns whether the allocation is mutable, and whether it's actually a static.
-/// For "root" statics we look at the type to account for interior
-/// mutability; for nested statics we have no type and directly use the annotated mutability.
-fn mutability<'tcx>(ecx: &InterpCx<'tcx, impl Machine<'tcx>>, alloc_id: AllocId) -> Mutability {
-    // Let's see what kind of memory this points to.
-    // We're not using `try_global_alloc` since dangling pointers have already been handled.
-    match ecx.tcx.global_alloc(alloc_id) {
-        GlobalAlloc::Static(did) => {
-            let DefKind::Static { safety: _, mutability, nested } = ecx.tcx.def_kind(did) else {
-                bug!()
-            };
-            if nested {
-                assert!(
-                    ecx.memory.alloc_map.get(alloc_id).is_none(),
-                    "allocations of nested statics are already interned: {alloc_id:?}, {did:?}"
-                );
-                // Nested statics in a `static` are never interior mutable,
-                // so just use the declared mutability.
-                mutability
-            } else {
-                let mutability = match mutability {
-                    Mutability::Not
-                        if !ecx
-                            .tcx
-                            .type_of(did)
-                            .no_bound_vars()
-                            .expect("statics should not have generic parameters")
-                            .is_freeze(*ecx.tcx, ty::ParamEnv::reveal_all()) =>
-                    {
-                        Mutability::Mut
-                    }
-                    _ => mutability,
-                };
-                if let Some((_, alloc)) = ecx.memory.alloc_map.get(alloc_id) {
-                    assert_eq!(alloc.mutability, mutability);
-                }
-                mutability
-            }
-        }
-        GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
-        GlobalAlloc::Function { .. } | GlobalAlloc::VTable(..) => {
-            // These are immutable, we better don't allow mutable pointers here.
-            Mutability::Not
-        }
-    }
-}
-
 impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
     type V = PlaceTy<'tcx, M::Provenance>;
 
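The free function `mutability` deleted above, together with the size/align logic previously duplicated in memory.rs, moves to methods on `GlobalAlloc` in rustc_middle (one of the 11 changed files not shown in this excerpt). Judging only from the call sites in these diffs, the signatures presumably look roughly like this; bodies are omitted, this is an inferred sketch rather than the actual rustc_middle code:

    impl<'tcx> GlobalAlloc<'tcx> {
        // Inferred from `global_alloc.size_and_align(*self.tcx, self.param_env)`.
        pub fn size_and_align(&self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> (Size, Align) {
            unimplemented!("implemented in rustc_middle; not shown in this diff")
        }

        // Inferred from `global_alloc.mutability(*self.tcx, self.param_env)`.
        pub fn mutability(&self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Mutability {
            unimplemented!("implemented in rustc_middle; not shown in this diff")
        }
    }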