@@ -155,8 +155,8 @@ impl CtfeValidationMode {

 /// State for tracking recursive validation of references
 pub struct RefTracking<T, PATH = ()> {
-    pub seen: FxHashSet<T>,
-    pub todo: Vec<(T, PATH)>,
+    seen: FxHashSet<T>,
+    todo: Vec<(T, PATH)>,
 }

 impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
@@ -169,8 +169,11 @@ impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH>
         ref_tracking_for_consts.seen.insert(op);
         ref_tracking_for_consts
     }
+    pub fn next(&mut self) -> Option<(T, PATH)> {
+        self.todo.pop()
+    }

-    pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
+    fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
         if self.seen.insert(op.clone()) {
             trace!("Recursing below ptr {:#?}", op);
             let path = path();
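
The `next()`/`track()` pair turns `RefTracking` into a simple de-duplicating worklist: `track` enqueues a reference only the first time it is seen, and `next` pops the next pending item, so callers no longer need direct access to the now-private `todo` field. Below is a minimal standalone sketch of that pattern, using `std::collections::HashSet` in place of `FxHashSet` and a hypothetical `Tracker` type; it illustrates the idea only and is not the compiler's actual `RefTracking`.

```rust
use std::collections::HashSet;
use std::hash::Hash;

/// Hypothetical stand-in for `RefTracking<T, PATH>`: a de-duplicating worklist.
struct Tracker<T, P> {
    seen: HashSet<T>,
    todo: Vec<(T, P)>,
}

impl<T: Clone + Eq + Hash, P> Tracker<T, P> {
    fn new() -> Self {
        Tracker { seen: HashSet::new(), todo: Vec::new() }
    }

    /// Enqueue `item` only if it has not been seen before (mirrors `track`).
    fn track(&mut self, item: T, path: impl FnOnce() -> P) {
        if self.seen.insert(item.clone()) {
            // The path closure only runs for items that are actually enqueued.
            let path = path();
            self.todo.push((item, path));
        }
    }

    /// Pop the next pending item (mirrors the new `next` method).
    fn next(&mut self) -> Option<(T, P)> {
        self.todo.pop()
    }
}

fn main() {
    let mut tracker: Tracker<u32, String> = Tracker::new();
    tracker.track(1, || "root.field".to_string());
    tracker.track(1, || unreachable!("already seen, closure never runs"));
    while let Some((item, path)) = tracker.next() {
        println!("validating {item} at {path}");
    }
}
```
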
@@ -435,95 +438,110 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
         if self.ecx.scalar_may_be_null(Scalar::from_maybe_pointer(place.ptr(), self.ecx))? {
             throw_validation_failure!(self.path, NullPtr { ptr_kind })
         }
-        // Do not allow pointers to uninhabited types.
+        // Do not allow references to uninhabited types.
         if place.layout.abi.is_uninhabited() {
             let ty = place.layout.ty;
             throw_validation_failure!(self.path, PtrToUninhabited { ptr_kind, ty })
         }
         // Recursive checking
         if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
-            // Determine whether this pointer expects to be pointing to something mutable.
-            let ptr_expected_mutbl = match ptr_kind {
-                PointerKind::Box => Mutability::Mut,
-                PointerKind::Ref(mutbl) => {
-                    // We do not take into account interior mutability here since we cannot know if
-                    // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
-                    // that in the recursive descent behind this reference (controlled by
-                    // `allow_immutable_unsafe_cell`).
-                    mutbl
-                }
-            };
             // Proceed recursively even for ZST, no reason to skip them!
             // `!` is a ZST and we want to validate it.
-            if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr()) {
+            if let Some(ctfe_mode) = self.ctfe_mode {
                 let mut skip_recursive_check = false;
-                if let Some(GlobalAlloc::Static(did)) = self.ecx.tcx.try_get_global_alloc(alloc_id)
-                {
-                    let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else { bug!() };
-                    // Special handling for pointers to statics (irrespective of their type).
-                    assert!(!self.ecx.tcx.is_thread_local_static(did));
-                    assert!(self.ecx.tcx.is_static(did));
-                    // Mode-specific checks
-                    match self.ctfe_mode {
-                        Some(
-                            CtfeValidationMode::Static { .. } | CtfeValidationMode::Promoted { .. },
-                        ) => {
-                            // We skip recursively checking other statics. These statics must be sound by
-                            // themselves, and the only way to get broken statics here is by using
-                            // unsafe code.
-                            // The reasons we don't check other statics is twofold. For one, in all
-                            // sound cases, the static was already validated on its own, and second, we
-                            // trigger cycle errors if we try to compute the value of the other static
-                            // and that static refers back to us (potentially through a promoted).
-                            // This could miss some UB, but that's fine.
-                            // We still walk nested allocations, as they are fundamentally part of this validation run.
-                            // This means we will also recurse into nested statics of *other*
-                            // statics, even though we do not recurse into other statics directly.
-                            // That's somewhat inconsistent but harmless.
-                            skip_recursive_check = !nested;
-                        }
-                        Some(CtfeValidationMode::Const { .. }) => {
-                            // We can't recursively validate `extern static`, so we better reject them.
-                            if self.ecx.tcx.is_foreign_item(did) {
-                                throw_validation_failure!(self.path, ConstRefToExtern);
+                // CTFE imposes restrictions on what references can point to.
+                if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr()) {
+                    if let Some(GlobalAlloc::Static(did)) =
+                        self.ecx.tcx.try_get_global_alloc(alloc_id)
+                    {
+                        let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
+                            bug!()
+                        };
+                        // Special handling for pointers to statics (irrespective of their type).
+                        assert!(!self.ecx.tcx.is_thread_local_static(did));
+                        assert!(self.ecx.tcx.is_static(did));
+                        // Mode-specific checks
+                        match ctfe_mode {
+                            CtfeValidationMode::Static { .. }
+                            | CtfeValidationMode::Promoted { .. } => {
+                                // We skip recursively checking other statics. These statics must be sound by
+                                // themselves, and the only way to get broken statics here is by using
+                                // unsafe code.
+                                // The reasons we don't check other statics is twofold. For one, in all
+                                // sound cases, the static was already validated on its own, and second, we
+                                // trigger cycle errors if we try to compute the value of the other static
+                                // and that static refers back to us (potentially through a promoted).
+                                // This could miss some UB, but that's fine.
+                                // We still walk nested allocations, as they are fundamentally part of this validation run.
+                                // This means we will also recurse into nested statics of *other*
+                                // statics, even though we do not recurse into other statics directly.
+                                // That's somewhat inconsistent but harmless.
+                                skip_recursive_check = !nested;
+                            }
+                            CtfeValidationMode::Const { .. } => {
+                                // We can't recursively validate `extern static`, so we better reject them.
+                                if self.ecx.tcx.is_foreign_item(did) {
+                                    throw_validation_failure!(self.path, ConstRefToExtern);
+                                }
                             }
                         }
-                        None => {}
                     }
-                }

-                // Dangling and Mutability check.
-                let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
-                if alloc_kind == AllocKind::Dead {
-                    // This can happen for zero-sized references. We can't have *any* references to non-existing
-                    // allocations though, interning rejects them all as the rest of rustc isn't happy with them...
-                    // so we throw an error, even though this isn't really UB.
-                    // A potential future alternative would be to resurrect this as a zero-sized allocation
-                    // (which codegen will then compile to an aligned dummy pointer anyway).
-                    throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
-                }
-                // If this allocation has size zero, there is no actual mutability here.
-                if size != Size::ZERO {
-                    let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
-                    // Mutable pointer to immutable memory is no good.
-                    if ptr_expected_mutbl == Mutability::Mut
-                        && alloc_actual_mutbl == Mutability::Not
-                    {
-                        throw_validation_failure!(self.path, MutableRefToImmutable);
+                    // Dangling and Mutability check.
+                    let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
+                    if alloc_kind == AllocKind::Dead {
+                        // This can happen for zero-sized references. We can't have *any* references to
+                        // non-existing allocations in const-eval though, interning rejects them all as
+                        // the rest of rustc isn't happy with them... so we throw an error, even though
+                        // this isn't really UB.
+                        // A potential future alternative would be to resurrect this as a zero-sized allocation
+                        // (which codegen will then compile to an aligned dummy pointer anyway).
+                        throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
                    }
-                    // In a const, everything must be completely immutable.
-                    if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
+                    // If this allocation has size zero, there is no actual mutability here.
+                    if size != Size::ZERO {
+                        // Determine whether this pointer expects to be pointing to something mutable.
+                        let ptr_expected_mutbl = match ptr_kind {
+                            PointerKind::Box => Mutability::Mut,
+                            PointerKind::Ref(mutbl) => {
+                                // We do not take into account interior mutability here since we cannot know if
+                                // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
+                                // that in the recursive descent behind this reference (controlled by
+                                // `allow_immutable_unsafe_cell`).
+                                mutbl
+                            }
+                        };
+                        // Determine what it actually points to.
+                        let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
+                        // Mutable pointer to immutable memory is no good.
                         if ptr_expected_mutbl == Mutability::Mut
-                            || alloc_actual_mutbl == Mutability::Mut
+                            && alloc_actual_mutbl == Mutability::Not
                         {
-                            throw_validation_failure!(self.path, ConstRefToMutable);
+                            throw_validation_failure!(self.path, MutableRefToImmutable);
+                        }
+                        // In a const, everything must be completely immutable.
+                        if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
+                            if ptr_expected_mutbl == Mutability::Mut
+                                || alloc_actual_mutbl == Mutability::Mut
+                            {
+                                throw_validation_failure!(self.path, ConstRefToMutable);
+                            }
                         }
                     }
                 }
                 // Potentially skip recursive check.
                 if skip_recursive_check {
                     return Ok(());
                 }
+            } else {
+                // This is not CTFE, so it's Miri with recursive checking.
+                // FIXME: we do *not* check behind boxes, since creating a new box first creates it uninitialized
+                // and then puts the value in there, so briefly we have a box with uninit contents.
+                // FIXME: should we also skip `UnsafeCell` behind shared references? Currently that is not
+                // needed since validation reads bypass Stacked Borrows and data race checks.
+                if matches!(ptr_kind, PointerKind::Box) {
+                    return Ok(());
+                }
             }
             let path = &self.path;
             ref_tracking.track(place, || {
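
The reordered mutability check in this hunk boils down to two comparisons: a `Box` or `&mut` pointer must not point to read-only memory, and in `Const` mode neither the pointer nor the allocation may be mutable at all. The following is a standalone sketch of that decision logic only; the `PtrKind`, `Mutability`, `Mode`, `CheckError`, and `check_mutability` names are hypothetical stand-ins for the compiler's types, and this is not the real validator.

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Mutability { Mut, Not }

#[derive(Clone, Copy)]
enum PtrKind { Box, Ref(Mutability) }

#[derive(Clone, Copy)]
enum Mode { Const, Static }

#[derive(Debug, PartialEq)]
enum CheckError { MutableRefToImmutable, ConstRefToMutable }

/// Mirrors the restructured check: derive the pointer's expected mutability,
/// compare it with the allocation's actual mutability, and apply the extra
/// `Const`-mode restriction.
fn check_mutability(
    ptr_kind: PtrKind,
    alloc_actual_mutbl: Mutability,
    mode: Mode,
) -> Result<(), CheckError> {
    // Boxes and `&mut` expect mutable memory; `&` carries its declared mutability.
    let ptr_expected_mutbl = match ptr_kind {
        PtrKind::Box => Mutability::Mut,
        PtrKind::Ref(mutbl) => mutbl,
    };
    // Mutable pointer to immutable memory is no good.
    if ptr_expected_mutbl == Mutability::Mut && alloc_actual_mutbl == Mutability::Not {
        return Err(CheckError::MutableRefToImmutable);
    }
    // In a const, everything must be completely immutable.
    if matches!(mode, Mode::Const)
        && (ptr_expected_mutbl == Mutability::Mut || alloc_actual_mutbl == Mutability::Mut)
    {
        return Err(CheckError::ConstRefToMutable);
    }
    Ok(())
}

fn main() {
    assert_eq!(
        check_mutability(PtrKind::Ref(Mutability::Mut), Mutability::Not, Mode::Static),
        Err(CheckError::MutableRefToImmutable)
    );
    assert_eq!(
        check_mutability(PtrKind::Box, Mutability::Mut, Mode::Const),
        Err(CheckError::ConstRefToMutable)
    );
    assert_eq!(
        check_mutability(PtrKind::Ref(Mutability::Not), Mutability::Not, Mode::Const),
        Ok(())
    );
}
```
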
@@ -1072,11 +1090,23 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
     /// `op` is assumed to cover valid memory if it is an indirect operand.
     /// It will error if the bits at the destination do not match the ones described by the layout.
     #[inline(always)]
-    pub fn validate_operand(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
+    pub fn validate_operand(
+        &self,
+        op: &OpTy<'tcx, M::Provenance>,
+        recursive: bool,
+    ) -> InterpResult<'tcx> {
         // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
         // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
-        // value, it rules out things like `UnsafeCell` in awkward places. It also can make checking
-        // recurse through references which, for now, we don't want here, either.
-        self.validate_operand_internal(op, vec![], None, None)
+        // value, it rules out things like `UnsafeCell` in awkward places.
+        if !recursive {
+            return self.validate_operand_internal(op, vec![], None, None);
+        }
+        // Do a recursive check.
+        let mut ref_tracking = RefTracking::empty();
+        self.validate_operand_internal(op, vec![], Some(&mut ref_tracking), None)?;
+        while let Some((mplace, path)) = ref_tracking.todo.pop() {
+            self.validate_operand_internal(&mplace.into(), path, Some(&mut ref_tracking), None)?;
+        }
+        Ok(())
     }
 }
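
With the new `recursive: bool` parameter, the same entry point can either do a shallow check or drive a full worklist traversal: validate the root operand, then keep popping queued references until the queue is empty, so deeply nested data never recurses on the call stack. Below is a self-contained sketch of that driver pattern over a toy object graph; the `Node`, `validate_node`, and `validate_graph` names are hypothetical and only illustrate the loop structure, not the interpreter's API.

```rust
use std::collections::HashSet;

/// A toy object graph: each node has a value and edges to other nodes (by index).
struct Node {
    value: i32,
    edges: Vec<usize>,
}

/// Shallow check of a single node; pushes any newly seen neighbours onto the worklist.
fn validate_node(
    nodes: &[Node],
    idx: usize,
    path: &str,
    seen: &mut HashSet<usize>,
    todo: &mut Vec<(usize, String)>,
) -> Result<(), String> {
    let node = &nodes[idx];
    if node.value < 0 {
        return Err(format!("negative value at {path}"));
    }
    for &next in &node.edges {
        if seen.insert(next) {
            todo.push((next, format!("{path}->{next}")));
        }
    }
    Ok(())
}

/// Mirrors the recursive `validate_operand` driver: validate the root, then
/// drain the worklist instead of recursing on the call stack.
fn validate_graph(nodes: &[Node], root: usize) -> Result<(), String> {
    let mut seen = HashSet::from([root]);
    let mut todo = Vec::new();
    validate_node(nodes, root, "root", &mut seen, &mut todo)?;
    while let Some((idx, path)) = todo.pop() {
        validate_node(nodes, idx, &path, &mut seen, &mut todo)?;
    }
    Ok(())
}

fn main() {
    // A small cyclic graph: 0 -> 1, 1 -> 0 and 1 -> 2.
    let nodes = vec![
        Node { value: 1, edges: vec![1] },
        Node { value: 2, edges: vec![0, 2] },
        Node { value: -3, edges: vec![] },
    ];
    // The cycle terminates thanks to `seen`; the error surfaces with its path.
    assert_eq!(
        validate_graph(&nodes, 0),
        Err("negative value at root->1->2".to_string())
    );
}
```
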