@@ -15,6 +15,5 @@
 use rustc::ty::TyCtxt;
 use rustc::mir::{self, Mir, Location};
-use rustc_data_structures::bitslice::BitSlice; // adds set_bit/get_bit to &[usize] bitvector rep.
 use rustc_data_structures::bitslice::{BitwiseOperator};
 use rustc_data_structures::indexed_set::{IdxSet};
 use rustc_data_structures::indexed_vec::Idx;
@@ -504,7 +503,6 @@ impl<'a, 'gcx, 'tcx> BitDenotation for MovingOutStatements<'a, 'gcx, 'tcx> {
         let stmt = &mir[location.block].statements[location.statement_index];
         let loc_map = &move_data.loc_map;
         let path_map = &move_data.path_map;
-        let bits_per_block = self.bits_per_block();
 
         match stmt.kind {
             // this analysis only tries to find moves explicitly
@@ -515,21 +513,15 @@ impl<'a, 'gcx, 'tcx> BitDenotation for MovingOutStatements<'a, 'gcx, 'tcx> {
             _ => {
                 debug!("stmt {:?} at loc {:?} moves out of move_indexes {:?}",
                        stmt, location, &loc_map[location]);
-                for move_index in &loc_map[location] {
-                    // Every path deinitialized by a *particular move*
-                    // has corresponding bit, "gen'ed" (i.e. set)
-                    // here, in dataflow vector
-                    zero_to_one(sets.gen_set.words_mut(), *move_index);
-                }
+                // Every path deinitialized by a *particular move*
+                // has corresponding bit, "gen'ed" (i.e. set)
+                // here, in dataflow vector
+                sets.gen_all_and_assert_dead(&loc_map[location]);
             }
         }
 
         for_location_inits(tcx, mir, move_data, location,
-                           |mpi| for moi in &path_map[mpi] {
-                               assert!(moi.index() < bits_per_block);
-                               sets.kill_set.add(&moi);
-                           }
-        );
+                           |mpi| sets.kill_all(&path_map[mpi]));
     }
 
     fn terminator_effect(&self,
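The rewritten effects above call three `BlockSets` helpers — `gen_all`, `kill_all`, and `gen_all_and_assert_dead` — whose definitions live outside the hunks shown here. A minimal sketch of the shape those call sites require, with a `HashSet<usize>` standing in for rustc's `IdxSet` bitvectors (the method names come from the diff; the bodies and bounds are illustrative assumptions, not the real dataflow module):

```rust
use std::borrow::Borrow;
use std::collections::HashSet;

// Toy stand-in for librustc_mir's `BlockSets`, which really holds
// `IdxSet` bitvectors over dense index types (MoveOutIndex, InitIndex).
struct BlockSets {
    gen_set: HashSet<usize>,
    kill_set: HashSet<usize>,
}

impl BlockSets {
    // Gen every index yielded by `iter`. The `Borrow` bound lets callers
    // pass `&loc_map[location]` (yielding references) as well as plain
    // iterators of owned indices.
    fn gen_all<I>(&mut self, iter: I)
    where I: IntoIterator, I::Item: Borrow<usize> {
        for e in iter {
            self.gen_set.insert(*e.borrow());
        }
    }

    // Like `gen_all`, but assert that each bit was previously unset.
    // This preserves the `assert!(retval)` of the deleted `zero_to_one`
    // helper: a move-out may only be gen'ed once per location.
    fn gen_all_and_assert_dead<I>(&mut self, iter: I)
    where I: IntoIterator, I::Item: Borrow<usize> {
        for e in iter {
            let newly_set = self.gen_set.insert(*e.borrow());
            assert!(newly_set);
        }
    }

    // Kill every index yielded by `iter`.
    fn kill_all<I>(&mut self, iter: I)
    where I: IntoIterator, I::Item: Borrow<usize> {
        for e in iter {
            self.kill_set.insert(*e.borrow());
        }
    }
}
```

Note that the per-call `assert!(index < bits_per_block)` checks from the deleted loops have no counterpart at the call sites any more; the bounds discipline presumably moves into the shared helpers or the underlying `IdxSet`, which is why each `let bits_per_block = self.bits_per_block();` local can be dropped.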
@@ -543,18 +535,10 @@ impl<'a, 'gcx, 'tcx> BitDenotation for MovingOutStatements<'a, 'gcx, 'tcx> {
 
         debug!("terminator {:?} at loc {:?} moves out of move_indexes {:?}",
                term, location, &loc_map[location]);
-        let bits_per_block = self.bits_per_block();
-        for move_index in &loc_map[location] {
-            assert!(move_index.index() < bits_per_block);
-            zero_to_one(sets.gen_set.words_mut(), *move_index);
-        }
+        sets.gen_all_and_assert_dead(&loc_map[location]);
 
         for_location_inits(tcx, mir, move_data, location,
-                           |mpi| for moi in &path_map[mpi] {
-                               assert!(moi.index() < bits_per_block);
-                               sets.kill_set.add(&moi);
-                           }
-        );
+                           |mpi| sets.kill_all(&path_map[mpi]));
     }
 
     fn propagate_call_return(&self,
@@ -585,11 +569,7 @@ impl<'a, 'gcx, 'tcx> BitDenotation for EverInitializedLvals<'a, 'gcx, 'tcx> {
     }
 
     fn start_block_effect(&self, sets: &mut BlockSets<InitIndex>) {
-        let bits_per_block = self.bits_per_block();
-        for init_index in (0..self.mir.arg_count).map(InitIndex::new) {
-            assert!(init_index.index() < bits_per_block);
-            sets.gen_set.add(&init_index);
-        }
+        sets.gen_all((0..self.mir.arg_count).map(InitIndex::new));
     }
     fn statement_effect(&self,
                         sets: &mut BlockSets<InitIndex>,
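The `start_block_effect` one-liner above leans on the same iterator flexibility in `gen_all`: a function's arguments occupy the first `arg_count` init indexes, so gen'ing `(0..arg_count).map(InitIndex::new)` marks every argument as ever-initialized on entry. A standalone sketch of that idiom, with a hypothetical hand-rolled `InitIndex` newtype (rustc derives these via its `Idx` machinery rather than by hand):

```rust
// Hypothetical dense-index newtype mirroring rustc's `Idx` pattern.
#[derive(Debug, PartialEq)]
struct InitIndex(usize);

impl InitIndex {
    fn new(i: usize) -> Self { InitIndex(i) }
}

fn main() {
    let arg_count = 3;
    // One init index per function argument, as in `start_block_effect`.
    let arg_inits: Vec<InitIndex> = (0..arg_count).map(InitIndex::new).collect();
    assert_eq!(arg_inits, [InitIndex(0), InitIndex(1), InitIndex(2)]);
}
```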
@@ -599,26 +579,39 @@ impl<'a, 'gcx, 'tcx> BitDenotation for EverInitializedLvals<'a, 'gcx, 'tcx> {
         let init_path_map = &move_data.init_path_map;
         let init_loc_map = &move_data.init_loc_map;
         let rev_lookup = &move_data.rev_lookup;
-        let bits_per_block = self.bits_per_block();
 
         debug!("statement {:?} at loc {:?} initializes move_indexes {:?}",
                stmt, location, &init_loc_map[location]);
-        for init_index in &init_loc_map[location] {
-            assert!(init_index.index() < bits_per_block);
-            sets.gen_set.add(init_index);
-        }
+        sets.gen_all(&init_loc_map[location]);
 
         match stmt.kind {
-            mir::StatementKind::StorageDead(local) => {
-                // End inits for StorageDead, so that an immutable variable can
-                // be reinitialized on the next iteration of the loop.
+            mir::StatementKind::StorageDead(local) |
+            mir::StatementKind::StorageLive(local) => {
+                // End inits for StorageDead and StorageLive, so that an immutable
+                // variable can be reinitialized on the next iteration of the loop.
+                //
+                // FIXME(#46525): We *need* to do this for StorageLive as well as
+                // StorageDead, because lifetimes of match bindings with guards are
+                // weird - i.e. this code
+                //
+                // ```
+                //     fn main() {
+                //         match 0 {
+                //             a | a
+                //             if { println!("a={}", a); false } => {}
+                //             _ => {}
+                //         }
+                //     }
+                // ```
+                //
+                // runs the guard twice, using the same binding for `a`, and only
+                // storagedeads after everything ends, so if we don't regard the
+                // storagelive as killing storage, we would have a multiple assignment
+                // to immutable data error.
                 if let LookupResult::Exact(mpi) = rev_lookup.find(&mir::Place::Local(local)) {
                     debug!("stmt {:?} at loc {:?} clears the ever initialized status of {:?}",
-                           stmt, location, &init_path_map[mpi]);
-                    for ii in &init_path_map[mpi] {
-                        assert!(ii.index() < bits_per_block);
-                        sets.kill_set.add(&ii);
-                    }
+                            stmt, location, &init_path_map[mpi]);
+                    sets.kill_all(&init_path_map[mpi]);
                 }
             }
             _ => {}
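To see why the ever-init bits must be killed at `StorageDead` (and, per the FIXME above, at `StorageLive` too), consider an ordinary loop: in MIR, each iteration gets a fresh storage-live/storage-dead range for its locals, so the previous iteration's initialization has to be forgotten before the next iteration's first init of an immutable binding can be legal. A plain-Rust illustration of the pattern the dataflow has to accept:

```rust
fn main() {
    let mut n = 0;
    while n < 3 {
        // In MIR this body is roughly StorageLive(x); x = n + 1; ...;
        // StorageDead(x); repeated once per iteration. Without killing
        // the EverInitialized bits at the storage statements, the second
        // iteration's `let x` would look like a second assignment to
        // immutable data.
        let x = n + 1;
        n = x;
    }
    assert_eq!(n, 3);
}
```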
@@ -634,13 +627,11 @@ impl<'a, 'gcx, 'tcx> BitDenotation for EverInitializedLvals<'a, 'gcx, 'tcx> {
         let init_loc_map = &move_data.init_loc_map;
         debug!("terminator {:?} at loc {:?} initializes move_indexes {:?}",
                term, location, &init_loc_map[location]);
-        let bits_per_block = self.bits_per_block();
-        for init_index in &init_loc_map[location] {
-            if move_data.inits[*init_index].kind != InitKind::NonPanicPathOnly {
-                assert!(init_index.index() < bits_per_block);
-                sets.gen_set.add(init_index);
-            }
-        }
+        sets.gen_all(
+            init_loc_map[location].iter().filter(|init_index| {
+                move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
+            })
+        );
     }
 
     fn propagate_call_return(&self,
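The `**init_index` double dereference in the new filter is forced by iterator types, not a typo: `iter()` over the index vector yields `&InitIndex`, and `Iterator::filter` hands its closure a reference to each item, i.e. `&&InitIndex`. The same pattern on plain data:

```rust
fn main() {
    let inits = vec![10usize, 20, 30];
    // `iter()` yields `&usize`; `filter`'s closure therefore sees
    // `&&usize`, so reaching the value takes two dereferences --
    // exactly like `move_data.inits[**init_index]` above.
    let kept: Vec<&usize> = inits.iter().filter(|i| **i != 20).collect();
    assert_eq!(kept, [&10, &30]);
}
```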
@@ -663,11 +654,6 @@ impl<'a, 'gcx, 'tcx> BitDenotation for EverInitializedLvals<'a, 'gcx, 'tcx> {
     }
 }
 
-fn zero_to_one(bitvec: &mut [usize], move_index: MoveOutIndex) {
-    let retval = bitvec.set_bit(move_index.index());
-    assert!(retval);
-}
-
 impl<'a, 'gcx, 'tcx> BitwiseOperator for MaybeInitializedLvals<'a, 'gcx, 'tcx> {
     #[inline]
     fn join(&self, pred1: usize, pred2: usize) -> usize {
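The diff is cut off inside `join`, but the receiver tells the story: for a union-based ("maybe") analysis such as `MaybeInitializedLvals`, the dataflow join of two predecessor blocks is set union, taken word by word on the `usize` bitvector representation — i.e. one would expect the truncated body to amount to `pred1 | pred2` (an inference from the analysis's semantics, since the line itself is elided). A compact, self-contained model of word-wise union:

```rust
// Word-wise union of two bitvector blocks: a bit is set in the result
// iff it is set in either predecessor -- the join for "maybe" analyses.
fn join_words(pred1: &[usize], pred2: &[usize], out: &mut [usize]) {
    for ((o, &a), &b) in out.iter_mut().zip(pred1).zip(pred2) {
        *o = a | b;
    }
}

fn main() {
    let (a, b) = ([0b1010usize], [0b0110usize]);
    let mut out = [0usize];
    join_words(&a, &b, &mut out);
    assert_eq!(out[0], 0b1110);
}
```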