@@ -116,7 +116,10 @@ pub struct Scope<'tcx> {
     /// The cache for drop chain on "generator drop" exit.
     cached_generator_drop: Option<BasicBlock>,

-    /// The cache for drop chain on "unwind" exit.
+    /// The cache for drop chain on "unwind" exit. This block
+    /// contains code to run the current drop and all the preceding
+    /// drops (i.e., those having lower index in Drop’s Scope drop
+    /// array)
     cached_unwind: CachedBlock,
 }

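For intuition, the cached block described in the doc comment above sits in a chain: the block for drop k runs that drop and then falls through to the block for drop k-1, ending at the enclosing scope's cleanup. A hypothetical shape (block names invented for illustration, not actual MIR output):

    bb_drop2: Drop(local2) -> bb_drop1
    bb_drop1: Drop(local1) -> bb_drop0
    bb_drop0: Drop(local0) -> <enclosing scope's cached_unwind>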
@@ -133,21 +136,7 @@ struct DropData<'tcx> {
 }

 #[derive(Debug, Default, Clone, Copy)]
-pub(crate) struct CachedBlock {
-    /// The cached block for the cleanups-on-diverge path. This block
-    /// contains code to run the current drop and all the preceding
-    /// drops (i.e., those having lower index in Drop’s Scope drop
-    /// array)
-    unwind: Option<BasicBlock>,
-
-    /// The cached block for unwinds during cleanups-on-generator-drop path
-    ///
-    /// This is split from the standard unwind path here to prevent drop
-    /// elaboration from creating drop flags that would have to be captured
-    /// by the generator. I'm not sure how important this optimization is,
-    /// but it is here.
-    generator_drop: Option<BasicBlock>,
-}
+pub(crate) struct CachedBlock(Option<BasicBlock>);

 #[derive(Debug)]
 pub(crate) enum DropKind {
@@ -173,24 +162,15 @@ pub struct BreakableScope<'tcx> {

 impl CachedBlock {
     fn invalidate(&mut self) {
-        self.generator_drop = None;
-        self.unwind = None;
+        self.0 = None;
     }

-    fn get(&self, generator_drop: bool) -> Option<BasicBlock> {
-        if generator_drop {
-            self.generator_drop
-        } else {
-            self.unwind
-        }
+    fn get(&self) -> Option<BasicBlock> {
+        self.0
     }

-    fn ref_mut(&mut self, generator_drop: bool) -> &mut Option<BasicBlock> {
-        if generator_drop {
-            &mut self.generator_drop
-        } else {
-            &mut self.unwind
-        }
+    fn ref_mut(&mut self) -> &mut Option<BasicBlock> {
+        &mut self.0
     }
 }

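Taken together, the two hunks above collapse the two cached paths into a single newtype over Option<BasicBlock>. A minimal, self-contained sketch of the resulting behavior, using a stand-in BasicBlock type rather than rustc's real index type:

    // Stand-in index type; the real rustc BasicBlock is a newtype'd index.
    #[derive(Debug, Clone, Copy, PartialEq)]
    struct BasicBlock(usize);

    #[derive(Debug, Default, Clone, Copy)]
    struct CachedBlock(Option<BasicBlock>);

    impl CachedBlock {
        fn invalidate(&mut self) {
            self.0 = None;
        }

        fn get(&self) -> Option<BasicBlock> {
            self.0
        }

        fn ref_mut(&mut self) -> &mut Option<BasicBlock> {
            &mut self.0
        }
    }

    fn main() {
        let mut cache = CachedBlock::default();
        assert_eq!(cache.get(), None);            // empty until a chain is built
        *cache.ref_mut() = Some(BasicBlock(7));   // build_diverge_scope fills it
        assert_eq!(cache.get(), Some(BasicBlock(7)));
        cache.invalidate();                       // scheduling a new drop makes it stale
        assert_eq!(cache.get(), None);
    }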
@@ -370,7 +350,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
         assert_eq!(scope.region_scope, region_scope.0);

         let unwind_to = self.scopes.last().and_then(|next_scope| {
-            next_scope.cached_unwind.get(false)
+            next_scope.cached_unwind.get()
         }).unwrap_or_else(|| self.resume_block());

         unpack!(block = build_scope_drops(
@@ -379,7 +359,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             block,
             unwind_to,
             self.arg_count,
-            false,
         ));

         block.unit()
@@ -434,7 +413,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
                 }
             };

-            let unwind_to = next_scope.cached_unwind.get(false).unwrap_or_else(|| {
+            let unwind_to = next_scope.cached_unwind.get().unwrap_or_else(|| {
                 debug_assert!(!may_panic, "cached block not present?");
                 START_BLOCK
             });
@@ -445,7 +424,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
                 block,
                 unwind_to,
                 self.arg_count,
-                false,
             ));

             scope = next_scope;
@@ -462,7 +440,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     /// None indicates there’s no cleanup to do at this point.
     pub fn generator_drop_cleanup(&mut self) -> Option<BasicBlock> {
         // Fill in the cache for unwinds
-        self.diverge_cleanup_gen(true);
+        self.diverge_cleanup_gen();

         let src_info = self.scopes[0].source_info(self.fn_span);
         let resume_block = self.resume_block();
@@ -484,7 +462,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
         };

         let unwind_to = scopes.peek().as_ref().map(|scope| {
-            scope.cached_unwind.get(true).unwrap_or_else(|| {
+            scope.cached_unwind.get().unwrap_or_else(|| {
                 span_bug!(src_info.span, "cached block not present?")
             })
         }).unwrap_or(resume_block);
@@ -495,7 +473,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             block,
             unwind_to,
             self.arg_count,
-            true,
         ));
     }

@@ -748,7 +725,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     /// This path terminates in Resume. Returns the start of the path.
     /// See module comment for more details.
     pub fn diverge_cleanup(&mut self) -> BasicBlock {
-        self.diverge_cleanup_gen(false)
+        self.diverge_cleanup_gen()
     }

     fn resume_block(&mut self) -> BasicBlock {
@@ -767,7 +744,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
         }
     }

-    fn diverge_cleanup_gen(&mut self, generator_drop: bool) -> BasicBlock {
+    fn diverge_cleanup_gen(&mut self) -> BasicBlock {
         // Build up the drops in **reverse** order. The end result will
         // look like:
         //
@@ -781,15 +758,15 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {

         // Find the last cached block
         let (mut target, first_uncached) = if let Some(cached_index) = self.scopes.iter()
-                .rposition(|scope| scope.cached_unwind.get(generator_drop).is_some()) {
-            (self.scopes[cached_index].cached_unwind.get(generator_drop).unwrap(), cached_index + 1)
+                .rposition(|scope| scope.cached_unwind.get().is_some()) {
+            (self.scopes[cached_index].cached_unwind.get().unwrap(), cached_index + 1)
         } else {
             (self.resume_block(), 0)
         };

         for scope in self.scopes[first_uncached..].iter_mut() {
             target = build_diverge_scope(&mut self.cfg, scope.region_scope_span,
-                                         scope, target, generator_drop);
+                                         scope, target);
         }

         target
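The lookup pattern above — scan from the innermost scope for an existing cached block, then rebuild only the scopes inside it — can be modeled in miniature as follows. This is a hedged sketch with invented types; the real build_diverge_scope emits MIR blocks rather than allocating numbers:

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct BasicBlock(usize);

    struct Scope {
        cached_unwind: Option<BasicBlock>,
    }

    // Stand-in for build_diverge_scope: "emit" a fresh block that would run
    // this scope's drops and jump to `target`, then cache it on the scope.
    fn build_diverge_scope(next: &mut usize, scope: &mut Scope, _target: BasicBlock) -> BasicBlock {
        let block = BasicBlock(*next);
        *next += 1;
        scope.cached_unwind = Some(block);
        block
    }

    fn diverge_cleanup(scopes: &mut [Scope], resume: BasicBlock, next: &mut usize) -> BasicBlock {
        // Find the innermost (last) scope whose unwind chain is already cached.
        let (mut target, first_uncached) =
            match scopes.iter().rposition(|s| s.cached_unwind.is_some()) {
                Some(i) => (scopes[i].cached_unwind.unwrap(), i + 1),
                None => (resume, 0),
            };
        // Build the missing blocks, chaining each onto the previous target.
        for scope in scopes[first_uncached..].iter_mut() {
            target = build_diverge_scope(next, scope, target);
        }
        target
    }

    fn main() {
        let resume = BasicBlock(0);
        let mut next = 1;
        let mut scopes = vec![
            Scope { cached_unwind: None },
            Scope { cached_unwind: None },
        ];
        // First call builds a block for every scope; the second call is fully cached.
        let first = diverge_cleanup(&mut scopes, resume, &mut next);
        let second = diverge_cleanup(&mut scopes, resume, &mut next);
        assert_eq!(first, second);
    }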
@@ -869,7 +846,6 @@ fn build_scope_drops<'tcx>(
     mut block: BasicBlock,
     last_unwind_to: BasicBlock,
     arg_count: usize,
-    generator_drop: bool,
 ) -> BlockAnd<()> {
     debug!("build_scope_drops({:?} -> {:?}", block, scope);

@@ -890,7 +866,7 @@ fn build_scope_drops<'tcx>(

     let mut unwind_blocks = scope.drops.iter().rev().filter_map(|drop_data| {
         if let DropKind::Value { cached_block } = drop_data.kind {
-            Some(cached_block.get(generator_drop).unwrap_or_else(|| {
+            Some(cached_block.get().unwrap_or_else(|| {
                 span_bug!(drop_data.span, "cached block not present?")
             }))
         } else {
@@ -937,8 +913,7 @@ fn build_scope_drops<'tcx>(
 fn build_diverge_scope(cfg: &mut CFG<'tcx>,
                        span: Span,
                        scope: &mut Scope<'tcx>,
-                       mut target: BasicBlock,
-                       generator_drop: bool)
+                       mut target: BasicBlock)
                        -> BasicBlock
 {
     // Build up the drops in **reverse** order. The end result will
@@ -990,7 +965,7 @@ fn build_diverge_scope(cfg: &mut CFG<'tcx>,
                 };
             }
             DropKind::Value { ref mut cached_block } => {
-                let cached_block = cached_block.ref_mut(generator_drop);
+                let cached_block = cached_block.ref_mut();
                 target = if let Some(cached_block) = *cached_block {
                     storage_deads.clear();
                     target_built_by_us = false;
@@ -1013,7 +988,7 @@ fn build_diverge_scope(cfg: &mut CFG<'tcx>,
         };
     }
     push_storage_deads(cfg, &mut target, &mut storage_deads, target_built_by_us, source_scope);
-    *scope.cached_unwind.ref_mut(generator_drop) = Some(target);
+    *scope.cached_unwind.ref_mut() = Some(target);

     assert!(storage_deads.is_empty());
     debug!("build_diverge_scope({:?}, {:?}) = {:?}", scope, span, target);