@@ -226,6 +226,19 @@ enum StructKind {
226
226
Prefixed ( Size , Align ) ,
227
227
}
228
228
229
// Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`.
// This is used to go between `memory_index` (source field order to memory order)
// and `inverse_memory_index` (memory order to source field order).
// See also `FieldPlacement::Arbitrary::memory_index` for more details.
// FIXME(eddyb) build a better abstraction for permutations, if possible.
fn invert_mapping(map: &[u32]) -> Vec<u32> {
    let mut inverse = vec![0u32; map.len()];
    // For each source position `src`, record it at the slot its image points to.
    for (src, &dst) in map.iter().enumerate() {
        inverse[dst as usize] = src as u32;
    }
    inverse
}
241
+
229
242
impl < ' tcx > LayoutCx < ' tcx , TyCtxt < ' tcx > > {
230
243
fn scalar_pair ( & self , a : Scalar , b : Scalar ) -> LayoutDetails {
231
244
let dl = self . data_layout ( ) ;
@@ -303,7 +316,9 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
303
316
// That is, if field 5 has offset 0, the first element of inverse_memory_index is 5.
304
317
// We now write field offsets to the corresponding offset slot;
305
318
// field 5 with offset 0 puts 0 in offsets[5].
306
- // At the bottom of this function, we use inverse_memory_index to produce memory_index.
319
+ // At the bottom of this function, we invert `inverse_memory_index` to
320
+ // produce `memory_index` (see `invert_mapping`).
321
+
307
322
308
323
let mut offset = Size :: ZERO ;
309
324
@@ -360,13 +375,9 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
360
375
// Field 5 would be the first element, so memory_index is i:
361
376
// Note: if we didn't optimize, it's already right.
362
377
363
- let mut memory_index;
378
+ let memory_index;
364
379
if optimize {
365
- memory_index = vec ! [ 0 ; inverse_memory_index. len( ) ] ;
366
-
367
- for i in 0 ..inverse_memory_index. len ( ) {
368
- memory_index[ inverse_memory_index[ i] as usize ] = i as u32 ;
369
- }
380
+ memory_index = invert_mapping ( & inverse_memory_index) ;
370
381
} else {
371
382
memory_index = inverse_memory_index;
372
383
}
@@ -1311,18 +1322,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
1311
1322
) -> Result < & ' tcx LayoutDetails , LayoutError < ' tcx > > {
1312
1323
use SavedLocalEligibility :: * ;
1313
1324
let tcx = self . tcx ;
1314
- let recompute_memory_index = |offsets : & [ Size ] | -> Vec < u32 > {
1315
- debug ! ( "recompute_memory_index({:?})" , offsets) ;
1316
- let mut inverse_index = ( 0 ..offsets. len ( ) as u32 ) . collect :: < Vec < _ > > ( ) ;
1317
- inverse_index. sort_unstable_by_key ( |i| offsets[ * i as usize ] ) ;
1318
1325
1319
- let mut index = vec ! [ 0 ; offsets. len( ) ] ;
1320
- for i in 0 ..index. len ( ) {
1321
- index[ inverse_index[ i] as usize ] = i as u32 ;
1322
- }
1323
- debug ! ( "recompute_memory_index() => {:?}" , index) ;
1324
- index
1325
- } ;
1326
1326
let subst_field = |ty : Ty < ' tcx > | { ty. subst ( tcx, substs. substs ) } ;
1327
1327
1328
1328
let info = tcx. generator_layout ( def_id) ;
@@ -1349,14 +1349,34 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
1349
1349
// get included in each variant that requested them in
1350
1350
// GeneratorLayout.
1351
1351
debug ! ( "prefix = {:#?}" , prefix) ;
1352
- let ( outer_fields, promoted_offsets) = match prefix. fields {
1353
- FieldPlacement :: Arbitrary { mut offsets, .. } => {
1354
- let offsets_b = offsets. split_off ( discr_index + 1 ) ;
1352
+ let ( outer_fields, promoted_offsets, promoted_memory_index) = match prefix. fields {
1353
+ FieldPlacement :: Arbitrary { mut offsets, memory_index } => {
1354
+ let mut inverse_memory_index = invert_mapping ( & memory_index) ;
1355
+
1356
+ // "a" (`0..b_start`) and "b" (`b_start..`) correspond to
1357
+ // "outer" and "promoted" fields respectively.
1358
+ let b_start = ( discr_index + 1 ) as u32 ;
1359
+ let offsets_b = offsets. split_off ( b_start as usize ) ;
1355
1360
let offsets_a = offsets;
1356
1361
1357
- let memory_index = recompute_memory_index ( & offsets_a) ;
1358
- let outer_fields = FieldPlacement :: Arbitrary { offsets : offsets_a, memory_index } ;
1359
- ( outer_fields, offsets_b)
1362
+ // Disentangle the "a" and "b" components of `inverse_memory_index`
1363
+ // by preserving the order but keeping only one disjoint "half" each.
1364
+ // FIXME(eddyb) build a better abstraction for permutations, if possible.
1365
+ let inverse_memory_index_b: Vec < _ > =
1366
+ inverse_memory_index. iter ( ) . filter_map ( |& i| i. checked_sub ( b_start) ) . collect ( ) ;
1367
+ inverse_memory_index. retain ( |& i| i < b_start) ;
1368
+ let inverse_memory_index_a = inverse_memory_index;
1369
+
1370
+ // Since `inverse_memory_index_{a,b}` each only refer to their
1371
+ // respective fields, they can be safely inverted
1372
+ let memory_index_a = invert_mapping ( & inverse_memory_index_a) ;
1373
+ let memory_index_b = invert_mapping ( & inverse_memory_index_b) ;
1374
+
1375
+ let outer_fields = FieldPlacement :: Arbitrary {
1376
+ offsets : offsets_a,
1377
+ memory_index : memory_index_a,
1378
+ } ;
1379
+ ( outer_fields, offsets_b, memory_index_b)
1360
1380
}
1361
1381
_ => bug ! ( ) ,
1362
1382
} ;
@@ -1386,30 +1406,51 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
1386
1406
StructKind :: Prefixed ( prefix_size, prefix_align. abi ) ) ?;
1387
1407
variant. variants = Variants :: Single { index } ;
1388
1408
1389
- let offsets = match variant. fields {
1390
- FieldPlacement :: Arbitrary { offsets, .. } => offsets,
1409
+ let ( offsets, memory_index) = match variant. fields {
1410
+ FieldPlacement :: Arbitrary { offsets, memory_index } => {
1411
+ ( offsets, memory_index)
1412
+ }
1391
1413
_ => bug ! ( ) ,
1392
1414
} ;
1393
1415
1394
1416
// Now, stitch the promoted and variant-only fields back together in
1395
1417
// the order they are mentioned by our GeneratorLayout.
1396
- let mut next_variant_field = 0 ;
1397
- let mut combined_offsets = Vec :: new ( ) ;
1398
- for local in variant_fields. iter ( ) {
1399
- match assignments[ * local] {
1418
+ // Because we only use some subset (that can differ between variants)
1419
+ // of the promoted fields, we can't just pick those elements of the
1420
+ // `promoted_memory_index` (as we'd end up with gaps).
1421
+ // So instead, we build an "inverse memory_index", as if all of the
1422
+ // promoted fields were being used, but leave the elements not in the
1423
+ // subset as `INVALID_FIELD_IDX`, which we can filter out later to
1424
+ // obtain a valid (bijective) mapping.
1425
+ const INVALID_FIELD_IDX : u32 = !0 ;
1426
+ let mut combined_inverse_memory_index =
1427
+ vec ! [ INVALID_FIELD_IDX ; promoted_memory_index. len( ) + memory_index. len( ) ] ;
1428
+ let mut offsets_and_memory_index = offsets. into_iter ( ) . zip ( memory_index) ;
1429
+ let combined_offsets = variant_fields. iter ( ) . enumerate ( ) . map ( |( i, local) | {
1430
+ let ( offset, memory_index) = match assignments[ * local] {
1400
1431
Unassigned => bug ! ( ) ,
1401
1432
Assigned ( _) => {
1402
- combined_offsets . push ( offsets [ next_variant_field ] ) ;
1403
- next_variant_field += 1 ;
1433
+ let ( offset , memory_index ) = offsets_and_memory_index . next ( ) . unwrap ( ) ;
1434
+ ( offset , promoted_memory_index . len ( ) as u32 + memory_index )
1404
1435
}
1405
1436
Ineligible ( field_idx) => {
1406
1437
let field_idx = field_idx. unwrap ( ) as usize ;
1407
- combined_offsets . push ( promoted_offsets[ field_idx] ) ;
1438
+ ( promoted_offsets[ field_idx] , promoted_memory_index [ field_idx ] )
1408
1439
}
1409
- }
1410
- }
1411
- let memory_index = recompute_memory_index ( & combined_offsets) ;
1412
- variant. fields = FieldPlacement :: Arbitrary { offsets : combined_offsets, memory_index } ;
1440
+ } ;
1441
+ combined_inverse_memory_index[ memory_index as usize ] = i as u32 ;
1442
+ offset
1443
+ } ) . collect ( ) ;
1444
+
1445
+ // Remove the unused slots and invert the mapping to obtain the
1446
+ // combined `memory_index` (also see previous comment).
1447
+ combined_inverse_memory_index. retain ( |& i| i != INVALID_FIELD_IDX ) ;
1448
+ let combined_memory_index = invert_mapping ( & combined_inverse_memory_index) ;
1449
+
1450
+ variant. fields = FieldPlacement :: Arbitrary {
1451
+ offsets : combined_offsets,
1452
+ memory_index : combined_memory_index,
1453
+ } ;
1413
1454
1414
1455
size = size. max ( variant. size ) ;
1415
1456
align = align. max ( variant. align ) ;
0 commit comments