@@ -81,8 +81,8 @@ pub struct Arena {
     // The head is separated out from the list as an unbenchmarked
     // microoptimization, to avoid needing to case on the list to access the
     // head.
-    head: Chunk,
-    copy_head: Chunk,
+    head: RefCell<Chunk>,
+    copy_head: RefCell<Chunk>,
     chunks: RefCell<Vec<Chunk>>,
 }
 
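Wrapping both chunk heads in `RefCell` is what lets the rest of this patch drop `&mut self` from the allocation paths: a `RefCell` hands out dynamically checked borrows through a shared reference. A minimal sketch of the pattern, in modern Rust syntax (the diff itself predates Rust 1.0) and with a stand-in `Chunk` that carries only a capacity:

    use std::cell::RefCell;

    // Stand-in for the arena's Chunk; the real one also tracks fill/is_copy.
    struct Chunk { capacity: usize }

    struct Arena { copy_head: RefCell<Chunk> }

    impl Arena {
        // The head can now be inspected through `&self`...
        fn chunk_size(&self) -> usize {
            self.copy_head.borrow().capacity
        }

        // ...and even replaced through `&self`, which the old plain
        // `copy_head: Chunk` field only allowed via `&mut self`.
        fn replace_head(&self, new: Chunk) {
            *self.copy_head.borrow_mut() = new;
        }
    }

    fn main() {
        let arena = Arena { copy_head: RefCell::new(Chunk { capacity: 32 }) };
        arena.replace_head(Chunk { capacity: 64 }); // no &mut arena needed
        assert_eq!(arena.chunk_size(), 64);
    }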
@@ -95,8 +95,8 @@ impl Arena {
     /// Allocate a new Arena with `initial_size` bytes preallocated.
     pub fn new_with_size(initial_size: uint) -> Arena {
         Arena {
-            head: chunk(initial_size, false),
-            copy_head: chunk(initial_size, true),
+            head: RefCell::new(chunk(initial_size, false)),
+            copy_head: RefCell::new(chunk(initial_size, true)),
             chunks: RefCell::new(Vec::new()),
         }
     }
@@ -114,7 +114,7 @@ fn chunk(size: uint, is_copy: bool) -> Chunk {
 impl Drop for Arena {
     fn drop(&mut self) {
         unsafe {
-            destroy_chunk(&self.head);
+            destroy_chunk(&*self.head.borrow());
             for chunk in self.chunks.borrow().iter() {
                 if !chunk.is_copy.get() {
                     destroy_chunk(chunk);
@@ -171,38 +171,40 @@ fn un_bitpack_tydesc_ptr(p: uint) -> (*TyDesc, bool) {
 
 impl Arena {
     fn chunk_size(&self) -> uint {
-        self.copy_head.capacity()
+        self.copy_head.borrow().capacity()
     }
+
     // Functions for the POD part of the arena
-    fn alloc_copy_grow(&mut self, n_bytes: uint, align: uint) -> *u8 {
+    fn alloc_copy_grow(&self, n_bytes: uint, align: uint) -> *u8 {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        self.chunks.borrow_mut().push(self.copy_head.clone());
-        self.copy_head =
+        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
+
+        *self.copy_head.borrow_mut() =
             chunk(num::next_power_of_two(new_min_chunk_size + 1u), true);
 
         return self.alloc_copy_inner(n_bytes, align);
     }
 
     #[inline]
-    fn alloc_copy_inner(&mut self, n_bytes: uint, align: uint) -> *u8 {
-        unsafe {
-            let start = round_up(self.copy_head.fill.get(), align);
-            let end = start + n_bytes;
-            if end > self.chunk_size() {
-                return self.alloc_copy_grow(n_bytes, align);
-            }
-            self.copy_head.fill.set(end);
+    fn alloc_copy_inner(&self, n_bytes: uint, align: uint) -> *u8 {
+        let start = round_up(self.copy_head.borrow().fill.get(), align);
+
+        let end = start + n_bytes;
+        if end > self.chunk_size() {
+            return self.alloc_copy_grow(n_bytes, align);
+        }
 
-            //debug!("idx = {}, size = {}, align = {}, fill = {}",
-            //       start, n_bytes, align, head.fill.get());
+        let copy_head = self.copy_head.borrow();
+        copy_head.fill.set(end);
 
-            self.copy_head.as_ptr().offset(start as int)
+        unsafe {
+            copy_head.as_ptr().offset(start as int)
         }
     }
 
     #[inline]
-    fn alloc_copy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
+    fn alloc_copy<'a, T>(&'a self, op: || -> T) -> &'a T {
         unsafe {
             let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                             mem::min_align_of::<T>());
@@ -213,42 +215,48 @@ impl Arena {
     }
 
     // Functions for the non-POD part of the arena
-    fn alloc_noncopy_grow(&mut self, n_bytes: uint, align: uint)
-                          -> (*u8, *u8) {
+    fn alloc_noncopy_grow(&self, n_bytes: uint, align: uint) -> (*u8, *u8) {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        self.chunks.borrow_mut().push(self.head.clone());
-        self.head =
+        self.chunks.borrow_mut().push(self.head.borrow().clone());
+
+        *self.head.borrow_mut() =
             chunk(num::next_power_of_two(new_min_chunk_size + 1u), false);
 
         return self.alloc_noncopy_inner(n_bytes, align);
     }
 
     #[inline]
-    fn alloc_noncopy_inner(&mut self, n_bytes: uint, align: uint)
-                           -> (*u8, *u8) {
-        unsafe {
-            let tydesc_start = self.head.fill.get();
-            let after_tydesc = self.head.fill.get() + mem::size_of::<*TyDesc>();
+    fn alloc_noncopy_inner(&self, n_bytes: uint, align: uint) -> (*u8, *u8) {
+        // Be careful not to leave any `head` borrows active, because
+        // `alloc_noncopy_grow` borrows it mutably.
+        let (start, end, tydesc_start, head_capacity) = {
+            let head = self.head.borrow();
+            let fill = head.fill.get();
+
+            let tydesc_start = fill;
+            let after_tydesc = fill + mem::size_of::<*TyDesc>();
             let start = round_up(after_tydesc, align);
             let end = start + n_bytes;
 
-            if end > self.head.capacity() {
-                return self.alloc_noncopy_grow(n_bytes, align);
-            }
+            (start, end, tydesc_start, head.capacity())
+        };
 
-            self.head.fill.set(round_up(end, mem::align_of::<*TyDesc>()));
+        if end > head_capacity {
+            return self.alloc_noncopy_grow(n_bytes, align);
+        }
 
-            //debug!("idx = {}, size = {}, align = {}, fill = {}",
-            //       start, n_bytes, align, head.fill);
+        let head = self.head.borrow();
+        head.fill.set(round_up(end, mem::align_of::<*TyDesc>()));
 
-            let buf = self.head.as_ptr();
+        unsafe {
+            let buf = head.as_ptr();
             return (buf.offset(tydesc_start as int), buf.offset(start as int));
         }
     }
 
     #[inline]
-    fn alloc_noncopy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
+    fn alloc_noncopy<'a, T>(&'a self, op: || -> T) -> &'a T {
         unsafe {
             let tydesc = get_tydesc::<T>();
             let (ty_ptr, ptr) =
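The comment about not leaving `head` borrows active is the crux of using `RefCell` correctly here: its borrow rules are enforced at runtime, so a shared borrow still alive when `alloc_noncopy_grow` calls `borrow_mut()` would panic rather than fail to compile. A small sketch of the hazard, assuming nothing beyond the standard library:

    use std::cell::RefCell;

    fn main() {
        let head = RefCell::new(0usize);

        // The patch's approach: copy what is needed out inside a block so
        // the shared borrow is released before any mutable borrow happens.
        let capacity = { *head.borrow() };
        if capacity == 0 {
            *head.borrow_mut() = 64; // fine: no other borrow is live
        }

        // Holding the first borrow across the borrow_mut() instead would
        // compile, but panic at runtime with "already borrowed":
        //     let r = head.borrow();
        //     *head.borrow_mut() = 128;
        assert_eq!(*head.borrow(), 64);
    }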
@@ -274,12 +282,10 @@ impl Arena {
     #[inline]
     pub fn alloc<'a, T>(&'a self, op: || -> T) -> &'a T {
         unsafe {
-            // FIXME #13933: Remove/justify all `&T` to `&mut T` transmutes
-            let this: &mut Arena = mem::transmute::<&_, &mut _>(self);
             if intrinsics::needs_drop::<T>() {
-                this.alloc_noncopy(op)
+                self.alloc_noncopy(op)
             } else {
-                this.alloc_copy(op)
+                self.alloc_copy(op)
             }
         }
     }
@@ -298,6 +304,20 @@ fn test_arena_destructors() {
     }
 }
 
+#[test]
+fn test_arena_alloc_nested() {
+    struct Inner { value: uint }
+    struct Outer<'a> { inner: &'a Inner }
+
+    let arena = Arena::new();
+
+    let result = arena.alloc(|| Outer {
+        inner: arena.alloc(|| Inner { value: 10 })
+    });
+
+    assert_eq!(result.inner.value, 10);
+}
+
 #[test]
 #[should_fail]
 fn test_arena_destructors_fail() {
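The new test is the payoff of the signature change: the closure passed to the outer `alloc` captures `arena` and calls `alloc` again, which only borrow-checks when `alloc` takes `&self`. A toy illustration of the same shape, using indices instead of references so it stays entirely in safe code (`Toy` and its methods are invented for this example):

    use std::cell::RefCell;

    struct Toy { items: RefCell<Vec<u32>> }

    impl Toy {
        fn alloc<F: FnOnce() -> u32>(&self, op: F) -> usize {
            let value = op(); // may re-enter alloc; no borrow is live here
            self.items.borrow_mut().push(value);
            self.items.borrow().len() - 1
        }
    }

    fn main() {
        let toy = Toy { items: RefCell::new(Vec::new()) };
        // Nested use, as in test_arena_alloc_nested. If alloc took
        // `&mut self`, the closure's capture of `toy` would conflict with
        // the outer call's mutable borrow and the program would not compile.
        let outer = toy.alloc(|| toy.alloc(|| 10) as u32);
        assert_eq!(toy.items.borrow()[outer], 0); // inner landed at index 0
    }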
@@ -325,19 +345,20 @@ fn test_arena_destructors_fail() {
 /// run again for these objects.
 pub struct TypedArena<T> {
     /// A pointer to the next object to be allocated.
-    ptr: *T,
+    ptr: Cell<*T>,
 
     /// A pointer to the end of the allocated area. When this pointer is
     /// reached, a new chunk is allocated.
-    end: *T,
+    end: Cell<*T>,
 
     /// A pointer to the first arena segment.
-    first: Option<Box<TypedArenaChunk<T>>>,
+    first: RefCell<TypedArenaChunkRef<T>>,
 }
+type TypedArenaChunkRef<T> = Option<Box<TypedArenaChunk<T>>>;
 
 struct TypedArenaChunk<T> {
     /// Pointer to the next arena segment.
-    next: Option<Box<TypedArenaChunk<T>>>,
+    next: TypedArenaChunkRef<T>,
 
     /// The number of elements that this chunk can hold.
     capacity: uint,
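Note the split: `ptr` and `end` get `Cell` rather than `RefCell` because a raw pointer is a plain `Copy` value, so `get`/`set` just copy it in and out with no borrow flag to maintain, while `first` owns a `Box` and needs `RefCell`'s borrow tracking. A sketch of the `Cell` half, in modern syntax with `*const u8` standing in for the diff's pre-1.0 `*T`:

    use std::cell::Cell;

    struct Bump {
        ptr: Cell<*const u8>,
        end: Cell<*const u8>,
    }

    impl Bump {
        fn remaining(&self) -> usize {
            self.end.get() as usize - self.ptr.get() as usize
        }

        // Bump the pointer through &self; no runtime borrow checks involved.
        fn advance(&self, n: usize) {
            self.ptr.set((self.ptr.get() as usize + n) as *const u8);
        }
    }

    fn main() {
        let buf = [0u8; 64];
        let bump = Bump {
            ptr: Cell::new(buf.as_ptr()),
            end: Cell::new(unsafe { buf.as_ptr().add(64) }),
        };
        bump.advance(16);
        assert_eq!(bump.remaining(), 48);
    }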
@@ -423,53 +444,52 @@ impl<T> TypedArena<T> {
     pub fn with_capacity(capacity: uint) -> TypedArena<T> {
         let chunk = TypedArenaChunk::<T>::new(None, capacity);
         TypedArena {
-            ptr: chunk.start() as *T,
-            end: chunk.end() as *T,
-            first: Some(chunk),
+            ptr: Cell::new(chunk.start() as *T),
+            end: Cell::new(chunk.end() as *T),
+            first: RefCell::new(Some(chunk)),
         }
     }
 
     /// Allocates an object in the TypedArena, returning a reference to it.
     #[inline]
     pub fn alloc<'a>(&'a self, object: T) -> &'a T {
-        unsafe {
-            // FIXME #13933: Remove/justify all `&T` to `&mut T` transmutes
-            let this: &mut TypedArena<T> = mem::transmute::<&_, &mut _>(self);
-            if this.ptr == this.end {
-                this.grow()
-            }
+        if self.ptr == self.end {
+            self.grow()
+        }
 
-            let ptr: &'a mut T = mem::transmute(this.ptr);
+        let ptr: &'a T = unsafe {
+            let ptr: &'a mut T = mem::transmute(self.ptr);
             ptr::write(ptr, object);
-            this.ptr = this.ptr.offset(1);
-            let ptr: &'a T = ptr;
+            self.ptr.set(self.ptr.get().offset(1));
             ptr
-        }
+        };
+
+        ptr
     }
 
     /// Grows the arena.
     #[inline(never)]
-    fn grow(&mut self) {
-        let chunk = self.first.take_unwrap();
+    fn grow(&self) {
+        let chunk = self.first.borrow_mut().take_unwrap();
         let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
         let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
-        self.ptr = chunk.start() as *T;
-        self.end = chunk.end() as *T;
-        self.first = Some(chunk)
+        self.ptr.set(chunk.start() as *T);
+        self.end.set(chunk.end() as *T);
+        *self.first.borrow_mut() = Some(chunk)
     }
 }
 
 #[unsafe_destructor]
 impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
         // Determine how much was filled.
-        let start = self.first.get_ref().start() as uint;
-        let end = self.ptr as uint;
+        let start = self.first.borrow().get_ref().start() as uint;
+        let end = self.ptr.get() as uint;
         let diff = (end - start) / mem::size_of::<T>();
 
         // Pass that to the `destroy` method.
         unsafe {
-            self.first.get_mut_ref().destroy(diff)
+            self.first.borrow_mut().get_mut_ref().destroy(diff)
        }
     }
 }
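`grow` keeps the original policy: take the current chunk, chain it behind a new one with twice the capacity (`checked_mul` guards against overflow), and reset the bump pointers. A safe, simplified rendition of that policy, with `SimpleArena` invented for the sketch, chunk chaining replaced by a `Vec` of chunks, and the reference-returning part (the arena's unsafe core) deliberately elided:

    use std::cell::RefCell;

    struct SimpleArena<T> { chunks: RefCell<Vec<Vec<T>>> }

    impl<T> SimpleArena<T> {
        fn with_capacity(n: usize) -> SimpleArena<T> {
            SimpleArena { chunks: RefCell::new(vec![Vec::with_capacity(n)]) }
        }

        fn alloc(&self, object: T) {
            let mut chunks = self.chunks.borrow_mut();
            if chunks.last().unwrap().len() == chunks.last().unwrap().capacity() {
                // Double the capacity, checking for overflow as grow() does.
                let cap = chunks.last().unwrap().capacity().checked_mul(2).unwrap();
                chunks.push(Vec::with_capacity(cap));
            }
            // Pushing never reallocates a full chunk, so earlier elements
            // never move; that stability is what lets the real arena hand
            // out &'a T references into its chunks.
            chunks.last_mut().unwrap().push(object);
        }
    }

    fn main() {
        let arena = SimpleArena::with_capacity(2);
        for i in 0..10 { arena.alloc(i); }
        let total: usize = arena.chunks.borrow().iter().map(|c| c.len()).sum();
        assert_eq!(total, 10);
    }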