@@ -196,9 +196,9 @@ impl<T> TypedArena<T> {
                     self.end.set(last_chunk.end());
                     return;
                 } else {
-                    let prev_capacity = last_chunk.storage.cap();
+                    new_capacity = last_chunk.storage.cap();
                     loop {
-                        new_capacity = prev_capacity.checked_mul(2).unwrap();
+                        new_capacity = new_capacity.checked_mul(2).unwrap();
                         if new_capacity >= currently_used_cap + n {
                             break;
                         }
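
This hunk fixes the capacity-doubling loop: the removed lines re-doubled the unchanged `prev_capacity` on every pass, so the loop could never make progress when a single doubling was not enough, while the added lines compound the doubling on `new_capacity` itself. A minimal standalone sketch of the corrected logic, with hypothetical names (`next_capacity`, `current_cap`, `used`, `needed`) used purely for illustration:

```rust
/// Doubles `current_cap` until it can hold `used + needed` elements.
/// Standalone sketch of the corrected loop; not part of the commit.
fn next_capacity(current_cap: usize, used: usize, needed: usize) -> usize {
    let mut new_capacity = current_cap;
    loop {
        // Compounding doubling: each iteration builds on the previous result,
        // unlike the removed code, which re-doubled the fixed starting value.
        new_capacity = new_capacity.checked_mul(2).expect("capacity overflow");
        if new_capacity >= used + needed {
            return new_capacity;
        }
    }
}

fn main() {
    // With the old logic this call would never terminate (1 * 2 stays at 2 < 10).
    assert_eq!(next_capacity(1, 4, 6), 16);
}
```
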
@@ -280,6 +280,133 @@ impl<T> Drop for TypedArena<T> {
 
 unsafe impl<T: Send> Send for TypedArena<T> {}
 
+pub struct DroplessArena {
+    /// A pointer to the next object to be allocated.
+    ptr: Cell<*mut u8>,
+
+    /// A pointer to the end of the allocated area. When this pointer is
+    /// reached, a new chunk is allocated.
+    end: Cell<*mut u8>,
+
+    /// A vector of arena chunks.
+    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
+}
+
+impl DroplessArena {
+    pub fn new() -> DroplessArena {
+        DroplessArena {
+            ptr: Cell::new(0 as *mut u8),
+            end: Cell::new(0 as *mut u8),
+            chunks: RefCell::new(vec![]),
+        }
+    }
+
+    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
+        let ptr = ptr as *const u8 as *mut u8;
+        for chunk in &*self.chunks.borrow() {
+            if chunk.start() <= ptr && ptr < chunk.end() {
+                return true;
+            }
+        }
+
+        false
+    }
+
+    fn align_for<T>(&self) {
+        let align = mem::align_of::<T>();
+        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
+        self.ptr.set(final_address as *mut u8);
+        assert!(self.ptr <= self.end);
+    }
+
+    #[inline(never)]
+    #[cold]
+    fn grow<T>(&self, n: usize) {
+        let needed_bytes = n * mem::size_of::<T>();
+        unsafe {
+            let mut chunks = self.chunks.borrow_mut();
+            let (chunk, mut new_capacity);
+            if let Some(last_chunk) = chunks.last_mut() {
+                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
+                    self.end.set(last_chunk.end());
+                    return;
+                } else {
+                    new_capacity = last_chunk.storage.cap();
+                    loop {
+                        new_capacity = new_capacity.checked_mul(2).unwrap();
+                        if new_capacity >= used_bytes + needed_bytes {
+                            break;
+                        }
+                    }
+                }
+            } else {
+                new_capacity = cmp::max(needed_bytes, PAGE);
+            }
+            chunk = TypedArenaChunk::<u8>::new(new_capacity);
+            self.ptr.set(chunk.start());
+            self.end.set(chunk.end());
+            chunks.push(chunk);
+        }
+    }
+
+    #[inline]
+    pub fn alloc<T>(&self, object: T) -> &mut T {
+        unsafe {
+            assert!(!intrinsics::needs_drop::<T>());
+            assert!(mem::size_of::<T>() != 0);
+
+            self.align_for::<T>();
+            let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
+            if (future_end as *mut u8) >= self.end.get() {
+                self.grow::<T>(1)
+            }
+
+            let ptr = self.ptr.get();
+            // Set the pointer past ourselves
+            self.ptr.set(intrinsics::arith_offset(
+                self.ptr.get(), mem::size_of::<T>() as isize
+            ) as *mut u8);
+            // Write into uninitialized memory.
+            ptr::write(ptr as *mut T, object);
+            &mut *(ptr as *mut T)
+        }
+    }
+
+    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
+    /// reference to it. Will panic if passed a zero-sized type.
+    ///
+    /// Panics:
+    ///  - Zero-sized types
+    ///  - Zero-length slices
+    #[inline]
+    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
+        where T: Copy {
+        unsafe {
+            assert!(!intrinsics::needs_drop::<T>());
+        }
+        assert!(mem::size_of::<T>() != 0);
+        assert!(slice.len() != 0);
+        self.align_for::<T>();
+
+        let future_end = unsafe {
+            intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
+        };
+        if (future_end as *mut u8) >= self.end.get() {
+            self.grow::<T>(slice.len());
+        }
+
+        unsafe {
+            let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
+            self.ptr.set(intrinsics::arith_offset(
+                self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
+            ) as *mut u8);
+            arena_slice.copy_from_slice(slice);
+            arena_slice
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     extern crate test;
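
A rough usage sketch of the `DroplessArena` added above (not part of the commit): it rejects types with destructors via the `needs_drop` asserts, aligns the bump pointer for each requested type, and hands out references tied to the arena. The test-style harness and the name `dropless_arena_smoke` are illustrative and assume the code sits inside this crate's own test module, where `DroplessArena` is in scope.

```rust
// Illustrative only: assumes `DroplessArena` is visible, e.g. inside `mod tests`.
#[test]
fn dropless_arena_smoke() {
    let arena = DroplessArena::new();

    // A single value; `u32` has no destructor, so the `needs_drop` assert passes.
    let x: &mut u32 = arena.alloc(42);
    assert_eq!(*x, 42);
    assert!(arena.in_arena(x as *const u32));

    // A `Copy` slice; the bytes are copied into the arena's current chunk.
    let xs: &mut [u8] = arena.alloc_slice(&[1, 2, 3, 4]);
    xs[0] = 9;
    assert_eq!(&xs[..], &[9, 2, 3, 4][..]);
    assert!(arena.in_arena(xs.as_ptr()));
}
```
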