@@ -132,30 +132,54 @@ impl<T> TypedArena<T> {
     /// Allocates an object in the `TypedArena`, returning a reference to it.
     #[inline]
     pub fn alloc(&self, object: T) -> &mut T {
-        if self.ptr == self.end {
-            self.grow(1)
-        }
+        // Zero sized path
+        if mem::size_of::<T>() == 0 {
+            if self.ptr == self.end {
+                self.grow(1)
+            }
 
-        unsafe {
-            if mem::size_of::<T>() == 0 {
+            unsafe {
                 self.ptr
                     .set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1)
                         as *mut T);
                 let ptr = mem::align_of::<T>() as *mut T;
                 // Don't drop the object. This `write` is equivalent to `forget`.
                 ptr::write(ptr, object);
-                &mut *ptr
+                return &mut *ptr;
+            }
+        }
+
+        let ptr = self.ptr.get();
+
+        unsafe {
+            if std::intrinsics::unlikely(ptr == self.end.get()) {
+                self.grow_and_alloc(object)
             } else {
-                let ptr = self.ptr.get();
-                // Advance the pointer.
-                self.ptr.set(self.ptr.get().offset(1));
-                // Write into uninitialized memory.
-                ptr::write(ptr, object);
-                &mut *ptr
+                self.alloc_unchecked(ptr, object)
             }
         }
     }
 
+    #[inline(always)]
+    unsafe fn alloc_unchecked(&self, ptr: *mut T, object: T) -> &mut T {
+        // Advance the pointer.
+        self.ptr.set(ptr.offset(1));
+        // Write into uninitialized memory.
+        ptr::write(ptr, object);
+        &mut *ptr
+    }
+
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc(&self, object: T) -> &mut T {
+        // We move the object in this function so if it has a destructor
+        // the fast path need not have an unwind handler to destroy it
+        self.grow(1);
+        unsafe {
+            self.alloc_unchecked(self.ptr.get(), object)
+        }
+    }
+
     /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
     /// reference to it. Will panic if passed a zero-sized types.
     ///
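The shape `alloc` takes after this hunk is a fast-path/slow-path split: the hot path is a pointer compare plus `alloc_unchecked`, and the rare refill is outlined into `grow_and_alloc`, which also takes ownership of `object` so the fast path needs no unwind handler to drop it. Below is a minimal, self-contained sketch of the same pattern; the `TinyArena` type and its fields are invented for illustration and are not part of this diff.

use std::cell::RefCell;

/// Toy arena used only to illustrate the fast-path / cold-path split.
struct TinyArena<T> {
    chunks: RefCell<Vec<Vec<T>>>,
}

impl<T> TinyArena<T> {
    fn new() -> Self {
        TinyArena { chunks: RefCell::new(vec![Vec::with_capacity(8)]) }
    }

    #[inline]
    fn alloc(&self, object: T) {
        let mut chunks = self.chunks.borrow_mut();
        let last = chunks.last_mut().unwrap();
        if last.len() == last.capacity() {
            // Unlikely case: release the borrow and hand `object` to the
            // outlined cold path, keeping the hot path free of drop glue.
            drop(chunks);
            self.grow_and_alloc(object);
        } else {
            // Fast path: capacity is known to be available, so this never reallocates.
            last.push(object);
        }
    }

    #[inline(never)]
    #[cold]
    fn grow_and_alloc(&self, object: T) {
        // `object` is moved into this function, mirroring the comment in the diff:
        // if growing panics, only this cold function has to destroy it.
        let mut chunk = Vec::with_capacity(16);
        chunk.push(object);
        self.chunks.borrow_mut().push(chunk);
    }
}

fn main() {
    let arena = TinyArena::new();
    for i in 0..100 {
        arena.alloc(i.to_string());
    }
    assert_eq!(arena.chunks.borrow().iter().map(|c| c.len()).sum::<usize>(), 100);
}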
@@ -174,7 +198,7 @@ impl<T> TypedArena<T> {
         let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
         let at_least_bytes = slice.len() * mem::size_of::<T>();
         if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
+            self.grow_slice(slice.len());
         }
 
         unsafe {
@@ -186,9 +210,14 @@ impl<T> TypedArena<T> {
         }
     }
 
-    /// Grows the arena.
     #[inline(never)]
     #[cold]
+    fn grow_slice(&self, n: usize) {
+        self.grow(n)
+    }
+
+    /// Grows the arena.
+    #[inline(always)]
     fn grow(&self, n: usize) {
         unsafe {
             let mut chunks = self.chunks.borrow_mut();
@@ -283,6 +312,22 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
 
 unsafe impl<T: Send> Send for TypedArena<T> {}
 
+type BackingType = usize;
+const BLOCK_SIZE: usize = std::mem::size_of::<BackingType>();
+
+#[inline(always)]
+fn required_backing_types(bytes: usize) -> usize {
+    assert!(BLOCK_SIZE.is_power_of_two());
+    // FIXME: This addition could overflow
+    (bytes + BLOCK_SIZE - 1) / BLOCK_SIZE
+}
+
+#[inline(always)]
+fn align(val: usize, align: usize) -> usize {
+    assert!(align.is_power_of_two());
+    (val + align - 1) & !(align - 1)
+}
+
 pub struct DroplessArena {
     /// A pointer to the next object to be allocated.
     ptr: Cell<*mut u8>,
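The two helpers added here are the usual power-of-two rounding idioms: `required_backing_types` is a ceiling division by `BLOCK_SIZE`, and `align` rounds `val` up to the next multiple of `align` via the `(val + align - 1) & !(align - 1)` mask trick. A small standalone check of the arithmetic, with `BLOCK_SIZE` pinned to 8 (what `size_of::<usize>()` gives on a 64-bit target) so the numbers are concrete:

const BLOCK_SIZE: usize = 8;

fn required_backing_types(bytes: usize) -> usize {
    assert!(BLOCK_SIZE.is_power_of_two());
    (bytes + BLOCK_SIZE - 1) / BLOCK_SIZE
}

fn align(val: usize, align: usize) -> usize {
    assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

fn main() {
    // 13 bytes need two 8-byte `usize`s of backing storage; 16 bytes need exactly two.
    assert_eq!(required_backing_types(13), 2);
    assert_eq!(required_backing_types(16), 2);
    // Rounding up to an 8-byte boundary: (13 + 7) & !7 == 16, and 16 stays 16.
    assert_eq!(align(13, 8), 16);
    assert_eq!(align(16, 8), 16);
}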
@@ -292,7 +337,7 @@ pub struct DroplessArena {
     end: Cell<*mut u8>,
 
     /// A vector of arena chunks.
-    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
+    chunks: RefCell<Vec<TypedArenaChunk<BackingType>>>,
 }
 
 unsafe impl Send for DroplessArena {}
@@ -310,69 +355,84 @@ impl Default for DroplessArena {
 
 impl DroplessArena {
     pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const u8 as *mut u8;
+        let ptr = ptr as *const u8 as *mut BackingType;
 
         self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
     }
 
-    #[inline]
-    fn align(&self, align: usize) {
-        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
-        self.ptr.set(final_address as *mut u8);
-        assert!(self.ptr <= self.end);
-    }
-
-    #[inline(never)]
-    #[cold]
     fn grow(&self, needed_bytes: usize) {
         unsafe {
+            let needed_vals = required_backing_types(needed_bytes);
             let mut chunks = self.chunks.borrow_mut();
             let (chunk, mut new_capacity);
             if let Some(last_chunk) = chunks.last_mut() {
                 let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                let used_vals = required_backing_types(used_bytes);
                 if last_chunk
                     .storage
-                    .reserve_in_place(used_bytes, needed_bytes)
+                    .reserve_in_place(used_vals, needed_vals)
                 {
-                    self.end.set(last_chunk.end());
+                    self.end.set(last_chunk.end() as *mut u8);
                     return;
                 } else {
                     new_capacity = last_chunk.storage.cap();
                     loop {
                         new_capacity = new_capacity.checked_mul(2).unwrap();
-                        if new_capacity >= used_bytes + needed_bytes {
+                        if new_capacity >= used_vals + needed_vals {
                             break;
                         }
                     }
                 }
             } else {
-                new_capacity = cmp::max(needed_bytes, PAGE);
+                new_capacity = cmp::max(needed_vals, required_backing_types(PAGE));
             }
-            chunk = TypedArenaChunk::<u8>::new(new_capacity);
-            self.ptr.set(chunk.start());
-            self.end.set(chunk.end());
+            chunk = TypedArenaChunk::<BackingType>::new(new_capacity);
+            self.ptr.set(chunk.start() as *mut u8);
+            self.end.set(chunk.end() as *mut u8);
             chunks.push(chunk);
         }
     }
 
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc_raw(&self, bytes: usize) -> &mut [u8] {
+        self.grow(bytes);
+        unsafe {
+            self.alloc_raw_unchecked(self.ptr.get(), bytes)
+        }
+    }
+
+    #[inline(always)]
+    unsafe fn alloc_raw_unchecked(&self, start: *mut u8, bytes: usize) -> &mut [u8] {
+        // Tell LLVM that `start` is aligned to BLOCK_SIZE
+        std::intrinsics::assume(start as usize == align(start as usize, BLOCK_SIZE));
+
+        // Set the pointer past ourselves and align it
+        let end = start.offset(bytes as isize) as usize;
+        let end = align(end, BLOCK_SIZE) as *mut u8;
+        self.ptr.set(end);
+
+        // Return the result
+        slice::from_raw_parts_mut(start, bytes)
+    }
+
     #[inline]
     pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
         unsafe {
             assert!(bytes != 0);
+            assert!(align <= BLOCK_SIZE);
+            assert!(std::mem::align_of::<BackingType>() == std::mem::size_of::<BackingType>());
+            // FIXME: Check that `bytes` fit in a isize
 
-            self.align(align);
-
-            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
-            if (future_end as *mut u8) >= self.end.get() {
-                self.grow(bytes);
-            }
-
+            // FIXME: arith_offset could overflow here.
+            // Find some way to guarantee this doesn't happen for small fixed size types
             let ptr = self.ptr.get();
-            // Set the pointer past ourselves
-            self.ptr.set(
-                intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8,
-            );
-            slice::from_raw_parts_mut(ptr, bytes)
+            let future_end = intrinsics::arith_offset(ptr, bytes as isize);
+            if std::intrinsics::unlikely((future_end as *mut u8) >= self.end.get()) {
+                self.grow_and_alloc_raw(bytes)
+            } else {
+                self.alloc_raw_unchecked(ptr, bytes)
+            }
         }
     }
 
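The net effect on `alloc_raw`: the old per-allocation `self.align(align)` call disappears because the arena now keeps `self.ptr` permanently `BLOCK_SIZE`-aligned. `alloc_raw_unchecked` rounds the bump pointer up after every allocation, `alloc_raw` asserts `align <= BLOCK_SIZE` up front, and the `assume` lets LLVM rely on that alignment for the returned slice. A standalone simulation of that invariant using addresses only (the request sizes and alignments below are made up; `BLOCK_SIZE` is pinned to 8 as on a 64-bit target):

const BLOCK_SIZE: usize = 8; // size_of::<usize>() on a 64-bit target

fn align(val: usize, align: usize) -> usize {
    assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

fn main() {
    let mut ptr = 0x1000usize; // chunk start, assumed BLOCK_SIZE-aligned
    for &(bytes, requested_align) in &[(1usize, 1usize), (24, 8), (3, 2), (40, 8), (7, 4)] {
        assert!(requested_align <= BLOCK_SIZE);
        // The bump pointer is always BLOCK_SIZE-aligned, so it already satisfies
        // any requested alignment up to BLOCK_SIZE without an explicit align step.
        assert_eq!(ptr % BLOCK_SIZE, 0);
        assert_eq!(ptr % requested_align, 0);
        // What `alloc_raw_unchecked` does: advance past the bytes, then round up.
        ptr = align(ptr + bytes, BLOCK_SIZE);
    }
    assert_eq!(ptr % BLOCK_SIZE, 0);
}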