@@ -23,7 +23,9 @@
 
 extern crate alloc;
 
+use rustc_data_structures::cold_path;
 use rustc_data_structures::sync::MTLock;
+use smallvec::SmallVec;
 
 use std::cell::{Cell, RefCell};
 use std::cmp;
@@ -55,13 +57,16 @@ pub struct TypedArena<T> {
 struct TypedArenaChunk<T> {
     /// The raw storage for the arena chunk.
     storage: RawVec<T>,
+    /// The number of valid entries in the chunk.
+    entries: usize,
 }
 
 impl<T> TypedArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
         TypedArenaChunk {
             storage: RawVec::with_capacity(capacity),
+            entries: 0,
         }
     }
 
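The new `entries` field counts how many slots of the chunk were actually initialized; later hunks use it so that teardown only runs `Drop` for elements that were really written, instead of for the chunk's full capacity as the old `destroy(cap)` calls did. A minimal sketch of the idea this enables, using a hypothetical helper rather than the chunk's own `destroy` method:

use std::ptr;

// Hypothetical illustration, not part of the patch: drop only the first
// `entries` slots of a partially initialized buffer. Dropping all capacity
// slots would run destructors on uninitialized memory.
unsafe fn drop_initialized<T>(start: *mut T, entries: usize) {
    for i in 0..entries {
        ptr::drop_in_place(start.add(i));
    }
}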
@@ -149,6 +154,34 @@ impl<T> TypedArena<T> {
         }
     }
 
+    #[inline]
+    fn can_allocate(&self, len: usize) -> bool {
+        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
+        available_capacity_bytes >= at_least_bytes
+    }
+
+    /// Ensures there's enough space in the current chunk to fit `len` objects.
+    #[inline]
+    fn ensure_capacity(&self, len: usize) {
+        if !self.can_allocate(len) {
+            self.grow(len);
+            debug_assert!(self.can_allocate(len));
+        }
+    }
+
+    #[inline]
+    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
+        assert!(mem::size_of::<T>() != 0);
+        assert!(len != 0);
+
+        self.ensure_capacity(len);
+
+        let start_ptr = self.ptr.get();
+        self.ptr.set(start_ptr.add(len));
+        start_ptr
+    }
+
     /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
     /// reference to it. Will panic if passed a zero-sized type.
     ///
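The capacity check in `can_allocate` is plain pointer arithmetic: the free space in the current chunk is the byte distance between the bump pointer and the chunk end, and a request for `len` values needs `len * size_of::<T>()` bytes, computed with `checked_mul` so an absurd `len` panics instead of wrapping. A standalone sketch of the same arithmetic with made-up addresses (not the arena's own code):

use std::mem;

fn can_allocate_demo<T>(ptr: usize, end: usize, len: usize) -> bool {
    let available_capacity_bytes = end - ptr;
    let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
    available_capacity_bytes >= at_least_bytes
}

fn main() {
    // 64 bytes left in the chunk: eight u64 values fit, nine do not.
    assert!(can_allocate_demo::<u64>(0x1000, 0x1040, 8));
    assert!(!can_allocate_demo::<u64>(0x1000, 0x1040, 9));
}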
@@ -161,21 +194,64 @@ impl<T> TypedArena<T> {
     where
         T: Copy,
     {
+        unsafe {
+            let len = slice.len();
+            let start_ptr = self.alloc_raw_slice(len);
+            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+            slice::from_raw_parts_mut(start_ptr, len)
+        }
+    }
+
+    #[inline]
+    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
         assert!(mem::size_of::<T>() != 0);
-        assert!(slice.len() != 0);
+        let mut iter = iter.into_iter();
+        let size_hint = iter.size_hint();
 
-        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
-        let at_least_bytes = slice.len() * mem::size_of::<T>();
-        if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
-        }
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                // We know the exact number of elements the iterator will produce here
+                let len = min;
 
-        unsafe {
-            let start_ptr = self.ptr.get();
-            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
-            self.ptr.set(start_ptr.add(arena_slice.len()));
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+                if len == 0 {
+                    return &mut [];
+                }
+
+                self.ensure_capacity(len);
+
+                let slice = self.ptr.get();
+
+                unsafe {
+                    let mut ptr = self.ptr.get();
+                    for _ in 0..len {
+                        // Write into uninitialized memory.
+                        ptr::write(ptr, iter.next().unwrap());
+                        // Advance the pointer.
+                        ptr = ptr.offset(1);
+                        // Update the pointer per iteration so if `iter.next()` panics
+                        // we destroy the correct amount
+                        self.ptr.set(ptr);
+                    }
+                    slice::from_raw_parts_mut(slice, len)
+                }
+            }
+            _ => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then forgetting
+                    // the content of the SmallVec
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw_slice(len);
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        vec.set_len(0);
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
         }
     }
 
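Two things carry the weight in `TypedArena::alloc_from_iter`: the fast path only fires when the iterator's `size_hint()` lower and upper bounds agree, and `self.ptr` is advanced after every `ptr::write` so that if `iter.next()` panics, the arena's drop glue destroys exactly the elements written so far. Iterators with an inexact hint fall back to `cold_path`, collecting into a `SmallVec<[_; 8]>` (up to eight elements on the stack before spilling) and copying that buffer into the arena. A hedged usage sketch, assuming a test inside the crate and the `Default` impl that `TypedArena` provides:

#[test]
fn typed_arena_alloc_from_iter_sketch() {
    let arena: TypedArena<u32> = TypedArena::default();

    // `map` over a range reports an exact size_hint, so this takes the
    // fast path and writes elements straight into the current chunk.
    let doubled = arena.alloc_from_iter((0..10u32).map(|i| i * 2));
    assert_eq!(doubled.len(), 10);

    // `filter` cannot know its final length, so this goes through the
    // SmallVec fallback inside `cold_path`.
    let odds = arena.alloc_from_iter((0..10u32).filter(|i| i % 2 == 1));
    assert_eq!(odds.len(), 5);
}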
@@ -189,6 +265,7 @@ impl<T> TypedArena<T> {
         if let Some(last_chunk) = chunks.last_mut() {
             let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
             let currently_used_cap = used_bytes / mem::size_of::<T>();
+            last_chunk.entries = currently_used_cap;
             if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                 self.end.set(last_chunk.end());
                 return;
@@ -222,8 +299,7 @@ impl<T> TypedArena<T> {
                 let len = chunks_borrow.len();
                 // If `T` is ZST, code below has no effect.
                 for mut chunk in chunks_borrow.drain(..len-1) {
-                    let cap = chunk.storage.cap();
-                    chunk.destroy(cap);
+                    chunk.destroy(chunk.entries);
                 }
             }
         }
@@ -265,8 +341,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
                 self.clear_last_chunk(&mut last_chunk);
                 // The last chunk will be dropped. Destroy all other chunks.
                 for chunk in chunks_borrow.iter_mut() {
-                    let cap = chunk.storage.cap();
-                    chunk.destroy(cap);
+                    chunk.destroy(chunk.entries);
                 }
             }
             // RawVec handles deallocation of `last_chunk` and `self.chunks`.
@@ -410,6 +485,54 @@ impl DroplessArena {
             arena_slice
         }
     }
+
+    #[inline]
+    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
+        let mut iter = iter.into_iter();
+        assert!(mem::size_of::<T>() != 0);
+        assert!(!mem::needs_drop::<T>());
+
+        let size_hint = iter.size_hint();
+
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                // We know the exact number of elements the iterator will produce here
+                let len = min;
+
+                if len == 0 {
+                    return &mut []
+                }
+                let size = len.checked_mul(mem::size_of::<T>()).unwrap();
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                unsafe {
+                    for i in 0..len {
+                        ptr::write(mem.offset(i as isize), iter.next().unwrap())
+                    }
+                    slice::from_raw_parts_mut(mem, len)
+                }
+            }
+            (_, _) => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then forgetting
+                    // the content of the SmallVec
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw(
+                            len * mem::size_of::<T>(),
+                            mem::align_of::<T>()
+                        ) as *mut _ as *mut T;
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        vec.set_len(0);
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
+        }
+    }
 }
 
 #[derive(Default)]
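`DroplessArena::alloc_from_iter` mirrors the `TypedArena` version, but asserts `!mem::needs_drop::<T>()` up front because this arena never runs destructors for its contents, and it sizes the raw allocation from the element type's size and alignment via `alloc_raw`. A hedged usage sketch along the same lines, assuming `DroplessArena::default()` is available as in libarena:

#[test]
fn dropless_arena_alloc_from_iter_sketch() {
    let arena = DroplessArena::default();

    // Exact-size iterator: elements are written directly into arena memory.
    let squares = arena.alloc_from_iter((1u32..=5).map(|i| i * i));
    assert_eq!(squares.len(), 5);
    assert_eq!(squares[4], 25);

    // A type that needs Drop (e.g. String) would trip the needs_drop assertion.
}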