@@ -46,6 +46,7 @@ use std::intrinsics;
 use std::marker::{PhantomData, Send};
 use std::mem;
 use std::ptr;
+use std::slice;
 
 use alloc::heap;
 use alloc::raw_vec::RawVec;
@@ -133,7 +134,7 @@ impl<T> TypedArena<T> {
     #[inline]
     pub fn alloc(&self, object: T) -> &mut T {
         if self.ptr == self.end {
-            self.grow()
+            self.grow(1)
         }
 
         unsafe {
@@ -154,24 +155,56 @@ impl<T> TypedArena<T> {
         }
     }
 
+    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
+    /// reference to it. Will panic if passed a zero-sized type.
+    #[inline]
+    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
+        where T: Copy {
+        assert!(mem::size_of::<T>() != 0);
+        if slice.len() == 0 {
+            return unsafe { slice::from_raw_parts_mut(heap::EMPTY as *mut T, 0) };
+        }
+
+        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let at_least_bytes = slice.len() * mem::size_of::<T>();
+        if available_capacity_bytes < at_least_bytes {
+            self.grow(slice.len());
+        }
+
+        unsafe {
+            let start_ptr = self.ptr.get();
+            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
+            self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
+            arena_slice.copy_from_slice(slice);
+            arena_slice
+        }
+    }
+
     /// Grows the arena.
     #[inline(never)]
     #[cold]
-    fn grow(&self) {
+    fn grow(&self, n: usize) {
         unsafe {
             let mut chunks = self.chunks.borrow_mut();
-            let (chunk, new_capacity);
+            let (chunk, mut new_capacity);
             if let Some(last_chunk) = chunks.last_mut() {
-                if last_chunk.storage.double_in_place() {
+                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                let currently_used_cap = used_bytes / mem::size_of::<T>();
+                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                     self.end.set(last_chunk.end());
                     return;
                 } else {
                     let prev_capacity = last_chunk.storage.cap();
-                    new_capacity = prev_capacity.checked_mul(2).unwrap();
+                    // Keep doubling until the chunk fits `currently_used_cap + n` elements.
+                    new_capacity = prev_capacity;
+                    loop {
+                        new_capacity = new_capacity.checked_mul(2).unwrap();
+                        if new_capacity >= currently_used_cap + n { break; }
+                    }
                 }
             } else {
                 let elem_size = cmp::max(1, mem::size_of::<T>());
-                new_capacity = cmp::max(1, PAGE / elem_size);
+                new_capacity = cmp::max(n, PAGE / elem_size);
             }
             chunk = TypedArenaChunk::<T>::new(new_capacity);
             self.ptr.set(chunk.start());
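For reference, below is a minimal usage sketch of the API added above. It is not part of the commit; it assumes a nightly toolchain where the in-tree `arena` crate is reachable (e.g. via `#![feature(rustc_private)]`) and uses `TypedArena::new()` as defined elsewhere in this file.

    #![feature(rustc_private)]
    extern crate arena;

    use arena::TypedArena;

    fn main() {
        // One arena per element type; allocations live as long as the arena itself.
        let arena: TypedArena<u32> = TypedArena::new();

        // Single values still go through `alloc`, which now calls `grow(1)` when
        // the current chunk is full.
        let x: &mut u32 = arena.alloc(7);
        *x += 1;

        // `alloc_slice` copies a `Copy` slice into the arena in one step and
        // returns a mutable slice backed by arena memory.
        let copied: &mut [u32] = arena.alloc_slice(&[1, 2, 3, 4]);
        copied[0] = 10;

        assert_eq!(*x, 8);
        assert_eq!(copied[0], 10);
        assert_eq!(copied.len(), 4);
    }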