@@ -19,6 +19,7 @@
 #![feature(rustc_attrs)]
 #![cfg_attr(test, feature(test))]
 #![feature(strict_provenance)]
+#![feature(ptr_const_cast)]
 
 use smallvec::SmallVec;
 
@@ -27,7 +28,7 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::marker::{PhantomData, Send};
 use std::mem::{self, MaybeUninit};
-use std::ptr;
+use std::ptr::{self, NonNull};
 use std::slice;
 
 #[inline(never)]
@@ -55,15 +56,24 @@ pub struct TypedArena<T> {
 
 struct ArenaChunk<T = u8> {
     /// The raw storage for the arena chunk.
-    storage: Box<[MaybeUninit<T>]>,
+    storage: NonNull<[MaybeUninit<T>]>,
     /// The number of valid entries in the chunk.
     entries: usize,
 }
 
+unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T> {
+    fn drop(&mut self) {
+        unsafe { Box::from_raw(self.storage.as_mut()) };
+    }
+}
+
 impl<T> ArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> ArenaChunk<T> {
-        ArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
+        ArenaChunk {
+            storage: NonNull::new(Box::into_raw(Box::new_uninit_slice(capacity))).unwrap(),
+            entries: 0,
+        }
     }
 
     /// Destroys this arena chunk.
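The storage field drops from an owning `Box` to a raw `NonNull`, presumably so the chunk no longer asserts `Box`-style unique ownership of the buffer on every move and access; under Stacked Borrows that uniqueness invalidates pointers previously handed out into the chunk. Deallocation moves into an explicit `Drop` impl that rebuilds the `Box` exactly once, with `#[may_dangle]` (nightly `dropck_eyepatch`) asserting to the drop checker that the destructor never touches values of type `T`, only the allocation. A minimal standalone sketch of the same ownership pattern, with an illustrative name (`RawChunk` is not from the patch) and without the `#[may_dangle]` attribute so it compiles on stable:

```rust
use std::mem::MaybeUninit;
use std::ptr::NonNull;

/// Illustrative stand-in for ArenaChunk: the buffer is owned through a
/// raw pointer, so no Box uniqueness is asserted while the chunk lives.
struct RawChunk<T> {
    storage: NonNull<[MaybeUninit<T>]>,
}

impl<T> RawChunk<T> {
    fn new(capacity: usize) -> RawChunk<T> {
        // Allocate via Box, then immediately surrender ownership to the
        // raw pointer, as the patched `ArenaChunk::new` does.
        let boxed: Box<[MaybeUninit<T>]> = Box::new_uninit_slice(capacity);
        RawChunk { storage: NonNull::new(Box::into_raw(boxed)).unwrap() }
    }
}

impl<T> Drop for RawChunk<T> {
    fn drop(&mut self) {
        // Rebuild the Box exactly once, only to deallocate the buffer.
        unsafe { drop(Box::from_raw(self.storage.as_ptr())) };
    }
}

fn main() {
    let chunk: RawChunk<u32> = RawChunk::new(8);
    drop(chunk); // buffer freed here via Box::from_raw
}
```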
@@ -72,14 +82,15 @@ impl<T> ArenaChunk<T> {
         // The branch on needs_drop() is an -O1 performance optimization.
         // Without the branch, dropping TypedArena<u8> takes linear time.
         if mem::needs_drop::<T>() {
-            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
+            let slice = &mut *(self.storage.as_mut());
+            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
         }
     }
 
     // Returns a pointer to the first allocated object.
     #[inline]
     fn start(&mut self) -> *mut T {
-        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
+        self.storage.as_ptr() as *mut T
     }
 
     // Returns a pointer to the end of the allocated space.
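`start()` now derives the element pointer with a plain cast: `NonNull::as_ptr` yields `*mut [MaybeUninit<T>]`, and casting a raw slice pointer to a sized element type keeps the data address while discarding the length metadata, with no intermediate `&mut` reborrow. A small standalone illustration (not from the patch):

```rust
use std::mem::MaybeUninit;

fn main() {
    let mut buf: Box<[MaybeUninit<u32>]> = Box::new_uninit_slice(4);
    // A raw slice pointer, like the one ArenaChunk now stores via NonNull.
    let slice_ptr: *mut [MaybeUninit<u32>] = &mut *buf;
    // The cast keeps the slice's data address and drops its length --
    // the same move as `self.storage.as_ptr() as *mut T` in `start()`.
    let first: *mut u32 = slice_ptr as *mut u32;
    unsafe { first.write(42) };
    assert_eq!(unsafe { buf[0].assume_init() }, 42);
}
```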
@@ -90,7 +101,7 @@ impl<T> ArenaChunk<T> {
             // A pointer as large as possible for zero-sized elements.
             ptr::invalid_mut(!0)
         } else {
-            self.start().add(self.storage.len())
+            self.start().add((*self.storage.as_ptr()).len())
         }
     }
 }
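The zero-sized case is untouched but worth noting: for ZSTs the chunk never dereferences its storage, so `end()` can hand back `ptr::invalid_mut(!0)`, an address-only pointer with no provenance from the `strict_provenance` feature gated at the top of the file (later stabilized under the name `ptr::without_provenance_mut`). Any fixed address works because pointer offsets on a zero-sized type add `n * 0` bytes, as this small illustration (not from the patch) shows:

```rust
use std::ptr::NonNull;

fn main() {
    // For a zero-sized element type, `add` moves by n * size_of::<T>()
    // = 0 bytes, so "start" and "start + 1000 elements" coincide.
    let start: NonNull<()> = NonNull::dangling();
    let end = unsafe { start.as_ptr().add(1000) };
    assert_eq!(start.as_ptr(), end);
}
```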
@@ -274,7 +285,7 @@ impl<T> TypedArena<T> {
                 // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
+                new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / elem_size / 2);
                 new_cap *= 2;
             } else {
                 new_cap = PAGE / elem_size;
@@ -382,7 +393,7 @@ impl DroplessArena {
                 // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
+                new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / 2);
                 new_cap *= 2;
             } else {
                 new_cap = PAGE;
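The final two hunks are the same mechanical substitution in `TypedArena` and `DroplessArena`: the previous chunk's length is now read through the raw slice pointer instead of the `Box`. The growth policy itself is unchanged: start at one page, double each time, and cap a chunk at `HUGE_PAGE` bytes by clamping before the doubling. A standalone sketch of that calculation, assuming the 4 KiB / 2 MiB values rustc_arena defines for `PAGE` and `HUGE_PAGE`:

```rust
const PAGE: usize = 4096; // first chunk: one page worth of elements
const HUGE_PAGE: usize = 2 * 1024 * 1024; // per-chunk size cap in bytes

fn next_capacity(prev_len: Option<usize>, elem_size: usize) -> usize {
    match prev_len {
        None => PAGE / elem_size,
        // Clamp before doubling so the result never exceeds
        // HUGE_PAGE / elem_size elements, i.e. HUGE_PAGE bytes.
        Some(len) => len.min(HUGE_PAGE / elem_size / 2) * 2,
    }
}

fn main() {
    // For an 8-byte element: 512, 1024, 2048, ... capped at 262144.
    let mut cap = next_capacity(None, 8);
    for _ in 0..12 {
        print!("{cap} ");
        cap = next_capacity(Some(cap), 8);
    }
    println!();
}
```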