
Commit f92a8fa

auto merge of #14768 : riccieri/rust/detransmute-arena, r=alexcrichton
**Update** I've reimplemented this using `Cell` and `RefCell`, as suggested by @alexcrichton. By taking care with the duration of the borrows, I was able to maintain the recursive allocation feature (now covered by a test) without the use of `Unsafe`, and without breaking the non-aliasing `&mut` invariant.

**Original** Changes both `Arena` and `TypedArena` to contain an inner struct wrapped in an `Unsafe`, and changes field access to go through those instead of transmuting `&self` to `&mut self`.

Part of #13933
2 parents b1302f9 + 47b72e3 commit f92a8fa
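
The change described above swaps ad-hoc `&self`-to-`&mut self` transmutes for interior mutability. As a rough illustration of that pattern (not code from this commit; the `Counter` type and its fields are made up, and the syntax is modern Rust rather than the 2014 dialect in the diff below), mutation through a shared reference is expressed with `Cell` for small `Copy` data and `RefCell` for owned data:

use std::cell::{Cell, RefCell};

// Hypothetical type illustrating the pattern adopted by this commit:
// mutate through `&self` via Cell/RefCell instead of transmuting
// `&self` into `&mut self`.
struct Counter {
    hits: Cell<usize>,          // small Copy state: Cell
    log: RefCell<Vec<String>>,  // heap-owning state: RefCell
}

impl Counter {
    fn record(&self, name: &str) {
        // `&self` is enough: Cell::set replaces the value outright,
        // RefCell::borrow_mut hands out a dynamically checked mutable borrow.
        self.hits.set(self.hits.get() + 1);
        self.log.borrow_mut().push(name.to_string());
    }
}

fn main() {
    let c = Counter { hits: Cell::new(0), log: RefCell::new(Vec::new()) };
    c.record("alloc");
    c.record("alloc");
    assert_eq!(c.hits.get(), 2);
    assert_eq!(c.log.borrow().len(), 2);
}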

File tree: 1 file changed, +87 −67 lines


Diff for: src/libarena/lib.rs

@@ -81,8 +81,8 @@ pub struct Arena {
     // The head is separated out from the list as a unbenchmarked
     // microoptimization, to avoid needing to case on the list to access the
     // head.
-    head: Chunk,
-    copy_head: Chunk,
+    head: RefCell<Chunk>,
+    copy_head: RefCell<Chunk>,
     chunks: RefCell<Vec<Chunk>>,
 }

@@ -95,8 +95,8 @@ impl Arena {
     /// Allocate a new Arena with `initial_size` bytes preallocated.
     pub fn new_with_size(initial_size: uint) -> Arena {
         Arena {
-            head: chunk(initial_size, false),
-            copy_head: chunk(initial_size, true),
+            head: RefCell::new(chunk(initial_size, false)),
+            copy_head: RefCell::new(chunk(initial_size, true)),
             chunks: RefCell::new(Vec::new()),
         }
     }
@@ -114,7 +114,7 @@ fn chunk(size: uint, is_copy: bool) -> Chunk {
 impl Drop for Arena {
     fn drop(&mut self) {
         unsafe {
-            destroy_chunk(&self.head);
+            destroy_chunk(&*self.head.borrow());
             for chunk in self.chunks.borrow().iter() {
                 if !chunk.is_copy.get() {
                     destroy_chunk(chunk);
@@ -171,38 +171,40 @@ fn un_bitpack_tydesc_ptr(p: uint) -> (*TyDesc, bool) {

 impl Arena {
     fn chunk_size(&self) -> uint {
-        self.copy_head.capacity()
+        self.copy_head.borrow().capacity()
     }
+
     // Functions for the POD part of the arena
-    fn alloc_copy_grow(&mut self, n_bytes: uint, align: uint) -> *u8 {
+    fn alloc_copy_grow(&self, n_bytes: uint, align: uint) -> *u8 {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        self.chunks.borrow_mut().push(self.copy_head.clone());
-        self.copy_head =
+        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
+
+        *self.copy_head.borrow_mut() =
             chunk(num::next_power_of_two(new_min_chunk_size + 1u), true);

         return self.alloc_copy_inner(n_bytes, align);
     }

     #[inline]
-    fn alloc_copy_inner(&mut self, n_bytes: uint, align: uint) -> *u8 {
-        unsafe {
-            let start = round_up(self.copy_head.fill.get(), align);
-            let end = start + n_bytes;
-            if end > self.chunk_size() {
-                return self.alloc_copy_grow(n_bytes, align);
-            }
-            self.copy_head.fill.set(end);
+    fn alloc_copy_inner(&self, n_bytes: uint, align: uint) -> *u8 {
+        let start = round_up(self.copy_head.borrow().fill.get(), align);
+
+        let end = start + n_bytes;
+        if end > self.chunk_size() {
+            return self.alloc_copy_grow(n_bytes, align);
+        }

-            //debug!("idx = {}, size = {}, align = {}, fill = {}",
-            //       start, n_bytes, align, head.fill.get());
+        let copy_head = self.copy_head.borrow();
+        copy_head.fill.set(end);

-            self.copy_head.as_ptr().offset(start as int)
+        unsafe {
+            copy_head.as_ptr().offset(start as int)
         }
     }

     #[inline]
-    fn alloc_copy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
+    fn alloc_copy<'a, T>(&'a self, op: || -> T) -> &'a T {
         unsafe {
             let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                             mem::min_align_of::<T>());
@@ -213,42 +215,48 @@ impl Arena {
     }

     // Functions for the non-POD part of the arena
-    fn alloc_noncopy_grow(&mut self, n_bytes: uint, align: uint)
-                          -> (*u8, *u8) {
+    fn alloc_noncopy_grow(&self, n_bytes: uint, align: uint) -> (*u8, *u8) {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        self.chunks.borrow_mut().push(self.head.clone());
-        self.head =
+        self.chunks.borrow_mut().push(self.head.borrow().clone());
+
+        *self.head.borrow_mut() =
             chunk(num::next_power_of_two(new_min_chunk_size + 1u), false);

         return self.alloc_noncopy_inner(n_bytes, align);
     }

     #[inline]
-    fn alloc_noncopy_inner(&mut self, n_bytes: uint, align: uint)
-                           -> (*u8, *u8) {
-        unsafe {
-            let tydesc_start = self.head.fill.get();
-            let after_tydesc = self.head.fill.get() + mem::size_of::<*TyDesc>();
+    fn alloc_noncopy_inner(&self, n_bytes: uint, align: uint) -> (*u8, *u8) {
+        // Be careful to not maintain any `head` borrows active, because
+        // `alloc_noncopy_grow` borrows it mutably.
+        let (start, end, tydesc_start, head_capacity) = {
+            let head = self.head.borrow();
+            let fill = head.fill.get();
+
+            let tydesc_start = fill;
+            let after_tydesc = fill + mem::size_of::<*TyDesc>();
             let start = round_up(after_tydesc, align);
             let end = start + n_bytes;

-            if end > self.head.capacity() {
-                return self.alloc_noncopy_grow(n_bytes, align);
-            }
+            (start, end, tydesc_start, head.capacity())
+        };

-            self.head.fill.set(round_up(end, mem::align_of::<*TyDesc>()));
+        if end > head_capacity {
+            return self.alloc_noncopy_grow(n_bytes, align);
+        }

-            //debug!("idx = {}, size = {}, align = {}, fill = {}",
-            //       start, n_bytes, align, head.fill);
+        let head = self.head.borrow();
+        head.fill.set(round_up(end, mem::align_of::<*TyDesc>()));

-            let buf = self.head.as_ptr();
+        unsafe {
+            let buf = head.as_ptr();
             return (buf.offset(tydesc_start as int), buf.offset(start as int));
         }
     }

     #[inline]
-    fn alloc_noncopy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
+    fn alloc_noncopy<'a, T>(&'a self, op: || -> T) -> &'a T {
         unsafe {
             let tydesc = get_tydesc::<T>();
             let (ty_ptr, ptr) =
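
The comment added to `alloc_noncopy_inner` points at the main hazard of the `RefCell` approach: `alloc_noncopy_grow` calls `borrow_mut()` on `head`, so any shared borrow of `head` still alive at that point would trigger a runtime borrow panic. A minimal sketch of that hazard and of the fix used here, i.e. confining the read-only borrow to its own block before deciding whether to grow (the `Chunk`, `grow`, and `alloc` items below are simplified stand-ins, not the real arena code):

use std::cell::RefCell;

// Simplified stand-ins for the arena's head chunk and its grow path.
struct Chunk { fill: usize, capacity: usize }

fn grow(head: &RefCell<Chunk>) {
    // Needs exclusive access; this would panic at runtime if any other
    // borrow of `head` were still active.
    *head.borrow_mut() = Chunk { fill: 0, capacity: 128 };
}

fn alloc(head: &RefCell<Chunk>, n: usize) {
    // Read what is needed, then let the shared borrow end.
    let (fill, capacity) = {
        let h = head.borrow();
        (h.fill, h.capacity)
    }; // shared borrow of `head` dropped here

    if fill + n > capacity {
        grow(head); // fine: no outstanding borrow of `head`
    }
    head.borrow_mut().fill += n;
}

fn main() {
    let head = RefCell::new(Chunk { fill: 0, capacity: 8 });
    alloc(&head, 16); // takes the grow path without panicking
    assert_eq!(head.borrow().fill, 16);
}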
@@ -274,12 +282,10 @@ impl Arena {
     #[inline]
     pub fn alloc<'a, T>(&'a self, op: || -> T) -> &'a T {
         unsafe {
-            // FIXME #13933: Remove/justify all `&T` to `&mut T` transmutes
-            let this: &mut Arena = mem::transmute::<&_, &mut _>(self);
             if intrinsics::needs_drop::<T>() {
-                this.alloc_noncopy(op)
+                self.alloc_noncopy(op)
             } else {
-                this.alloc_copy(op)
+                self.alloc_copy(op)
             }
         }
     }
@@ -298,6 +304,20 @@ fn test_arena_destructors() {
     }
 }

+#[test]
+fn test_arena_alloc_nested() {
+    struct Inner { value: uint }
+    struct Outer<'a> { inner: &'a Inner }
+
+    let arena = Arena::new();
+
+    let result = arena.alloc(|| Outer {
+        inner: arena.alloc(|| Inner { value: 10 })
+    });
+
+    assert_eq!(result.inner.value, 10);
+}
+
 #[test]
 #[should_fail]
 fn test_arena_destructors_fail() {
@@ -325,19 +345,20 @@ fn test_arena_destructors_fail() {
 /// run again for these objects.
 pub struct TypedArena<T> {
     /// A pointer to the next object to be allocated.
-    ptr: *T,
+    ptr: Cell<*T>,

     /// A pointer to the end of the allocated area. When this pointer is
     /// reached, a new chunk is allocated.
-    end: *T,
+    end: Cell<*T>,

     /// A pointer to the first arena segment.
-    first: Option<Box<TypedArenaChunk<T>>>,
+    first: RefCell<TypedArenaChunkRef<T>>,
 }
+type TypedArenaChunkRef<T> = Option<Box<TypedArenaChunk<T>>>;

 struct TypedArenaChunk<T> {
     /// Pointer to the next arena segment.
-    next: Option<Box<TypedArenaChunk<T>>>,
+    next: TypedArenaChunkRef<T>,

     /// The number of elements that this chunk can hold.
     capacity: uint,
@@ -423,53 +444,52 @@ impl<T> TypedArena<T> {
     pub fn with_capacity(capacity: uint) -> TypedArena<T> {
         let chunk = TypedArenaChunk::<T>::new(None, capacity);
         TypedArena {
-            ptr: chunk.start() as *T,
-            end: chunk.end() as *T,
-            first: Some(chunk),
+            ptr: Cell::new(chunk.start() as *T),
+            end: Cell::new(chunk.end() as *T),
+            first: RefCell::new(Some(chunk)),
         }
     }

     /// Allocates an object in the TypedArena, returning a reference to it.
     #[inline]
     pub fn alloc<'a>(&'a self, object: T) -> &'a T {
-        unsafe {
-            // FIXME #13933: Remove/justify all `&T` to `&mut T` transmutes
-            let this: &mut TypedArena<T> = mem::transmute::<&_, &mut _>(self);
-            if this.ptr == this.end {
-                this.grow()
-            }
+        if self.ptr == self.end {
+            self.grow()
+        }

-            let ptr: &'a mut T = mem::transmute(this.ptr);
+        let ptr: &'a T = unsafe {
+            let ptr: &'a mut T = mem::transmute(self.ptr);
             ptr::write(ptr, object);
-            this.ptr = this.ptr.offset(1);
-            let ptr: &'a T = ptr;
+            self.ptr.set(self.ptr.get().offset(1));
             ptr
-        }
+        };
+
+        ptr
     }

     /// Grows the arena.
     #[inline(never)]
-    fn grow(&mut self) {
-        let chunk = self.first.take_unwrap();
+    fn grow(&self) {
+        let chunk = self.first.borrow_mut().take_unwrap();
         let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
         let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
-        self.ptr = chunk.start() as *T;
-        self.end = chunk.end() as *T;
-        self.first = Some(chunk)
+        self.ptr.set(chunk.start() as *T);
+        self.end.set(chunk.end() as *T);
+        *self.first.borrow_mut() = Some(chunk)
     }
 }

 #[unsafe_destructor]
 impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
         // Determine how much was filled.
-        let start = self.first.get_ref().start() as uint;
-        let end = self.ptr as uint;
+        let start = self.first.borrow().get_ref().start() as uint;
+        let end = self.ptr.get() as uint;
         let diff = (end - start) / mem::size_of::<T>();

         // Pass that to the `destroy` method.
         unsafe {
-            self.first.get_mut_ref().destroy(diff)
+            self.first.borrow_mut().get_mut_ref().destroy(diff)
         }
     }
 }
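
On the `TypedArena` side the same idea is carried by `Cell` alone: the bump pointers are read with `get()`, advanced, and written back with `set()`, which is why `alloc` and `grow` can now take `&self`. A simplified, safe sketch of that bookkeeping in modern Rust, using indices instead of raw pointers (the `BumpIndex` type is hypothetical, not part of this commit):

use std::cell::Cell;

// Hypothetical index-based analogue of TypedArena's `ptr`/`end` fields:
// both are Copy values held in Cells, so advancing them needs only `&self`.
struct BumpIndex {
    next: Cell<usize>,
    end: Cell<usize>,
}

impl BumpIndex {
    fn new(capacity: usize) -> BumpIndex {
        BumpIndex { next: Cell::new(0), end: Cell::new(capacity) }
    }

    // Hands out the next slot, growing first when full; this mirrors the
    // shape of TypedArena::alloc and TypedArena::grow in the diff above.
    fn alloc_slot(&self) -> usize {
        if self.next.get() == self.end.get() {
            self.grow();
        }
        let slot = self.next.get();
        self.next.set(slot + 1);
        slot
    }

    fn grow(&self) {
        // The real arena allocates a fresh, larger chunk; here growth is
        // just a doubled capacity.
        self.end.set(self.end.get() * 2);
    }
}

fn main() {
    let idx = BumpIndex::new(2);
    assert_eq!(idx.alloc_slot(), 0);
    assert_eq!(idx.alloc_slot(), 1);
    assert_eq!(idx.alloc_slot(), 2); // this call grows first
    assert_eq!(idx.end.get(), 4);
}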
