
Commit 38eb0e5

auto merge of #17012 : thestinger/rust/sized, r=nikomatsakis

2 parents: d8a2618 + c76e3ca

8 files changed: +186 −87 lines

Diff for: src/liballoc/heap.rs (+17 −2)

@@ -14,7 +14,7 @@
 
 use core::ptr::RawPtr;
 #[cfg(not(test))] use core::raw;
-#[cfg(not(test))] use util;
+#[cfg(stage0, not(test))] use util;
 
 /// Returns a pointer to `size` bytes of memory.
 ///
@@ -119,7 +119,7 @@ unsafe fn exchange_free(ptr: *mut u8, size: uint, align: uint) {
 }
 
 // FIXME: #7496
-#[cfg(not(test))]
+#[cfg(stage0, not(test))]
 #[lang="closure_exchange_malloc"]
 #[inline]
 #[allow(deprecated)]
@@ -134,6 +134,21 @@ unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint,
     alloc as *mut u8
 }
 
+// FIXME: #7496
+#[cfg(not(stage0), not(test))]
+#[lang="closure_exchange_malloc"]
+#[inline]
+#[allow(deprecated)]
+unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint,
+                                  align: uint) -> *mut u8 {
+    let p = allocate(size, align);
+
+    let alloc = p as *mut raw::Box<()>;
+    (*alloc).drop_glue = drop_glue;
+
+    alloc as *mut u8
+}
+
 #[cfg(jemalloc)]
 mod imp {
     use core::option::{None, Option};
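
Notes on the heap.rs change: the old definitions are kept under cfg(stage0) for bootstrapping, while the new `closure_exchange_malloc` takes an explicit alignment, allocates exactly `size` bytes, and writes the drop glue into the box header. Below is a minimal sketch of that pattern in modern Rust, not the code above: `BoxHeader` is a hypothetical stand-in for the old `raw::Box<()>` header, and the `std::alloc` calls are the modern equivalents of `allocate`.

use std::alloc::{alloc, handle_alloc_error, Layout};

/// Hypothetical stand-in for `raw::Box<()>`: the first word of the
/// allocation holds the drop glue for the boxed environment.
#[repr(C)]
struct BoxHeader {
    drop_glue: fn(*mut u8),
}

/// Sketch of the lang item: `size` is assumed to already include the
/// header, as in the patched `closure_exchange_malloc`.
unsafe fn closure_exchange_malloc_sketch(drop_glue: fn(*mut u8),
                                         size: usize,
                                         align: usize) -> *mut u8 {
    let layout = Layout::from_size_align(size, align).unwrap();
    let p = alloc(layout);
    if p.is_null() {
        handle_alloc_error(layout);
    }
    // Write the drop glue into the header, then hand back the whole box.
    (*(p as *mut BoxHeader)).drop_glue = drop_glue;
    p
}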

Diff for: src/libarena/lib.rs (+50 −47)

@@ -39,17 +39,18 @@ use std::mem;
 use std::num;
 use std::ptr;
 use std::rc::Rc;
-use std::rt::heap::allocate;
+use std::rt::heap::{allocate, deallocate};
 
 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
 // will always stay at 0.
 #[deriving(Clone, PartialEq)]
 struct Chunk {
-    data: Rc<RefCell<Vec<u8> >>,
+    data: Rc<RefCell<Vec<u8>>>,
     fill: Cell<uint>,
     is_copy: Cell<bool>,
 }
+
 impl Chunk {
     fn capacity(&self) -> uint {
         self.data.borrow().capacity()
@@ -357,38 +358,37 @@ pub struct TypedArena<T> {
     end: Cell<*const T>,
 
     /// A pointer to the first arena segment.
-    first: RefCell<TypedArenaChunkRef<T>>,
+    first: RefCell<*mut TypedArenaChunk<T>>,
 }
-type TypedArenaChunkRef<T> = Option<Box<TypedArenaChunk<T>>>;
 
 struct TypedArenaChunk<T> {
     /// Pointer to the next arena segment.
-    next: TypedArenaChunkRef<T>,
+    next: *mut TypedArenaChunk<T>,
 
     /// The number of elements that this chunk can hold.
     capacity: uint,
 
    // Objects follow here, suitably aligned.
 }
 
+fn calculate_size<T>(capacity: uint) -> uint {
+    let mut size = mem::size_of::<TypedArenaChunk<T>>();
+    size = round_up(size, mem::min_align_of::<T>());
+    let elem_size = mem::size_of::<T>();
+    let elems_size = elem_size.checked_mul(&capacity).unwrap();
+    size = size.checked_add(&elems_size).unwrap();
+    size
+}
+
 impl<T> TypedArenaChunk<T> {
     #[inline]
-    fn new(next: Option<Box<TypedArenaChunk<T>>>, capacity: uint)
-           -> Box<TypedArenaChunk<T>> {
-        let mut size = mem::size_of::<TypedArenaChunk<T>>();
-        size = round_up(size, mem::min_align_of::<T>());
-        let elem_size = mem::size_of::<T>();
-        let elems_size = elem_size.checked_mul(&capacity).unwrap();
-        size = size.checked_add(&elems_size).unwrap();
-
-        let mut chunk = unsafe {
-            let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>());
-            let mut chunk: Box<TypedArenaChunk<T>> = mem::transmute(chunk);
-            ptr::write(&mut chunk.next, next);
-            chunk
-        };
-
-        chunk.capacity = capacity;
+    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: uint)
+                  -> *mut TypedArenaChunk<T> {
+        let size = calculate_size::<T>(capacity);
+        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
+                    as *mut TypedArenaChunk<T>;
+        (*chunk).next = next;
+        (*chunk).capacity = capacity;
         chunk
     }
 
@@ -406,14 +406,13 @@ impl<T> TypedArenaChunk<T> {
         }
 
         // Destroy the next chunk.
-        let next_opt = mem::replace(&mut self.next, None);
-        match next_opt {
-            None => {}
-            Some(mut next) => {
-                // We assume that the next chunk is completely filled.
-                let capacity = next.capacity;
-                next.destroy(capacity)
-            }
+        let next = self.next;
+        let size = calculate_size::<T>(self.capacity);
+        deallocate(self as *mut TypedArenaChunk<T> as *mut u8, size,
+                   mem::min_align_of::<TypedArenaChunk<T>>());
+        if next.is_not_null() {
+            let capacity = (*next).capacity;
+            (*next).destroy(capacity);
         }
     }
 
@@ -448,11 +447,13 @@ impl<T> TypedArena<T> {
     /// objects.
     #[inline]
     pub fn with_capacity(capacity: uint) -> TypedArena<T> {
-        let chunk = TypedArenaChunk::<T>::new(None, capacity);
-        TypedArena {
-            ptr: Cell::new(chunk.start() as *const T),
-            end: Cell::new(chunk.end() as *const T),
-            first: RefCell::new(Some(chunk)),
+        unsafe {
+            let chunk = TypedArenaChunk::<T>::new(ptr::mut_null(), capacity);
+            TypedArena {
+                ptr: Cell::new((*chunk).start() as *const T),
+                end: Cell::new((*chunk).end() as *const T),
+                first: RefCell::new(chunk),
+            }
         }
     }
 
@@ -476,26 +477,28 @@ impl<T> TypedArena<T> {
     /// Grows the arena.
     #[inline(never)]
     fn grow(&self) {
-        let chunk = self.first.borrow_mut().take().unwrap();
-        let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
-        let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
-        self.ptr.set(chunk.start() as *const T);
-        self.end.set(chunk.end() as *const T);
-        *self.first.borrow_mut() = Some(chunk)
+        unsafe {
+            let chunk = *self.first.borrow_mut();
+            let new_capacity = (*chunk).capacity.checked_mul(&2).unwrap();
+            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
+            self.ptr.set((*chunk).start() as *const T);
+            self.end.set((*chunk).end() as *const T);
+            *self.first.borrow_mut() = chunk
+        }
     }
 }
 
 #[unsafe_destructor]
 impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
-        // Determine how much was filled.
-        let start = self.first.borrow().as_ref().unwrap().start() as uint;
-        let end = self.ptr.get() as uint;
-        let diff = (end - start) / mem::size_of::<T>();
-
-        // Pass that to the `destroy` method.
         unsafe {
-            self.first.borrow_mut().as_mut().unwrap().destroy(diff)
+            // Determine how much was filled.
+            let start = self.first.borrow().as_ref().unwrap().start() as uint;
+            let end = self.ptr.get() as uint;
+            let diff = (end - start) / mem::size_of::<T>();
+
+            // Pass that to the `destroy` method.
+            (**self.first.borrow_mut()).destroy(diff)
        }
    }
 }
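
Notes on the arena change: `TypedArena` drops the `Option<Box<...>>` chunk list in favour of raw pointers, so chunks are created and destroyed with explicit `allocate`/`deallocate` calls, and the size arithmetic is factored into `calculate_size` (header rounded up to the element alignment, plus the element array, all overflow-checked). Below is a rough modern-Rust sketch of the same intrusive chunk list, illustrative only: the names are hypothetical, `Layout` replaces the manual `round_up`/`checked_mul` arithmetic, and it elides running the element destructors that the real `destroy` performs before freeing.

use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::marker::PhantomData;

/// Chunk header; `capacity` elements of `T` live inline after it.
struct Chunk<T> {
    next: *mut Chunk<T>,
    capacity: usize,
    _marker: PhantomData<T>,
}

/// Analogue of `calculate_size`: header, then `capacity` elements,
/// padded and overflow-checked by `Layout`.
fn chunk_layout<T>(capacity: usize) -> Layout {
    let (layout, _offset) = Layout::new::<Chunk<T>>()
        .extend(Layout::array::<T>(capacity).unwrap())
        .unwrap();
    layout.pad_to_align()
}

/// Analogue of the new `TypedArenaChunk::new`.
unsafe fn chunk_new<T>(next: *mut Chunk<T>, capacity: usize) -> *mut Chunk<T> {
    let layout = chunk_layout::<T>(capacity);
    let chunk = alloc(layout) as *mut Chunk<T>;
    if chunk.is_null() {
        handle_alloc_error(layout);
    }
    (*chunk).next = next;
    (*chunk).capacity = capacity;
    chunk
}

/// Analogue of `destroy`: read `next` first, free this chunk, then walk
/// on, matching the deallocate-then-recurse order in the diff.
unsafe fn chunk_destroy<T>(chunk: *mut Chunk<T>) {
    let next = (*chunk).next;
    let capacity = (*chunk).capacity;
    dealloc(chunk as *mut u8, chunk_layout::<T>(capacity));
    if !next.is_null() {
        chunk_destroy::<T>(next);
    }
}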

Diff for: src/librustc/middle/trans/base.rs (+33 −2)

@@ -66,7 +66,7 @@ use middle::trans::glue;
 use middle::trans::inline;
 use middle::trans::intrinsic;
 use middle::trans::machine;
-use middle::trans::machine::{llsize_of, llsize_of_real};
+use middle::trans::machine::{llsize_of, llsize_of_real, llalign_of_min};
 use middle::trans::meth;
 use middle::trans::monomorphize;
 use middle::trans::tvec;
@@ -382,13 +382,44 @@ pub fn malloc_raw_dyn<'a>(bcx: &'a Block<'a>,
     Result::new(r.bcx, PointerCast(r.bcx, r.val, llty_ptr))
 }
 
+pub fn malloc_raw_dyn_proc<'a>(
+                     bcx: &'a Block<'a>,
+                     t: ty::t, alloc_fn: LangItem) -> Result<'a> {
+    let _icx = push_ctxt("malloc_raw_dyn_proc");
+    let ccx = bcx.ccx();
+
+    let langcall = require_alloc_fn(bcx, t, alloc_fn);
+
+    // Grab the TypeRef type of ptr_ty.
+    let ptr_ty = ty::mk_uniq(bcx.tcx(), t);
+    let ptr_llty = type_of(ccx, ptr_ty);
+
+    let llty = type_of(bcx.ccx(), t);
+    let size = llsize_of(bcx.ccx(), llty);
+    let llalign = C_uint(ccx, llalign_of_min(bcx.ccx(), llty) as uint);
+
+    // Allocate space:
+    let drop_glue = glue::get_drop_glue(ccx, ty::mk_uniq(bcx.tcx(), t));
+    let r = callee::trans_lang_call(
+        bcx,
+        langcall,
+        [
+            PointerCast(bcx, drop_glue, Type::glue_fn(ccx, Type::i8p(ccx)).ptr_to()),
+            size,
+            llalign
+        ],
+        None);
+    Result::new(r.bcx, PointerCast(r.bcx, r.val, ptr_llty))
+}
+
+
 pub fn malloc_raw_dyn_managed<'a>(
                       bcx: &'a Block<'a>,
                       t: ty::t,
                       alloc_fn: LangItem,
                       size: ValueRef)
                       -> Result<'a> {
-    let _icx = push_ctxt("malloc_raw_managed");
+    let _icx = push_ctxt("malloc_raw_dyn_managed");
     let ccx = bcx.ccx();
 
     let langcall = require_alloc_fn(bcx, t, alloc_fn);
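
Notes on the base.rs change: `malloc_raw_dyn_proc` is the codegen side of the new allocation path. Instead of going through the managed-box helper, a proc allocation now emits a call to the `closure_exchange_malloc` lang item with three arguments: the environment's drop glue, its size, and its minimum alignment (the renamed `malloc_raw_dyn_managed` is otherwise untouched apart from its debug label). A small illustrative sketch of what those three arguments are at runtime, with `Env` and `drop_env` as hypothetical stand-ins for the compiler-generated environment type and drop glue:

use std::mem;

fn drop_env(_p: *mut u8) {
    // stand-in for the drop glue the compiler generates for the environment
}

fn main() {
    // A proc's captured environment, modelled here as a plain tuple; in
    // the real code `t` is the tuplified box type, so the size already
    // accounts for the box header.
    type Env = (usize, String);
    let size = mem::size_of::<Env>();   // what llsize_of computes
    let align = mem::align_of::<Env>(); // what llalign_of_min computes
    // The emitted call then amounts to:
    //     closure_exchange_malloc(drop_env, size, align)
    println!("drop_glue at {:p}, size = {}, align = {}",
             drop_env as fn(*mut u8), size, align);
}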

Diff for: src/librustc/middle/trans/cleanup.rs (+55)

@@ -340,6 +340,27 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
         self.schedule_clean(cleanup_scope, drop as CleanupObj);
     }
 
+    fn schedule_free_slice(&self,
+                           cleanup_scope: ScopeId,
+                           val: ValueRef,
+                           size: ValueRef,
+                           align: ValueRef,
+                           heap: Heap) {
+        /*!
+         * Schedules a call to `free(val)`. Note that this is a shallow
+         * operation.
+         */
+
+        let drop = box FreeSlice { ptr: val, size: size, align: align, heap: heap };
+
+        debug!("schedule_free_slice({:?}, val={}, heap={:?})",
+               cleanup_scope,
+               self.ccx.tn().val_to_string(val),
+               heap);
+
+        self.schedule_clean(cleanup_scope, drop as CleanupObj);
+    }
+
     fn schedule_clean(&self,
                       cleanup_scope: ScopeId,
                       cleanup: CleanupObj) {
@@ -926,6 +947,34 @@ impl Cleanup for FreeValue {
     }
 }
 
+pub struct FreeSlice {
+    ptr: ValueRef,
+    size: ValueRef,
+    align: ValueRef,
+    heap: Heap,
+}
+
+impl Cleanup for FreeSlice {
+    fn must_unwind(&self) -> bool {
+        true
+    }
+
+    fn clean_on_unwind(&self) -> bool {
+        true
+    }
+
+    fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a> {
+        match self.heap {
+            HeapManaged => {
+                glue::trans_free(bcx, self.ptr)
+            }
+            HeapExchange => {
+                glue::trans_exchange_free_dyn(bcx, self.ptr, self.size, self.align)
+            }
+        }
+    }
+}
+
 pub struct LifetimeEnd {
     ptr: ValueRef,
 }
@@ -1020,6 +1069,12 @@ pub trait CleanupMethods<'a> {
                       val: ValueRef,
                       heap: Heap,
                       content_ty: ty::t);
+    fn schedule_free_slice(&self,
+                           cleanup_scope: ScopeId,
+                           val: ValueRef,
+                           size: ValueRef,
+                           align: ValueRef,
+                           heap: Heap);
     fn schedule_clean(&self,
                       cleanup_scope: ScopeId,
                       cleanup: CleanupObj);
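
Notes on the cleanup.rs change: `FreeSlice` mirrors the existing `FreeValue` cleanup but also carries the size and alignment that a sized exchange-heap free needs, and `schedule_free_slice` registers it on a cleanup scope as a boxed trait object. A self-contained sketch of that scheduling pattern follows; it is illustrative only (the names are not the compiler's API, and `run` prints where the real `trans` emits the free call into the block):

/// Trimmed-down version of the `Cleanup` trait object protocol.
trait Cleanup {
    fn run(&self); // stands in for `trans`, which emits LLVM IR
}

/// Everything a sized deallocation needs: pointer, byte size, alignment.
struct FreeSlice {
    ptr: usize,
    size: usize,
    align: usize,
}

impl Cleanup for FreeSlice {
    fn run(&self) {
        // stand-in for glue::trans_exchange_free_dyn(bcx, ptr, size, align)
        println!("free {:#x} (size = {}, align = {})",
                 self.ptr, self.size, self.align);
    }
}

/// A cleanup scope: cleanups pile up and run when the scope exits.
struct Scope {
    cleanups: Vec<Box<dyn Cleanup>>,
}

impl Scope {
    fn schedule_free_slice(&mut self, ptr: usize, size: usize, align: usize) {
        self.cleanups.push(Box::new(FreeSlice { ptr, size, align }));
    }
}

fn main() {
    let mut scope = Scope { cleanups: Vec::new() };
    scope.schedule_free_slice(0x1000, 64, 8);
    // On scope exit, scheduled cleanups run in reverse order.
    for c in scope.cleanups.iter().rev() {
        c.run();
    }
}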

Diff for: src/librustc/middle/trans/closure.rs (+2 −6)

@@ -24,7 +24,6 @@ use middle::trans::common::*;
 use middle::trans::datum::{Datum, DatumBlock, Expr, Lvalue, rvalue_scratch_datum};
 use middle::trans::debuginfo;
 use middle::trans::expr;
-use middle::trans::machine::llsize_of;
 use middle::trans::type_of::*;
 use middle::trans::type_::Type;
 use middle::ty;
@@ -144,15 +143,12 @@ fn allocate_cbox<'a>(bcx: &'a Block<'a>,
     let tcx = bcx.tcx();
 
     // Allocate and initialize the box:
+    let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
     match store {
         ty::UniqTraitStore => {
-            let ty = type_of(bcx.ccx(), cdata_ty);
-            let size = llsize_of(bcx.ccx(), ty);
-            // we treat proc as @ here, which isn't ideal
-            malloc_raw_dyn_managed(bcx, cdata_ty, ClosureExchangeMallocFnLangItem, size)
+            malloc_raw_dyn_proc(bcx, cbox_ty, ClosureExchangeMallocFnLangItem)
         }
         ty::RegionTraitStore(..) => {
-            let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
             let llbox = alloc_ty(bcx, cbox_ty, "__closure");
             Result::new(bcx, llbox)
         }