 extern crate alloc;

 use rustc_data_structures::cold_path;
-use rustc_data_structures::sync::MTLock;
+use rustc_data_structures::sync::{SharedWorkerLocal, WorkerLocal, Lock};
 use smallvec::SmallVec;

 use std::cell::{Cell, RefCell};
@@ -123,11 +123,6 @@ impl<T> Default for TypedArena<T> {
 }

 impl<T> TypedArena<T> {
-    pub fn in_arena(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const T as *mut T;
-
-        self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
-    }
     /// Allocates an object in the `TypedArena`, returning a reference to it.
     #[inline]
     pub fn alloc(&self, object: T) -> &mut T {
@@ -378,12 +373,6 @@ impl Default for DroplessArena {
 }

 impl DroplessArena {
-    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const u8 as *mut u8;
-
-        self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
-    }
-
     #[inline]
     fn align(&self, align: usize) {
         let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
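The `(addr + align - 1) & !(align - 1)` expression in the context line above is the usual round-up-to-alignment trick, and the same expression reappears in `CurrentChunk::align` below. A minimal standalone sketch of the computation, assuming `align` is a power of two (the `align_up` helper is hypothetical, not part of this patch):

// Round-up-to-alignment computation used by `align`; `align_up` is a
// hypothetical helper for illustration only. `align` must be a power of two,
// so `align - 1` is a mask of the low bits being cleared.
fn align_up(addr: usize, align: usize) -> usize {
    (addr + align - 1) & !(align - 1)
}

fn main() {
    assert_eq!(align_up(0x1003, 8), 0x1008); // rounds up to the next multiple of 8
    assert_eq!(align_up(0x1008, 8), 0x1008); // already-aligned addresses are unchanged
    assert_eq!(align_up(13, 4), 16);
}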
@@ -555,64 +544,164 @@ impl DroplessArena {
     }
 }

-#[derive(Default)]
-// FIXME(@Zoxc): this type is entirely unused in rustc
-pub struct SyncTypedArena<T> {
-    lock: MTLock<TypedArena<T>>,
+struct CurrentChunk<T> {
+    /// A pointer to the next object to be allocated.
+    ptr: Cell<*mut T>,
+
+    /// A pointer to the end of the allocated area. When this pointer is
+    /// reached, a new chunk is allocated.
+    end: Cell<*mut T>,
 }

-impl<T> SyncTypedArena<T> {
-    #[inline(always)]
-    pub fn alloc(&self, object: T) -> &mut T {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
+impl<T> Default for CurrentChunk<T> {
+    #[inline]
+    fn default() -> Self {
+        CurrentChunk {
+            // We set both `ptr` and `end` to 0 so that the first call to
+            // alloc() will trigger a grow().
+            ptr: Cell::new(0 as *mut T),
+            end: Cell::new(0 as *mut T),
+        }
     }
+}

-    #[inline(always)]
-    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
-    where
-        T: Copy,
-    {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
+impl<T> CurrentChunk<T> {
+    #[inline]
+    fn align(&self, align: usize) {
+        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
+        self.ptr.set(final_address as *mut T);
+        assert!(self.ptr <= self.end);
     }

+    /// Grows the arena.
     #[inline(always)]
-    pub fn clear(&mut self) {
-        self.lock.get_mut().clear();
+    fn grow(&self, n: usize, chunks: &mut Vec<TypedArenaChunk<T>>) {
+        unsafe {
+            let (chunk, mut new_capacity);
+            if let Some(last_chunk) = chunks.last_mut() {
+                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                let currently_used_cap = used_bytes / mem::size_of::<T>();
+                last_chunk.entries = currently_used_cap;
+                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
+                    self.end.set(last_chunk.end());
+                    return;
+                } else {
+                    new_capacity = last_chunk.storage.cap();
+                    loop {
+                        new_capacity = new_capacity.checked_mul(2).unwrap();
+                        if new_capacity >= currently_used_cap + n {
+                            break;
+                        }
+                    }
+                }
+            } else {
+                let elem_size = cmp::max(1, mem::size_of::<T>());
+                new_capacity = cmp::max(n, PAGE / elem_size);
+            }
+            chunk = TypedArenaChunk::<T>::new(new_capacity);
+            self.ptr.set(chunk.start());
+            self.end.set(chunk.end());
+            chunks.push(chunk);
+        }
     }
 }

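The doubling loop in `CurrentChunk::grow` only runs when the last chunk cannot be extended in place via `reserve_in_place`; it keeps doubling the previous capacity until the new chunk can hold the entries already in use plus the `n` newly requested ones. A standalone restatement of just that loop (the `next_capacity` helper is hypothetical, not part of the patch):

// Restates the capacity-doubling loop from `CurrentChunk::grow` in isolation.
// `next_capacity` is a hypothetical helper for illustration only.
fn next_capacity(current_cap: usize, used: usize, needed: usize) -> usize {
    let mut new_capacity = current_cap;
    loop {
        // Double until the chunk can hold the live entries plus the request.
        new_capacity = new_capacity.checked_mul(2).unwrap();
        if new_capacity >= used + needed {
            return new_capacity;
        }
    }
}

fn main() {
    // A 4096-entry chunk that is half full and must absorb 9000 more entries
    // grows 4096 -> 8192 -> 16384, the first doubling that fits 2048 + 9000.
    assert_eq!(next_capacity(4096, 2048, 9000), 16384);
}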
-#[derive(Default)]
 pub struct SyncDroplessArena {
-    lock: MTLock<DroplessArena>,
+    /// Pointers to the current chunk
+    current: WorkerLocal<CurrentChunk<u8>>,
+
+    /// A vector of arena chunks.
+    chunks: Lock<SharedWorkerLocal<Vec<TypedArenaChunk<u8>>>>,
+}
+
+impl Default for SyncDroplessArena {
+    #[inline]
+    fn default() -> SyncDroplessArena {
+        SyncDroplessArena {
+            current: WorkerLocal::new(|_| CurrentChunk::default()),
+            chunks: Default::default(),
+        }
+    }
 }

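The point of this layout is that `WorkerLocal` hands each worker thread its own `CurrentChunk`, so the common allocation path bumps a thread-local pointer without any synchronization; the `Lock` around the shared chunk list is only taken by `in_arena` and by the cold `grow` path below. The toy model that follows illustrates nothing more than that lock placement using standard-library types; it is not the rustc implementation, and every name in it (`CHUNK_CAPACITY`, `RETIRED_CHUNKS`, `CURRENT`, `append`) is made up for the sketch:

use std::cell::RefCell;
use std::sync::Mutex;

const CHUNK_CAPACITY: usize = 4096;

// Chunks that have filled up; the Mutex plays the role of the `Lock` around
// the shared chunk list and is only touched on the slow path.
static RETIRED_CHUNKS: Mutex<Vec<Vec<u8>>> = Mutex::new(Vec::new());

thread_local! {
    // Each thread appends into its own chunk, mirroring the idea behind
    // `WorkerLocal<CurrentChunk<u8>>`.
    static CURRENT: RefCell<Vec<u8>> = RefCell::new(Vec::with_capacity(CHUNK_CAPACITY));
}

fn append(bytes: &[u8]) {
    CURRENT.with(|current| {
        let mut chunk = current.borrow_mut();
        if chunk.len() + bytes.len() > chunk.capacity() {
            // Slow path: retire the full chunk under the lock, start a new one.
            let full = std::mem::replace(&mut *chunk, Vec::with_capacity(CHUNK_CAPACITY));
            RETIRED_CHUNKS.lock().unwrap().push(full);
        }
        // Fast path: no lock, just a thread-local append.
        chunk.extend_from_slice(bytes);
    });
}

fn main() {
    for _ in 0..10_000 {
        append(b"some bytes");
    }
    println!("retired chunks: {}", RETIRED_CHUNKS.lock().unwrap().len());
}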
 impl SyncDroplessArena {
-    #[inline(always)]
     pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        self.lock.lock().in_arena(ptr)
+        let ptr = ptr as *const u8 as *mut u8;
+
+        self.chunks.lock().iter().any(|chunks| chunks.iter().any(|chunk| {
+            chunk.start() <= ptr && ptr < chunk.end()
+        }))
     }

-    #[inline(always)]
+    #[inline(never)]
+    #[cold]
+    fn grow(&self, needed_bytes: usize) {
+        self.current.grow(needed_bytes, &mut **self.chunks.lock());
+    }
+
+    #[inline]
     pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_raw(bytes, align) as *mut [u8]) }
+        unsafe {
+            assert!(bytes != 0);
+
+            let current = &*self.current;
+
+            current.align(align);
+
+            let future_end = intrinsics::arith_offset(current.ptr.get(), bytes as isize);
+            if (future_end as *mut u8) >= current.end.get() {
+                self.grow(bytes);
+            }
+
+            let ptr = current.ptr.get();
+            // Set the pointer past ourselves
+            current.ptr.set(
+                intrinsics::arith_offset(current.ptr.get(), bytes as isize) as *mut u8,
+            );
+            slice::from_raw_parts_mut(ptr, bytes)
+        }
     }

-    #[inline(always)]
+    #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
+        assert!(!mem::needs_drop::<T>());
+
+        let mem = self.alloc_raw(
+            mem::size_of::<T>(),
+            mem::align_of::<T>()) as *mut _ as *mut T;
+
+        unsafe {
+            // Write into uninitialized memory.
+            ptr::write(mem, object);
+            &mut *mem
+        }
     }

-    #[inline(always)]
+    /// Allocates a slice of objects that are copied into the `SyncDroplessArena`, returning a
+    /// mutable reference to it. Will panic if passed a zero-sized type.
+    ///
+    /// Panics:
+    ///
+    /// - Zero-sized types
+    /// - Zero-length slices
+    #[inline]
     pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
     where
         T: Copy,
     {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
+        assert!(!mem::needs_drop::<T>());
+        assert!(mem::size_of::<T>() != 0);
+        assert!(!slice.is_empty());
+
+        let mem = self.alloc_raw(
+            slice.len() * mem::size_of::<T>(),
+            mem::align_of::<T>()) as *mut _ as *mut T;
+
+        unsafe {
+            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
+            arena_slice.copy_from_slice(slice);
+            arena_slice
+        }
     }
 }

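Both `alloc` and `alloc_slice` above guard the arena's defining invariant with `assert!(!mem::needs_drop::<T>())`: a dropless arena frees its chunks without ever running destructors, so only types without drop glue may be stored in it. A quick standalone illustration of what that check accepts and rejects:

use std::mem;

// Illustrates the `needs_drop` guard used by `alloc` and `alloc_slice`:
// types with drop glue would be rejected by the assert.
fn main() {
    assert!(!mem::needs_drop::<u32>());          // plain data: fine to store
    assert!(!mem::needs_drop::<(usize, f64)>()); // tuples of plain data: fine
    assert!(mem::needs_drop::<String>());        // owns a heap buffer: rejected
    assert!(mem::needs_drop::<Vec<u8>>());       // likewise rejected
}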