 extern crate alloc;
 
 use rustc_data_structures::cold_path;
-use rustc_data_structures::sync::MTLock;
 use smallvec::SmallVec;
 
 use std::cell::{Cell, RefCell};
@@ -116,11 +115,6 @@ impl<T> Default for TypedArena<T> {
 }
 
 impl<T> TypedArena<T> {
-    pub fn in_arena(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const T as *mut T;
-
-        self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
-    }
     /// Allocates an object in the `TypedArena`, returning a reference to it.
     #[inline]
     pub fn alloc(&self, object: T) -> &mut T {
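The removed `in_arena` was a simple containment test over the arena's chunk list: a pointer belongs to the arena iff it falls inside the half-open range `[start, end)` of some chunk. A rough standalone sketch of that check, using a hypothetical `Chunk` type with raw start/end pointers rather than the arena's real chunk type:

```rust
/// Hypothetical stand-in for the arena's chunk type: one allocation
/// block covering the half-open byte range [start, end).
struct Chunk {
    start: *mut u8,
    end: *mut u8,
}

/// A pointer is "in the arena" iff it lies inside some chunk's range.
fn in_arena(chunks: &[Chunk], ptr: *const u8) -> bool {
    let ptr = ptr as *mut u8;
    chunks.iter().any(|c| c.start <= ptr && ptr < c.end)
}

fn main() {
    let mut buf = [0u8; 16];
    let range = buf.as_mut_ptr_range();
    let chunks = [Chunk { start: range.start, end: range.end }];
    assert!(in_arena(&chunks, &buf[3]));
}
```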
@@ -334,12 +328,6 @@ impl Default for DroplessArena {
 }
 
 impl DroplessArena {
-    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        let ptr = ptr as *const u8 as *mut u8;
-
-        self.chunks.borrow().iter().any(|chunk| chunk.start() <= ptr && ptr < chunk.end())
-    }
-
     #[inline]
     fn align(&self, align: usize) {
         let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
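For context on the surviving `align` helper: `(addr + align - 1) & !(align - 1)` rounds an address up to the next multiple of `align`, which is valid only when `align` is a power of two. A quick standalone check of the same mask trick:

```rust
/// Round `addr` up to the next multiple of `align`, using the same
/// bitmask trick as DroplessArena::align. Requires a power-of-two align.
fn align_up(addr: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (addr + align - 1) & !(align - 1)
}

fn main() {
    assert_eq!(align_up(13, 8), 16); // bumped to the next 8-byte boundary
    assert_eq!(align_up(16, 8), 16); // already aligned: unchanged
    assert_eq!(align_up(0, 8), 0);
}
```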
@@ -500,66 +488,5 @@ impl DroplessArena {
     }
 }
 
-#[derive(Default)]
-// FIXME(@Zoxc): this type is entirely unused in rustc
-pub struct SyncTypedArena<T> {
-    lock: MTLock<TypedArena<T>>,
-}
-
-impl<T> SyncTypedArena<T> {
-    #[inline(always)]
-    pub fn alloc(&self, object: T) -> &mut T {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
-    }
-
-    #[inline(always)]
-    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
-    where
-        T: Copy,
-    {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
-    }
-
-    #[inline(always)]
-    pub fn clear(&mut self) {
-        self.lock.get_mut().clear();
-    }
-}
-
-#[derive(Default)]
-pub struct SyncDroplessArena {
-    lock: MTLock<DroplessArena>,
-}
-
-impl SyncDroplessArena {
-    #[inline(always)]
-    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
-        self.lock.lock().in_arena(ptr)
-    }
-
-    #[inline(always)]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_raw(bytes, align) as *mut [u8]) }
-    }
-
-    #[inline(always)]
-    pub fn alloc<T>(&self, object: T) -> &mut T {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
-    }
-
-    #[inline(always)]
-    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
-    where
-        T: Copy,
-    {
-        // Extend the lifetime of the result since it's limited to the lock guard
-        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
-    }
-}
-
 #[cfg(test)]
 mod tests;
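The deleted wrappers all relied on the same trick: allocate under an `MTLock`, then round-trip the returned reference through a raw pointer to detach its lifetime from the lock guard. A minimal sketch of that pattern using `std::sync::Mutex` and a `Box`-backed store (toy types, not rustc's) — it is sound only because arena-style allocations have stable addresses and are never freed before the container itself; the removed code returned `&mut T` the same way, which additionally requires that no allocation is handed out twice:

```rust
use std::sync::Mutex;

/// Toy stand-in for the removed SyncTypedArena: each allocation is boxed,
/// so its heap address stays stable even when the Vec reallocates.
#[derive(Default)]
struct SyncArena {
    lock: Mutex<Vec<Box<u32>>>,
}

impl SyncArena {
    fn alloc(&self, value: u32) -> &u32 {
        let mut guard = self.lock.lock().unwrap();
        guard.push(Box::new(value));
        let ptr: *const u32 = &**guard.last().unwrap();
        // Extend the lifetime past the guard, mirroring the removed
        // `unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }`.
        unsafe { &*ptr }
    }
}

fn main() {
    let arena = SyncArena::default();
    let a = arena.alloc(1);
    let b = arena.alloc(2); // a second alloc must not invalidate `a`
    assert_eq!((*a, *b), (1, 2));
}
```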