From 6f568e72f336cd64fecb240c6aafbe4bf7ed4379 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 28 Feb 2020 22:54:10 +0100 Subject: [PATCH 01/10] miri engine: turn some debug_assert into assert --- src/librustc_mir/interpret/cast.rs | 2 +- src/librustc_mir/interpret/memory.rs | 2 +- src/librustc_mir/interpret/operator.rs | 2 +- src/librustc_mir/interpret/place.rs | 10 ++++------ src/librustc_mir/interpret/step.rs | 2 +- src/librustc_mir/interpret/terminator.rs | 2 +- src/librustc_mir/interpret/validity.rs | 6 +++--- 7 files changed, 12 insertions(+), 14 deletions(-) diff --git a/src/librustc_mir/interpret/cast.rs b/src/librustc_mir/interpret/cast.rs index 9461a06690212..78e7db44b8745 100644 --- a/src/librustc_mir/interpret/cast.rs +++ b/src/librustc_mir/interpret/cast.rs @@ -202,7 +202,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Char => { // `u8` to `char` cast - debug_assert_eq!(v as u8 as u128, v); + assert_eq!(v as u8 as u128, v); Ok(Scalar::from_uint(v, Size::from_bytes(4))) } diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs index 1df389d9c8bee..673afc6c06d25 100644 --- a/src/librustc_mir/interpret/memory.rs +++ b/src/librustc_mir/interpret/memory.rs @@ -195,7 +195,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> { kind: MemoryKind, ) -> Pointer { let id = self.tcx.alloc_map.lock().reserve(); - debug_assert_ne!( + assert_ne!( Some(kind), M::STATIC_KIND.map(MemoryKind::Machine), "dynamically allocating static memory" diff --git a/src/librustc_mir/interpret/operator.rs b/src/librustc_mir/interpret/operator.rs index 2e8c94903ca46..3b074e7b5a270 100644 --- a/src/librustc_mir/interpret/operator.rs +++ b/src/librustc_mir/interpret/operator.rs @@ -234,7 +234,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { BitXor => (Scalar::from_uint(l ^ r, size), left_layout.ty), Add | Sub | Mul | Rem | Div => { - debug_assert!(!left_layout.abi.is_signed()); + assert!(!left_layout.abi.is_signed()); let op: fn(u128, u128) -> (u128, bool) = match bin_op { Add => u128::overflowing_add, Sub => u128::overflowing_sub, diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs index 4f96cb698915d..8215b4c5aa406 100644 --- a/src/librustc_mir/interpret/place.rs +++ b/src/librustc_mir/interpret/place.rs @@ -1130,12 +1130,10 @@ where let layout = self.layout_of(ty)?; // More sanity checks - if cfg!(debug_assertions) { - let (size, align) = self.read_size_and_align_from_vtable(vtable)?; - assert_eq!(size, layout.size); - // only ABI alignment is preserved - assert_eq!(align, layout.align.abi); - } + let (size, align) = self.read_size_and_align_from_vtable(vtable)?; + assert_eq!(size, layout.size); + // only ABI alignment is preserved + assert_eq!(align, layout.align.abi); let mplace = MPlaceTy { mplace: MemPlace { meta: MemPlaceMeta::None, ..*mplace }, layout }; Ok((instance, mplace)) diff --git a/src/librustc_mir/interpret/step.rs b/src/librustc_mir/interpret/step.rs index 7d59c0181a8f3..f298a6677d6dc 100644 --- a/src/librustc_mir/interpret/step.rs +++ b/src/librustc_mir/interpret/step.rs @@ -287,7 +287,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { self.eval_terminator(terminator)?; if !self.stack.is_empty() { // This should change *something* - debug_assert!(self.cur_frame() != old_stack || self.frame().block != old_bb); + assert!(self.cur_frame() != old_stack || self.frame().block != old_bb); if let Some(block) = self.frame().block { info!("// executing 
{:?}", block); } diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs index a02a6898c1d71..28922f2c75329 100644 --- a/src/librustc_mir/interpret/terminator.rs +++ b/src/librustc_mir/interpret/terminator.rs @@ -172,7 +172,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } let caller_arg = caller_arg.next().ok_or_else(|| err_unsup!(FunctionArgCountMismatch))?; if rust_abi { - debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out"); + assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out"); } // Now, check if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) { diff --git a/src/librustc_mir/interpret/validity.rs b/src/librustc_mir/interpret/validity.rs index aa2b3040a716f..77c911a516557 100644 --- a/src/librustc_mir/interpret/validity.rs +++ b/src/librustc_mir/interpret/validity.rs @@ -142,16 +142,16 @@ fn wrapping_range_contains(r: &RangeInclusive, test: u128) -> bool { // "expected something " makes sense. fn wrapping_range_format(r: &RangeInclusive, max_hi: u128) -> String { let (lo, hi) = r.clone().into_inner(); - debug_assert!(hi <= max_hi); + assert!(hi <= max_hi); if lo > hi { format!("less or equal to {}, or greater or equal to {}", hi, lo) } else if lo == hi { format!("equal to {}", lo) } else if lo == 0 { - debug_assert!(hi < max_hi, "should not be printing if the range covers everything"); + assert!(hi < max_hi, "should not be printing if the range covers everything"); format!("less or equal to {}", hi) } else if hi == max_hi { - debug_assert!(lo > 0, "should not be printing if the range covers everything"); + assert!(lo > 0, "should not be printing if the range covers everything"); format!("greater or equal to {}", lo) } else { format!("in the range {:?}", r) From 5982e9d3ca679866a0533d69a41224c333a5348a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 29 Feb 2020 10:25:57 +0100 Subject: [PATCH 02/10] downgrade some assertions to debug_ again --- src/librustc_mir/interpret/memory.rs | 2 +- src/librustc_mir/interpret/place.rs | 10 ++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs index 673afc6c06d25..1df389d9c8bee 100644 --- a/src/librustc_mir/interpret/memory.rs +++ b/src/librustc_mir/interpret/memory.rs @@ -195,7 +195,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> { kind: MemoryKind, ) -> Pointer { let id = self.tcx.alloc_map.lock().reserve(); - assert_ne!( + debug_assert_ne!( Some(kind), M::STATIC_KIND.map(MemoryKind::Machine), "dynamically allocating static memory" diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs index 8215b4c5aa406..4f96cb698915d 100644 --- a/src/librustc_mir/interpret/place.rs +++ b/src/librustc_mir/interpret/place.rs @@ -1130,10 +1130,12 @@ where let layout = self.layout_of(ty)?; // More sanity checks - let (size, align) = self.read_size_and_align_from_vtable(vtable)?; - assert_eq!(size, layout.size); - // only ABI alignment is preserved - assert_eq!(align, layout.align.abi); + if cfg!(debug_assertions) { + let (size, align) = self.read_size_and_align_from_vtable(vtable)?; + assert_eq!(size, layout.size); + // only ABI alignment is preserved + assert_eq!(align, layout.align.abi); + } let mplace = MPlaceTy { mplace: MemPlace { meta: MemPlaceMeta::None, ..*mplace }, layout }; Ok((instance, mplace)) From 275dac7bfb6cfadd02c12edacbd4fdf529269424 Mon Sep 17 
00:00:00 2001 From: Matthew Kuo Date: Sun, 1 Mar 2020 14:15:44 -0600 Subject: [PATCH 03/10] doc(librustc_error_codes): add long error explanation for E0719 Progresses #61137 --- src/librustc_error_codes/error_codes.rs | 2 +- src/librustc_error_codes/error_codes/E0719.md | 35 +++++++++++++++++++ .../associated-type-bounds/duplicate.stderr | 1 + src/test/ui/error-codes/E0719.stderr | 1 + 4 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 src/librustc_error_codes/error_codes/E0719.md diff --git a/src/librustc_error_codes/error_codes.rs b/src/librustc_error_codes/error_codes.rs index 91a7b6c895838..a79fcb978ca51 100644 --- a/src/librustc_error_codes/error_codes.rs +++ b/src/librustc_error_codes/error_codes.rs @@ -395,6 +395,7 @@ E0714: include_str!("./error_codes/E0714.md"), E0715: include_str!("./error_codes/E0715.md"), E0716: include_str!("./error_codes/E0716.md"), E0718: include_str!("./error_codes/E0718.md"), +E0719: include_str!("./error_codes/E0719.md"), E0720: include_str!("./error_codes/E0720.md"), E0723: include_str!("./error_codes/E0723.md"), E0725: include_str!("./error_codes/E0725.md"), @@ -604,7 +605,6 @@ E0747: include_str!("./error_codes/E0747.md"), E0710, // an unknown tool name found in scoped lint E0711, // a feature has been declared with conflicting stability attributes E0717, // rustc_promotable without stability attribute - E0719, // duplicate values for associated type binding // E0721, // `await` keyword E0722, // Malformed `#[optimize]` attribute E0724, // `#[ffi_returns_twice]` is only allowed in foreign functions diff --git a/src/librustc_error_codes/error_codes/E0719.md b/src/librustc_error_codes/error_codes/E0719.md new file mode 100644 index 0000000000000..38bc63550ac7f --- /dev/null +++ b/src/librustc_error_codes/error_codes/E0719.md @@ -0,0 +1,35 @@ +The value for an associated type has already been specified. + +Erroneous code example: + +```compile_fail,E0719 +#![feature(associated_type_bounds)] + +trait FooTrait {} +trait BarTrait {} + +// error: associated type `Item` in trait `Iterator` is specified twice +struct Foo> { f: T } +``` + +`Item` in trait `Iterator` cannot be specified multiple times for struct `Foo`. +To fix this, create a new trait that is a combination of the desired traits and +specify the associated type with the new trait. + +Corrected example: + +``` +#![feature(associated_type_bounds)] + +trait FooTrait {} +trait BarTrait {} +trait FooBarTrait: FooTrait + BarTrait {} + +struct Foo> { f: T } +``` + +For more information about associated types, see [the book][bk-at]. For more +information on associated type bounds, see [RFC 2289][rfc-2289]. + +[bk-at]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#specifying-placeholder-types-in-trait-definitions-with-associated-types +[rfc-2289]: https://rust-lang.github.io/rfcs/2289-associated-type-bounds.html diff --git a/src/test/ui/associated-type-bounds/duplicate.stderr b/src/test/ui/associated-type-bounds/duplicate.stderr index df1151d876c04..82b2d32d09d57 100644 --- a/src/test/ui/associated-type-bounds/duplicate.stderr +++ b/src/test/ui/associated-type-bounds/duplicate.stderr @@ -728,3 +728,4 @@ LL | type TADyn3 = dyn Iterator; error: aborting due to 96 previous errors +For more information about this error, try `rustc --explain E0719`. 
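Spelled out with the full `associated_type_bounds` syntax, the two E0719 examples above read roughly as follows. The generic bounds on `Foo` are an illustrative reconstruction from the surrounding description (the trait names, the duplicated `Item` binding, and the combined `FooBarTrait`), not a verbatim quote of the patch.

```
#![feature(associated_type_bounds)]

trait FooTrait {}
trait BarTrait {}

// error: associated type `Item` in trait `Iterator` is specified twice
struct Foo<T: Iterator<Item: FooTrait, Item: BarTrait>> { f: T }
```

The corrected form binds `Item` only once, through the combined trait:

```
#![feature(associated_type_bounds)]

trait FooTrait {}
trait BarTrait {}
trait FooBarTrait: FooTrait + BarTrait {}

struct Foo<T: Iterator<Item: FooBarTrait>> { f: T }
```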
diff --git a/src/test/ui/error-codes/E0719.stderr b/src/test/ui/error-codes/E0719.stderr index a046fbfc3d04a..0e4bbf083baf3 100644 --- a/src/test/ui/error-codes/E0719.stderr +++ b/src/test/ui/error-codes/E0719.stderr @@ -16,3 +16,4 @@ LL | fn test() -> Box> { error: aborting due to 2 previous errors +For more information about this error, try `rustc --explain E0719`. From 99a595e406077375824a1316d4d669ad1f0a15f8 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Mon, 2 Mar 2020 11:47:49 +1100 Subject: [PATCH 04/10] Fix a leak in `DiagnosticBuilder::into_diagnostic`. Fixes #69600. --- src/librustc_errors/diagnostic_builder.rs | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/librustc_errors/diagnostic_builder.rs b/src/librustc_errors/diagnostic_builder.rs index 39f585231eea4..008d2e92418f9 100644 --- a/src/librustc_errors/diagnostic_builder.rs +++ b/src/librustc_errors/diagnostic_builder.rs @@ -136,12 +136,11 @@ impl<'a> DiagnosticBuilder<'a> { let handler = self.0.handler; - // We need to use `ptr::read` because `DiagnosticBuilder` implements `Drop`. - let diagnostic; - unsafe { - diagnostic = std::ptr::read(&self.0.diagnostic); - std::mem::forget(self); - }; + // We must use `Level::Cancelled` for `dummy` to avoid an ICE about an + // unused diagnostic. + let dummy = Diagnostic::new(Level::Cancelled, ""); + let diagnostic = std::mem::replace(&mut self.0.diagnostic, dummy); + // Logging here is useful to help track down where in logs an error was // actually emitted. debug!("buffer: diagnostic={:?}", diagnostic); From 4643b12f782fd4307a5ab1bc6809515cb053fbf1 Mon Sep 17 00:00:00 2001 From: Erin Power Date: Mon, 2 Mar 2020 12:04:09 +0100 Subject: [PATCH 05/10] Update my mailmap entry --- .mailmap | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.mailmap b/.mailmap index e5aad52ef4b79..ea8ef0eebf9a1 100644 --- a/.mailmap +++ b/.mailmap @@ -5,7 +5,6 @@ # email addresses. # -Aaron Power Erin Power Aaron Todd Abhishek Chanda Abhishek Chanda Adolfo Ochagavía @@ -84,6 +83,8 @@ Eric Holk Eric Holmes Eric Reed Erick Tryzelaar +Erin Power +Erin Power Esteban Küber Esteban Küber Esteban Küber From ba49ed01f0abd2c18313611ad43424ca827c1498 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Mon, 2 Mar 2020 13:10:24 +0100 Subject: [PATCH 06/10] clean up E0378 explanation --- src/librustc_error_codes/error_codes/E0378.md | 42 ++++++++++--------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/src/librustc_error_codes/error_codes/E0378.md b/src/librustc_error_codes/error_codes/E0378.md index 311483c8900be..7f4374738de28 100644 --- a/src/librustc_error_codes/error_codes/E0378.md +++ b/src/librustc_error_codes/error_codes/E0378.md @@ -1,10 +1,28 @@ +The `DispatchFromDyn` trait was implemented on something which is not a pointer +or a newtype wrapper around a pointer. + +Erroneous code example: + +```compile-fail,E0378 +#![feature(dispatch_from_dyn)] +use std::ops::DispatchFromDyn; + +struct WrapperExtraField { + ptr: T, + extra_stuff: i32, +} + +impl DispatchFromDyn> for WrapperExtraField +where + T: DispatchFromDyn, +{} +``` + The `DispatchFromDyn` trait currently can only be implemented for builtin pointer types and structs that are newtype wrappers around them — that is, the struct must have only one field (except for`PhantomData`), and that field must itself implement `DispatchFromDyn`. 
-Examples: - ``` #![feature(dispatch_from_dyn, unsize)] use std::{ @@ -20,6 +38,8 @@ where {} ``` +Another example: + ``` #![feature(dispatch_from_dyn)] use std::{ @@ -37,21 +57,3 @@ where T: DispatchFromDyn, {} ``` - -Example of illegal `DispatchFromDyn` implementation -(illegal because of extra field) - -```compile-fail,E0378 -#![feature(dispatch_from_dyn)] -use std::ops::DispatchFromDyn; - -struct WrapperExtraField { - ptr: T, - extra_stuff: i32, -} - -impl DispatchFromDyn> for WrapperExtraField -where - T: DispatchFromDyn, -{} -``` From 0ec14089a9b62b9e3cda6e84f6ac598ea3778933 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20Kr=C3=BCger?= Date: Mon, 2 Mar 2020 00:09:17 +0100 Subject: [PATCH 07/10] Don't convert Results to Options just for matching. --- src/librustc_resolve/imports.rs | 4 ++-- src/libstd/net/addr.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/librustc_resolve/imports.rs b/src/librustc_resolve/imports.rs index 1d502e52de4b5..73bc038ea15f9 100644 --- a/src/librustc_resolve/imports.rs +++ b/src/librustc_resolve/imports.rs @@ -1252,7 +1252,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> { // this may resolve to either a value or a type, but for documentation // purposes it's good enough to just favor one over the other. self.r.per_ns(|this, ns| { - if let Some(binding) = source_bindings[ns].get().ok() { + if let Ok(binding) = source_bindings[ns].get() { this.import_res_map.entry(directive.id).or_default()[ns] = Some(binding.res()); } }); @@ -1293,7 +1293,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> { let mut redundant_span = PerNS { value_ns: None, type_ns: None, macro_ns: None }; self.r.per_ns(|this, ns| { - if let Some(binding) = source_bindings[ns].get().ok() { + if let Ok(binding) = source_bindings[ns].get() { if binding.res() == Res::Err { return; } diff --git a/src/libstd/net/addr.rs b/src/libstd/net/addr.rs index a59d7f0263bb0..57cba6b1f7a1b 100644 --- a/src/libstd/net/addr.rs +++ b/src/libstd/net/addr.rs @@ -901,7 +901,7 @@ impl ToSocketAddrs for str { type Iter = vec::IntoIter; fn to_socket_addrs(&self) -> io::Result> { // try to parse as a regular SocketAddr first - if let Some(addr) = self.parse().ok() { + if let Ok(addr) = self.parse() { return Ok(vec![addr].into_iter()); } From fdc14cb0b0b47d1a1e1eadf7431b5fcd11568599 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sun, 1 Mar 2020 16:06:36 -0800 Subject: [PATCH 08/10] Toolstate: don't duplicate nightly tool list. 
--- src/bootstrap/toolstate.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bootstrap/toolstate.rs b/src/bootstrap/toolstate.rs index 5c39f5d5bc3ef..7cffc47293070 100644 --- a/src/bootstrap/toolstate.rs +++ b/src/bootstrap/toolstate.rs @@ -443,7 +443,7 @@ fn change_toolstate( if new_state != state { eprintln!("The state of `{}` has changed from `{}` to `{}`", tool, state, new_state); if new_state < state { - if !["rustc-guide", "miri", "embedded-book"].contains(&tool.as_str()) { + if !NIGHTLY_TOOLS.iter().any(|(name, _path)| name == tool) { regressed = true; } } From 4281fe03ee6e7679ee286479fae744d6efbcb7bd Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Mon, 2 Mar 2020 10:00:05 -0800 Subject: [PATCH 09/10] Update books --- src/doc/embedded-book | 2 +- src/doc/nomicon | 2 +- src/doc/reference | 2 +- src/doc/rust-by-example | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/doc/embedded-book b/src/doc/embedded-book index b2e1092bf67bd..b81ffb7a6f4c5 160000 --- a/src/doc/embedded-book +++ b/src/doc/embedded-book @@ -1 +1 @@ -Subproject commit b2e1092bf67bd4d7686c4553f186edbb7f5f92db +Subproject commit b81ffb7a6f4c5aaed92786e770e99db116aa4ebd diff --git a/src/doc/nomicon b/src/doc/nomicon index 3e6e1001dc6e0..71241f403091e 160000 --- a/src/doc/nomicon +++ b/src/doc/nomicon @@ -1 +1 @@ -Subproject commit 3e6e1001dc6e095dbd5c88005e80969f60e384e1 +Subproject commit 71241f403091e021842ca8275740e44d0ab0ece1 diff --git a/src/doc/reference b/src/doc/reference index 64239df6d1735..559e09caa9661 160000 --- a/src/doc/reference +++ b/src/doc/reference @@ -1 +1 @@ -Subproject commit 64239df6d173562b9deb4f012e4c3e6e960c4754 +Subproject commit 559e09caa9661043744cf7af7bd88432d966f743 diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example index 32facd5522ddb..db57f899ea2a5 160000 --- a/src/doc/rust-by-example +++ b/src/doc/rust-by-example @@ -1 +1 @@ -Subproject commit 32facd5522ddbbf37baf01e4e4b6562bc55c071a +Subproject commit db57f899ea2a56a544c8d280cbf033438666273d From d8e3557dbae23283f81d7bc45200413dd93ced4a Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Tue, 3 Mar 2020 00:08:24 +0100 Subject: [PATCH 10/10] Remove `usable_size` APIs --- src/liballoc/alloc.rs | 14 +- src/liballoc/alloc/tests.rs | 2 +- src/liballoc/boxed.rs | 4 +- src/liballoc/raw_vec.rs | 21 +- src/liballoc/raw_vec/tests.rs | 2 +- src/liballoc/rc.rs | 2 +- src/liballoc/sync.rs | 2 +- src/liballoc/tests/heap.rs | 2 +- src/libcore/alloc.rs | 304 ++++++-------------- src/libstd/alloc.rs | 16 +- src/test/ui/allocator/custom.rs | 4 +- src/test/ui/allocator/xcrate-use.rs | 4 +- src/test/ui/realloc-16687.rs | 12 +- src/test/ui/regions/regions-mock-codegen.rs | 14 +- 14 files changed, 134 insertions(+), 269 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index f41404bf8cab9..73e8121868aba 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -165,8 +165,8 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 { #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl AllocRef for Global { #[inline] - unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { - NonNull::new(alloc(layout)).ok_or(AllocErr) + unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { + NonNull::new(alloc(layout)).ok_or(AllocErr).map(|p| (p, layout.size())) } #[inline] @@ -180,13 +180,13 @@ unsafe impl AllocRef for Global { ptr: NonNull, layout: Layout, new_size: usize, - ) -> Result, AllocErr> { - 
NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr) + ) -> Result<(NonNull, usize), AllocErr> { + NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr).map(|p| (p, new_size)) } #[inline] - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { - NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr) + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { + NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr).map(|p| (p, layout.size())) } } @@ -201,7 +201,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { } else { let layout = Layout::from_size_align_unchecked(size, align); match Global.alloc(layout) { - Ok(ptr) => ptr.as_ptr(), + Ok((ptr, _)) => ptr.as_ptr(), Err(_) => handle_alloc_error(layout), } } diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs index c902971638b8e..55944398e1677 100644 --- a/src/liballoc/alloc/tests.rs +++ b/src/liballoc/alloc/tests.rs @@ -8,7 +8,7 @@ use test::Bencher; fn allocate_zeroed() { unsafe { let layout = Layout::from_size_align(1024, 1).unwrap(); - let ptr = + let (ptr, _) = Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout)); let mut i = ptr.cast::().as_ptr(); diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 3ac4bd82a3a10..4729f0290cfca 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -200,7 +200,7 @@ impl Box { let ptr = if layout.size() == 0 { NonNull::dangling() } else { - Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast() + Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast() }; Box::from_raw(ptr.as_ptr()) } @@ -270,7 +270,7 @@ impl Box<[T]> { let ptr = if layout.size() == 0 { NonNull::dangling() } else { - Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast() + Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast() }; Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len)) } diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 144654946a2ac..345834d7daacc 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -72,7 +72,7 @@ impl RawVec { RawVec::allocate_in(capacity, true, a) } - fn allocate_in(capacity: usize, zeroed: bool, mut a: A) -> Self { + fn allocate_in(mut capacity: usize, zeroed: bool, mut a: A) -> Self { unsafe { let elem_size = mem::size_of::(); @@ -87,7 +87,10 @@ impl RawVec { let layout = Layout::from_size_align(alloc_size, align).unwrap(); let result = if zeroed { a.alloc_zeroed(layout) } else { a.alloc(layout) }; match result { - Ok(ptr) => ptr.cast(), + Ok((ptr, size)) => { + capacity = size / elem_size; + ptr.cast() + } Err(_) => handle_alloc_error(layout), } }; @@ -280,7 +283,7 @@ impl RawVec { // 0, getting to here necessarily means the `RawVec` is overfull. 
assert!(elem_size != 0, "capacity overflow"); - let (new_cap, ptr) = match self.current_layout() { + let (ptr, new_cap) = match self.current_layout() { Some(cur) => { // Since we guarantee that we never allocate more than // `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as @@ -297,7 +300,7 @@ impl RawVec { alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size); match ptr_res { - Ok(ptr) => (new_cap, ptr), + Ok((ptr, new_size)) => (ptr, new_size / elem_size), Err(_) => handle_alloc_error(Layout::from_size_align_unchecked( new_size, cur.align(), @@ -310,7 +313,7 @@ impl RawVec { let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 }; let layout = Layout::array::(new_cap).unwrap(); match self.a.alloc(layout) { - Ok(ptr) => (new_cap, ptr), + Ok((ptr, new_size)) => (ptr, new_size / elem_size), Err(_) => handle_alloc_error(layout), } } @@ -598,7 +601,7 @@ impl RawVec { let align = mem::align_of::(); let old_layout = Layout::from_size_align_unchecked(old_size, align); match self.a.realloc(NonNull::from(self.ptr).cast(), old_layout, new_size) { - Ok(p) => self.ptr = p.cast().into(), + Ok((ptr, _)) => self.ptr = ptr.cast().into(), Err(_) => { handle_alloc_error(Layout::from_size_align_unchecked(new_size, align)) } @@ -631,6 +634,8 @@ impl RawVec { fallibility: Fallibility, strategy: ReserveStrategy, ) -> Result<(), TryReserveError> { + let elem_size = mem::size_of::(); + unsafe { // NOTE: we don't early branch on ZSTs here because we want this // to actually catch "asking for more than usize::MAX" in that case. @@ -662,7 +667,7 @@ impl RawVec { None => self.a.alloc(new_layout), }; - let ptr = match (res, fallibility) { + let (ptr, new_cap) = match (res, fallibility) { (Err(AllocErr), Infallible) => handle_alloc_error(new_layout), (Err(AllocErr), Fallible) => { return Err(TryReserveError::AllocError { @@ -670,7 +675,7 @@ impl RawVec { non_exhaustive: (), }); } - (Ok(ptr), _) => ptr, + (Ok((ptr, new_size)), _) => (ptr, new_size / elem_size), }; self.ptr = ptr.cast().into(); diff --git a/src/liballoc/raw_vec/tests.rs b/src/liballoc/raw_vec/tests.rs index 63087501f0e27..860058debe1fd 100644 --- a/src/liballoc/raw_vec/tests.rs +++ b/src/liballoc/raw_vec/tests.rs @@ -20,7 +20,7 @@ fn allocator_param() { fuel: usize, } unsafe impl AllocRef for BoundedAlloc { - unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { + unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { let size = layout.size(); if size > self.fuel { return Err(AllocErr); diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 9dc5447397f09..901cc70a4d82e 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -923,7 +923,7 @@ impl Rc { let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); // Allocate for the layout. - let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); + let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the RcBox let inner = mem_to_rcbox(mem.as_ptr()); diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index fd285242d5be4..fced5e680ea01 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -784,7 +784,7 @@ impl Arc { // reference (see #54908). 
let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); + let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the ArcInner let inner = mem_to_arcinner(mem.as_ptr()); diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 7fcfcf9b2945d..d159126f426c5 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -20,7 +20,7 @@ fn check_overalign_requests(mut allocator: T) { unsafe { let pointers: Vec<_> = (0..iterations) .map(|_| { - allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap() + allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().0 }) .collect(); for &ptr in &pointers { diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs index 71f7f971eabaf..f3a2b73f2b8de 100644 --- a/src/libcore/alloc.rs +++ b/src/libcore/alloc.rs @@ -11,12 +11,6 @@ use crate::num::NonZeroUsize; use crate::ptr::{self, NonNull}; use crate::usize; -/// Represents the combination of a starting address and -/// a total capacity of the returned block. -#[unstable(feature = "allocator_api", issue = "32838")] -#[derive(Debug)] -pub struct Excess(pub NonNull, pub usize); - const fn size_align() -> (usize, usize) { (mem::size_of::(), mem::align_of::()) } @@ -593,13 +587,12 @@ pub unsafe trait GlobalAlloc { /// /// * the starting address for that memory block was previously /// returned by a previous call to an allocation method (`alloc`, -/// `alloc_zeroed`, `alloc_excess`) or reallocation method -/// (`realloc`, `realloc_excess`), and +/// `alloc_zeroed`) or reallocation method (`realloc`), and /// /// * the memory block has not been subsequently deallocated, where /// blocks are deallocated either by being passed to a deallocation -/// method (`dealloc`, `dealloc_one`, `dealloc_array`) or by being -/// passed to a reallocation method (see above) that returns `Ok`. +/// method (`dealloc`) or by being passed to a reallocation method +/// (see above) that returns `Ok`. /// /// A note regarding zero-sized types and zero-sized layouts: many /// methods in the `AllocRef` trait state that allocation requests @@ -625,11 +618,9 @@ pub unsafe trait GlobalAlloc { /// /// 2. The block's size must fall in the range `[use_min, use_max]`, where: /// -/// * `use_min` is `self.usable_size(layout).0`, and +/// * `use_min` is `layout.size()`, and /// -/// * `use_max` is the capacity that was (or would have been) -/// returned when (if) the block was allocated via a call to -/// `alloc_excess` or `realloc_excess`. +/// * `use_max` is the capacity that was returned. /// /// Note that: /// @@ -643,6 +634,9 @@ pub unsafe trait GlobalAlloc { /// currently allocated via an allocator `a`, then it is legal to /// use that layout to deallocate it, i.e., `a.dealloc(ptr, k);`. /// +/// * if an allocator does not support overallocating, it is fine to +/// simply return `layout.size()` as the allocated size. +/// /// # Safety /// /// The `AllocRef` trait is an `unsafe` trait for a number of reasons, and @@ -671,8 +665,9 @@ pub unsafe trait AllocRef { // However in jemalloc for example, // `mallocx(0)` is documented as undefined behavior.) - /// Returns a pointer meeting the size and alignment guarantees of - /// `layout`. + /// On success, returns a pointer meeting the size and alignment + /// guarantees of `layout` and the actual size of the allocated block, + /// which must be greater than or equal to `layout.size()`. 
/// /// If this method returns an `Ok(addr)`, then the `addr` returned /// will be non-null address pointing to a block of storage @@ -709,7 +704,7 @@ pub unsafe trait AllocRef { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr>; + unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr>; /// Deallocate the memory referenced by `ptr`. /// @@ -728,38 +723,31 @@ pub unsafe trait AllocRef { /// to allocate that block of memory. unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout); - // == ALLOCATOR-SPECIFIC QUANTITIES AND LIMITS == - // usable_size - - /// Returns bounds on the guaranteed usable size of a successful - /// allocation created with the specified `layout`. - /// - /// In particular, if one has a memory block allocated via a given - /// allocator `a` and layout `k` where `a.usable_size(k)` returns - /// `(l, u)`, then one can pass that block to `a.dealloc()` with a - /// layout in the size range [l, u]. - /// - /// (All implementors of `usable_size` must ensure that - /// `l <= k.size() <= u`) - /// - /// Both the lower- and upper-bounds (`l` and `u` respectively) - /// are provided, because an allocator based on size classes could - /// misbehave if one attempts to deallocate a block without - /// providing a correct value for its size (i.e., one within the - /// range `[l, u]`). - /// - /// Clients who wish to make use of excess capacity are encouraged - /// to use the `alloc_excess` and `realloc_excess` instead, as - /// this method is constrained to report conservative values that - /// serve as valid bounds for *all possible* allocation method - /// calls. - /// - /// However, for clients that do not wish to track the capacity - /// returned by `alloc_excess` locally, this method is likely to - /// produce useful results. - #[inline] - fn usable_size(&self, layout: &Layout) -> (usize, usize) { - (layout.size(), layout.size()) + /// Behaves like `alloc`, but also ensures that the contents + /// are set to zero before being returned. + /// + /// # Safety + /// + /// This function is unsafe for the same reasons that `alloc` is. + /// + /// # Errors + /// + /// Returning `Err` indicates that either memory is exhausted or + /// `layout` does not meet allocator's size or alignment + /// constraints, just as in `alloc`. + /// + /// Clients wishing to abort computation in response to an + /// allocation error are encouraged to call the [`handle_alloc_error`] function, + /// rather than directly invoking `panic!` or similar. + /// + /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { + let size = layout.size(); + let result = self.alloc(layout); + if let Ok((p, _)) = result { + ptr::write_bytes(p.as_ptr(), 0, size); + } + result } // == METHODS FOR MEMORY REUSE == @@ -767,9 +755,10 @@ pub unsafe trait AllocRef { /// Returns a pointer suitable for holding data described by /// a new layout with `layout`’s alignment and a size given - /// by `new_size`. To - /// accomplish this, this may extend or shrink the allocation - /// referenced by `ptr` to fit the new layout. + /// by `new_size` and the actual size of the allocated block. + /// The latter is greater than or equal to `layout.size()`. 
+ /// To accomplish this, the allocator may extend or shrink + /// the allocation referenced by `ptr` to fit the new layout. /// /// If this returns `Ok`, then ownership of the memory block /// referenced by `ptr` has been transferred to this @@ -824,23 +813,25 @@ pub unsafe trait AllocRef { ptr: NonNull, layout: Layout, new_size: usize, - ) -> Result, AllocErr> { + ) -> Result<(NonNull, usize), AllocErr> { let old_size = layout.size(); - if new_size >= old_size { - if let Ok(()) = self.grow_in_place(ptr, layout, new_size) { - return Ok(ptr); + if new_size > old_size { + if let Ok(size) = self.grow_in_place(ptr, layout, new_size) { + return Ok((ptr, size)); } } else if new_size < old_size { - if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) { - return Ok(ptr); + if let Ok(size) = self.shrink_in_place(ptr, layout, new_size) { + return Ok((ptr, size)); } + } else { + return Ok((ptr, new_size)); } // otherwise, fall back on alloc + copy + dealloc. let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let result = self.alloc(new_layout); - if let Ok(new_ptr) = result { + if let Ok((new_ptr, _)) = result { ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size)); self.dealloc(ptr, layout); } @@ -877,174 +868,40 @@ pub unsafe trait AllocRef { ptr: NonNull, layout: Layout, new_size: usize, - ) -> Result, AllocErr> { + ) -> Result<(NonNull, usize), AllocErr> { let old_size = layout.size(); - if new_size >= old_size { - if let Ok(()) = self.grow_in_place_zeroed(ptr, layout, new_size) { - return Ok(ptr); + if new_size > old_size { + if let Ok(size) = self.grow_in_place_zeroed(ptr, layout, new_size) { + return Ok((ptr, size)); } } else if new_size < old_size { - if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) { - return Ok(ptr); + if let Ok(size) = self.shrink_in_place(ptr, layout, new_size) { + return Ok((ptr, size)); } + } else { + return Ok((ptr, new_size)); } // otherwise, fall back on alloc + copy + dealloc. let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let result = self.alloc_zeroed(new_layout); - if let Ok(new_ptr) = result { + if let Ok((new_ptr, _)) = result { ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size)); self.dealloc(ptr, layout); } result } - /// Behaves like `alloc`, but also ensures that the contents - /// are set to zero before being returned. - /// - /// # Safety - /// - /// This function is unsafe for the same reasons that `alloc` is. - /// - /// # Errors - /// - /// Returning `Err` indicates that either memory is exhausted or - /// `layout` does not meet allocator's size or alignment - /// constraints, just as in `alloc`. - /// - /// Clients wishing to abort computation in response to an - /// allocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { - let size = layout.size(); - let p = self.alloc(layout); - if let Ok(p) = p { - ptr::write_bytes(p.as_ptr(), 0, size); - } - p - } - - /// Behaves like `alloc`, but also returns the whole size of - /// the returned block. For some `layout` inputs, like arrays, this - /// may include extra storage usable for additional data. - /// - /// # Safety - /// - /// This function is unsafe for the same reasons that `alloc` is. 
- /// - /// # Errors - /// - /// Returning `Err` indicates that either memory is exhausted or - /// `layout` does not meet allocator's size or alignment - /// constraints, just as in `alloc`. - /// - /// Clients wishing to abort computation in response to an - /// allocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc_excess(&mut self, layout: Layout) -> Result { - let usable_size = self.usable_size(&layout); - self.alloc(layout).map(|p| Excess(p, usable_size.1)) - } - - /// Behaves like `alloc`, but also returns the whole size of - /// the returned block. For some `layout` inputs, like arrays, this - /// may include extra storage usable for additional data. - /// Also it ensures that the contents are set to zero before being returned. - /// - /// # Safety - /// - /// This function is unsafe for the same reasons that `alloc` is. - /// - /// # Errors - /// - /// Returning `Err` indicates that either memory is exhausted or - /// `layout` does not meet allocator's size or alignment - /// constraints, just as in `alloc`. - /// - /// Clients wishing to abort computation in response to an - /// allocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc_excess_zeroed(&mut self, layout: Layout) -> Result { - let usable_size = self.usable_size(&layout); - self.alloc_zeroed(layout).map(|p| Excess(p, usable_size.1)) - } - - /// Behaves like `realloc`, but also returns the whole size of - /// the returned block. For some `layout` inputs, like arrays, this - /// may include extra storage usable for additional data. - /// - /// # Safety - /// - /// This function is unsafe for the same reasons that `realloc` is. - /// - /// # Errors - /// - /// Returning `Err` indicates that either memory is exhausted or - /// `layout` does not meet allocator's size or alignment - /// constraints, just as in `realloc`. - /// - /// Clients wishing to abort computation in response to a - /// reallocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. - /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn realloc_excess( - &mut self, - ptr: NonNull, - layout: Layout, - new_size: usize, - ) -> Result { - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - let usable_size = self.usable_size(&new_layout); - self.realloc(ptr, layout, new_size).map(|p| Excess(p, usable_size.1)) - } - - /// Behaves like `realloc`, but also returns the whole size of - /// the returned block. For some `layout` inputs, like arrays, this - /// may include extra storage usable for additional data. - /// Also it ensures that the contents are set to zero before being returned. - /// - /// # Safety - /// - /// This function is unsafe for the same reasons that `realloc` is. - /// - /// # Errors - /// - /// Returning `Err` indicates that either memory is exhausted or - /// `layout` does not meet allocator's size or alignment - /// constraints, just as in `realloc`. - /// - /// Clients wishing to abort computation in response to a - /// reallocation error are encouraged to call the [`handle_alloc_error`] function, - /// rather than directly invoking `panic!` or similar. 
- /// - /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn realloc_excess_zeroed( - &mut self, - ptr: NonNull, - layout: Layout, - new_size: usize, - ) -> Result { - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - let usable_size = self.usable_size(&new_layout); - self.realloc_zeroed(ptr, layout, new_size).map(|p| Excess(p, usable_size.1)) - } - /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`. /// /// If this returns `Ok`, then the allocator has asserted that the /// memory block referenced by `ptr` now fits `new_size`, and thus can /// be used to carry data of a layout of that size and same alignment as - /// `layout`. (The allocator is allowed to - /// expend effort to accomplish this, such as extending the memory block to - /// include successor blocks, or virtual memory tricks.) + /// `layout`. The returned value is the new size of the allocated block. + /// (The allocator is allowed to expend effort to accomplish this, such + /// as extending the memory block to include successor blocks, or virtual + /// memory tricks.) /// /// Regardless of what this method returns, ownership of the /// memory block referenced by `ptr` has not been transferred, and @@ -1072,18 +929,17 @@ pub unsafe trait AllocRef { /// function; clients are expected either to be able to recover from /// `grow_in_place` failures without aborting, or to fall back on /// another reallocation method before resorting to an abort. + #[inline] unsafe fn grow_in_place( &mut self, ptr: NonNull, layout: Layout, new_size: usize, - ) -> Result<(), CannotReallocInPlace> { - let _ = ptr; // this default implementation doesn't care about the actual address. - debug_assert!(new_size >= layout.size()); - let (_l, u) = self.usable_size(&layout); - // _l <= layout.size() [guaranteed by usable_size()] - // layout.size() <= new_layout.size() [required by this method] - if new_size <= u { Ok(()) } else { Err(CannotReallocInPlace) } + ) -> Result { + let _ = ptr; + let _ = layout; + let _ = new_size; + Err(CannotReallocInPlace) } /// Behaves like `grow_in_place`, but also ensures that the new @@ -1108,10 +964,10 @@ pub unsafe trait AllocRef { ptr: NonNull, layout: Layout, new_size: usize, - ) -> Result<(), CannotReallocInPlace> { - self.grow_in_place(ptr, layout, new_size)?; + ) -> Result { + let size = self.grow_in_place(ptr, layout, new_size)?; ptr.as_ptr().add(layout.size()).write_bytes(0, new_size - layout.size()); - Ok(()) + Ok(size) } /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`. @@ -1119,7 +975,8 @@ pub unsafe trait AllocRef { /// If this returns `Ok`, then the allocator has asserted that the /// memory block referenced by `ptr` now fits `new_size`, and /// thus can only be used to carry data of that smaller - /// layout. (The allocator is allowed to take advantage of this, + /// layout. The returned value is the new size the allocated block. + /// (The allocator is allowed to take advantage of this, /// carving off portions of the block for reuse elsewhere.) The /// truncated contents of the block within the smaller layout are /// unaltered, and ownership of block has not been transferred. @@ -1153,17 +1010,16 @@ pub unsafe trait AllocRef { /// function; clients are expected either to be able to recover from /// `shrink_in_place` failures without aborting, or to fall back /// on another reallocation method before resorting to an abort. 
+ #[inline] unsafe fn shrink_in_place( &mut self, ptr: NonNull, layout: Layout, new_size: usize, - ) -> Result<(), CannotReallocInPlace> { - let _ = ptr; // this default implementation doesn't care about the actual address. - debug_assert!(new_size <= layout.size()); - let (l, _u) = self.usable_size(&layout); - // layout.size() <= _u [guaranteed by usable_size()] - // new_layout.size() <= layout.size() [required by this method] - if l <= new_size { Ok(()) } else { Err(CannotReallocInPlace) } + ) -> Result { + let _ = ptr; + let _ = layout; + let _ = new_size; + Err(CannotReallocInPlace) } } diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 8965c6860c4e9..2da18e06d99bf 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -137,13 +137,15 @@ pub struct System; #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl AllocRef for System { #[inline] - unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr) + unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { + NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr).map(|p| (p, layout.size())) } #[inline] - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr) + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull, usize), AllocErr> { + NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)) + .ok_or(AllocErr) + .map(|p| (p, layout.size())) } #[inline] @@ -157,8 +159,10 @@ unsafe impl AllocRef for System { ptr: NonNull, layout: Layout, new_size: usize, - ) -> Result, AllocErr> { - NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr) + ) -> Result<(NonNull, usize), AllocErr> { + NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)) + .ok_or(AllocErr) + .map(|p| (p, new_size)) } } diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs index 0b1f6d5a96e56..c275db14b427c 100644 --- a/src/test/ui/allocator/custom.rs +++ b/src/test/ui/allocator/custom.rs @@ -37,7 +37,7 @@ fn main() { unsafe { let layout = Layout::from_size_align(4, 2).unwrap(); - let ptr = Global.alloc(layout.clone()).unwrap(); + let (ptr, _) = Global.alloc(layout.clone()).unwrap(); helper::work_with(&ptr); assert_eq!(HITS.load(Ordering::SeqCst), n + 1); Global.dealloc(ptr, layout.clone()); @@ -49,7 +49,7 @@ fn main() { drop(s); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - let ptr = System.alloc(layout.clone()).unwrap(); + let (ptr, _) = System.alloc(layout.clone()).unwrap(); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); helper::work_with(&ptr); System.dealloc(ptr, layout); diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs index 37b28c195df5b..e4746d1a7ec09 100644 --- a/src/test/ui/allocator/xcrate-use.rs +++ b/src/test/ui/allocator/xcrate-use.rs @@ -20,13 +20,13 @@ fn main() { let n = GLOBAL.0.load(Ordering::SeqCst); let layout = Layout::from_size_align(4, 2).unwrap(); - let ptr = Global.alloc(layout.clone()).unwrap(); + let (ptr, _) = Global.alloc(layout.clone()).unwrap(); helper::work_with(&ptr); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1); Global.dealloc(ptr, layout.clone()); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - let ptr = System.alloc(layout.clone()).unwrap(); + let (ptr, _) = System.alloc(layout.clone()).unwrap(); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); helper::work_with(&ptr); 
System.dealloc(ptr, layout); diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs index 425aa83e70a85..eb6224ad1bbb6 100644 --- a/src/test/ui/realloc-16687.rs +++ b/src/test/ui/realloc-16687.rs @@ -41,13 +41,13 @@ unsafe fn test_triangle() -> bool { println!("allocate({:?})", layout); } - let ret = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); + let (ptr, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); if PRINT { - println!("allocate({:?}) = {:?}", layout, ret); + println!("allocate({:?}) = {:?}", layout, ptr); } - ret.cast().as_ptr() + ptr.cast().as_ptr() } unsafe fn deallocate(ptr: *mut u8, layout: Layout) { @@ -63,16 +63,16 @@ unsafe fn test_triangle() -> bool { println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new); } - let ret = Global.realloc(NonNull::new_unchecked(ptr), old, new.size()) + let (ptr, _) = Global.realloc(NonNull::new_unchecked(ptr), old, new.size()) .unwrap_or_else(|_| handle_alloc_error( Layout::from_size_align_unchecked(new.size(), old.align()) )); if PRINT { println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", - ptr, old, new, ret); + ptr, old, new, ptr); } - ret.cast().as_ptr() + ptr.cast().as_ptr() } fn idx_to_size(i: usize) -> usize { (i+1) * 10 } diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs index f50b1c8b17f28..fe3a864fe4ba5 100644 --- a/src/test/ui/regions/regions-mock-codegen.rs +++ b/src/test/ui/regions/regions-mock-codegen.rs @@ -24,29 +24,29 @@ struct Ccx { x: isize } -fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> { +fn alloc(_bcx: &arena) -> &Bcx<'_> { unsafe { let layout = Layout::new::(); - let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); + let (ptr, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); &*(ptr.as_ptr() as *const _) } } -fn h<'a>(bcx : &'a Bcx<'a>) -> &'a Bcx<'a> { +fn h<'a>(bcx: &'a Bcx<'a>) -> &'a Bcx<'a> { return alloc(bcx.fcx.arena); } -fn g(fcx : &Fcx) { - let bcx = Bcx { fcx: fcx }; +fn g(fcx: &Fcx) { + let bcx = Bcx { fcx }; let bcx2 = h(&bcx); unsafe { Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::()); } } -fn f(ccx : &Ccx) { +fn f(ccx: &Ccx) { let a = arena(()); - let fcx = Fcx { arena: &a, ccx: ccx }; + let fcx = Fcx { arena: &a, ccx }; return g(&fcx); }
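Taken together, the final patch changes the `AllocRef` contract so that every allocating call (`alloc`, `alloc_zeroed`, `realloc`) reports the size it actually handed out, and `grow_in_place`/`shrink_in_place` return the new block size instead of `()`. Below is a minimal, self-contained sketch of how a caller consumes the new `(NonNull<u8>, usize)` return value, written against the unstable `allocator_api` of this era; the `Chunk` type and its field names are illustrative and not part of the patch.

```
#![feature(allocator_api)]

use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};
use std::ptr::NonNull;

/// Illustrative owner of one heap block that remembers the usable size
/// reported by the allocator (always >= the requested `layout.size()`).
struct Chunk {
    ptr: NonNull<u8>,
    layout: Layout,
    usable: usize,
}

impl Chunk {
    fn new(size: usize, align: usize) -> Chunk {
        let layout = Layout::from_size_align(size, align).unwrap();
        // After this series, `alloc` yields both the pointer and the actual
        // size of the block, so any excess capacity is not silently lost.
        let (ptr, usable) = unsafe {
            Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout))
        };
        Chunk { ptr, layout, usable }
    }
}

impl Drop for Chunk {
    fn drop(&mut self) {
        // Deallocation still takes the layout the block was requested with.
        unsafe { Global.dealloc(self.ptr, self.layout) }
    }
}

fn main() {
    let c = Chunk::new(100, 8);
    assert!(c.usable >= 100);
    println!("requested 100 bytes, allocator granted {}", c.usable);
}
```

The only difference from the pre-patch API at a call site is destructuring the extra `usize` instead of binding a lone pointer, which is exactly the pattern the patch applies throughout `liballoc`, `libstd`, and the UI tests above.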