From 98ebb0c40c3dce7801a820b05e45a81d66d58bfd Mon Sep 17 00:00:00 2001 From: Mike Hommey Date: Fri, 3 May 2019 22:28:19 +0900 Subject: [PATCH 1/9] Remove the impl Alloc for System See https://github.com/rust-lang/wg-allocators/issues/2 --- src/liballoc/tests/heap.rs | 13 ++++------- src/libstd/alloc.rs | 28 ----------------------- src/test/run-pass/allocator/custom.rs | 6 ++--- src/test/run-pass/allocator/xcrate-use.rs | 4 ++-- 4 files changed, 9 insertions(+), 42 deletions(-) diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index c225ebfa96b91..a486ea8370891 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -1,4 +1,4 @@ -use std::alloc::{Global, Alloc, Layout, System}; +use std::alloc::{GlobalAlloc, Layout, System}; /// Issue #45955. #[test] @@ -6,21 +6,16 @@ fn alloc_system_overaligned_request() { check_overalign_requests(System) } -#[test] -fn std_heap_overaligned_request() { - check_overalign_requests(Global) } - -fn check_overalign_requests<T: Alloc>(mut allocator: T) { +fn check_overalign_requests<T: GlobalAlloc>(allocator: T) { let size = 8; let align = 16; // greater than size let iterations = 100; unsafe { let pointers: Vec<_> = (0..iterations).map(|_| { - allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap() + allocator.alloc(Layout::from_size_align(size, align).unwrap()) }).collect(); for &ptr in &pointers { - assert_eq!((ptr.as_ptr() as usize) % align, 0, + assert_eq!((ptr as usize) % align, 0, "Got a pointer less aligned than requested") } diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index ff52974775b05..f09176d149eee 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -63,7 +63,6 @@ use core::sync::atomic::{AtomicPtr, Ordering}; use core::{mem, ptr}; -use core::ptr::NonNull; use crate::sys_common::util::dumb_print; @@ -133,33 +132,6 @@ pub use alloc_crate::alloc::*; #[derive(Debug, Default, Copy, Clone)] pub struct System; -// The Alloc impl just forwards to the GlobalAlloc impl, which is in `std::sys::*::alloc`. -#[unstable(feature = "allocator_api", issue = "32838")] -unsafe impl Alloc for System { - #[inline] - unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> { - NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr) - } - - #[inline] - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> { - NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr) - } - - #[inline] - unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) { - GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) - } - - #[inline] - unsafe fn realloc(&mut self, - ptr: NonNull<u8>, - layout: Layout, - new_size: usize) -> Result<NonNull<u8>, AllocErr> { - NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr) - } -} - static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut()); /// Registers a custom allocation error hook, replacing any that was previously registered. 
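[Editor's note] With the forwarding `Alloc` impl removed, user code reaches `System` only through the `GlobalAlloc` trait (or as the `#[global_allocator]`), and its allocation methods hand back raw, possibly-null pointers rather than `Result<NonNull<u8>, AllocErr>`. A minimal sketch of what a caller looks like after this patch, using only the stable `GlobalAlloc` API — the null check stands in for the `.unwrap()` the tests above dropped:

```
use std::alloc::{GlobalAlloc, Layout, System};

fn main() {
    // Mirrors the over-aligned request exercised by the heap.rs test above.
    let layout = Layout::from_size_align(8, 16).unwrap();
    unsafe {
        // GlobalAlloc::alloc returns a raw *mut u8; a null pointer signals failure,
        // so the caller checks for null instead of unwrapping a Result.
        let ptr = System.alloc(layout);
        assert!(!ptr.is_null(), "allocation failed");
        assert_eq!(ptr as usize % layout.align(), 0);
        System.dealloc(ptr, layout);
    }
}
```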
diff --git a/src/test/run-pass/allocator/custom.rs b/src/test/run-pass/allocator/custom.rs index 71f72ae46c23f..5439e6d820955 100644 --- a/src/test/run-pass/allocator/custom.rs +++ b/src/test/run-pass/allocator/custom.rs @@ -7,14 +7,14 @@ extern crate helper; -use std::alloc::{self, Global, Alloc, System, Layout}; +use std::alloc::{Global, Alloc, GlobalAlloc, System, Layout}; use std::sync::atomic::{AtomicUsize, Ordering}; static HITS: AtomicUsize = AtomicUsize::new(0); struct A; -unsafe impl alloc::GlobalAlloc for A { +unsafe impl GlobalAlloc for A { unsafe fn alloc(&self, layout: Layout) -> *mut u8 { HITS.fetch_add(1, Ordering::SeqCst); System.alloc(layout) @@ -49,7 +49,7 @@ fn main() { drop(s); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - let ptr = System.alloc(layout.clone()).unwrap(); + let ptr = System.alloc(layout.clone()); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); helper::work_with(&ptr); System.dealloc(ptr, layout); diff --git a/src/test/run-pass/allocator/xcrate-use.rs b/src/test/run-pass/allocator/xcrate-use.rs index 039c70e77bedf..98be512f00a07 100644 --- a/src/test/run-pass/allocator/xcrate-use.rs +++ b/src/test/run-pass/allocator/xcrate-use.rs @@ -9,7 +9,7 @@ extern crate custom; extern crate helper; -use std::alloc::{Global, Alloc, System, Layout}; +use std::alloc::{Global, Alloc, GlobalAlloc, System, Layout}; use std::sync::atomic::{Ordering, AtomicUsize}; #[global_allocator] @@ -26,7 +26,7 @@ fn main() { Global.dealloc(ptr, layout.clone()); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - let ptr = System.alloc(layout.clone()).unwrap(); + let ptr = System.alloc(layout.clone()); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); helper::work_with(&ptr); System.dealloc(ptr, layout); From 3cbdf188f4e9edef63b7a301ece8ea8900d81fc8 Mon Sep 17 00:00:00 2001 From: Mike Hommey Date: Wed, 13 Feb 2019 17:38:37 +0900 Subject: [PATCH 2/9] Associate an allocator to boxes This turns `Box<T>` into `Box<T, A>`, with an `A: Alloc` bound for impls. Ideally, inherent methods like `Box::new` would be applied to `Box<T, A>`, but as of now, that would be backwards incompatible because it would require type annotations in places where they currently aren't required. `impl FromIterator` is not covered because it relies on `Vec`, which would need allocator awareness. `DispatchFromDyn` is left out of being generic over `A` because there is no bound that would make it work currently. `FnBox` is left out because it's related to `DispatchFromDyn`. --- src/liballoc/alloc.rs | 24 +- src/liballoc/boxed.rs | 273 ++++++++++++------ src/liballoc/raw_vec.rs | 9 +- src/liballoc/rc.rs | 2 +- src/liballoc/str.rs | 5 +- src/liballoc/sync.rs | 2 +- .../e0119/conflict-with-std.stderr | 2 +- src/test/ui/issues/issue-14092.rs | 2 +- src/test/ui/issues/issue-14092.stderr | 4 +- src/test/ui/issues/issue-41974.stderr | 6 +- 10 files changed, 210 insertions(+), 119 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 755feb8496203..b12eb1415d086 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -40,10 +40,14 @@ extern "Rust" { /// accessed through the [free functions in `alloc`](index.html#functions). /// /// [`Alloc`]: trait.Alloc.html +#[cfg(not(test))] #[unstable(feature = "allocator_api", issue = "32838")] #[derive(Copy, Clone, Default, Debug)] pub struct Global; +#[cfg(test)] +pub use std::alloc::Global; + /// Allocate memory with the global allocator. 
/// /// This function forwards calls to the [`GlobalAlloc::alloc`] method @@ -163,6 +167,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 { __rust_alloc_zeroed(layout.size(), layout.align()) } +#[cfg(not(test))] #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl Alloc for Global { #[inline] @@ -201,25 +206,22 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { align as *mut u8 } else { let layout = Layout::from_size_align_unchecked(size, align); - let ptr = alloc(layout); - if !ptr.is_null() { - ptr - } else { - handle_alloc_error(layout) + match Global.alloc(layout) { + Ok(ptr) => ptr.as_ptr(), + Err(_) => handle_alloc_error(layout), } } } #[cfg_attr(not(test), lang = "box_free")] #[inline] -pub(crate) unsafe fn box_free(ptr: Unique) { - let ptr = ptr.as_ptr(); - let size = size_of_val(&*ptr); - let align = min_align_of_val(&*ptr); - // We do not allocate for Box when T is ZST, so deallocation is also not necessary. +pub(crate) unsafe fn box_free(ptr: Unique, mut a: A) { + let size = size_of_val(&*ptr.as_ptr()); + let align = min_align_of_val(&*ptr.as_ptr()); + // We do not allocate for Box when T is ZST, so deallocation is also not necessary. if size != 0 { let layout = Layout::from_size_align_unchecked(size, align); - dealloc(ptr as *mut u8, layout); + a.dealloc(NonNull::from(ptr).cast(), layout); } } diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 76b660fba685c..6f6433bc5c936 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -88,6 +88,7 @@ use core::ops::{ use core::ptr::{self, NonNull, Unique}; use core::task::{Context, Poll}; +use crate::alloc::{Alloc, Global, Layout, handle_alloc_error}; use crate::vec::Vec; use crate::raw_vec::RawVec; use crate::str::from_boxed_utf8_unchecked; @@ -98,7 +99,7 @@ use crate::str::from_boxed_utf8_unchecked; #[lang = "owned_box"] #[fundamental] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Box(Unique); +pub struct Box(Unique, pub(crate) A); impl Box { /// Allocates memory on the heap and then places `x` into it. @@ -125,6 +126,49 @@ impl Box { } } +impl Box { + /// Allocates memory in the given allocator and then places `x` into it. + /// + /// This doesn't actually allocate if `T` is zero-sized. + /// + /// # Examples + /// + /// ``` + /// # #![feature(allocator_api)] + /// use std::alloc::Global; + /// let five = Box::new_in(5, Global); + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline(always)] + pub fn new_in(x: T, a: A) -> Box { + let mut a = a; + let layout = Layout::for_value(&x); + let size = layout.size(); + let ptr = if size == 0 { + Unique::empty() + } else { + unsafe { + let ptr = a.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); + ptr.cast().into() + } + }; + // Move x into the location allocated above. This needs to happen + // for any size so that x is not dropped in some cases. + unsafe { + ptr::write(ptr.as_ptr() as *mut T, x); + } + Box(ptr, a) + } + + /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then + /// `x` will be pinned in memory and unable to be moved. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline(always)] + pub fn pin_in(x: T, a: A) -> Pin> { + Box::new_in(x, a).into() + } +} + impl Box { /// Constructs a box from a raw pointer. 
/// @@ -165,9 +209,48 @@ impl Box { #[stable(feature = "box_raw", since = "1.4.0")] #[inline] pub unsafe fn from_raw(raw: *mut T) -> Self { - Box(Unique::new_unchecked(raw)) + Box(Unique::new_unchecked(raw), Global) + } +} + +impl Box { + /// Constructs a box from a raw pointer in the given allocator. + /// + /// This is similar to the [`Box::from_raw`] function, but assumes + /// the pointer was allocated with the given allocator. + /// + /// This function is unsafe because improper use may lead to + /// memory problems. For example, specifying the wrong allocator + /// may corrupt the allocator state. + /// + /// [`Box::from_raw`]: struct.Box.html#method.from_raw + /// + /// # Examples + /// + /// ``` + /// # #![feature(allocator_api)] + /// use std::alloc::Global; + /// let x = Box::new_in(5, Global); + /// let ptr = Box::into_raw(x); + /// let x = unsafe { Box::from_raw_in(ptr, Global) }; + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub unsafe fn from_raw_in(raw: *mut T, a: A) -> Self { + Box(Unique::new_unchecked(raw), a) } + /// Maps a `Box` to `Box` by applying a function to the + /// raw pointer. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub unsafe fn map_raw *mut U>(b: Box, f: F) -> Box { + let a = ptr::read(&b.1); + Box::from_raw_in(f(Box::into_raw(b)), a) + } +} + +impl Box { /// Consumes the `Box`, returning a wrapped raw pointer. /// /// The pointer will be properly aligned and non-null. @@ -210,7 +293,7 @@ impl Box { /// [`Box::from_raw`]: struct.Box.html#method.from_raw #[stable(feature = "box_raw", since = "1.4.0")] #[inline] - pub fn into_raw(b: Box) -> *mut T { + pub fn into_raw(b: Box) -> *mut T { Box::into_raw_non_null(b).as_ptr() } @@ -246,14 +329,14 @@ impl Box { /// ``` #[unstable(feature = "box_into_raw_non_null", issue = "47336")] #[inline] - pub fn into_raw_non_null(b: Box) -> NonNull { + pub fn into_raw_non_null(b: Box) -> NonNull { Box::into_unique(b).into() } #[unstable(feature = "ptr_internals", issue = "0", reason = "use into_raw_non_null instead")] #[inline] #[doc(hidden)] - pub fn into_unique(b: Box) -> Unique { + pub fn into_unique(b: Box) -> Unique { let mut unique = b.0; mem::forget(b); // Box is kind-of a library type, but recognized as a "unique pointer" by @@ -308,7 +391,7 @@ impl Box { /// ``` #[stable(feature = "box_leak", since = "1.26.0")] #[inline] - pub fn leak<'a>(b: Box) -> &'a mut T + pub fn leak<'a>(b: Box) -> &'a mut T where T: 'a // Technically not needed, but kept to be explicit. { @@ -321,7 +404,7 @@ impl Box { /// /// This is also available via [`From`]. #[unstable(feature = "box_into_pin", issue = "0")] - pub fn into_pin(boxed: Box) -> Pin> { + pub fn into_pin(boxed: Box) -> Pin> { // It's not possible to move or replace the insides of a `Pin>` // when `T: !Unpin`, so it's safe to pin it directly without any // additional requirements. @@ -330,36 +413,36 @@ impl Box { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Box { +unsafe impl<#[may_dangle] T: ?Sized, A> Drop for Box { fn drop(&mut self) { // FIXME: Do nothing, drop is currently performed by compiler. } } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for Box { - /// Creates a `Box`, with the `Default` value for T. - fn default() -> Box { - box Default::default() +impl Default for Box { + /// Creates a `Box`, with the `Default` value for T. 
+ fn default() -> Box { + Box::new_in(Default::default(), A::default()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for Box<[T]> { - fn default() -> Box<[T]> { - Box::<[T; 0]>::new([]) +impl Default for Box<[T], A> { + fn default() -> Box<[T], A> { + Box::<[T; 0], A>::new_in([], A::default()) } } #[stable(feature = "default_box_extra", since = "1.17.0")] -impl Default for Box { - fn default() -> Box { +impl Default for Box { + fn default() -> Box { unsafe { from_boxed_utf8_unchecked(Default::default()) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Box { +impl Clone for Box { /// Returns a new box with a `clone()` of this box's contents. /// /// # Examples @@ -370,8 +453,8 @@ impl Clone for Box { /// ``` #[rustfmt::skip] #[inline] - fn clone(&self) -> Box { - box { (**self).clone() } + fn clone(&self) -> Box { + Box::new_in((**self).clone(), self.1.clone()) } /// Copies `source`'s contents into `self` without creating a new allocation. /// @@ -386,17 +469,17 @@ impl Clone for Box { /// assert_eq!(*y, 5); /// ``` #[inline] - fn clone_from(&mut self, source: &Box) { + fn clone_from(&mut self, source: &Box) { (**self).clone_from(&(**source)); } } #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl Clone for Box { +impl Clone for Box { fn clone(&self) -> Self { // this makes a copy of the data - let buf: Box<[u8]> = self.as_bytes().into(); + let buf = Box::<[u8], A>::from_slice_in(self.as_bytes(), self.1.clone()); unsafe { from_boxed_utf8_unchecked(buf) } @@ -404,58 +487,58 @@ impl Clone for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for Box { +impl PartialEq for Box { #[inline] - fn eq(&self, other: &Box) -> bool { + fn eq(&self, other: &Box) -> bool { PartialEq::eq(&**self, &**other) } #[inline] - fn ne(&self, other: &Box) -> bool { + fn ne(&self, other: &Box) -> bool { PartialEq::ne(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Box { +impl PartialOrd for Box { #[inline] - fn partial_cmp(&self, other: &Box) -> Option { + fn partial_cmp(&self, other: &Box) -> Option { PartialOrd::partial_cmp(&**self, &**other) } #[inline] - fn lt(&self, other: &Box) -> bool { + fn lt(&self, other: &Box) -> bool { PartialOrd::lt(&**self, &**other) } #[inline] - fn le(&self, other: &Box) -> bool { + fn le(&self, other: &Box) -> bool { PartialOrd::le(&**self, &**other) } #[inline] - fn ge(&self, other: &Box) -> bool { + fn ge(&self, other: &Box) -> bool { PartialOrd::ge(&**self, &**other) } #[inline] - fn gt(&self, other: &Box) -> bool { + fn gt(&self, other: &Box) -> bool { PartialOrd::gt(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Box { +impl Ord for Box { #[inline] - fn cmp(&self, other: &Box) -> Ordering { + fn cmp(&self, other: &Box) -> Ordering { Ord::cmp(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Box {} +impl Eq for Box {} #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Box { +impl Hash for Box { fn hash(&self, state: &mut H) { (**self).hash(state); } } #[stable(feature = "indirect_hasher_impl", since = "1.22.0")] -impl Hasher for Box { +impl Hasher for Box { fn finish(&self) -> u64 { (**self).finish() } @@ -501,7 +584,7 @@ impl Hasher for Box { } #[stable(feature = "from_for_ptrs", since = "1.6.0")] -impl From for Box { +impl From for Box { /// Converts a generic type `T` into a `Box` /// /// The conversion allocates on the heap and moves `t` @@ -515,22 +598,33 @@ impl From for Box { /// 
assert_eq!(Box::from(x), boxed); /// ``` fn from(t: T) -> Self { - Box::new(t) + Box::new_in(t, A::default()) } } #[stable(feature = "pin", since = "1.33.0")] -impl From> for Pin> { +impl From> for Pin> { /// Converts a `Box` into a `Pin>` /// /// This conversion does not allocate on the heap and happens in place. - fn from(boxed: Box) -> Self { + fn from(boxed: Box) -> Self { Box::into_pin(boxed) } } +impl Box<[T], A> { + fn from_slice_in(slice: &[T], a: A) -> Box<[T], A> { + let len = slice.len(); + let buf = RawVec::with_capacity_in(len, a); + unsafe { + ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); + buf.into_box() + } + } +} + #[stable(feature = "box_from_slice", since = "1.17.0")] -impl From<&[T]> for Box<[T]> { +impl From<&[T]> for Box<[T], A> { /// Converts a `&[T]` into a `Box<[T]>` /// /// This conversion allocates on the heap @@ -544,18 +638,13 @@ impl From<&[T]> for Box<[T]> { /// /// println!("{:?}", boxed_slice); /// ``` - fn from(slice: &[T]) -> Box<[T]> { - let len = slice.len(); - let buf = RawVec::with_capacity(len); - unsafe { - ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); - buf.into_box() - } + fn from(slice: &[T]) -> Box<[T], A> { + Box::<_, _>::from_slice_in(slice, A::default()) } } #[stable(feature = "box_from_slice", since = "1.17.0")] -impl From<&str> for Box { +impl From<&str> for Box { /// Converts a `&str` into a `Box` /// /// This conversion allocates on the heap @@ -567,13 +656,13 @@ impl From<&str> for Box { /// println!("{}", boxed); /// ``` #[inline] - fn from(s: &str) -> Box { + fn from(s: &str) -> Box { unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) } } } #[stable(feature = "boxed_str_conv", since = "1.19.0")] -impl From> for Box<[u8]> { +impl From> for Box<[u8], A> { /// Converts a `Box>` into a `Box<[u8]>` /// /// This conversion does not allocate on the heap and happens in place. @@ -591,12 +680,12 @@ impl From> for Box<[u8]> { /// assert_eq!(boxed_slice, boxed_str); /// ``` #[inline] - fn from(s: Box) -> Self { - unsafe { Box::from_raw(Box::into_raw(s) as *mut [u8]) } + fn from(s: Box) -> Self { + unsafe { Box::map_raw(s, |p| p as *mut [u8]) } } } -impl Box { +impl Box { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. @@ -618,19 +707,16 @@ impl Box { /// print_if_string(Box::new(0i8)); /// } /// ``` - pub fn downcast(self) -> Result, Box> { + pub fn downcast(self) -> Result, Box> { if self.is::() { - unsafe { - let raw: *mut dyn Any = Box::into_raw(self); - Ok(Box::from_raw(raw as *mut T)) - } + unsafe { Ok(Box::map_raw(self, |p| p as *mut T)) } } else { Err(self) } } } -impl Box { +impl Box { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. 
@@ -652,30 +738,30 @@ impl Box { /// print_if_string(Box::new(0i8)); /// } /// ``` - pub fn downcast(self) -> Result, Box> { - >::downcast(self).map_err(|s| unsafe { + pub fn downcast(self) -> Result, Box> { + >::downcast(self).map_err(|s| unsafe { // reapply the Send marker - Box::from_raw(Box::into_raw(s) as *mut (dyn Any + Send)) + Box::map_raw(s, |p| p as *mut (dyn Any + Send)) }) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Display for Box { +impl fmt::Display for Box { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Box { +impl fmt::Debug for Box { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Pointer for Box { +impl fmt::Pointer for Box { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // It's not possible to extract the inner Uniq directly from the Box, // instead we cast it to a *const which aliases the Unique @@ -685,7 +771,7 @@ impl fmt::Pointer for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl Deref for Box { +impl Deref for Box { type Target = T; fn deref(&self) -> &T { @@ -694,17 +780,17 @@ impl Deref for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl DerefMut for Box { +impl DerefMut for Box { fn deref_mut(&mut self) -> &mut T { &mut **self } } #[unstable(feature = "receiver_trait", issue = "0")] -impl Receiver for Box {} +impl Receiver for Box {} #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Box { +impl Iterator for Box { type Item = I::Item; fn next(&mut self) -> Option { (**self).next() @@ -717,7 +803,7 @@ impl Iterator for Box { } } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Box { +impl DoubleEndedIterator for Box { fn next_back(&mut self) -> Option { (**self).next_back() } @@ -726,7 +812,7 @@ impl DoubleEndedIterator for Box { } } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Box { +impl ExactSizeIterator for Box { fn len(&self) -> usize { (**self).len() } @@ -736,10 +822,10 @@ impl ExactSizeIterator for Box { } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Box {} +impl FusedIterator for Box {} #[stable(feature = "boxed_closure_impls", since = "1.35.0")] -impl + ?Sized> FnOnce for Box { +impl + ?Sized, Alloc> FnOnce for Box { type Output = >::Output; extern "rust-call" fn call_once(self, args: A) -> Self::Output { @@ -748,14 +834,14 @@ impl + ?Sized> FnOnce for Box { } #[stable(feature = "boxed_closure_impls", since = "1.35.0")] -impl + ?Sized> FnMut for Box { +impl + ?Sized, Alloc> FnMut for Box { extern "rust-call" fn call_mut(&mut self, args: A) -> Self::Output { >::call_mut(self, args) } } #[stable(feature = "boxed_closure_impls", since = "1.35.0")] -impl + ?Sized> Fn for Box { +impl + ?Sized, Alloc> Fn for Box { extern "rust-call" fn call(&self, args: A) -> Self::Output { >::call(self, args) } @@ -841,8 +927,9 @@ impl FnBox for F } #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized> for Box {} +impl, U: ?Sized, A> CoerceUnsized> for Box {} +//FIXME: Make generic over A when the compiler supports it. 
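[Editor's note] To make the shape of the change described in the commit message above concrete: the box now carries its allocator as a second field, uses it in `new_in`, and hands it back to `box_free` on drop. The following is a small self-contained analogue of that idea written against the stable `GlobalAlloc` API rather than the unstable `Alloc` trait the patch uses; `MyBox` and its methods are hypothetical stand-ins for the patch's `Box(Unique<T>, A)`, not real liballoc code:

```
use std::alloc::{GlobalAlloc, Layout, System};
use std::ptr::{self, NonNull};

// Hypothetical stand-in for the patch's Box<T, A>: the allocator travels with the pointer.
struct MyBox<T, A: GlobalAlloc> {
    ptr: NonNull<T>,
    alloc: A,
}

impl<T, A: GlobalAlloc> MyBox<T, A> {
    // Analogue of Box::new_in: allocate from `alloc`, move `value` in, keep the allocator.
    fn new_in(value: T, alloc: A) -> Self {
        let layout = Layout::new::<T>();
        assert!(layout.size() != 0, "this sketch skips the ZST path the real patch handles");
        unsafe {
            let raw = alloc.alloc(layout) as *mut T;
            let ptr = NonNull::new(raw).expect("allocation failed");
            ptr::write(ptr.as_ptr(), value);
            MyBox { ptr, alloc }
        }
    }
}

impl<T, A: GlobalAlloc> Drop for MyBox<T, A> {
    fn drop(&mut self) {
        unsafe {
            // Drop the value, then return the memory to the same allocator it came from.
            ptr::drop_in_place(self.ptr.as_ptr());
            self.alloc.dealloc(self.ptr.as_ptr() as *mut u8, Layout::new::<T>());
        }
    }
}

fn main() {
    let b = MyBox::new_in(5i32, System);
    assert_eq!(unsafe { *b.ptr.as_ptr() }, 5);
}
```

Storing the allocator inside the box is what lets the patch's `box_free(ptr, a)` deallocate through the same allocator the value was allocated from.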
#[unstable(feature = "dispatch_from_dyn", issue = "0")] impl, U: ?Sized> DispatchFromDyn> for Box {} @@ -854,10 +941,10 @@ impl FromIterator for Box<[A]> { } #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl Clone for Box<[T]> { +impl Clone for Box<[T], A> { fn clone(&self) -> Self { let mut new = BoxBuilder { - data: RawVec::with_capacity(self.len()), + data: RawVec::with_capacity_in(self.len(), self.1.clone()), len: 0, }; @@ -875,20 +962,20 @@ impl Clone for Box<[T]> { return unsafe { new.into_box() }; // Helper type for responding to panics correctly. - struct BoxBuilder { - data: RawVec, + struct BoxBuilder { + data: RawVec, len: usize, } - impl BoxBuilder { - unsafe fn into_box(self) -> Box<[T]> { + impl BoxBuilder { + unsafe fn into_box(self) -> Box<[T], A> { let raw = ptr::read(&self.data); mem::forget(self); raw.into_box() } } - impl Drop for BoxBuilder { + impl Drop for BoxBuilder { fn drop(&mut self) { let mut data = self.data.ptr(); let max = unsafe { data.add(self.len) }; @@ -905,28 +992,28 @@ impl Clone for Box<[T]> { } #[stable(feature = "box_borrow", since = "1.1.0")] -impl borrow::Borrow for Box { +impl borrow::Borrow for Box { fn borrow(&self) -> &T { &**self } } #[stable(feature = "box_borrow", since = "1.1.0")] -impl borrow::BorrowMut for Box { +impl borrow::BorrowMut for Box { fn borrow_mut(&mut self) -> &mut T { &mut **self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl AsRef for Box { +impl AsRef for Box { fn as_ref(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl AsMut for Box { +impl AsMut for Box { fn as_mut(&mut self) -> &mut T { &mut **self } @@ -955,10 +1042,10 @@ impl AsMut for Box { * could have a method to project a Pin from it. */ #[stable(feature = "pin", since = "1.33.0")] -impl Unpin for Box { } +impl Unpin for Box { } #[unstable(feature = "generator_trait", issue = "43122")] -impl Generator for Box { +impl Generator for Box { type Yield = G::Yield; type Return = G::Return; @@ -968,7 +1055,7 @@ impl Generator for Box { } #[unstable(feature = "generator_trait", issue = "43122")] -impl Generator for Pin> { +impl Generator for Pin> { type Yield = G::Yield; type Return = G::Return; @@ -978,7 +1065,7 @@ impl Generator for Pin> { } #[stable(feature = "futures_api", since = "1.36.0")] -impl Future for Box { +impl Future for Box { type Output = F::Output; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 0454a56443579..c40624b14c7f0 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -682,8 +682,8 @@ impl RawVec { } -impl RawVec { - /// Converts the entire buffer into `Box<[T]>`. +impl RawVec { + /// Converts the entire buffer into `Box<[T], A>`. /// /// Note that this will correctly reconstitute any `cap` changes /// that may have been performed. (see description of type for details) @@ -693,10 +693,11 @@ impl RawVec { /// All elements of `RawVec` must be initialized. Notice that /// the rules around uninitialized boxed values are not finalized yet, /// but until they are, it is advisable to avoid them. - pub unsafe fn into_box(self) -> Box<[T]> { + pub unsafe fn into_box(self) -> Box<[T], A> { // NOTE: not calling `cap()` here, actually using the real `cap` field! 
let slice = slice::from_raw_parts_mut(self.ptr(), self.cap); - let output: Box<[T]> = Box::from_raw(slice); + let a = ptr::read(&self.a); + let output: Box<[T], A> = Box::from_raw_in(slice, a); mem::forget(self); output } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index c827e218b2fb3..47b4d80cc5e16 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -729,7 +729,7 @@ impl Rc { value_size); // Free the allocation without dropping its contents - box_free(box_unique); + box_free::<_, Global>(box_unique, Global); Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } } diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs index 0c7d2b837a39a..893d1f69d8870 100644 --- a/src/liballoc/str.rs +++ b/src/liballoc/str.rs @@ -35,6 +35,7 @@ use core::ptr; use core::iter::FusedIterator; use core::unicode::conversions; +use crate::alloc::Alloc; use crate::borrow::ToOwned; use crate::boxed::Box; use crate::slice::{SliceConcatExt, SliceIndex}; @@ -585,6 +586,6 @@ impl str { /// ``` #[stable(feature = "str_box_extras", since = "1.20.0")] #[inline] -pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box { - Box::from_raw(Box::into_raw(v) as *mut str) +pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8], A>) -> Box { + Box::map_raw(v, |p| p as *mut str) } diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index 70865656c510e..97d655e8456ab 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -615,7 +615,7 @@ impl Arc { value_size); // Free the allocation without dropping its contents - box_free(box_unique); + box_free::<_, Global>(box_unique, Global); Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } } diff --git a/src/test/ui/error-codes/e0119/conflict-with-std.stderr b/src/test/ui/error-codes/e0119/conflict-with-std.stderr index 3e0c71e907481..7f8c4e6b32ac2 100644 --- a/src/test/ui/error-codes/e0119/conflict-with-std.stderr +++ b/src/test/ui/error-codes/e0119/conflict-with-std.stderr @@ -5,7 +5,7 @@ LL | impl AsRef for Box { | ^^^^^^^^^^^^^^^^^^^^^^^^ | = note: conflicting implementation in crate `alloc`: - - impl std::convert::AsRef for std::boxed::Box + - impl std::convert::AsRef for std::boxed::Box where T: ?Sized; error[E0119]: conflicting implementations of trait `std::convert::From` for type `S`: diff --git a/src/test/ui/issues/issue-14092.rs b/src/test/ui/issues/issue-14092.rs index 77da6badde948..3cfaa20a8b5ab 100644 --- a/src/test/ui/issues/issue-14092.rs +++ b/src/test/ui/issues/issue-14092.rs @@ -1,4 +1,4 @@ fn fn1(0: Box) {} - //~^ ERROR wrong number of type arguments: expected 1, found 0 [E0107] + //~^ ERROR wrong number of type arguments: expected at least 1, found 0 [E0107] fn main() {} diff --git a/src/test/ui/issues/issue-14092.stderr b/src/test/ui/issues/issue-14092.stderr index 626830ece8c11..b749c44780d96 100644 --- a/src/test/ui/issues/issue-14092.stderr +++ b/src/test/ui/issues/issue-14092.stderr @@ -1,8 +1,8 @@ -error[E0107]: wrong number of type arguments: expected 1, found 0 +error[E0107]: wrong number of type arguments: expected at least 1, found 0 --> $DIR/issue-14092.rs:1:11 | LL | fn fn1(0: Box) {} - | ^^^ expected 1 type argument + | ^^^ expected at least 1 type argument error: aborting due to previous error diff --git a/src/test/ui/issues/issue-41974.stderr b/src/test/ui/issues/issue-41974.stderr index 20121878a0754..3ca9ed724b56b 100644 --- a/src/test/ui/issues/issue-41974.stderr +++ b/src/test/ui/issues/issue-41974.stderr @@ -1,13 +1,13 @@ -error[E0119]: conflicting implementations of trait `std::ops::Drop` for 
type `std::boxed::Box<_>`: +error[E0119]: conflicting implementations of trait `std::ops::Drop` for type `std::boxed::Box<_, _>`: --> $DIR/issue-41974.rs:7:1 | LL | impl Drop for T where T: A { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: conflicting implementation in crate `alloc`: - - impl std::ops::Drop for std::boxed::Box + - impl std::ops::Drop for std::boxed::Box where T: ?Sized; - = note: downstream crates may implement trait `A` for type `std::boxed::Box<_>` + = note: downstream crates may implement trait `A` for type `std::boxed::Box<_, _>` error[E0120]: the Drop trait may only be implemented on structures --> $DIR/issue-41974.rs:7:18 From 53538debdab54ce426fbe0bf56726bc00e75226a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 26 Dec 2017 19:43:47 -0500 Subject: [PATCH 3/9] Add associated error type to allocators We will use this for fallibility polymorphism --- src/liballoc/alloc.rs | 6 +++ src/liballoc/boxed.rs | 39 +++++++------- src/liballoc/raw_vec.rs | 38 +++++++------ src/liballoc/str.rs | 4 +- src/liballoc/string.rs | 2 +- src/libcore/alloc.rs | 115 +++++++++++++++++++++++++--------------- 6 files changed, 118 insertions(+), 86 deletions(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index b12eb1415d086..57ed2f4faa4bc 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -167,6 +167,12 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 { __rust_alloc_zeroed(layout.size(), layout.align()) } +#[cfg(not(test))] +#[unstable(feature = "allocator_api", issue = "32838")] +impl AllocHelper for Global { + type Err = AllocErr; +} + #[cfg(not(test))] #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl Alloc for Global { diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 6f6433bc5c936..3a830eb031d3e 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -88,7 +88,7 @@ use core::ops::{ use core::ptr::{self, NonNull, Unique}; use core::task::{Context, Poll}; -use crate::alloc::{Alloc, Global, Layout, handle_alloc_error}; +use crate::alloc::{Alloc, AllocErr, Global, Layout, handle_alloc_error}; use crate::vec::Vec; use crate::raw_vec::RawVec; use crate::str::from_boxed_utf8_unchecked; @@ -126,7 +126,7 @@ impl Box { } } -impl Box { +impl> Box { /// Allocates memory in the given allocator and then places `x` into it. /// /// This doesn't actually allocate if `T` is zero-sized. @@ -213,7 +213,7 @@ impl Box { } } -impl Box { +impl Box { /// Constructs a box from a raw pointer in the given allocator. /// /// This is similar to the [`Box::from_raw`] function, but assumes @@ -420,7 +420,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A> Drop for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for Box { +impl + Default> Default for Box { /// Creates a `Box`, with the `Default` value for T. fn default() -> Box { Box::new_in(Default::default(), A::default()) @@ -428,21 +428,21 @@ impl Default for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for Box<[T], A> { +impl + Default> Default for Box<[T], A> { fn default() -> Box<[T], A> { Box::<[T; 0], A>::new_in([], A::default()) } } #[stable(feature = "default_box_extra", since = "1.17.0")] -impl Default for Box { +impl + Default> Default for Box { fn default() -> Box { unsafe { from_boxed_utf8_unchecked(Default::default()) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Box { +impl + Clone> Clone for Box { /// Returns a new box with a `clone()` of this box's contents. 
/// /// # Examples @@ -474,9 +474,8 @@ impl Clone for Box { } } - #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl Clone for Box { +impl + Clone> Clone for Box { fn clone(&self) -> Self { // this makes a copy of the data let buf = Box::<[u8], A>::from_slice_in(self.as_bytes(), self.1.clone()); @@ -584,7 +583,7 @@ impl Hasher for Box { } #[stable(feature = "from_for_ptrs", since = "1.6.0")] -impl From for Box { +impl + Default> From for Box { /// Converts a generic type `T` into a `Box` /// /// The conversion allocates on the heap and moves `t` @@ -612,7 +611,7 @@ impl From> for Pin> { } } -impl Box<[T], A> { +impl> Box<[T], A> { fn from_slice_in(slice: &[T], a: A) -> Box<[T], A> { let len = slice.len(); let buf = RawVec::with_capacity_in(len, a); @@ -624,7 +623,7 @@ impl Box<[T], A> { } #[stable(feature = "box_from_slice", since = "1.17.0")] -impl From<&[T]> for Box<[T], A> { +impl + Default> From<&[T]> for Box<[T], A> { /// Converts a `&[T]` into a `Box<[T]>` /// /// This conversion allocates on the heap @@ -644,7 +643,7 @@ impl From<&[T]> for Box<[T], A> { } #[stable(feature = "box_from_slice", since = "1.17.0")] -impl From<&str> for Box { +impl + Default> From<&str> for Box { /// Converts a `&str` into a `Box` /// /// This conversion allocates on the heap @@ -662,7 +661,7 @@ impl From<&str> for Box { } #[stable(feature = "boxed_str_conv", since = "1.19.0")] -impl From> for Box<[u8], A> { +impl From> for Box<[u8], A> { /// Converts a `Box>` into a `Box<[u8]>` /// /// This conversion does not allocate on the heap and happens in place. @@ -685,7 +684,7 @@ impl From> for Box<[u8], A> { } } -impl Box { +impl Box { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. @@ -716,7 +715,7 @@ impl Box { } } -impl Box { +impl> Box { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. @@ -941,7 +940,7 @@ impl FromIterator for Box<[A]> { } #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl Clone for Box<[T], A> { +impl + Clone> Clone for Box<[T], A> { fn clone(&self) -> Self { let mut new = BoxBuilder { data: RawVec::with_capacity_in(self.len(), self.1.clone()), @@ -962,12 +961,12 @@ impl Clone for Box<[T], A> { return unsafe { new.into_box() }; // Helper type for responding to panics correctly. - struct BoxBuilder { + struct BoxBuilder> { data: RawVec, len: usize, } - impl BoxBuilder { + impl> BoxBuilder { unsafe fn into_box(self) -> Box<[T], A> { let raw = ptr::read(&self.data); mem::forget(self); @@ -975,7 +974,7 @@ impl Clone for Box<[T], A> { } } - impl Drop for BoxBuilder { + impl> Drop for BoxBuilder { fn drop(&mut self) { let mut data = self.data.ptr(); let max = unsafe { data.add(self.len) }; diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index c40624b14c7f0..2a4ec50397f2c 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -7,8 +7,8 @@ use core::ops::Drop; use core::ptr::{self, NonNull, Unique}; use core::slice; -use crate::alloc::{Alloc, Layout, Global, handle_alloc_error}; -use crate::collections::CollectionAllocErr::{self, *}; +use crate::alloc::{Alloc, AllocErr, Layout, Global, handle_alloc_error}; +use crate::collections::CollectionAllocErr; use crate::boxed::Box; /// A low-level utility for more ergonomically allocating, reallocating, and deallocating @@ -39,13 +39,13 @@ use crate::boxed::Box; /// field. This allows zero-sized types to not be special-cased by consumers of /// this type. 
#[allow(missing_debug_implementations)] -pub struct RawVec { +pub struct RawVec = Global> { ptr: Unique, cap: usize, a: A, } -impl RawVec { +impl> RawVec { /// Like `new` but parameterized over the choice of allocator for /// the returned RawVec. pub const fn new_in(a: A) -> Self { @@ -146,7 +146,7 @@ impl RawVec { } } -impl RawVec { +impl> RawVec { /// Reconstitutes a RawVec from a pointer, capacity, and allocator. /// /// # Undefined Behavior @@ -189,7 +189,7 @@ impl RawVec { } } -impl RawVec { +impl> RawVec { /// Gets a raw pointer to the start of the allocation. Note that this is /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must /// be careful. @@ -409,8 +409,8 @@ impl RawVec { /// Aborts on OOM pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) { match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) { - Err(CapacityOverflow) => capacity_overflow(), - Err(AllocErr) => unreachable!(), + Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(), + Err(CollectionAllocErr::AllocErr) => unreachable!(), Ok(()) => { /* yay */ } } } @@ -422,7 +422,7 @@ impl RawVec { -> Result { // Nothing we can really do about these checks :( - let required_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?; + let required_cap = used_cap.checked_add(needed_extra_cap).ok_or(CollectionAllocErr::CapacityOverflow)?; // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. let double_cap = self.cap * 2; // `double_cap` guarantees exponential growth. @@ -489,8 +489,8 @@ impl RawVec { /// ``` pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) { match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) { - Err(CapacityOverflow) => capacity_overflow(), - Err(AllocErr) => unreachable!(), + Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(), + Err(CollectionAllocErr::AllocErr) => unreachable!(), Ok(()) => { /* yay */ } } } @@ -629,7 +629,7 @@ enum ReserveStrategy { use ReserveStrategy::*; -impl RawVec { +impl> RawVec { fn reserve_internal( &mut self, used_cap: usize, @@ -638,8 +638,6 @@ impl RawVec { strategy: ReserveStrategy, ) -> Result<(), CollectionAllocErr> { unsafe { - use crate::alloc::AllocErr; - // NOTE: we don't early branch on ZSTs here because we want this // to actually catch "asking for more than usize::MAX" in that case. // If we make it past the first branch then we are guaranteed to @@ -653,10 +651,10 @@ impl RawVec { // Nothing we can really do about these checks :( let new_cap = match strategy { - Exact => used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?, + Exact => used_cap.checked_add(needed_extra_cap).ok_or(CollectionAllocErr::CapacityOverflow)?, Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?, }; - let new_layout = Layout::array::(new_cap).map_err(|_| CapacityOverflow)?; + let new_layout = Layout::array::(new_cap).map_err(|_| CollectionAllocErr::CapacityOverflow)?; alloc_guard(new_layout.size())?; @@ -682,7 +680,7 @@ impl RawVec { } -impl RawVec { +impl> RawVec { /// Converts the entire buffer into `Box<[T], A>`. /// /// Note that this will correctly reconstitute any `cap` changes @@ -703,7 +701,7 @@ impl RawVec { } } -impl RawVec { +impl> RawVec { /// Frees the memory owned by the RawVec *without* trying to Drop its contents. 
pub unsafe fn dealloc_buffer(&mut self) { let elem_size = mem::size_of::(); @@ -715,7 +713,7 @@ impl RawVec { } } -unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec { +unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec { /// Frees the memory owned by the RawVec *without* trying to Drop its contents. fn drop(&mut self) { unsafe { self.dealloc_buffer(); } @@ -736,7 +734,7 @@ unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec { #[inline] fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> { if mem::size_of::() < 8 && alloc_size > core::isize::MAX as usize { - Err(CapacityOverflow) + Err(CollectionAllocErr::CapacityOverflow) } else { Ok(()) } diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs index 893d1f69d8870..efc83ce726618 100644 --- a/src/liballoc/str.rs +++ b/src/liballoc/str.rs @@ -35,7 +35,7 @@ use core::ptr; use core::iter::FusedIterator; use core::unicode::conversions; -use crate::alloc::Alloc; +use crate::alloc::{Alloc, AllocErr}; use crate::borrow::ToOwned; use crate::boxed::Box; use crate::slice::{SliceConcatExt, SliceIndex}; @@ -586,6 +586,6 @@ impl str { /// ``` #[stable(feature = "str_box_extras", since = "1.20.0")] #[inline] -pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8], A>) -> Box { +pub unsafe fn from_boxed_utf8_unchecked>(v: Box<[u8], A>) -> Box { Box::map_raw(v, |p| p as *mut str) } diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs index 7f7722548f581..7a915f1e68f66 100644 --- a/src/liballoc/string.rs +++ b/src/liballoc/string.rs @@ -991,7 +991,7 @@ impl String { /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.vec.try_reserve_exact(additional) } diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs index f25631e028eec..720f566ecbb8e 100644 --- a/src/libcore/alloc.rs +++ b/src/libcore/alloc.rs @@ -579,6 +579,17 @@ pub unsafe trait GlobalAlloc { } } +/// A hack so the default impl can condition on the associated type. This `Err` +/// type ought to just live in the `Alloc` trait. +#[unstable(feature = "allocator_api", issue = "32838")] +pub trait AllocHelper { + + /// The type of any errors thrown by the allocator, customarily + /// either `AllocErr`, for when error recovery is allowed, or `!` + /// to signify that all errors will result in . + type Err = AllocErr; +} + /// An implementation of `Alloc` can allocate, reallocate, and /// deallocate arbitrary blocks of data described via `Layout`. /// @@ -659,7 +670,7 @@ pub unsafe trait GlobalAlloc { /// Note that this list may get tweaked over time as clarifications are made in /// the future. #[unstable(feature = "allocator_api", issue = "32838")] -pub unsafe trait Alloc { +pub unsafe trait Alloc: AllocHelper { // (Note: some existing allocators have unspecified but well-defined // behavior in response to a zero size allocation request ; @@ -707,7 +718,7 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr>; + unsafe fn alloc(&mut self, layout: Layout) -> Result, Self::Err>; /// Deallocate the memory referenced by `ptr`. 
/// @@ -820,7 +831,7 @@ pub unsafe trait Alloc { unsafe fn realloc(&mut self, ptr: NonNull, layout: Layout, - new_size: usize) -> Result, AllocErr> { + new_size: usize) -> Result, Self::Err> { let old_size = layout.size(); if new_size >= old_size { @@ -863,7 +874,7 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, Self::Err> { let size = layout.size(); let p = self.alloc(layout); if let Ok(p) = p { @@ -891,7 +902,7 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc_excess(&mut self, layout: Layout) -> Result { + unsafe fn alloc_excess(&mut self, layout: Layout) -> Result { let usable_size = self.usable_size(&layout); self.alloc(layout).map(|p| Excess(p, usable_size.1)) } @@ -918,7 +929,7 @@ pub unsafe trait Alloc { unsafe fn realloc_excess(&mut self, ptr: NonNull, layout: Layout, - new_size: usize) -> Result { + new_size: usize) -> Result { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let usable_size = self.usable_size(&new_layout); self.realloc(ptr, layout, new_size) @@ -1064,16 +1075,8 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - fn alloc_one(&mut self) -> Result, AllocErr> - where Self: Sized - { - let k = Layout::new::(); - if k.size() > 0 { - unsafe { self.alloc(k).map(|p| p.cast()) } - } else { - Err(AllocErr) - } - } + fn alloc_one(&mut self) -> Result, Self::Err> + where Self: Sized; /// Deallocates a block suitable for holding an instance of `T`. /// @@ -1133,18 +1136,8 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - fn alloc_array(&mut self, n: usize) -> Result, AllocErr> - where Self: Sized - { - match Layout::array::(n) { - Ok(ref layout) if layout.size() > 0 => { - unsafe { - self.alloc(layout.clone()).map(|p| p.cast()) - } - } - _ => Err(AllocErr), - } - } + fn alloc_array(&mut self, n: usize) -> Result, Self::Err> + where Self: Sized; /// Reallocates a block previously suitable for holding `n_old` /// instances of `T`, returning a block suitable for holding @@ -1183,19 +1176,8 @@ pub unsafe trait Alloc { unsafe fn realloc_array(&mut self, ptr: NonNull, n_old: usize, - n_new: usize) -> Result, AllocErr> - where Self: Sized - { - match (Layout::array::(n_old), Layout::array::(n_new)) { - (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => { - debug_assert!(k_old.align() == k_new.align()); - self.realloc(ptr.cast(), k_old.clone(), k_new.size()).map(NonNull::cast) - } - _ => { - Err(AllocErr) - } - } - } + n_new: usize) -> Result, Self::Err> + where Self: Sized; /// Deallocates a block suitable for holding `n` instances of `T`. /// @@ -1217,8 +1199,38 @@ pub unsafe trait Alloc { /// constraints. /// /// Always returns `Err` on arithmetic overflow. 
- unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), AllocErr> - where Self: Sized + unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), Self::Err> + where Self: Sized; +} + +#[unstable(feature = "allocator_api", issue = "32838")] +default unsafe impl> Alloc for A { + fn alloc_one(&mut self) -> Result, Self::Err> + where Self: Sized + { + let k = Layout::new::(); + if k.size() > 0 { + unsafe { self.alloc(k).map(|p| p.cast()) } + } else { + Err(AllocErr) + } + } + + fn alloc_array(&mut self, n: usize) -> Result, Self::Err> + where Self: Sized + { + match Layout::array::(n) { + Ok(ref layout) if layout.size() > 0 => { + unsafe { + self.alloc(layout.clone()).map(|p| p.cast()) + } + } + _ => Err(AllocErr), + } + } + + unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), Self::Err> + where Self: Sized { match Layout::array::(n) { Ok(ref k) if k.size() > 0 => { @@ -1229,4 +1241,21 @@ pub unsafe trait Alloc { } } } + + unsafe fn realloc_array(&mut self, + ptr: NonNull, + n_old: usize, + n_new: usize) -> Result, Self::Err> + where Self: Sized + { + match (Layout::array::(n_old), Layout::array::(n_new)) { + (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => { + debug_assert!(k_old.align() == k_new.align()); + self.realloc(ptr.cast(), k_old.clone(), k_new.size()).map(NonNull::cast) + } + _ => { + Err(AllocErr) + } + } + } } From 445409390f44eeecd49e977fa1f22be092dda4e2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 26 Dec 2017 20:24:00 -0500 Subject: [PATCH 4/9] Add AbortAdapter allocator adapter See its documentation for why its useful --- src/liballoc/abort_adapter.rs | 109 ++++++++++++++++++++++++++++++++++ src/liballoc/lib.rs | 2 + src/liballoc/raw_vec.rs | 9 ++- 3 files changed, 117 insertions(+), 3 deletions(-) create mode 100644 src/liballoc/abort_adapter.rs diff --git a/src/liballoc/abort_adapter.rs b/src/liballoc/abort_adapter.rs new file mode 100644 index 0000000000000..732b955a7a051 --- /dev/null +++ b/src/liballoc/abort_adapter.rs @@ -0,0 +1,109 @@ +//! An allocator adapter that blows up by calling `handle_alloc_error` on all errors. +//! +//! On one hand, concrete allocator implementations should always be written +//! without panicking on user error and OOM to give users maximum +//! flexibility. On the other hand, code that depends on allocation succeeding +//! should depend on `Alloc` to avoid repetitively handling errors from +//! which it cannot recover. +//! +//! This adapter bridges the gap, effectively allowing `Alloc` to be +//! implemented by any allocator. + +#![unstable(feature = "allocator_api", + reason = "the precise API and guarantees it provides may be tweaked \ + slightly, especially to possibly take into account the \ + types being stored to make room for a future \ + tracing garbage collector", + issue = "32838")] + +use core::usize; +use core::ptr::NonNull; + +use crate::alloc::*; + +/// An allocator adapter that blows up by calling `handle_alloc_error` on all errors. +/// +/// See the [module-level documentation](../../std/abort_adapter/index.html) for more. 
+#[derive(Copy, Clone, Debug, Default)] +pub struct AbortAdapter(pub Alloc); + +impl AllocHelper for AbortAdapter { + type Err = !; +} + +unsafe impl Alloc for AbortAdapter { + unsafe fn alloc(&mut self, layout: Layout) -> Result, Self::Err> { + self.0.alloc(layout).or_else(|_| handle_alloc_error(layout)) + } + + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + self.0.dealloc(ptr, layout) + } + + fn usable_size(&self, layout: &Layout) -> (usize, usize) { + self.0.usable_size(layout) + } + + unsafe fn realloc(&mut self, + ptr: NonNull, + layout: Layout, + new_size: usize) -> Result, Self::Err> { + self.0.realloc(ptr, layout, new_size).or_else(|_| handle_alloc_error(layout)) + } + + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, Self::Err> { + self.0.alloc_zeroed(layout).or_else(|_| handle_alloc_error(layout)) + } + + unsafe fn alloc_excess(&mut self, layout: Layout) -> Result { + self.0.alloc_excess(layout).or_else(|_| handle_alloc_error(layout)) + } + + unsafe fn grow_in_place(&mut self, + ptr: NonNull, + layout: Layout, + new_size: usize) -> Result<(), CannotReallocInPlace> { + self.0.grow_in_place(ptr, layout, new_size) + } + + unsafe fn shrink_in_place(&mut self, + ptr: NonNull, + layout: Layout, + new_size: usize) -> Result<(), CannotReallocInPlace> { + self.0.shrink_in_place(ptr, layout, new_size) + } + + fn alloc_one(&mut self) -> Result, Self::Err> + where Self: Sized + { + self.0.alloc_one().or_else(|_| handle_alloc_error(Layout::new::())) + } + + unsafe fn dealloc_one(&mut self, ptr: NonNull) + where Self: Sized + { + self.0.dealloc_one(ptr) + } + + fn alloc_array(&mut self, n: usize) -> Result, Self::Err> + where Self: Sized + { + self.0.alloc_array(n).or_else(|_| handle_alloc_error(Layout::new::())) + } + + unsafe fn realloc_array(&mut self, + ptr: NonNull, + n_old: usize, + n_new: usize) -> Result, Self::Err> + where Self: Sized + { + self.0.realloc_array(ptr, n_old, n_new) + .or_else(|_| handle_alloc_error(Layout::new::())) + } + + unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), Self::Err> + where Self: Sized + { + self.0.dealloc_array(ptr, n).or_else(|_| handle_alloc_error(Layout::new::())) + } +} diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index c530ac24275c2..9ec2c9f9fa379 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -112,6 +112,7 @@ #![feature(maybe_uninit_extra, maybe_uninit_slice, maybe_uninit_array)] #![feature(alloc_layout_extra)] #![feature(try_trait)] +#![feature(never_type)] // Allow testing this library @@ -128,6 +129,7 @@ mod macros; // Heaps provided for low-level allocation strategies pub mod alloc; +pub mod abort_adapter; // Primitive types using the heaps above diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 2a4ec50397f2c..dce79c050f4b3 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -422,7 +422,8 @@ impl> RawVec { -> Result { // Nothing we can really do about these checks :( - let required_cap = used_cap.checked_add(needed_extra_cap).ok_or(CollectionAllocErr::CapacityOverflow)?; + let required_cap = used_cap.checked_add(needed_extra_cap) + .ok_or(CollectionAllocErr::CapacityOverflow)?; // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. let double_cap = self.cap * 2; // `double_cap` guarantees exponential growth. 
@@ -651,10 +652,12 @@ impl> RawVec { // Nothing we can really do about these checks :( let new_cap = match strategy { - Exact => used_cap.checked_add(needed_extra_cap).ok_or(CollectionAllocErr::CapacityOverflow)?, + Exact => used_cap.checked_add(needed_extra_cap) + .ok_or(CollectionAllocErr::CapacityOverflow)?, Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?, }; - let new_layout = Layout::array::(new_cap).map_err(|_| CollectionAllocErr::CapacityOverflow)?; + let new_layout = Layout::array::(new_cap) + .map_err(|_| CollectionAllocErr::CapacityOverflow)?; alloc_guard(new_layout.size())?; From afa567775f1d2bb5c44b75f51cd930f6a5f35329 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 27 Dec 2017 22:15:36 -0500 Subject: [PATCH 5/9] box, RawVec: Generalize to not neccessary abort on OOM --- src/liballoc/abort_adapter.rs | 5 +- src/liballoc/boxed.rs | 112 ++++++++---- src/liballoc/collections/mod.rs | 10 +- src/liballoc/collections/vec_deque.rs | 15 +- src/liballoc/lib.rs | 1 + src/liballoc/raw_vec.rs | 235 ++++++++++++-------------- src/liballoc/str.rs | 2 +- src/liballoc/string.rs | 7 +- src/liballoc/vec.rs | 17 +- src/libstd/collections/hash/map.rs | 8 +- src/libstd/collections/hash/set.rs | 4 +- 11 files changed, 230 insertions(+), 186 deletions(-) diff --git a/src/liballoc/abort_adapter.rs b/src/liballoc/abort_adapter.rs index 732b955a7a051..a1e0fe9f547b0 100644 --- a/src/liballoc/abort_adapter.rs +++ b/src/liballoc/abort_adapter.rs @@ -48,7 +48,10 @@ unsafe impl Alloc for AbortAdapter { ptr: NonNull, layout: Layout, new_size: usize) -> Result, Self::Err> { - self.0.realloc(ptr, layout, new_size).or_else(|_| handle_alloc_error(layout)) + self.0.realloc(ptr, layout, new_size).or_else(|_| { + let layout = Layout::from_size_align_unchecked(new_size, layout.align()); + handle_alloc_error(layout) + }) } unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, Self::Err> { diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 3a830eb031d3e..e240820410434 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -88,7 +88,8 @@ use core::ops::{ use core::ptr::{self, NonNull, Unique}; use core::task::{Context, Poll}; -use crate::alloc::{Alloc, AllocErr, Global, Layout, handle_alloc_error}; +use crate::abort_adapter::AbortAdapter; +use crate::alloc::{Alloc, Global, Layout}; use crate::vec::Vec; use crate::raw_vec::RawVec; use crate::str::from_boxed_utf8_unchecked; @@ -99,7 +100,7 @@ use crate::str::from_boxed_utf8_unchecked; #[lang = "owned_box"] #[fundamental] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Box(Unique, pub(crate) A); +pub struct Box>(Unique, pub(crate) A); impl Box { /// Allocates memory on the heap and then places `x` into it. @@ -126,7 +127,7 @@ impl Box { } } -impl> Box { +impl Box { /// Allocates memory in the given allocator and then places `x` into it. /// /// This doesn't actually allocate if `T` is zero-sized. @@ -140,7 +141,7 @@ impl> Box { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] #[inline(always)] - pub fn new_in(x: T, a: A) -> Box { + pub fn new_in(x: T, a: A) -> Result, A::Err> { let mut a = a; let layout = Layout::for_value(&x); let size = layout.size(); @@ -148,7 +149,7 @@ impl> Box { Unique::empty() } else { unsafe { - let ptr = a.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)); + let ptr = a.alloc(layout)?; ptr.cast().into() } }; @@ -157,15 +158,15 @@ impl> Box { unsafe { ptr::write(ptr.as_ptr() as *mut T, x); } - Box(ptr, a) + Ok(Box(ptr, a)) } /// Constructs a new `Pin>`. 
If `T` does not implement `Unpin`, then /// `x` will be pinned in memory and unable to be moved. #[unstable(feature = "allocator_api", issue = "32838")] #[inline(always)] - pub fn pin_in(x: T, a: A) -> Pin> { - Box::new_in(x, a).into() + pub fn pin_in(x: T, a: A) -> Result>, A::Err> { + Box::new_in(x, a).map(Into::into) } } @@ -209,7 +210,7 @@ impl Box { #[stable(feature = "box_raw", since = "1.4.0")] #[inline] pub unsafe fn from_raw(raw: *mut T) -> Self { - Box(Unique::new_unchecked(raw), Global) + Box(Unique::new_unchecked(raw), AbortAdapter(Global)) } } @@ -293,7 +294,7 @@ impl Box { /// [`Box::from_raw`]: struct.Box.html#method.from_raw #[stable(feature = "box_raw", since = "1.4.0")] #[inline] - pub fn into_raw(b: Box) -> *mut T { + pub fn into_raw(b: Self) -> *mut T { Box::into_raw_non_null(b).as_ptr() } @@ -333,10 +334,20 @@ impl Box { Box::into_unique(b).into() } + /// Consumes the `Box`, returning the wrapped pointer as `NonNull` along + /// with the allocator. + // Also feature = "box_into_raw_non_null", issue = "47336", FIXME how to write? + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn into_both_non_null(b: Self) -> (NonNull, A) { + let (unique, alloc) = Box::into_both_unique(b).into(); + (unique.into(), alloc) + } + #[unstable(feature = "ptr_internals", issue = "0", reason = "use into_raw_non_null instead")] #[inline] #[doc(hidden)] - pub fn into_unique(b: Box) -> Unique { + pub fn into_unique(b: Self) -> Unique { let mut unique = b.0; mem::forget(b); // Box is kind-of a library type, but recognized as a "unique pointer" by @@ -348,6 +359,22 @@ impl Box { unsafe { Unique::new_unchecked(unique.as_mut() as *mut T) } } + /// Consumes the `Box`, returning the wrapped pointer as `Unique` along + /// with the allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + #[doc(hidden)] + pub fn into_both_unique(mut b: Self) -> (Unique, A) { + let unique = b.0.into(); + let alloc = unsafe { + let mut a = mem::MaybeUninit::uninit(); + ptr::swap(a.as_mut_ptr(), &mut b.1 as *mut _); + mem::forget(b); + a.assume_init() + }; + (unique, alloc) + } + /// Consumes and leaks the `Box`, returning a mutable reference, /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime /// `'a`. If the type has only static references, or none at all, then this @@ -391,7 +418,7 @@ impl Box { /// ``` #[stable(feature = "box_leak", since = "1.26.0")] #[inline] - pub fn leak<'a>(b: Box) -> &'a mut T + pub fn leak<'a>(b: Self) -> &'a mut T where T: 'a // Technically not needed, but kept to be explicit. { @@ -412,6 +439,7 @@ impl Box { } } + #[stable(feature = "rust1", since = "1.0.0")] unsafe impl<#[may_dangle] T: ?Sized, A> Drop for Box { fn drop(&mut self) { @@ -420,29 +448,31 @@ unsafe impl<#[may_dangle] T: ?Sized, A> Drop for Box { } #[stable(feature = "rust1", since = "1.0.0")] -impl + Default> Default for Box { +impl + Default> Default for Box { /// Creates a `Box`, with the `Default` value for T. 
fn default() -> Box { - Box::new_in(Default::default(), A::default()) + let Ok(b) = Box::new_in(Default::default(), A::default()); + b } } #[stable(feature = "rust1", since = "1.0.0")] -impl + Default> Default for Box<[T], A> { +impl + Default> Default for Box<[T], A> { fn default() -> Box<[T], A> { - Box::<[T; 0], A>::new_in([], A::default()) + let Ok(b) = Box::<[T; 0], A>::new_in([], Default::default()); + b } } #[stable(feature = "default_box_extra", since = "1.17.0")] -impl + Default> Default for Box { +impl + Default> Default for Box { fn default() -> Box { unsafe { from_boxed_utf8_unchecked(Default::default()) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl + Clone> Clone for Box { +impl + Clone> Clone for Box { /// Returns a new box with a `clone()` of this box's contents. /// /// # Examples @@ -453,8 +483,9 @@ impl + Clone> Clone for Box { /// ``` #[rustfmt::skip] #[inline] - fn clone(&self) -> Box { - Box::new_in((**self).clone(), self.1.clone()) + fn clone(&self) -> Self { + let Ok(b) = Box::new_in((**self).clone(), self.1.clone()); + b } /// Copies `source`'s contents into `self` without creating a new allocation. /// @@ -469,13 +500,13 @@ impl + Clone> Clone for Box { /// assert_eq!(*y, 5); /// ``` #[inline] - fn clone_from(&mut self, source: &Box) { + fn clone_from(&mut self, source: &Self) { (**self).clone_from(&(**source)); } } #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl + Clone> Clone for Box { +impl + Clone> Clone for Box { fn clone(&self) -> Self { // this makes a copy of the data let buf = Box::<[u8], A>::from_slice_in(self.as_bytes(), self.1.clone()); @@ -485,6 +516,7 @@ impl + Clone> Clone for Box { } } +/// Just the contents are compared, the allocator is ignored #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for Box { #[inline] @@ -496,6 +528,7 @@ impl PartialEq for Box { PartialEq::ne(&**self, &**other) } } +/// Just the contents are compared, the allocator is ignored #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Box { #[inline] @@ -519,6 +552,7 @@ impl PartialOrd for Box { PartialOrd::gt(&**self, &**other) } } +/// Just the contents are compared, the allocator is ignored #[stable(feature = "rust1", since = "1.0.0")] impl Ord for Box { #[inline] @@ -526,9 +560,11 @@ impl Ord for Box { Ord::cmp(&**self, &**other) } } +/// Just the contents are compared, the allocator is ignored #[stable(feature = "rust1", since = "1.0.0")] impl Eq for Box {} +/// Just the contents are compared, the allocator is ignored #[stable(feature = "rust1", since = "1.0.0")] impl Hash for Box { fn hash(&self, state: &mut H) { @@ -536,6 +572,7 @@ impl Hash for Box { } } +/// Just the contents are compared, the allocator is ignored #[stable(feature = "indirect_hasher_impl", since = "1.22.0")] impl Hasher for Box { fn finish(&self) -> u64 { @@ -583,7 +620,7 @@ impl Hasher for Box { } #[stable(feature = "from_for_ptrs", since = "1.6.0")] -impl + Default> From for Box { +impl + Default> From for Box { /// Converts a generic type `T` into a `Box` /// /// The conversion allocates on the heap and moves `t` @@ -597,7 +634,8 @@ impl + Default> From for Box { /// assert_eq!(Box::from(x), boxed); /// ``` fn from(t: T) -> Self { - Box::new_in(t, A::default()) + let Ok(b) = Box::new_in(t, Default::default()); + b } } @@ -611,10 +649,10 @@ impl From> for Pin> { } } -impl> Box<[T], A> { +impl> Box<[T], A> { fn from_slice_in(slice: &[T], a: A) -> Box<[T], A> { let len = slice.len(); - let buf = RawVec::with_capacity_in(len, a); + let Ok(buf) = 
RawVec::with_capacity_in(len, a); unsafe { ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); buf.into_box() @@ -623,7 +661,7 @@ impl> Box<[T], A> { } #[stable(feature = "box_from_slice", since = "1.17.0")] -impl + Default> From<&[T]> for Box<[T], A> { +impl + Default> From<&[T]> for Box<[T], A> { /// Converts a `&[T]` into a `Box<[T]>` /// /// This conversion allocates on the heap @@ -643,7 +681,7 @@ impl + Default> From<&[T]> for Box<[T], A> { } #[stable(feature = "box_from_slice", since = "1.17.0")] -impl + Default> From<&str> for Box { +impl + Default> From<&str> for Box { /// Converts a `&str` into a `Box` /// /// This conversion allocates on the heap @@ -715,7 +753,7 @@ impl Box { } } -impl> Box { +impl> Box { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. @@ -909,9 +947,9 @@ impl + ?Sized, Alloc> Fn for Box { #[rustc_paren_sugar] #[unstable(feature = "fnbox", issue = "28796")] #[rustc_deprecated(reason = "use `FnOnce`, `FnMut`, or `Fn` instead", since = "1.37.0")] -pub trait FnBox: FnOnce { +pub trait FnBox>: FnOnce { /// Performs the call operation. - fn call_box(self: Box, args: A) -> Self::Output; + fn call_box(self: Box, args: Args) -> Self::Output; } #[unstable(feature = "fnbox", issue = "28796")] @@ -940,10 +978,12 @@ impl FromIterator for Box<[A]> { } #[stable(feature = "box_slice_clone", since = "1.3.0")] -impl + Clone> Clone for Box<[T], A> { +impl + Clone> Clone for Box<[T], A> { fn clone(&self) -> Self { + let Ok(b) = RawVec::with_capacity_in(self.len(), self.1.clone()); + let mut new = BoxBuilder { - data: RawVec::with_capacity_in(self.len(), self.1.clone()), + data: b, len: 0, }; @@ -961,12 +1001,12 @@ impl + Clone> Clone for Box<[T], A> { return unsafe { new.into_box() }; // Helper type for responding to panics correctly. - struct BoxBuilder> { + struct BoxBuilder { data: RawVec, len: usize, } - impl> BoxBuilder { + impl BoxBuilder { unsafe fn into_box(self) -> Box<[T], A> { let raw = ptr::read(&self.data); mem::forget(self); @@ -974,7 +1014,7 @@ impl + Clone> Clone for Box<[T], A> { } } - impl> Drop for BoxBuilder { + impl Drop for BoxBuilder { fn drop(&mut self) { let mut data = self.data.ptr(); let max = unsafe { data.add(self.len) }; diff --git a/src/liballoc/collections/mod.rs b/src/liballoc/collections/mod.rs index 5a33ddc14f004..ef852cf07db20 100644 --- a/src/liballoc/collections/mod.rs +++ b/src/liballoc/collections/mod.rs @@ -46,24 +46,24 @@ use crate::alloc::{AllocErr, LayoutErr}; /// Augments `AllocErr` with a CapacityOverflow variant. #[derive(Clone, PartialEq, Eq, Debug)] #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] -pub enum CollectionAllocErr { +pub enum CollectionAllocErr { /// Error due to the computed capacity exceeding the collection's maximum /// (usually `isize::MAX` bytes). CapacityOverflow, /// Error due to the allocator (see the `AllocErr` type's docs). 
- AllocErr, + AllocErr(E), } #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] -impl From for CollectionAllocErr { +impl From for CollectionAllocErr { #[inline] fn from(AllocErr: AllocErr) -> Self { - CollectionAllocErr::AllocErr + CollectionAllocErr::AllocErr(AllocErr) } } #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] -impl From for CollectionAllocErr { +impl From for CollectionAllocErr { #[inline] fn from(_: LayoutErr) -> Self { CollectionAllocErr::CapacityOverflow diff --git a/src/liballoc/collections/vec_deque.rs b/src/liballoc/collections/vec_deque.rs index 31e49d06a7b5a..024b29799a8a0 100644 --- a/src/liballoc/collections/vec_deque.rs +++ b/src/liballoc/collections/vec_deque.rs @@ -19,6 +19,7 @@ use core::ptr::{self, NonNull}; use core::slice; use core::hash::{Hash, Hasher}; +use crate::alloc::AllocErr; use crate::collections::CollectionAllocErr; use crate::raw_vec::RawVec; use crate::vec::Vec; @@ -549,7 +550,7 @@ impl VecDeque { .expect("capacity overflow"); if new_cap > old_cap { - self.buf.reserve_exact(used_cap, new_cap - used_cap); + let Ok(()) = self.buf.reserve_exact(used_cap, new_cap - used_cap); unsafe { self.handle_cap_increase(old_cap); } @@ -593,7 +594,9 @@ impl VecDeque { /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve_exact(&mut self, additional: usize) + -> Result<(), CollectionAllocErr> + { self.try_reserve(additional) } @@ -631,7 +634,7 @@ impl VecDeque { /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { let old_cap = self.cap(); let used_cap = self.len() + 1; let new_cap = used_cap.checked_add(additional) @@ -748,7 +751,7 @@ impl VecDeque { debug_assert!(self.head < self.tail); } - self.buf.shrink_to_fit(target_cap); + let Ok(()) = self.buf.shrink_to_fit(target_cap); debug_assert!(self.head < self.cap()); debug_assert!(self.tail < self.cap()); @@ -1885,7 +1888,7 @@ impl VecDeque { fn grow_if_necessary(&mut self) { if self.is_full() { let old_cap = self.cap(); - self.buf.double(); + let Ok(()) = self.buf.double(); unsafe { self.handle_cap_increase(old_cap); } @@ -2721,7 +2724,7 @@ impl From> for VecDeque { if !buf.cap().is_power_of_two() || (buf.cap() < (MINIMUM_CAPACITY + 1)) || (buf.cap() == len) { let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); - buf.reserve_exact(len, cap - len); + let Ok(()) = buf.reserve_exact(len, cap - len); } VecDeque { diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 9ec2c9f9fa379..912f698935f62 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -113,6 +113,7 @@ #![feature(alloc_layout_extra)] #![feature(try_trait)] #![feature(never_type)] +#![feature(exhaustive_patterns)] // Allow testing this library diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index dce79c050f4b3..12bee2b20d6f8 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -7,7 +7,8 @@ use core::ops::Drop; use core::ptr::{self, NonNull, Unique}; use core::slice; -use crate::alloc::{Alloc, AllocErr, Layout, Global, handle_alloc_error}; 
+use crate::abort_adapter::AbortAdapter; +use crate::alloc::{Alloc, Layout, Global}; use crate::collections::CollectionAllocErr; use crate::boxed::Box; @@ -21,7 +22,6 @@ use crate::boxed::Box; /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics) /// * Guards against 32-bit systems allocating more than isize::MAX bytes /// * Guards against overflowing your length -/// * Aborts on OOM or calls handle_alloc_error as applicable /// * Avoids freeing Unique::empty() /// * Contains a ptr::Unique and thus endows the user with all related benefits /// @@ -39,13 +39,13 @@ use crate::boxed::Box; /// field. This allows zero-sized types to not be special-cased by consumers of /// this type. #[allow(missing_debug_implementations)] -pub struct RawVec = Global> { +pub struct RawVec> { ptr: Unique, cap: usize, a: A, } -impl> RawVec { +impl RawVec { /// Like `new` but parameterized over the choice of allocator for /// the returned RawVec. pub const fn new_in(a: A) -> Self { @@ -65,23 +65,24 @@ impl> RawVec { /// Like `with_capacity` but parameterized over the choice of /// allocator for the returned RawVec. #[inline] - pub fn with_capacity_in(cap: usize, a: A) -> Self { - RawVec::allocate_in(cap, false, a) + pub fn with_capacity_in(cap: usize, a: A) -> Result { + RawVec::allocate_in(cap, false, a).map_err(handle_overflow_error) } /// Like `with_capacity_zeroed` but parameterized over the choice /// of allocator for the returned RawVec. #[inline] - pub fn with_capacity_zeroed_in(cap: usize, a: A) -> Self { - RawVec::allocate_in(cap, true, a) + pub fn with_capacity_zeroed_in(cap: usize, a: A) -> Result { + RawVec::allocate_in(cap, true, a).map_err(handle_overflow_error) } - fn allocate_in(cap: usize, zeroed: bool, mut a: A) -> Self { - unsafe { + fn allocate_in(cap: usize, zeroed: bool, mut a: A) -> Result> { + unsafe { let elem_size = mem::size_of::(); - let alloc_size = cap.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow()); - alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow()); + let alloc_size = cap.checked_mul(elem_size) + .ok_or(CollectionAllocErr::CapacityOverflow)?; + alloc_guard(alloc_size)?; // handles ZSTs and `cap = 0` alike let ptr = if alloc_size == 0 { @@ -89,34 +90,31 @@ impl> RawVec { } else { let align = mem::align_of::(); let layout = Layout::from_size_align(alloc_size, align).unwrap(); - let result = if zeroed { + if zeroed { a.alloc_zeroed(layout) } else { a.alloc(layout) - }; - match result { - Ok(ptr) => ptr.cast(), - Err(_) => handle_alloc_error(layout), - } + }.map_err(CollectionAllocErr::AllocErr)?.cast() }; - RawVec { + Ok(RawVec { ptr: ptr.into(), cap, a, - } + }) } } } -impl RawVec { +impl RawVec { /// Creates the biggest possible RawVec (on the system heap) /// without allocating. If T has positive size, then this makes a /// RawVec with capacity 0. If T has 0 size, then it makes a /// RawVec with capacity `usize::MAX`. Useful for implementing /// delayed allocation. pub const fn new() -> Self { - Self::new_in(Global) + // Cannot use `Default::default()` cause const fun. + Self::new_in(AbortAdapter(Global)) } /// Creates a RawVec (on the system heap) with exactly the @@ -130,23 +128,23 @@ impl RawVec { /// * Panics if the requested capacity exceeds `usize::MAX` bytes. /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. 
- /// - /// # Aborts - /// - /// Aborts on OOM #[inline] pub fn with_capacity(cap: usize) -> Self { - RawVec::allocate_in(cap, false, Global) + let Ok(v) = RawVec::allocate_in(cap, false, Default::default()) + .map_err(handle_overflow_error); + v } /// Like `with_capacity` but guarantees the buffer is zeroed. #[inline] pub fn with_capacity_zeroed(cap: usize) -> Self { - RawVec::allocate_in(cap, true, Global) + let Ok(v) = RawVec::allocate_in(cap, true, Default::default()) + .map_err(handle_overflow_error); + v } } -impl> RawVec { +impl RawVec { /// Reconstitutes a RawVec from a pointer, capacity, and allocator. /// /// # Undefined Behavior @@ -163,7 +161,7 @@ impl> RawVec { } } -impl RawVec { +impl RawVec { /// Reconstitutes a RawVec from a pointer, capacity. /// /// # Undefined Behavior @@ -175,7 +173,7 @@ impl RawVec { RawVec { ptr: Unique::new_unchecked(ptr), cap, - a: Global, + a: Default::default(), } } @@ -189,7 +187,7 @@ impl RawVec { } } -impl> RawVec { +impl RawVec { /// Gets a raw pointer to the start of the allocation. Note that this is /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must /// be careful. @@ -249,10 +247,6 @@ impl> RawVec { /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. /// - /// # Aborts - /// - /// Aborts on OOM - /// /// # Examples /// /// ``` @@ -283,7 +277,7 @@ impl> RawVec { /// ``` #[inline(never)] #[cold] - pub fn double(&mut self) { + pub fn double(&mut self) -> Result<(), A::Err> { unsafe { let elem_size = mem::size_of::(); @@ -305,29 +299,23 @@ impl> RawVec { // `from_size_align_unchecked`. let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; - alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); - let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), - cur, - new_size); - match ptr_res { - Ok(ptr) => (new_cap, ptr.cast().into()), - Err(_) => handle_alloc_error( - Layout::from_size_align_unchecked(new_size, cur.align()) - ), - } + alloc_guard::(new_size).unwrap_or_else(|_| capacity_overflow()); + let ptr = self.a.realloc(NonNull::from(self.ptr).cast(), + cur, + new_size)?; + (new_cap, ptr.cast().into()) } None => { // skip to 4 because tiny Vec's are dumb; but not if that // would cause overflow let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 }; - match self.a.alloc_array::(new_cap) { - Ok(ptr) => (new_cap, ptr.into()), - Err(_) => handle_alloc_error(Layout::array::(new_cap).unwrap()), - } + let ptr = self.a.alloc_array::(new_cap)?; + (new_cap, ptr.cast().into()) } }; self.ptr = uniq; self.cap = new_cap; + Ok(()) } } @@ -366,7 +354,7 @@ impl> RawVec { // overflow and the alignment is sufficiently small. let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; - alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); + alloc_guard::(new_size).unwrap_or_else(|_| capacity_overflow()); match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) { Ok(_) => { // We can't directly divide `size`. @@ -380,13 +368,6 @@ impl> RawVec { } } - /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. - pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) - -> Result<(), CollectionAllocErr> { - - self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact) - } - /// Ensures that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already, /// will reallocate the minimum possible amount of memory necessary. 
@@ -403,24 +384,18 @@ impl> RawVec { /// * Panics if the requested capacity exceeds `usize::MAX` bytes. /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. - /// - /// # Aborts - /// - /// Aborts on OOM - pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) { - match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) { - Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(), - Err(CollectionAllocErr::AllocErr) => unreachable!(), - Ok(()) => { /* yay */ } - } - } + pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) + -> Result<(), A::Err> + { + self.reserve_internal_2(used_cap, needed_extra_cap, Exact) + } /// Calculates the buffer's new size given that it'll hold `used_cap + /// needed_extra_cap` elements. This logic is used in amortized reserve methods. /// Returns `(new_capacity, new_alloc_size)`. fn amortized_new_size(&self, used_cap: usize, needed_extra_cap: usize) - -> Result { - + -> Result> + { // Nothing we can really do about these checks :( let required_cap = used_cap.checked_add(needed_extra_cap) .ok_or(CollectionAllocErr::CapacityOverflow)?; @@ -430,12 +405,6 @@ impl> RawVec { Ok(cmp::max(double_cap, required_cap)) } - /// The same as `reserve`, but returns on errors instead of panicking or aborting. - pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize) - -> Result<(), CollectionAllocErr> { - self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized) - } - /// Ensures that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already have /// enough capacity, will reallocate enough space plus comfortable slack @@ -454,10 +423,6 @@ impl> RawVec { /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. /// - /// # Aborts - /// - /// Aborts on OOM - /// /// # Examples /// /// ``` @@ -488,12 +453,8 @@ impl> RawVec { /// # vector.push_all(&[1, 3, 5, 7, 9]); /// # } /// ``` - pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) { - match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) { - Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(), - Err(CollectionAllocErr::AllocErr) => unreachable!(), - Ok(()) => { /* yay */ } - } + pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) -> Result<(), A::Err> { + self.reserve_internal_2(used_cap, needed_extra_cap, Amortized) } /// Attempts to ensure that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already have @@ -539,7 +500,7 @@ impl> RawVec { let new_layout = Layout::new::().repeat(new_cap).unwrap().0; // FIXME: may crash and burn on over-reserve - alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow()); + alloc_guard::(new_layout.size()).unwrap_or_else(|_| capacity_overflow()); match self.a.grow_in_place( NonNull::from(self.ptr).cast(), old_layout, new_layout.size(), ) { @@ -560,17 +521,13 @@ impl> RawVec { /// # Panics /// /// Panics if the given amount is *larger* than the current capacity. - /// - /// # Aborts - /// - /// Aborts on OOM. 
- pub fn shrink_to_fit(&mut self, amount: usize) { + pub fn shrink_to_fit(&mut self, amount: usize) -> Result<(), A::Err> { let elem_size = mem::size_of::(); // Set the `cap` because they might be about to promote to a `Box<[T]>` if elem_size == 0 { self.cap = amount; - return; + return Ok(()); } // This check is my waterloo; it's the only thing Vec wouldn't have to do. @@ -602,26 +559,40 @@ impl> RawVec { let new_size = elem_size * amount; let align = mem::align_of::(); let old_layout = Layout::from_size_align_unchecked(old_size, align); - match self.a.realloc(NonNull::from(self.ptr).cast(), - old_layout, - new_size) { - Ok(p) => self.ptr = p.cast().into(), - Err(_) => handle_alloc_error( - Layout::from_size_align_unchecked(new_size, align) - ), - } + let ptr = self.a.realloc(NonNull::from(self.ptr).cast(), + old_layout, + new_size)?; + self.ptr = ptr.cast().into(); } self.cap = amount; } + Ok(()) } -} -enum Fallibility { - Fallible, - Infallible, + // Reborrow a `RawVec` as one which does diverge on allocation failures. + pub fn as_infallible(&mut self) -> &mut RawVec> { + unsafe { ::core::mem::transmute(self) } + } } -use Fallibility::*; +impl RawVec> { + // Reborrow a `RawVec` as one which doesn't diverge on allocation failures. + pub fn as_fallible(&mut self) -> &mut RawVec { + unsafe { ::core::mem::transmute(self) } + } + + /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. + pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) + -> Result<(), CollectionAllocErr> { + self.as_fallible().reserve_internal(used_cap, needed_extra_cap, Exact) + } + + /// The same as `reserve`, but returns on errors instead of panicking or aborting. + pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize) + -> Result<(), CollectionAllocErr> { + self.as_fallible().reserve_internal(used_cap, needed_extra_cap, Amortized) + } +} enum ReserveStrategy { Exact, @@ -630,14 +601,13 @@ enum ReserveStrategy { use ReserveStrategy::*; -impl> RawVec { +impl RawVec { fn reserve_internal( &mut self, used_cap: usize, needed_extra_cap: usize, - fallibility: Fallibility, strategy: ReserveStrategy, - ) -> Result<(), CollectionAllocErr> { + ) -> Result<(), CollectionAllocErr> { unsafe { // NOTE: we don't early branch on ZSTs here because we want this // to actually catch "asking for more than usize::MAX" in that case. @@ -661,29 +631,37 @@ impl> RawVec { alloc_guard(new_layout.size())?; - let res = match self.current_layout() { + let ptr = match self.current_layout() { Some(layout) => { debug_assert!(new_layout.align() == layout.align()); self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size()) } None => self.a.alloc(new_layout), - }; - - match (&res, fallibility) { - (Err(AllocErr), Infallible) => handle_alloc_error(new_layout), - _ => {} - } + }.map_err(CollectionAllocErr::AllocErr)?; - self.ptr = res?.cast().into(); + self.ptr = ptr.cast().into(); self.cap = new_cap; Ok(()) } } + /// Like the above, but throws away capacity overflow errors + fn reserve_internal_2( + &mut self, + used_cap: usize, + needed_extra_cap: usize, + strategy: ReserveStrategy, + ) -> Result<(), A::Err> { + match self.reserve_internal(used_cap, needed_extra_cap, strategy) { + Err(CollectionAllocErr::CapacityOverflow) => capacity_overflow(), + Err(CollectionAllocErr::AllocErr(e)) => Err(e), + Ok(()) => Ok(()), + } + } } -impl> RawVec { +impl RawVec { /// Converts the entire buffer into `Box<[T], A>`. 
/// /// Note that this will correctly reconstitute any `cap` changes @@ -704,7 +682,7 @@ impl> RawVec { } } -impl> RawVec { +impl RawVec { /// Frees the memory owned by the RawVec *without* trying to Drop its contents. pub unsafe fn dealloc_buffer(&mut self) { let elem_size = mem::size_of::(); @@ -716,7 +694,7 @@ impl> RawVec { } } -unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec { +unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec { /// Frees the memory owned by the RawVec *without* trying to Drop its contents. fn drop(&mut self) { unsafe { self.dealloc_buffer(); } @@ -735,14 +713,23 @@ unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec { // all 4GB in user-space. e.g., PAE or x32 #[inline] -fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> { - if mem::size_of::() < 8 && alloc_size > core::isize::MAX as usize { +fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> { + if mem::size_of::() < 8 && alloc_size > ::core::isize::MAX as usize { Err(CollectionAllocErr::CapacityOverflow) } else { Ok(()) } } +/// Reduce error to just allocation error +#[inline] +fn handle_overflow_error(err: CollectionAllocErr) -> E { + match err { + CollectionAllocErr::AllocErr(e) => e, + CollectionAllocErr::CapacityOverflow => capacity_overflow(), + } +} + // One central function responsible for reporting capacity overflows. This'll // ensure that the code generation related to these panics is minimal as there's // only one location which panics rather than a bunch throughout the module. diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs index efc83ce726618..d3f62ffa16dff 100644 --- a/src/liballoc/str.rs +++ b/src/liballoc/str.rs @@ -586,6 +586,6 @@ impl str { /// ``` #[stable(feature = "str_box_extras", since = "1.20.0")] #[inline] -pub unsafe fn from_boxed_utf8_unchecked>(v: Box<[u8], A>) -> Box { +pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8], A>) -> Box { Box::map_raw(v, |p| p as *mut str) } diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs index 7a915f1e68f66..d3c84d25396a6 100644 --- a/src/liballoc/string.rs +++ b/src/liballoc/string.rs @@ -55,6 +55,7 @@ use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ptr; use core::str::{pattern::Pattern, lossy}; +use crate::alloc::AllocErr; use crate::borrow::{Cow, ToOwned}; use crate::collections::CollectionAllocErr; use crate::boxed::Box; @@ -953,7 +954,7 @@ impl String { /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.vec.try_reserve(additional) } @@ -991,7 +992,9 @@ impl String { /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve_exact(&mut self, additional: usize) + -> Result<(), CollectionAllocErr> + { self.vec.try_reserve_exact(additional) } diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index 92fe0834dd029..5e5887aad9f0e 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -68,6 +68,7 @@ use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ptr::{self, NonNull}; use core::slice::{self, SliceIndex}; +use crate::alloc::AllocErr; use 
crate::borrow::{ToOwned, Cow}; use crate::collections::CollectionAllocErr; use crate::boxed::Box; @@ -454,7 +455,7 @@ impl Vec { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { - self.buf.reserve(self.len, additional); + let Ok(()) = self.buf.reserve(self.len, additional); } /// Reserves the minimum capacity for exactly `additional` more elements to @@ -479,7 +480,7 @@ impl Vec { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve_exact(&mut self, additional: usize) { - self.buf.reserve_exact(self.len, additional); + let Ok(()) = self.buf.reserve_exact(self.len, additional); } /// Tries to reserve capacity for at least `additional` more elements to be inserted @@ -515,7 +516,7 @@ impl Vec { /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.buf.try_reserve(self.len, additional) } @@ -555,7 +556,9 @@ impl Vec { /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve_exact(&mut self, additional: usize) + -> Result<(), CollectionAllocErr> + { self.buf.try_reserve_exact(self.len, additional) } @@ -576,7 +579,7 @@ impl Vec { #[stable(feature = "rust1", since = "1.0.0")] pub fn shrink_to_fit(&mut self) { if self.capacity() != self.len { - self.buf.shrink_to_fit(self.len); + let Ok(()) = self.buf.shrink_to_fit(self.len); } } @@ -602,7 +605,7 @@ impl Vec { /// ``` #[unstable(feature = "shrink_to", reason = "new API", issue="56431")] pub fn shrink_to(&mut self, min_capacity: usize) { - self.buf.shrink_to_fit(cmp::max(self.len, min_capacity)); + let Ok(()) = self.buf.shrink_to_fit(cmp::max(self.len, min_capacity)); } /// Converts the vector into [`Box<[T]>`][owned slice]. 
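(Illustrative note, not part of the patch: the hunks above route `Vec`'s internal reservations through the fallible `RawVec` API, while `try_reserve` now reports the allocator's own error type. A minimal caller-side sketch, assuming the `CollectionAllocErr<AllocErr>` signature proposed in this series and the unstable `try_reserve`/`allocator_api` features:)

#![feature(allocator_api, try_reserve)]
use std::alloc::AllocErr;
use std::collections::CollectionAllocErr;

// Reserve space up front so an out-of-memory condition surfaces as an error
// value instead of aborting; the `AllocErr` payload comes from the allocator.
fn append_fallibly(dst: &mut Vec<u8>, src: &[u8]) -> Result<(), CollectionAllocErr<AllocErr>> {
    dst.try_reserve(src.len())?;
    dst.extend_from_slice(src);
    Ok(())
}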
@@ -2728,7 +2731,7 @@ impl Drain<'_, T> { unsafe fn move_tail(&mut self, extra_capacity: usize) { let vec = self.vec.as_mut(); let used_capacity = self.tail_start + self.tail_len; - vec.buf.reserve(used_capacity, extra_capacity); + let Ok(()) = vec.buf.reserve(used_capacity, extra_capacity); let new_tail_start = self.tail_start + extra_capacity; let src = vec.as_ptr().add(self.tail_start); diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs index 5a2fe2b244f55..7f2cc262fd277 100644 --- a/src/libstd/collections/hash/map.rs +++ b/src/libstd/collections/hash/map.rs @@ -4,6 +4,8 @@ use self::Entry::*; use hashbrown::hash_map as base; +use alloc::alloc::AllocErr; + use crate::borrow::Borrow; use crate::cell::Cell; use crate::collections::CollectionAllocErr; @@ -588,7 +590,7 @@ where /// ``` #[inline] #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.base .try_reserve(additional) .map_err(map_collection_alloc_err) @@ -2542,10 +2544,10 @@ fn map_entry<'a, K: 'a, V: 'a>(raw: base::RustcEntry<'a, K, V>) -> Entry<'a, K, } #[inline] -fn map_collection_alloc_err(err: hashbrown::CollectionAllocErr) -> CollectionAllocErr { +fn map_collection_alloc_err(err: hashbrown::CollectionAllocErr) -> CollectionAllocErr { match err { hashbrown::CollectionAllocErr::CapacityOverflow => CollectionAllocErr::CapacityOverflow, - hashbrown::CollectionAllocErr::AllocErr => CollectionAllocErr::AllocErr, + hashbrown::CollectionAllocErr::AllocErr => CollectionAllocErr::AllocErr(AllocErr), } } diff --git a/src/libstd/collections/hash/set.rs b/src/libstd/collections/hash/set.rs index 403914c070780..51b7c314be16b 100644 --- a/src/libstd/collections/hash/set.rs +++ b/src/libstd/collections/hash/set.rs @@ -1,3 +1,5 @@ +use alloc::alloc::AllocErr; + use crate::borrow::Borrow; use crate::collections::CollectionAllocErr; use crate::fmt; @@ -383,7 +385,7 @@ impl HashSet /// ``` #[inline] #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.map.try_reserve(additional) } From e7f48a6cf4b578e465e86c267a051659f2324b6e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 16 Jul 2018 02:57:21 -0400 Subject: [PATCH 6/9] box_free_worker: Pull out version that just borrows alloc --- src/liballoc/alloc.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 57ed2f4faa4bc..1978bcf223f3a 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -222,9 +222,14 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { #[cfg_attr(not(test), lang = "box_free")] #[inline] pub(crate) unsafe fn box_free(ptr: Unique, mut a: A) { + box_free_worker(ptr, &mut a) +} + +#[inline] +pub(crate) unsafe fn box_free_worker(ptr: Unique, a: &mut A) { let size = size_of_val(&*ptr.as_ptr()); let align = min_align_of_val(&*ptr.as_ptr()); - // We do not allocate for Box when T is ZST, so deallocation is also not necessary. + // We do not allocate for Box when T is ZST, so deallocation is also not necessary. 
if size != 0 { let layout = Layout::from_size_align_unchecked(size, align); a.dealloc(NonNull::from(ptr).cast(), layout); From 55098c0af577738285a56f658467b669ab050e01 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Dec 2017 13:25:39 -0500 Subject: [PATCH 7/9] Rc: Generalize for arbitrary allocators --- src/liballoc/rc.rs | 266 +++++++++++++++++++++++++++++---------------- 1 file changed, 173 insertions(+), 93 deletions(-) diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 47b4d80cc5e16..33da2a62283a1 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -1,7 +1,7 @@ //! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference //! Counted'. //! -//! The type [`Rc`][`Rc`] provides shared ownership of a value of type `T`, +//! The type [`Rc`][`Rc`] provides shared ownership of a value of type `T`, //! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new //! pointer to the same value in the heap. When the last [`Rc`] pointer to a //! given value is destroyed, the pointed-to value is also destroyed. @@ -29,9 +29,9 @@ //! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from //! children back to their parents. //! -//! `Rc` automatically dereferences to `T` (via the [`Deref`] trait), -//! so you can call `T`'s methods on a value of type [`Rc`][`Rc`]. To avoid name -//! clashes with `T`'s methods, the methods of [`Rc`][`Rc`] itself are associated +//! `Rc` automatically dereferences to `T` (via the [`Deref`] trait), +//! so you can call `T`'s methods on a value of type [`Rc`][`Rc`]. To avoid name +//! clashes with `T`'s methods, the methods of [`Rc`][`Rc`] itself are associated //! functions, called using function-like syntax: //! //! ``` @@ -41,13 +41,13 @@ //! Rc::downgrade(&my_rc); //! ``` //! -//! [`Weak`][`Weak`] does not auto-dereference to `T`, because the value may have +//! [`Weak`][`Weak`] does not auto-dereference to `T`, because the value may have //! already been destroyed. //! //! # Cloning references //! //! Creating a new reference from an existing reference counted pointer is done using the -//! `Clone` trait implemented for [`Rc`][`Rc`] and [`Weak`][`Weak`]. +//! `Clone` trait implemented for [`Rc`][`Rc`] and [`Weak`][`Weak`]. //! //! ``` //! 
use std::rc::Rc; @@ -242,18 +242,20 @@ use core::marker::{self, Unpin, Unsize, PhantomData}; use core::mem::{self, align_of, align_of_val, forget, size_of_val}; use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn}; use core::pin::Pin; -use core::ptr::{self, NonNull}; +use core::ptr::{self, NonNull, Unique}; use core::slice::from_raw_parts_mut; use core::convert::From; use core::usize; -use crate::alloc::{Global, Alloc, Layout, box_free, handle_alloc_error}; +use crate::abort_adapter::AbortAdapter; +use crate::alloc::{Global, Alloc, Layout, box_free_worker}; use crate::string::String; use crate::vec::Vec; -struct RcBox { +struct RcBox> { strong: Cell, weak: Cell, + alloc: A, value: T, } @@ -270,18 +272,18 @@ struct RcBox { /// [get_mut]: #method.get_mut #[cfg_attr(not(test), lang = "rc")] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Rc { - ptr: NonNull>, +pub struct Rc> { + ptr: NonNull>, phantom: PhantomData, } #[stable(feature = "rust1", since = "1.0.0")] -impl !marker::Send for Rc {} +impl !marker::Send for Rc {} #[stable(feature = "rust1", since = "1.0.0")] -impl !marker::Sync for Rc {} +impl !marker::Sync for Rc {} #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized> for Rc {} +impl, U: ?Sized, A: Alloc> CoerceUnsized> for Rc {} #[unstable(feature = "dispatch_from_dyn", issue = "0")] impl, U: ?Sized> DispatchFromDyn> for Rc {} @@ -296,20 +298,34 @@ impl Rc { /// /// let five = Rc::new(5); /// ``` + #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - Rc { - // there is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - ptr: Box::into_raw_non_null(box RcBox { + let Ok(a) = Self::new_in(value, Default::default()); + a + } +} + +impl Rc { + /// Constructs a new `Rc` using the given allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(value: T, alloc: A) -> Result, A::Err> { + // there is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. + let x: Box<_, A> = Box::new_in(mem::MaybeUninit::uninit(), alloc)?; + let (mut unique, alloc_after) = Box::into_both_non_null(x); + let ptr = unsafe { + *unique.as_mut().as_mut_ptr() = RcBox { strong: Cell::new(1), weak: Cell::new(1), + alloc: alloc_after, value, - }), - phantom: PhantomData, - } + }; + NonNull::new_unchecked(unique.as_ptr() as _) + }; + Ok(Rc { ptr: ptr, phantom: PhantomData }) } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then @@ -363,29 +379,6 @@ impl Rc { } impl Rc { - /// Consumes the `Rc`, returning the wrapped pointer. - /// - /// To avoid a memory leak the pointer must be converted back to an `Rc` using - /// [`Rc::from_raw`][from_raw]. - /// - /// [from_raw]: struct.Rc.html#method.from_raw - /// - /// # Examples - /// - /// ``` - /// use std::rc::Rc; - /// - /// let x = Rc::new(10); - /// let x_ptr = Rc::into_raw(x); - /// assert_eq!(unsafe { *x_ptr }, 10); - /// ``` - #[stable(feature = "rc_raw", since = "1.17.0")] - pub fn into_raw(this: Self) -> *const T { - let ptr: *const T = &*this; - mem::forget(this); - ptr - } - /// Constructs an `Rc` from a raw pointer. 
/// /// The raw pointer must have been previously returned by a call to a @@ -416,10 +409,42 @@ impl Rc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { + Self::from_raw_in(ptr) + } +} + +impl Rc { + /// Consumes the `Rc`, returning the wrapped pointer. + /// + /// To avoid a memory leak the pointer must be converted back to an `Rc` using + /// [`Rc::from_raw`][from_raw]. + /// + /// [from_raw]: struct.Rc.html#method.from_raw + /// + /// # Examples + /// + /// ``` + /// use std::rc::Rc; + /// + /// let x = Rc::new(10); + /// let x_ptr = Rc::into_raw(x); + /// assert_eq!(unsafe { *x_ptr }, 10); + /// ``` + #[stable(feature = "rc_raw", since = "1.17.0")] + pub fn into_raw(this: Self) -> *const T { + let ptr: *const T = &*this; + mem::forget(this); + ptr + } + + /// Allocator-polymorphic version of `from_raw_in`. Make sure you use the + /// correct allocator! + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T) -> Self { let offset = data_offset(ptr); // Reverse the offset to find the original RcBox. - let fake_ptr = ptr as *mut RcBox; + let fake_ptr = ptr as *mut RcBox; let rc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); Rc { @@ -463,7 +488,7 @@ impl Rc { /// let weak_five = Rc::downgrade(&five); /// ``` #[stable(feature = "rc_weak", since = "1.4.0")] - pub fn downgrade(this: &Self) -> Weak { + pub fn downgrade(this: &Self) -> Weak { this.inc_weak(); // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr)); @@ -656,7 +681,7 @@ impl Rc { } } -impl Rc { +impl Rc { #[inline] #[stable(feature = "rc_downcast", since = "1.29.0")] /// Attempt to downcast the `Rc` to a concrete type. @@ -679,59 +704,92 @@ impl Rc { /// print_if_string(Rc::new(0i8)); /// } /// ``` - pub fn downcast(self) -> Result, Rc> { + pub fn downcast(self) -> Result, Rc> { if (*self).is::() { - let ptr = self.ptr.cast::>(); - forget(self); - Ok(Rc { ptr, phantom: PhantomData }) + // avoid the pointer arithmetic in from_raw + unsafe { + let raw: *const RcBox = self.ptr.as_ptr(); + forget(self); + Ok(Rc { + ptr: NonNull::new_unchecked(raw as *const RcBox as *mut _), + phantom: PhantomData, + }) + } } else { Err(self) } } } - impl Rc { + fn from_box(v: Box) -> Self { + let Ok(a) = Self::from_box_in(v, Default::default()); + a + } +} + +impl Rc { // Allocates an `RcBox` with sufficient space for an unsized value - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox { + unsafe fn allocate_for_ptr(ptr: *const T, mut alloc: A) -> Result<*mut RcBox, A::Err> { // Calculate layout using the given value. // Previously, layout was calculated on the expression // `&*(ptr as *const RcBox)`, but this created a misaligned // reference (see #54908). 
- let layout = Layout::new::>() + let layout = Layout::new::>() .extend(Layout::for_value(&*ptr)).unwrap().0 .pad_to_align().unwrap(); - let mem = Global.alloc(layout) - .unwrap_or_else(|_| handle_alloc_error(layout)); + let mem = alloc.alloc(layout)?; // Initialize the RcBox - let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox; + let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox; debug_assert_eq!(Layout::for_value(&*inner), layout); - ptr::write(&mut (*inner).strong, Cell::new(1)); - ptr::write(&mut (*inner).weak, Cell::new(1)); + ptr::write(inner as *mut RcBox<(), A>, RcBox { + strong: Cell::new(1), + weak: Cell::new(1), + alloc, + value: (), + }); - inner + Ok(inner) } - fn from_box(v: Box) -> Rc { - unsafe { - let box_unique = Box::into_unique(v); - let bptr = box_unique.as_ptr(); + /// `v` must be heap-allocated + unsafe fn from_box_raw(box_unique: Unique, alloc: A) -> Result, A::Err> { + let bptr = box_unique.as_ptr(); + let value_size = size_of_val(&*bptr); + let ptr = Self::allocate_for_ptr(bptr, alloc)?; - let value_size = size_of_val(&*bptr); - let ptr = Self::allocate_for_ptr(bptr); + // Copy value as bytes + ptr::copy_nonoverlapping( + bptr as *const T as *const u8, + &mut (*ptr).value as *mut _ as *mut u8, + value_size); - // Copy value as bytes - ptr::copy_nonoverlapping( - bptr as *const T as *const u8, - &mut (*ptr).value as *mut _ as *mut u8, - value_size); + Ok(Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }) + } + /// Copy from box, using potentially different allocator + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn from_box_in(v: Box, alloc: A) -> Result { + let (u, mut a) = Box::into_both_unique(v); + unsafe { + let rc = Self::from_box_raw(u, alloc)?; // Free the allocation without dropping its contents - box_free::<_, Global>(box_unique, Global); + box_free_worker(u, &mut a); + Ok(rc) + } + } - Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + /// Copy from box, using its own allocator + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn from_box_in_same(v: Box) -> Result { + let (u, a) = Box::into_both_unique(v); + unsafe { + let rc = Self::from_box_raw(u, a)?; + // Free the allocation without dropping its contents + box_free_worker(u, &mut (*rc.ptr.as_ptr()).alloc); + Ok(rc) } } } @@ -751,7 +809,7 @@ impl Rc<[T]> { // Unsafe because the caller must either take ownership or bind `T: Copy` unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { let v_ptr = v as *const [T]; - let ptr = Self::allocate_for_ptr(v_ptr); + let Ok(ptr) = Self::allocate_for_ptr(v_ptr, Default::default()); ptr::copy_nonoverlapping( v.as_ptr(), @@ -792,7 +850,7 @@ impl RcFromSlice for Rc<[T]> { unsafe { let v_ptr = v as *const [T]; - let ptr = Self::allocate_for_ptr(v_ptr); + let Ok(ptr) = Self::allocate_for_ptr(v_ptr, Default::default()); let mem = ptr as *mut _ as *mut u8; let layout = Layout::for_value(&*ptr); @@ -828,7 +886,7 @@ impl RcFromSlice for Rc<[T]> { } #[stable(feature = "rust1", since = "1.0.0")] -impl Deref for Rc { +impl Deref for Rc { type Target = T; #[inline(always)] @@ -841,7 +899,7 @@ impl Deref for Rc { impl Receiver for Rc {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { +unsafe impl<#[may_dangle] T: ?Sized, A: Alloc> Drop for Rc { /// Drops the `Rc`. /// /// This will decrement the strong reference count. 
If the strong reference @@ -1237,13 +1295,13 @@ impl From> for Rc<[T]> { /// [`Option`]: ../../std/option/enum.Option.html /// [`None`]: ../../std/option/enum.Option.html#variant.None #[stable(feature = "rc_weak", since = "1.4.0")] -pub struct Weak { +pub struct Weak> { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need // to allocate space on the heap. That's not a value a real pointer // will ever have because RcBox has alignment at least 2. - ptr: NonNull>, + ptr: NonNull>, } #[stable(feature = "rc_weak", since = "1.4.0")] @@ -1274,8 +1332,16 @@ impl Weak { /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { + Self::new_in() + } +} + +impl Weak { + /// The same as `new`, but separate for stability reasons + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in() -> Weak { Weak { - ptr: NonNull::new(usize::MAX as *mut RcBox).expect("MAX is not 0"), + ptr: NonNull::new(usize::MAX as *mut RcBox).expect("MAX is not 0"), } } @@ -1314,7 +1380,7 @@ impl Weak { None => ptr::null(), Some(inner) => { let offset = data_offset_sized::(); - let ptr = inner as *const RcBox; + let ptr = inner as *const RcBox; // Note: while the pointer we create may already point to dropped value, the // allocation still lives (it must hold the weak point as long as we are alive). // Therefore, the offset is OK to do, it won't get out of the allocation. @@ -1404,11 +1470,11 @@ impl Weak { #[unstable(feature = "weak_into_raw", issue = "60728")] pub unsafe fn from_raw(ptr: *const T) -> Self { if ptr.is_null() { - Self::new() + Self::new_in() } else { // See Rc::from_raw for details let offset = data_offset(ptr); - let fake_ptr = ptr as *mut RcBox; + let fake_ptr = ptr as *mut RcBox; let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"), @@ -1459,6 +1525,20 @@ impl Weak { Some(Rc { ptr: self.ptr, phantom: PhantomData }) } } +} + +impl Weak { + /// The same as `upgrade`, but separate for stability reasons + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn upgrade_in(&self) -> Option> { + let inner = self.inner()?; + if inner.strong() == 0 { + None + } else { + inner.inc_strong(); + Some(Rc { ptr: self.ptr, phantom: PhantomData }) + } + } /// Gets the number of strong (`Rc`) pointers pointing to this value. /// @@ -1495,7 +1575,7 @@ impl Weak { /// Returns `None` when the pointer is dangling and there is no allocated `RcBox` /// (i.e., when this `Weak` was created by `Weak::new`). #[inline] - fn inner(&self) -> Option<&RcBox> { + fn inner(&self) -> Option<&RcBox> { if is_dangling(self.ptr) { None } else { @@ -1551,7 +1631,7 @@ impl Weak { } #[stable(feature = "rc_weak", since = "1.4.0")] -impl Drop for Weak { +impl Drop for Weak { /// Drops the `Weak` pointer. /// /// # Examples @@ -1650,8 +1730,8 @@ impl Default for Weak { // clone these much in Rust thanks to ownership and move-semantics. 
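// Illustrative sketch (not part of this patch): with the `new_in` constructor
// added above, an `Rc` can be placed in an explicit allocator and allocation
// failure handled by the caller rather than aborting. `Global` is used only as
// an example allocator; the exact paths and bounds are assumptions based on
// this series' `Alloc`-based API.
#![feature(allocator_api)]
use std::alloc::Global;
use std::rc::Rc;

fn shared_config(bytes: Vec<u8>) -> Option<Rc<Vec<u8>, Global>> {
    // `Rc::new_in` returns `Result<Rc<T, A>, A::Err>`; map OOM to `None` here.
    Rc::new_in(bytes, Global).ok()
}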
#[doc(hidden)] -trait RcBoxPtr { - fn inner(&self) -> &RcBox; +trait RcBoxPtr { + fn inner(&self) -> &RcBox; #[inline] fn strong(&self) -> usize { @@ -1698,18 +1778,18 @@ trait RcBoxPtr { } } -impl RcBoxPtr for Rc { +impl RcBoxPtr for Rc { #[inline(always)] - fn inner(&self) -> &RcBox { + fn inner(&self) -> &RcBox { unsafe { self.ptr.as_ref() } } } -impl RcBoxPtr for RcBox { +impl RcBoxPtr for RcBox { #[inline(always)] - fn inner(&self) -> &RcBox { + fn inner(&self) -> &RcBox { self } } From 9289a624d77c35f320e5855e6f47d0cc806d47ff Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 16 Jul 2018 02:57:54 -0400 Subject: [PATCH 8/9] Arc: Generalize for arbitrary allocators --- src/liballoc/sync.rs | 384 ++++++++++++++++++++++++++----------------- 1 file changed, 235 insertions(+), 149 deletions(-) diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index 97d655e8456ab..1660b7b24f143 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -2,7 +2,7 @@ //! Thread-safe reference-counting pointers. //! -//! See the [`Arc`][arc] documentation for more details. +//! See the [`Arc`][arc] documentation for more details. //! //! [arc]: struct.Arc.html @@ -16,14 +16,15 @@ use core::intrinsics::abort; use core::mem::{self, align_of, align_of_val, size_of_val}; use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn}; use core::pin::Pin; -use core::ptr::{self, NonNull}; +use core::ptr::{self, NonNull, Unique}; use core::marker::{Unpin, Unsize, PhantomData}; use core::hash::{Hash, Hasher}; use core::{isize, usize}; use core::convert::From; use core::slice::from_raw_parts_mut; -use crate::alloc::{Global, Alloc, Layout, box_free, handle_alloc_error}; +use crate::abort_adapter::AbortAdapter; +use crate::alloc::{Global, Alloc, Layout, box_free_worker}; use crate::boxed::Box; use crate::rc::is_dangling; use crate::string::String; @@ -38,7 +39,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically /// Reference Counted'. /// -/// The type `Arc` provides shared ownership of a value of type `T`, +/// The type `Arc` provides shared ownership of a value of type `T`, /// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces /// a new `Arc` instance, which points to the same value on the heap as the /// source `Arc`, while increasing a reference count. When the last `Arc` @@ -59,21 +60,21 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// are not sharing reference-counted values between threads, consider using /// [`Rc`] for lower overhead. [`Rc`] is a safe default, because the /// compiler will catch any attempt to send an [`Rc`] between threads. -/// However, a library might choose `Arc` in order to give library consumers +/// However, a library might choose `Arc` in order to give library consumers /// more flexibility. /// -/// `Arc` will implement [`Send`] and [`Sync`] as long as the `T` implements +/// `Arc` will implement [`Send`] and [`Sync`] as long as the `T` implements /// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an -/// `Arc` to make it thread-safe? This may be a bit counter-intuitive at -/// first: after all, isn't the point of `Arc` thread safety? The key is -/// this: `Arc` makes it thread safe to have multiple ownership of the same +/// `Arc` to make it thread-safe? This may be a bit counter-intuitive at +/// first: after all, isn't the point of `Arc` thread safety? 
The key is +/// this: `Arc` makes it thread safe to have multiple ownership of the same /// data, but it doesn't add thread safety to its data. Consider -/// `Arc<`[`RefCell`]`>`. [`RefCell`] isn't [`Sync`], and if `Arc` was always +/// `Arc<`[`RefCell`]`>`. [`RefCell`] isn't [`Sync`], and if `Arc` was always /// [`Send`], `Arc<`[`RefCell`]`>` would be as well. But then we'd have a problem: /// [`RefCell`] is not thread safe; it keeps track of the borrowing count using /// non-atomic operations. /// -/// In the end, this means that you may need to pair `Arc` with some sort of +/// In the end, this means that you may need to pair `Arc` with some sort of /// [`std::sync`] type, usually [`Mutex`][mutex]. /// /// ## Breaking cycles with `Weak` @@ -91,7 +92,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// # Cloning references /// /// Creating a new reference from an existing reference counted pointer is done using the -/// `Clone` trait implemented for [`Arc`][arc] and [`Weak`][weak]. +/// `Clone` trait implemented for [`Arc`][arc] and [`Weak`][weak]. /// /// ``` /// use std::sync::Arc; @@ -108,9 +109,9 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// /// ## `Deref` behavior /// -/// `Arc` automatically dereferences to `T` (via the [`Deref`][deref] trait), -/// so you can call `T`'s methods on a value of type `Arc`. To avoid name -/// clashes with `T`'s methods, the methods of `Arc` itself are associated +/// `Arc` automatically dereferences to `T` (via the [`Deref`][deref] trait), +/// so you can call `T`'s methods on a value of type `Arc`. To avoid name +/// clashes with `T`'s methods, the methods of `Arc` itself are associated /// functions, called using function-like syntax: /// /// ``` @@ -190,21 +191,21 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// [rc_examples]: ../../std/rc/index.html#examples #[cfg_attr(not(test), lang = "arc")] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Arc { - ptr: NonNull>, +pub struct Arc> { + ptr: NonNull>, phantom: PhantomData, } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for Arc {} +unsafe impl Send for Arc {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for Arc {} +unsafe impl Sync for Arc {} #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized> for Arc {} +impl, U: ?Sized, A: Alloc> CoerceUnsized> for Arc {} #[unstable(feature = "dispatch_from_dyn", issue = "0")] -impl, U: ?Sized> DispatchFromDyn> for Arc {} +impl, U: ?Sized, A: Alloc> DispatchFromDyn> for Arc {} /// `Weak` is a version of [`Arc`] that holds a non-owning reference to the /// managed value. The value is accessed by calling [`upgrade`] on the `Weak` @@ -230,33 +231,33 @@ impl, U: ?Sized> DispatchFromDyn> for Arc {} /// [`Option`]: ../../std/option/enum.Option.html /// [`None`]: ../../std/option/enum.Option.html#variant.None #[stable(feature = "arc_weak", since = "1.4.0")] -pub struct Weak { +pub struct Weak> { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need // to allocate space on the heap. That's not a value a real pointer // will ever have because RcBox has alignment at least 2. 
- ptr: NonNull>, + ptr: NonNull>, } #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl Send for Weak {} +unsafe impl Send for Weak {} #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl Sync for Weak {} +unsafe impl Sync for Weak {} #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized> for Weak {} +impl, U: ?Sized, A: Alloc> CoerceUnsized> for Weak {} #[unstable(feature = "dispatch_from_dyn", issue = "0")] -impl, U: ?Sized> DispatchFromDyn> for Weak {} +impl, U: ?Sized, A: Alloc> DispatchFromDyn> for Weak {} #[stable(feature = "arc_weak", since = "1.4.0")] -impl fmt::Debug for Weak { +impl fmt::Debug for Weak { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "(Weak)") } } -struct ArcInner { +struct ArcInner> { strong: atomic::AtomicUsize, // the value usize::MAX acts as a sentinel for temporarily "locking" the @@ -264,14 +265,16 @@ struct ArcInner { // to avoid races in `make_mut` and `get_mut`. weak: atomic::AtomicUsize, + alloc: A, + data: T, } -unsafe impl Send for ArcInner {} -unsafe impl Sync for ArcInner {} +unsafe impl Send for ArcInner {} +unsafe impl Sync for ArcInner {} impl Arc { - /// Constructs a new `Arc`. + /// Constructs a new `Arc`. /// /// # Examples /// @@ -283,14 +286,29 @@ impl Arc { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(data: T) -> Arc { + let Ok(a) = Self::new_in(data, Default::default()); + a + } +} + +impl Arc { + /// Constructs a new `Arc` using the given allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(data: T, alloc: A) -> Result, A::Err> { // Start the weak pointer count as 1 which is the weak pointer that's // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = box ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, + let x: Box<_, A> = Box::new_in(mem::MaybeUninit::uninit(), alloc)?; + let (mut unique, alloc_after) = Box::into_both_non_null(x); + let ptr = unsafe { + *unique.as_mut().as_mut_ptr() = ArcInner { + strong: atomic::AtomicUsize::new(1), + weak: atomic::AtomicUsize::new(1), + alloc: alloc_after, + data, + }; + NonNull::new_unchecked(unique.as_ptr() as _) }; - Arc { ptr: Box::into_raw_non_null(x), phantom: PhantomData } + Ok(Arc { ptr: ptr, phantom: PhantomData }) } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then @@ -344,29 +362,6 @@ impl Arc { } impl Arc { - /// Consumes the `Arc`, returning the wrapped pointer. - /// - /// To avoid a memory leak the pointer must be converted back to an `Arc` using - /// [`Arc::from_raw`][from_raw]. - /// - /// [from_raw]: struct.Arc.html#method.from_raw - /// - /// # Examples - /// - /// ``` - /// use std::sync::Arc; - /// - /// let x = Arc::new(10); - /// let x_ptr = Arc::into_raw(x); - /// assert_eq!(unsafe { *x_ptr }, 10); - /// ``` - #[stable(feature = "rc_raw", since = "1.17.0")] - pub fn into_raw(this: Self) -> *const T { - let ptr: *const T = &*this; - mem::forget(this); - ptr - } - /// Constructs an `Arc` from a raw pointer. /// /// The raw pointer must have been previously returned by a call to a @@ -397,10 +392,42 @@ impl Arc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { + Self::from_raw_in(ptr) + } +} + +impl Arc { + /// Consumes the `Arc`, returning the wrapped pointer. + /// + /// To avoid a memory leak the pointer must be converted back to an `Arc` using + /// [`Arc::from_raw`][from_raw]. 
+ /// + /// [from_raw]: struct.Arc.html#method.from_raw + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let x = Arc::new(10); + /// let x_ptr = Arc::into_raw(x); + /// assert_eq!(unsafe { *x_ptr }, 10); + /// ``` + #[stable(feature = "rc_raw", since = "1.17.0")] + pub fn into_raw(this: Self) -> *const T { + let ptr: *const T = &*this; + mem::forget(this); + ptr + } + + /// Allocator-polymorphic version of `from_raw_in`. Make sure you use the + /// correct allocator! + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T) -> Self { let offset = data_offset(ptr); // Reverse the offset to find the original ArcInner. - let fake_ptr = ptr as *mut ArcInner; + let fake_ptr = ptr as *mut ArcInner; let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); Arc { @@ -444,7 +471,7 @@ impl Arc { /// let weak_five = Arc::downgrade(&five); /// ``` #[stable(feature = "arc_weak", since = "1.4.0")] - pub fn downgrade(this: &Self) -> Weak { + pub fn downgrade(this: &Self) -> Weak { // This Relaxed is OK because we're checking the value in the CAS // below. let mut cur = this.inner().weak.load(Relaxed); @@ -532,7 +559,7 @@ impl Arc { } #[inline] - fn inner(&self) -> &ArcInner { + fn inner(&self) -> &ArcInner { // This unsafety is ok because while this arc is alive we're guaranteed // that the inner pointer is valid. Furthermore, we know that the // `ArcInner` structure itself is `Sync` because the inner data is @@ -550,7 +577,15 @@ impl Arc { if self.inner().weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) + + // The logic here is a little tricky because we're using an Alloc + // stored in the data to free the container. We first move the Alloc + // out of the container and then free the container. The Alloc itself + // will be dropped when it goes out of scope in this function. + + let mut alloc = ptr::read(&(*self.ptr.as_ptr()).alloc); + + alloc.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } @@ -577,47 +612,75 @@ impl Arc { } impl Arc { - // Allocates an `ArcInner` with sufficient space for an unsized value - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner { + fn from_box(v: Box) -> Arc { + let Ok(a) = Self::from_box_in(v, Default::default()); + a + } +} + +impl Arc { + // Allocates an `ArcInner` with sufficient space for an unsized value + unsafe fn allocate_for_ptr(ptr: *const T, mut alloc: A) -> Result<*mut ArcInner, A::Err> { // Calculate layout using the given value. // Previously, layout was calculated on the expression // `&*(ptr as *const ArcInner)`, but this created a misaligned // reference (see #54908). 
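[Editor's aside] The header-plus-payload layout computation referenced here can be sketched with today's stable `Layout` API. Note that on current stable `pad_to_align` returns a `Layout` directly rather than a `Result` as in the hunk below, and the field types are stand-ins rather than the real `ArcInner`:

```
use std::alloc::Layout;

fn main() {
    // Header (stand-in for the two atomic counters) followed by a payload,
    // padded so the total size is a multiple of the combined alignment.
    let header = Layout::new::<(u64, u64)>();
    let payload = Layout::new::<[u8; 5]>();

    let (combined, payload_offset) = header.extend(payload).unwrap();
    let combined = combined.pad_to_align();

    assert_eq!(payload_offset, 16);
    assert_eq!(combined.align(), 8);
    assert_eq!(combined.size(), 24); // 16 + 5, rounded up to the alignment
}
```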
- let layout = Layout::new::>() + let layout = Layout::new::>() .extend(Layout::for_value(&*ptr)).unwrap().0 .pad_to_align().unwrap(); - let mem = Global.alloc(layout) - .unwrap_or_else(|_| handle_alloc_error(layout)); + let mem = alloc.alloc(layout)?; // Initialize the ArcInner - let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner; + let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner; debug_assert_eq!(Layout::for_value(&*inner), layout); - ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); - ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1)); + ptr::write(inner as *mut ArcInner<(), A>, ArcInner { + strong: atomic::AtomicUsize::new(1), + weak: atomic::AtomicUsize::new(1), + alloc, + data: (), + }); - inner + Ok(inner) } - fn from_box(v: Box) -> Arc { - unsafe { - let box_unique = Box::into_unique(v); - let bptr = box_unique.as_ptr(); + /// `v` must be heap-allocated + unsafe fn from_box_raw(box_unique: Unique, alloc: A) -> Result { + let bptr = box_unique.as_ptr(); + let value_size = size_of_val(&*bptr); + let ptr = Self::allocate_for_ptr(bptr, alloc)?; - let value_size = size_of_val(&*bptr); - let ptr = Self::allocate_for_ptr(bptr); + // Copy value as bytes + ptr::copy_nonoverlapping( + bptr as *const T as *const u8, + &mut (*ptr).data as *mut _ as *mut u8, + value_size); - // Copy value as bytes - ptr::copy_nonoverlapping( - bptr as *const T as *const u8, - &mut (*ptr).data as *mut _ as *mut u8, - value_size); + Ok(Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }) + } + /// Copy from box, using potentially different allocator + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn from_box_in(v: Box, alloc: A) -> Result { + let (u, mut a) = Box::into_both_unique(v); + unsafe { + let arc = Self::from_box_raw(u, alloc)?; // Free the allocation without dropping its contents - box_free::<_, Global>(box_unique, Global); + box_free_worker(u, &mut a); + Ok(arc) + } + } - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + /// Copy from box, using its own allocator + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn from_box_in_same(v: Box) -> Result { + let (u, a) = Box::into_both_unique(v); + unsafe { + let arc = Self::from_box_raw(u, a)?; + // Free the allocation without dropping its contents + box_free_worker(u, &mut (*arc.ptr.as_ptr()).alloc); + Ok(arc) } } } @@ -632,54 +695,62 @@ unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { } impl Arc<[T]> { - // Copy elements from slice into newly allocated Arc<[T]> + unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { + let Ok(a) = Self::copy_from_slice_in(v, Default::default()); + a + } +} + +impl Arc<[T], A> { + // Copy elements from slice into newly allocated Arc<[T], A> // // Unsafe because the caller must either take ownership or bind `T: Copy` - unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { + unsafe fn copy_from_slice_in(v: &[T], alloc: A) -> Result, A::Err> { let v_ptr = v as *const [T]; - let ptr = Self::allocate_for_ptr(v_ptr); + let ptr = Self::allocate_for_ptr(v_ptr, alloc)?; ptr::copy_nonoverlapping( v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len()); - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Ok(Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }) } } // Specialization trait used for From<&[T]> -trait ArcFromSlice { - fn from_slice(slice: &[T]) -> Self; +trait ArcFromSlice: Sized { + fn from_slice(slice: &[T], alloc: A) -> Result; } -impl 
ArcFromSlice for Arc<[T]> { +impl ArcFromSlice for Arc<[T], A> { #[inline] - default fn from_slice(v: &[T]) -> Self { + default fn from_slice(v: &[T], alloc: A) -> Result { // Panic guard while cloning T elements. // In the event of a panic, elements that have been written // into the new ArcInner will be dropped, then the memory freed. - struct Guard { + struct Guard<'a, T, A: 'a + Alloc> { mem: NonNull, elems: *mut T, layout: Layout, n_elems: usize, + alloc: &'a mut A, } - impl Drop for Guard { + impl<'a, T, A: 'a + Alloc> Drop for Guard<'a, T, A> { fn drop(&mut self) { unsafe { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem.cast(), self.layout.clone()); + self.alloc.dealloc(self.mem.cast(), self.layout.clone()); } } } unsafe { let v_ptr = v as *const [T]; - let ptr = Self::allocate_for_ptr(v_ptr); + let ptr = Self::allocate_for_ptr(v_ptr, alloc)?; let mem = ptr as *mut _ as *mut u8; let layout = Layout::for_value(&*ptr); @@ -692,6 +763,7 @@ impl ArcFromSlice for Arc<[T]> { elems: elems, layout: layout, n_elems: 0, + alloc: &mut (*ptr).alloc, }; for (i, item) in v.iter().enumerate() { @@ -702,20 +774,20 @@ impl ArcFromSlice for Arc<[T]> { // All clear. Forget the guard so it doesn't free the new ArcInner. mem::forget(guard); - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Ok(Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }) } } } -impl ArcFromSlice for Arc<[T]> { +impl ArcFromSlice for Arc<[T], A> { #[inline] - fn from_slice(v: &[T]) -> Self { - unsafe { Arc::copy_from_slice(v) } + fn from_slice(v: &[T], alloc: A) -> Result { + unsafe { Arc::copy_from_slice_in(v, alloc) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Arc { +impl Clone for Arc { /// Makes a clone of the `Arc` pointer. /// /// This creates another pointer to the same inner value, increasing the @@ -731,7 +803,7 @@ impl Clone for Arc { /// let _ = Arc::clone(&five); /// ``` #[inline] - fn clone(&self) -> Arc { + fn clone(&self) -> Arc { // Using a relaxed ordering is alright here, as knowledge of the // original reference prevents other threads from erroneously deleting // the object. @@ -765,7 +837,7 @@ impl Clone for Arc { } #[stable(feature = "rust1", since = "1.0.0")] -impl Deref for Arc { +impl Deref for Arc { type Target = T; #[inline] @@ -862,7 +934,7 @@ impl Arc { } } -impl Arc { +impl Arc { /// Returns a mutable reference to the inner value, if there are /// no other `Arc` or [`Weak`][weak] pointers to the same value. /// @@ -936,7 +1008,7 @@ impl Arc { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { +unsafe impl<#[may_dangle] T: ?Sized, A: Alloc> Drop for Arc { /// Drops the `Arc`. /// /// This will decrement the strong reference count. 
If the strong reference @@ -1063,8 +1135,16 @@ impl Weak { /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { + Weak::new_in() + } +} + +impl Weak { + /// The same as `new`, but separate for stability reasons + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in() -> Weak { Weak { - ptr: NonNull::new(usize::MAX as *mut ArcInner).expect("MAX is not 0"), + ptr: NonNull::new(usize::MAX as *mut ArcInner).expect("MAX is not 0"), } } @@ -1103,7 +1183,7 @@ impl Weak { None => ptr::null(), Some(inner) => { let offset = data_offset_sized::(); - let ptr = inner as *const ArcInner; + let ptr = inner as *const ArcInner; // Note: while the pointer we create may already point to dropped value, the // allocation still lives (it must hold the weak point as long as we are alive). // Therefore, the offset is OK to do, it won't get out of the allocation. @@ -1194,11 +1274,11 @@ impl Weak { #[unstable(feature = "weak_into_raw", issue = "60728")] pub unsafe fn from_raw(ptr: *const T) -> Self { if ptr.is_null() { - Self::new() + Self::new_in() } else { // See Arc::from_raw for details let offset = data_offset(ptr); - let fake_ptr = ptr as *mut ArcInner; + let fake_ptr = ptr as *mut ArcInner; let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"), @@ -1207,7 +1287,7 @@ impl Weak { } } -impl Weak { +impl Weak { /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending /// the lifetime of the value if successful. /// @@ -1235,7 +1315,7 @@ impl Weak { /// assert!(weak_five.upgrade().is_none()); /// ``` #[stable(feature = "arc_weak", since = "1.4.0")] - pub fn upgrade(&self) -> Option> { + pub fn upgrade(&self) -> Option> { // We use a CAS loop to increment the strong count instead of a // fetch_add because once the count hits 0 it must never be above 0. let inner = self.inner()?; @@ -1328,7 +1408,7 @@ impl Weak { /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`, /// (i.e., when this `Weak` was created by `Weak::new`). #[inline] - fn inner(&self) -> Option<&ArcInner> { + fn inner(&self) -> Option<&ArcInner> { if is_dangling(self.ptr) { None } else { @@ -1385,7 +1465,7 @@ impl Weak { } #[stable(feature = "arc_weak", since = "1.4.0")] -impl Clone for Weak { +impl Clone for Weak { /// Makes a clone of the `Weak` pointer that points to the same value. /// /// # Examples @@ -1398,7 +1478,7 @@ impl Clone for Weak { /// let _ = Weak::clone(&weak_five); /// ``` #[inline] - fn clone(&self) -> Weak { + fn clone(&self) -> Weak { let inner = if let Some(inner) = self.inner() { inner } else { @@ -1444,7 +1524,7 @@ impl Default for Weak { } #[stable(feature = "arc_weak", since = "1.4.0")] -impl Drop for Weak { +impl Drop for Weak { /// Drops the `Weak` pointer. /// /// # Examples @@ -1487,26 +1567,32 @@ impl Drop for Weak { if inner.weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); unsafe { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) + // The logic here is a little tricky because we're using an Alloc + // stored in the data to free the container. We first move the Alloc + // out of the container and then free the container. The Alloc itself + // will be dropped when it goes out of scope in this function. 
+ + let mut alloc = ptr::read(&(*self.ptr.as_ptr()).alloc); + alloc.dealloc(self.ptr.cast(), Layout::for_value(&*self.ptr.as_ptr())) } } } } #[stable(feature = "rust1", since = "1.0.0")] -trait ArcEqIdent { - fn eq(&self, other: &Arc) -> bool; - fn ne(&self, other: &Arc) -> bool; +trait ArcEqIdent { + fn eq(&self, other: &Arc) -> bool; + fn ne(&self, other: &Arc) -> bool; } #[stable(feature = "rust1", since = "1.0.0")] -impl ArcEqIdent for Arc { +impl ArcEqIdent for Arc { #[inline] - default fn eq(&self, other: &Arc) -> bool { + default fn eq(&self, other: &Self) -> bool { **self == **other } #[inline] - default fn ne(&self, other: &Arc) -> bool { + default fn ne(&self, other: &Self) -> bool { **self != **other } } @@ -1517,20 +1603,20 @@ impl ArcEqIdent for Arc { /// cost to pay off more easily. It's also more likely to have two `Arc` clones, that point to /// the same value, than two `&T`s. #[stable(feature = "rust1", since = "1.0.0")] -impl ArcEqIdent for Arc { +impl ArcEqIdent for Arc { #[inline] - fn eq(&self, other: &Arc) -> bool { + fn eq(&self, other: &Self) -> bool { Arc::ptr_eq(self, other) || **self == **other } #[inline] - fn ne(&self, other: &Arc) -> bool { + fn ne(&self, other: &Self) -> bool { !Arc::ptr_eq(self, other) && **self != **other } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for Arc { +impl PartialEq for Arc { /// Equality for two `Arc`s. /// /// Two `Arc`s are equal if their inner values are equal. @@ -1547,8 +1633,7 @@ impl PartialEq for Arc { /// /// assert!(five == Arc::new(5)); /// ``` - #[inline] - fn eq(&self, other: &Arc) -> bool { + fn eq(&self, other: &Self) -> bool { ArcEqIdent::eq(self, other) } @@ -1569,13 +1654,13 @@ impl PartialEq for Arc { /// assert!(five != Arc::new(6)); /// ``` #[inline] - fn ne(&self, other: &Arc) -> bool { + fn ne(&self, other: &Self) -> bool { ArcEqIdent::ne(self, other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Arc { +impl PartialOrd for Arc { /// Partial comparison for two `Arc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. @@ -1590,7 +1675,7 @@ impl PartialOrd for Arc { /// /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6))); /// ``` - fn partial_cmp(&self, other: &Arc) -> Option { + fn partial_cmp(&self, other: &Self) -> Option { (**self).partial_cmp(&**other) } @@ -1607,7 +1692,7 @@ impl PartialOrd for Arc { /// /// assert!(five < Arc::new(6)); /// ``` - fn lt(&self, other: &Arc) -> bool { + fn lt(&self, other: &Self) -> bool { *(*self) < *(*other) } @@ -1624,7 +1709,7 @@ impl PartialOrd for Arc { /// /// assert!(five <= Arc::new(5)); /// ``` - fn le(&self, other: &Arc) -> bool { + fn le(&self, other: &Self) -> bool { *(*self) <= *(*other) } @@ -1641,7 +1726,7 @@ impl PartialOrd for Arc { /// /// assert!(five > Arc::new(4)); /// ``` - fn gt(&self, other: &Arc) -> bool { + fn gt(&self, other: &Self) -> bool { *(*self) > *(*other) } @@ -1658,12 +1743,12 @@ impl PartialOrd for Arc { /// /// assert!(five >= Arc::new(5)); /// ``` - fn ge(&self, other: &Arc) -> bool { + fn ge(&self, other: &Self) -> bool { *(*self) >= *(*other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Arc { +impl Ord for Arc { /// Comparison for two `Arc`s. /// /// The two are compared by calling `cmp()` on their inner values. 
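[Editor's aside] The "move the `Alloc` out before freeing the block" dance used in both `Drop` impls above can be illustrated in miniature with the global allocator. The `Header` type and its `tag` field are invented stand-ins for `ArcInner` and its `alloc` field:

```
use std::alloc::{alloc, dealloc, Layout};
use std::ptr;

struct Header {
    tag: String, // stand-in for the stored allocator
    payload: u64,
}

fn main() {
    unsafe {
        let layout = Layout::new::<Header>();
        let p = alloc(layout) as *mut Header;
        assert!(!p.is_null());
        p.write(Header { tag: String::from("demo"), payload: 7 });

        // Move the field out of the block before freeing the block itself;
        // `ptr::read` transfers ownership without touching the memory.
        let tag = ptr::read(&(*p).tag);
        dealloc(p as *mut u8, layout);

        // `tag` outlives the block it was stored in and is dropped normally.
        assert_eq!(tag, "demo");
    }
}
```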
@@ -1678,29 +1763,29 @@ impl Ord for Arc { /// /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6))); /// ``` - fn cmp(&self, other: &Arc) -> Ordering { + fn cmp(&self, other: &Self) -> Ordering { (**self).cmp(&**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Arc {} +impl Eq for Arc {} #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Display for Arc { +impl fmt::Display for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Arc { +impl fmt::Debug for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Pointer for Arc { +impl fmt::Pointer for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&(&**self as *const T), f) } @@ -1708,7 +1793,7 @@ impl fmt::Pointer for Arc { #[stable(feature = "rust1", since = "1.0.0")] impl Default for Arc { - /// Creates a new `Arc`, with the `Default` value for `T`. + /// Creates a new `Arc`, with the `Default` value for `T`. /// /// # Examples /// @@ -1724,7 +1809,7 @@ impl Default for Arc { } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Arc { +impl Hash for Arc { fn hash(&self, state: &mut H) { (**self).hash(state) } @@ -1741,7 +1826,8 @@ impl From for Arc { impl From<&[T]> for Arc<[T]> { #[inline] fn from(v: &[T]) -> Arc<[T]> { - >::from_slice(v) + let Ok(a) = >::from_slice(v, Default::default()); + a } } @@ -2082,7 +2168,7 @@ mod tests { assert_eq!(format!("{:?}", a), "5"); } - // Make sure deriving works with Arc + // Make sure deriving works with Arc #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)] struct Foo { inner: Arc, @@ -2269,14 +2355,14 @@ mod tests { } #[stable(feature = "rust1", since = "1.0.0")] -impl borrow::Borrow for Arc { +impl borrow::Borrow for Arc { fn borrow(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl AsRef for Arc { +impl AsRef for Arc { fn as_ref(&self) -> &T { &**self } From 66b01b6e4fd0dfa168b14a0ec979a4081d4a2257 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 28 Dec 2017 21:59:46 -0500 Subject: [PATCH 9/9] LinkedList: Generalize for arbitrary allocators --- src/liballoc/collections/linked_list.rs | 279 ++++++++++++++++-------- 1 file changed, 182 insertions(+), 97 deletions(-) diff --git a/src/liballoc/collections/linked_list.rs b/src/liballoc/collections/linked_list.rs index 40a82d6feaa98..21b0eb321e3f7 100644 --- a/src/liballoc/collections/linked_list.rs +++ b/src/liballoc/collections/linked_list.rs @@ -20,6 +20,8 @@ use core::marker::PhantomData; use core::mem; use core::ptr::NonNull; +use crate::abort_adapter::AbortAdapter; +use crate::alloc::{Alloc, Global}; use crate::boxed::Box; use super::SpecExtend; @@ -32,17 +34,19 @@ use super::SpecExtend; /// `LinkedList`. In general, array-based containers are faster, /// more memory efficient and make better use of CPU cache. #[stable(feature = "rust1", since = "1.0.0")] -pub struct LinkedList { - head: Option>>, - tail: Option>>, +pub struct LinkedList> { + head: Option>>, + tail: Option>>, len: usize, - marker: PhantomData>>, + alloc: A, + marker: PhantomData, A>>, } -struct Node { - next: Option>>, - prev: Option>>, +struct Node { + next: Option>>, + prev: Option>>, element: T, + marker: PhantomData, A>>, } /// An iterator over the elements of a `LinkedList`. 
@@ -53,15 +57,16 @@ struct Node { /// [`iter`]: struct.LinkedList.html#method.iter /// [`LinkedList`]: struct.LinkedList.html #[stable(feature = "rust1", since = "1.0.0")] -pub struct Iter<'a, T: 'a> { - head: Option>>, - tail: Option>>, + +pub struct Iter<'a, T: 'a, A: 'a + Alloc = AbortAdapter > { + head: Option>>, + tail: Option>>, len: usize, - marker: PhantomData<&'a Node>, + marker: PhantomData<&'a Node>, } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for Iter<'_, T> { +impl<'a, T: fmt::Debug, A: 'a + Alloc> fmt::Debug for Iter<'a, T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Iter") .field(&self.len) @@ -71,7 +76,7 @@ impl fmt::Debug for Iter<'_, T> { // FIXME(#26925) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Iter<'_, T> { +impl Clone for Iter<'_, T, A> { fn clone(&self) -> Self { Iter { ..*self } } @@ -85,13 +90,13 @@ impl Clone for Iter<'_, T> { /// [`iter_mut`]: struct.LinkedList.html#method.iter_mut /// [`LinkedList`]: struct.LinkedList.html #[stable(feature = "rust1", since = "1.0.0")] -pub struct IterMut<'a, T: 'a> { +pub struct IterMut<'a, T: 'a, A: Alloc + Clone + 'a = AbortAdapter> { // We do *not* exclusively own the entire list here, references to node's `element` // have been handed out by the iterator! So be careful when using this; the methods // called must be aware that there can be aliasing pointers to `element`. - list: &'a mut LinkedList, - head: Option>>, - tail: Option>>, + list: &'a mut LinkedList, + head: Option>>, + tail: Option>>, len: usize, } @@ -112,10 +117,16 @@ impl fmt::Debug for IterMut<'_, T> { /// /// [`into_iter`]: struct.LinkedList.html#method.into_iter /// [`LinkedList`]: struct.LinkedList.html -#[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct IntoIter { - list: LinkedList, +pub struct IntoIter> { + list: LinkedList, +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Clone for IntoIter { + fn clone(&self) -> Self { + IntoIter { list: self.list.clone() } + } } #[stable(feature = "collection_debug", since = "1.17.0")] @@ -127,25 +138,26 @@ impl fmt::Debug for IntoIter { } } -impl Node { +impl Node { fn new(element: T) -> Self { Node { next: None, prev: None, element, + marker: PhantomData } } - fn into_element(self: Box) -> T { + fn into_element(self: Box) -> T { self.element } } // private methods -impl LinkedList { +impl LinkedList { /// Adds the given node to the front of the list. #[inline] - fn push_front_node(&mut self, mut node: Box>) { + fn push_front_node(&mut self, mut node: Box, A>) { // This method takes care not to create mutable references to whole nodes, // to maintain validity of aliasing pointers into `element`. unsafe { @@ -166,11 +178,11 @@ impl LinkedList { /// Removes and returns the node at the front of the list. #[inline] - fn pop_front_node(&mut self) -> Option>> { + fn pop_front_node(&mut self) -> Option, A>> { // This method takes care not to create mutable references to whole nodes, // to maintain validity of aliasing pointers into `element`. self.head.map(|node| unsafe { - let node = Box::from_raw(node.as_ptr()); + let node = Box::from_raw_in(node.as_ptr(), self.alloc.clone()); self.head = node.next; match self.head { @@ -186,7 +198,7 @@ impl LinkedList { /// Adds the given node to the back of the list. 
#[inline] - fn push_back_node(&mut self, mut node: Box>) { + fn push_back_node(&mut self, mut node: Box, A>) { // This method takes care not to create mutable references to whole nodes, // to maintain validity of aliasing pointers into `element`. unsafe { @@ -207,11 +219,11 @@ impl LinkedList { /// Removes and returns the node at the back of the list. #[inline] - fn pop_back_node(&mut self) -> Option>> { + fn pop_back_node(&mut self) -> Option, A>> { // This method takes care not to create mutable references to whole nodes, // to maintain validity of aliasing pointers into `element`. self.tail.map(|node| unsafe { - let node = Box::from_raw(node.as_ptr()); + let node = Box::from_raw_in(node.as_ptr(), self.alloc.clone()); self.tail = node.prev; match self.tail { @@ -232,7 +244,7 @@ impl LinkedList { /// This method takes care not to create mutable references to `element`, to /// maintain validity of aliasing pointers. #[inline] - unsafe fn unlink_node(&mut self, mut node: NonNull>) { + unsafe fn unlink_node(&mut self, mut node: NonNull>) { let node = node.as_mut(); // this one is ours now, we can create an &mut. // Not creating new mutable (unique!) references overlapping `element`. @@ -274,10 +286,60 @@ impl LinkedList { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> Self { + Self::new_in(Default::default()) + } + + /// Appends an element to the back of a list + /// + /// # Examples + /// + /// ``` + /// use std::collections::LinkedList; + /// + /// let mut d = LinkedList::new(); + /// d.push_back(1); + /// d.push_back(3); + /// assert_eq!(3, *d.back().unwrap()); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn push_back(&mut self, elt: T) { + let Ok(()) = self.push_back_alloc(elt); + } + + /// Adds an element first in the list. + /// + /// This operation should compute in O(1) time. + /// + /// # Examples + /// + /// ``` + /// use std::collections::LinkedList; + /// + /// let mut dl = LinkedList::new(); + /// + /// dl.push_front(2); + /// assert_eq!(dl.front().unwrap(), &2); + /// + /// dl.push_front(1); + /// assert_eq!(dl.front().unwrap(), &1); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn push_front(&mut self, elt: T) { + let Ok(()) = self.push_front_alloc(elt); + } +} + +impl LinkedList { + /// like `Self::new` but with an explicit allocator rather than the default + /// global one. 
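[Editor's aside] The `let Ok(()) = self.push_back_alloc(elt);` wrappers above rely on the allocator's error type being uninhabited (the `AbortAdapter` path). The same idea, spelled with a `match` so it compiles on stable without `exhaustive_patterns`, and with an invented `push_infallible` helper standing in for `push_back_alloc`:

```
use std::convert::Infallible;

fn push_infallible(v: &mut Vec<u32>, x: u32) -> Result<(), Infallible> {
    v.push(x);
    Ok(())
}

fn main() {
    let mut v = Vec::new();
    // With an uninhabited error type the `Err` arm can never be reached,
    // which is what lets the patch expose an infallible `push_back` on top
    // of the fallible `push_back_alloc`.
    match push_infallible(&mut v, 1) {
        Ok(()) => {}
        Err(never) => match never {},
    }
    assert_eq!(v, [1]);
}
```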
+ #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn new_in(a: A) -> Self { LinkedList { head: None, tail: None, len: 0, + alloc: a, marker: PhantomData, } } @@ -352,7 +414,7 @@ impl LinkedList { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - pub fn iter(&self) -> Iter<'_, T> { + pub fn iter(&self) -> Iter<'_, T, A> { Iter { head: self.head, tail: self.tail, @@ -386,7 +448,7 @@ impl LinkedList { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - pub fn iter_mut(&mut self) -> IterMut<'_, T> { + pub fn iter_mut(&mut self) -> IterMut<'_, T, A> { IterMut { head: self.head, tail: self.tail, @@ -465,7 +527,7 @@ impl LinkedList { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn clear(&mut self) { - *self = Self::new(); + *self = Self::new_in(self.alloc.clone()); } /// Returns `true` if the `LinkedList` contains an element equal to the @@ -610,8 +672,10 @@ impl LinkedList { /// assert_eq!(dl.front().unwrap(), &1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn push_front(&mut self, elt: T) { - self.push_front_node(box Node::new(elt)); + pub fn push_front_alloc(&mut self, elt: T) -> Result<(), A::Err> { + let alloc = self.alloc.clone(); + self.push_front_node(Box::new_in(Node::new(elt), alloc)?); + Ok(()) } /// Removes the first element and returns it, or `None` if the list is @@ -653,8 +717,10 @@ impl LinkedList { /// assert_eq!(3, *d.back().unwrap()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn push_back(&mut self, elt: T) { - self.push_back_node(box Node::new(elt)); + pub fn push_back_alloc(&mut self, elt: T) -> Result<(), A::Err> { + let alloc = self.alloc.clone(); + self.push_back_node(Box::new_in(Node::new(elt), alloc)?); + Ok(()) } /// Removes the last element from a list and returns it, or `None` if @@ -704,13 +770,14 @@ impl LinkedList { /// assert_eq!(splitted.pop_front(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn split_off(&mut self, at: usize) -> LinkedList { + pub fn split_off(&mut self, at: usize) -> Self { let len = self.len(); assert!(at <= len, "Cannot split off at a nonexistent index"); if at == 0 { - return mem::replace(self, Self::new()); + let alloc = self.alloc.clone(); + return mem::replace(self, Self::new_in(alloc)); } else if at == len { - return Self::new(); + return Self::new_in(self.alloc.clone()); } // Below, we iterate towards the `i-1`th node, either from the start or the end, @@ -748,6 +815,7 @@ impl LinkedList { head: second_part_head, tail: self.tail, len: len - at, + alloc: self.alloc.clone(), marker: PhantomData, }; @@ -785,7 +853,7 @@ impl LinkedList { /// assert_eq!(odds.into_iter().collect::>(), vec![1, 3, 5, 9, 11, 13, 15]); /// ``` #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] - pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F> + pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A> where F: FnMut(&mut T) -> bool { // avoid borrow issues. 
@@ -803,14 +871,14 @@ impl LinkedList { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T> Drop for LinkedList { +unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for LinkedList { fn drop(&mut self) { while let Some(_) = self.pop_front_node() {} } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> Iterator for Iter<'a, T> { +impl<'a, T, A: Alloc> Iterator for Iter<'a, T, A> { type Item = &'a T; #[inline] @@ -835,7 +903,7 @@ impl<'a, T> Iterator for Iter<'a, T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> DoubleEndedIterator for Iter<'a, T> { +impl<'a, T, A: Alloc> DoubleEndedIterator for Iter<'a, T, A> { #[inline] fn next_back(&mut self) -> Option<&'a T> { if self.len == 0 { @@ -853,13 +921,13 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Iter<'_, T> {} +impl ExactSizeIterator for Iter<'_, T, A> {} #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Iter<'_, T> {} +impl FusedIterator for Iter<'_, T, A> {} #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> Iterator for IterMut<'a, T> { +impl<'a, T, A: Alloc + Clone> Iterator for IterMut<'a, T, A> { type Item = &'a mut T; #[inline] @@ -884,7 +952,7 @@ impl<'a, T> Iterator for IterMut<'a, T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { +impl<'a, T, A: Alloc + Clone> DoubleEndedIterator for IterMut<'a, T, A> { #[inline] fn next_back(&mut self) -> Option<&'a mut T> { if self.len == 0 { @@ -902,10 +970,10 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IterMut<'_, T> {} +impl ExactSizeIterator for IterMut<'_, T, A> {} #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IterMut<'_, T> {} +impl FusedIterator for IterMut<'_, T, A> {} impl IterMut<'_, T> { /// Inserts the given element just after the element most recently returned by `.next()`. @@ -936,21 +1004,32 @@ impl IterMut<'_, T> { reason = "this is probably better handled by a cursor type -- we'll see", issue = "27794")] pub fn insert_next(&mut self, element: T) { - match self.head { + let Ok(()) = self.insert_next_alloc(element); + } +} + +impl<'a, T, A: Alloc + Clone> IterMut<'a, T, A> { + /// Like `Self::insert_next` but allows non-standard allocators. Might need + /// to be separate for stability reasons. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn insert_next_alloc(&mut self, element: T) -> Result<(), A::Err> { + Ok(match self.head { // `push_back` is okay with aliasing `element` references - None => self.list.push_back(element), + None => self.list.push_back_alloc(element)?, Some(head) => unsafe { + // `push_front` is okay with aliasing nodes let prev = match head.as_ref().prev { - // `push_front` is okay with aliasing nodes - None => return self.list.push_front(element), + None => return self.list.push_front_alloc(element), Some(prev) => prev, }; - let node = Some(Box::into_raw_non_null(box Node { + let node = Some(NonNull::from(Box::into_unique(Box::new_in(Node { next: Some(head), prev: Some(prev), element, - })); + marker: PhantomData, + }, self.list.alloc.clone())?))); // Not creating references to entire nodes to not invalidate the // reference to `element` we handed to the user. 
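[Editor's aside] The ownership hand-off that `push_front_node`/`pop_front_node` perform, a `Box`ed node turned into a raw `NonNull` and later reclaimed, can be sketched with the plain global-allocator `Box`; the patch's `Box::from_raw_in` is the allocator-aware variant of the reclaim step:

```
use std::ptr::NonNull;

struct Node {
    next: Option<NonNull<Node>>,
    element: u32,
}

fn main() {
    // Give the node to the heap and keep only a raw, non-null pointer,
    // as the list does when it links a freshly pushed node.
    let boxed = Box::new(Node { next: None, element: 1 });
    let raw: NonNull<Node> = NonNull::from(Box::leak(boxed));

    // Later, reclaim ownership so the node is dropped and freed exactly once,
    // the global-allocator analogue of `Box::from_raw_in` in `pop_front_node`.
    let reclaimed = unsafe { Box::from_raw(raw.as_ptr()) };
    assert_eq!(reclaimed.element, 1);
}
```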
@@ -959,7 +1038,7 @@ impl IterMut<'_, T> { self.list.len += 1; }, - } + }) } /// Provides a reference to the next element, without changing the iterator. @@ -996,18 +1075,18 @@ impl IterMut<'_, T> { /// An iterator produced by calling `drain_filter` on LinkedList. #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -pub struct DrainFilter<'a, T: 'a, F: 'a> - where F: FnMut(&mut T) -> bool, +pub struct DrainFilter<'a, T: 'a, F: 'a, A: 'a = AbortAdapter> + where A: Alloc + Clone, F: FnMut(&mut T) -> bool, { - list: &'a mut LinkedList, - it: Option>>, + list: &'a mut LinkedList, + it: Option>>, pred: F, idx: usize, old_len: usize, } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Iterator for DrainFilter<'_, T, F> +impl Iterator for DrainFilter<'_, T, F, A> where F: FnMut(&mut T) -> bool, { type Item = T; @@ -1035,7 +1114,7 @@ impl Iterator for DrainFilter<'_, T, F> } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl Drop for DrainFilter<'_, T, F> +impl Drop for DrainFilter<'_, T, F, A> where F: FnMut(&mut T) -> bool, { fn drop(&mut self) { @@ -1044,7 +1123,7 @@ impl Drop for DrainFilter<'_, T, F> } #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] -impl fmt::Debug for DrainFilter<'_, T, F> +impl fmt::Debug for DrainFilter<'_, T, F, A> where F: FnMut(&mut T) -> bool { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1055,7 +1134,7 @@ impl fmt::Debug for DrainFilter<'_, T, F> } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { +impl Iterator for IntoIter { type Item = T; #[inline] @@ -1070,7 +1149,7 @@ impl Iterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { +impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { self.list.pop_back() @@ -1078,10 +1157,10 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter {} +impl ExactSizeIterator for IntoIter {} #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} +impl FusedIterator for IntoIter {} #[stable(feature = "rust1", since = "1.0.0")] impl FromIterator for LinkedList { @@ -1093,65 +1172,67 @@ impl FromIterator for LinkedList { } #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for LinkedList { +impl IntoIterator for LinkedList { type Item = T; - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Consumes the list into an iterator yielding elements by value. 
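[Editor's aside] As a reminder of what these trait impls buy in ordinary use (still through the default allocator, since `FromIterator` is not generalized by this patch):

```
use std::collections::LinkedList;

fn main() {
    // `FromIterator` builds the list through the default allocator.
    let list: LinkedList<u32> = (1..=3).collect();

    // `IntoIterator` consumes the list and yields elements by value.
    let doubled: Vec<u32> = list.into_iter().map(|x| x * 2).collect();
    assert_eq!(doubled, [2, 4, 6]);
}
```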
#[inline] - fn into_iter(self) -> IntoIter { + fn into_iter(self) -> IntoIter { IntoIter { list: self } } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> IntoIterator for &'a LinkedList { +impl<'a, T, A: Alloc + Clone> IntoIterator for &'a LinkedList { type Item = &'a T; - type IntoIter = Iter<'a, T>; + type IntoIter = Iter<'a, T, A>; - fn into_iter(self) -> Iter<'a, T> { + fn into_iter(self) -> Iter<'a, T, A> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> IntoIterator for &'a mut LinkedList { +impl<'a, T, A: Alloc + Clone> IntoIterator for &'a mut LinkedList { type Item = &'a mut T; - type IntoIter = IterMut<'a, T>; + type IntoIter = IterMut<'a, T, A>; - fn into_iter(self) -> IterMut<'a, T> { + fn into_iter(self) -> IterMut<'a, T, A> { self.iter_mut() } } #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for LinkedList { +impl + Clone> Extend for LinkedList { fn extend>(&mut self, iter: I) { >::spec_extend(self, iter); } } -impl SpecExtend for LinkedList { +impl + Clone> SpecExtend for LinkedList { default fn spec_extend(&mut self, iter: I) { - iter.into_iter().for_each(move |elt| self.push_back(elt)); + iter.into_iter().for_each(move |elt| { + let Ok(()) = self.push_back_alloc(elt); + }); } } -impl SpecExtend> for LinkedList { - fn spec_extend(&mut self, ref mut other: LinkedList) { +impl + Clone> SpecExtend> for LinkedList { + fn spec_extend(&mut self, ref mut other: LinkedList) { self.append(other); } } #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList { +impl<'a, T: 'a + Copy, A: Alloc + Clone> Extend<&'a T> for LinkedList { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for LinkedList { +impl PartialEq for LinkedList { fn eq(&self, other: &Self) -> bool { self.len() == other.len() && self.iter().eq(other) } @@ -1162,17 +1243,17 @@ impl PartialEq for LinkedList { } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for LinkedList {} +impl Eq for LinkedList {} #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for LinkedList { +impl PartialOrd for LinkedList { fn partial_cmp(&self, other: &Self) -> Option { self.iter().partial_cmp(other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for LinkedList { +impl Ord for LinkedList { #[inline] fn cmp(&self, other: &Self) -> Ordering { self.iter().cmp(other) @@ -1187,14 +1268,14 @@ impl Clone for LinkedList { } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for LinkedList { +impl fmt::Debug for LinkedList { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self).finish() } } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for LinkedList { +impl Hash for LinkedList { fn hash(&self, state: &mut H) { self.len().hash(state); for elt in self { @@ -1218,10 +1299,10 @@ fn assert_covariance() { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for LinkedList {} +unsafe impl Send for LinkedList {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for LinkedList {} +unsafe impl Sync for LinkedList {} #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Send for Iter<'_, T> {} @@ -1244,16 +1325,18 @@ mod tests { use super::{LinkedList, Node}; + type GlobalNode = Node>; + #[cfg(test)] - fn list_from(v: &[T]) -> LinkedList { + fn list_from(v: &[T]) -> LinkedList { v.iter().cloned().collect() } - pub fn check_links(list: &LinkedList) { + pub fn 
check_links(list: &LinkedList) { unsafe { let mut len = 0; - let mut last_ptr: Option<&Node> = None; - let mut node_ptr: &Node; + let mut last_ptr: Option<&GlobalNode> = None; + let mut node_ptr: &GlobalNode; match list.head { None => { // tail node should also be None. @@ -1268,7 +1351,9 @@ mod tests { (None, None) => {} (None, _) => panic!("prev link for head"), (Some(p), Some(pptr)) => { - assert_eq!(p as *const Node, pptr.as_ptr() as *const Node); + assert_eq!( + p as *const GlobalNode, + pptr.as_ptr() as *const GlobalNode); } _ => panic!("prev link is none, not good"), } @@ -1287,7 +1372,7 @@ mod tests { // verify that the tail node points to the last node. let tail = list.tail.as_ref().expect("some tail node").as_ref(); - assert_eq!(tail as *const Node, node_ptr as *const Node); + assert_eq!(tail as *const GlobalNode, node_ptr as *const GlobalNode); // check that len matches interior links. assert_eq!(len, list.len); }
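[Editor's aside] For completeness, a test in the style of the surrounding module, assuming the `list_from` and `check_links` helpers above; this is a sketch, not part of the patch's own test suite:

```
#[test]
fn split_off_keeps_links_consistent() {
    let mut list = list_from(&[1, 2, 3, 4, 5]);
    let tail = list.split_off(2);

    // Both halves must still satisfy the prev/next invariants.
    check_links(&list);
    check_links(&tail);

    assert_eq!(list.len(), 2);
    assert_eq!(tail.len(), 3);
    assert_eq!(list.into_iter().collect::<Vec<_>>(), [1, 2]);
    assert_eq!(tail.into_iter().collect::<Vec<_>>(), [3, 4, 5]);
}
```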