
dropck and new scoping rules for safe destruction #21972

Merged · 9 commits · Feb 11, 2015
71 changes: 50 additions & 21 deletions src/libarena/lib.rs
@@ -42,6 +42,7 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::intrinsics::{TyDesc, get_tydesc};
 use std::intrinsics;
+use std::marker;
 use std::mem;
 use std::num::{Int, UnsignedInt};
 use std::ptr;
@@ -88,27 +89,29 @@ impl Chunk {
 /// than objects without destructors. This reduces overhead when initializing
 /// plain-old-data (`Copy` types) and means we don't need to waste time running
 /// their destructors.
-pub struct Arena {
+pub struct Arena<'longer_than_self> {
     // The head is separated out from the list as an unbenchmarked
     // microoptimization, to avoid needing to case on the list to access the
     // head.
     head: RefCell<Chunk>,
     copy_head: RefCell<Chunk>,
     chunks: RefCell<Vec<Chunk>>,
+    _invariant: marker::InvariantLifetime<'longer_than_self>,
 }

-impl Arena {
+impl<'a> Arena<'a> {
     /// Allocates a new Arena with 32 bytes preallocated.
-    pub fn new() -> Arena {
+    pub fn new() -> Arena<'a> {
         Arena::new_with_size(32)
     }

     /// Allocates a new Arena with `initial_size` bytes preallocated.
-    pub fn new_with_size(initial_size: usize) -> Arena {
+    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
         Arena {
             head: RefCell::new(chunk(initial_size, false)),
             copy_head: RefCell::new(chunk(initial_size, true)),
             chunks: RefCell::new(Vec::new()),
+            _invariant: marker::InvariantLifetime,
         }
     }
 }
@@ -122,7 +125,7 @@ fn chunk(size: usize, is_copy: bool) -> Chunk {
 }

 #[unsafe_destructor]
-impl Drop for Arena {
+impl<'longer_than_self> Drop for Arena<'longer_than_self> {
     fn drop(&mut self) {
         unsafe {
             destroy_chunk(&*self.head.borrow());
@@ -180,7 +183,7 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
     ((p & !1) as *const TyDesc, p & 1 == 1)
 }

-impl Arena {
+impl<'longer_than_self> Arena<'longer_than_self> {
     fn chunk_size(&self) -> usize {
         self.copy_head.borrow().capacity()
     }
@@ -293,7 +296,7 @@ impl Arena {
     /// Allocates a new item in the arena, using `op` to initialize the value,
     /// and returns a reference to it.
     #[inline]
-    pub fn alloc<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
+    pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
         unsafe {
             if intrinsics::needs_drop::<T>() {
                 self.alloc_noncopy(op)
@@ -317,20 +320,6 @@ fn test_arena_destructors() {
     }
 }

-#[test]
-fn test_arena_alloc_nested() {
-    struct Inner { value: usize }
-    struct Outer<'a> { inner: &'a Inner }
-
-    let arena = Arena::new();
-
-    let result = arena.alloc(|| Outer {
-        inner: arena.alloc(|| Inner { value: 10 })
-    });
-
-    assert_eq!(result.inner.value, 10);
-}
-
 #[test]
 #[should_fail]
 fn test_arena_destructors_fail() {
@@ -365,6 +354,10 @@ pub struct TypedArena<T> {

     /// A pointer to the first arena segment.
     first: RefCell<*mut TypedArenaChunk<T>>,
+
+    /// Marker indicating that dropping the arena causes its owned
+    /// instances of `T` to be dropped.
+    _own: marker::PhantomData<T>,
 }

 struct TypedArenaChunk<T> {
@@ -460,6 +453,7 @@ impl<T> TypedArena<T> {
             ptr: Cell::new((*chunk).start() as *const T),
             end: Cell::new((*chunk).end() as *const T),
             first: RefCell::new(chunk),
+            _own: marker::PhantomData,
         }
     }
 }
@@ -523,6 +517,41 @@ mod tests {
         z: i32,
     }

+    #[test]
+    fn test_arena_alloc_nested() {
+        struct Inner { value: u8 }
+        struct Outer<'a> { inner: &'a Inner }
+        enum EI<'e> { I(Inner), O(Outer<'e>) }
+
+        struct Wrap<'a>(TypedArena<EI<'a>>);
+
+        impl<'a> Wrap<'a> {
+            fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
+                let r: &EI = self.0.alloc(EI::I(f()));
+                if let &EI::I(ref i) = r {
+                    i
+                } else {
+                    panic!("mismatch");
+                }
+            }
+            fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
+                let r: &EI = self.0.alloc(EI::O(f()));
+                if let &EI::O(ref o) = r {
+                    o
+                } else {
+                    panic!("mismatch");
+                }
+            }
+        }
+
+        let arena = Wrap(TypedArena::new());
+
+        let result = arena.alloc_outer(|| Outer {
+            inner: arena.alloc_inner(|| Inner { value: 10 }) });
+
+        assert_eq!(result.inner.value, 10);
+    }
+
     #[test]
     pub fn test_copy() {
         let arena = TypedArena::new();
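The `'longer_than_self` parameter above is what outlaws the old `test_arena_alloc_nested`: the arena is now invariant over a lifetime that must strictly outlive it, so `alloc` can no longer hand out references pointing back into the same arena while destructors may still run. A minimal sketch of the same trick in today's syntax (`InvariantLifetime` was later subsumed by `PhantomData`; the names here are illustrative, not this PR's code):

```rust
use std::cell::Cell;
use std::marker::PhantomData;

// `Cell<&'a ()>` makes `'a` invariant, standing in for the PR's
// `marker::InvariantLifetime<'longer_than_self>`.
struct Arena<'longer_than_self> {
    _invariant: PhantomData<Cell<&'longer_than_self ()>>,
}

impl<'a> Arena<'a> {
    fn new() -> Arena<'a> {
        Arena { _invariant: PhantomData }
    }

    // `T: 'a` forces everything stored in the arena to outlive `'a`,
    // which in turn strictly outlives the arena value itself, so no
    // destructor can observe a dangling reference into the same arena.
    fn alloc<T: 'a>(&self, _value: T) -> &T {
        unimplemented!("allocation elided in this sketch")
    }
}
```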
16 changes: 8 additions & 8 deletions src/libcollections/btree/node.rs
@@ -278,7 +278,7 @@ impl<T> Drop for RawItems<T> {
 #[unsafe_destructor]
 impl<K, V> Drop for Node<K, V> {
     fn drop(&mut self) {
-        if self.keys.0.is_null() {
+        if self.keys.ptr.is_null() {
             // We have already cleaned up this node.
             return;
         }
@@ -292,7 +292,7 @@ impl<K, V> Drop for Node<K, V> {
             self.destroy();
         }

-        self.keys.0 = ptr::null_mut();
+        self.keys.ptr = ptr::null_mut();
     }
 }

@@ -337,18 +337,18 @@ impl<K, V> Node<K, V> {
     unsafe fn destroy(&mut self) {
         let (alignment, size) =
             calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
-        heap::deallocate(self.keys.0 as *mut u8, size, alignment);
+        heap::deallocate(self.keys.ptr as *mut u8, size, alignment);
     }

     #[inline]
     pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
         unsafe {(
             mem::transmute(raw::Slice {
-                data: self.keys.0,
+                data: self.keys.ptr,
                 len: self.len()
             }),
             mem::transmute(raw::Slice {
-                data: self.vals.0,
+                data: self.vals.ptr,
                 len: self.len()
             })
         )}
@@ -368,7 +368,7 @@
         } else {
             unsafe {
                 mem::transmute(raw::Slice {
-                    data: self.edges.0,
+                    data: self.edges.ptr,
                     len: self.len() + 1
                 })
             }
@@ -586,7 +586,7 @@ impl <K, V> Node<K, V> {

     /// If the node has any children
     pub fn is_leaf(&self) -> bool {
-        self.edges.0.is_null()
+        self.edges.ptr.is_null()
     }

     /// if the node has too few elements
@@ -1064,7 +1064,7 @@ impl<K, V> Node<K, V> {
             vals: RawItems::from_slice(self.vals()),
             edges: RawItems::from_slice(self.edges()),

-            ptr: self.keys.0 as *mut u8,
+            ptr: self.keys.ptr as *mut u8,
             capacity: self.capacity(),
             is_leaf: self.is_leaf()
         },
18 changes: 12 additions & 6 deletions src/libcollections/vec.rs
@@ -57,7 +57,7 @@ use core::default::Default;
 use core::fmt;
 use core::hash::{self, Hash};
 use core::iter::{repeat, FromIterator, IntoIterator};
-use core::marker::{ContravariantLifetime, InvariantType};
+use core::marker::{self, ContravariantLifetime, InvariantType};
 use core::mem;
 use core::nonzero::NonZero;
 use core::num::{Int, UnsignedInt};
@@ -140,6 +140,7 @@ pub struct Vec<T> {
     ptr: NonZero<*mut T>,
     len: usize,
     cap: usize,
+    _own: marker::PhantomData<T>,
 }

 unsafe impl<T: Send> Send for Vec<T> { }
@@ -166,7 +167,7 @@ impl<T> Vec<T> {
         // non-null value which is fine since we never call deallocate on the ptr
         // if cap is 0. The reason for this is because the pointer of a slice
         // being NULL would break the null pointer optimization for enums.
-        Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
+        unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
     }

     /// Constructs a new, empty `Vec<T>` with the specified capacity.
@@ -198,15 +199,15 @@
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn with_capacity(capacity: usize) -> Vec<T> {
         if mem::size_of::<T>() == 0 {
-            Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: usize::MAX }
+            unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
         } else if capacity == 0 {
             Vec::new()
         } else {
             let size = capacity.checked_mul(mem::size_of::<T>())
                                .expect("capacity overflow");
             let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
             if ptr.is_null() { ::alloc::oom() }
-            Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
+            unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
         }
     }

@@ -247,7 +248,12 @@ impl<T> Vec<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
     pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
                                  capacity: usize) -> Vec<T> {
-        Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
+        Vec {
+            ptr: NonZero::new(ptr),
+            len: length,
+            cap: capacity,
+            _own: marker::PhantomData,
+        }
     }

     /// Creates a vector by copying the elements from a raw pointer.
@@ -1626,7 +1632,7 @@ impl<T> IntoIter<T> {
             for _x in self.by_ref() { }
             let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
             mem::forget(self);
-            Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
+            Vec::from_raw_parts(allocation, 0, cap)
         }
     }
 }
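Routing every construction site through `from_raw_parts` means the new `_own: marker::PhantomData<T>` field is initialized in exactly one place. The function later stabilized with the same shape; a round-trip sketch against today's `std` (not part of this diff):

```rust
use std::mem::ManuallyDrop;

fn main() {
    let v = vec![1u32, 2, 3];
    // Relinquish ownership without running the destructor, then take
    // the raw parts by hand.
    let mut v = ManuallyDrop::new(v);
    let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());

    // Safety: ptr/len/cap describe a live allocation we just took over.
    let rebuilt = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(rebuilt, [1, 2, 3]);
}
```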
18 changes: 18 additions & 0 deletions src/libcore/marker.rs
@@ -202,6 +202,24 @@ pub unsafe trait Sync {
     // Empty
 }

+/// A marker type that indicates to the compiler that instances of
+/// the type itself own instances of the type parameter `T`.
+///
+/// This is used to indicate that one or more instances of the type
+/// `T` could be dropped when instances of the type itself are dropped,
+/// though that may not be apparent from the other structure of the
+/// type itself. For example, the type may hold a `*mut T`, which the
+/// compiler does not automatically treat as owned.
+#[unstable(feature = "core",
+           reason = "Newly added to deal with scoping and destructor changes")]
+#[lang="phantom_data"]
+#[derive(PartialEq, Eq, PartialOrd, Ord)]
+pub struct PhantomData<T: ?Sized>;
+
+impl<T: ?Sized> Copy for PhantomData<T> {}
+impl<T: ?Sized> Clone for PhantomData<T> {
+    fn clone(&self) -> PhantomData<T> { *self }
+}
+
 /// A marker type whose type parameter `T` is considered to be
 /// covariant with respect to the type itself. This is (typically)
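The role of `PhantomData<T>` is visible in the `TypedArena` and `Vec` changes above: a type that owns its `T`s only through a raw pointer must say so, or the drop checker cannot account for the `T` destructors its own `Drop` will run. A minimal sketch using only today's `std` (the type is illustrative, not from this PR):

```rust
use std::marker::PhantomData;

// Without the PhantomData<T> field, dropck would not know that
// dropping a MyBox<T> can drop a T through the raw pointer.
struct MyBox<T> {
    ptr: *mut T,
    _own: PhantomData<T>,
}

impl<T> MyBox<T> {
    fn new(value: T) -> MyBox<T> {
        MyBox { ptr: Box::into_raw(Box::new(value)), _own: PhantomData }
    }
}

impl<T> Drop for MyBox<T> {
    fn drop(&mut self) {
        // Safety: `ptr` came from Box::into_raw in `new` and is dropped
        // exactly once, here.
        unsafe { drop(Box::from_raw(self.ptr)); }
    }
}

fn main() {
    let b = MyBox::new(String::from("owned"));
    drop(b); // runs MyBox::drop, which frees the String
}
```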
17 changes: 14 additions & 3 deletions src/libcore/ptr.rs
@@ -92,7 +92,7 @@ use mem;
 use clone::Clone;
 use intrinsics;
 use option::Option::{self, Some, None};
-use marker::{Send, Sized, Sync};
+use marker::{self, Send, Sized, Sync};

 use cmp::{PartialEq, Eq, Ord, PartialOrd};
 use cmp::Ordering::{self, Less, Equal, Greater};
@@ -522,7 +522,11 @@ impl<T> PartialOrd for *mut T {
 /// Useful for building abstractions like `Vec<T>` or `Box<T>`, which
 /// internally use raw pointers to manage the memory that they own.
 #[unstable(feature = "core", reason = "recently added to this module")]
-pub struct Unique<T: ?Sized>(pub *mut T);
+pub struct Unique<T: ?Sized> {
+    /// The wrapped `*mut T`.
+    pub ptr: *mut T,
+    _own: marker::PhantomData<T>,
+}

 /// `Unique` pointers are `Send` if `T` is `Send` because the data they
 /// reference is unaliased. Note that this aliasing invariant is
@@ -550,6 +554,13 @@ impl<T> Unique<T> {
     #[unstable(feature = "core",
                reason = "recently added to this module")]
     pub unsafe fn offset(self, offset: int) -> *mut T {
-        self.0.offset(offset)
+        self.ptr.offset(offset)
     }
 }

+/// Creates a `Unique` wrapped around `ptr`, taking ownership of the
+/// data referenced by `ptr`.
+#[allow(non_snake_case)]
+pub fn Unique<T: ?Sized>(ptr: *mut T) -> Unique<T> {
+    Unique { ptr: ptr, _own: marker::PhantomData }
+}
2 changes: 1 addition & 1 deletion src/libcoretest/ptr.rs
@@ -172,7 +172,7 @@ fn test_set_memory() {
 fn test_unsized_unique() {
     let xs: &mut [_] = &mut [1, 2, 3];
     let ptr = Unique(xs as *mut [_]);
-    let ys = unsafe { &mut *ptr.0 };
+    let ys = unsafe { &mut *ptr.ptr };
     let zs: &mut [_] = &mut [1, 2, 3];
     assert!(ys == zs);
 }
4 changes: 2 additions & 2 deletions src/libflate/lib.rs
@@ -45,13 +45,13 @@ pub struct Bytes {
 impl Deref for Bytes {
     type Target = [u8];
     fn deref(&self) -> &[u8] {
-        unsafe { slice::from_raw_parts_mut(self.ptr.0, self.len) }
+        unsafe { slice::from_raw_parts_mut(self.ptr.ptr, self.len) }
     }
 }

 impl Drop for Bytes {
     fn drop(&mut self) {
-        unsafe { libc::free(self.ptr.0 as *mut _); }
+        unsafe { libc::free(self.ptr.ptr as *mut _); }
     }
 }
