From 78032af66240e111405dc0f118d8b3c56463ebd7 Mon Sep 17 00:00:00 2001 From: nekevss Date: Fri, 21 Oct 2022 12:02:00 -0400 Subject: [PATCH 01/55] Early skeletal framework --- boa_gc/src/heap_box.rs | 9 +++++++++ boa_gc/src/lib.rs | 43 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+) create mode 100644 boa_gc/src/heap_box.rs diff --git a/boa_gc/src/heap_box.rs b/boa_gc/src/heap_box.rs new file mode 100644 index 00000000000..e4438d51acb --- /dev/null +++ b/boa_gc/src/heap_box.rs @@ -0,0 +1,9 @@ + + +pub(crate) struct HeapBoxHeader; + + +pub(crate) struct HeapBox { + header: HeapBoxHeader, + object: T, +} \ No newline at end of file diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index ac24531793c..eaf35c2cbd2 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -4,3 +4,46 @@ pub use gc::{ custom_trace, finalizer_safe, force_collect, unsafe_empty_trace, Finalize, Gc, GcCell as Cell, GcCellRef as Ref, GcCellRefMut as RefMut, Trace, }; + +use std::ptr::NonNull; + +mod heap_box; + +use heap_box::HeapBox; + +struct GcConfig { + threshold: usize, + growth_ratio: f64, + +} + +struct GcRuntimeData { + byte_allocated: usize, +} + +struct BoaGc { + config: GcConfig, + runtime: GcRuntimeData, + heap_start: Option>>, +} + + +/// The GcAllocater allocates a garbage collected value to heap. +pub struct GcAllocater; + +impl BoaAllocater { + pub fn new_gc(value: T) -> Gc { + + } +} + + +pub struct Collector; + + +impl Collector { + pub(crate) fn run_collection(st: &mut GcRuntimeData) { + + } +} + From dc892e6768da86211a01eaada5e95087bd030ccc Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 22 Oct 2022 18:35:37 -0400 Subject: [PATCH 02/55] Build out concept further --- boa_gc/src/cell/borrow_flag.rs | 76 ++++++++ boa_gc/src/cell/cell.rs | 232 +++++++++++++++++++++++ boa_gc/src/cell/cell_ref.rs | 327 +++++++++++++++++++++++++++++++++ boa_gc/src/cell/mod.rs | 6 + boa_gc/src/heap_box.rs | 117 +++++++++++- boa_gc/src/lib.rs | 10 +- 6 files changed, 761 insertions(+), 7 deletions(-) create mode 100644 boa_gc/src/cell/borrow_flag.rs create mode 100644 boa_gc/src/cell/cell.rs create mode 100644 boa_gc/src/cell/cell_ref.rs create mode 100644 boa_gc/src/cell/mod.rs diff --git a/boa_gc/src/cell/borrow_flag.rs b/boa_gc/src/cell/borrow_flag.rs new file mode 100644 index 00000000000..47aaf11e68d --- /dev/null +++ b/boa_gc/src/cell/borrow_flag.rs @@ -0,0 +1,76 @@ +/// The BorrowFlag used by GC is split into 2 parts. the upper 63 or 31 bits +/// (depending on the architecture) are used to store the number of borrowed +/// references to the type. The low bit is used to record the rootedness of the +/// type. +/// +/// This means that GcCell can have, at maximum, half as many outstanding +/// borrows as RefCell before panicking. I don't think that will be a problem. +#[derive(Copy, Clone)] +pub(crate) struct BorrowFlag(usize); + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub(crate) enum BorrowState { + Reading, + Writing, + Unused, +} + +const ROOT: usize = 1; +const WRITING: usize = !1; +const UNUSED: usize = 0; + +/// The base borrowflag init is rooted, and has no outstanding borrows. 
+pub(crate) const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(1); + +impl BorrowFlag { + fn borrowed(self) -> BorrowState { + match self.0 & !ROOT { + UNUSED => BorrowState::Unused, + WRITING => BorrowState::Writing, + _ => BorrowState::Reading, + } + } + + fn rooted(self) -> bool { + match self.0 & ROOT { + 0 => false, + _ => true, + } + } + + fn set_writing(self) -> Self { + // Set every bit other than the root bit, which is preserved + BorrowFlag(self.0 | WRITING) + } + + fn set_unused(self) -> Self { + // Clear every bit other than the root bit, which is preserved + BorrowFlag(self.0 & ROOT) + } + + fn add_reading(self) -> Self { + assert!(self.borrowed() != BorrowState::Writing); + // Add 1 to the integer starting at the second binary digit. As our + // borrowstate is not writing, we know that overflow cannot happen, so + // this is equivalent to the following, more complicated, expression: + // + // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) + 1) << 1)) + BorrowFlag(self.0 + 0b10) + } + + fn sub_reading(self) -> Self { + assert!(self.borrowed() == BorrowState::Reading); + // Subtract 1 from the integer starting at the second binary digit. As + // our borrowstate is not writing or unused, we know that overflow or + // undeflow cannot happen, so this is equivalent to the following, more + // complicated, expression: + // + // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) - 1) << 1)) + BorrowFlag(self.0 - 0b10) + } + + fn set_rooted(self, rooted: bool) -> Self { + // Preserve the non-root bits + BorrowFlag((self.0 & !ROOT) | (rooted as usize)) + } +} \ No newline at end of file diff --git a/boa_gc/src/cell/cell.rs b/boa_gc/src/cell/cell.rs new file mode 100644 index 00000000000..0435ceb02cf --- /dev/null +++ b/boa_gc/src/cell/cell.rs @@ -0,0 +1,232 @@ +//! A garbage collected cell implementation +use std::alloc::Layout; +use std::cell::{Cell, UnsafeCell}; +use std::cmp::Ordering; +use std::fmt::{self, Debug, Display}; +use std::hash::{Hash, Hasher}; +use std::ops::{Deref, DerefMut}; +use std::ptr::{self, NonNull}; + +mod borrow_flag; + +use borrow_flag::{BORROWFLAG_INIT, BorrowFlag}; + + +/// A mutable memory location with dynamically checked borrow rules +/// that can be used inside of a garbage-collected pointer. +/// +/// This object is a `RefCell` that can be used inside of a `Gc`. +pub struct GcCell { + flags: Cell, + cell: UnsafeCell, +} + +impl GcCell { + /// Creates a new `GcCell` containing `value`. + #[inline] + pub fn new(value: T) -> Self { + GcCell { + flags: Cell::new(BORROWFLAG_INIT), + cell: UnsafeCell::new(value), + } + } + + /// Consumes the `GcCell`, returning the wrapped value. + #[inline] + pub fn into_inner(self) -> T { + self.cell.into_inner() + } +} + +impl GcCell { + /// Immutably borrows the wrapped value. + /// + /// The borrow lasts until the returned `GcCellRef` exits scope. + /// Multiple immutable borrows can be taken out at the same time. + /// + /// # Panics + /// + /// Panics if the value is currently mutably borrowed. + #[inline] + pub fn borrow(&self) -> GcCellRef<'_, T> { + match self.try_borrow() { + Ok(value) => value, + Err(e) => panic!("{}", e), + } + } + + /// Mutably borrows the wrapped value. + /// + /// The borrow lasts until the returned `GcCellRefMut` exits scope. + /// The value cannot be borrowed while this borrow is active. + /// + /// # Panics + /// + /// Panics if the value is currently borrowed. 
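// Standalone sketch (not part of the patch itself) of the flag layout described
// above, reproduced with plain `usize` arithmetic: the low bit is the root flag,
// the remaining bits hold the borrow state, and each immutable borrow adds 0b10.
// It mirrors `BORROWFLAG_INIT`, `add_reading`, `set_writing` and `set_unused`
// exactly as written in this file and compiles on its own.
fn borrow_flag_layout_sketch() {
    const ROOT: usize = 1;
    const WRITING: usize = !1;

    let init: usize = 1; // BORROWFLAG_INIT: rooted, no outstanding borrows
    assert_eq!(init & ROOT, 1); // rooted
    assert_eq!(init & !ROOT, 0); // BorrowState::Unused

    let reading = init + 0b10; // add_reading: one immutable borrow outstanding
    assert_eq!(reading & !ROOT, 0b10); // BorrowState::Reading
    assert_eq!(reading & ROOT, 1); // root bit preserved

    let writing = init | WRITING; // set_writing sets every non-root bit
    assert_eq!(writing & !ROOT, WRITING); // BorrowState::Writing

    let unused = writing & ROOT; // set_unused keeps only the root bit
    assert_eq!(unused, init);
}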
+ #[inline] + pub fn borrow_mut(&self) -> GcCellRefMut<'_, T> { + match self.try_borrow_mut() { + Ok(value) => value, + Err(e) => panic!("{}", e), + } + } + + /// Immutably borrows the wrapped value, returning an error if the value is currently mutably + /// borrowed. + /// + /// The borrow lasts until the returned `GcCellRef` exits scope. Multiple immutable borrows can be + /// taken out at the same time. + /// + /// This is the non-panicking variant of [`borrow`](#method.borrow). + /// + /// # Examples + /// + /// ``` + /// use gc::GcCell; + /// + /// let c = GcCell::new(5); + /// + /// { + /// let m = c.borrow_mut(); + /// assert!(c.try_borrow().is_err()); + /// } + /// + /// { + /// let m = c.borrow(); + /// assert!(c.try_borrow().is_ok()); + /// } + /// ``` + pub fn try_borrow(&self) -> Result, BorrowError> { + if self.flags.get().borrowed() == BorrowState::Writing { + return Err(BorrowError); + } + self.flags.set(self.flags.get().add_reading()); + + // This will fail if the borrow count overflows, which shouldn't happen, + // but let's be safe + assert!(self.flags.get().borrowed() == BorrowState::Reading); + + unsafe { + Ok(GcCellRef { + flags: &self.flags, + value: &*self.cell.get(), + }) + } + } + + /// Mutably borrows the wrapped value, returning an error if the value is currently borrowed. + /// + /// The borrow lasts until the returned `GcCellRefMut` exits scope. + /// The value cannot be borrowed while this borrow is active. + /// + /// This is the non-panicking variant of [`borrow_mut`](#method.borrow_mut). + /// + /// # Examples + /// + /// ``` + /// use gc::GcCell; + /// + /// let c = GcCell::new(5); + /// + /// { + /// let m = c.borrow(); + /// assert!(c.try_borrow_mut().is_err()); + /// } + /// + /// assert!(c.try_borrow_mut().is_ok()); + /// ``` + pub fn try_borrow_mut(&self) -> Result, BorrowMutError> { + if self.flags.get().borrowed() != BorrowState::Unused { + return Err(BorrowMutError); + } + self.flags.set(self.flags.get().set_writing()); + + unsafe { + // Force the val_ref's contents to be rooted for the duration of the + // mutable borrow + if !self.flags.get().rooted() { + (*self.cell.get()).root(); + } + + Ok(GcCellRefMut { + gc_cell: self, + value: &mut *self.cell.get(), + }) + } + } +} + +/// An error returned by [`GcCell::try_borrow`](struct.GcCell.html#method.try_borrow). +#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Default, Hash)] +pub struct BorrowError; + +impl std::fmt::Display for BorrowError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt("GcCell already mutably borrowed", f) + } +} + +/// An error returned by [`GcCell::try_borrow_mut`](struct.GcCell.html#method.try_borrow_mut). 
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Default, Hash)] +pub struct BorrowMutError; + +impl std::fmt::Display for BorrowMutError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt("GcCell already borrowed", f) + } +} + +impl Finalize for GcCell {} + +unsafe impl Trace for GcCell { + #[inline] + unsafe fn trace(&self) { + match self.flags.get().borrowed() { + BorrowState::Writing => (), + _ => (*self.cell.get()).trace(), + } + } + + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + + #[inline] + unsafe fn weak_trace(&self, queue: &mut Vec) { + match self.flags.get().borrowed() { + BorrowState::Writing => (), + _ => (*self.cell.get()).weak_trace(queue), + } + } + + unsafe fn root(&self) { + assert!(!self.flags.get().rooted(), "Can't root a GcCell twice!"); + self.flags.set(self.flags.get().set_rooted(true)); + + match self.flags.get().borrowed() { + BorrowState::Writing => (), + _ => (*self.cell.get()).root(), + } + } + + #[inline] + unsafe fn unroot(&self) { + assert!(self.flags.get().rooted(), "Can't unroot a GcCell twice!"); + self.flags.set(self.flags.get().set_rooted(false)); + + match self.flags.get().borrowed() { + BorrowState::Writing => (), + _ => (*self.cell.get()).unroot(), + } + } + + #[inline] + fn finalize_glue(&self) { + Finalize::finalize(self); + match self.flags.get().borrowed() { + BorrowState::Writing => (), + _ => unsafe { (*self.cell.get()).finalize_glue() }, + } + } +} \ No newline at end of file diff --git a/boa_gc/src/cell/cell_ref.rs b/boa_gc/src/cell/cell_ref.rs new file mode 100644 index 00000000000..9a6240f90cc --- /dev/null +++ b/boa_gc/src/cell/cell_ref.rs @@ -0,0 +1,327 @@ +//! Implementation of a garbage collected cell reference +use std::alloc::Layout; +use std::cell::{Cell, UnsafeCell}; +use std::cmp::Ordering; +use std::fmt::{self, Debug, Display}; +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::mem; +use std::ops::{Deref, DerefMut}; +use std::ptr::{self, NonNull}; + +mod borrow_flag; +mod cell; + +use borrow_flag::{BORROWFLAG_INIT, BorrowFlag}; +use cell::GcCell; + +/// A wrapper type for an immutably borrowed value from a `GcCell`. +pub struct GcCellRef<'a, T: ?Sized + 'static> { + flags: &'a Cell, + value: &'a T, +} + +impl<'a, T: ?Sized> GcCellRef<'a, T> { + /// Copies a `GcCellRef`. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `GcCellRef::clone(...)`. A `Clone` implementation or a method + /// would interfere with the use of `c.borrow().clone()` to clone + /// the contents of a `GcCell`. + #[inline] + pub fn clone(orig: &GcCellRef<'a, T>) -> GcCellRef<'a, T> { + orig.flags.set(orig.flags.get().add_reading()); + GcCellRef { + flags: orig.flags, + value: orig.value, + } + } + + /// Makes a new `GcCellRef` from a component of the borrowed data. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as `GcCellRef::map(...)`. + /// A method would interfere with methods of the same name on the contents + /// of a `GcCellRef` used through `Deref`. 
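// Usage sketch (assuming this crate builds as written in the series) of the
// borrow rules enforced above, and of the invariant behind the `root()` call in
// `try_borrow_mut`: the `Trace` impl skips the contents while a write borrow is
// live (`BorrowState::Writing`), so for a cell that is itself unrooted (owned by
// the GC heap) the contents are rooted for the duration of the borrow and
// unrooted again when the guard drops.
fn write_borrow_sketch() {
    let cell = GcCell::new(vec![1, 2, 3]);
    {
        let mut guard = cell.borrow_mut();
        guard.push(4);
        assert!(cell.try_borrow().is_err()); // exclusive while the guard lives
    } // guard dropped: flag reset to unused
    assert_eq!(cell.borrow().len(), 4);
    assert!(cell.try_borrow_mut().is_ok()); // no outstanding borrows remain
}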
+ /// + /// # Examples + /// + /// ``` + /// use gc::{GcCell, GcCellRef}; + /// + /// let c = GcCell::new((5, 'b')); + /// let b1: GcCellRef<(u32, char)> = c.borrow(); + /// let b2: GcCellRef = GcCellRef::map(b1, |t| &t.0); + /// //assert_eq!(b2, 5); + /// ``` + #[inline] + pub fn map(orig: Self, f: F) -> GcCellRef<'a, U> + where + U: ?Sized, + F: FnOnce(&T) -> &U, + { + let ret = GcCellRef { + flags: orig.flags, + value: f(orig.value), + }; + + // We have to tell the compiler not to call the destructor of GcCellRef, + // because it will update the borrow flags. + std::mem::forget(orig); + + ret + } + + /// Splits a `GcCellRef` into multiple `GcCellRef`s for different components of the borrowed data. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as GcCellRef::map_split(...). + /// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`. + /// + /// # Examples + /// + /// ``` + /// use gc::{GcCell, GcCellRef}; + /// + /// let cell = GcCell::new((1, 'c')); + /// let borrow = cell.borrow(); + /// let (first, second) = GcCellRef::map_split(borrow, |x| (&x.0, &x.1)); + /// assert_eq!(*first, 1); + /// assert_eq!(*second, 'c'); + /// ``` + #[inline] + pub fn map_split(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>) + where + U: ?Sized, + V: ?Sized, + F: FnOnce(&T) -> (&U, &V), + { + let (a, b) = f(orig.value); + + orig.flags.set(orig.flags.get().add_reading()); + + let ret = ( + GcCellRef { + flags: orig.flags, + value: a, + }, + GcCellRef { + flags: orig.flags, + value: b, + }, + ); + + // We have to tell the compiler not to call the destructor of GcCellRef, + // because it will update the borrow flags. + std::mem::forget(orig); + + ret + } +} + +impl<'a, T: ?Sized> Deref for GcCellRef<'a, T> { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + self.value + } +} + +impl<'a, T: ?Sized> Drop for GcCellRef<'a, T> { + fn drop(&mut self) { + debug_assert!(self.flags.get().borrowed() == BorrowState::Reading); + self.flags.set(self.flags.get().sub_reading()); + } +} + +impl<'a, T: ?Sized + Debug> Debug for GcCellRef<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl<'a, T: ?Sized + Display> Display for GcCellRef<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&**self, f) + } +} + + + + + +/// A wrapper type for a mutably borrowed value from a `GcCell`. +pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> { + gc_cell: &'a GcCell, + value: &'a mut U, +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> { + /// Makes a new `GcCellRefMut` for a component of the borrowed data, e.g., an enum + /// variant. + /// + /// The `GcCellRefMut` is already mutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `GcCellRefMut::map(...)`. A method would interfere with methods of the same + /// name on the contents of a `GcCell` used through `Deref`. 
+ /// + /// # Examples + /// + /// ``` + /// use gc::{GcCell, GcCellRefMut}; + /// + /// let c = GcCell::new((5, 'b')); + /// { + /// let b1: GcCellRefMut<(u32, char)> = c.borrow_mut(); + /// let mut b2: GcCellRefMut<(u32, char), u32> = GcCellRefMut::map(b1, |t| &mut t.0); + /// assert_eq!(*b2, 5); + /// *b2 = 42; + /// } + /// assert_eq!(*c.borrow(), (42, 'b')); + /// ``` + #[inline] + pub fn map(orig: Self, f: F) -> GcCellRefMut<'a, T, V> + where + V: ?Sized, + F: FnOnce(&mut U) -> &mut V, + { + let value = unsafe { &mut *(orig.value as *mut U) }; + + let ret = GcCellRefMut { + gc_cell: orig.gc_cell, + value: f(value), + }; + + // We have to tell the compiler not to call the destructor of GcCellRefMut, + // because it will update the borrow flags. + std::mem::forget(orig); + + ret + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'a, T, U> { + type Target = U; + + #[inline] + fn deref(&self) -> &U { + self.value + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'a, T, U> { + #[inline] + fn deref_mut(&mut self) -> &mut U { + self.value + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> { + #[inline] + fn drop(&mut self) { + debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing); + // Restore the rooted state of the GcCell's contents to the state of the GcCell. + // During the lifetime of the GcCellRefMut, the GcCell's contents are rooted. + if !self.gc_cell.flags.get().rooted() { + unsafe { + (*self.gc_cell.cell.get()).unroot(); + } + } + self.gc_cell + .flags + .set(self.gc_cell.flags.get().set_unused()); + } +} + +impl<'a, T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'a, T, U> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(&*(self.deref()), f) + } +} + +impl<'a, T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'a, T, U> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&**self, f) + } +} + +unsafe impl Send for GcCell {} + +impl Clone for GcCell { + #[inline] + fn clone(&self) -> Self { + Self::new(self.borrow().clone()) + } +} + +impl Default for GcCell { + #[inline] + fn default() -> Self { + Self::new(Default::default()) + } +} + +impl PartialEq for GcCell { + #[inline(always)] + fn eq(&self, other: &Self) -> bool { + *self.borrow() == *other.borrow() + } +} + +impl Eq for GcCell {} + +impl PartialOrd for GcCell { + #[inline(always)] + fn partial_cmp(&self, other: &Self) -> Option { + (*self.borrow()).partial_cmp(&*other.borrow()) + } + + #[inline(always)] + fn lt(&self, other: &Self) -> bool { + *self.borrow() < *other.borrow() + } + + #[inline(always)] + fn le(&self, other: &Self) -> bool { + *self.borrow() <= *other.borrow() + } + + #[inline(always)] + fn gt(&self, other: &Self) -> bool { + *self.borrow() > *other.borrow() + } + + #[inline(always)] + fn ge(&self, other: &Self) -> bool { + *self.borrow() >= *other.borrow() + } +} + +impl Ord for GcCell { + #[inline] + fn cmp(&self, other: &GcCell) -> Ordering { + (*self.borrow()).cmp(&*other.borrow()) + } +} + +impl Debug for GcCell { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.flags.get().borrowed() { + BorrowState::Unused | BorrowState::Reading => f + .debug_struct("GcCell") + .field("value", &self.borrow()) + .finish(), + BorrowState::Writing => f + .debug_struct("GcCell") + .field("value", &"") + .finish(), + } + } +} + diff --git a/boa_gc/src/cell/mod.rs b/boa_gc/src/cell/mod.rs new file mode 100644 index 
00000000000..30c29ef79fc --- /dev/null +++ b/boa_gc/src/cell/mod.rs @@ -0,0 +1,6 @@ + +pub mod cell; +pub mod cell_ref; + +pub use GcCell; +pub use GcCellRef; \ No newline at end of file diff --git a/boa_gc/src/heap_box.rs b/boa_gc/src/heap_box.rs index e4438d51acb..23d0ac00363 100644 --- a/boa_gc/src/heap_box.rs +++ b/boa_gc/src/heap_box.rs @@ -1,9 +1,122 @@ +use gc::{Trace, Finalize}; +use std::cell::Cell; +use std::ptr::{self, NonNull}; +const WEAK_MASK: usize = 1 << usize::BITS; +const MARK_MASK: usize = 1 << (usize::BITS - 1); +const ROOTS_MASK: usize = !(WEAK_MASK | MARK_MASK); +const ROOTS_MAX: usize = ROOTS_MASK; -pub(crate) struct HeapBoxHeader; +pub(crate) struct HeapBoxHeader { + references: Cell, + next: Cell>>>, +} +impl HeapBoxHeader { + #[inline] + pub fn new(next: Option>>) -> Self { + // TODO: implement a way for a cell to start out weak with WEAK_MASK + HeapBoxHeader { + references: Cell::new(1), + next: Cell::new(next), + } + } -pub(crate) struct HeapBox { + #[inline] + pub fn roots(&self) -> usize { + &self.roots.get() & ROOTS_MASK + } + + #[inline] + pub fn inc_roots(&self) { + let roots = self.roots.get(); + + if (roots & ROOTS_MASK) < ROOTS_MAX { + self.roots.set(roots + 1); + } else { + // TODO: implement a better way to handle root overload + panic!("roots counter overflow"); + } + } + + #[inline] + pub fn dec_roots(&self) { + self.roots.set(self.roots.get() - 1) // no underflow check + } + + #[inline] + pub fn is_marked(&self) -> bool { + self.roots.get() & MARK_MASK != 0 + } + + #[inline] + pub fn mark(&self) { + self.roots.set(self.roots.get() | MARK_MASK) + } + + #[inline] + pub fn unmark(&self) { + self.roots.set(self.roots.get() & !MARK_MASK) + } + + #[inline] + pub fn is_ephemeron(&self) { + self.roots.get() & WEAK_MASK != 0 + } +} + + +/// The HeapBox represents a box on the GC Heap. The HeapBox's creation and allocation is managed +/// by the allocator +pub(crate) struct HeapBox { header: HeapBoxHeader, object: T, +} + +impl HeapBox { + /// Returns `true` if the two references refer to the same `GcBox`. + pub(crate) fn ptr_eq(this: &HeapBox, other: &HeapBox) -> bool { + // Use .header to ignore fat pointer vtables, to work around + // https://github.com/rust-lang/rust/issues/46139 + ptr::eq(&this.header, &other.header) + } + + /// Marks this `GcBox` and marks through its data. + pub(crate) unsafe fn trace_inner(&self) { + if !self.header.is_marked() && !self.header.is_ephemeron() { + self.header.mark(); + self.data.trace(); + } + } + + /// Trace inner data + pub(crate) unsafe fn weak_trace_inner(&self, queue: &mut Vec>>) { + self.data.weak_trace(queue); + } + + /// Increases the root count on this `GcBox`. + /// Roots prevent the `GcBox` from being destroyed by the garbage collector. + pub(crate) unsafe fn root_inner(&self) { + self.header.inc_roots(); + } + + /// Decreases the root count on this `GcBox`. + /// Roots prevent the `GcBox` from being destroyed by the garbage collector. + pub(crate) unsafe fn unroot_inner(&self) { + self.header.dec_roots(); + } + + /// Returns a pointer to the `GcBox`'s value, without dereferencing it. + pub(crate) fn value_ptr(this: *const HeapBox) -> *const T { + unsafe { ptr::addr_of!((*this).data) } + } + + /// Returns a reference to the `GcBox`'s value. 
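// Standalone sketch of the header word used above: the top bit is the mark bit
// and the lower bits count roots. Two details of this revision are reworked
// later in the series: `1 << usize::BITS` for WEAK_MASK overflows the shift
// (the next gc_box.rs revision moves the weak flag into a separate `u8` next to
// the cycle age), and the methods read `self.roots` while the struct declares
// the field as `references`.
fn header_word_sketch() {
    const MARK_MASK: usize = 1 << (usize::BITS - 1);
    const ROOTS_MASK: usize = !MARK_MASK;

    let mut word: usize = 1; // one root, unmarked
    assert_eq!(word & ROOTS_MASK, 1);
    assert_eq!(word & MARK_MASK, 0);

    word |= MARK_MASK; // mark()
    assert_ne!(word & MARK_MASK, 0);
    assert_eq!(word & ROOTS_MASK, 1); // root count is unaffected by marking

    word &= !MARK_MASK; // unmark()
    assert_eq!(word, 1);
}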
+ pub(crate) fn value(&self) -> &T { + &self.data + } + + pub(crate) fn is_marked(&self) -> bool { + self.header.is_marked() + } } \ No newline at end of file diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index eaf35c2cbd2..0c08d9884ee 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -24,16 +24,16 @@ struct GcRuntimeData { struct BoaGc { config: GcConfig, runtime: GcRuntimeData, - heap_start: Option>>, + heap_start: Cell>>>, } /// The GcAllocater allocates a garbage collected value to heap. -pub struct GcAllocater; - -impl BoaAllocater { - pub fn new_gc(value: T) -> Gc { +pub struct GcAlloc; +impl GcAlloc { + pub fn new_gc(value: T) -> Gc { + } } From b2736abee2bf10597fb3921bd2060b1986c18eb0 Mon Sep 17 00:00:00 2001 From: nekevss Date: Mon, 24 Oct 2022 00:09:13 -0400 Subject: [PATCH 03/55] Complete initial work on alloc and collector --- boa_gc/Cargo.toml | 2 +- boa_gc/derive_macros/cargo.toml | 15 + boa_gc/derive_macros/src/lib.rs | 91 +++ boa_gc/src/cell/mod.rs | 6 - boa_gc/src/{heap_box.rs => gc_box.rs} | 102 +++- boa_gc/src/{cell => internals}/borrow_flag.rs | 16 +- boa_gc/src/{cell => internals}/cell.rs | 26 +- boa_gc/src/{cell => internals}/cell_ref.rs | 25 +- boa_gc/src/internals/ephemeron.rs | 0 boa_gc/src/internals/mod.rs | 6 + boa_gc/src/lib.rs | 339 ++++++++++- boa_gc/src/pointers/gc_ptr.rs | 249 ++++++++ boa_gc/src/pointers/mod.rs | 5 + boa_gc/src/trace.rs | 556 ++++++++++++++++++ 14 files changed, 1351 insertions(+), 87 deletions(-) create mode 100644 boa_gc/derive_macros/cargo.toml create mode 100644 boa_gc/derive_macros/src/lib.rs delete mode 100644 boa_gc/src/cell/mod.rs rename boa_gc/src/{heap_box.rs => gc_box.rs} (55%) rename boa_gc/src/{cell => internals}/borrow_flag.rs (87%) rename boa_gc/src/{cell => internals}/cell.rs (94%) rename boa_gc/src/{cell => internals}/cell_ref.rs (97%) create mode 100644 boa_gc/src/internals/ephemeron.rs create mode 100644 boa_gc/src/internals/mod.rs create mode 100644 boa_gc/src/pointers/gc_ptr.rs create mode 100644 boa_gc/src/pointers/mod.rs create mode 100644 boa_gc/src/trace.rs diff --git a/boa_gc/Cargo.toml b/boa_gc/Cargo.toml index 5cca97b53be..fb1a8f5da25 100644 --- a/boa_gc/Cargo.toml +++ b/boa_gc/Cargo.toml @@ -11,7 +11,7 @@ repository.workspace = true rust-version.workspace = true [dependencies] -gc = { version = "0.4.1", features = ["derive"] } +boa_gc_macros = { path = "derive_macros", version = "0.16.0" } # Optional Dependencies measureme = { version = "10.1.0", optional = true } diff --git a/boa_gc/derive_macros/cargo.toml b/boa_gc/derive_macros/cargo.toml new file mode 100644 index 00000000000..b9d873e3ab8 --- /dev/null +++ b/boa_gc/derive_macros/cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "boa_gc_macros" +version = "0.16.0" +description = "Garbage collector for the Boa JavaScript engine." 
+keywords = ["javascript", "js", "garbage", "memory"] +edition = "2021" + +[lib] +proc-macro = true + +[dependencies] +syn = "1.0" +proc-macro2 = "1.0" +quote = "1.0" +synstructure = "0.12" \ No newline at end of file diff --git a/boa_gc/derive_macros/src/lib.rs b/boa_gc/derive_macros/src/lib.rs new file mode 100644 index 00000000000..f3acb1f492e --- /dev/null +++ b/boa_gc/derive_macros/src/lib.rs @@ -0,0 +1,91 @@ +use quote::quote; +use synstructure::{decl_derive, AddBounds, Structure}; + +decl_derive!([Trace, attributes(unsafe_ignore_trace)] => derive_trace); + +fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { + s.filter(|bi| { + !bi.ast() + .attrs + .iter() + .any(|attr| attr.path.is_ident("unsafe_ignore_trace")) + }); + let trace_body = s.each(|bi| quote!(mark(#bi))); + let weak_trace_body = s.each(|bi| quote!(mark(#bi, queue))); + + s.add_bounds(AddBounds::Fields); + let trace_impl = s.unsafe_bound_impl( + quote!(::boa_gc::Trace), + quote! { + #[inline] unsafe fn trace(&self) { + #[allow(dead_code)] + #[inline] + unsafe fn mark(it: &T) { + ::boa_gc::Trace::trace(it); + } + match *self { #trace_body } + } + #[inline] unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + #[inline] unsafe fn weak_trace(&self, queue: &mut Vec) { + #[allow(dead_code, unreachable_code)] + #[inline] + unsafe fn mark(it: &T, queue: &mut Vec<::gc::GcPointer>) { + ::boa_gc::Trace::weak_trace(it, queue) + } + match *self { #weak_trace_body } + } + #[inline] unsafe fn root(&self) { + #[allow(dead_code)] + #[inline] + unsafe fn mark(it: &T) { + ::boa_gc::Trace::root(it); + } + match *self { #trace_body } + } + #[inline] unsafe fn unroot(&self) { + #[allow(dead_code)] + #[inline] + unsafe fn mark(it: &T) { + ::boa_gc::Trace::unroot(it); + } + match *self { #trace_body } + } + #[inline] fn finalize_glue(&self) { + ::boa_gc::Finalize::finalize(self); + #[allow(dead_code)] + #[inline] + fn mark(it: &T) { + ::boa_gc::Trace::finalize_glue(it); + } + match *self { #trace_body } + } + }, + ); + + // We also implement drop to prevent unsafe drop implementations on this + // type and encourage people to use Finalize. This implementation will + // call `Finalize::finalize` if it is safe to do so. + let drop_impl = s.unbound_impl( + quote!(::std::ops::Drop), + quote! { + fn drop(&mut self) { + if ::boa_gc::finalizer_safe() { + ::boa_gc::Finalize::finalize(self); + } + } + }, + ); + + quote! 
{ + #trace_impl + #drop_impl + } +} + +decl_derive!([Finalize] => derive_finalize); + +fn derive_finalize(s: Structure<'_>) -> proc_macro2::TokenStream { + s.unbound_impl(quote!(::boa_gc::Finalize), quote!()) +} diff --git a/boa_gc/src/cell/mod.rs b/boa_gc/src/cell/mod.rs deleted file mode 100644 index 30c29ef79fc..00000000000 --- a/boa_gc/src/cell/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ - -pub mod cell; -pub mod cell_ref; - -pub use GcCell; -pub use GcCellRef; \ No newline at end of file diff --git a/boa_gc/src/heap_box.rs b/boa_gc/src/gc_box.rs similarity index 55% rename from boa_gc/src/heap_box.rs rename to boa_gc/src/gc_box.rs index 23d0ac00363..4cc08341f36 100644 --- a/boa_gc/src/heap_box.rs +++ b/boa_gc/src/gc_box.rs @@ -1,27 +1,46 @@ -use gc::{Trace, Finalize}; +use crate::{Finalize, Trace}; use std::cell::Cell; use std::ptr::{self, NonNull}; -const WEAK_MASK: usize = 1 << usize::BITS; +// Age and Weak Flags +const WEAK_MASK: u8 = 1 << 7; +const AGE_MASK: u8 = !WEAK_MASK; +const AGE_MAX: u8 = AGE_MASK; + const MARK_MASK: usize = 1 << (usize::BITS - 1); -const ROOTS_MASK: usize = !(WEAK_MASK | MARK_MASK); +const ROOTS_MASK: usize = !MARK_MASK; const ROOTS_MAX: usize = ROOTS_MASK; -pub(crate) struct HeapBoxHeader { - references: Cell, - next: Cell>>>, +pub enum BoxLoc { + Stack, + Heap, +} + +pub(crate) struct GcBoxHeader { + roots: Cell, + cycle_age: Cell, + loc: Cell, + pub(crate) next: Cell>>>, } -impl HeapBoxHeader { +impl GcBoxHeader { #[inline] - pub fn new(next: Option>>) -> Self { + pub fn new() -> Self { // TODO: implement a way for a cell to start out weak with WEAK_MASK - HeapBoxHeader { - references: Cell::new(1), - next: Cell::new(next), + GcBoxHeader { + roots: Cell::new(1), + cycle_age: Cell::new(0_u8), + loc: Cell::new(BoxLoc::Stack), + next: Cell::new(None), } } + #[inline] + pub fn promote(&self, next: Option>>) { + self.loc.set(BoxLoc::Heap); + self.next.set(next); + } + #[inline] pub fn roots(&self) -> usize { &self.roots.get() & ROOTS_MASK @@ -32,7 +51,7 @@ impl HeapBoxHeader { let roots = self.roots.get(); if (roots & ROOTS_MASK) < ROOTS_MAX { - self.roots.set(roots + 1); + self.roots.set(roots + 1); } else { // TODO: implement a better way to handle root overload panic!("roots counter overflow"); @@ -60,38 +79,65 @@ impl HeapBoxHeader { } #[inline] - pub fn is_ephemeron(&self) { - self.roots.get() & WEAK_MASK != 0 + pub fn age(&self) -> u8 { + &self.cycle_age.get() & AGE_MASK + } + + #[inline] + pub fn inc_age(&self) { + let age = self.cycle_age.get(); + + // There is no need to increment the age after hitting max age + if (age & AGE_MASK) < AGE_MAX { + self.cycle_age.set(age + 1); + } } -} + #[inline] + pub fn is_ephemeron(&self) -> bool { + self.cycle_age.get() & WEAK_MASK != 0 + } +} /// The HeapBox represents a box on the GC Heap. The HeapBox's creation and allocation is managed -/// by the allocator -pub(crate) struct HeapBox { - header: HeapBoxHeader, - object: T, +/// by the allocator +pub struct GcBox { + pub(crate) header: GcBoxHeader, + pub(crate) value: T, } -impl HeapBox { +impl GcBox { + pub(crate) fn new(value: T) -> Self { + GcBox { + header: GcBoxHeader::new(), + value, + } + } +} + +impl GcBox { /// Returns `true` if the two references refer to the same `GcBox`. 
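// Standalone sketch of the second header word introduced in this gc_box.rs
// revision: a single `u8` packs the ephemeron ("weak") flag in bit 7 and the
// cycle age in the low 7 bits. `inc_age` saturates at AGE_MAX, and the
// collector promotes a box from the young stack list into the heap list once
// its age exceeds 10 in `stack_sweep` below.
fn age_byte_sketch() {
    const WEAK_MASK: u8 = 1 << 7;
    const AGE_MASK: u8 = !WEAK_MASK;
    const AGE_MAX: u8 = AGE_MASK;

    let mut byte: u8 = 0; // young, not an ephemeron
    assert_eq!(byte & AGE_MASK, 0);
    assert_eq!(byte & WEAK_MASK, 0);

    // inc_age: bump the age without disturbing the weak flag
    if (byte & AGE_MASK) < AGE_MAX {
        byte += 1;
    }
    assert_eq!(byte & AGE_MASK, 1);

    byte |= WEAK_MASK; // an ephemeron cell would start out with this bit set
    assert_ne!(byte & WEAK_MASK, 0);
    assert_eq!(byte & AGE_MASK, 1); // age survives the flag change
}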
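// Usage sketch for the derive macros above, assuming the re-exports land as in
// this series (`boa_gc` re-exports the derives; the pointer type is reachable
// as `boa_gc::pointers::Gc`). Deriving `Trace`/`Finalize` wires every field
// into trace/root/unroot and the finalizer, `#[unsafe_ignore_trace]` skips a
// field known to hold no Gc pointers, and the generated `Drop` only runs
// `Finalize::finalize` when `finalizer_safe()` says no sweep is in progress.
use boa_gc::pointers::Gc;
use boa_gc::{Finalize, Trace};

#[derive(Trace, Finalize)]
struct Node {
    name: String,
    children: Vec<Gc<Node>>,
    #[unsafe_ignore_trace]
    hits: std::cell::Cell<u32>, // not traced; must never contain Gc pointers
}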
- pub(crate) fn ptr_eq(this: &HeapBox, other: &HeapBox) -> bool { + pub(crate) fn ptr_eq(this: &GcBox, other: &GcBox) -> bool { // Use .header to ignore fat pointer vtables, to work around // https://github.com/rust-lang/rust/issues/46139 ptr::eq(&this.header, &other.header) } + pub(crate) fn promote(&self, next: Option>>) { + self.header.promote(next) + } + /// Marks this `GcBox` and marks through its data. pub(crate) unsafe fn trace_inner(&self) { if !self.header.is_marked() && !self.header.is_ephemeron() { self.header.mark(); - self.data.trace(); + self.value.trace(); } } /// Trace inner data - pub(crate) unsafe fn weak_trace_inner(&self, queue: &mut Vec>>) { - self.data.weak_trace(queue); + pub(crate) unsafe fn weak_trace_inner(&self, queue: &mut Vec>>) { + self.value.weak_trace(queue); } /// Increases the root count on this `GcBox`. @@ -107,16 +153,16 @@ impl HeapBox { } /// Returns a pointer to the `GcBox`'s value, without dereferencing it. - pub(crate) fn value_ptr(this: *const HeapBox) -> *const T { - unsafe { ptr::addr_of!((*this).data) } + pub(crate) fn value_ptr(this: *const GcBox) -> *const T { + unsafe { ptr::addr_of!((*this).value) } } /// Returns a reference to the `GcBox`'s value. pub(crate) fn value(&self) -> &T { - &self.data + &self.value } pub(crate) fn is_marked(&self) -> bool { self.header.is_marked() } -} \ No newline at end of file +} diff --git a/boa_gc/src/cell/borrow_flag.rs b/boa_gc/src/internals/borrow_flag.rs similarity index 87% rename from boa_gc/src/cell/borrow_flag.rs rename to boa_gc/src/internals/borrow_flag.rs index 47aaf11e68d..53be29a772d 100644 --- a/boa_gc/src/cell/borrow_flag.rs +++ b/boa_gc/src/internals/borrow_flag.rs @@ -23,7 +23,7 @@ const UNUSED: usize = 0; pub(crate) const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(1); impl BorrowFlag { - fn borrowed(self) -> BorrowState { + pub(crate) fn borrowed(self) -> BorrowState { match self.0 & !ROOT { UNUSED => BorrowState::Unused, WRITING => BorrowState::Writing, @@ -31,24 +31,24 @@ impl BorrowFlag { } } - fn rooted(self) -> bool { + pub(crate) fn rooted(self) -> bool { match self.0 & ROOT { 0 => false, _ => true, } } - fn set_writing(self) -> Self { + pub(crate) fn set_writing(self) -> Self { // Set every bit other than the root bit, which is preserved BorrowFlag(self.0 | WRITING) } - fn set_unused(self) -> Self { + pub(crate) fn set_unused(self) -> Self { // Clear every bit other than the root bit, which is preserved BorrowFlag(self.0 & ROOT) } - fn add_reading(self) -> Self { + pub(crate) fn add_reading(self) -> Self { assert!(self.borrowed() != BorrowState::Writing); // Add 1 to the integer starting at the second binary digit. As our // borrowstate is not writing, we know that overflow cannot happen, so @@ -58,7 +58,7 @@ impl BorrowFlag { BorrowFlag(self.0 + 0b10) } - fn sub_reading(self) -> Self { + pub(crate) fn sub_reading(self) -> Self { assert!(self.borrowed() == BorrowState::Reading); // Subtract 1 from the integer starting at the second binary digit. 
As // our borrowstate is not writing or unused, we know that overflow or @@ -69,8 +69,8 @@ impl BorrowFlag { BorrowFlag(self.0 - 0b10) } - fn set_rooted(self, rooted: bool) -> Self { + pub(crate) fn set_rooted(self, rooted: bool) -> Self { // Preserve the non-root bits BorrowFlag((self.0 & !ROOT) | (rooted as usize)) } -} \ No newline at end of file +} diff --git a/boa_gc/src/cell/cell.rs b/boa_gc/src/internals/cell.rs similarity index 94% rename from boa_gc/src/cell/cell.rs rename to boa_gc/src/internals/cell.rs index 0435ceb02cf..c4fca20d7a7 100644 --- a/boa_gc/src/cell/cell.rs +++ b/boa_gc/src/internals/cell.rs @@ -1,24 +1,24 @@ //! A garbage collected cell implementation -use std::alloc::Layout; use std::cell::{Cell, UnsafeCell}; -use std::cmp::Ordering; use std::fmt::{self, Debug, Display}; use std::hash::{Hash, Hasher}; -use std::ops::{Deref, DerefMut}; -use std::ptr::{self, NonNull}; - -mod borrow_flag; - -use borrow_flag::{BORROWFLAG_INIT, BorrowFlag}; +use crate::{ + internals::{ + borrow_flag::{BorrowFlag, BorrowState, BORROWFLAG_INIT}, + GcCellRef, GcCellRefMut, + }, + trace::{Finalize, Trace}, + GcPointer, +}; /// A mutable memory location with dynamically checked borrow rules /// that can be used inside of a garbage-collected pointer. /// /// This object is a `RefCell` that can be used inside of a `Gc`. pub struct GcCell { - flags: Cell, - cell: UnsafeCell, + pub(crate) flags: Cell, + pub(crate) cell: UnsafeCell, } impl GcCell { @@ -222,11 +222,11 @@ unsafe impl Trace for GcCell { } #[inline] - fn finalize_glue(&self) { + fn run_finalizer(&self) { Finalize::finalize(self); match self.flags.get().borrowed() { BorrowState::Writing => (), - _ => unsafe { (*self.cell.get()).finalize_glue() }, + _ => unsafe { (*self.cell.get()).run_finalizer() }, } } -} \ No newline at end of file +} diff --git a/boa_gc/src/cell/cell_ref.rs b/boa_gc/src/internals/cell_ref.rs similarity index 97% rename from boa_gc/src/cell/cell_ref.rs rename to boa_gc/src/internals/cell_ref.rs index 9a6240f90cc..f74b5ea3314 100644 --- a/boa_gc/src/cell/cell_ref.rs +++ b/boa_gc/src/internals/cell_ref.rs @@ -9,16 +9,18 @@ use std::mem; use std::ops::{Deref, DerefMut}; use std::ptr::{self, NonNull}; -mod borrow_flag; -mod cell; - -use borrow_flag::{BORROWFLAG_INIT, BorrowFlag}; -use cell::GcCell; +use crate::{ + internals::{ + borrow_flag::{BorrowFlag, BorrowState}, + GcCell, + }, + trace::{Finalize, Trace}, +}; /// A wrapper type for an immutably borrowed value from a `GcCell`. pub struct GcCellRef<'a, T: ?Sized + 'static> { - flags: &'a Cell, - value: &'a T, + pub(crate) flags: &'a Cell, + pub(crate) value: &'a T, } impl<'a, T: ?Sized> GcCellRef<'a, T> { @@ -151,14 +153,10 @@ impl<'a, T: ?Sized + Display> Display for GcCellRef<'a, T> { } } - - - - /// A wrapper type for a mutably borrowed value from a `GcCell`. 
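// Sketch of the finalization hook that the renamed `run_finalizer` (previously
// `finalize_glue`) drives, assuming the derive re-exports land as in this
// series: a type derives `Trace` but implements `Finalize` by hand, and the
// collector calls `finalize` on unreachable boxes before sweeping them. The
// derive's generated `Drop` also calls it, but only when `finalizer_safe()`
// reports that no sweep is running.
use boa_gc::{Finalize, Trace};

#[derive(Trace)]
struct TempFile {
    path: String,
}

impl Finalize for TempFile {
    fn finalize(&self) {
        // Best-effort cleanup when the value becomes unreachable.
        let _ = std::fs::remove_file(&self.path);
    }
}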
pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> { - gc_cell: &'a GcCell, - value: &'a mut U, + pub(crate) gc_cell: &'a GcCell, + pub(crate) value: &'a mut U, } impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> { @@ -324,4 +322,3 @@ impl Debug for GcCell { } } } - diff --git a/boa_gc/src/internals/ephemeron.rs b/boa_gc/src/internals/ephemeron.rs new file mode 100644 index 00000000000..e69de29bb2d diff --git a/boa_gc/src/internals/mod.rs b/boa_gc/src/internals/mod.rs new file mode 100644 index 00000000000..778e29b7b22 --- /dev/null +++ b/boa_gc/src/internals/mod.rs @@ -0,0 +1,6 @@ +pub(crate) mod borrow_flag; +pub mod cell; +pub mod cell_ref; + +pub use cell::GcCell; +pub use cell_ref::{GcCellRef, GcCellRefMut}; diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 0c08d9884ee..322d965f406 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -1,49 +1,354 @@ //! Garbage collector for the Boa JavaScript engine. -pub use gc::{ - custom_trace, finalizer_safe, force_collect, unsafe_empty_trace, Finalize, Gc, GcCell as Cell, - GcCellRef as Ref, GcCellRefMut as RefMut, Trace, -}; - +use std::cell::{Cell as StdCell, RefCell as StdRefCell}; +use std::mem; use std::ptr::NonNull; -mod heap_box; +pub use boa_gc_macros::{Trace, Finalize}; + +/// `gc_derive` is a general derive prelude import +pub mod derive_prelude { + pub use boa_gc_macros::{Trace, Finalize}; + pub use crate::GcPointer; +} + +mod gc_box; +mod internals; +pub mod trace; +pub mod pointers; -use heap_box::HeapBox; +pub(crate) use gc_box::GcBox; +pub use internals::{GcCell, GcCellRef}; +use pointers::Gc; +pub use crate::trace::{Finalize, Trace}; + +pub type GcPointer = NonNull>; + +thread_local!(pub static GC_DROPPING: StdCell = StdCell::new(false)); +thread_local!(static BOA_GC: StdRefCell = StdRefCell::new( BoaGc { + config: GcConfig::default(), + runtime: GcRuntimeData::default(), + heap_start: StdCell::new(None), + stack: StdCell::new(Vec::new()), +})); struct GcConfig { threshold: usize, growth_ratio: f64, + stack_base_capacity: usize, + stack_soft_cap: usize, +} +impl Default for GcConfig { + fn default() -> Self { + Self { + threshold: 100, + growth_ratio: 0.7, + stack_base_capacity: 255, + stack_soft_cap: 255, + } + } } struct GcRuntimeData { - byte_allocated: usize, + collections: usize, + heap_bytes_allocated: usize, + stack_allocations: usize, +} + +impl Default for GcRuntimeData { + fn default() -> Self { + Self { + collections: 0, + heap_bytes_allocated: 0, + stack_allocations: 0, + } + } } struct BoaGc { config: GcConfig, - runtime: GcRuntimeData, - heap_start: Cell>>>, + runtime: GcRuntimeData, + heap_start: StdCell>, + stack: StdCell>, +} + +// Whether or not the thread is currently in the sweep phase of garbage collection. +// During this phase, attempts to dereference a `Gc` pointer will trigger a panic. + +struct DropGuard; + +impl DropGuard { + fn new() -> DropGuard { + GC_DROPPING.with(|dropping| dropping.set(true)); + DropGuard + } } +impl Drop for DropGuard { + fn drop(&mut self) { + GC_DROPPING.with(|dropping| dropping.set(false)); + } +} + +pub fn finalizer_safe() -> bool { + GC_DROPPING.with(|dropping| !dropping.get()) +} -/// The GcAllocater allocates a garbage collected value to heap. +/// The GcAllocater handles initialization and allocation of garbage collected values. 
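// Crate-internal sketch (`DropGuard` is not exported) of how the thread-local
// collector state above is meant to be used, assuming the crate builds as
// written: every allocation borrows the thread-local `BOA_GC`, while
// `DropGuard` / `finalizer_safe` bracket the sweep so that finalizers and `Gc`
// derefs can tell a collection is in progress.
fn drop_guard_sketch() {
    assert!(finalizer_safe()); // no sweep running on this thread
    {
        let _guard = DropGuard::new(); // what the sweep phases take out
        assert!(!finalizer_safe()); // dereferencing a Gc would now panic
    }
    assert!(finalizer_safe()); // guard dropped, flag cleared again
}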
+/// +/// The allocator can trigger a garbage collection pub struct GcAlloc; impl GcAlloc { - pub fn new_gc(value: T) -> Gc { - + pub fn new(value: T) -> Gc { + BOA_GC.with(|st| { + let mut gc = st.borrow_mut(); + + unsafe { + Self::manage_state::(&mut *gc); + } + + let stack_element = Box::into_raw(Box::from(GcBox::new(value))); + unsafe { + let mut stack = gc.stack.take(); + stack.push(NonNull::new_unchecked(stack_element)); + gc.stack.set(stack); + gc.runtime.stack_allocations += 1; + + Gc::new(NonNull::new_unchecked(stack_element)) + } + }) } -} + pub fn new_cell(value: T) -> Gc> { + BOA_GC.with(|st| { + let mut gc = st.borrow_mut(); -pub struct Collector; + // Manage state preps the internal state for allocation and + // triggers a collection if the state dictates it. + unsafe { + Self::manage_state::(&mut *gc); + } + let cell = GcCell::new(value); + let stack_element = Box::into_raw(Box::from(GcBox::new(cell))); + unsafe { + let mut stack = gc.stack.take(); + stack.push(NonNull::new_unchecked(stack_element)); + gc.stack.set(stack); + gc.runtime.stack_allocations += 1; -impl Collector { - pub(crate) fn run_collection(st: &mut GcRuntimeData) { + Gc::new(NonNull::new_unchecked(stack_element)) + } + }) + } + + pub fn new_weak_pair(key: K, value: V) { + todo!() + } + + pub fn new_weak_cell(value: T) { + todo!() + } + pub(crate) unsafe fn promote_allocs( + promotions: Vec>>, + gc: &mut BoaGc, + ) { + for node in promotions { + (*node.as_ptr()).promote(gc.heap_start.take()); + gc.heap_start.set(Some(node)); + gc.runtime.heap_bytes_allocated += mem::size_of::>(); + } + } + + unsafe fn manage_state(gc: &mut BoaGc) { + if gc.runtime.heap_bytes_allocated > gc.config.threshold { + Collector::run_full_collection::(gc); + + if gc.runtime.heap_bytes_allocated as f64 + > gc.config.threshold as f64 * gc.config.growth_ratio + { + gc.config.threshold = + (gc.runtime.heap_bytes_allocated as f64 / gc.config.growth_ratio) as usize + } + } else { + if gc.runtime.stack_allocations > gc.config.stack_soft_cap { + Collector::run_stack_collection::(gc); + + // If we are constrained on the top of the stack, + // increase the size of capacity, so a garbage collection + // isn't triggered on every allocation + if gc.runtime.stack_allocations > gc.config.stack_soft_cap { + gc.config.stack_soft_cap += 5 + } + + // If the soft cap was increased but the allocation has lowered below + // the initial base, then reset to the original base + if gc.runtime.stack_allocations < gc.config.stack_base_capacity + && gc.config.stack_base_capacity != gc.config.stack_soft_cap + { + gc.config.stack_soft_cap = gc.config.stack_base_capacity + } + } + } } } +pub struct Collector; + +impl Collector { + pub(crate) unsafe fn run_stack_collection(gc: &mut BoaGc) { + gc.runtime.collections += 1; + let stack = gc.stack.take(); + let unreachable_nodes = Self::mark_stack(&stack); + Self::finalize(unreachable_nodes); + let _finalized = Self::mark_stack(&stack); + let promotions = Self::stack_sweep(gc, stack); + GcAlloc::promote_allocs::(promotions, gc); + } + + pub(crate) unsafe fn run_full_collection(gc: &mut BoaGc) { + gc.runtime.collections += 1; + let old_stack = gc.stack.take(); + let mut unreachable = Self::mark_heap(&gc.heap_start); + let stack_unreachable = Self::mark_stack(&old_stack); + unreachable.extend(stack_unreachable); + Self::finalize(unreachable); + let _heap_finalized = Self::mark_heap(&gc.heap_start); + let _sweep_finalized = Self::mark_stack(&old_stack); + Self::heap_sweep(gc); + let promotions = Self::stack_sweep(gc, 
old_stack); + GcAlloc::promote_allocs::(promotions, gc); + } + + pub(crate) unsafe fn mark_heap( + head: &StdCell>>>, + ) -> Vec>> { + // Walk the list, tracing and marking the nodes + let mut finalize = Vec::new(); + let mut ephemeron_queue = Vec::new(); + let mut mark_head = head; + while let Some(node) = mark_head.get() { + if (*node.as_ptr()).header.is_ephemeron() { + ephemeron_queue.push(node); + } else { + if (*node.as_ptr()).header.roots() > 0 { + (*node.as_ptr()).trace_inner(); + } else { + finalize.push(node) + } + } + mark_head = &(*node.as_ptr()).header.next; + } + + // Ephemeron Evaluation + if !ephemeron_queue.is_empty() { + loop { + let mut reachable_nodes = Vec::new(); + let mut other_nodes = Vec::new(); + // iterate through ephemeron queue, sorting nodes by whether they + // are reachable or unreachable + for node in ephemeron_queue { + if (*node.as_ptr()).value.is_marked_ephemeron() { + (*node.as_ptr()).header.mark(); + reachable_nodes.push(node); + } else { + other_nodes.push(node); + } + } + // Replace the old queue with the unreachable + ephemeron_queue = other_nodes; + + // If reachable nodes is not empty, trace values. If it is empty, + // break from the loop + if !reachable_nodes.is_empty() { + // iterate through reachable nodes and trace their values, + // enqueuing any ephemeron that is found during the trace + for node in reachable_nodes { + (*node.as_ptr()).weak_trace_inner(&mut ephemeron_queue) + } + } else { + break; + } + } + } + + // Any left over nodes in the ephemeron queue at this point are + // unreachable and need to be notified/finalized. + finalize.extend(ephemeron_queue); + + finalize + } + + pub(crate) unsafe fn mark_stack( + stack: &Vec>>, + ) -> Vec>> { + let mut finalize = Vec::new(); + + for node in stack { + if (*node.as_ptr()).header.roots() > 0 { + (*node.as_ptr()).header.mark() + } else { + finalize.push(*node) + } + } + + finalize + } + + unsafe fn finalize(finalize_vec: Vec>>) { + for node in finalize_vec { + // We double check that the unreachable nodes are actually unreachable + // prior to finalization as they could have been marked by a different + // trace after initially being added to the queue + if !(*node.as_ptr()).header.is_marked() { + Trace::run_finalizer(&(*node.as_ptr()).value) + } + } + } + + unsafe fn stack_sweep( + gc: &mut BoaGc, + old_stack: Vec>>, + ) -> Vec>> { + let _guard = DropGuard::new(); + + let mut new_stack = Vec::new(); + let mut promotions = Vec::new(); + + for node in old_stack { + if (*node.as_ptr()).header.is_marked() { + (*node.as_ptr()).header.unmark(); + (*node.as_ptr()).header.inc_age(); + if (*node.as_ptr()).header.age() > 10 { + promotions.push(node); + } else { + new_stack.push(node) + } + } else { + gc.runtime.stack_allocations -= 1; + } + } + + gc.stack.set(new_stack); + promotions + } + + unsafe fn heap_sweep(gc: &mut BoaGc) { + let _guard = DropGuard::new(); + + let mut sweep_head = &gc.heap_start; + while let Some(node) = sweep_head.get() { + if (*node.as_ptr()).header.is_marked() { + (*node.as_ptr()).header.unmark(); + sweep_head = &(*node.as_ptr()).header.next; + } else { + let unmarked_node = Box::from_raw(node.as_ptr()); + gc.runtime.heap_bytes_allocated -= mem::size_of_val::>(&*unmarked_node); + sweep_head.set(unmarked_node.header.next.take()); + } + } + } +} diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs new file mode 100644 index 00000000000..1b009738db5 --- /dev/null +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -0,0 +1,249 @@ +use std::cell::Cell; +use 
std::cmp::Ordering; +use std::fmt::{self, Debug, Display}; +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::ops::Deref; +use std::ptr::{self, NonNull}; +use std::rc::Rc; + +use crate::gc_box::GcBox; +use crate::{ + finalizer_safe, + trace::{Finalize, Trace}, + GcPointer, +}; + +pub(crate) unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { + ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); + ptr +} + +/// A garbage-collected pointer type over an immutable value. +pub struct Gc { + inner_ptr: Cell>>, + marker: PhantomData>, +} + +impl Gc { + /// Constructs a new `Gc` with the given value. + pub fn new(value: NonNull>) -> Self { + unsafe { + Self { + inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), + marker: PhantomData, + } + } + } +} + +impl Gc { + /// Returns `true` if the two `Gc`s point to the same allocation. + pub fn ptr_eq(this: &Gc, other: &Gc) -> bool { + GcBox::ptr_eq(this.inner(), other.inner()) + } +} + +/// Returns the given pointer with its root bit cleared. +pub(crate) unsafe fn clear_root_bit( + ptr: NonNull>, +) -> NonNull> { + let ptr = ptr.as_ptr(); + let data = ptr as *mut u8; + let addr = data as isize; + let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr)); + NonNull::new_unchecked(ptr) +} + +impl Gc { + fn rooted(&self) -> bool { + self.inner_ptr.get().as_ptr() as *mut u8 as usize & 1 != 0 + } + + unsafe fn set_root(&self) { + let ptr = self.inner_ptr.get().as_ptr(); + let data = ptr as *mut u8; + let addr = data as isize; + let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr)); + self.inner_ptr.set(NonNull::new_unchecked(ptr)); + } + + unsafe fn clear_root(&self) { + self.inner_ptr.set(clear_root_bit(self.inner_ptr.get())); + } + + #[inline] + fn inner_ptr(&self) -> *mut GcBox { + assert!(finalizer_safe()); + + unsafe { clear_root_bit(self.inner_ptr.get()).as_ptr() } + } + + #[inline] + fn inner(&self) -> &GcBox { + unsafe { &*self.inner_ptr() } + } +} + +impl Finalize for Gc {} + +unsafe impl Trace for Gc { + #[inline] + unsafe fn trace(&self) { + self.inner().trace_inner(); + } + + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + + #[inline] + unsafe fn weak_trace(&self, queue: &mut Vec) { + self.inner().weak_trace_inner(queue); + } + + #[inline] + unsafe fn root(&self) { + assert!(!self.rooted(), "Can't double-root a Gc"); + + // Try to get inner before modifying our state. Inner may be + // inaccessible due to this method being invoked during the sweeping + // phase, and we don't want to modify our state before panicking. + self.inner().root_inner(); + + self.set_root(); + } + + #[inline] + unsafe fn unroot(&self) { + assert!(self.rooted(), "Can't double-unroot a Gc"); + + // Try to get inner before modifying our state. Inner may be + // inaccessible due to this method being invoked during the sweeping + // phase, and we don't want to modify our state before panicking. + self.inner().unroot_inner(); + + self.clear_root(); + } + + #[inline] + fn run_finalizer(&self) { + Finalize::finalize(self); + } +} + +impl Clone for Gc { + #[inline] + fn clone(&self) -> Self { + unsafe { + self.inner().root_inner(); + let gc = Gc { + inner_ptr: Cell::new(self.inner_ptr.get()), + marker: PhantomData, + }; + gc.set_root(); + gc + } + } +} + +impl Deref for Gc { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + &self.inner().value() + } +} + +impl Drop for Gc { + #[inline] + fn drop(&mut self) { + // If this pointer was a root, we should unroot it. 
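// Standalone sketch of the pointer-tagging trick used by `set_root`,
// `clear_root` and `clear_root_bit` above: because a `GcBox` is always aligned
// to more than one byte, the low bit of the stored pointer is free to record
// whether this handle is currently rooted, and it must be stripped before the
// pointer is dereferenced.
fn root_bit_sketch() {
    let boxed = Box::new(42u64); // u64 has alignment 8, so bit 0 is spare
    let raw = Box::into_raw(boxed);

    let tagged = (raw as usize) | 1; // set_root: set the low bit
    assert_eq!(tagged & 1, 1); // rooted

    let untagged = (tagged & !1) as *mut u64; // clear_root_bit before deref
    assert_eq!(untagged, raw);
    assert_eq!(unsafe { *untagged }, 42);

    drop(unsafe { Box::from_raw(raw) }); // clean up the allocation
}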
+ if self.rooted() { + unsafe { + self.inner().unroot_inner(); + } + } + } +} + +impl PartialEq for Gc { + #[inline(always)] + fn eq(&self, other: &Self) -> bool { + **self == **other + } +} + +impl Eq for Gc {} + +impl PartialOrd for Gc { + #[inline(always)] + fn partial_cmp(&self, other: &Self) -> Option { + (**self).partial_cmp(&**other) + } + + #[inline(always)] + fn lt(&self, other: &Self) -> bool { + **self < **other + } + + #[inline(always)] + fn le(&self, other: &Self) -> bool { + **self <= **other + } + + #[inline(always)] + fn gt(&self, other: &Self) -> bool { + **self > **other + } + + #[inline(always)] + fn ge(&self, other: &Self) -> bool { + **self >= **other + } +} + +impl Ord for Gc { + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + (**self).cmp(&**other) + } +} + +impl Hash for Gc { + fn hash(&self, state: &mut H) { + (**self).hash(state); + } +} + +impl Display for Gc { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&**self, f) + } +} + +impl Debug for Gc { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl fmt::Pointer for Gc { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Pointer::fmt(&self.inner(), f) + } +} + +impl std::borrow::Borrow for Gc { + fn borrow(&self) -> &T { + &**self + } +} + +impl std::convert::AsRef for Gc { + fn as_ref(&self) -> &T { + &**self + } +} diff --git a/boa_gc/src/pointers/mod.rs b/boa_gc/src/pointers/mod.rs new file mode 100644 index 00000000000..c62a28e6fd1 --- /dev/null +++ b/boa_gc/src/pointers/mod.rs @@ -0,0 +1,5 @@ +//! Pointers represents the External types returned by the Boa Garbage Collector + +pub mod gc_ptr; + +pub use gc_ptr::Gc; diff --git a/boa_gc/src/trace.rs b/boa_gc/src/trace.rs new file mode 100644 index 00000000000..88b8573cdfe --- /dev/null +++ b/boa_gc/src/trace.rs @@ -0,0 +1,556 @@ +use std::borrow::{Cow, ToOwned}; +use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque}; +use std::hash::{BuildHasher, Hash}; +use std::marker::PhantomData; +use std::num::{ + NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128, + NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize, +}; +use std::path::{Path, PathBuf}; +use std::rc::Rc; +use std::sync::atomic::{ + AtomicBool, AtomicI16, AtomicI32, AtomicI64, AtomicI8, AtomicIsize, AtomicU16, AtomicU32, + AtomicU64, AtomicU8, AtomicUsize, +}; + +use crate::GcPointer; + +/// The Finalize trait, which needs to be implemented on +/// garbage-collected objects to define finalization logic. +pub trait Finalize { + fn finalize(&self) {} +} + +/// The Trace trait, which needs to be implemented on garbage-collected objects. +pub unsafe trait Trace: Finalize { + /// Marks all contained `Gc`s. + unsafe fn trace(&self); + + /// Checks if an ephemeron's key is marked. + /// + /// Note: value should always be implemented to return false + unsafe fn is_marked_ephemeron(&self) -> bool; + + /// Returns true if a marked `Gc` is found + unsafe fn weak_trace(&self, ephemeron_queue: &mut Vec); + + /// Increments the root-count of all contained `Gc`s. + unsafe fn root(&self); + + /// Decrements the root-count of all contained `Gc`s. + unsafe fn unroot(&self); + + /// Runs Finalize::finalize() on this object and all + /// contained subobjects + fn run_finalizer(&self); +} + +/// This rule implements the trace methods with empty implementations. +/// +/// Use this for marking types as not containing any `Trace` types. 
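// Conceptual sketch (a toy model, not the crate's own types) of the ephemeron
// pass in `Collector::mark_heap` above, which the `is_marked_ephemeron` and
// `weak_trace` hooks feed: ephemerons are queued instead of traced directly,
// then the queue is re-scanned until a fixed point. Any ephemeron whose key has
// become reachable is marked and its value traced (possibly unlocking further
// ephemerons); whatever is still queued at the end is unreachable and is handed
// to finalization.
fn ephemeron_fixed_point_sketch() {
    use std::collections::HashSet;

    // (key, value) pairs standing in for ephemerons; `marked` holds reached ids.
    let ephemerons = [(1, 2), (2, 3), (7, 8)];
    let mut marked: HashSet<i32> = HashSet::from([1]); // key 1 reached by a root
    let mut queue: Vec<(i32, i32)> = ephemerons.to_vec();

    loop {
        let (reachable, rest): (Vec<_>, Vec<_>) =
            queue.into_iter().partition(|(k, _)| marked.contains(k));
        queue = rest;
        if reachable.is_empty() {
            break; // fixed point reached
        }
        for (_, v) in reachable {
            marked.insert(v); // tracing the value may unlock more ephemerons
        }
    }

    assert!(marked.contains(&3)); // 1 -> 2 -> 3 became reachable transitively
    assert_eq!(queue, vec![(7, 8)]); // still unreachable; would be finalized
}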
+#[macro_export] +macro_rules! unsafe_empty_trace { + () => { + #[inline] + unsafe fn trace(&self) {} + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + #[inline] + unsafe fn weak_trace(&self, _ephemeron_queue: &mut Vec) {} + #[inline] + unsafe fn root(&self) {} + #[inline] + unsafe fn unroot(&self) {} + #[inline] + fn run_finalizer(&self) { + $crate::Finalize::finalize(self) + } + }; +} + +/// This rule implements the trace method. +/// +/// You define a `this` parameter name and pass in a body, which should call `mark` on every +/// traceable element inside the body. The mark implementation will automatically delegate to the +/// correct method on the argument. +#[macro_export] +macro_rules! custom_trace { + ($this:ident, $op:ident, $body:expr, $weak_body:expr) => { + #[inline] + unsafe fn trace(&self) { + #[inline] + unsafe fn mark(it: &T) { + $crate::Trace::trace(it); + } + let $this = self; + $body + } + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + #[inline] + unsafe fn weak_trace(&self, queue: &mut Vec) { + #[inline] + unsafe fn mark(it: &T, queue: &mut Vec) { + $crate::Trace::weak_trace(it, queue) + } + let $this = self; + let $op = queue; + $weak_body + } + #[inline] + unsafe fn root(&self) { + #[inline] + unsafe fn mark(it: &T) { + $crate::Trace::root(it); + } + let $this = self; + $body + } + #[inline] + unsafe fn unroot(&self) { + #[inline] + unsafe fn mark(it: &T) { + $crate::Trace::unroot(it); + } + let $this = self; + $body + } + #[inline] + fn run_finalizer(&self) { + $crate::Finalize::finalize(self); + #[inline] + fn mark(it: &T) { + $crate::Trace::run_finalizer(it); + } + let $this = self; + $body + } + }; +} + +impl Finalize for &'static T {} +unsafe impl Trace for &'static T { + unsafe_empty_trace!(); +} + +macro_rules! simple_empty_finalize_trace { + ($($T:ty),*) => { + $( + impl Finalize for $T {} + unsafe impl Trace for $T { unsafe_empty_trace!(); } + )* + } +} + +simple_empty_finalize_trace![ + (), + bool, + isize, + usize, + i8, + u8, + i16, + u16, + i32, + u32, + i64, + u64, + i128, + u128, + f32, + f64, + char, + String, + Box, + Rc, + Path, + PathBuf, + NonZeroIsize, + NonZeroUsize, + NonZeroI8, + NonZeroU8, + NonZeroI16, + NonZeroU16, + NonZeroI32, + NonZeroU32, + NonZeroI64, + NonZeroU64, + NonZeroI128, + NonZeroU128, + AtomicBool, + AtomicIsize, + AtomicUsize, + AtomicI8, + AtomicU8, + AtomicI16, + AtomicU16, + AtomicI32, + AtomicU32, + AtomicI64, + AtomicU64 +]; + +impl Finalize for [T; N] {} +unsafe impl Trace for [T; N] { + custom_trace!( + this, + queue, + { + for v in this { + mark(v); + } + }, + { + for v in this { + mark(v, queue); + } + } + ); +} + +macro_rules! fn_finalize_trace_one { + ($ty:ty $(,$args:ident)*) => { + impl Finalize for $ty {} + unsafe impl Trace for $ty { unsafe_empty_trace!(); } + } +} +macro_rules! fn_finalize_trace_group { + () => { + fn_finalize_trace_one!(extern "Rust" fn () -> Ret); + fn_finalize_trace_one!(extern "C" fn () -> Ret); + fn_finalize_trace_one!(unsafe extern "Rust" fn () -> Ret); + fn_finalize_trace_one!(unsafe extern "C" fn () -> Ret); + }; + ($($args:ident),*) => { + fn_finalize_trace_one!(extern "Rust" fn ($($args),*) -> Ret, $($args),*); + fn_finalize_trace_one!(extern "C" fn ($($args),*) -> Ret, $($args),*); + fn_finalize_trace_one!(extern "C" fn ($($args),*, ...) 
-> Ret, $($args),*); + fn_finalize_trace_one!(unsafe extern "Rust" fn ($($args),*) -> Ret, $($args),*); + fn_finalize_trace_one!(unsafe extern "C" fn ($($args),*) -> Ret, $($args),*); + fn_finalize_trace_one!(unsafe extern "C" fn ($($args),*, ...) -> Ret, $($args),*); + } +} + +macro_rules! tuple_finalize_trace { + () => {}; // This case is handled above, by simple_finalize_empty_trace!(). + ($($args:ident),*) => { + impl<$($args),*> Finalize for ($($args,)*) {} + unsafe impl<$($args: $crate::Trace),*> Trace for ($($args,)*) { + custom_trace!(this, queue, { + #[allow(non_snake_case, unused_unsafe)] + fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*)) { + unsafe { $(mark($args);)* } + } + avoid_lints(this) + }, { + #[allow(non_snake_case, unused_unsafe)] + fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*), queue: &mut Vec) { + unsafe { $(mark($args, queue);)* } + } + avoid_lints(this, queue) + }); + } + } +} + +macro_rules! type_arg_tuple_based_finalize_trace_impls { + ($(($($args:ident),*);)*) => { + $( + fn_finalize_trace_group!($($args),*); + tuple_finalize_trace!($($args),*); + )* + } +} + +type_arg_tuple_based_finalize_trace_impls![ + (); + (A); + (A, B); + (A, B, C); + (A, B, C, D); + (A, B, C, D, E); + (A, B, C, D, E, F); + (A, B, C, D, E, F, G); + (A, B, C, D, E, F, G, H); + (A, B, C, D, E, F, G, H, I); + (A, B, C, D, E, F, G, H, I, J); + (A, B, C, D, E, F, G, H, I, J, K); + (A, B, C, D, E, F, G, H, I, J, K, L); +]; + +impl Finalize for Rc {} +unsafe impl Trace for Rc { + custom_trace!( + this, + queue, + { + mark(&**this); + }, + mark(&**this, queue) + ); +} + +impl Finalize for Rc<[T]> {} +unsafe impl Trace for Rc<[T]> { + custom_trace!( + this, + queue, + { + for e in this.iter() { + mark(e); + } + }, + { + for e in this.iter() { + mark(e, queue); + } + } + ); +} + +impl Finalize for Box {} +unsafe impl Trace for Box { + custom_trace!( + this, + queue, + { + mark(&**this); + }, + mark(&**this, queue) + ); +} + +impl Finalize for Box<[T]> {} +unsafe impl Trace for Box<[T]> { + custom_trace!( + this, + queue, + { + for e in this.iter() { + mark(e); + } + }, + { + for e in this.iter() { + mark(e, queue); + } + } + ); +} + +impl Finalize for Vec {} +unsafe impl Trace for Vec { + custom_trace!( + this, + queue, + { + for e in this { + mark(e); + } + }, + { + for e in this { + mark(e, queue); + } + } + ); +} + +impl Finalize for Option {} +unsafe impl Trace for Option { + custom_trace!( + this, + queue, + { + if let Some(ref v) = *this { + mark(v); + } + }, + { + if let Some(ref v) = *this { + mark(v, queue) + } + } + ); +} + +impl Finalize for Result {} +unsafe impl Trace for Result { + custom_trace!( + this, + queue, + { + match *this { + Ok(ref v) => mark(v), + Err(ref v) => mark(v), + } + }, + { + let marked = match *this { + Ok(ref v) => mark(v, queue), + Err(ref v) => mark(v, queue), + }; + marked + } + ); +} + +impl Finalize for BinaryHeap {} +unsafe impl Trace for BinaryHeap { + custom_trace!( + this, + queue, + { + for v in this.iter() { + mark(v); + } + }, + { + for e in this.iter() { + mark(e, queue); + } + } + ); +} + +impl Finalize for BTreeMap {} +unsafe impl Trace for BTreeMap { + custom_trace!( + this, + queue, + { + for (k, v) in this { + mark(k); + mark(v); + } + }, + { + for (k, v) in this { + mark(k, queue); + mark(v, queue); + } + } + ); +} + +impl Finalize for BTreeSet {} +unsafe impl Trace for BTreeSet { + custom_trace!( + this, + queue, + { + for v in this { + mark(v); + } + }, + { + for v in this { + mark(v, 
queue); + } + } + ); +} + +impl Finalize for HashMap {} +unsafe impl Trace for HashMap { + custom_trace!( + this, + queue, + { + for (k, v) in this.iter() { + mark(k); + mark(v); + } + }, + { + for (k, v) in this.iter() { + mark(k, queue); + mark(v, queue); + } + } + ); +} + +impl Finalize for HashSet {} +unsafe impl Trace for HashSet { + custom_trace!( + this, + queue, + { + for v in this.iter() { + mark(v); + } + }, + { + for v in this.iter() { + mark(v, queue); + } + } + ); +} + +impl Finalize for LinkedList {} +unsafe impl Trace for LinkedList { + custom_trace!( + this, + queue, + { + for v in this.iter() { + mark(v); + } + }, + { + for v in this.iter() { + mark(v, queue); + } + } + ); +} + +impl Finalize for PhantomData {} +unsafe impl Trace for PhantomData { + unsafe_empty_trace!(); +} + +impl Finalize for VecDeque {} +unsafe impl Trace for VecDeque { + custom_trace!( + this, + queue, + { + for v in this.iter() { + mark(v); + } + }, + { + for v in this.iter() { + mark(v, queue); + } + } + ); +} + +impl<'a, T: ToOwned + Trace + ?Sized> Finalize for Cow<'a, T> {} +unsafe impl<'a, T: ToOwned + Trace + ?Sized> Trace for Cow<'a, T> +where + T::Owned: Trace, +{ + custom_trace!( + this, + queue, + { + if let Cow::Owned(ref v) = this { + mark(v); + } + }, + { + if let Cow::Owned(ref v) = this { + mark(v, queue) + } + } + ); +} From be81ba466b67c4fa9bfea9cf69692da36678bd69 Mon Sep 17 00:00:00 2001 From: nekevss Date: Mon, 24 Oct 2022 21:27:36 -0400 Subject: [PATCH 04/55] Ephemeron implementation and clean up --- boa_gc/derive_macros/src/lib.rs | 13 +- boa_gc/src/gc_box.rs | 25 ++- boa_gc/src/internals/cell.rs | 9 +- boa_gc/src/internals/cell_ref.rs | 7 +- boa_gc/src/internals/ephemeron.rs | 123 +++++++++++++ boa_gc/src/internals/mod.rs | 2 + boa_gc/src/lib.rs | 159 +++++++++++----- boa_gc/src/pointers/gc_ptr.rs | 5 +- boa_gc/src/pointers/mod.rs | 2 + boa_gc/src/pointers/weak_ptr.rs | 64 +++++++ boa_gc/src/trace.rs | 293 ++++++++---------------------- 11 files changed, 410 insertions(+), 292 deletions(-) create mode 100644 boa_gc/src/pointers/weak_ptr.rs diff --git a/boa_gc/derive_macros/src/lib.rs b/boa_gc/derive_macros/src/lib.rs index f3acb1f492e..7f2c000710d 100644 --- a/boa_gc/derive_macros/src/lib.rs +++ b/boa_gc/derive_macros/src/lib.rs @@ -11,7 +11,6 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { .any(|attr| attr.path.is_ident("unsafe_ignore_trace")) }); let trace_body = s.each(|bi| quote!(mark(#bi))); - let weak_trace_body = s.each(|bi| quote!(mark(#bi, queue))); s.add_bounds(AddBounds::Fields); let trace_impl = s.unsafe_bound_impl( @@ -28,13 +27,13 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { #[inline] unsafe fn is_marked_ephemeron(&self) -> bool { false } - #[inline] unsafe fn weak_trace(&self, queue: &mut Vec) { + #[inline] unsafe fn weak_trace(&self) { #[allow(dead_code, unreachable_code)] #[inline] - unsafe fn mark(it: &T, queue: &mut Vec<::gc::GcPointer>) { - ::boa_gc::Trace::weak_trace(it, queue) + unsafe fn mark(it: &T) { + ::boa_gc::Trace::weak_trace(it) } - match *self { #weak_trace_body } + match *self { #trace_body } } #[inline] unsafe fn root(&self) { #[allow(dead_code)] @@ -52,12 +51,12 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { } match *self { #trace_body } } - #[inline] fn finalize_glue(&self) { + #[inline] fn run_finalizer(&self) { ::boa_gc::Finalize::finalize(self); #[allow(dead_code)] #[inline] fn mark(it: &T) { - ::boa_gc::Trace::finalize_glue(it); + ::boa_gc::Trace::run_finalizer(it); } 
match *self { #trace_body } } diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index 4cc08341f36..0ad678985d2 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -1,3 +1,4 @@ +use crate::pointers::Gc; use crate::{Finalize, Trace}; use std::cell::Cell; use std::ptr::{self, NonNull}; @@ -35,6 +36,18 @@ impl GcBoxHeader { } } + #[inline] + pub fn new_weak() -> Self { + // Set weak_flag + let cycle_age = 0_u8 | WEAK_MASK; + GcBoxHeader { + roots: Cell::new(0), + cycle_age: Cell::new(cycle_age), + loc: Cell::new(BoxLoc::Stack), + next: Cell::new(None), + } + } + #[inline] pub fn promote(&self, next: Option>>) { self.loc.set(BoxLoc::Heap); @@ -101,6 +114,7 @@ impl GcBoxHeader { /// The HeapBox represents a box on the GC Heap. The HeapBox's creation and allocation is managed /// by the allocator +#[repr(C)] pub struct GcBox { pub(crate) header: GcBoxHeader, pub(crate) value: T, @@ -113,6 +127,13 @@ impl GcBox { value, } } + + pub(crate) fn new_weak(value: T) -> Self { + GcBox { + header: GcBoxHeader::new_weak(), + value, + } + } } impl GcBox { @@ -136,8 +157,8 @@ impl GcBox { } /// Trace inner data - pub(crate) unsafe fn weak_trace_inner(&self, queue: &mut Vec>>) { - self.value.weak_trace(queue); + pub(crate) unsafe fn weak_trace_inner(&self) { + self.value.weak_trace(); } /// Increases the root count on this `GcBox`. diff --git a/boa_gc/src/internals/cell.rs b/boa_gc/src/internals/cell.rs index c4fca20d7a7..911ad9e9517 100644 --- a/boa_gc/src/internals/cell.rs +++ b/boa_gc/src/internals/cell.rs @@ -1,7 +1,7 @@ //! A garbage collected cell implementation use std::cell::{Cell, UnsafeCell}; -use std::fmt::{self, Debug, Display}; -use std::hash::{Hash, Hasher}; +use std::fmt::{Debug, Display}; +use std::hash::Hash; use crate::{ internals::{ @@ -9,7 +9,6 @@ use crate::{ GcCellRef, GcCellRefMut, }, trace::{Finalize, Trace}, - GcPointer, }; /// A mutable memory location with dynamically checked borrow rules @@ -193,10 +192,10 @@ unsafe impl Trace for GcCell { } #[inline] - unsafe fn weak_trace(&self, queue: &mut Vec) { + unsafe fn weak_trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), - _ => (*self.cell.get()).weak_trace(queue), + _ => (*self.cell.get()).weak_trace(), } } diff --git a/boa_gc/src/internals/cell_ref.rs b/boa_gc/src/internals/cell_ref.rs index f74b5ea3314..0e5c76759ba 100644 --- a/boa_gc/src/internals/cell_ref.rs +++ b/boa_gc/src/internals/cell_ref.rs @@ -1,13 +1,8 @@ //! Implementation of a garbage collected cell reference -use std::alloc::Layout; -use std::cell::{Cell, UnsafeCell}; +use std::cell::Cell; use std::cmp::Ordering; use std::fmt::{self, Debug, Display}; -use std::hash::{Hash, Hasher}; -use std::marker::PhantomData; -use std::mem; use std::ops::{Deref, DerefMut}; -use std::ptr::{self, NonNull}; use crate::{ internals::{ diff --git a/boa_gc/src/internals/ephemeron.rs b/boa_gc/src/internals/ephemeron.rs index e69de29bb2d..f9d5f2f40ea 100644 --- a/boa_gc/src/internals/ephemeron.rs +++ b/boa_gc/src/internals/ephemeron.rs @@ -0,0 +1,123 @@ +//! 
This module will implement the internal types GcBox and Ephemeron +use crate::trace::Trace; +use crate::Finalize; +use crate::{finalizer_safe, GcBox}; +use std::cell::Cell; +use std::ptr::NonNull; + +/// Implementation of an Ephemeron structure +pub struct Ephemeron { + key: Cell>>>, + value: V, +} + +impl Ephemeron { + pub unsafe fn new(value: *mut GcBox) -> Self { + let ptr = NonNull::new_unchecked(value); + Ephemeron { + key: Cell::new(Some(ptr)), + value: (), + } + } +} + +impl Ephemeron { + #[inline] + pub(crate) fn is_marked(&self) -> bool { + if let Some(key) = self.inner_key() { + key.is_marked() + } else { + false + } + } + + #[inline] + fn inner_key_ptr(&self) -> Option<*mut GcBox> { + assert!(finalizer_safe()); + if let Some(key_node) = self.key.get() { + Some(key_node.as_ptr()) + } else { + None + } + } + + #[inline] + fn inner_key(&self) -> Option<&GcBox> { + unsafe { + if let Some(inner_key) = self.inner_key_ptr() { + Some(&*inner_key) + } else { + None + } + } + } + + #[inline] + pub fn key_value(&self) -> Option<&K> { + if let Some(key_box) = self.inner_key() { + Some(key_box.value()) + } else { + None + } + } + + #[inline] + pub fn inner_value(&self) -> &V { + &self.value + } + + #[inline] + unsafe fn weak_trace_key(&self) { + if let Some(key) = self.inner_key() { + key.weak_trace_inner() + } + } + + #[inline] + unsafe fn weak_trace_value(&self) { + self.inner_value().weak_trace() + } +} + +impl Finalize for Ephemeron { + #[inline] + fn finalize(&self) { + self.key.set(None) + } +} + +unsafe impl Trace for Ephemeron { + #[inline] + unsafe fn trace(&self) { + /* An ephemeron is never traced with Phase One Trace */ + /* May be traced in phase 3, so this still may need to be implemented */ + } + + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + self.is_marked() + } + + #[inline] + unsafe fn weak_trace(&self) { + if self.is_marked() { + self.weak_trace_key(); + self.weak_trace_value(); + } + } + + #[inline] + unsafe fn root(&self) { + // An ephemeron here should probably not be rooted. 
+ } + + #[inline] + unsafe fn unroot(&self) { + // An ephemeron is never rotted in the GcBoxHeader + } + + #[inline] + fn run_finalizer(&self) { + Finalize::finalize(self) + } +} diff --git a/boa_gc/src/internals/mod.rs b/boa_gc/src/internals/mod.rs index 778e29b7b22..dd2e8dda040 100644 --- a/boa_gc/src/internals/mod.rs +++ b/boa_gc/src/internals/mod.rs @@ -1,6 +1,8 @@ pub(crate) mod borrow_flag; pub mod cell; pub mod cell_ref; +pub mod ephemeron; pub use cell::GcCell; pub use cell_ref::{GcCellRef, GcCellRefMut}; +pub use ephemeron::Ephemeron; diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 322d965f406..b89043595a2 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -4,26 +4,27 @@ use std::cell::{Cell as StdCell, RefCell as StdRefCell}; use std::mem; use std::ptr::NonNull; -pub use boa_gc_macros::{Trace, Finalize}; +pub use boa_gc_macros::{Finalize, Trace}; -/// `gc_derive` is a general derive prelude import +/// `gc_derive` is a general derive prelude import pub mod derive_prelude { - pub use boa_gc_macros::{Trace, Finalize}; pub use crate::GcPointer; + pub use boa_gc_macros::{Finalize, Trace}; } mod gc_box; mod internals; -pub mod trace; pub mod pointers; +pub mod trace; -pub(crate) use gc_box::GcBox; -pub use internals::{GcCell, GcCellRef}; -use pointers::Gc; pub use crate::trace::{Finalize, Trace}; +pub(crate) use gc_box::GcBox; +pub use internals::{Ephemeron, GcCell as Cell, GcCellRef as CellRef}; +pub use pointers::{Gc, WeakGc}; pub type GcPointer = NonNull>; +thread_local!(pub static EPHEMERON_QUEUE: StdCell>> = StdCell::new(None)); thread_local!(pub static GC_DROPPING: StdCell = StdCell::new(false)); thread_local!(static BOA_GC: StdRefCell = StdRefCell::new( BoaGc { config: GcConfig::default(), @@ -73,6 +74,13 @@ struct BoaGc { stack: StdCell>, } +impl Drop for BoaGc { + fn drop(&mut self) { + unsafe { + Collector::dump(self); + } + } +} // Whether or not the thread is currently in the sweep phase of garbage collection. // During this phase, attempts to dereference a `Gc` pointer will trigger a panic. @@ -98,9 +106,9 @@ pub fn finalizer_safe() -> bool { /// The GcAllocater handles initialization and allocation of garbage collected values. 
/// /// The allocator can trigger a garbage collection -pub struct GcAlloc; +pub struct BoaAlloc; -impl GcAlloc { +impl BoaAlloc { pub fn new(value: T) -> Gc { BOA_GC.with(|st| { let mut gc = st.borrow_mut(); @@ -121,7 +129,7 @@ impl GcAlloc { }) } - pub fn new_cell(value: T) -> Gc> { + pub fn new_cell(value: T) -> Gc> { BOA_GC.with(|st| { let mut gc = st.borrow_mut(); @@ -131,7 +139,7 @@ impl GcAlloc { Self::manage_state::(&mut *gc); } - let cell = GcCell::new(value); + let cell = Cell::new(value); let stack_element = Box::into_raw(Box::from(GcBox::new(cell))); unsafe { let mut stack = gc.stack.take(); @@ -144,14 +152,31 @@ impl GcAlloc { }) } - pub fn new_weak_pair(key: K, value: V) { + pub fn new_weak_pair(key: GcPointer, value: V) { todo!() } - pub fn new_weak_cell(value: T) { - todo!() + pub fn new_weak_ref(value: NonNull>) -> WeakGc> { + BOA_GC.with(|state| { + let mut gc = state.borrow_mut(); + + unsafe { + Self::manage_state::(&mut *gc); + + let ephemeron = Ephemeron::new(value.as_ptr()); + let stack_element = Box::into_raw(Box::from(GcBox::new_weak(ephemeron))); + let mut stack = gc.stack.take(); + stack.push(NonNull::new_unchecked(stack_element)); + gc.stack.set(stack); + gc.runtime.stack_allocations += 1; + + WeakGc::new(NonNull::new_unchecked(stack_element)) + } + }) } + // Possibility here for `new_weak` that takes any value and creates a new WeakGc + pub(crate) unsafe fn promote_allocs( promotions: Vec>>, gc: &mut BoaGc, @@ -206,7 +231,7 @@ impl Collector { Self::finalize(unreachable_nodes); let _finalized = Self::mark_stack(&stack); let promotions = Self::stack_sweep(gc, stack); - GcAlloc::promote_allocs::(promotions, gc); + BoaAlloc::promote_allocs::(promotions, gc); } pub(crate) unsafe fn run_full_collection(gc: &mut BoaGc) { @@ -220,7 +245,7 @@ impl Collector { let _sweep_finalized = Self::mark_stack(&old_stack); Self::heap_sweep(gc); let promotions = Self::stack_sweep(gc, old_stack); - GcAlloc::promote_allocs::(promotions, gc); + BoaAlloc::promote_allocs::(promotions, gc); } pub(crate) unsafe fn mark_heap( @@ -245,34 +270,7 @@ impl Collector { // Ephemeron Evaluation if !ephemeron_queue.is_empty() { - loop { - let mut reachable_nodes = Vec::new(); - let mut other_nodes = Vec::new(); - // iterate through ephemeron queue, sorting nodes by whether they - // are reachable or unreachable - for node in ephemeron_queue { - if (*node.as_ptr()).value.is_marked_ephemeron() { - (*node.as_ptr()).header.mark(); - reachable_nodes.push(node); - } else { - other_nodes.push(node); - } - } - // Replace the old queue with the unreachable - ephemeron_queue = other_nodes; - - // If reachable nodes is not empty, trace values. 
If it is empty, - // break from the loop - if !reachable_nodes.is_empty() { - // iterate through reachable nodes and trace their values, - // enqueuing any ephemeron that is found during the trace - for node in reachable_nodes { - (*node.as_ptr()).weak_trace_inner(&mut ephemeron_queue) - } - } else { - break; - } - } + ephemeron_queue = Self::mark_ephemerons(ephemeron_queue); } // Any left over nodes in the ephemeron queue at this point are @@ -286,18 +284,75 @@ impl Collector { stack: &Vec>>, ) -> Vec>> { let mut finalize = Vec::new(); + let mut ephemeron_queue = Vec::new(); for node in stack { - if (*node.as_ptr()).header.roots() > 0 { - (*node.as_ptr()).header.mark() + if (*node.as_ptr()).header.is_ephemeron() { + ephemeron_queue.push(*node) } else { - finalize.push(*node) + if (*node.as_ptr()).header.roots() > 0 { + (*node.as_ptr()).header.mark() + } else { + finalize.push(*node) + } } } + if !ephemeron_queue.is_empty() { + ephemeron_queue = Self::mark_ephemerons(ephemeron_queue) + } + + finalize.extend(ephemeron_queue); + finalize } + // Tracing Ephemerons/Weak is always requires tracing the inner nodes in case it ends up marking unmarked node + // + // Time complexity should be something like O(nd) where d is the longest chain of epehemerons + unsafe fn mark_ephemerons( + initial_queue: Vec>>, + ) -> Vec>> { + let mut ephemeron_queue = initial_queue; + loop { + let mut reachable_nodes = Vec::new(); + let mut other_nodes = Vec::new(); + // iterate through ephemeron queue, sorting nodes by whether they + // are reachable or unreachable + for node in ephemeron_queue { + if (*node.as_ptr()).value.is_marked_ephemeron() { + (*node.as_ptr()).header.mark(); + reachable_nodes.push(node); + } else { + other_nodes.push(node); + } + } + // Replace the old queue with the unreachable + ephemeron_queue = other_nodes; + + // If reachable nodes is not empty, trace values. 
If it is empty, + // break from the loop + if !reachable_nodes.is_empty() { + EPHEMERON_QUEUE.with(|state| state.set(Some(Vec::new()))); + // iterate through reachable nodes and trace their values, + // enqueuing any ephemeron that is found during the trace + for node in reachable_nodes { + // TODO: deal with fetch ephemeron_queue + (*node.as_ptr()).weak_trace_inner() + } + + EPHEMERON_QUEUE.with(|st| { + if let Some(found_nodes) = st.take() { + ephemeron_queue.extend(found_nodes) + } + }) + } else { + break; + } + } + ephemeron_queue + } + unsafe fn finalize(finalize_vec: Vec>>) { for node in finalize_vec { // We double check that the unreachable nodes are actually unreachable @@ -319,7 +374,7 @@ impl Collector { let mut promotions = Vec::new(); for node in old_stack { - if (*node.as_ptr()).header.is_marked() { + if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); (*node.as_ptr()).header.inc_age(); if (*node.as_ptr()).header.age() > 10 { @@ -341,7 +396,7 @@ impl Collector { let mut sweep_head = &gc.heap_start; while let Some(node) = sweep_head.get() { - if (*node.as_ptr()).header.is_marked() { + if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); sweep_head = &(*node.as_ptr()).header.next; } else { @@ -351,4 +406,10 @@ impl Collector { } } } + + // Clean up the heap when BoaGc is dropped + unsafe fn dump(gc: &mut BoaGc) { + let _unreachable = Self::mark_heap(&gc.heap_start); + Self::heap_sweep(gc); + } } diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index 1b009738db5..a915b3b7a8d 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -11,7 +11,6 @@ use crate::gc_box::GcBox; use crate::{ finalizer_safe, trace::{Finalize, Trace}, - GcPointer, }; pub(crate) unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { @@ -99,8 +98,8 @@ unsafe impl Trace for Gc { } #[inline] - unsafe fn weak_trace(&self, queue: &mut Vec) { - self.inner().weak_trace_inner(queue); + unsafe fn weak_trace(&self) { + self.inner().weak_trace_inner(); } #[inline] diff --git a/boa_gc/src/pointers/mod.rs b/boa_gc/src/pointers/mod.rs index c62a28e6fd1..7dc4e0c2381 100644 --- a/boa_gc/src/pointers/mod.rs +++ b/boa_gc/src/pointers/mod.rs @@ -1,5 +1,7 @@ //! 
Pointers represents the External types returned by the Boa Garbage Collector pub mod gc_ptr; +pub mod weak_ptr; pub use gc_ptr::Gc; +pub use weak_ptr::WeakGc; diff --git a/boa_gc/src/pointers/weak_ptr.rs b/boa_gc/src/pointers/weak_ptr.rs new file mode 100644 index 00000000000..d050a13a12b --- /dev/null +++ b/boa_gc/src/pointers/weak_ptr.rs @@ -0,0 +1,64 @@ + +use crate::{ + GcBox, EPHEMERON_QUEUE, finalizer_safe, + trace::{Trace, Finalize}, +}; +use std::cell::Cell; +use std::ptr::NonNull; + + +pub struct WeakGc { + inner_ptr: Cell>>, +} + +impl WeakGc { + pub fn new(value: NonNull>) -> Self { + unsafe { + Self { + inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), + } + } + } +} + +impl WeakGc { + #[inline] + fn inner_ptr(&self) -> *mut GcBox { + assert!(finalizer_safe()); + + unsafe { self.inner_ptr.get().as_ptr() } + } +} + + +impl Finalize for WeakGc {} + +unsafe impl Trace for WeakGc { + #[inline] + unsafe fn trace(&self) {} + + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + + #[inline] + unsafe fn weak_trace(&self) { + EPHEMERON_QUEUE.with(|q| { + let mut queue = q.take().expect("queue is initialized by weak_trace"); + queue.push(NonNull::new_unchecked(self.inner_ptr())) + }) + } + + #[inline] + unsafe fn root(&self) {} + + #[inline] + unsafe fn unroot(&self) {} + + #[inline] + fn run_finalizer(&self) { + Finalize::finalize(self) + } +} + diff --git a/boa_gc/src/trace.rs b/boa_gc/src/trace.rs index 88b8573cdfe..95e92bc09bd 100644 --- a/boa_gc/src/trace.rs +++ b/boa_gc/src/trace.rs @@ -32,7 +32,7 @@ pub unsafe trait Trace: Finalize { unsafe fn is_marked_ephemeron(&self) -> bool; /// Returns true if a marked `Gc` is found - unsafe fn weak_trace(&self, ephemeron_queue: &mut Vec); + unsafe fn weak_trace(&self); /// Increments the root-count of all contained `Gc`s. unsafe fn root(&self); @@ -58,7 +58,7 @@ macro_rules! unsafe_empty_trace { false } #[inline] - unsafe fn weak_trace(&self, _ephemeron_queue: &mut Vec) {} + unsafe fn weak_trace(&self) {} #[inline] unsafe fn root(&self) {} #[inline] @@ -77,7 +77,7 @@ macro_rules! unsafe_empty_trace { /// correct method on the argument. #[macro_export] macro_rules! custom_trace { - ($this:ident, $op:ident, $body:expr, $weak_body:expr) => { + ($this:ident, $body:expr) => { #[inline] unsafe fn trace(&self) { #[inline] @@ -92,14 +92,13 @@ macro_rules! custom_trace { false } #[inline] - unsafe fn weak_trace(&self, queue: &mut Vec) { + unsafe fn weak_trace(&self) { #[inline] - unsafe fn mark(it: &T, queue: &mut Vec) { - $crate::Trace::weak_trace(it, queue) + unsafe fn mark(it: &T) { + $crate::Trace::weak_trace(it) } let $this = self; - let $op = queue; - $weak_body + $body } #[inline] unsafe fn root(&self) { @@ -196,20 +195,11 @@ simple_empty_finalize_trace![ impl Finalize for [T; N] {} unsafe impl Trace for [T; N] { - custom_trace!( - this, - queue, - { - for v in this { - mark(v); - } - }, - { - for v in this { - mark(v, queue); - } + custom_trace!(this, { + for v in this { + mark(v); } - ); + }); } macro_rules! fn_finalize_trace_one { @@ -240,18 +230,12 @@ macro_rules! 
tuple_finalize_trace { ($($args:ident),*) => { impl<$($args),*> Finalize for ($($args,)*) {} unsafe impl<$($args: $crate::Trace),*> Trace for ($($args,)*) { - custom_trace!(this, queue, { + custom_trace!(this, { #[allow(non_snake_case, unused_unsafe)] fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*)) { unsafe { $(mark($args);)* } } avoid_lints(this) - }, { - #[allow(non_snake_case, unused_unsafe)] - fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*), queue: &mut Vec) { - unsafe { $(mark($args, queue);)* } - } - avoid_lints(this, queue) }); } } @@ -284,231 +268,118 @@ type_arg_tuple_based_finalize_trace_impls![ impl Finalize for Rc {} unsafe impl Trace for Rc { - custom_trace!( - this, - queue, - { - mark(&**this); - }, - mark(&**this, queue) - ); + custom_trace!(this, { + mark(&**this); + }); } impl Finalize for Rc<[T]> {} unsafe impl Trace for Rc<[T]> { - custom_trace!( - this, - queue, - { - for e in this.iter() { - mark(e); - } - }, - { - for e in this.iter() { - mark(e, queue); - } + custom_trace!(this, { + for e in this.iter() { + mark(e); } - ); + }); } impl Finalize for Box {} unsafe impl Trace for Box { - custom_trace!( - this, - queue, - { - mark(&**this); - }, - mark(&**this, queue) - ); + custom_trace!(this, { + mark(&**this); + }); } impl Finalize for Box<[T]> {} unsafe impl Trace for Box<[T]> { - custom_trace!( - this, - queue, - { - for e in this.iter() { - mark(e); - } - }, - { - for e in this.iter() { - mark(e, queue); - } + custom_trace!(this, { + for e in this.iter() { + mark(e); } - ); + }); } impl Finalize for Vec {} unsafe impl Trace for Vec { - custom_trace!( - this, - queue, - { - for e in this { - mark(e); - } - }, - { - for e in this { - mark(e, queue); - } + custom_trace!(this, { + for e in this { + mark(e); } - ); + }); } impl Finalize for Option {} unsafe impl Trace for Option { - custom_trace!( - this, - queue, - { - if let Some(ref v) = *this { - mark(v); - } - }, - { - if let Some(ref v) = *this { - mark(v, queue) - } + custom_trace!(this, { + if let Some(ref v) = *this { + mark(v); } - ); + }); } impl Finalize for Result {} unsafe impl Trace for Result { - custom_trace!( - this, - queue, - { - match *this { - Ok(ref v) => mark(v), - Err(ref v) => mark(v), - } - }, - { - let marked = match *this { - Ok(ref v) => mark(v, queue), - Err(ref v) => mark(v, queue), - }; - marked + custom_trace!(this, { + match *this { + Ok(ref v) => mark(v), + Err(ref v) => mark(v), } - ); + }); } impl Finalize for BinaryHeap {} unsafe impl Trace for BinaryHeap { - custom_trace!( - this, - queue, - { - for v in this.iter() { - mark(v); - } - }, - { - for e in this.iter() { - mark(e, queue); - } + custom_trace!(this, { + for v in this.iter() { + mark(v); } - ); + }); } impl Finalize for BTreeMap {} unsafe impl Trace for BTreeMap { - custom_trace!( - this, - queue, - { - for (k, v) in this { - mark(k); - mark(v); - } - }, - { - for (k, v) in this { - mark(k, queue); - mark(v, queue); - } + custom_trace!(this, { + for (k, v) in this { + mark(k); + mark(v); } - ); + }); } impl Finalize for BTreeSet {} unsafe impl Trace for BTreeSet { - custom_trace!( - this, - queue, - { - for v in this { - mark(v); - } - }, - { - for v in this { - mark(v, queue); - } + custom_trace!(this, { + for v in this { + mark(v); } - ); + }); } impl Finalize for HashMap {} unsafe impl Trace for HashMap { - custom_trace!( - this, - queue, - { - for (k, v) in this.iter() { - mark(k); - mark(v); - } - }, - { - for (k, v) in this.iter() { - mark(k, queue); - mark(v, 
queue); - } + custom_trace!(this, { + for (k, v) in this.iter() { + mark(k); + mark(v); } - ); + }); } impl Finalize for HashSet {} unsafe impl Trace for HashSet { - custom_trace!( - this, - queue, - { - for v in this.iter() { - mark(v); - } - }, - { - for v in this.iter() { - mark(v, queue); - } + custom_trace!(this, { + for v in this.iter() { + mark(v); } - ); + }); } impl Finalize for LinkedList {} unsafe impl Trace for LinkedList { - custom_trace!( - this, - queue, - { - for v in this.iter() { - mark(v); - } - }, - { - for v in this.iter() { - mark(v, queue); - } + custom_trace!(this, { + for v in this.iter() { + mark(v); } - ); + }); } impl Finalize for PhantomData {} @@ -518,20 +389,11 @@ unsafe impl Trace for PhantomData { impl Finalize for VecDeque {} unsafe impl Trace for VecDeque { - custom_trace!( - this, - queue, - { - for v in this.iter() { - mark(v); - } - }, - { - for v in this.iter() { - mark(v, queue); - } + custom_trace!(this, { + for v in this.iter() { + mark(v); } - ); + }); } impl<'a, T: ToOwned + Trace + ?Sized> Finalize for Cow<'a, T> {} @@ -539,18 +401,9 @@ unsafe impl<'a, T: ToOwned + Trace + ?Sized> Trace for Cow<'a, T> where T::Owned: Trace, { - custom_trace!( - this, - queue, - { - if let Cow::Owned(ref v) = this { - mark(v); - } - }, - { - if let Cow::Owned(ref v) = this { - mark(v, queue) - } + custom_trace!(this, { + if let Cow::Owned(ref v) = this { + mark(v); } - ); + }); } From 148fd7cdc14d47d03ccbca615ab243edbb97152e Mon Sep 17 00:00:00 2001 From: nekevss Date: Tue, 25 Oct 2022 18:08:29 -0400 Subject: [PATCH 05/55] Couple more fixes --- boa_gc/src/gc_box.rs | 2 +- boa_gc/src/lib.rs | 4 ++-- boa_gc/src/pointers/weak_ptr.rs | 9 +++------ 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index 0ad678985d2..040e2de07b0 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -112,7 +112,7 @@ impl GcBoxHeader { } } -/// The HeapBox represents a box on the GC Heap. The HeapBox's creation and allocation is managed +/// The GcBox represents a box on `BoaGc`'s heap. 
The GcBox's creation and allocation is handled /// by the allocator #[repr(C)] pub struct GcBox { diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index b89043595a2..c7fefbaf24d 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -45,8 +45,8 @@ impl Default for GcConfig { Self { threshold: 100, growth_ratio: 0.7, - stack_base_capacity: 255, - stack_soft_cap: 255, + stack_base_capacity: 32, + stack_soft_cap: 32, } } } diff --git a/boa_gc/src/pointers/weak_ptr.rs b/boa_gc/src/pointers/weak_ptr.rs index d050a13a12b..1c22af2e7b5 100644 --- a/boa_gc/src/pointers/weak_ptr.rs +++ b/boa_gc/src/pointers/weak_ptr.rs @@ -1,12 +1,11 @@ - use crate::{ - GcBox, EPHEMERON_QUEUE, finalizer_safe, - trace::{Trace, Finalize}, + finalizer_safe, + trace::{Finalize, Trace}, + GcBox, EPHEMERON_QUEUE, }; use std::cell::Cell; use std::ptr::NonNull; - pub struct WeakGc { inner_ptr: Cell>>, } @@ -30,7 +29,6 @@ impl WeakGc { } } - impl Finalize for WeakGc {} unsafe impl Trace for WeakGc { @@ -61,4 +59,3 @@ unsafe impl Trace for WeakGc { Finalize::finalize(self) } } - From 2eac402bf3fc3aa244ad3544f477cd32a1189575 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 30 Oct 2022 16:19:11 -0400 Subject: [PATCH 06/55] Refactor Mark-Sweep --- boa_gc/src/gc_box.rs | 15 +-- boa_gc/src/lib.rs | 295 ++++++++++++++++++++++++++----------------- 2 files changed, 181 insertions(+), 129 deletions(-) diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index 040e2de07b0..5d259319c6f 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -12,15 +12,9 @@ const MARK_MASK: usize = 1 << (usize::BITS - 1); const ROOTS_MASK: usize = !MARK_MASK; const ROOTS_MAX: usize = ROOTS_MASK; -pub enum BoxLoc { - Stack, - Heap, -} - pub(crate) struct GcBoxHeader { roots: Cell, cycle_age: Cell, - loc: Cell, pub(crate) next: Cell>>>, } @@ -31,7 +25,6 @@ impl GcBoxHeader { GcBoxHeader { roots: Cell::new(1), cycle_age: Cell::new(0_u8), - loc: Cell::new(BoxLoc::Stack), next: Cell::new(None), } } @@ -43,14 +36,12 @@ impl GcBoxHeader { GcBoxHeader { roots: Cell::new(0), cycle_age: Cell::new(cycle_age), - loc: Cell::new(BoxLoc::Stack), next: Cell::new(None), } } #[inline] - pub fn promote(&self, next: Option>>) { - self.loc.set(BoxLoc::Heap); + pub fn set_next(&self, next: Option>>) { self.next.set(next); } @@ -144,8 +135,8 @@ impl GcBox { ptr::eq(&this.header, &other.header) } - pub(crate) fn promote(&self, next: Option>>) { - self.header.promote(next) + pub(crate) fn set_header_pointer(&self, next: Option>>) { + self.header.set_next(next) } /// Marks this `GcBox` and marks through its data. 
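The refactor above links every `GcBox` into an intrusive singly linked list through the `next` pointer in its header, and the collector in the lib.rs diff that follows sweeps by walking that chain: marked boxes survive (their mark is cleared for the next cycle), unmarked boxes are unlinked and dropped. Below is a minimal, self-contained sketch of that sweep idea, using owned `Option<Box<_>>` links instead of the crate's raw `NonNull` pointers; the names `Node` and `sweep` are illustrative only and are not part of `boa_gc`.

struct Node {
    marked: bool,
    next: Option<Box<Node>>,
}

// Walk the chain, relinking survivors and dropping unreachable nodes.
fn sweep(list: Option<Box<Node>>) -> Option<Box<Node>> {
    match list {
        None => None,
        Some(mut node) => {
            // Sweep the tail first so survivors can be relinked onto it.
            let rest = sweep(node.next.take());
            if node.marked {
                // Reachable: clear the mark for the next collection cycle.
                node.marked = false;
                node.next = rest;
                Some(node)
            } else {
                // Unreachable: dropping the box here frees the allocation.
                rest
            }
        }
    }
}

fn main() {
    // head (unmarked) -> middle (marked) -> tail (unmarked)
    let head = Some(Box::new(Node {
        marked: false,
        next: Some(Box::new(Node {
            marked: true,
            next: Some(Box::new(Node { marked: false, next: None })),
        })),
    }));
    let survivors = sweep(head);
    // Only the marked node survives the sweep.
    assert!(survivors.is_some());
    assert!(survivors.unwrap().next.is_none());
}

The real `Collector::sweep` in the following diff does the same relink-or-drop walk in place over `Cell`-wrapped pointers and additionally subtracts the freed bytes from the runtime counters, while `sweep_with_promotions` adds an age check so long-lived young boxes are handed to the adult list instead of being relinked into the youth list.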
diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index c7fefbaf24d..99c65b8a70d 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -29,40 +29,46 @@ thread_local!(pub static GC_DROPPING: StdCell = StdCell::new(false)); thread_local!(static BOA_GC: StdRefCell = StdRefCell::new( BoaGc { config: GcConfig::default(), runtime: GcRuntimeData::default(), - heap_start: StdCell::new(None), - stack: StdCell::new(Vec::new()), + adult_start: StdCell::new(None), + youth_start: StdCell::new(None), })); struct GcConfig { - threshold: usize, + youth_threshold: usize, + youth_threshold_base: usize, + adult_threshold: usize, growth_ratio: f64, - stack_base_capacity: usize, - stack_soft_cap: usize, + youth_promo_age: u8, } impl Default for GcConfig { fn default() -> Self { Self { - threshold: 100, + youth_threshold: 1000, + youth_threshold_base: 1000, + adult_threshold: 5000, growth_ratio: 0.7, - stack_base_capacity: 32, - stack_soft_cap: 32, + youth_promo_age: 3, } } } struct GcRuntimeData { collections: usize, - heap_bytes_allocated: usize, - stack_allocations: usize, + total_bytes_allocated: usize, + youth_bytes: usize, + adult_bytes: usize, + object_allocations: usize, } impl Default for GcRuntimeData { fn default() -> Self { Self { collections: 0, - heap_bytes_allocated: 0, - stack_allocations: 0, + total_bytes_allocated: 0, + youth_bytes: 0, + adult_bytes: 0, + object_allocations: 0, } } } @@ -70,8 +76,8 @@ impl Default for GcRuntimeData { struct BoaGc { config: GcConfig, runtime: GcRuntimeData, - heap_start: StdCell>, - stack: StdCell>, + adult_start: StdCell>, + youth_start: StdCell>, } impl Drop for BoaGc { @@ -114,17 +120,25 @@ impl BoaAlloc { let mut gc = st.borrow_mut(); unsafe { - Self::manage_state::(&mut *gc); + Self::manage_state(&mut *gc); } - let stack_element = Box::into_raw(Box::from(GcBox::new(value))); + let gc_box = GcBox::new(value); + + let element_size = mem::size_of_val::>(&gc_box); + let element_pointer = Box::into_raw(Box::from(gc_box)); + unsafe { - let mut stack = gc.stack.take(); - stack.push(NonNull::new_unchecked(stack_element)); - gc.stack.set(stack); - gc.runtime.stack_allocations += 1; + let old_start = gc.youth_start.take(); + (*element_pointer).set_header_pointer(old_start); + gc.youth_start + .set(Some(NonNull::new_unchecked(element_pointer))); + + gc.runtime.object_allocations += 1; + gc.runtime.total_bytes_allocated += element_size; + gc.runtime.youth_bytes += element_size; - Gc::new(NonNull::new_unchecked(stack_element)) + Gc::new(NonNull::new_unchecked(element_pointer)) } }) } @@ -136,18 +150,23 @@ impl BoaAlloc { // Manage state preps the internal state for allocation and // triggers a collection if the state dictates it. 
unsafe { - Self::manage_state::(&mut *gc); + Self::manage_state(&mut *gc); } - let cell = Cell::new(value); - let stack_element = Box::into_raw(Box::from(GcBox::new(cell))); + let gc_box = GcBox::new(Cell::new(value)); + let element_size = mem::size_of_val::>>(&gc_box); + let element_pointer = Box::into_raw(Box::from(gc_box)); + unsafe { - let mut stack = gc.stack.take(); - stack.push(NonNull::new_unchecked(stack_element)); - gc.stack.set(stack); - gc.runtime.stack_allocations += 1; + let old_start = gc.youth_start.take(); + (*element_pointer).set_header_pointer(old_start); + gc.youth_start + .set(Some(NonNull::new_unchecked(element_pointer))); + + gc.runtime.object_allocations += 1; + gc.runtime.total_bytes_allocated += element_size; - Gc::new(NonNull::new_unchecked(stack_element)) + Gc::new(NonNull::new_unchecked(element_pointer)) } }) } @@ -161,91 +180,128 @@ impl BoaAlloc { let mut gc = state.borrow_mut(); unsafe { - Self::manage_state::(&mut *gc); + Self::manage_state(&mut *gc); let ephemeron = Ephemeron::new(value.as_ptr()); - let stack_element = Box::into_raw(Box::from(GcBox::new_weak(ephemeron))); - let mut stack = gc.stack.take(); - stack.push(NonNull::new_unchecked(stack_element)); - gc.stack.set(stack); - gc.runtime.stack_allocations += 1; + let gc_box = GcBox::new_weak(ephemeron); + + let element_size = mem::size_of_val::>(&gc_box); + let element_pointer = Box::into_raw(Box::from(gc_box)); - WeakGc::new(NonNull::new_unchecked(stack_element)) + let old_start = gc.youth_start.take(); + (*element_pointer).set_header_pointer(old_start); + gc.youth_start + .set(Some(NonNull::new_unchecked(element_pointer))); + + gc.runtime.object_allocations += 1; + gc.runtime.total_bytes_allocated += element_size; + + WeakGc::new(NonNull::new_unchecked(element_pointer)) } }) } // Possibility here for `new_weak` that takes any value and creates a new WeakGc - pub(crate) unsafe fn promote_allocs( + pub(crate) unsafe fn promote_to_medium( promotions: Vec>>, gc: &mut BoaGc, ) { for node in promotions { - (*node.as_ptr()).promote(gc.heap_start.take()); - gc.heap_start.set(Some(node)); - gc.runtime.heap_bytes_allocated += mem::size_of::>(); + (*node.as_ptr()).set_header_pointer(gc.adult_start.take()); + let allocation_bytes= mem::size_of_val::>(&(*node.as_ptr())); + gc.runtime.youth_bytes -= allocation_bytes; + gc.runtime.adult_bytes += allocation_bytes; + gc.adult_start.set(Some(node)); } } - unsafe fn manage_state(gc: &mut BoaGc) { - if gc.runtime.heap_bytes_allocated > gc.config.threshold { - Collector::run_full_collection::(gc); + unsafe fn manage_state(gc: &mut BoaGc) { + if gc.runtime.adult_bytes > gc.config.adult_threshold { + Collector::run_full_collection(gc); - if gc.runtime.heap_bytes_allocated as f64 - > gc.config.threshold as f64 * gc.config.growth_ratio + if gc.runtime.adult_bytes as f64 + > gc.config.adult_threshold as f64 * gc.config.growth_ratio { - gc.config.threshold = - (gc.runtime.heap_bytes_allocated as f64 / gc.config.growth_ratio) as usize + gc.config.adult_threshold = + (gc.runtime.adult_bytes as f64 / gc.config.growth_ratio) as usize } } else { - if gc.runtime.stack_allocations > gc.config.stack_soft_cap { - Collector::run_stack_collection::(gc); + if gc.runtime.youth_bytes > gc.config.youth_threshold { + Collector::run_youth_collection(gc); // If we are constrained on the top of the stack, // increase the size of capacity, so a garbage collection // isn't triggered on every allocation - if gc.runtime.stack_allocations > gc.config.stack_soft_cap { - gc.config.stack_soft_cap += 5 
+ if gc.runtime.youth_bytes > gc.config.youth_threshold { + gc.config.youth_threshold = + (gc.runtime.youth_bytes as f64 / gc.config.growth_ratio) as usize } - // If the soft cap was increased but the allocation has lowered below - // the initial base, then reset to the original base - if gc.runtime.stack_allocations < gc.config.stack_base_capacity - && gc.config.stack_base_capacity != gc.config.stack_soft_cap + // The young object threshold should only be raised in cases of high laod. It + // should retract back to base when the load lessens + if gc.runtime.youth_bytes < gc.config.youth_threshold_base + && gc.config.youth_threshold != gc.config.youth_threshold_base { - gc.config.stack_soft_cap = gc.config.stack_base_capacity + gc.config.youth_threshold = gc.config.youth_threshold_base } } } } } +// This collector currently functions in four main phases +// +// Mark -> Finalize -> Mark -> Sweep +// +// Mark nodes as reachable then finalize the unreachable nodes. A remark phase +// then needs to be retriggered as finalization can potentially resurrect dead +// nodes. +// +// A better appraoch in a more concurrent structure may be to reorder. +// +// Mark -> Sweep -> Finalize pub struct Collector; impl Collector { - pub(crate) unsafe fn run_stack_collection(gc: &mut BoaGc) { + pub(crate) unsafe fn run_youth_collection(gc: &mut BoaGc) { gc.runtime.collections += 1; - let stack = gc.stack.take(); - let unreachable_nodes = Self::mark_stack(&stack); + let unreachable_nodes = Self::mark_heap(&gc.youth_start); Self::finalize(unreachable_nodes); - let _finalized = Self::mark_stack(&stack); - let promotions = Self::stack_sweep(gc, stack); - BoaAlloc::promote_allocs::(promotions, gc); + // The returned unreachable vector must be filled with nodes that are for certain dead (these will be removed during the sweep) + let _finalized_unreachable_nodes = Self::mark_heap(&gc.youth_start); + let promotion_candidates = Self::sweep_with_promotions( + &gc.youth_start, + &mut gc.runtime.youth_bytes, + &mut gc.runtime.total_bytes_allocated, + &gc.config.youth_promo_age, + ); + // Check if there are any candidates for promotion + if promotion_candidates.len() > 0 { + BoaAlloc::promote_to_medium(promotion_candidates, gc); + } } - pub(crate) unsafe fn run_full_collection(gc: &mut BoaGc) { + pub(crate) unsafe fn run_full_collection(gc: &mut BoaGc) { gc.runtime.collections += 1; - let old_stack = gc.stack.take(); - let mut unreachable = Self::mark_heap(&gc.heap_start); - let stack_unreachable = Self::mark_stack(&old_stack); - unreachable.extend(stack_unreachable); - Self::finalize(unreachable); - let _heap_finalized = Self::mark_heap(&gc.heap_start); - let _sweep_finalized = Self::mark_stack(&old_stack); - Self::heap_sweep(gc); - let promotions = Self::stack_sweep(gc, old_stack); - BoaAlloc::promote_allocs::(promotions, gc); + let unreachable_adults = Self::mark_heap(&gc.adult_start); + let unreachable_youths = Self::mark_heap(&gc.youth_start); + Self::finalize(unreachable_adults); + Self::finalize(unreachable_youths); + let _final_unreachable_adults = Self::mark_heap(&gc.adult_start); + let _final_unreachable_youths = Self::mark_heap(&gc.youth_start); + + // Sweep both without promoting any values + Self::sweep( + &gc.adult_start, + &mut gc.runtime.adult_bytes, + &mut gc.runtime.total_bytes_allocated, + ); + Self::sweep( + &gc.youth_start, + &mut gc.runtime.youth_bytes, + &mut gc.runtime.total_bytes_allocated, + ); } pub(crate) unsafe fn mark_heap( @@ -280,33 +336,6 @@ impl Collector { finalize } - pub(crate) unsafe 
fn mark_stack( - stack: &Vec>>, - ) -> Vec>> { - let mut finalize = Vec::new(); - let mut ephemeron_queue = Vec::new(); - - for node in stack { - if (*node.as_ptr()).header.is_ephemeron() { - ephemeron_queue.push(*node) - } else { - if (*node.as_ptr()).header.roots() > 0 { - (*node.as_ptr()).header.mark() - } else { - finalize.push(*node) - } - } - } - - if !ephemeron_queue.is_empty() { - ephemeron_queue = Self::mark_ephemerons(ephemeron_queue) - } - - finalize.extend(ephemeron_queue); - - finalize - } - // Tracing Ephemerons/Weak is always requires tracing the inner nodes in case it ends up marking unmarked node // // Time complexity should be something like O(nd) where d is the longest chain of epehemerons @@ -364,44 +393,56 @@ impl Collector { } } - unsafe fn stack_sweep( - gc: &mut BoaGc, - old_stack: Vec>>, + unsafe fn sweep_with_promotions( + heap_start: &StdCell>>>, + heap_bytes: &mut usize, + total_bytes: &mut usize, + promotion_age: &u8, ) -> Vec>> { let _guard = DropGuard::new(); - let mut new_stack = Vec::new(); let mut promotions = Vec::new(); - - for node in old_stack { + let mut sweep_head = heap_start; + while let Some(node) = sweep_head.get() { if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); - (*node.as_ptr()).header.inc_age(); - if (*node.as_ptr()).header.age() > 10 { - promotions.push(node); + if (*node.as_ptr()).header.age() >= *promotion_age { + sweep_head.set((*node.as_ptr()).header.next.take()); + promotions.push(node) } else { - new_stack.push(node) + sweep_head = &(*node.as_ptr()).header.next; } } else { - gc.runtime.stack_allocations -= 1; + // Drops occur here + let unmarked_node = Box::from_raw(node.as_ptr()); + let unallocated_bytes = mem::size_of_val::>(&*unmarked_node); + *heap_bytes -= unallocated_bytes; + *total_bytes -= unallocated_bytes; + sweep_head.set(unmarked_node.header.next.take()); } } - gc.stack.set(new_stack); promotions } - unsafe fn heap_sweep(gc: &mut BoaGc) { + unsafe fn sweep( + heap_start: &StdCell>>>, + bytes_allocated: &mut usize, + total_allocated: &mut usize, + ) { let _guard = DropGuard::new(); - let mut sweep_head = &gc.heap_start; + let mut sweep_head = heap_start; while let Some(node) = sweep_head.get() { if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); sweep_head = &(*node.as_ptr()).header.next; } else { + // Drops occur here let unmarked_node = Box::from_raw(node.as_ptr()); - gc.runtime.heap_bytes_allocated -= mem::size_of_val::>(&*unmarked_node); + let unallocated_bytes = mem::size_of_val::>(&*unmarked_node); + *bytes_allocated -= unallocated_bytes; + *total_allocated -= unallocated_bytes; sweep_head.set(unmarked_node.header.next.take()); } } @@ -409,7 +450,27 @@ impl Collector { // Clean up the heap when BoaGc is dropped unsafe fn dump(gc: &mut BoaGc) { - let _unreachable = Self::mark_heap(&gc.heap_start); - Self::heap_sweep(gc); + Self::drop_heap(&gc.youth_start); + Self::drop_heap(&gc.adult_start); + } + + unsafe fn drop_heap(heap_start: &StdCell>>>) { + // Not initializing a dropguard since this should only be invoked when BOA_GC is being dropped. + + let sweep_head = heap_start; + while let Some(node) = sweep_head.get() { + // Drops every node + let unmarked_node = Box::from_raw(node.as_ptr()); + sweep_head.set(unmarked_node.header.next.take()); + } } } + +// A utility function that forces runs through Collector method based off the state. 
+// +// Note: +// - This method will not trigger a promotion between generations +// - This method is meant solely for testing purposes only +pub(crate) unsafe fn force_collect() { + BOA_GC.with(|internal| todo!()) +} From f2eefdfe4572376e94ad6dfd16347de22ace83be Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 30 Oct 2022 23:23:11 -0400 Subject: [PATCH 07/55] Complete initial layout and refactor --- boa_gc/src/internals/ephemeron.rs | 19 ++++++-- boa_gc/src/lib.rs | 31 +++++++++++-- boa_gc/src/pointers/mod.rs | 6 ++- boa_gc/src/pointers/weak_pair.rs | 77 +++++++++++++++++++++++++++++++ boa_gc/src/pointers/weak_ptr.rs | 22 +++++++-- 5 files changed, 141 insertions(+), 14 deletions(-) create mode 100644 boa_gc/src/pointers/weak_pair.rs diff --git a/boa_gc/src/internals/ephemeron.rs b/boa_gc/src/internals/ephemeron.rs index f9d5f2f40ea..d91cd3b2ee3 100644 --- a/boa_gc/src/internals/ephemeron.rs +++ b/boa_gc/src/internals/ephemeron.rs @@ -12,8 +12,8 @@ pub struct Ephemeron { } impl Ephemeron { - pub unsafe fn new(value: *mut GcBox) -> Self { - let ptr = NonNull::new_unchecked(value); + pub unsafe fn new(value: NonNull>) -> Self { + let ptr = NonNull::new_unchecked(value.as_ptr()); Ephemeron { key: Cell::new(Some(ptr)), value: (), @@ -21,6 +21,17 @@ impl Ephemeron { } } +impl Ephemeron { + pub unsafe fn new_pair(key: NonNull>, value: V) -> Self { + let ptr = NonNull::new_unchecked(key.as_ptr()); + + Ephemeron { + key: Cell::new(Some(ptr)), + value, + } + } +} + impl Ephemeron { #[inline] pub(crate) fn is_marked(&self) -> bool { @@ -53,7 +64,7 @@ impl Ephemeron { } #[inline] - pub fn key_value(&self) -> Option<&K> { + pub fn key(&self) -> Option<&K> { if let Some(key_box) = self.inner_key() { Some(key_box.value()) } else { @@ -62,7 +73,7 @@ impl Ephemeron { } #[inline] - pub fn inner_value(&self) -> &V { + pub fn value(&self) -> &V { &self.value } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 99c65b8a70d..0e0dcf397ea 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -20,7 +20,7 @@ pub mod trace; pub use crate::trace::{Finalize, Trace}; pub(crate) use gc_box::GcBox; pub use internals::{Ephemeron, GcCell as Cell, GcCellRef as CellRef}; -pub use pointers::{Gc, WeakGc}; +pub use pointers::{Gc, WeakGc, WeakPair}; pub type GcPointer = NonNull>; @@ -172,7 +172,28 @@ impl BoaAlloc { } pub fn new_weak_pair(key: GcPointer, value: V) { - todo!() + BOA_GC.with(|internals| { + let mut gc = internals.borrow_mut(); + + unsafe { + Self::manage_state(&mut *gc); + let ephem = Ephemeron::new_pair(key, value); + let gc_box = GcBox::new_weak(ephem); + + let element_size = mem::size_of_val::>(&gc_box); + let element_pointer = Box::into_raw(Box::from(gc_box)); + + let old_start = gc.youth_start.take(); + (*element_pointer).set_header_pointer(old_start); + gc.youth_start + .set(Some(NonNull::new_unchecked(element_pointer))); + + gc.runtime.object_allocations += 1; + gc.runtime.total_bytes_allocated += element_size; + + WeakPair::new(NonNull::new_unchecked(element_pointer)) + } + }) } pub fn new_weak_ref(value: NonNull>) -> WeakGc> { @@ -182,7 +203,7 @@ impl BoaAlloc { unsafe { Self::manage_state(&mut *gc); - let ephemeron = Ephemeron::new(value.as_ptr()); + let ephemeron = Ephemeron::new(value); let gc_box = GcBox::new_weak(ephemeron); let element_size = mem::size_of_val::>(&gc_box); @@ -209,7 +230,7 @@ impl BoaAlloc { ) { for node in promotions { (*node.as_ptr()).set_header_pointer(gc.adult_start.take()); - let allocation_bytes= mem::size_of_val::>(&(*node.as_ptr())); + let allocation_bytes = 
mem::size_of_val::>(&(*node.as_ptr())); gc.runtime.youth_bytes -= allocation_bytes; gc.runtime.adult_bytes += allocation_bytes; gc.adult_start.set(Some(node)); @@ -238,7 +259,7 @@ impl BoaAlloc { (gc.runtime.youth_bytes as f64 / gc.config.growth_ratio) as usize } - // The young object threshold should only be raised in cases of high laod. It + // The young object threshold should only be raised in cases of high laod. It // should retract back to base when the load lessens if gc.runtime.youth_bytes < gc.config.youth_threshold_base && gc.config.youth_threshold != gc.config.youth_threshold_base diff --git a/boa_gc/src/pointers/mod.rs b/boa_gc/src/pointers/mod.rs index 7dc4e0c2381..8c21bbfa5eb 100644 --- a/boa_gc/src/pointers/mod.rs +++ b/boa_gc/src/pointers/mod.rs @@ -1,7 +1,9 @@ //! Pointers represents the External types returned by the Boa Garbage Collector -pub mod gc_ptr; -pub mod weak_ptr; +mod gc_ptr; +mod weak_pair; +mod weak_ptr; pub use gc_ptr::Gc; +pub use weak_pair::WeakPair; pub use weak_ptr::WeakGc; diff --git a/boa_gc/src/pointers/weak_pair.rs b/boa_gc/src/pointers/weak_pair.rs new file mode 100644 index 00000000000..7acd51a83a8 --- /dev/null +++ b/boa_gc/src/pointers/weak_pair.rs @@ -0,0 +1,77 @@ +use crate::{ + finalizer_safe, + internals::Ephemeron, + trace::{Finalize, Trace}, + GcBox, EPHEMERON_QUEUE, +}; +use std::cell::Cell; +use std::ptr::NonNull; + +pub struct WeakPair { + inner_ptr: Cell>>>, +} + +impl WeakPair { + pub fn new(value: NonNull>>) -> Self { + unsafe { + Self { + inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), + } + } + } +} + +impl WeakPair { + #[inline] + fn inner_ptr(&self) -> *mut GcBox> { + assert!(finalizer_safe()); + + unsafe { self.inner_ptr.get().as_ptr() } + } + + #[inline] + pub fn inner(&self) -> &GcBox> { + unsafe { &*self.inner_ptr() } + } + + #[inline] + pub fn key(&self) -> Option<&K> { + self.inner().key() + } + + #[inline] + pub fn value(&self) -> &V { + self.inner().value() + } +} + +impl Finalize for WeakPair {} + +unsafe impl Trace for WeakPair { + #[inline] + unsafe fn trace(&self) {} + + #[inline] + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } + + #[inline] + unsafe fn weak_trace(&self) { + EPHEMERON_QUEUE.with(|q| { + let mut queue = q.take().expect("queue is initialized by weak_trace"); + queue.push(NonNull::new_unchecked(self.inner_ptr())) + }) + } + + #[inline] + unsafe fn root(&self) {} + + #[inline] + unsafe fn unroot(&self) {} + + #[inline] + fn run_finalizer(&self) { + Finalize::finalize(self) + } +} diff --git a/boa_gc/src/pointers/weak_ptr.rs b/boa_gc/src/pointers/weak_ptr.rs index 1c22af2e7b5..65ec0607f19 100644 --- a/boa_gc/src/pointers/weak_ptr.rs +++ b/boa_gc/src/pointers/weak_ptr.rs @@ -1,5 +1,6 @@ use crate::{ finalizer_safe, + internals::Ephemeron, trace::{Finalize, Trace}, GcBox, EPHEMERON_QUEUE, }; @@ -7,11 +8,11 @@ use std::cell::Cell; use std::ptr::NonNull; pub struct WeakGc { - inner_ptr: Cell>>, + inner_ptr: Cell>>>, } impl WeakGc { - pub fn new(value: NonNull>) -> Self { + pub fn new(value: NonNull>>) -> Self { unsafe { Self { inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), @@ -22,11 +23,26 @@ impl WeakGc { impl WeakGc { #[inline] - fn inner_ptr(&self) -> *mut GcBox { + fn inner_ptr(&self) -> *mut GcBox> { assert!(finalizer_safe()); unsafe { self.inner_ptr.get().as_ptr() } } + + #[inline] + fn value(&self) -> T { + self.inner_ptr() + } + + #[inline] + fn inner(&self) -> &GcBox> { + unsafe { &*self.inner_ptr() } + } + + #[inline] + pub fn value(&self) -> Option<&T> { + 
self.inner().key() + } } impl Finalize for WeakGc {} From 295b01c97e1538be1852bf85dc26344b053c7c13 Mon Sep 17 00:00:00 2001 From: nekevss Date: Mon, 31 Oct 2022 22:16:12 -0400 Subject: [PATCH 08/55] Build errors and first basic test --- boa_gc/src/gc_box.rs | 3 +-- boa_gc/src/internals/cell.rs | 32 ------------------------- boa_gc/src/internals/cell_ref.rs | 40 +------------------------------ boa_gc/src/internals/ephemeron.rs | 4 ++-- boa_gc/src/lib.rs | 4 ++-- boa_gc/src/pointers/weak_pair.rs | 8 +++---- boa_gc/src/pointers/weak_ptr.rs | 7 +----- boa_gc/src/trace.rs | 2 -- boa_gc/tests/allocation.rs | 10 ++++++++ 9 files changed, 21 insertions(+), 89 deletions(-) create mode 100644 boa_gc/tests/allocation.rs diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index 5d259319c6f..f4e426a437d 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -1,5 +1,4 @@ -use crate::pointers::Gc; -use crate::{Finalize, Trace}; +use crate::Trace; use std::cell::Cell; use std::ptr::{self, NonNull}; diff --git a/boa_gc/src/internals/cell.rs b/boa_gc/src/internals/cell.rs index 911ad9e9517..f11bb4a8f06 100644 --- a/boa_gc/src/internals/cell.rs +++ b/boa_gc/src/internals/cell.rs @@ -78,23 +78,6 @@ impl GcCell { /// /// This is the non-panicking variant of [`borrow`](#method.borrow). /// - /// # Examples - /// - /// ``` - /// use gc::GcCell; - /// - /// let c = GcCell::new(5); - /// - /// { - /// let m = c.borrow_mut(); - /// assert!(c.try_borrow().is_err()); - /// } - /// - /// { - /// let m = c.borrow(); - /// assert!(c.try_borrow().is_ok()); - /// } - /// ``` pub fn try_borrow(&self) -> Result, BorrowError> { if self.flags.get().borrowed() == BorrowState::Writing { return Err(BorrowError); @@ -119,21 +102,6 @@ impl GcCell { /// The value cannot be borrowed while this borrow is active. /// /// This is the non-panicking variant of [`borrow_mut`](#method.borrow_mut). - /// - /// # Examples - /// - /// ``` - /// use gc::GcCell; - /// - /// let c = GcCell::new(5); - /// - /// { - /// let m = c.borrow(); - /// assert!(c.try_borrow_mut().is_err()); - /// } - /// - /// assert!(c.try_borrow_mut().is_ok()); - /// ``` pub fn try_borrow_mut(&self) -> Result, BorrowMutError> { if self.flags.get().borrowed() != BorrowState::Unused { return Err(BorrowMutError); diff --git a/boa_gc/src/internals/cell_ref.rs b/boa_gc/src/internals/cell_ref.rs index 0e5c76759ba..3c9de15a52d 100644 --- a/boa_gc/src/internals/cell_ref.rs +++ b/boa_gc/src/internals/cell_ref.rs @@ -9,7 +9,7 @@ use crate::{ borrow_flag::{BorrowFlag, BorrowState}, GcCell, }, - trace::{Finalize, Trace}, + trace::Trace, }; /// A wrapper type for an immutably borrowed value from a `GcCell`. @@ -43,17 +43,6 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> { /// This is an associated function that needs to be used as `GcCellRef::map(...)`. /// A method would interfere with methods of the same name on the contents /// of a `GcCellRef` used through `Deref`. - /// - /// # Examples - /// - /// ``` - /// use gc::{GcCell, GcCellRef}; - /// - /// let c = GcCell::new((5, 'b')); - /// let b1: GcCellRef<(u32, char)> = c.borrow(); - /// let b2: GcCellRef = GcCellRef::map(b1, |t| &t.0); - /// //assert_eq!(b2, 5); - /// ``` #[inline] pub fn map(orig: Self, f: F) -> GcCellRef<'a, U> where @@ -78,18 +67,6 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> { /// /// This is an associated function that needs to be used as GcCellRef::map_split(...). /// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`. 
- /// - /// # Examples - /// - /// ``` - /// use gc::{GcCell, GcCellRef}; - /// - /// let cell = GcCell::new((1, 'c')); - /// let borrow = cell.borrow(); - /// let (first, second) = GcCellRef::map_split(borrow, |x| (&x.0, &x.1)); - /// assert_eq!(*first, 1); - /// assert_eq!(*second, 'c'); - /// ``` #[inline] pub fn map_split(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>) where @@ -163,21 +140,6 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> { /// This is an associated function that needs to be used as /// `GcCellRefMut::map(...)`. A method would interfere with methods of the same /// name on the contents of a `GcCell` used through `Deref`. - /// - /// # Examples - /// - /// ``` - /// use gc::{GcCell, GcCellRefMut}; - /// - /// let c = GcCell::new((5, 'b')); - /// { - /// let b1: GcCellRefMut<(u32, char)> = c.borrow_mut(); - /// let mut b2: GcCellRefMut<(u32, char), u32> = GcCellRefMut::map(b1, |t| &mut t.0); - /// assert_eq!(*b2, 5); - /// *b2 = 42; - /// } - /// assert_eq!(*c.borrow(), (42, 'b')); - /// ``` #[inline] pub fn map(orig: Self, f: F) -> GcCellRefMut<'a, T, V> where diff --git a/boa_gc/src/internals/ephemeron.rs b/boa_gc/src/internals/ephemeron.rs index d91cd3b2ee3..deab1708a42 100644 --- a/boa_gc/src/internals/ephemeron.rs +++ b/boa_gc/src/internals/ephemeron.rs @@ -21,7 +21,7 @@ impl Ephemeron { } } -impl Ephemeron { +impl Ephemeron { pub unsafe fn new_pair(key: NonNull>, value: V) -> Self { let ptr = NonNull::new_unchecked(key.as_ptr()); @@ -86,7 +86,7 @@ impl Ephemeron { #[inline] unsafe fn weak_trace_value(&self) { - self.inner_value().weak_trace() + self.value().weak_trace() } } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 0e0dcf397ea..ddad2bc32eb 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -171,7 +171,7 @@ impl BoaAlloc { }) } - pub fn new_weak_pair(key: GcPointer, value: V) { + pub fn new_weak_pair(key: NonNull>, value: V) -> WeakPair { BOA_GC.with(|internals| { let mut gc = internals.borrow_mut(); @@ -196,7 +196,7 @@ impl BoaAlloc { }) } - pub fn new_weak_ref(value: NonNull>) -> WeakGc> { + pub fn new_weak_ref(value: NonNull>) -> WeakGc { BOA_GC.with(|state| { let mut gc = state.borrow_mut(); diff --git a/boa_gc/src/pointers/weak_pair.rs b/boa_gc/src/pointers/weak_pair.rs index 7acd51a83a8..02bcf5dc35c 100644 --- a/boa_gc/src/pointers/weak_pair.rs +++ b/boa_gc/src/pointers/weak_pair.rs @@ -7,11 +7,11 @@ use crate::{ use std::cell::Cell; use std::ptr::NonNull; -pub struct WeakPair { +pub struct WeakPair { inner_ptr: Cell>>>, } -impl WeakPair { +impl WeakPair { pub fn new(value: NonNull>>) -> Self { unsafe { Self { @@ -36,12 +36,12 @@ impl WeakPair { #[inline] pub fn key(&self) -> Option<&K> { - self.inner().key() + self.inner().value().key() } #[inline] pub fn value(&self) -> &V { - self.inner().value() + self.inner().value().value() } } diff --git a/boa_gc/src/pointers/weak_ptr.rs b/boa_gc/src/pointers/weak_ptr.rs index 65ec0607f19..a9d8d4a2bd1 100644 --- a/boa_gc/src/pointers/weak_ptr.rs +++ b/boa_gc/src/pointers/weak_ptr.rs @@ -29,11 +29,6 @@ impl WeakGc { unsafe { self.inner_ptr.get().as_ptr() } } - #[inline] - fn value(&self) -> T { - self.inner_ptr() - } - #[inline] fn inner(&self) -> &GcBox> { unsafe { &*self.inner_ptr() } @@ -41,7 +36,7 @@ impl WeakGc { #[inline] pub fn value(&self) -> Option<&T> { - self.inner().key() + self.inner().value().key() } } diff --git a/boa_gc/src/trace.rs b/boa_gc/src/trace.rs index 95e92bc09bd..f63084e086b 100644 --- a/boa_gc/src/trace.rs +++ b/boa_gc/src/trace.rs @@ -13,8 
+13,6 @@ use std::sync::atomic::{ AtomicU64, AtomicU8, AtomicUsize, }; -use crate::GcPointer; - /// The Finalize trait, which needs to be implemented on /// garbage-collected objects to define finalization logic. pub trait Finalize { diff --git a/boa_gc/tests/allocation.rs b/boa_gc/tests/allocation.rs new file mode 100644 index 00000000000..96073f0a2c0 --- /dev/null +++ b/boa_gc/tests/allocation.rs @@ -0,0 +1,10 @@ +use std::ops::Deref; + +use boa_gc::{BoaAlloc, Trace, Finalize}; + +#[test] +fn gc_basic_cell_allocation() { + let gc_cell = BoaAlloc::new_cell("Hi"); + + assert_eq!(*gc_cell.borrow_mut(), "Hi"); +} \ No newline at end of file From 1109201acdabdf905d35a7689edf5e428ef523c1 Mon Sep 17 00:00:00 2001 From: nekevss Date: Thu, 3 Nov 2022 20:54:09 -0400 Subject: [PATCH 09/55] initial basic tests and some fixes --- boa_gc/src/gc_box.rs | 1 + boa_gc/src/internals/borrow_flag.rs | 8 +-- boa_gc/src/lib.rs | 98 +++++++++++++++++++++++------ boa_gc/src/pointers/gc_ptr.rs | 7 ++- boa_gc/tests/allocation.rs | 26 ++++++-- 5 files changed, 107 insertions(+), 33 deletions(-) diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index f4e426a437d..c7297831e99 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -1,5 +1,6 @@ use crate::Trace; use std::cell::Cell; +use std::mem; use std::ptr::{self, NonNull}; // Age and Weak Flags diff --git a/boa_gc/src/internals/borrow_flag.rs b/boa_gc/src/internals/borrow_flag.rs index 53be29a772d..dbcd04b5e96 100644 --- a/boa_gc/src/internals/borrow_flag.rs +++ b/boa_gc/src/internals/borrow_flag.rs @@ -1,10 +1,4 @@ -/// The BorrowFlag used by GC is split into 2 parts. the upper 63 or 31 bits -/// (depending on the architecture) are used to store the number of borrowed -/// references to the type. The low bit is used to record the rootedness of the -/// type. -/// -/// This means that GcCell can have, at maximum, half as many outstanding -/// borrows as RefCell before panicking. I don't think that will be a problem. + #[derive(Copy, Clone)] pub(crate) struct BorrowFlag(usize); diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index ddad2bc32eb..09bc215d50f 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -24,6 +24,7 @@ pub use pointers::{Gc, WeakGc, WeakPair}; pub type GcPointer = NonNull>; +// TODO: Determine if thread local variables are the correct approach vs an initialized structure thread_local!(pub static EPHEMERON_QUEUE: StdCell>> = StdCell::new(None)); thread_local!(pub static GC_DROPPING: StdCell = StdCell::new(false)); thread_local!(static BOA_GC: StdRefCell = StdRefCell::new( BoaGc { @@ -41,12 +42,15 @@ struct GcConfig { youth_promo_age: u8, } +// Setting the defaults to an arbitrary value currently. 
+// +// TODO: Add a configure later impl Default for GcConfig { fn default() -> Self { Self { - youth_threshold: 1000, - youth_threshold_base: 1000, - adult_threshold: 5000, + youth_threshold: 1024, + youth_threshold_base: 1024, + adult_threshold: 4096, growth_ratio: 0.7, youth_promo_age: 3, } @@ -58,7 +62,6 @@ struct GcRuntimeData { total_bytes_allocated: usize, youth_bytes: usize, adult_bytes: usize, - object_allocations: usize, } impl Default for GcRuntimeData { @@ -68,7 +71,6 @@ impl Default for GcRuntimeData { total_bytes_allocated: 0, youth_bytes: 0, adult_bytes: 0, - object_allocations: 0, } } } @@ -122,10 +124,10 @@ impl BoaAlloc { unsafe { Self::manage_state(&mut *gc); } - + let gc_box = GcBox::new(value); - let element_size = mem::size_of_val::>(&gc_box); + let element_size = mem::size_of_val::>(&gc_box); let element_pointer = Box::into_raw(Box::from(gc_box)); unsafe { @@ -134,7 +136,6 @@ impl BoaAlloc { gc.youth_start .set(Some(NonNull::new_unchecked(element_pointer))); - gc.runtime.object_allocations += 1; gc.runtime.total_bytes_allocated += element_size; gc.runtime.youth_bytes += element_size; @@ -152,9 +153,9 @@ impl BoaAlloc { unsafe { Self::manage_state(&mut *gc); } - + let gc_box = GcBox::new(Cell::new(value)); - let element_size = mem::size_of_val::>>(&gc_box); + let element_size = mem::size_of_val::>>(&gc_box); let element_pointer = Box::into_raw(Box::from(gc_box)); unsafe { @@ -163,7 +164,7 @@ impl BoaAlloc { gc.youth_start .set(Some(NonNull::new_unchecked(element_pointer))); - gc.runtime.object_allocations += 1; + gc.runtime.youth_bytes += element_size; gc.runtime.total_bytes_allocated += element_size; Gc::new(NonNull::new_unchecked(element_pointer)) @@ -188,7 +189,6 @@ impl BoaAlloc { gc.youth_start .set(Some(NonNull::new_unchecked(element_pointer))); - gc.runtime.object_allocations += 1; gc.runtime.total_bytes_allocated += element_size; WeakPair::new(NonNull::new_unchecked(element_pointer)) @@ -214,7 +214,6 @@ impl BoaAlloc { gc.youth_start .set(Some(NonNull::new_unchecked(element_pointer))); - gc.runtime.object_allocations += 1; gc.runtime.total_bytes_allocated += element_size; WeakGc::new(NonNull::new_unchecked(element_pointer)) @@ -288,7 +287,10 @@ impl Collector { pub(crate) unsafe fn run_youth_collection(gc: &mut BoaGc) { gc.runtime.collections += 1; let unreachable_nodes = Self::mark_heap(&gc.youth_start); - Self::finalize(unreachable_nodes); + + if !unreachable_nodes.is_empty() { + Self::finalize(unreachable_nodes); + } // The returned unreachable vector must be filled with nodes that are for certain dead (these will be removed during the sweep) let _finalized_unreachable_nodes = Self::mark_heap(&gc.youth_start); let promotion_candidates = Self::sweep_with_promotions( @@ -298,7 +300,7 @@ impl Collector { &gc.config.youth_promo_age, ); // Check if there are any candidates for promotion - if promotion_candidates.len() > 0 { + if !promotion_candidates.is_empty() { BoaAlloc::promote_to_medium(promotion_candidates, gc); } } @@ -307,8 +309,15 @@ impl Collector { gc.runtime.collections += 1; let unreachable_adults = Self::mark_heap(&gc.adult_start); let unreachable_youths = Self::mark_heap(&gc.youth_start); - Self::finalize(unreachable_adults); - Self::finalize(unreachable_youths); + + // Check if any unreachable nodes were found and finalize + if !unreachable_adults.is_empty() { + Self::finalize(unreachable_adults); + } + if !unreachable_youths.is_empty() { + Self::finalize(unreachable_youths); + } + let _final_unreachable_adults = Self::mark_heap(&gc.adult_start); 
let _final_unreachable_youths = Self::mark_heap(&gc.youth_start); @@ -427,6 +436,7 @@ impl Collector { while let Some(node) = sweep_head.get() { if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); + (*node.as_ptr()).header.inc_age(); if (*node.as_ptr()).header.age() >= *promotion_age { sweep_head.set((*node.as_ptr()).header.next.take()); promotions.push(node) @@ -457,6 +467,7 @@ impl Collector { while let Some(node) = sweep_head.get() { if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); + (*node.as_ptr()).header.inc_age(); sweep_head = &(*node.as_ptr()).header.next; } else { // Drops occur here @@ -490,8 +501,55 @@ impl Collector { // A utility function that forces runs through Collector method based off the state. // // Note: -// - This method will not trigger a promotion between generations // - This method is meant solely for testing purposes only -pub(crate) unsafe fn force_collect() { - BOA_GC.with(|internal| todo!()) +// - `force_collect` will not extend threshold +pub fn force_collect() { + BOA_GC.with(|current| { + let mut gc = current.borrow_mut(); + + unsafe { + if gc.runtime.adult_bytes > 0 { + Collector::run_full_collection(&mut *gc) + } else { + Collector::run_youth_collection(&mut *gc) + } + } + }) } + + +pub struct GcTester; + +impl GcTester { + pub fn assert_collections(o: usize) { + BOA_GC.with(|current|{ + let gc = current.borrow(); + assert_eq!(gc.runtime.collections, o); + }) + } + + pub fn assert_youth_bytes_allocated() { + BOA_GC.with(|current| { + let gc = current.borrow(); + assert!(gc.runtime.youth_bytes > 0); + }) + } + + pub fn assert_empty_gc() { + BOA_GC.with(|current| { + let gc = current.borrow(); + + assert_eq!(gc.adult_start.get().is_none(), true); + assert!(gc.runtime.adult_bytes == 0); + assert_eq!(gc.youth_start.get().is_none(), true); + assert!(gc.runtime.youth_bytes == 0); + }) + } + + pub fn assert_adult_bytes_allocated() { + BOA_GC.with(|current| { + let gc = current.borrow(); + assert!(gc.runtime.adult_bytes > 0); + }) + } +} \ No newline at end of file diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index a915b3b7a8d..c46abbc6d64 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -27,11 +27,14 @@ pub struct Gc { impl Gc { /// Constructs a new `Gc` with the given value. 
pub fn new(value: NonNull>) -> Self { + // TODO: Determine whether it's worth keeping `set_root` approach unsafe { - Self { + let gc = Gc { inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), marker: PhantomData, - } + }; + gc.set_root(); + gc } } } diff --git a/boa_gc/tests/allocation.rs b/boa_gc/tests/allocation.rs index 96073f0a2c0..fac5d5f36e7 100644 --- a/boa_gc/tests/allocation.rs +++ b/boa_gc/tests/allocation.rs @@ -1,10 +1,28 @@ -use std::ops::Deref; +use std::mem; -use boa_gc::{BoaAlloc, Trace, Finalize}; +use boa_gc::{BoaAlloc, force_collect, GcTester}; #[test] fn gc_basic_cell_allocation() { - let gc_cell = BoaAlloc::new_cell("Hi"); + let gc_cell = BoaAlloc::new_cell(16 as u16); - assert_eq!(*gc_cell.borrow_mut(), "Hi"); + force_collect(); + GcTester::assert_collections(1); + GcTester::assert_youth_bytes_allocated(); + assert_eq!(*gc_cell.borrow_mut(), 16); +} + +#[test] +fn gc_basic_pointer_alloc() { + let gc = BoaAlloc::new(16 as u8); + + force_collect(); + GcTester::assert_collections(1); + GcTester::assert_youth_bytes_allocated(); + assert_eq!(*gc, 16); + + drop(gc); + force_collect(); + GcTester::assert_collections(2); + GcTester::assert_empty_gc(); } \ No newline at end of file From 7f417ba2f3bb5c6abc125fbd06af697d7dadf510 Mon Sep 17 00:00:00 2001 From: nekevss Date: Thu, 3 Nov 2022 21:31:05 -0400 Subject: [PATCH 10/55] Promotions appear to work! --- boa_gc/src/lib.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 09bc215d50f..b7519342ee4 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -528,6 +528,13 @@ impl GcTester { }) } + pub fn assert_collection_floor(floor:usize) { + BOA_GC.with(|current|{ + let gc = current.borrow(); + assert!(gc.runtime.collections > floor); + }) + } + pub fn assert_youth_bytes_allocated() { BOA_GC.with(|current| { let gc = current.borrow(); From c4f52749fd1278bb5f343f82e1a2fcd3e59b148b Mon Sep 17 00:00:00 2001 From: nekevss Date: Fri, 4 Nov 2022 18:12:20 -0400 Subject: [PATCH 11/55] Promotion tests --- boa_gc/tests/allocation.rs | 2 -- boa_gc/tests/promotions.rs | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 2 deletions(-) create mode 100644 boa_gc/tests/promotions.rs diff --git a/boa_gc/tests/allocation.rs b/boa_gc/tests/allocation.rs index fac5d5f36e7..2fbd6a6fd40 100644 --- a/boa_gc/tests/allocation.rs +++ b/boa_gc/tests/allocation.rs @@ -1,5 +1,3 @@ -use std::mem; - use boa_gc::{BoaAlloc, force_collect, GcTester}; #[test] diff --git a/boa_gc/tests/promotions.rs b/boa_gc/tests/promotions.rs new file mode 100644 index 00000000000..d4c7644dd76 --- /dev/null +++ b/boa_gc/tests/promotions.rs @@ -0,0 +1,37 @@ + +// Let's get weird and age some heap values + +use boa_gc::{BoaAlloc, force_collect, GcTester}; + + +#[test] +fn generational_promo_one() { + let mut storage = Vec::new(); + + // Super basic loop that loads bytes and force collections + for i in 0..200 as usize { + let gc = BoaAlloc::new(i); + storage.push(gc); + } + GcTester::assert_collection_floor(2); + // assert that items were promoted to adults + GcTester::assert_adult_bytes_allocated(); + drop(storage); + force_collect(); + GcTester::assert_empty_gc() +} + +#[test] +fn generational_promo_two() { + let mut storage = Vec::new(); + for i in 0..2000 as usize { + let gc = BoaAlloc::new(i); + if i % 10 == 0 { + storage.push(gc.clone()) + } + } + GcTester::assert_collection_floor(3); + + GcTester::assert_adult_bytes_allocated(); + GcTester::assert_youth_bytes_allocated(); +} \ No newline 
at end of file From 727ea98c8f676acf96a6bca83f0f6975da60ce65 Mon Sep 17 00:00:00 2001 From: nekevss Date: Fri, 4 Nov 2022 20:57:28 -0400 Subject: [PATCH 12/55] Transfer engine to gc and fixes --- boa_engine/Cargo.toml | 1 - boa_engine/src/builtins/function/mod.rs | 4 +- boa_engine/src/builtins/promise/mod.rs | 12 +- boa_engine/src/bytecompiler/function.rs | 4 +- boa_engine/src/bytecompiler/mod.rs | 10 +- boa_engine/src/context/mod.rs | 6 +- boa_engine/src/environments/compile.rs | 6 +- boa_engine/src/environments/runtime.rs | 10 +- boa_engine/src/job.rs | 2 +- boa_engine/src/object/jsobject.rs | 4 +- boa_engine/src/realm.rs | 4 +- boa_engine/src/vm/code_block.rs | 10 +- boa_gc/derive_macros/cargo.toml | 1 + boa_gc/src/gc_box.rs | 1 + boa_gc/src/internals/cell.rs | 274 ++++++++++++++++++++++- boa_gc/src/internals/cell_ref.rs | 281 ------------------------ boa_gc/src/internals/mod.rs | 4 +- boa_gc/src/lib.rs | 15 +- boa_gc/src/pointers/gc_ptr.rs | 8 + 19 files changed, 323 insertions(+), 334 deletions(-) delete mode 100644 boa_gc/src/internals/cell_ref.rs diff --git a/boa_engine/Cargo.toml b/boa_engine/Cargo.toml index 1db60c0a7f3..409644006b0 100644 --- a/boa_engine/Cargo.toml +++ b/boa_engine/Cargo.toml @@ -37,7 +37,6 @@ boa_profiler.workspace = true boa_macros.workspace = true boa_ast.workspace = true boa_parser.workspace = true -gc = "0.4.1" serde = { version = "1.0.147", features = ["derive", "rc"] } serde_json = "1.0.87" rand = "0.8.5" diff --git a/boa_engine/src/builtins/function/mod.rs b/boa_engine/src/builtins/function/mod.rs index 5cc70deec57..2b90b22553c 100644 --- a/boa_engine/src/builtins/function/mod.rs +++ b/boa_engine/src/builtins/function/mod.rs @@ -34,7 +34,7 @@ use boa_ast::{ operations::{bound_names, contains, lexically_declared_names, ContainsSymbol}, StatementList, }; -use boa_gc::{self, custom_trace, Finalize, Gc, Trace}; +use boa_gc::{self, custom_trace, Finalize, Gc, Trace, BoaAlloc}; use boa_interner::Sym; use boa_parser::Parser; use boa_profiler::Profiler; @@ -186,7 +186,7 @@ impl Captures { where T: NativeObject, { - Self(Gc::new(boa_gc::Cell::new(Box::new(captures)))) + Self(BoaAlloc::new_cell(Box::new(captures))) } /// Casts `Captures` to `Any` diff --git a/boa_engine/src/builtins/promise/mod.rs b/boa_engine/src/builtins/promise/mod.rs index ae4ebef81e8..03add1ac506 100644 --- a/boa_engine/src/builtins/promise/mod.rs +++ b/boa_engine/src/builtins/promise/mod.rs @@ -21,7 +21,7 @@ use crate::{ value::JsValue, Context, JsError, JsResult, }; -use boa_gc::{Cell as GcCell, Finalize, Gc, Trace}; +use boa_gc::{BoaAlloc, Cell as GcCell, Finalize, Gc, Trace}; use boa_profiler::Profiler; use std::{cell::Cell, rc::Rc}; use tap::{Conv, Pipe}; @@ -118,10 +118,10 @@ impl PromiseCapability { // 2. NOTE: C is assumed to be a constructor function that supports the parameter conventions of the Promise constructor (see 27.2.3.1). // 3. Let promiseCapability be the PromiseCapability Record { [[Promise]]: undefined, [[Resolve]]: undefined, [[Reject]]: undefined }. - let promise_capability = Gc::new(boa_gc::Cell::new(RejectResolve { + let promise_capability = BoaAlloc::new_cell(RejectResolve { reject: JsValue::undefined(), resolve: JsValue::undefined(), - })); + }); // 4. Let executorClosure be a new Abstract Closure with parameters (resolve, reject) that captures promiseCapability and performs the following steps when called: // 5. Let executor be CreateBuiltinFunction(executorClosure, 2, "", « »). @@ -436,7 +436,7 @@ impl Promise { } // 1. Let values be a new empty List. 
- let values = Gc::new(GcCell::new(Vec::new())); + let values = BoaAlloc::new_cell(Vec::new()); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. let remaining_elements_count = Rc::new(Cell::new(1)); @@ -677,7 +677,7 @@ impl Promise { } // 1. Let values be a new empty List. - let values = Gc::new(GcCell::new(Vec::new())); + let values = BoaAlloc::new_cell(Vec::new()); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. let remaining_elements_count = Rc::new(Cell::new(1)); @@ -1008,7 +1008,7 @@ impl Promise { } // 1. Let errors be a new empty List. - let errors = Gc::new(GcCell::new(Vec::new())); + let errors = BoaAlloc::new_cell(Vec::new()); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. let remaining_elements_count = Rc::new(Cell::new(1)); diff --git a/boa_engine/src/bytecompiler/function.rs b/boa_engine/src/bytecompiler/function.rs index 815ec309521..b2804269d33 100644 --- a/boa_engine/src/bytecompiler/function.rs +++ b/boa_engine/src/bytecompiler/function.rs @@ -7,7 +7,7 @@ use crate::{ use boa_ast::{ declaration::Binding, function::FormalParameterList, operations::bound_names, StatementList, }; -use boa_gc::Gc; +use boa_gc::{Gc, BoaAlloc}; use boa_interner::Sym; use rustc_hash::FxHashMap; @@ -219,6 +219,6 @@ impl FunctionCompiler { compiler.emit(Opcode::PushUndefined, &[]); compiler.emit(Opcode::Return, &[]); - Ok(Gc::new(compiler.finish())) + Ok(BoaAlloc::new(compiler.finish())) } } diff --git a/boa_engine/src/bytecompiler/mod.rs b/boa_engine/src/bytecompiler/mod.rs index 081a6b2259a..39919c06397 100644 --- a/boa_engine/src/bytecompiler/mod.rs +++ b/boa_engine/src/bytecompiler/mod.rs @@ -30,7 +30,7 @@ use boa_ast::{ }, Declaration, Expression, Statement, StatementList, StatementListItem, }; -use boa_gc::Gc; +use boa_gc::{Gc, BoaAlloc}; use boa_interner::{Interner, Sym}; use rustc_hash::FxHashMap; use std::mem::size_of; @@ -3310,7 +3310,7 @@ impl<'b> ByteCompiler<'b> { compiler.emit_opcode(Opcode::PushUndefined); compiler.emit_opcode(Opcode::Return); - let code = Gc::new(compiler.finish()); + let code = BoaAlloc::new(compiler.finish()); let index = self.code_block.functions.len() as u32; self.code_block.functions.push(code); self.emit(Opcode::GetFunction, &[index]); @@ -3481,7 +3481,7 @@ impl<'b> ByteCompiler<'b> { field_compiler.code_block.num_bindings = num_bindings; field_compiler.emit_opcode(Opcode::Return); - let code = Gc::new(field_compiler.finish()); + let code = BoaAlloc::new(field_compiler.finish()); let index = self.code_block.functions.len() as u32; self.code_block.functions.push(code); self.emit(Opcode::GetFunction, &[index]); @@ -3512,7 +3512,7 @@ impl<'b> ByteCompiler<'b> { field_compiler.code_block.num_bindings = num_bindings; field_compiler.emit_opcode(Opcode::Return); - let code = Gc::new(field_compiler.finish()); + let code = BoaAlloc::new(field_compiler.finish()); let index = self.code_block.functions.len() as u32; self.code_block.functions.push(code); self.emit(Opcode::GetFunction, &[index]); @@ -3563,7 +3563,7 @@ impl<'b> ByteCompiler<'b> { compiler.push_compile_environment(compile_environment); compiler.code_block.num_bindings = num_bindings; - let code = Gc::new(compiler.finish()); + let code = BoaAlloc::new(compiler.finish()); let index = self.code_block.functions.len() as u32; self.code_block.functions.push(code); self.emit(Opcode::GetFunction, &[index]); diff --git a/boa_engine/src/context/mod.rs b/boa_engine/src/context/mod.rs index f5531cdf0a7..be9cfb43e30 100644 --- a/boa_engine/src/context/mod.rs +++ 
b/boa_engine/src/context/mod.rs @@ -25,7 +25,7 @@ use crate::{ }; use boa_ast::StatementList; -use boa_gc::Gc; +use boa_gc::{Gc, BoaAlloc}; use boa_interner::{Interner, Sym}; use boa_parser::{Error as ParseError, Parser}; use boa_profiler::Profiler; @@ -473,7 +473,7 @@ impl Context { let mut compiler = ByteCompiler::new(Sym::MAIN, statement_list.strict(), self); compiler.create_decls(statement_list, false); compiler.compile_statement_list(statement_list, true, false)?; - Ok(Gc::new(compiler.finish())) + Ok(BoaAlloc::new(compiler.finish())) } /// Compile the AST into a `CodeBlock` with an additional declarative environment. @@ -486,7 +486,7 @@ impl Context { let _timer = Profiler::global().start_event("Compilation", "Main"); let mut compiler = ByteCompiler::new(Sym::MAIN, statement_list.strict(), self); compiler.compile_statement_list_with_new_declarative(statement_list, true, strict)?; - Ok(Gc::new(compiler.finish())) + Ok(BoaAlloc::new(compiler.finish())) } /// Call the VM with a `CodeBlock` and return the result. diff --git a/boa_engine/src/environments/compile.rs b/boa_engine/src/environments/compile.rs index 37aa9a64ec4..6af07278ced 100644 --- a/boa_engine/src/environments/compile.rs +++ b/boa_engine/src/environments/compile.rs @@ -2,7 +2,7 @@ use crate::{ environments::runtime::BindingLocator, property::PropertyDescriptor, Context, JsString, JsValue, }; use boa_ast::expression::Identifier; -use boa_gc::{Cell, Finalize, Gc, Trace}; +use boa_gc::{BoaAlloc, Cell, Finalize, Gc, Trace}; use rustc_hash::FxHashMap; @@ -223,12 +223,12 @@ impl Context { let environment_index = self.realm.compile_env.borrow().environment_index + 1; let outer = self.realm.compile_env.clone(); - self.realm.compile_env = Gc::new(Cell::new(CompileTimeEnvironment { + self.realm.compile_env = BoaAlloc::new_cell(CompileTimeEnvironment { outer: Some(outer), environment_index, bindings: FxHashMap::default(), function_scope, - })); + }); } /// Pop the last compile time environment from the stack. 
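The engine-side edits in this patch all follow the same substitution: direct `Gc::new(...)` and `Gc::new(boa_gc::Cell::new(...))` constructions are routed through the new `BoaAlloc` entry points so the thread-local heap can account for every allocation. A condensed sketch of the pattern is below; the helper names and types are arbitrary, assume `Trace` is implemented for them, and are not part of the diff:

    use boa_gc::{BoaAlloc, Cell, Gc};

    // Hypothetical helpers mirroring the substitution applied across the engine:
    // allocate through BoaAlloc so byte counts and the youth list stay up to date.
    fn alloc_tracked(bytes: Vec<u8>) -> Gc<Cell<Vec<u8>>> {
        // Formerly spelled as a plain `Gc::new(Cell::new(bytes))` against the old API.
        BoaAlloc::new_cell(bytes)
    }

    fn alloc_plain(n: u8) -> Gc<u8> {
        BoaAlloc::new(n)
    }
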
diff --git a/boa_engine/src/environments/runtime.rs b/boa_engine/src/environments/runtime.rs index 3314eb75e51..960396f5459 100644 --- a/boa_engine/src/environments/runtime.rs +++ b/boa_engine/src/environments/runtime.rs @@ -3,7 +3,7 @@ use std::cell::Cell; use crate::{ environments::CompileTimeEnvironment, error::JsNativeError, object::JsObject, Context, JsValue, }; -use boa_gc::{Cell as GcCell, Finalize, Gc, Trace}; +use boa_gc::{BoaAlloc, Cell as GcCell, Finalize, Gc, Trace}; use boa_ast::expression::Identifier; use rustc_hash::FxHashSet; @@ -232,7 +232,7 @@ impl DeclarativeEnvironmentStack { #[inline] pub(crate) fn new(global_compile_environment: Gc>) -> Self { Self { - stack: vec![Gc::new(DeclarativeEnvironment { + stack: vec![BoaAlloc::new(DeclarativeEnvironment { bindings: GcCell::new(Vec::new()), compile: global_compile_environment, poisoned: Cell::new(false), @@ -368,7 +368,7 @@ impl DeclarativeEnvironmentStack { let index = self.stack.len(); - self.stack.push(Gc::new(DeclarativeEnvironment { + self.stack.push(BoaAlloc::new(DeclarativeEnvironment { bindings: GcCell::new(vec![None; num_bindings]), compile: compile_environment, poisoned: Cell::new(poisoned), @@ -414,7 +414,7 @@ impl DeclarativeEnvironmentStack { JsValue::Null }; - self.stack.push(Gc::new(DeclarativeEnvironment { + self.stack.push(BoaAlloc::new(DeclarativeEnvironment { bindings: GcCell::new(vec![None; num_bindings]), compile: compile_environment, poisoned: Cell::new(poisoned), @@ -445,7 +445,7 @@ impl DeclarativeEnvironmentStack { let poisoned = outer.poisoned.get(); let slots = outer.slots.clone(); - self.stack.push(Gc::new(DeclarativeEnvironment { + self.stack.push(BoaAlloc::new(DeclarativeEnvironment { bindings: GcCell::new(vec![None; num_bindings]), compile: compile_environment, poisoned: Cell::new(poisoned), diff --git a/boa_engine/src/job.rs b/boa_engine/src/job.rs index 2a0988b2e57..f24be132a3a 100644 --- a/boa_engine/src/job.rs +++ b/boa_engine/src/job.rs @@ -1,5 +1,5 @@ use crate::{prelude::JsObject, Context, JsResult, JsValue}; -use gc::{Finalize, Trace}; +use boa_gc::{Finalize, Trace}; /// `JobCallback` records /// diff --git a/boa_engine/src/object/jsobject.rs b/boa_engine/src/object/jsobject.rs index d1afe8f99aa..6537e55c5c9 100644 --- a/boa_engine/src/object/jsobject.rs +++ b/boa_engine/src/object/jsobject.rs @@ -10,7 +10,7 @@ use crate::{ value::PreferredType, Context, JsResult, JsValue, }; -use boa_gc::{self, Finalize, Gc, Trace}; +use boa_gc::{self, BoaAlloc, Finalize, Gc, Trace}; use rustc_hash::FxHashMap; use std::{ cell::RefCell, @@ -37,7 +37,7 @@ impl JsObject { #[inline] fn from_object(object: Object) -> Self { Self { - inner: Gc::new(boa_gc::Cell::new(object)), + inner: BoaAlloc::new_cell(object), } } diff --git a/boa_engine/src/realm.rs b/boa_engine/src/realm.rs index 588f01fd16c..14a80496b3b 100644 --- a/boa_engine/src/realm.rs +++ b/boa_engine/src/realm.rs @@ -8,7 +8,7 @@ use crate::{ environments::{CompileTimeEnvironment, DeclarativeEnvironmentStack}, object::{GlobalPropertyMap, JsObject, JsPrototype, ObjectData, PropertyMap}, }; -use boa_gc::{Cell, Gc}; +use boa_gc::{BoaAlloc, Cell, Gc}; use boa_profiler::Profiler; /// Representation of a Realm. 
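The `DeclarativeEnvironment` allocations in `runtime.rs` above show the shape most engine objects take after this change: a shared `Gc` handle whose interior stays mutable through a `GcCell` field. A stripped-down sketch of that pattern, assuming the `Trace`/`Finalize` derives from `boa_gc_macros` cover the field types (illustrative only, not part of the diff):

    use boa_gc::{BoaAlloc, Cell, Finalize, Gc, Trace};

    // Hypothetical stand-in for an environment record: shared handle, mutable slots.
    #[derive(Trace, Finalize)]
    struct Bindings {
        slots: Cell<Vec<Option<u8>>>,
    }

    fn push_scope(num_bindings: usize) -> Gc<Bindings> {
        BoaAlloc::new(Bindings {
            slots: Cell::new(vec![None; num_bindings]),
        })
    }
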
@@ -33,7 +33,7 @@ impl Realm { // Allow identification of the global object easily let global_object = JsObject::from_proto_and_data(None, ObjectData::global()); - let global_compile_environment = Gc::new(Cell::new(CompileTimeEnvironment::new_global())); + let global_compile_environment = BoaAlloc::new_cell(CompileTimeEnvironment::new_global()); Self { global_object, diff --git a/boa_engine/src/vm/code_block.rs b/boa_engine/src/vm/code_block.rs index c23c75cc046..bf35ecc773e 100644 --- a/boa_engine/src/vm/code_block.rs +++ b/boa_engine/src/vm/code_block.rs @@ -24,7 +24,7 @@ use crate::{ Context, JsResult, JsString, JsValue, }; use boa_ast::{expression::Identifier, function::FormalParameterList}; -use boa_gc::{Cell, Finalize, Gc, Trace}; +use boa_gc::{BoaAlloc, Cell, Finalize, Gc, Trace}; use boa_interner::{Interner, Sym, ToInternedString}; use boa_profiler::Profiler; use std::{collections::VecDeque, convert::TryInto, mem::size_of}; @@ -1098,11 +1098,11 @@ impl JsObject { prototype, ObjectData::generator(Generator { state: GeneratorState::SuspendedStart, - context: Some(Gc::new(Cell::new(GeneratorContext { + context: Some(BoaAlloc::new_cell(GeneratorContext { environments, call_frame, stack, - }))), + })), }), ); @@ -1241,11 +1241,11 @@ impl JsObject { prototype, ObjectData::async_generator(AsyncGenerator { state: AsyncGeneratorState::SuspendedStart, - context: Some(Gc::new(Cell::new(GeneratorContext { + context: Some(BoaAlloc::new_cell(GeneratorContext { environments, call_frame, stack, - }))), + })), queue: VecDeque::new(), }), ); diff --git a/boa_gc/derive_macros/cargo.toml b/boa_gc/derive_macros/cargo.toml index b9d873e3ab8..e38539ab398 100644 --- a/boa_gc/derive_macros/cargo.toml +++ b/boa_gc/derive_macros/cargo.toml @@ -6,6 +6,7 @@ keywords = ["javascript", "js", "garbage", "memory"] edition = "2021" [lib] +name = "boa_gc_macros" proc-macro = true [dependencies] diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index c7297831e99..358e7aa5ef3 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -103,6 +103,7 @@ impl GcBoxHeader { } } +// NOTE: [repr(C)] is most likely unneeded here, but will keep it for now /// The GcBox represents a box on `BoaGc`'s heap. The GcBox's creation and allocation is handled /// by the allocator #[repr(C)] diff --git a/boa_gc/src/internals/cell.rs b/boa_gc/src/internals/cell.rs index f11bb4a8f06..d71b7faeab8 100644 --- a/boa_gc/src/internals/cell.rs +++ b/boa_gc/src/internals/cell.rs @@ -1,12 +1,13 @@ //! A garbage collected cell implementation use std::cell::{Cell, UnsafeCell}; -use std::fmt::{Debug, Display}; use std::hash::Hash; +use std::cmp::Ordering; +use std::fmt::{self, Debug, Display}; +use std::ops::{Deref, DerefMut}; use crate::{ internals::{ borrow_flag::{BorrowFlag, BorrowState, BORROWFLAG_INIT}, - GcCellRef, GcCellRefMut, }, trace::{Finalize, Trace}, }; @@ -197,3 +198,272 @@ unsafe impl Trace for GcCell { } } } + + +/// A wrapper type for an immutably borrowed value from a `GcCell`. +pub struct GcCellRef<'a, T: ?Sized + 'static> { + pub(crate) flags: &'a Cell, + pub(crate) value: &'a T, +} + +impl<'a, T: ?Sized> GcCellRef<'a, T> { + /// Copies a `GcCellRef`. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `GcCellRef::clone(...)`. A `Clone` implementation or a method + /// would interfere with the use of `c.borrow().clone()` to clone + /// the contents of a `GcCell`. 
+ #[inline] + pub fn clone(orig: &GcCellRef<'a, T>) -> GcCellRef<'a, T> { + orig.flags.set(orig.flags.get().add_reading()); + GcCellRef { + flags: orig.flags, + value: orig.value, + } + } + + /// Makes a new `GcCellRef` from a component of the borrowed data. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as `GcCellRef::map(...)`. + /// A method would interfere with methods of the same name on the contents + /// of a `GcCellRef` used through `Deref`. + #[inline] + pub fn map(orig: Self, f: F) -> GcCellRef<'a, U> + where + U: ?Sized, + F: FnOnce(&T) -> &U, + { + let ret = GcCellRef { + flags: orig.flags, + value: f(orig.value), + }; + + // We have to tell the compiler not to call the destructor of GcCellRef, + // because it will update the borrow flags. + std::mem::forget(orig); + + ret + } + + /// Splits a `GcCellRef` into multiple `GcCellRef`s for different components of the borrowed data. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as GcCellRef::map_split(...). + /// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`. + #[inline] + pub fn map_split(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>) + where + U: ?Sized, + V: ?Sized, + F: FnOnce(&T) -> (&U, &V), + { + let (a, b) = f(orig.value); + + orig.flags.set(orig.flags.get().add_reading()); + + let ret = ( + GcCellRef { + flags: orig.flags, + value: a, + }, + GcCellRef { + flags: orig.flags, + value: b, + }, + ); + + // We have to tell the compiler not to call the destructor of GcCellRef, + // because it will update the borrow flags. + std::mem::forget(orig); + + ret + } +} + +impl<'a, T: ?Sized> Deref for GcCellRef<'a, T> { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + self.value + } +} + +impl<'a, T: ?Sized> Drop for GcCellRef<'a, T> { + fn drop(&mut self) { + debug_assert!(self.flags.get().borrowed() == BorrowState::Reading); + self.flags.set(self.flags.get().sub_reading()); + } +} + +impl<'a, T: ?Sized + Debug> Debug for GcCellRef<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl<'a, T: ?Sized + Display> Display for GcCellRef<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&**self, f) + } +} + +/// A wrapper type for a mutably borrowed value from a `GcCell`. +pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> { + pub(crate) gc_cell: &'a GcCell, + pub(crate) value: &'a mut U, +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> { + /// Makes a new `GcCellRefMut` for a component of the borrowed data, e.g., an enum + /// variant. + /// + /// The `GcCellRefMut` is already mutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `GcCellRefMut::map(...)`. A method would interfere with methods of the same + /// name on the contents of a `GcCell` used through `Deref`. + #[inline] + pub fn map(orig: Self, f: F) -> GcCellRefMut<'a, T, V> + where + V: ?Sized, + F: FnOnce(&mut U) -> &mut V, + { + let value = unsafe { &mut *(orig.value as *mut U) }; + + let ret = GcCellRefMut { + gc_cell: orig.gc_cell, + value: f(value), + }; + + // We have to tell the compiler not to call the destructor of GcCellRefMut, + // because it will update the borrow flags. 
+ std::mem::forget(orig); + + ret + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'a, T, U> { + type Target = U; + + #[inline] + fn deref(&self) -> &U { + self.value + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'a, T, U> { + #[inline] + fn deref_mut(&mut self) -> &mut U { + self.value + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> { + #[inline] + fn drop(&mut self) { + debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing); + // Restore the rooted state of the GcCell's contents to the state of the GcCell. + // During the lifetime of the GcCellRefMut, the GcCell's contents are rooted. + if !self.gc_cell.flags.get().rooted() { + unsafe { + (*self.gc_cell.cell.get()).unroot(); + } + } + self.gc_cell + .flags + .set(self.gc_cell.flags.get().set_unused()); + } +} + +impl<'a, T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'a, T, U> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(&*(self.deref()), f) + } +} + +impl<'a, T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'a, T, U> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&**self, f) + } +} + +unsafe impl Send for GcCell {} + +impl Clone for GcCell { + #[inline] + fn clone(&self) -> Self { + Self::new(self.borrow().clone()) + } +} + +impl Default for GcCell { + #[inline] + fn default() -> Self { + Self::new(Default::default()) + } +} + +impl PartialEq for GcCell { + #[inline(always)] + fn eq(&self, other: &Self) -> bool { + *self.borrow() == *other.borrow() + } +} + +impl Eq for GcCell {} + +impl PartialOrd for GcCell { + #[inline(always)] + fn partial_cmp(&self, other: &Self) -> Option { + (*self.borrow()).partial_cmp(&*other.borrow()) + } + + #[inline(always)] + fn lt(&self, other: &Self) -> bool { + *self.borrow() < *other.borrow() + } + + #[inline(always)] + fn le(&self, other: &Self) -> bool { + *self.borrow() <= *other.borrow() + } + + #[inline(always)] + fn gt(&self, other: &Self) -> bool { + *self.borrow() > *other.borrow() + } + + #[inline(always)] + fn ge(&self, other: &Self) -> bool { + *self.borrow() >= *other.borrow() + } +} + +impl Ord for GcCell { + #[inline] + fn cmp(&self, other: &GcCell) -> Ordering { + (*self.borrow()).cmp(&*other.borrow()) + } +} + +impl Debug for GcCell { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.flags.get().borrowed() { + BorrowState::Unused | BorrowState::Reading => f + .debug_struct("GcCell") + .field("value", &self.borrow()) + .finish(), + BorrowState::Writing => f + .debug_struct("GcCell") + .field("value", &"") + .finish(), + } + } +} \ No newline at end of file diff --git a/boa_gc/src/internals/cell_ref.rs b/boa_gc/src/internals/cell_ref.rs deleted file mode 100644 index 3c9de15a52d..00000000000 --- a/boa_gc/src/internals/cell_ref.rs +++ /dev/null @@ -1,281 +0,0 @@ -//! Implementation of a garbage collected cell reference -use std::cell::Cell; -use std::cmp::Ordering; -use std::fmt::{self, Debug, Display}; -use std::ops::{Deref, DerefMut}; - -use crate::{ - internals::{ - borrow_flag::{BorrowFlag, BorrowState}, - GcCell, - }, - trace::Trace, -}; - -/// A wrapper type for an immutably borrowed value from a `GcCell`. -pub struct GcCellRef<'a, T: ?Sized + 'static> { - pub(crate) flags: &'a Cell, - pub(crate) value: &'a T, -} - -impl<'a, T: ?Sized> GcCellRef<'a, T> { - /// Copies a `GcCellRef`. - /// - /// The `GcCell` is already immutably borrowed, so this cannot fail. 
- /// - /// This is an associated function that needs to be used as - /// `GcCellRef::clone(...)`. A `Clone` implementation or a method - /// would interfere with the use of `c.borrow().clone()` to clone - /// the contents of a `GcCell`. - #[inline] - pub fn clone(orig: &GcCellRef<'a, T>) -> GcCellRef<'a, T> { - orig.flags.set(orig.flags.get().add_reading()); - GcCellRef { - flags: orig.flags, - value: orig.value, - } - } - - /// Makes a new `GcCellRef` from a component of the borrowed data. - /// - /// The `GcCell` is already immutably borrowed, so this cannot fail. - /// - /// This is an associated function that needs to be used as `GcCellRef::map(...)`. - /// A method would interfere with methods of the same name on the contents - /// of a `GcCellRef` used through `Deref`. - #[inline] - pub fn map(orig: Self, f: F) -> GcCellRef<'a, U> - where - U: ?Sized, - F: FnOnce(&T) -> &U, - { - let ret = GcCellRef { - flags: orig.flags, - value: f(orig.value), - }; - - // We have to tell the compiler not to call the destructor of GcCellRef, - // because it will update the borrow flags. - std::mem::forget(orig); - - ret - } - - /// Splits a `GcCellRef` into multiple `GcCellRef`s for different components of the borrowed data. - /// - /// The `GcCell` is already immutably borrowed, so this cannot fail. - /// - /// This is an associated function that needs to be used as GcCellRef::map_split(...). - /// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`. - #[inline] - pub fn map_split(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>) - where - U: ?Sized, - V: ?Sized, - F: FnOnce(&T) -> (&U, &V), - { - let (a, b) = f(orig.value); - - orig.flags.set(orig.flags.get().add_reading()); - - let ret = ( - GcCellRef { - flags: orig.flags, - value: a, - }, - GcCellRef { - flags: orig.flags, - value: b, - }, - ); - - // We have to tell the compiler not to call the destructor of GcCellRef, - // because it will update the borrow flags. - std::mem::forget(orig); - - ret - } -} - -impl<'a, T: ?Sized> Deref for GcCellRef<'a, T> { - type Target = T; - - #[inline] - fn deref(&self) -> &T { - self.value - } -} - -impl<'a, T: ?Sized> Drop for GcCellRef<'a, T> { - fn drop(&mut self) { - debug_assert!(self.flags.get().borrowed() == BorrowState::Reading); - self.flags.set(self.flags.get().sub_reading()); - } -} - -impl<'a, T: ?Sized + Debug> Debug for GcCellRef<'a, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Debug::fmt(&**self, f) - } -} - -impl<'a, T: ?Sized + Display> Display for GcCellRef<'a, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Display::fmt(&**self, f) - } -} - -/// A wrapper type for a mutably borrowed value from a `GcCell`. -pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> { - pub(crate) gc_cell: &'a GcCell, - pub(crate) value: &'a mut U, -} - -impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> { - /// Makes a new `GcCellRefMut` for a component of the borrowed data, e.g., an enum - /// variant. - /// - /// The `GcCellRefMut` is already mutably borrowed, so this cannot fail. - /// - /// This is an associated function that needs to be used as - /// `GcCellRefMut::map(...)`. A method would interfere with methods of the same - /// name on the contents of a `GcCell` used through `Deref`. 
- #[inline] - pub fn map(orig: Self, f: F) -> GcCellRefMut<'a, T, V> - where - V: ?Sized, - F: FnOnce(&mut U) -> &mut V, - { - let value = unsafe { &mut *(orig.value as *mut U) }; - - let ret = GcCellRefMut { - gc_cell: orig.gc_cell, - value: f(value), - }; - - // We have to tell the compiler not to call the destructor of GcCellRefMut, - // because it will update the borrow flags. - std::mem::forget(orig); - - ret - } -} - -impl<'a, T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'a, T, U> { - type Target = U; - - #[inline] - fn deref(&self) -> &U { - self.value - } -} - -impl<'a, T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'a, T, U> { - #[inline] - fn deref_mut(&mut self) -> &mut U { - self.value - } -} - -impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> { - #[inline] - fn drop(&mut self) { - debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing); - // Restore the rooted state of the GcCell's contents to the state of the GcCell. - // During the lifetime of the GcCellRefMut, the GcCell's contents are rooted. - if !self.gc_cell.flags.get().rooted() { - unsafe { - (*self.gc_cell.cell.get()).unroot(); - } - } - self.gc_cell - .flags - .set(self.gc_cell.flags.get().set_unused()); - } -} - -impl<'a, T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'a, T, U> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Debug::fmt(&*(self.deref()), f) - } -} - -impl<'a, T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'a, T, U> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Display::fmt(&**self, f) - } -} - -unsafe impl Send for GcCell {} - -impl Clone for GcCell { - #[inline] - fn clone(&self) -> Self { - Self::new(self.borrow().clone()) - } -} - -impl Default for GcCell { - #[inline] - fn default() -> Self { - Self::new(Default::default()) - } -} - -impl PartialEq for GcCell { - #[inline(always)] - fn eq(&self, other: &Self) -> bool { - *self.borrow() == *other.borrow() - } -} - -impl Eq for GcCell {} - -impl PartialOrd for GcCell { - #[inline(always)] - fn partial_cmp(&self, other: &Self) -> Option { - (*self.borrow()).partial_cmp(&*other.borrow()) - } - - #[inline(always)] - fn lt(&self, other: &Self) -> bool { - *self.borrow() < *other.borrow() - } - - #[inline(always)] - fn le(&self, other: &Self) -> bool { - *self.borrow() <= *other.borrow() - } - - #[inline(always)] - fn gt(&self, other: &Self) -> bool { - *self.borrow() > *other.borrow() - } - - #[inline(always)] - fn ge(&self, other: &Self) -> bool { - *self.borrow() >= *other.borrow() - } -} - -impl Ord for GcCell { - #[inline] - fn cmp(&self, other: &GcCell) -> Ordering { - (*self.borrow()).cmp(&*other.borrow()) - } -} - -impl Debug for GcCell { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.flags.get().borrowed() { - BorrowState::Unused | BorrowState::Reading => f - .debug_struct("GcCell") - .field("value", &self.borrow()) - .finish(), - BorrowState::Writing => f - .debug_struct("GcCell") - .field("value", &"") - .finish(), - } - } -} diff --git a/boa_gc/src/internals/mod.rs b/boa_gc/src/internals/mod.rs index dd2e8dda040..2a87cbc0ad0 100644 --- a/boa_gc/src/internals/mod.rs +++ b/boa_gc/src/internals/mod.rs @@ -1,8 +1,6 @@ pub(crate) mod borrow_flag; pub mod cell; -pub mod cell_ref; pub mod ephemeron; -pub use cell::GcCell; -pub use cell_ref::{GcCellRef, GcCellRefMut}; +pub use cell::*; pub use ephemeron::Ephemeron; diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index b7519342ee4..72204ab193e 
100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -1,25 +1,18 @@ //! Garbage collector for the Boa JavaScript engine. - use std::cell::{Cell as StdCell, RefCell as StdRefCell}; use std::mem; use std::ptr::NonNull; -pub use boa_gc_macros::{Finalize, Trace}; - -/// `gc_derive` is a general derive prelude import -pub mod derive_prelude { - pub use crate::GcPointer; - pub use boa_gc_macros::{Finalize, Trace}; -} - mod gc_box; mod internals; -pub mod pointers; +mod pointers; pub mod trace; +pub use boa_gc_macros::{Trace, Finalize}; + pub use crate::trace::{Finalize, Trace}; pub(crate) use gc_box::GcBox; -pub use internals::{Ephemeron, GcCell as Cell, GcCellRef as CellRef}; +pub use internals::{Ephemeron, GcCell as Cell, GcCellRef as Ref, GcCellRefMut as RefMut}; pub use pointers::{Gc, WeakGc, WeakPair}; pub type GcPointer = NonNull>; diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index c46abbc6d64..9747cc1c668 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -7,6 +7,7 @@ use std::ops::Deref; use std::ptr::{self, NonNull}; use std::rc::Rc; +use crate::BoaAlloc; use crate::gc_box::GcBox; use crate::{ finalizer_safe, @@ -171,6 +172,13 @@ impl Drop for Gc { } } +impl Default for Gc { + #[inline] + fn default() -> Self { + BoaAlloc::new(Default::default()) + } +} + impl PartialEq for Gc { #[inline(always)] fn eq(&self, other: &Self) -> bool { From f95ad04e0769a2cec1e78edb7f02a8a524814874 Mon Sep 17 00:00:00 2001 From: nekevss Date: Fri, 4 Nov 2022 22:41:23 -0400 Subject: [PATCH 13/55] gc fixes and rustfmt --- boa_engine/src/builtins/function/mod.rs | 2 +- boa_engine/src/bytecompiler/function.rs | 2 +- boa_engine/src/bytecompiler/mod.rs | 2 +- boa_engine/src/context/mod.rs | 2 +- boa_gc/Cargo.toml | 1 + boa_gc/src/gc_box.rs | 6 -- boa_gc/src/internals/borrow_flag.rs | 1 - boa_gc/src/internals/cell.rs | 9 +-- boa_gc/src/internals/ephemeron.rs | 4 +- boa_gc/src/lib.rs | 91 ++++++++++++++----------- boa_gc/src/pointers/gc_ptr.rs | 4 +- boa_gc/src/pointers/weak_pair.rs | 4 +- boa_gc/src/pointers/weak_ptr.rs | 2 +- boa_gc/tests/allocation.rs | 4 +- boa_gc/tests/promotions.rs | 8 +-- 15 files changed, 73 insertions(+), 69 deletions(-) diff --git a/boa_engine/src/builtins/function/mod.rs b/boa_engine/src/builtins/function/mod.rs index 2b90b22553c..213e7930b5a 100644 --- a/boa_engine/src/builtins/function/mod.rs +++ b/boa_engine/src/builtins/function/mod.rs @@ -34,7 +34,7 @@ use boa_ast::{ operations::{bound_names, contains, lexically_declared_names, ContainsSymbol}, StatementList, }; -use boa_gc::{self, custom_trace, Finalize, Gc, Trace, BoaAlloc}; +use boa_gc::{self, custom_trace, BoaAlloc, Finalize, Gc, Trace}; use boa_interner::Sym; use boa_parser::Parser; use boa_profiler::Profiler; diff --git a/boa_engine/src/bytecompiler/function.rs b/boa_engine/src/bytecompiler/function.rs index b2804269d33..90e46c7070f 100644 --- a/boa_engine/src/bytecompiler/function.rs +++ b/boa_engine/src/bytecompiler/function.rs @@ -7,7 +7,7 @@ use crate::{ use boa_ast::{ declaration::Binding, function::FormalParameterList, operations::bound_names, StatementList, }; -use boa_gc::{Gc, BoaAlloc}; +use boa_gc::{BoaAlloc, Gc}; use boa_interner::Sym; use rustc_hash::FxHashMap; diff --git a/boa_engine/src/bytecompiler/mod.rs b/boa_engine/src/bytecompiler/mod.rs index 39919c06397..5737a3f3fe4 100644 --- a/boa_engine/src/bytecompiler/mod.rs +++ b/boa_engine/src/bytecompiler/mod.rs @@ -30,7 +30,7 @@ use boa_ast::{ }, Declaration, Expression, Statement, StatementList, 
StatementListItem, }; -use boa_gc::{Gc, BoaAlloc}; +use boa_gc::{BoaAlloc, Gc}; use boa_interner::{Interner, Sym}; use rustc_hash::FxHashMap; use std::mem::size_of; diff --git a/boa_engine/src/context/mod.rs b/boa_engine/src/context/mod.rs index be9cfb43e30..de645744618 100644 --- a/boa_engine/src/context/mod.rs +++ b/boa_engine/src/context/mod.rs @@ -25,7 +25,7 @@ use crate::{ }; use boa_ast::StatementList; -use boa_gc::{Gc, BoaAlloc}; +use boa_gc::{BoaAlloc, Gc}; use boa_interner::{Interner, Sym}; use boa_parser::{Error as ParseError, Parser}; use boa_profiler::Profiler; diff --git a/boa_gc/Cargo.toml b/boa_gc/Cargo.toml index fb1a8f5da25..4fd137aa351 100644 --- a/boa_gc/Cargo.toml +++ b/boa_gc/Cargo.toml @@ -11,6 +11,7 @@ repository.workspace = true rust-version.workspace = true [dependencies] +boa_profiler.workspace = true boa_gc_macros = { path = "derive_macros", version = "0.16.0" } # Optional Dependencies diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index 358e7aa5ef3..0b7f96ac4b3 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -1,6 +1,5 @@ use crate::Trace; use std::cell::Cell; -use std::mem; use std::ptr::{self, NonNull}; // Age and Weak Flags @@ -165,11 +164,6 @@ impl GcBox { self.header.dec_roots(); } - /// Returns a pointer to the `GcBox`'s value, without dereferencing it. - pub(crate) fn value_ptr(this: *const GcBox) -> *const T { - unsafe { ptr::addr_of!((*this).value) } - } - /// Returns a reference to the `GcBox`'s value. pub(crate) fn value(&self) -> &T { &self.value diff --git a/boa_gc/src/internals/borrow_flag.rs b/boa_gc/src/internals/borrow_flag.rs index dbcd04b5e96..fa149952bbb 100644 --- a/boa_gc/src/internals/borrow_flag.rs +++ b/boa_gc/src/internals/borrow_flag.rs @@ -1,4 +1,3 @@ - #[derive(Copy, Clone)] pub(crate) struct BorrowFlag(usize); diff --git a/boa_gc/src/internals/cell.rs b/boa_gc/src/internals/cell.rs index d71b7faeab8..5750ea218f1 100644 --- a/boa_gc/src/internals/cell.rs +++ b/boa_gc/src/internals/cell.rs @@ -1,14 +1,12 @@ //! A garbage collected cell implementation use std::cell::{Cell, UnsafeCell}; -use std::hash::Hash; use std::cmp::Ordering; use std::fmt::{self, Debug, Display}; +use std::hash::Hash; use std::ops::{Deref, DerefMut}; use crate::{ - internals::{ - borrow_flag::{BorrowFlag, BorrowState, BORROWFLAG_INIT}, - }, + internals::borrow_flag::{BorrowFlag, BorrowState, BORROWFLAG_INIT}, trace::{Finalize, Trace}, }; @@ -199,7 +197,6 @@ unsafe impl Trace for GcCell { } } - /// A wrapper type for an immutably borrowed value from a `GcCell`. pub struct GcCellRef<'a, T: ?Sized + 'static> { pub(crate) flags: &'a Cell, @@ -466,4 +463,4 @@ impl Debug for GcCell { .finish(), } } -} \ No newline at end of file +} diff --git a/boa_gc/src/internals/ephemeron.rs b/boa_gc/src/internals/ephemeron.rs index deab1708a42..29682e124b7 100644 --- a/boa_gc/src/internals/ephemeron.rs +++ b/boa_gc/src/internals/ephemeron.rs @@ -25,8 +25,8 @@ impl Ephemeron { pub unsafe fn new_pair(key: NonNull>, value: V) -> Self { let ptr = NonNull::new_unchecked(key.as_ptr()); - Ephemeron { - key: Cell::new(Some(ptr)), + Ephemeron { + key: Cell::new(Some(ptr)), value, } } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 72204ab193e..af6815375e2 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -1,4 +1,5 @@ //! Garbage collector for the Boa JavaScript engine. 
+use boa_profiler::Profiler; use std::cell::{Cell as StdCell, RefCell as StdRefCell}; use std::mem; use std::ptr::NonNull; @@ -8,7 +9,7 @@ mod internals; mod pointers; pub mod trace; -pub use boa_gc_macros::{Trace, Finalize}; +pub use boa_gc_macros::{Finalize, Trace}; pub use crate::trace::{Finalize, Trace}; pub(crate) use gc_box::GcBox; @@ -36,14 +37,14 @@ struct GcConfig { } // Setting the defaults to an arbitrary value currently. -// +// // TODO: Add a configure later impl Default for GcConfig { fn default() -> Self { Self { - youth_threshold: 1024, - youth_threshold_base: 1024, - adult_threshold: 4096, + youth_threshold: 4096, + youth_threshold_base: 4096, + adult_threshold: 16384, growth_ratio: 0.7, youth_promo_age: 3, } @@ -111,13 +112,14 @@ pub struct BoaAlloc; impl BoaAlloc { pub fn new(value: T) -> Gc { + let _timer = Profiler::global().start_event("New Pointer", "BoaAlloc"); BOA_GC.with(|st| { let mut gc = st.borrow_mut(); unsafe { Self::manage_state(&mut *gc); } - + let gc_box = GcBox::new(value); let element_size = mem::size_of_val::>(&gc_box); @@ -138,6 +140,7 @@ impl BoaAlloc { } pub fn new_cell(value: T) -> Gc> { + let _timer = Profiler::global().start_event("New Cell", "BoaAlloc"); BOA_GC.with(|st| { let mut gc = st.borrow_mut(); @@ -146,7 +149,7 @@ impl BoaAlloc { unsafe { Self::manage_state(&mut *gc); } - + let gc_box = GcBox::new(Cell::new(value)); let element_size = mem::size_of_val::>>(&gc_box); let element_pointer = Box::into_raw(Box::from(gc_box)); @@ -166,6 +169,7 @@ impl BoaAlloc { } pub fn new_weak_pair(key: NonNull>, value: V) -> WeakPair { + let _timer = Profiler::global().start_event("New Weak Pair", "BoaAlloc"); BOA_GC.with(|internals| { let mut gc = internals.borrow_mut(); @@ -190,6 +194,7 @@ impl BoaAlloc { } pub fn new_weak_ref(value: NonNull>) -> WeakGc { + let _timer = Profiler::global().start_event("New Weak Pointer", "BoaAlloc"); BOA_GC.with(|state| { let mut gc = state.borrow_mut(); @@ -220,6 +225,7 @@ impl BoaAlloc { promotions: Vec>>, gc: &mut BoaGc, ) { + let _timer = Profiler::global().start_event("Gc Promoting", "gc"); for node in promotions { (*node.as_ptr()).set_header_pointer(gc.adult_start.take()); let allocation_bytes = mem::size_of_val::>(&(*node.as_ptr())); @@ -239,25 +245,23 @@ impl BoaAlloc { gc.config.adult_threshold = (gc.runtime.adult_bytes as f64 / gc.config.growth_ratio) as usize } - } else { + } else if gc.runtime.youth_bytes > gc.config.youth_threshold { + Collector::run_youth_collection(gc); + + // If we are constrained on the top of the stack, + // increase the size of capacity, so a garbage collection + // isn't triggered on every allocation if gc.runtime.youth_bytes > gc.config.youth_threshold { - Collector::run_youth_collection(gc); - - // If we are constrained on the top of the stack, - // increase the size of capacity, so a garbage collection - // isn't triggered on every allocation - if gc.runtime.youth_bytes > gc.config.youth_threshold { - gc.config.youth_threshold = - (gc.runtime.youth_bytes as f64 / gc.config.growth_ratio) as usize - } + gc.config.youth_threshold = + (gc.runtime.youth_bytes as f64 / gc.config.growth_ratio) as usize + } - // The young object threshold should only be raised in cases of high laod. 
It - // should retract back to base when the load lessens - if gc.runtime.youth_bytes < gc.config.youth_threshold_base - && gc.config.youth_threshold != gc.config.youth_threshold_base - { - gc.config.youth_threshold = gc.config.youth_threshold_base - } + // The young object threshold should only be raised in cases of high laod. It + // should retract back to base when the load lessens + if gc.runtime.youth_bytes < gc.config.youth_threshold_base + && gc.config.youth_threshold != gc.config.youth_threshold_base + { + gc.config.youth_threshold = gc.config.youth_threshold_base } } } @@ -278,9 +282,10 @@ pub struct Collector; impl Collector { pub(crate) unsafe fn run_youth_collection(gc: &mut BoaGc) { + let _timer = Profiler::global().start_event("Gc Youth Collection", "gc"); gc.runtime.collections += 1; let unreachable_nodes = Self::mark_heap(&gc.youth_start); - + if !unreachable_nodes.is_empty() { Self::finalize(unreachable_nodes); } @@ -299,10 +304,11 @@ impl Collector { } pub(crate) unsafe fn run_full_collection(gc: &mut BoaGc) { + let _timer = Profiler::global().start_event("Gc Full Collection", "gc"); gc.runtime.collections += 1; let unreachable_adults = Self::mark_heap(&gc.adult_start); let unreachable_youths = Self::mark_heap(&gc.youth_start); - + // Check if any unreachable nodes were found and finalize if !unreachable_adults.is_empty() { Self::finalize(unreachable_adults); @@ -330,6 +336,7 @@ impl Collector { pub(crate) unsafe fn mark_heap( head: &StdCell>>>, ) -> Vec>> { + let _timer = Profiler::global().start_event("Gc Marking", "gc"); // Walk the list, tracing and marking the nodes let mut finalize = Vec::new(); let mut ephemeron_queue = Vec::new(); @@ -337,12 +344,10 @@ impl Collector { while let Some(node) = mark_head.get() { if (*node.as_ptr()).header.is_ephemeron() { ephemeron_queue.push(node); + } else if (*node.as_ptr()).header.roots() > 0 { + (*node.as_ptr()).trace_inner(); } else { - if (*node.as_ptr()).header.roots() > 0 { - (*node.as_ptr()).trace_inner(); - } else { - finalize.push(node) - } + finalize.push(node) } mark_head = &(*node.as_ptr()).header.next; } @@ -406,6 +411,7 @@ impl Collector { } unsafe fn finalize(finalize_vec: Vec>>) { + let _timer = Profiler::global().start_event("Gc Finalization", "gc"); for node in finalize_vec { // We double check that the unreachable nodes are actually unreachable // prior to finalization as they could have been marked by a different @@ -422,6 +428,7 @@ impl Collector { total_bytes: &mut usize, promotion_age: &u8, ) -> Vec>> { + let _timer = Profiler::global().start_event("Gc Sweeping", "gc"); let _guard = DropGuard::new(); let mut promotions = Vec::new(); @@ -443,6 +450,10 @@ impl Collector { *heap_bytes -= unallocated_bytes; *total_bytes -= unallocated_bytes; sweep_head.set(unmarked_node.header.next.take()); + // We have now finalized and taken care of the below node. 
We now forget the node + // to remove the node without calling the destructor on Gc or any other value + // since calling core::ptr::drop_in_place will trigger `Gc::drop`, which accesses `Gc::inner()` + mem::forget(unmarked_node) } } @@ -454,6 +465,7 @@ impl Collector { bytes_allocated: &mut usize, total_allocated: &mut usize, ) { + let _timer = Profiler::global().start_event("Gc Sweeping", "gc"); let _guard = DropGuard::new(); let mut sweep_head = heap_start; @@ -469,6 +481,10 @@ impl Collector { *bytes_allocated -= unallocated_bytes; *total_allocated -= unallocated_bytes; sweep_head.set(unmarked_node.header.next.take()); + // We have now finalized and taken care of the below node. We now forget the node + // to remove the node without calling the destructor on Gc or any other value + // since calling core::ptr::drop_in_place will trigger `Gc::drop`, which accesses `Gc::inner()` + mem::forget(unmarked_node) } } } @@ -510,19 +526,18 @@ pub fn force_collect() { }) } - pub struct GcTester; impl GcTester { pub fn assert_collections(o: usize) { - BOA_GC.with(|current|{ + BOA_GC.with(|current| { let gc = current.borrow(); assert_eq!(gc.runtime.collections, o); }) } - pub fn assert_collection_floor(floor:usize) { - BOA_GC.with(|current|{ + pub fn assert_collection_floor(floor: usize) { + BOA_GC.with(|current| { let gc = current.borrow(); assert!(gc.runtime.collections > floor); }) @@ -539,9 +554,9 @@ impl GcTester { BOA_GC.with(|current| { let gc = current.borrow(); - assert_eq!(gc.adult_start.get().is_none(), true); + assert!(gc.adult_start.get().is_none()); assert!(gc.runtime.adult_bytes == 0); - assert_eq!(gc.youth_start.get().is_none(), true); + assert!(gc.youth_start.get().is_none()); assert!(gc.runtime.youth_bytes == 0); }) } @@ -552,4 +567,4 @@ impl GcTester { assert!(gc.runtime.adult_bytes > 0); }) } -} \ No newline at end of file +} diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index 9747cc1c668..2908c1867be 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -7,8 +7,8 @@ use std::ops::Deref; use std::ptr::{self, NonNull}; use std::rc::Rc; -use crate::BoaAlloc; use crate::gc_box::GcBox; +use crate::BoaAlloc; use crate::{ finalizer_safe, trace::{Finalize, Trace}, @@ -77,8 +77,8 @@ impl Gc { #[inline] fn inner_ptr(&self) -> *mut GcBox { + // Note: Initial `finalizer_safe` was asserted here. 
Needs to be determined whether this was the best practice assert!(finalizer_safe()); - unsafe { clear_root_bit(self.inner_ptr.get()).as_ptr() } } diff --git a/boa_gc/src/pointers/weak_pair.rs b/boa_gc/src/pointers/weak_pair.rs index 02bcf5dc35c..7009becc8c2 100644 --- a/boa_gc/src/pointers/weak_pair.rs +++ b/boa_gc/src/pointers/weak_pair.rs @@ -26,7 +26,7 @@ impl WeakPair { fn inner_ptr(&self) -> *mut GcBox> { assert!(finalizer_safe()); - unsafe { self.inner_ptr.get().as_ptr() } + self.inner_ptr.get().as_ptr() } #[inline] @@ -45,7 +45,7 @@ impl WeakPair { } } -impl Finalize for WeakPair {} +impl Finalize for WeakPair {} unsafe impl Trace for WeakPair { #[inline] diff --git a/boa_gc/src/pointers/weak_ptr.rs b/boa_gc/src/pointers/weak_ptr.rs index a9d8d4a2bd1..d2d28187533 100644 --- a/boa_gc/src/pointers/weak_ptr.rs +++ b/boa_gc/src/pointers/weak_ptr.rs @@ -26,7 +26,7 @@ impl WeakGc { fn inner_ptr(&self) -> *mut GcBox> { assert!(finalizer_safe()); - unsafe { self.inner_ptr.get().as_ptr() } + self.inner_ptr.get().as_ptr() } #[inline] diff --git a/boa_gc/tests/allocation.rs b/boa_gc/tests/allocation.rs index 2fbd6a6fd40..292c239131d 100644 --- a/boa_gc/tests/allocation.rs +++ b/boa_gc/tests/allocation.rs @@ -1,4 +1,4 @@ -use boa_gc::{BoaAlloc, force_collect, GcTester}; +use boa_gc::{force_collect, BoaAlloc, GcTester}; #[test] fn gc_basic_cell_allocation() { @@ -23,4 +23,4 @@ fn gc_basic_pointer_alloc() { force_collect(); GcTester::assert_collections(2); GcTester::assert_empty_gc(); -} \ No newline at end of file +} diff --git a/boa_gc/tests/promotions.rs b/boa_gc/tests/promotions.rs index d4c7644dd76..a75023baa13 100644 --- a/boa_gc/tests/promotions.rs +++ b/boa_gc/tests/promotions.rs @@ -1,8 +1,6 @@ - // Let's get weird and age some heap values -use boa_gc::{BoaAlloc, force_collect, GcTester}; - +use boa_gc::{force_collect, BoaAlloc, GcTester}; #[test] fn generational_promo_one() { @@ -31,7 +29,7 @@ fn generational_promo_two() { } } GcTester::assert_collection_floor(3); - + GcTester::assert_adult_bytes_allocated(); GcTester::assert_youth_bytes_allocated(); -} \ No newline at end of file +} From 174b0245a74f222597e03ba61bcc00088c1225d6 Mon Sep 17 00:00:00 2001 From: nekevss Date: Fri, 4 Nov 2022 22:57:03 -0400 Subject: [PATCH 14/55] Addressing clippy lints with fixes or allow --- boa_gc/src/gc_box.rs | 6 +++--- boa_gc/src/internals/ephemeron.rs | 6 +----- boa_gc/src/lib.rs | 34 +++++++++++++++++-------------- boa_gc/src/pointers/gc_ptr.rs | 2 +- 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index 0b7f96ac4b3..6e5ca6146ce 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -31,7 +31,7 @@ impl GcBoxHeader { #[inline] pub fn new_weak() -> Self { // Set weak_flag - let cycle_age = 0_u8 | WEAK_MASK; + let cycle_age = WEAK_MASK; GcBoxHeader { roots: Cell::new(0), cycle_age: Cell::new(cycle_age), @@ -46,7 +46,7 @@ impl GcBoxHeader { #[inline] pub fn roots(&self) -> usize { - &self.roots.get() & ROOTS_MASK + self.roots.get() & ROOTS_MASK } #[inline] @@ -83,7 +83,7 @@ impl GcBoxHeader { #[inline] pub fn age(&self) -> u8 { - &self.cycle_age.get() & AGE_MASK + self.cycle_age.get() & AGE_MASK } #[inline] diff --git a/boa_gc/src/internals/ephemeron.rs b/boa_gc/src/internals/ephemeron.rs index 29682e124b7..1ee275e4ce8 100644 --- a/boa_gc/src/internals/ephemeron.rs +++ b/boa_gc/src/internals/ephemeron.rs @@ -45,11 +45,7 @@ impl Ephemeron { #[inline] fn inner_key_ptr(&self) -> Option<*mut GcBox> { assert!(finalizer_safe()); - if let 
Some(key_node) = self.key.get() { - Some(key_node.as_ptr()) - } else { - None - } + self.key.get().map(|key_node| key_node.as_ptr()) } #[inline] diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index af6815375e2..74039a877f6 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -1,4 +1,18 @@ //! Garbage collector for the Boa JavaScript engine. +//! + + +#![allow( + clippy::let_unit_value, + clippy::should_implement_trait, + clippy::match_like_matches_macro, + clippy::new_ret_no_self, + // Putting the below on the allow list for now, but these should eventually be addressed + clippy::missing_safety_doc, + clippy::explicit_auto_deref, + clippy::borrow_deref_ref, +)] + use boa_profiler::Profiler; use std::cell::{Cell as StdCell, RefCell as StdRefCell}; use std::mem; @@ -51,6 +65,7 @@ impl Default for GcConfig { } } +#[derive(Default)] struct GcRuntimeData { collections: usize, total_bytes_allocated: usize, @@ -58,17 +73,6 @@ struct GcRuntimeData { adult_bytes: usize, } -impl Default for GcRuntimeData { - fn default() -> Self { - Self { - collections: 0, - total_bytes_allocated: 0, - youth_bytes: 0, - adult_bytes: 0, - } - } -} - struct BoaGc { config: GcConfig, runtime: GcRuntimeData, @@ -117,7 +121,7 @@ impl BoaAlloc { let mut gc = st.borrow_mut(); unsafe { - Self::manage_state(&mut *gc); + Self::manage_state(&mut gc); } let gc_box = GcBox::new(value); @@ -147,7 +151,7 @@ impl BoaAlloc { // Manage state preps the internal state for allocation and // triggers a collection if the state dictates it. unsafe { - Self::manage_state(&mut *gc); + Self::manage_state(&mut gc); } let gc_box = GcBox::new(Cell::new(value)); @@ -174,7 +178,7 @@ impl BoaAlloc { let mut gc = internals.borrow_mut(); unsafe { - Self::manage_state(&mut *gc); + Self::manage_state(&mut gc); let ephem = Ephemeron::new_pair(key, value); let gc_box = GcBox::new_weak(ephem); @@ -199,7 +203,7 @@ impl BoaAlloc { let mut gc = state.borrow_mut(); unsafe { - Self::manage_state(&mut *gc); + Self::manage_state(&mut gc); let ephemeron = Ephemeron::new(value); let gc_box = GcBox::new_weak(ephemeron); diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index 2908c1867be..14201e82811 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -156,7 +156,7 @@ impl Deref for Gc { #[inline] fn deref(&self) -> &T { - &self.inner().value() + self.inner().value() } } From 585f5ee2dfbc80e375397763d8e12dba465245e1 Mon Sep 17 00:00:00 2001 From: nekevss Date: Fri, 4 Nov 2022 22:57:37 -0400 Subject: [PATCH 15/55] Rustfmt --- boa_gc/src/lib.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 74039a877f6..c7252fdbd9a 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -1,6 +1,5 @@ //! Garbage collector for the Boa JavaScript engine. -//! - +//! 
#![allow( clippy::let_unit_value, From e40c40c74e505105e6673481ab3f193a3079f0cf Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 09:07:36 -0400 Subject: [PATCH 16/55] rebase and post rebase changes --- boa_gc/derive_macros/cargo.toml | 2 +- boa_gc/src/internals/cell_ref.rs | 281 +++++++++++++++++++++++++++++++ boa_gc/src/lib.rs | 3 + boa_gc/tests/promotions.rs | 2 +- 4 files changed, 286 insertions(+), 2 deletions(-) create mode 100644 boa_gc/src/internals/cell_ref.rs diff --git a/boa_gc/derive_macros/cargo.toml b/boa_gc/derive_macros/cargo.toml index e38539ab398..e7f90833608 100644 --- a/boa_gc/derive_macros/cargo.toml +++ b/boa_gc/derive_macros/cargo.toml @@ -2,7 +2,7 @@ name = "boa_gc_macros" version = "0.16.0" description = "Garbage collector for the Boa JavaScript engine." -keywords = ["javascript", "js", "garbage", "memory"] +keywords = ["javascript", "js", "garbage", "memory", "derive"] edition = "2021" [lib] diff --git a/boa_gc/src/internals/cell_ref.rs b/boa_gc/src/internals/cell_ref.rs new file mode 100644 index 00000000000..3c9de15a52d --- /dev/null +++ b/boa_gc/src/internals/cell_ref.rs @@ -0,0 +1,281 @@ +//! Implementation of a garbage collected cell reference +use std::cell::Cell; +use std::cmp::Ordering; +use std::fmt::{self, Debug, Display}; +use std::ops::{Deref, DerefMut}; + +use crate::{ + internals::{ + borrow_flag::{BorrowFlag, BorrowState}, + GcCell, + }, + trace::Trace, +}; + +/// A wrapper type for an immutably borrowed value from a `GcCell`. +pub struct GcCellRef<'a, T: ?Sized + 'static> { + pub(crate) flags: &'a Cell, + pub(crate) value: &'a T, +} + +impl<'a, T: ?Sized> GcCellRef<'a, T> { + /// Copies a `GcCellRef`. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `GcCellRef::clone(...)`. A `Clone` implementation or a method + /// would interfere with the use of `c.borrow().clone()` to clone + /// the contents of a `GcCell`. + #[inline] + pub fn clone(orig: &GcCellRef<'a, T>) -> GcCellRef<'a, T> { + orig.flags.set(orig.flags.get().add_reading()); + GcCellRef { + flags: orig.flags, + value: orig.value, + } + } + + /// Makes a new `GcCellRef` from a component of the borrowed data. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as `GcCellRef::map(...)`. + /// A method would interfere with methods of the same name on the contents + /// of a `GcCellRef` used through `Deref`. + #[inline] + pub fn map(orig: Self, f: F) -> GcCellRef<'a, U> + where + U: ?Sized, + F: FnOnce(&T) -> &U, + { + let ret = GcCellRef { + flags: orig.flags, + value: f(orig.value), + }; + + // We have to tell the compiler not to call the destructor of GcCellRef, + // because it will update the borrow flags. + std::mem::forget(orig); + + ret + } + + /// Splits a `GcCellRef` into multiple `GcCellRef`s for different components of the borrowed data. + /// + /// The `GcCell` is already immutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as GcCellRef::map_split(...). + /// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`. 
+ #[inline] + pub fn map_split(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>) + where + U: ?Sized, + V: ?Sized, + F: FnOnce(&T) -> (&U, &V), + { + let (a, b) = f(orig.value); + + orig.flags.set(orig.flags.get().add_reading()); + + let ret = ( + GcCellRef { + flags: orig.flags, + value: a, + }, + GcCellRef { + flags: orig.flags, + value: b, + }, + ); + + // We have to tell the compiler not to call the destructor of GcCellRef, + // because it will update the borrow flags. + std::mem::forget(orig); + + ret + } +} + +impl<'a, T: ?Sized> Deref for GcCellRef<'a, T> { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + self.value + } +} + +impl<'a, T: ?Sized> Drop for GcCellRef<'a, T> { + fn drop(&mut self) { + debug_assert!(self.flags.get().borrowed() == BorrowState::Reading); + self.flags.set(self.flags.get().sub_reading()); + } +} + +impl<'a, T: ?Sized + Debug> Debug for GcCellRef<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(&**self, f) + } +} + +impl<'a, T: ?Sized + Display> Display for GcCellRef<'a, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&**self, f) + } +} + +/// A wrapper type for a mutably borrowed value from a `GcCell`. +pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> { + pub(crate) gc_cell: &'a GcCell, + pub(crate) value: &'a mut U, +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> { + /// Makes a new `GcCellRefMut` for a component of the borrowed data, e.g., an enum + /// variant. + /// + /// The `GcCellRefMut` is already mutably borrowed, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `GcCellRefMut::map(...)`. A method would interfere with methods of the same + /// name on the contents of a `GcCell` used through `Deref`. + #[inline] + pub fn map(orig: Self, f: F) -> GcCellRefMut<'a, T, V> + where + V: ?Sized, + F: FnOnce(&mut U) -> &mut V, + { + let value = unsafe { &mut *(orig.value as *mut U) }; + + let ret = GcCellRefMut { + gc_cell: orig.gc_cell, + value: f(value), + }; + + // We have to tell the compiler not to call the destructor of GcCellRefMut, + // because it will update the borrow flags. + std::mem::forget(orig); + + ret + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'a, T, U> { + type Target = U; + + #[inline] + fn deref(&self) -> &U { + self.value + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'a, T, U> { + #[inline] + fn deref_mut(&mut self) -> &mut U { + self.value + } +} + +impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> { + #[inline] + fn drop(&mut self) { + debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing); + // Restore the rooted state of the GcCell's contents to the state of the GcCell. + // During the lifetime of the GcCellRefMut, the GcCell's contents are rooted. 
+ if !self.gc_cell.flags.get().rooted() { + unsafe { + (*self.gc_cell.cell.get()).unroot(); + } + } + self.gc_cell + .flags + .set(self.gc_cell.flags.get().set_unused()); + } +} + +impl<'a, T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'a, T, U> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Debug::fmt(&*(self.deref()), f) + } +} + +impl<'a, T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'a, T, U> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&**self, f) + } +} + +unsafe impl Send for GcCell {} + +impl Clone for GcCell { + #[inline] + fn clone(&self) -> Self { + Self::new(self.borrow().clone()) + } +} + +impl Default for GcCell { + #[inline] + fn default() -> Self { + Self::new(Default::default()) + } +} + +impl PartialEq for GcCell { + #[inline(always)] + fn eq(&self, other: &Self) -> bool { + *self.borrow() == *other.borrow() + } +} + +impl Eq for GcCell {} + +impl PartialOrd for GcCell { + #[inline(always)] + fn partial_cmp(&self, other: &Self) -> Option { + (*self.borrow()).partial_cmp(&*other.borrow()) + } + + #[inline(always)] + fn lt(&self, other: &Self) -> bool { + *self.borrow() < *other.borrow() + } + + #[inline(always)] + fn le(&self, other: &Self) -> bool { + *self.borrow() <= *other.borrow() + } + + #[inline(always)] + fn gt(&self, other: &Self) -> bool { + *self.borrow() > *other.borrow() + } + + #[inline(always)] + fn ge(&self, other: &Self) -> bool { + *self.borrow() >= *other.borrow() + } +} + +impl Ord for GcCell { + #[inline] + fn cmp(&self, other: &GcCell) -> Ordering { + (*self.borrow()).cmp(&*other.borrow()) + } +} + +impl Debug for GcCell { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.flags.get().borrowed() { + BorrowState::Unused | BorrowState::Reading => f + .debug_struct("GcCell") + .field("value", &self.borrow()) + .finish(), + BorrowState::Writing => f + .debug_struct("GcCell") + .field("value", &"") + .finish(), + } + } +} diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index c7252fdbd9a..0fc9088f0b4 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -506,6 +506,9 @@ impl Collector { // Drops every node let unmarked_node = Box::from_raw(node.as_ptr()); sweep_head.set(unmarked_node.header.next.take()); + + // Need to stay consistent when it comes to `mem::forget` approach vs. Drop in place + mem::forget(unmarked_node) } } } diff --git a/boa_gc/tests/promotions.rs b/boa_gc/tests/promotions.rs index a75023baa13..04cfa1ccacd 100644 --- a/boa_gc/tests/promotions.rs +++ b/boa_gc/tests/promotions.rs @@ -7,7 +7,7 @@ fn generational_promo_one() { let mut storage = Vec::new(); // Super basic loop that loads bytes and force collections - for i in 0..200 as usize { + for i in 0..15000 as usize { let gc = BoaAlloc::new(i); storage.push(gc); } From e5f0066ac4c07d4caa6b715614bbdcb57b440270 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 09:11:12 -0400 Subject: [PATCH 17/55] Remove unneeded comment line --- boa_gc/src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 0fc9088f0b4..4e4e4585550 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -1,5 +1,4 @@ //! Garbage collector for the Boa JavaScript engine. -//! 
#![allow( clippy::let_unit_value, From 6339992b6752262022aaa22d339a080aec6ccec0 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 09:14:56 -0400 Subject: [PATCH 18/55] cargo.toml -> Cargo.toml --- boa_gc/derive_macros/{cargo.toml => Cargo.toml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename boa_gc/derive_macros/{cargo.toml => Cargo.toml} (100%) diff --git a/boa_gc/derive_macros/cargo.toml b/boa_gc/derive_macros/Cargo.toml similarity index 100% rename from boa_gc/derive_macros/cargo.toml rename to boa_gc/derive_macros/Cargo.toml From b20bc257c242ca9518bfd57a9b48529d52c4a259 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 12:32:50 -0400 Subject: [PATCH 19/55] Changed back to drop_in_place on non-dump --- boa_gc/src/gc_box.rs | 5 +- boa_gc/src/internals/cell_ref.rs | 281 ------------------------------- boa_gc/src/lib.rs | 9 - boa_gc/src/pointers/gc_ptr.rs | 5 +- 4 files changed, 6 insertions(+), 294 deletions(-) delete mode 100644 boa_gc/src/internals/cell_ref.rs diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index 6e5ca6146ce..e1d16062984 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -63,7 +63,10 @@ impl GcBoxHeader { #[inline] pub fn dec_roots(&self) { - self.roots.set(self.roots.get() - 1) // no underflow check + // Underflow check as a stop gap for current issue when dropping + if self.roots.get() > 0 { + self.roots.set(self.roots.get() - 1) + } } #[inline] diff --git a/boa_gc/src/internals/cell_ref.rs b/boa_gc/src/internals/cell_ref.rs deleted file mode 100644 index 3c9de15a52d..00000000000 --- a/boa_gc/src/internals/cell_ref.rs +++ /dev/null @@ -1,281 +0,0 @@ -//! Implementation of a garbage collected cell reference -use std::cell::Cell; -use std::cmp::Ordering; -use std::fmt::{self, Debug, Display}; -use std::ops::{Deref, DerefMut}; - -use crate::{ - internals::{ - borrow_flag::{BorrowFlag, BorrowState}, - GcCell, - }, - trace::Trace, -}; - -/// A wrapper type for an immutably borrowed value from a `GcCell`. -pub struct GcCellRef<'a, T: ?Sized + 'static> { - pub(crate) flags: &'a Cell, - pub(crate) value: &'a T, -} - -impl<'a, T: ?Sized> GcCellRef<'a, T> { - /// Copies a `GcCellRef`. - /// - /// The `GcCell` is already immutably borrowed, so this cannot fail. - /// - /// This is an associated function that needs to be used as - /// `GcCellRef::clone(...)`. A `Clone` implementation or a method - /// would interfere with the use of `c.borrow().clone()` to clone - /// the contents of a `GcCell`. - #[inline] - pub fn clone(orig: &GcCellRef<'a, T>) -> GcCellRef<'a, T> { - orig.flags.set(orig.flags.get().add_reading()); - GcCellRef { - flags: orig.flags, - value: orig.value, - } - } - - /// Makes a new `GcCellRef` from a component of the borrowed data. - /// - /// The `GcCell` is already immutably borrowed, so this cannot fail. - /// - /// This is an associated function that needs to be used as `GcCellRef::map(...)`. - /// A method would interfere with methods of the same name on the contents - /// of a `GcCellRef` used through `Deref`. - #[inline] - pub fn map(orig: Self, f: F) -> GcCellRef<'a, U> - where - U: ?Sized, - F: FnOnce(&T) -> &U, - { - let ret = GcCellRef { - flags: orig.flags, - value: f(orig.value), - }; - - // We have to tell the compiler not to call the destructor of GcCellRef, - // because it will update the borrow flags. - std::mem::forget(orig); - - ret - } - - /// Splits a `GcCellRef` into multiple `GcCellRef`s for different components of the borrowed data. 
- /// - /// The `GcCell` is already immutably borrowed, so this cannot fail. - /// - /// This is an associated function that needs to be used as GcCellRef::map_split(...). - /// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`. - #[inline] - pub fn map_split(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>) - where - U: ?Sized, - V: ?Sized, - F: FnOnce(&T) -> (&U, &V), - { - let (a, b) = f(orig.value); - - orig.flags.set(orig.flags.get().add_reading()); - - let ret = ( - GcCellRef { - flags: orig.flags, - value: a, - }, - GcCellRef { - flags: orig.flags, - value: b, - }, - ); - - // We have to tell the compiler not to call the destructor of GcCellRef, - // because it will update the borrow flags. - std::mem::forget(orig); - - ret - } -} - -impl<'a, T: ?Sized> Deref for GcCellRef<'a, T> { - type Target = T; - - #[inline] - fn deref(&self) -> &T { - self.value - } -} - -impl<'a, T: ?Sized> Drop for GcCellRef<'a, T> { - fn drop(&mut self) { - debug_assert!(self.flags.get().borrowed() == BorrowState::Reading); - self.flags.set(self.flags.get().sub_reading()); - } -} - -impl<'a, T: ?Sized + Debug> Debug for GcCellRef<'a, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Debug::fmt(&**self, f) - } -} - -impl<'a, T: ?Sized + Display> Display for GcCellRef<'a, T> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Display::fmt(&**self, f) - } -} - -/// A wrapper type for a mutably borrowed value from a `GcCell`. -pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static, U: ?Sized = T> { - pub(crate) gc_cell: &'a GcCell, - pub(crate) value: &'a mut U, -} - -impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> { - /// Makes a new `GcCellRefMut` for a component of the borrowed data, e.g., an enum - /// variant. - /// - /// The `GcCellRefMut` is already mutably borrowed, so this cannot fail. - /// - /// This is an associated function that needs to be used as - /// `GcCellRefMut::map(...)`. A method would interfere with methods of the same - /// name on the contents of a `GcCell` used through `Deref`. - #[inline] - pub fn map(orig: Self, f: F) -> GcCellRefMut<'a, T, V> - where - V: ?Sized, - F: FnOnce(&mut U) -> &mut V, - { - let value = unsafe { &mut *(orig.value as *mut U) }; - - let ret = GcCellRefMut { - gc_cell: orig.gc_cell, - value: f(value), - }; - - // We have to tell the compiler not to call the destructor of GcCellRefMut, - // because it will update the borrow flags. - std::mem::forget(orig); - - ret - } -} - -impl<'a, T: Trace + ?Sized, U: ?Sized> Deref for GcCellRefMut<'a, T, U> { - type Target = U; - - #[inline] - fn deref(&self) -> &U { - self.value - } -} - -impl<'a, T: Trace + ?Sized, U: ?Sized> DerefMut for GcCellRefMut<'a, T, U> { - #[inline] - fn deref_mut(&mut self) -> &mut U { - self.value - } -} - -impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> { - #[inline] - fn drop(&mut self) { - debug_assert!(self.gc_cell.flags.get().borrowed() == BorrowState::Writing); - // Restore the rooted state of the GcCell's contents to the state of the GcCell. - // During the lifetime of the GcCellRefMut, the GcCell's contents are rooted. 
- if !self.gc_cell.flags.get().rooted() { - unsafe { - (*self.gc_cell.cell.get()).unroot(); - } - } - self.gc_cell - .flags - .set(self.gc_cell.flags.get().set_unused()); - } -} - -impl<'a, T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'a, T, U> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Debug::fmt(&*(self.deref()), f) - } -} - -impl<'a, T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'a, T, U> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Display::fmt(&**self, f) - } -} - -unsafe impl Send for GcCell {} - -impl Clone for GcCell { - #[inline] - fn clone(&self) -> Self { - Self::new(self.borrow().clone()) - } -} - -impl Default for GcCell { - #[inline] - fn default() -> Self { - Self::new(Default::default()) - } -} - -impl PartialEq for GcCell { - #[inline(always)] - fn eq(&self, other: &Self) -> bool { - *self.borrow() == *other.borrow() - } -} - -impl Eq for GcCell {} - -impl PartialOrd for GcCell { - #[inline(always)] - fn partial_cmp(&self, other: &Self) -> Option { - (*self.borrow()).partial_cmp(&*other.borrow()) - } - - #[inline(always)] - fn lt(&self, other: &Self) -> bool { - *self.borrow() < *other.borrow() - } - - #[inline(always)] - fn le(&self, other: &Self) -> bool { - *self.borrow() <= *other.borrow() - } - - #[inline(always)] - fn gt(&self, other: &Self) -> bool { - *self.borrow() > *other.borrow() - } - - #[inline(always)] - fn ge(&self, other: &Self) -> bool { - *self.borrow() >= *other.borrow() - } -} - -impl Ord for GcCell { - #[inline] - fn cmp(&self, other: &GcCell) -> Ordering { - (*self.borrow()).cmp(&*other.borrow()) - } -} - -impl Debug for GcCell { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.flags.get().borrowed() { - BorrowState::Unused | BorrowState::Reading => f - .debug_struct("GcCell") - .field("value", &self.borrow()) - .finish(), - BorrowState::Writing => f - .debug_struct("GcCell") - .field("value", &"") - .finish(), - } - } -} diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 4e4e4585550..165cfe17aef 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -452,10 +452,6 @@ impl Collector { *heap_bytes -= unallocated_bytes; *total_bytes -= unallocated_bytes; sweep_head.set(unmarked_node.header.next.take()); - // We have now finalized and taken care of the below node. We now forget the node - // to remove the node without calling the destructor on Gc or any other value - // since calling core::ptr::drop_in_place will trigger `Gc::drop`, which accesses `Gc::inner()` - mem::forget(unmarked_node) } } @@ -483,10 +479,6 @@ impl Collector { *bytes_allocated -= unallocated_bytes; *total_allocated -= unallocated_bytes; sweep_head.set(unmarked_node.header.next.take()); - // We have now finalized and taken care of the below node. We now forget the node - // to remove the node without calling the destructor on Gc or any other value - // since calling core::ptr::drop_in_place will trigger `Gc::drop`, which accesses `Gc::inner()` - mem::forget(unmarked_node) } } } @@ -506,7 +498,6 @@ impl Collector { let unmarked_node = Box::from_raw(node.as_ptr()); sweep_head.set(unmarked_node.header.next.take()); - // Need to stay consistent when it comes to `mem::forget` approach vs. 
Drop in place mem::forget(unmarked_node) } } diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index 14201e82811..3f52161890b 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -8,9 +8,8 @@ use std::ptr::{self, NonNull}; use std::rc::Rc; use crate::gc_box::GcBox; -use crate::BoaAlloc; +use crate::{BoaAlloc, finalizer_safe}; use crate::{ - finalizer_safe, trace::{Finalize, Trace}, }; @@ -164,7 +163,7 @@ impl Drop for Gc { #[inline] fn drop(&mut self) { // If this pointer was a root, we should unroot it. - if self.rooted() { + if self.rooted() & finalizer_safe() { unsafe { self.inner().unroot_inner(); } From 61d09f1c175732d98194ab29d15514e4c846b61e Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 12:33:36 -0400 Subject: [PATCH 20/55] Rustfmt --- boa_gc/src/gc_box.rs | 2 +- boa_gc/src/pointers/gc_ptr.rs | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index e1d16062984..a0bd58b5a89 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -65,7 +65,7 @@ impl GcBoxHeader { pub fn dec_roots(&self) { // Underflow check as a stop gap for current issue when dropping if self.roots.get() > 0 { - self.roots.set(self.roots.get() - 1) + self.roots.set(self.roots.get() - 1) } } diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index 3f52161890b..96e09199f99 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -8,10 +8,8 @@ use std::ptr::{self, NonNull}; use std::rc::Rc; use crate::gc_box::GcBox; -use crate::{BoaAlloc, finalizer_safe}; -use crate::{ - trace::{Finalize, Trace}, -}; +use crate::trace::{Finalize, Trace}; +use crate::{finalizer_safe, BoaAlloc}; pub(crate) unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); From 06b2c81b652a19ac5ae0d484d181ba76cae02ace Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 13:11:34 -0400 Subject: [PATCH 21/55] Rebase and fixes --- boa_gc/tests/allocation.rs | 4 ++-- boa_gc/tests/promotions.rs | 4 ++-- boa_tester/Cargo.toml | 1 - boa_tester/src/exec/mod.rs | 4 ++-- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/boa_gc/tests/allocation.rs b/boa_gc/tests/allocation.rs index 292c239131d..35f8a251f16 100644 --- a/boa_gc/tests/allocation.rs +++ b/boa_gc/tests/allocation.rs @@ -2,7 +2,7 @@ use boa_gc::{force_collect, BoaAlloc, GcTester}; #[test] fn gc_basic_cell_allocation() { - let gc_cell = BoaAlloc::new_cell(16 as u16); + let gc_cell = BoaAlloc::new_cell(16_u16); force_collect(); GcTester::assert_collections(1); @@ -12,7 +12,7 @@ fn gc_basic_cell_allocation() { #[test] fn gc_basic_pointer_alloc() { - let gc = BoaAlloc::new(16 as u8); + let gc = BoaAlloc::new(16_u8); force_collect(); GcTester::assert_collections(1); diff --git a/boa_gc/tests/promotions.rs b/boa_gc/tests/promotions.rs index 04cfa1ccacd..0db2fde05e7 100644 --- a/boa_gc/tests/promotions.rs +++ b/boa_gc/tests/promotions.rs @@ -7,7 +7,7 @@ fn generational_promo_one() { let mut storage = Vec::new(); // Super basic loop that loads bytes and force collections - for i in 0..15000 as usize { + for i in 0..15000_usize { let gc = BoaAlloc::new(i); storage.push(gc); } @@ -22,7 +22,7 @@ fn generational_promo_one() { #[test] fn generational_promo_two() { let mut storage = Vec::new(); - for i in 0..2000 as usize { + for i in 0..2000_usize { let gc = BoaAlloc::new(i); if i % 10 == 0 { storage.push(gc.clone()) diff --git 
a/boa_tester/Cargo.toml b/boa_tester/Cargo.toml index d3f9bc968f3..7f889169886 100644 --- a/boa_tester/Cargo.toml +++ b/boa_tester/Cargo.toml @@ -25,6 +25,5 @@ regex = "1.7.0" once_cell = "1.16.0" colored = "2.0.0" fxhash = "0.2.1" -gc = { version = "0.4.1", features = ["derive"] } rayon = "1.5.3" anyhow = "1.0.66" diff --git a/boa_tester/src/exec/mod.rs b/boa_tester/src/exec/mod.rs index 4310c8219e0..9a79fe4de51 100644 --- a/boa_tester/src/exec/mod.rs +++ b/boa_tester/src/exec/mod.rs @@ -12,7 +12,7 @@ use boa_engine::{ builtins::JsArgs, object::FunctionBuilder, property::Attribute, Context, JsNativeErrorKind, JsResult, JsValue, }; -use boa_gc::{Cell, Finalize, Gc, Trace}; +use boa_gc::{BoaAlloc, Cell, Finalize, Gc, Trace}; use boa_parser::Parser; use colored::Colorize; use rayon::prelude::*; @@ -406,7 +406,7 @@ struct AsyncResult { impl Default for AsyncResult { fn default() -> Self { Self { - inner: Gc::new(Cell::new(Ok(()))), + inner: BoaAlloc::new_cell(Ok(())), } } } From cc976540303be01c24c46eaf2bd5d999e2d47eb8 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 23:07:01 -0400 Subject: [PATCH 22/55] Root management fixes --- boa_gc/src/lib.rs | 62 ++++++++++++----------------------- boa_gc/src/pointers/gc_ptr.rs | 2 +- 2 files changed, 22 insertions(+), 42 deletions(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 165cfe17aef..9a36626c10f 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -42,7 +42,6 @@ thread_local!(static BOA_GC: StdRefCell = StdRefCell::new( BoaGc { struct GcConfig { youth_threshold: usize, - youth_threshold_base: usize, adult_threshold: usize, growth_ratio: f64, youth_promo_age: u8, @@ -55,9 +54,8 @@ impl Default for GcConfig { fn default() -> Self { Self { youth_threshold: 4096, - youth_threshold_base: 4096, adult_threshold: 16384, - growth_ratio: 0.7, + growth_ratio: 0.8, youth_promo_age: 3, } } @@ -120,14 +118,13 @@ impl BoaAlloc { unsafe { Self::manage_state(&mut gc); - } - let gc_box = GcBox::new(value); + value.unroot(); + let gc_box = GcBox::new(value); - let element_size = mem::size_of_val::>(&gc_box); - let element_pointer = Box::into_raw(Box::from(gc_box)); + let element_size = mem::size_of_val::>(&gc_box); + let element_pointer = Box::into_raw(Box::from(gc_box)); - unsafe { let old_start = gc.youth_start.take(); (*element_pointer).set_header_pointer(old_start); gc.youth_start @@ -150,13 +147,14 @@ impl BoaAlloc { // triggers a collection if the state dictates it. 
unsafe { Self::manage_state(&mut gc); - } - let gc_box = GcBox::new(Cell::new(value)); - let element_size = mem::size_of_val::>>(&gc_box); - let element_pointer = Box::into_raw(Box::from(gc_box)); + let new_cell = Cell::new(value); + new_cell.unroot(); + + let gc_box = GcBox::new(new_cell); + let element_size = mem::size_of_val::>>(&gc_box); + let element_pointer = Box::into_raw(Box::from(gc_box)); - unsafe { let old_start = gc.youth_start.take(); (*element_pointer).set_header_pointer(old_start); gc.youth_start @@ -223,17 +221,15 @@ impl BoaAlloc { // Possibility here for `new_weak` that takes any value and creates a new WeakGc - pub(crate) unsafe fn promote_to_medium( - promotions: Vec>>, - gc: &mut BoaGc, - ) { + pub(crate) unsafe fn promote_to_medium(promotions: Vec>>, gc: &mut BoaGc) { let _timer = Profiler::global().start_event("Gc Promoting", "gc"); for node in promotions { - (*node.as_ptr()).set_header_pointer(gc.adult_start.take()); - let allocation_bytes = mem::size_of_val::>(&(*node.as_ptr())); + node.set_header_pointer(gc.adult_start.take()); + let allocation_bytes = mem::size_of_val::<_>(&node); gc.runtime.youth_bytes -= allocation_bytes; gc.runtime.adult_bytes += allocation_bytes; - gc.adult_start.set(Some(node)); + gc.adult_start + .set(Some(NonNull::new_unchecked(Box::into_raw(node)))); } } @@ -249,22 +245,6 @@ impl BoaAlloc { } } else if gc.runtime.youth_bytes > gc.config.youth_threshold { Collector::run_youth_collection(gc); - - // If we are constrained on the top of the stack, - // increase the size of capacity, so a garbage collection - // isn't triggered on every allocation - if gc.runtime.youth_bytes > gc.config.youth_threshold { - gc.config.youth_threshold = - (gc.runtime.youth_bytes as f64 / gc.config.growth_ratio) as usize - } - - // The young object threshold should only be raised in cases of high laod. It - // should retract back to base when the load lessens - if gc.runtime.youth_bytes < gc.config.youth_threshold_base - && gc.config.youth_threshold != gc.config.youth_threshold_base - { - gc.config.youth_threshold = gc.config.youth_threshold_base - } } } } @@ -429,7 +409,7 @@ impl Collector { heap_bytes: &mut usize, total_bytes: &mut usize, promotion_age: &u8, - ) -> Vec>> { + ) -> Vec>> { let _timer = Profiler::global().start_event("Gc Sweeping", "gc"); let _guard = DropGuard::new(); @@ -440,8 +420,9 @@ impl Collector { (*node.as_ptr()).header.unmark(); (*node.as_ptr()).header.inc_age(); if (*node.as_ptr()).header.age() >= *promotion_age { - sweep_head.set((*node.as_ptr()).header.next.take()); - promotions.push(node) + let promotion = Box::from_raw(node.as_ptr()); + sweep_head.set(promotion.header.next.take()); + promotions.push(promotion) } else { sweep_head = &(*node.as_ptr()).header.next; } @@ -491,14 +472,13 @@ impl Collector { unsafe fn drop_heap(heap_start: &StdCell>>>) { // Not initializing a dropguard since this should only be invoked when BOA_GC is being dropped. + let _guard = DropGuard::new(); let sweep_head = heap_start; while let Some(node) = sweep_head.get() { // Drops every node let unmarked_node = Box::from_raw(node.as_ptr()); sweep_head.set(unmarked_node.header.next.take()); - - mem::forget(unmarked_node) } } } diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index 96e09199f99..a60651ef322 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -161,7 +161,7 @@ impl Drop for Gc { #[inline] fn drop(&mut self) { // If this pointer was a root, we should unroot it. 
- if self.rooted() & finalizer_safe() { + if self.rooted() { unsafe { self.inner().unroot_inner(); } From bc66910547a3632c80b0846db58f02ccc4a5444e Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 23:53:01 -0400 Subject: [PATCH 23/55] Revert changes to promotions from last commit --- boa_gc/src/lib.rs | 21 ++++++++++----------- boa_gc/tests/promotions.rs | 2 +- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 9a36626c10f..b95e7c9cc72 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -119,7 +119,6 @@ impl BoaAlloc { unsafe { Self::manage_state(&mut gc); - value.unroot(); let gc_box = GcBox::new(value); let element_size = mem::size_of_val::>(&gc_box); @@ -127,6 +126,8 @@ impl BoaAlloc { let old_start = gc.youth_start.take(); (*element_pointer).set_header_pointer(old_start); + (*element_pointer).value().unroot(); + gc.youth_start .set(Some(NonNull::new_unchecked(element_pointer))); @@ -149,7 +150,6 @@ impl BoaAlloc { Self::manage_state(&mut gc); let new_cell = Cell::new(value); - new_cell.unroot(); let gc_box = GcBox::new(new_cell); let element_size = mem::size_of_val::>>(&gc_box); @@ -157,6 +157,7 @@ impl BoaAlloc { let old_start = gc.youth_start.take(); (*element_pointer).set_header_pointer(old_start); + (*element_pointer).value().unroot(); gc.youth_start .set(Some(NonNull::new_unchecked(element_pointer))); @@ -221,15 +222,14 @@ impl BoaAlloc { // Possibility here for `new_weak` that takes any value and creates a new WeakGc - pub(crate) unsafe fn promote_to_medium(promotions: Vec>>, gc: &mut BoaGc) { + pub(crate) unsafe fn promote_to_medium(promotions: Vec>>, gc: &mut BoaGc) { let _timer = Profiler::global().start_event("Gc Promoting", "gc"); for node in promotions { - node.set_header_pointer(gc.adult_start.take()); - let allocation_bytes = mem::size_of_val::<_>(&node); + (*node.as_ptr()).set_header_pointer(gc.adult_start.take()); + let allocation_bytes = mem::size_of_val::>(&(*node.as_ptr())); gc.runtime.youth_bytes -= allocation_bytes; gc.runtime.adult_bytes += allocation_bytes; - gc.adult_start - .set(Some(NonNull::new_unchecked(Box::into_raw(node)))); + gc.adult_start.set(Some(node)); } } @@ -409,7 +409,7 @@ impl Collector { heap_bytes: &mut usize, total_bytes: &mut usize, promotion_age: &u8, - ) -> Vec>> { + ) -> Vec>> { let _timer = Profiler::global().start_event("Gc Sweeping", "gc"); let _guard = DropGuard::new(); @@ -420,9 +420,8 @@ impl Collector { (*node.as_ptr()).header.unmark(); (*node.as_ptr()).header.inc_age(); if (*node.as_ptr()).header.age() >= *promotion_age { - let promotion = Box::from_raw(node.as_ptr()); - sweep_head.set(promotion.header.next.take()); - promotions.push(promotion) + sweep_head.set((*node.as_ptr()).header.next.take()); + promotions.push(node) } else { sweep_head = &(*node.as_ptr()).header.next; } diff --git a/boa_gc/tests/promotions.rs b/boa_gc/tests/promotions.rs index 0db2fde05e7..68d3c5fc3ea 100644 --- a/boa_gc/tests/promotions.rs +++ b/boa_gc/tests/promotions.rs @@ -25,7 +25,7 @@ fn generational_promo_two() { for i in 0..2000_usize { let gc = BoaAlloc::new(i); if i % 10 == 0 { - storage.push(gc.clone()) + storage.push(gc) } } GcTester::assert_collection_floor(3); From a53f67b1a26ef8e9652d53039fb9a0f5fd93e875 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sat, 5 Nov 2022 23:56:45 -0400 Subject: [PATCH 24/55] Rustfmt --- boa_gc/src/lib.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index b95e7c9cc72..23e7821e42b 
100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -222,7 +222,10 @@ impl BoaAlloc { // Possibility here for `new_weak` that takes any value and creates a new WeakGc - pub(crate) unsafe fn promote_to_medium(promotions: Vec>>, gc: &mut BoaGc) { + pub(crate) unsafe fn promote_to_medium( + promotions: Vec>>, + gc: &mut BoaGc, + ) { let _timer = Profiler::global().start_event("Gc Promoting", "gc"); for node in promotions { (*node.as_ptr()).set_header_pointer(gc.adult_start.take()); From 3d3bac67949d976b6194ff70692175fa1c60aa52 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 6 Nov 2022 08:17:13 -0500 Subject: [PATCH 25/55] Adding boa focused tests --- boa_gc/tests/boa.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 boa_gc/tests/boa.rs diff --git a/boa_gc/tests/boa.rs b/boa_gc/tests/boa.rs new file mode 100644 index 00000000000..a274396a2ff --- /dev/null +++ b/boa_gc/tests/boa.rs @@ -0,0 +1,11 @@ +use boa_gc::BoaAlloc; + +#[test] +fn boa_borrow_mut_test() { + let v = BoaAlloc::new_cell(Vec::new()); + + for _ in 1..=259 { + let cell = BoaAlloc::new_cell([0u8; 10]); + v.borrow_mut().push(cell); + } +} \ No newline at end of file From 9ab82defcaf72060d32990936b8118f57eeffe69 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 6 Nov 2022 18:59:01 -0500 Subject: [PATCH 26/55] Removed divide in structs --- boa_gc/src/lib.rs | 41 +++++++++++++++-------------------- boa_gc/src/pointers/gc_ptr.rs | 6 +---- boa_gc/tests/allocation.rs | 4 ++-- boa_gc/tests/boa.rs | 2 +- boa_gc/tests/promotions.rs | 2 ++ 5 files changed, 23 insertions(+), 32 deletions(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 23e7821e42b..cbfcbe32ddb 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -44,7 +44,6 @@ struct GcConfig { youth_threshold: usize, adult_threshold: usize, growth_ratio: f64, - youth_promo_age: u8, } // Setting the defaults to an arbitrary value currently. 
@@ -54,9 +53,8 @@ impl Default for GcConfig { fn default() -> Self { Self { youth_threshold: 4096, - adult_threshold: 16384, + adult_threshold: 1024, growth_ratio: 0.8, - youth_promo_age: 3, } } } @@ -124,15 +122,15 @@ impl BoaAlloc { let element_size = mem::size_of_val::>(&gc_box); let element_pointer = Box::into_raw(Box::from(gc_box)); - let old_start = gc.youth_start.take(); + let old_start = gc.adult_start.take(); (*element_pointer).set_header_pointer(old_start); (*element_pointer).value().unroot(); - gc.youth_start + gc.adult_start .set(Some(NonNull::new_unchecked(element_pointer))); gc.runtime.total_bytes_allocated += element_size; - gc.runtime.youth_bytes += element_size; + gc.runtime.adult_bytes += element_size; Gc::new(NonNull::new_unchecked(element_pointer)) } @@ -155,13 +153,14 @@ impl BoaAlloc { let element_size = mem::size_of_val::>>(&gc_box); let element_pointer = Box::into_raw(Box::from(gc_box)); - let old_start = gc.youth_start.take(); + let old_start = gc.adult_start.take(); (*element_pointer).set_header_pointer(old_start); (*element_pointer).value().unroot(); - gc.youth_start + + gc.adult_start .set(Some(NonNull::new_unchecked(element_pointer))); - gc.runtime.youth_bytes += element_size; + gc.runtime.adult_bytes += element_size; gc.runtime.total_bytes_allocated += element_size; Gc::new(NonNull::new_unchecked(element_pointer)) @@ -184,6 +183,8 @@ impl BoaAlloc { let old_start = gc.youth_start.take(); (*element_pointer).set_header_pointer(old_start); + (*element_pointer).value().unroot(); + gc.youth_start .set(Some(NonNull::new_unchecked(element_pointer))); @@ -210,6 +211,8 @@ impl BoaAlloc { let old_start = gc.youth_start.take(); (*element_pointer).set_header_pointer(old_start); + (*element_pointer).value().unroot(); + gc.youth_start .set(Some(NonNull::new_unchecked(element_pointer))); @@ -247,7 +250,7 @@ impl BoaAlloc { (gc.runtime.adult_bytes as f64 / gc.config.growth_ratio) as usize } } else if gc.runtime.youth_bytes > gc.config.youth_threshold { - Collector::run_youth_collection(gc); + //Collector::run_youth_collection(gc); } } } @@ -270,17 +273,18 @@ impl Collector { let _timer = Profiler::global().start_event("Gc Youth Collection", "gc"); gc.runtime.collections += 1; let unreachable_nodes = Self::mark_heap(&gc.youth_start); + let _adults = Self::mark_heap(&gc.adult_start); if !unreachable_nodes.is_empty() { Self::finalize(unreachable_nodes); } // The returned unreachable vector must be filled with nodes that are for certain dead (these will be removed during the sweep) - let _finalized_unreachable_nodes = Self::mark_heap(&gc.youth_start); + let finalized_unreachable_nodes = Self::mark_heap(&gc.youth_start); + println!("yc: {}", finalized_unreachable_nodes.len()); let promotion_candidates = Self::sweep_with_promotions( &gc.youth_start, &mut gc.runtime.youth_bytes, &mut gc.runtime.total_bytes_allocated, - &gc.config.youth_promo_age, ); // Check if there are any candidates for promotion if !promotion_candidates.is_empty() { @@ -292,18 +296,13 @@ impl Collector { let _timer = Profiler::global().start_event("Gc Full Collection", "gc"); gc.runtime.collections += 1; let unreachable_adults = Self::mark_heap(&gc.adult_start); - let unreachable_youths = Self::mark_heap(&gc.youth_start); // Check if any unreachable nodes were found and finalize if !unreachable_adults.is_empty() { Self::finalize(unreachable_adults); } - if !unreachable_youths.is_empty() { - Self::finalize(unreachable_youths); - } let _final_unreachable_adults = Self::mark_heap(&gc.adult_start); - let 
_final_unreachable_youths = Self::mark_heap(&gc.youth_start); // Sweep both without promoting any values Self::sweep( @@ -311,11 +310,6 @@ impl Collector { &mut gc.runtime.adult_bytes, &mut gc.runtime.total_bytes_allocated, ); - Self::sweep( - &gc.youth_start, - &mut gc.runtime.youth_bytes, - &mut gc.runtime.total_bytes_allocated, - ); } pub(crate) unsafe fn mark_heap( @@ -411,7 +405,6 @@ impl Collector { heap_start: &StdCell>>>, heap_bytes: &mut usize, total_bytes: &mut usize, - promotion_age: &u8, ) -> Vec>> { let _timer = Profiler::global().start_event("Gc Sweeping", "gc"); let _guard = DropGuard::new(); @@ -422,7 +415,7 @@ impl Collector { if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); (*node.as_ptr()).header.inc_age(); - if (*node.as_ptr()).header.age() >= *promotion_age { + if (*node.as_ptr()).header.age() >= 3 { sweep_head.set((*node.as_ptr()).header.next.take()); promotions.push(node) } else { diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index a60651ef322..556d9620351 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -25,7 +25,6 @@ pub struct Gc { impl Gc { /// Constructs a new `Gc` with the given value. pub fn new(value: NonNull>) -> Self { - // TODO: Determine whether it's worth keeping `set_root` approach unsafe { let gc = Gc { inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), @@ -74,7 +73,6 @@ impl Gc { #[inline] fn inner_ptr(&self) -> *mut GcBox { - // Note: Initial `finalizer_safe` was asserted here. Needs to be determined whether this was the best practice assert!(finalizer_safe()); unsafe { clear_root_bit(self.inner_ptr.get()).as_ptr() } } @@ -106,7 +104,6 @@ unsafe impl Trace for Gc { #[inline] unsafe fn root(&self) { assert!(!self.rooted(), "Can't double-root a Gc"); - // Try to get inner before modifying our state. Inner may be // inaccessible due to this method being invoked during the sweeping // phase, and we don't want to modify our state before panicking. @@ -118,7 +115,6 @@ unsafe impl Trace for Gc { #[inline] unsafe fn unroot(&self) { assert!(self.rooted(), "Can't double-unroot a Gc"); - // Try to get inner before modifying our state. Inner may be // inaccessible due to this method being invoked during the sweeping // phase, and we don't want to modify our state before panicking. 
@@ -165,7 +161,7 @@ impl Drop for Gc { unsafe { self.inner().unroot_inner(); } - } + } } } diff --git a/boa_gc/tests/allocation.rs b/boa_gc/tests/allocation.rs index 35f8a251f16..b1034f647cf 100644 --- a/boa_gc/tests/allocation.rs +++ b/boa_gc/tests/allocation.rs @@ -6,7 +6,7 @@ fn gc_basic_cell_allocation() { force_collect(); GcTester::assert_collections(1); - GcTester::assert_youth_bytes_allocated(); + GcTester::assert_adult_bytes_allocated(); assert_eq!(*gc_cell.borrow_mut(), 16); } @@ -16,7 +16,7 @@ fn gc_basic_pointer_alloc() { force_collect(); GcTester::assert_collections(1); - GcTester::assert_youth_bytes_allocated(); + GcTester::assert_adult_bytes_allocated(); assert_eq!(*gc, 16); drop(gc); diff --git a/boa_gc/tests/boa.rs b/boa_gc/tests/boa.rs index a274396a2ff..1173ac0abbb 100644 --- a/boa_gc/tests/boa.rs +++ b/boa_gc/tests/boa.rs @@ -8,4 +8,4 @@ fn boa_borrow_mut_test() { let cell = BoaAlloc::new_cell([0u8; 10]); v.borrow_mut().push(cell); } -} \ No newline at end of file +} diff --git a/boa_gc/tests/promotions.rs b/boa_gc/tests/promotions.rs index 68d3c5fc3ea..2413723cd05 100644 --- a/boa_gc/tests/promotions.rs +++ b/boa_gc/tests/promotions.rs @@ -2,6 +2,7 @@ use boa_gc::{force_collect, BoaAlloc, GcTester}; +#[ignore] #[test] fn generational_promo_one() { let mut storage = Vec::new(); @@ -19,6 +20,7 @@ fn generational_promo_one() { GcTester::assert_empty_gc() } +#[ignore] #[test] fn generational_promo_two() { let mut storage = Vec::new(); From 196e9632b0a229274013fa10f1e83a4337d5924d Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 6 Nov 2022 21:23:51 -0500 Subject: [PATCH 27/55] Rebase and complete remove generational setup --- boa_gc/src/gc_box.rs | 28 ++----------- boa_gc/src/lib.rs | 76 ----------------------------------- boa_gc/src/pointers/gc_ptr.rs | 2 +- 3 files changed, 5 insertions(+), 101 deletions(-) diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index a0bd58b5a89..1d10312e14c 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -3,17 +3,13 @@ use std::cell::Cell; use std::ptr::{self, NonNull}; // Age and Weak Flags -const WEAK_MASK: u8 = 1 << 7; -const AGE_MASK: u8 = !WEAK_MASK; -const AGE_MAX: u8 = AGE_MASK; - const MARK_MASK: usize = 1 << (usize::BITS - 1); const ROOTS_MASK: usize = !MARK_MASK; const ROOTS_MAX: usize = ROOTS_MASK; pub(crate) struct GcBoxHeader { roots: Cell, - cycle_age: Cell, + weak: Cell, pub(crate) next: Cell>>>, } @@ -23,7 +19,7 @@ impl GcBoxHeader { // TODO: implement a way for a cell to start out weak with WEAK_MASK GcBoxHeader { roots: Cell::new(1), - cycle_age: Cell::new(0_u8), + weak: Cell::new(false), next: Cell::new(None), } } @@ -31,10 +27,9 @@ impl GcBoxHeader { #[inline] pub fn new_weak() -> Self { // Set weak_flag - let cycle_age = WEAK_MASK; GcBoxHeader { roots: Cell::new(0), - cycle_age: Cell::new(cycle_age), + weak: Cell::new(true), next: Cell::new(None), } } @@ -84,24 +79,9 @@ impl GcBoxHeader { self.roots.set(self.roots.get() & !MARK_MASK) } - #[inline] - pub fn age(&self) -> u8 { - self.cycle_age.get() & AGE_MASK - } - - #[inline] - pub fn inc_age(&self) { - let age = self.cycle_age.get(); - - // There is no need to increment the age after hitting max age - if (age & AGE_MASK) < AGE_MAX { - self.cycle_age.set(age + 1); - } - } - #[inline] pub fn is_ephemeron(&self) -> bool { - self.cycle_age.get() & WEAK_MASK != 0 + self.weak.get() } } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index cbfcbe32ddb..c83a578c7ae 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -30,7 +30,6 @@ pub use 
pointers::{Gc, WeakGc, WeakPair}; pub type GcPointer = NonNull>; -// TODO: Determine if thread local variables are the correct approach vs an initialized structure thread_local!(pub static EPHEMERON_QUEUE: StdCell>> = StdCell::new(None)); thread_local!(pub static GC_DROPPING: StdCell = StdCell::new(false)); thread_local!(static BOA_GC: StdRefCell = StdRefCell::new( BoaGc { @@ -223,22 +222,6 @@ impl BoaAlloc { }) } - // Possibility here for `new_weak` that takes any value and creates a new WeakGc - - pub(crate) unsafe fn promote_to_medium( - promotions: Vec>>, - gc: &mut BoaGc, - ) { - let _timer = Profiler::global().start_event("Gc Promoting", "gc"); - for node in promotions { - (*node.as_ptr()).set_header_pointer(gc.adult_start.take()); - let allocation_bytes = mem::size_of_val::>(&(*node.as_ptr())); - gc.runtime.youth_bytes -= allocation_bytes; - gc.runtime.adult_bytes += allocation_bytes; - gc.adult_start.set(Some(node)); - } - } - unsafe fn manage_state(gc: &mut BoaGc) { if gc.runtime.adult_bytes > gc.config.adult_threshold { Collector::run_full_collection(gc); @@ -269,29 +252,6 @@ impl BoaAlloc { pub struct Collector; impl Collector { - pub(crate) unsafe fn run_youth_collection(gc: &mut BoaGc) { - let _timer = Profiler::global().start_event("Gc Youth Collection", "gc"); - gc.runtime.collections += 1; - let unreachable_nodes = Self::mark_heap(&gc.youth_start); - let _adults = Self::mark_heap(&gc.adult_start); - - if !unreachable_nodes.is_empty() { - Self::finalize(unreachable_nodes); - } - // The returned unreachable vector must be filled with nodes that are for certain dead (these will be removed during the sweep) - let finalized_unreachable_nodes = Self::mark_heap(&gc.youth_start); - println!("yc: {}", finalized_unreachable_nodes.len()); - let promotion_candidates = Self::sweep_with_promotions( - &gc.youth_start, - &mut gc.runtime.youth_bytes, - &mut gc.runtime.total_bytes_allocated, - ); - // Check if there are any candidates for promotion - if !promotion_candidates.is_empty() { - BoaAlloc::promote_to_medium(promotion_candidates, gc); - } - } - pub(crate) unsafe fn run_full_collection(gc: &mut BoaGc) { let _timer = Profiler::global().start_event("Gc Full Collection", "gc"); gc.runtime.collections += 1; @@ -401,39 +361,6 @@ impl Collector { } } - unsafe fn sweep_with_promotions( - heap_start: &StdCell>>>, - heap_bytes: &mut usize, - total_bytes: &mut usize, - ) -> Vec>> { - let _timer = Profiler::global().start_event("Gc Sweeping", "gc"); - let _guard = DropGuard::new(); - - let mut promotions = Vec::new(); - let mut sweep_head = heap_start; - while let Some(node) = sweep_head.get() { - if (*node.as_ptr()).is_marked() { - (*node.as_ptr()).header.unmark(); - (*node.as_ptr()).header.inc_age(); - if (*node.as_ptr()).header.age() >= 3 { - sweep_head.set((*node.as_ptr()).header.next.take()); - promotions.push(node) - } else { - sweep_head = &(*node.as_ptr()).header.next; - } - } else { - // Drops occur here - let unmarked_node = Box::from_raw(node.as_ptr()); - let unallocated_bytes = mem::size_of_val::>(&*unmarked_node); - *heap_bytes -= unallocated_bytes; - *total_bytes -= unallocated_bytes; - sweep_head.set(unmarked_node.header.next.take()); - } - } - - promotions - } - unsafe fn sweep( heap_start: &StdCell>>>, bytes_allocated: &mut usize, @@ -446,7 +373,6 @@ impl Collector { while let Some(node) = sweep_head.get() { if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); - (*node.as_ptr()).header.inc_age(); sweep_head = &(*node.as_ptr()).header.next; } else { // Drops occur 
here @@ -490,8 +416,6 @@ pub fn force_collect() { unsafe { if gc.runtime.adult_bytes > 0 { Collector::run_full_collection(&mut *gc) - } else { - Collector::run_youth_collection(&mut *gc) } } }) diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index 556d9620351..ee3e1ad02a2 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -161,7 +161,7 @@ impl Drop for Gc { unsafe { self.inner().unroot_inner(); } - } + } } } From 602c3c1e0e8a387c92beb31ff3526923bdc26157 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 6 Nov 2022 22:02:24 -0500 Subject: [PATCH 28/55] Cleaning up remaining youth_heap variables --- boa_gc/src/lib.rs | 25 ++++--------------------- boa_gc/tests/promotions.rs | 1 - 2 files changed, 4 insertions(+), 22 deletions(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index c83a578c7ae..4417ea83d91 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -36,11 +36,9 @@ thread_local!(static BOA_GC: StdRefCell = StdRefCell::new( BoaGc { config: GcConfig::default(), runtime: GcRuntimeData::default(), adult_start: StdCell::new(None), - youth_start: StdCell::new(None), })); struct GcConfig { - youth_threshold: usize, adult_threshold: usize, growth_ratio: f64, } @@ -51,7 +49,6 @@ struct GcConfig { impl Default for GcConfig { fn default() -> Self { Self { - youth_threshold: 4096, adult_threshold: 1024, growth_ratio: 0.8, } @@ -62,7 +59,6 @@ impl Default for GcConfig { struct GcRuntimeData { collections: usize, total_bytes_allocated: usize, - youth_bytes: usize, adult_bytes: usize, } @@ -70,7 +66,6 @@ struct BoaGc { config: GcConfig, runtime: GcRuntimeData, adult_start: StdCell>, - youth_start: StdCell>, } impl Drop for BoaGc { @@ -180,11 +175,11 @@ impl BoaAlloc { let element_size = mem::size_of_val::>(&gc_box); let element_pointer = Box::into_raw(Box::from(gc_box)); - let old_start = gc.youth_start.take(); + let old_start = gc.adult_start.take(); (*element_pointer).set_header_pointer(old_start); (*element_pointer).value().unroot(); - gc.youth_start + gc.adult_start .set(Some(NonNull::new_unchecked(element_pointer))); gc.runtime.total_bytes_allocated += element_size; @@ -208,11 +203,11 @@ impl BoaAlloc { let element_size = mem::size_of_val::>(&gc_box); let element_pointer = Box::into_raw(Box::from(gc_box)); - let old_start = gc.youth_start.take(); + let old_start = gc.adult_start.take(); (*element_pointer).set_header_pointer(old_start); (*element_pointer).value().unroot(); - gc.youth_start + gc.adult_start .set(Some(NonNull::new_unchecked(element_pointer))); gc.runtime.total_bytes_allocated += element_size; @@ -232,8 +227,6 @@ impl BoaAlloc { gc.config.adult_threshold = (gc.runtime.adult_bytes as f64 / gc.config.growth_ratio) as usize } - } else if gc.runtime.youth_bytes > gc.config.youth_threshold { - //Collector::run_youth_collection(gc); } } } @@ -387,7 +380,6 @@ impl Collector { // Clean up the heap when BoaGc is dropped unsafe fn dump(gc: &mut BoaGc) { - Self::drop_heap(&gc.youth_start); Self::drop_heap(&gc.adult_start); } @@ -438,21 +430,12 @@ impl GcTester { }) } - pub fn assert_youth_bytes_allocated() { - BOA_GC.with(|current| { - let gc = current.borrow(); - assert!(gc.runtime.youth_bytes > 0); - }) - } - pub fn assert_empty_gc() { BOA_GC.with(|current| { let gc = current.borrow(); assert!(gc.adult_start.get().is_none()); assert!(gc.runtime.adult_bytes == 0); - assert!(gc.youth_start.get().is_none()); - assert!(gc.runtime.youth_bytes == 0); }) } diff --git a/boa_gc/tests/promotions.rs b/boa_gc/tests/promotions.rs index 
2413723cd05..6e47e7991cf 100644 --- a/boa_gc/tests/promotions.rs +++ b/boa_gc/tests/promotions.rs @@ -33,5 +33,4 @@ fn generational_promo_two() { GcTester::assert_collection_floor(3); GcTester::assert_adult_bytes_allocated(); - GcTester::assert_youth_bytes_allocated(); } From e56e3a5f9ccb9ff1964916125da6b52d5b33f0f7 Mon Sep 17 00:00:00 2001 From: nekevss Date: Mon, 7 Nov 2022 23:47:51 -0500 Subject: [PATCH 29/55] Review changes --- boa_engine/src/builtins/function/mod.rs | 4 +- boa_engine/src/builtins/promise/mod.rs | 10 +-- boa_engine/src/bytecompiler/function.rs | 4 +- boa_engine/src/bytecompiler/mod.rs | 10 +-- boa_engine/src/context/mod.rs | 6 +- boa_engine/src/environments/compile.rs | 4 +- boa_engine/src/environments/runtime.rs | 10 +-- boa_engine/src/object/jsobject.rs | 4 +- boa_engine/src/realm.rs | 4 +- boa_engine/src/vm/code_block.rs | 6 +- boa_gc/src/gc_box.rs | 10 +-- .../{ephemeron.rs => ephemeron_box.rs} | 33 ++++---- boa_gc/src/internals/mod.rs | 4 +- boa_gc/src/lib.rs | 82 ++++++------------- .../pointers/{weak_pair.rs => ephemeron.rs} | 25 +++--- boa_gc/src/pointers/gc_ptr.rs | 27 ++++-- boa_gc/src/pointers/mod.rs | 4 +- boa_gc/src/pointers/weak_ptr.rs | 15 ++-- boa_gc/tests/allocation.rs | 10 +-- boa_gc/tests/boa.rs | 6 +- boa_gc/tests/promotions.rs | 36 -------- boa_tester/src/exec/mod.rs | 4 +- 22 files changed, 134 insertions(+), 184 deletions(-) rename boa_gc/src/internals/{ephemeron.rs => ephemeron_box.rs} (72%) rename boa_gc/src/pointers/{weak_pair.rs => ephemeron.rs} (59%) delete mode 100644 boa_gc/tests/promotions.rs diff --git a/boa_engine/src/builtins/function/mod.rs b/boa_engine/src/builtins/function/mod.rs index 213e7930b5a..a98a2fe1bc8 100644 --- a/boa_engine/src/builtins/function/mod.rs +++ b/boa_engine/src/builtins/function/mod.rs @@ -34,7 +34,7 @@ use boa_ast::{ operations::{bound_names, contains, lexically_declared_names, ContainsSymbol}, StatementList, }; -use boa_gc::{self, custom_trace, BoaAlloc, Finalize, Gc, Trace}; +use boa_gc::{self, custom_trace, Finalize, Gc, Trace}; use boa_interner::Sym; use boa_parser::Parser; use boa_profiler::Profiler; @@ -186,7 +186,7 @@ impl Captures { where T: NativeObject, { - Self(BoaAlloc::new_cell(Box::new(captures))) + Self(Gc::new_cell(Box::new(captures))) } /// Casts `Captures` to `Any` diff --git a/boa_engine/src/builtins/promise/mod.rs b/boa_engine/src/builtins/promise/mod.rs index 03add1ac506..922ce57dd01 100644 --- a/boa_engine/src/builtins/promise/mod.rs +++ b/boa_engine/src/builtins/promise/mod.rs @@ -21,7 +21,7 @@ use crate::{ value::JsValue, Context, JsError, JsResult, }; -use boa_gc::{BoaAlloc, Cell as GcCell, Finalize, Gc, Trace}; +use boa_gc::{Cell as GcCell, Finalize, Gc, Trace}; use boa_profiler::Profiler; use std::{cell::Cell, rc::Rc}; use tap::{Conv, Pipe}; @@ -118,7 +118,7 @@ impl PromiseCapability { // 2. NOTE: C is assumed to be a constructor function that supports the parameter conventions of the Promise constructor (see 27.2.3.1). // 3. Let promiseCapability be the PromiseCapability Record { [[Promise]]: undefined, [[Resolve]]: undefined, [[Reject]]: undefined }. - let promise_capability = BoaAlloc::new_cell(RejectResolve { + let promise_capability = Gc::new_cell(RejectResolve { reject: JsValue::undefined(), resolve: JsValue::undefined(), }); @@ -436,7 +436,7 @@ impl Promise { } // 1. Let values be a new empty List. - let values = BoaAlloc::new_cell(Vec::new()); + let values = Gc::new_cell(Vec::new()); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. 
let remaining_elements_count = Rc::new(Cell::new(1)); @@ -677,7 +677,7 @@ impl Promise { } // 1. Let values be a new empty List. - let values = BoaAlloc::new_cell(Vec::new()); + let values = Gc::new_cell(Vec::new()); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. let remaining_elements_count = Rc::new(Cell::new(1)); @@ -1008,7 +1008,7 @@ impl Promise { } // 1. Let errors be a new empty List. - let errors = BoaAlloc::new_cell(Vec::new()); + let errors = Gc::new_cell(Vec::new()); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. let remaining_elements_count = Rc::new(Cell::new(1)); diff --git a/boa_engine/src/bytecompiler/function.rs b/boa_engine/src/bytecompiler/function.rs index 90e46c7070f..815ec309521 100644 --- a/boa_engine/src/bytecompiler/function.rs +++ b/boa_engine/src/bytecompiler/function.rs @@ -7,7 +7,7 @@ use crate::{ use boa_ast::{ declaration::Binding, function::FormalParameterList, operations::bound_names, StatementList, }; -use boa_gc::{BoaAlloc, Gc}; +use boa_gc::Gc; use boa_interner::Sym; use rustc_hash::FxHashMap; @@ -219,6 +219,6 @@ impl FunctionCompiler { compiler.emit(Opcode::PushUndefined, &[]); compiler.emit(Opcode::Return, &[]); - Ok(BoaAlloc::new(compiler.finish())) + Ok(Gc::new(compiler.finish())) } } diff --git a/boa_engine/src/bytecompiler/mod.rs b/boa_engine/src/bytecompiler/mod.rs index 5737a3f3fe4..081a6b2259a 100644 --- a/boa_engine/src/bytecompiler/mod.rs +++ b/boa_engine/src/bytecompiler/mod.rs @@ -30,7 +30,7 @@ use boa_ast::{ }, Declaration, Expression, Statement, StatementList, StatementListItem, }; -use boa_gc::{BoaAlloc, Gc}; +use boa_gc::Gc; use boa_interner::{Interner, Sym}; use rustc_hash::FxHashMap; use std::mem::size_of; @@ -3310,7 +3310,7 @@ impl<'b> ByteCompiler<'b> { compiler.emit_opcode(Opcode::PushUndefined); compiler.emit_opcode(Opcode::Return); - let code = BoaAlloc::new(compiler.finish()); + let code = Gc::new(compiler.finish()); let index = self.code_block.functions.len() as u32; self.code_block.functions.push(code); self.emit(Opcode::GetFunction, &[index]); @@ -3481,7 +3481,7 @@ impl<'b> ByteCompiler<'b> { field_compiler.code_block.num_bindings = num_bindings; field_compiler.emit_opcode(Opcode::Return); - let code = BoaAlloc::new(field_compiler.finish()); + let code = Gc::new(field_compiler.finish()); let index = self.code_block.functions.len() as u32; self.code_block.functions.push(code); self.emit(Opcode::GetFunction, &[index]); @@ -3512,7 +3512,7 @@ impl<'b> ByteCompiler<'b> { field_compiler.code_block.num_bindings = num_bindings; field_compiler.emit_opcode(Opcode::Return); - let code = BoaAlloc::new(field_compiler.finish()); + let code = Gc::new(field_compiler.finish()); let index = self.code_block.functions.len() as u32; self.code_block.functions.push(code); self.emit(Opcode::GetFunction, &[index]); @@ -3563,7 +3563,7 @@ impl<'b> ByteCompiler<'b> { compiler.push_compile_environment(compile_environment); compiler.code_block.num_bindings = num_bindings; - let code = BoaAlloc::new(compiler.finish()); + let code = Gc::new(compiler.finish()); let index = self.code_block.functions.len() as u32; self.code_block.functions.push(code); self.emit(Opcode::GetFunction, &[index]); diff --git a/boa_engine/src/context/mod.rs b/boa_engine/src/context/mod.rs index de645744618..f5531cdf0a7 100644 --- a/boa_engine/src/context/mod.rs +++ b/boa_engine/src/context/mod.rs @@ -25,7 +25,7 @@ use crate::{ }; use boa_ast::StatementList; -use boa_gc::{BoaAlloc, Gc}; +use boa_gc::Gc; use boa_interner::{Interner, Sym}; use 
boa_parser::{Error as ParseError, Parser}; use boa_profiler::Profiler; @@ -473,7 +473,7 @@ impl Context { let mut compiler = ByteCompiler::new(Sym::MAIN, statement_list.strict(), self); compiler.create_decls(statement_list, false); compiler.compile_statement_list(statement_list, true, false)?; - Ok(BoaAlloc::new(compiler.finish())) + Ok(Gc::new(compiler.finish())) } /// Compile the AST into a `CodeBlock` with an additional declarative environment. @@ -486,7 +486,7 @@ impl Context { let _timer = Profiler::global().start_event("Compilation", "Main"); let mut compiler = ByteCompiler::new(Sym::MAIN, statement_list.strict(), self); compiler.compile_statement_list_with_new_declarative(statement_list, true, strict)?; - Ok(BoaAlloc::new(compiler.finish())) + Ok(Gc::new(compiler.finish())) } /// Call the VM with a `CodeBlock` and return the result. diff --git a/boa_engine/src/environments/compile.rs b/boa_engine/src/environments/compile.rs index 6af07278ced..17129f324c5 100644 --- a/boa_engine/src/environments/compile.rs +++ b/boa_engine/src/environments/compile.rs @@ -2,7 +2,7 @@ use crate::{ environments::runtime::BindingLocator, property::PropertyDescriptor, Context, JsString, JsValue, }; use boa_ast::expression::Identifier; -use boa_gc::{BoaAlloc, Cell, Finalize, Gc, Trace}; +use boa_gc::{Cell, Finalize, Gc, Trace}; use rustc_hash::FxHashMap; @@ -223,7 +223,7 @@ impl Context { let environment_index = self.realm.compile_env.borrow().environment_index + 1; let outer = self.realm.compile_env.clone(); - self.realm.compile_env = BoaAlloc::new_cell(CompileTimeEnvironment { + self.realm.compile_env = Gc::new_cell(CompileTimeEnvironment { outer: Some(outer), environment_index, bindings: FxHashMap::default(), diff --git a/boa_engine/src/environments/runtime.rs b/boa_engine/src/environments/runtime.rs index 960396f5459..3314eb75e51 100644 --- a/boa_engine/src/environments/runtime.rs +++ b/boa_engine/src/environments/runtime.rs @@ -3,7 +3,7 @@ use std::cell::Cell; use crate::{ environments::CompileTimeEnvironment, error::JsNativeError, object::JsObject, Context, JsValue, }; -use boa_gc::{BoaAlloc, Cell as GcCell, Finalize, Gc, Trace}; +use boa_gc::{Cell as GcCell, Finalize, Gc, Trace}; use boa_ast::expression::Identifier; use rustc_hash::FxHashSet; @@ -232,7 +232,7 @@ impl DeclarativeEnvironmentStack { #[inline] pub(crate) fn new(global_compile_environment: Gc>) -> Self { Self { - stack: vec![BoaAlloc::new(DeclarativeEnvironment { + stack: vec![Gc::new(DeclarativeEnvironment { bindings: GcCell::new(Vec::new()), compile: global_compile_environment, poisoned: Cell::new(false), @@ -368,7 +368,7 @@ impl DeclarativeEnvironmentStack { let index = self.stack.len(); - self.stack.push(BoaAlloc::new(DeclarativeEnvironment { + self.stack.push(Gc::new(DeclarativeEnvironment { bindings: GcCell::new(vec![None; num_bindings]), compile: compile_environment, poisoned: Cell::new(poisoned), @@ -414,7 +414,7 @@ impl DeclarativeEnvironmentStack { JsValue::Null }; - self.stack.push(BoaAlloc::new(DeclarativeEnvironment { + self.stack.push(Gc::new(DeclarativeEnvironment { bindings: GcCell::new(vec![None; num_bindings]), compile: compile_environment, poisoned: Cell::new(poisoned), @@ -445,7 +445,7 @@ impl DeclarativeEnvironmentStack { let poisoned = outer.poisoned.get(); let slots = outer.slots.clone(); - self.stack.push(BoaAlloc::new(DeclarativeEnvironment { + self.stack.push(Gc::new(DeclarativeEnvironment { bindings: GcCell::new(vec![None; num_bindings]), compile: compile_environment, poisoned: Cell::new(poisoned), diff --git 
a/boa_engine/src/object/jsobject.rs b/boa_engine/src/object/jsobject.rs index 6537e55c5c9..6ee970796d8 100644 --- a/boa_engine/src/object/jsobject.rs +++ b/boa_engine/src/object/jsobject.rs @@ -10,7 +10,7 @@ use crate::{ value::PreferredType, Context, JsResult, JsValue, }; -use boa_gc::{self, BoaAlloc, Finalize, Gc, Trace}; +use boa_gc::{self, Finalize, Gc, Trace}; use rustc_hash::FxHashMap; use std::{ cell::RefCell, @@ -37,7 +37,7 @@ impl JsObject { #[inline] fn from_object(object: Object) -> Self { Self { - inner: BoaAlloc::new_cell(object), + inner: Gc::new_cell(object), } } diff --git a/boa_engine/src/realm.rs b/boa_engine/src/realm.rs index 14a80496b3b..c1ace1ef3fb 100644 --- a/boa_engine/src/realm.rs +++ b/boa_engine/src/realm.rs @@ -8,7 +8,7 @@ use crate::{ environments::{CompileTimeEnvironment, DeclarativeEnvironmentStack}, object::{GlobalPropertyMap, JsObject, JsPrototype, ObjectData, PropertyMap}, }; -use boa_gc::{BoaAlloc, Cell, Gc}; +use boa_gc::{Cell, Gc}; use boa_profiler::Profiler; /// Representation of a Realm. @@ -33,7 +33,7 @@ impl Realm { // Allow identification of the global object easily let global_object = JsObject::from_proto_and_data(None, ObjectData::global()); - let global_compile_environment = BoaAlloc::new_cell(CompileTimeEnvironment::new_global()); + let global_compile_environment = Gc::new_cell(CompileTimeEnvironment::new_global()); Self { global_object, diff --git a/boa_engine/src/vm/code_block.rs b/boa_engine/src/vm/code_block.rs index bf35ecc773e..59f80a3ff6e 100644 --- a/boa_engine/src/vm/code_block.rs +++ b/boa_engine/src/vm/code_block.rs @@ -24,7 +24,7 @@ use crate::{ Context, JsResult, JsString, JsValue, }; use boa_ast::{expression::Identifier, function::FormalParameterList}; -use boa_gc::{BoaAlloc, Cell, Finalize, Gc, Trace}; +use boa_gc::{Cell, Finalize, Gc, Trace}; use boa_interner::{Interner, Sym, ToInternedString}; use boa_profiler::Profiler; use std::{collections::VecDeque, convert::TryInto, mem::size_of}; @@ -1098,7 +1098,7 @@ impl JsObject { prototype, ObjectData::generator(Generator { state: GeneratorState::SuspendedStart, - context: Some(BoaAlloc::new_cell(GeneratorContext { + context: Some(Gc::new_cell(GeneratorContext { environments, call_frame, stack, @@ -1241,7 +1241,7 @@ impl JsObject { prototype, ObjectData::async_generator(AsyncGenerator { state: AsyncGeneratorState::SuspendedStart, - context: Some(BoaAlloc::new_cell(GeneratorContext { + context: Some(Gc::new_cell(GeneratorContext { environments, call_frame, stack, diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/gc_box.rs index 1d10312e14c..f0eb6710718 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/gc_box.rs @@ -3,13 +3,13 @@ use std::cell::Cell; use std::ptr::{self, NonNull}; // Age and Weak Flags -const MARK_MASK: usize = 1 << (usize::BITS - 1); -const ROOTS_MASK: usize = !MARK_MASK; +const MARK_MASK: usize = 1 << (usize::BITS - 2); +const WEAK_MASK: usize = 1 << (usize::BITS - 1); +const ROOTS_MASK: usize = !(MARK_MASK | WEAK_MASK); const ROOTS_MAX: usize = ROOTS_MASK; pub(crate) struct GcBoxHeader { roots: Cell, - weak: Cell, pub(crate) next: Cell>>>, } @@ -19,7 +19,6 @@ impl GcBoxHeader { // TODO: implement a way for a cell to start out weak with WEAK_MASK GcBoxHeader { roots: Cell::new(1), - weak: Cell::new(false), next: Cell::new(None), } } @@ -29,7 +28,6 @@ impl GcBoxHeader { // Set weak_flag GcBoxHeader { roots: Cell::new(0), - weak: Cell::new(true), next: Cell::new(None), } } @@ -81,7 +79,7 @@ impl GcBoxHeader { #[inline] pub fn is_ephemeron(&self) -> bool { - 
self.weak.get() + self.roots.get() & WEAK_MASK != 0 } } diff --git a/boa_gc/src/internals/ephemeron.rs b/boa_gc/src/internals/ephemeron_box.rs similarity index 72% rename from boa_gc/src/internals/ephemeron.rs rename to boa_gc/src/internals/ephemeron_box.rs index 1ee275e4ce8..9fe1cb5ede8 100644 --- a/boa_gc/src/internals/ephemeron.rs +++ b/boa_gc/src/internals/ephemeron_box.rs @@ -1,38 +1,43 @@ //! This module will implement the internal types GcBox and Ephemeron use crate::trace::Trace; use crate::Finalize; -use crate::{finalizer_safe, GcBox}; +use crate::{finalizer_safe, Gc, GcBox}; use std::cell::Cell; use std::ptr::NonNull; /// Implementation of an Ephemeron structure -pub struct Ephemeron { +pub struct EphemeronBox { key: Cell>>>, value: V, } -impl Ephemeron { - pub unsafe fn new(value: NonNull>) -> Self { - let ptr = NonNull::new_unchecked(value.as_ptr()); - Ephemeron { +impl EphemeronBox { + // This could panic if called in while dropping / !finalizer_safe() + pub unsafe fn new(value: &Gc) -> Self { + let ptr = NonNull::new_unchecked(value.clone().inner_ptr()); + // Clone increments root, so we need to decrement it + (*ptr.as_ptr()).unroot_inner(); + EphemeronBox { key: Cell::new(Some(ptr)), value: (), } } } -impl Ephemeron { - pub unsafe fn new_pair(key: NonNull>, value: V) -> Self { - let ptr = NonNull::new_unchecked(key.as_ptr()); - - Ephemeron { +impl EphemeronBox { + // This could panic if called while dropping / !finalizer_safe() + pub unsafe fn new_pair(key: &Gc, value: V) -> Self { + let ptr = NonNull::new_unchecked(key.clone().inner_ptr()); + // Clone increments root, so we need to decrement it + (*ptr.as_ptr()).unroot_inner(); + EphemeronBox { key: Cell::new(Some(ptr)), value, } } } -impl Ephemeron { +impl EphemeronBox { #[inline] pub(crate) fn is_marked(&self) -> bool { if let Some(key) = self.inner_key() { @@ -86,14 +91,14 @@ impl Ephemeron { } } -impl Finalize for Ephemeron { +impl Finalize for EphemeronBox { #[inline] fn finalize(&self) { self.key.set(None) } } -unsafe impl Trace for Ephemeron { +unsafe impl Trace for EphemeronBox { #[inline] unsafe fn trace(&self) { /* An ephemeron is never traced with Phase One Trace */ diff --git a/boa_gc/src/internals/mod.rs b/boa_gc/src/internals/mod.rs index 2a87cbc0ad0..06dd355507f 100644 --- a/boa_gc/src/internals/mod.rs +++ b/boa_gc/src/internals/mod.rs @@ -1,6 +1,6 @@ pub(crate) mod borrow_flag; pub mod cell; -pub mod ephemeron; +pub mod ephemeron_box; pub use cell::*; -pub use ephemeron::Ephemeron; +pub use ephemeron_box::EphemeronBox; diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 4417ea83d91..a0c1892c202 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -25,8 +25,8 @@ pub use boa_gc_macros::{Finalize, Trace}; pub use crate::trace::{Finalize, Trace}; pub(crate) use gc_box::GcBox; -pub use internals::{Ephemeron, GcCell as Cell, GcCellRef as Ref, GcCellRefMut as RefMut}; -pub use pointers::{Gc, WeakGc, WeakPair}; +pub use internals::{EphemeronBox, GcCell as Cell, GcCellRef as Ref, GcCellRefMut as RefMut}; +pub use pointers::{Ephemeron, Gc, WeakGc}; pub type GcPointer = NonNull>; @@ -59,7 +59,6 @@ impl Default for GcConfig { struct GcRuntimeData { collections: usize, total_bytes_allocated: usize, - adult_bytes: usize, } struct BoaGc { @@ -100,10 +99,10 @@ pub fn finalizer_safe() -> bool { /// The GcAllocater handles initialization and allocation of garbage collected values. 
/// /// The allocator can trigger a garbage collection -pub struct BoaAlloc; +pub(crate) struct GcAlloc; -impl BoaAlloc { - pub fn new(value: T) -> Gc { +impl GcAlloc { + pub fn new(value: T) -> NonNull> { let _timer = Profiler::global().start_event("New Pointer", "BoaAlloc"); BOA_GC.with(|st| { let mut gc = st.borrow_mut(); @@ -124,52 +123,23 @@ impl BoaAlloc { .set(Some(NonNull::new_unchecked(element_pointer))); gc.runtime.total_bytes_allocated += element_size; - gc.runtime.adult_bytes += element_size; - Gc::new(NonNull::new_unchecked(element_pointer)) + NonNull::new_unchecked(element_pointer) } }) } - pub fn new_cell(value: T) -> Gc> { - let _timer = Profiler::global().start_event("New Cell", "BoaAlloc"); - BOA_GC.with(|st| { - let mut gc = st.borrow_mut(); - - // Manage state preps the internal state for allocation and - // triggers a collection if the state dictates it. - unsafe { - Self::manage_state(&mut gc); - - let new_cell = Cell::new(value); - - let gc_box = GcBox::new(new_cell); - let element_size = mem::size_of_val::>>(&gc_box); - let element_pointer = Box::into_raw(Box::from(gc_box)); - - let old_start = gc.adult_start.take(); - (*element_pointer).set_header_pointer(old_start); - (*element_pointer).value().unroot(); - - gc.adult_start - .set(Some(NonNull::new_unchecked(element_pointer))); - - gc.runtime.adult_bytes += element_size; - gc.runtime.total_bytes_allocated += element_size; - - Gc::new(NonNull::new_unchecked(element_pointer)) - } - }) - } - - pub fn new_weak_pair(key: NonNull>, value: V) -> WeakPair { + pub fn new_ephemeron( + key: &Gc, + value: V, + ) -> NonNull>> { let _timer = Profiler::global().start_event("New Weak Pair", "BoaAlloc"); BOA_GC.with(|internals| { let mut gc = internals.borrow_mut(); unsafe { Self::manage_state(&mut gc); - let ephem = Ephemeron::new_pair(key, value); + let ephem = EphemeronBox::new_pair(key, value); let gc_box = GcBox::new_weak(ephem); let element_size = mem::size_of_val::>(&gc_box); @@ -184,12 +154,12 @@ impl BoaAlloc { gc.runtime.total_bytes_allocated += element_size; - WeakPair::new(NonNull::new_unchecked(element_pointer)) + NonNull::new_unchecked(element_pointer) } }) } - pub fn new_weak_ref(value: NonNull>) -> WeakGc { + pub fn new_weak_box(value: &Gc) -> NonNull>> { let _timer = Profiler::global().start_event("New Weak Pointer", "BoaAlloc"); BOA_GC.with(|state| { let mut gc = state.borrow_mut(); @@ -197,7 +167,7 @@ impl BoaAlloc { unsafe { Self::manage_state(&mut gc); - let ephemeron = Ephemeron::new(value); + let ephemeron = EphemeronBox::new(value); let gc_box = GcBox::new_weak(ephemeron); let element_size = mem::size_of_val::>(&gc_box); @@ -212,20 +182,20 @@ impl BoaAlloc { gc.runtime.total_bytes_allocated += element_size; - WeakGc::new(NonNull::new_unchecked(element_pointer)) + NonNull::new_unchecked(element_pointer) } }) } unsafe fn manage_state(gc: &mut BoaGc) { - if gc.runtime.adult_bytes > gc.config.adult_threshold { + if gc.runtime.total_bytes_allocated > gc.config.adult_threshold { Collector::run_full_collection(gc); - if gc.runtime.adult_bytes as f64 + if gc.runtime.total_bytes_allocated as f64 > gc.config.adult_threshold as f64 * gc.config.growth_ratio { gc.config.adult_threshold = - (gc.runtime.adult_bytes as f64 / gc.config.growth_ratio) as usize + (gc.runtime.total_bytes_allocated as f64 / gc.config.growth_ratio) as usize } } } @@ -258,11 +228,7 @@ impl Collector { let _final_unreachable_adults = Self::mark_heap(&gc.adult_start); // Sweep both without promoting any values - Self::sweep( - &gc.adult_start, - &mut 
gc.runtime.adult_bytes, - &mut gc.runtime.total_bytes_allocated, - ); + Self::sweep(&gc.adult_start, &mut gc.runtime.total_bytes_allocated); } pub(crate) unsafe fn mark_heap( @@ -356,7 +322,6 @@ impl Collector { unsafe fn sweep( heap_start: &StdCell>>>, - bytes_allocated: &mut usize, total_allocated: &mut usize, ) { let _timer = Profiler::global().start_event("Gc Sweeping", "gc"); @@ -371,7 +336,6 @@ impl Collector { // Drops occur here let unmarked_node = Box::from_raw(node.as_ptr()); let unallocated_bytes = mem::size_of_val::>(&*unmarked_node); - *bytes_allocated -= unallocated_bytes; *total_allocated -= unallocated_bytes; sweep_head.set(unmarked_node.header.next.take()); } @@ -406,7 +370,7 @@ pub fn force_collect() { let mut gc = current.borrow_mut(); unsafe { - if gc.runtime.adult_bytes > 0 { + if gc.runtime.total_bytes_allocated > 0 { Collector::run_full_collection(&mut *gc) } } @@ -435,14 +399,14 @@ impl GcTester { let gc = current.borrow(); assert!(gc.adult_start.get().is_none()); - assert!(gc.runtime.adult_bytes == 0); + assert!(gc.runtime.total_bytes_allocated == 0); }) } - pub fn assert_adult_bytes_allocated() { + pub fn assert_bytes_allocated() { BOA_GC.with(|current| { let gc = current.borrow(); - assert!(gc.runtime.adult_bytes > 0); + assert!(gc.runtime.total_bytes_allocated > 0); }) } } diff --git a/boa_gc/src/pointers/weak_pair.rs b/boa_gc/src/pointers/ephemeron.rs similarity index 59% rename from boa_gc/src/pointers/weak_pair.rs rename to boa_gc/src/pointers/ephemeron.rs index 7009becc8c2..939ba4f27ed 100644 --- a/boa_gc/src/pointers/weak_pair.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -1,36 +1,37 @@ use crate::{ finalizer_safe, - internals::Ephemeron, + internals::EphemeronBox, trace::{Finalize, Trace}, - GcBox, EPHEMERON_QUEUE, + Gc, GcAlloc, GcBox, EPHEMERON_QUEUE, }; use std::cell::Cell; use std::ptr::NonNull; -pub struct WeakPair { - inner_ptr: Cell>>>, +pub struct Ephemeron { + inner_ptr: Cell>>>, } -impl WeakPair { - pub fn new(value: NonNull>>) -> Self { +impl Ephemeron { + pub fn new(key: &Gc, value: V) -> Self { + let ephemeron_box = GcAlloc::new_ephemeron(key, value); unsafe { Self { - inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), + inner_ptr: Cell::new(NonNull::new_unchecked(ephemeron_box.as_ptr())), } } } } -impl WeakPair { +impl Ephemeron { #[inline] - fn inner_ptr(&self) -> *mut GcBox> { + fn inner_ptr(&self) -> *mut GcBox> { assert!(finalizer_safe()); self.inner_ptr.get().as_ptr() } #[inline] - pub fn inner(&self) -> &GcBox> { + pub fn inner(&self) -> &GcBox> { unsafe { &*self.inner_ptr() } } @@ -45,9 +46,9 @@ impl WeakPair { } } -impl Finalize for WeakPair {} +impl Finalize for Ephemeron {} -unsafe impl Trace for WeakPair { +unsafe impl Trace for Ephemeron { #[inline] unsafe fn trace(&self) {} diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc_ptr.rs index ee3e1ad02a2..4144c74b3bf 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc_ptr.rs @@ -9,7 +9,7 @@ use std::rc::Rc; use crate::gc_box::GcBox; use crate::trace::{Finalize, Trace}; -use crate::{finalizer_safe, BoaAlloc}; +use crate::{finalizer_safe, Cell as GcCell, GcAlloc}; pub(crate) unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); @@ -24,10 +24,27 @@ pub struct Gc { impl Gc { /// Constructs a new `Gc` with the given value. 
- pub fn new(value: NonNull>) -> Self { + pub fn new(value: T) -> Self { unsafe { + let pointer = GcAlloc::new(value); + + let gc = Gc { + inner_ptr: Cell::new(NonNull::new_unchecked(pointer.as_ptr())), + marker: PhantomData, + }; + gc.set_root(); + gc + } + } + + pub fn new_cell(value: T) -> Gc> { + unsafe { + let new_cell = GcCell::new(value); + + let pointer = GcAlloc::new(new_cell); + let gc = Gc { - inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), + inner_ptr: Cell::new(NonNull::new_unchecked(pointer.as_ptr())), marker: PhantomData, }; gc.set_root(); @@ -72,7 +89,7 @@ impl Gc { } #[inline] - fn inner_ptr(&self) -> *mut GcBox { + pub(crate) fn inner_ptr(&self) -> *mut GcBox { assert!(finalizer_safe()); unsafe { clear_root_bit(self.inner_ptr.get()).as_ptr() } } @@ -168,7 +185,7 @@ impl Drop for Gc { impl Default for Gc { #[inline] fn default() -> Self { - BoaAlloc::new(Default::default()) + Self::new(Default::default()) } } diff --git a/boa_gc/src/pointers/mod.rs b/boa_gc/src/pointers/mod.rs index 8c21bbfa5eb..bcfaac1d590 100644 --- a/boa_gc/src/pointers/mod.rs +++ b/boa_gc/src/pointers/mod.rs @@ -1,9 +1,9 @@ //! Pointers represents the External types returned by the Boa Garbage Collector +mod ephemeron; mod gc_ptr; -mod weak_pair; mod weak_ptr; +pub use ephemeron::Ephemeron; pub use gc_ptr::Gc; -pub use weak_pair::WeakPair; pub use weak_ptr::WeakGc; diff --git a/boa_gc/src/pointers/weak_ptr.rs b/boa_gc/src/pointers/weak_ptr.rs index d2d28187533..34d112205bd 100644 --- a/boa_gc/src/pointers/weak_ptr.rs +++ b/boa_gc/src/pointers/weak_ptr.rs @@ -1,21 +1,22 @@ use crate::{ finalizer_safe, - internals::Ephemeron, + internals::EphemeronBox, trace::{Finalize, Trace}, - GcBox, EPHEMERON_QUEUE, + Gc, GcAlloc, GcBox, EPHEMERON_QUEUE, }; use std::cell::Cell; use std::ptr::NonNull; pub struct WeakGc { - inner_ptr: Cell>>>, + inner_ptr: Cell>>>, } impl WeakGc { - pub fn new(value: NonNull>>) -> Self { + pub fn new(value: &Gc) -> Self { + let weak_box = GcAlloc::new_weak_box(value); unsafe { Self { - inner_ptr: Cell::new(NonNull::new_unchecked(value.as_ptr())), + inner_ptr: Cell::new(NonNull::new_unchecked(weak_box.as_ptr())), } } } @@ -23,14 +24,14 @@ impl WeakGc { impl WeakGc { #[inline] - fn inner_ptr(&self) -> *mut GcBox> { + fn inner_ptr(&self) -> *mut GcBox> { assert!(finalizer_safe()); self.inner_ptr.get().as_ptr() } #[inline] - fn inner(&self) -> &GcBox> { + fn inner(&self) -> &GcBox> { unsafe { &*self.inner_ptr() } } diff --git a/boa_gc/tests/allocation.rs b/boa_gc/tests/allocation.rs index b1034f647cf..42e618973d4 100644 --- a/boa_gc/tests/allocation.rs +++ b/boa_gc/tests/allocation.rs @@ -1,22 +1,22 @@ -use boa_gc::{force_collect, BoaAlloc, GcTester}; +use boa_gc::{force_collect, Gc, GcTester}; #[test] fn gc_basic_cell_allocation() { - let gc_cell = BoaAlloc::new_cell(16_u16); + let gc_cell = Gc::new_cell(16_u16); force_collect(); GcTester::assert_collections(1); - GcTester::assert_adult_bytes_allocated(); + GcTester::assert_bytes_allocated(); assert_eq!(*gc_cell.borrow_mut(), 16); } #[test] fn gc_basic_pointer_alloc() { - let gc = BoaAlloc::new(16_u8); + let gc = Gc::new(16_u8); force_collect(); GcTester::assert_collections(1); - GcTester::assert_adult_bytes_allocated(); + GcTester::assert_bytes_allocated(); assert_eq!(*gc, 16); drop(gc); diff --git a/boa_gc/tests/boa.rs b/boa_gc/tests/boa.rs index 1173ac0abbb..1e40a62aede 100644 --- a/boa_gc/tests/boa.rs +++ b/boa_gc/tests/boa.rs @@ -1,11 +1,11 @@ -use boa_gc::BoaAlloc; +use boa_gc::Gc; #[test] fn boa_borrow_mut_test() { - 
let v = BoaAlloc::new_cell(Vec::new()); + let v = Gc::new_cell(Vec::new()); for _ in 1..=259 { - let cell = BoaAlloc::new_cell([0u8; 10]); + let cell = Gc::new_cell([0u8; 10]); v.borrow_mut().push(cell); } } diff --git a/boa_gc/tests/promotions.rs b/boa_gc/tests/promotions.rs deleted file mode 100644 index 6e47e7991cf..00000000000 --- a/boa_gc/tests/promotions.rs +++ /dev/null @@ -1,36 +0,0 @@ -// Let's get weird and age some heap values - -use boa_gc::{force_collect, BoaAlloc, GcTester}; - -#[ignore] -#[test] -fn generational_promo_one() { - let mut storage = Vec::new(); - - // Super basic loop that loads bytes and force collections - for i in 0..15000_usize { - let gc = BoaAlloc::new(i); - storage.push(gc); - } - GcTester::assert_collection_floor(2); - // assert that items were promoted to adults - GcTester::assert_adult_bytes_allocated(); - drop(storage); - force_collect(); - GcTester::assert_empty_gc() -} - -#[ignore] -#[test] -fn generational_promo_two() { - let mut storage = Vec::new(); - for i in 0..2000_usize { - let gc = BoaAlloc::new(i); - if i % 10 == 0 { - storage.push(gc) - } - } - GcTester::assert_collection_floor(3); - - GcTester::assert_adult_bytes_allocated(); -} diff --git a/boa_tester/src/exec/mod.rs b/boa_tester/src/exec/mod.rs index 9a79fe4de51..26e067bae89 100644 --- a/boa_tester/src/exec/mod.rs +++ b/boa_tester/src/exec/mod.rs @@ -12,7 +12,7 @@ use boa_engine::{ builtins::JsArgs, object::FunctionBuilder, property::Attribute, Context, JsNativeErrorKind, JsResult, JsValue, }; -use boa_gc::{BoaAlloc, Cell, Finalize, Gc, Trace}; +use boa_gc::{Cell, Finalize, Gc, Trace}; use boa_parser::Parser; use colored::Colorize; use rayon::prelude::*; @@ -406,7 +406,7 @@ struct AsyncResult { impl Default for AsyncResult { fn default() -> Self { Self { - inner: BoaAlloc::new_cell(Ok(())), + inner: Gc::new_cell(Ok(())), } } } From 53ba41c423c821f4abf7b51de00ff53e15a9aee1 Mon Sep 17 00:00:00 2001 From: nekevss Date: Mon, 7 Nov 2022 23:59:03 -0500 Subject: [PATCH 30/55] Missing newline in Cargo.toml --- boa_gc/derive_macros/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/boa_gc/derive_macros/Cargo.toml b/boa_gc/derive_macros/Cargo.toml index e7f90833608..c00174f2bfa 100644 --- a/boa_gc/derive_macros/Cargo.toml +++ b/boa_gc/derive_macros/Cargo.toml @@ -13,4 +13,4 @@ proc-macro = true syn = "1.0" proc-macro2 = "1.0" quote = "1.0" -synstructure = "0.12" \ No newline at end of file +synstructure = "0.12" From ae6cfdba60ccba7a13ed9e1abd65467807cdc2a0 Mon Sep 17 00:00:00 2001 From: jedel1043 Date: Mon, 7 Nov 2022 10:47:22 -0600 Subject: [PATCH 31/55] Redesign API --- Cargo.lock | 150 ++++++------ .../src/builtins/async_generator/mod.rs | 6 +- boa_engine/src/builtins/function/mod.rs | 10 +- boa_engine/src/builtins/generator/mod.rs | 4 +- boa_engine/src/builtins/promise/mod.rs | 12 +- boa_engine/src/bytecompiler/mod.rs | 4 +- boa_engine/src/environments/compile.rs | 10 +- boa_engine/src/environments/runtime.rs | 2 +- boa_engine/src/object/jsobject.rs | 14 +- boa_engine/src/realm.rs | 6 +- boa_engine/src/vm/code_block.rs | 12 +- boa_gc/src/{internals => }/cell.rs | 75 +++++- boa_gc/src/internals/borrow_flag.rs | 69 ------ .../{ephemeron_box.rs => eph_box.rs} | 31 +-- boa_gc/src/{ => internals}/gc_box.rs | 9 - boa_gc/src/internals/mod.rs | 9 +- boa_gc/src/lib.rs | 225 +++++------------- boa_gc/src/pointers/ephemeron.rs | 22 +- boa_gc/src/pointers/{gc_ptr.rs => gc.rs} | 43 ++-- boa_gc/src/pointers/mod.rs | 8 +- boa_gc/src/pointers/weak.rs | 28 +++ 
boa_gc/src/pointers/weak_ptr.rs | 73 ------ boa_gc/src/test/allocation.rs | 27 +++ boa_gc/src/test/mod.rs | 30 +++ boa_gc/src/trace.rs | 8 +- boa_gc/tests/allocation.rs | 26 -- boa_gc/tests/boa.rs | 6 +- boa_tester/src/exec/mod.rs | 6 +- 28 files changed, 389 insertions(+), 536 deletions(-) rename boa_gc/src/{internals => }/cell.rs (86%) delete mode 100644 boa_gc/src/internals/borrow_flag.rs rename boa_gc/src/internals/{ephemeron_box.rs => eph_box.rs} (70%) rename boa_gc/src/{ => internals}/gc_box.rs (93%) rename boa_gc/src/pointers/{gc_ptr.rs => gc.rs} (86%) create mode 100644 boa_gc/src/pointers/weak.rs delete mode 100644 boa_gc/src/pointers/weak_ptr.rs create mode 100644 boa_gc/src/test/allocation.rs create mode 100644 boa_gc/src/test/mod.rs delete mode 100644 boa_gc/tests/allocation.rs diff --git a/Cargo.lock b/Cargo.lock index 423428b13ef..29fa520fed1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -112,7 +112,6 @@ dependencies = [ "dyn-clone", "fast-float", "float-cmp", - "gc", "icu_datetime", "icu_locale_canonicalizer", "icu_locid", @@ -155,10 +154,21 @@ dependencies = [ name = "boa_gc" version = "0.16.0" dependencies = [ - "gc", + "boa_gc_macros", + "boa_profiler", "measureme", ] +[[package]] +name = "boa_gc_macros" +version = "0.16.0" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "boa_interner" version = "0.16.0" @@ -218,7 +228,6 @@ dependencies = [ "clap 4.0.22", "colored", "fxhash", - "gc", "once_cell", "rayon", "regex", @@ -245,9 +254,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.11.0" +version = "3.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d" +checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" [[package]] name = "byteorder" @@ -263,9 +272,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.73" +version = "1.0.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" +checksum = "581f5dba903aac52ea3feb5ec4810848460ee833876f1f9b0fdeab1f19091574" [[package]] name = "cfg-if" @@ -317,9 +326,9 @@ dependencies = [ [[package]] name = "clap" -version = "3.2.22" +version = "3.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86447ad904c7fb335a790c9d7fe3d0d971dc523b8ccd1561a520de9a85302750" +checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" dependencies = [ "bitflags", "clap_lex 0.2.4", @@ -421,7 +430,7 @@ dependencies = [ "atty", "cast", "ciborium", - "clap 3.2.22", + "clap 3.2.23", "criterion-plot", "itertools", "lazy_static", @@ -492,9 +501,9 @@ dependencies = [ [[package]] name = "cxx" -version = "1.0.78" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19f39818dcfc97d45b03953c1292efc4e80954e1583c4aa770bac1383e2310a4" +checksum = "6b7d4e43b25d3c994662706a1d4fcfc32aaa6afd287502c111b237093bb23f3a" dependencies = [ "cc", "cxxbridge-flags", @@ -504,9 +513,9 @@ dependencies = [ [[package]] name = "cxx-build" -version = "1.0.78" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e580d70777c116df50c390d1211993f62d40302881e54d4b79727acb83d0199" +checksum = "84f8829ddc213e2c1368e51a2564c552b65a8cb6a28f31e576270ac81d5e5827" dependencies = [ "cc", 
"codespan-reporting", @@ -519,15 +528,15 @@ dependencies = [ [[package]] name = "cxxbridge-flags" -version = "1.0.78" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56a46460b88d1cec95112c8c363f0e2c39afdb237f60583b0b36343bf627ea9c" +checksum = "e72537424b474af1460806647c41d4b6d35d09ef7fe031c5c2fa5766047cc56a" [[package]] name = "cxxbridge-macro" -version = "1.0.78" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "747b608fecf06b0d72d440f27acc99288207324b793be2c17991839f3d4995ea" +checksum = "309e4fb93eed90e1e14bea0da16b209f81813ba9fc7830c20ed151dd7bc0a4d7" dependencies = [ "proc-macro2", "quote", @@ -634,9 +643,9 @@ checksum = "95765f67b4b18863968b4a1bd5bb576f732b29a4a28c7cd84c09fa3e2875f33c" [[package]] name = "fd-lock" -version = "3.0.6" +version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e11dcc7e4d79a8c89b9ab4c6f5c30b1fc4a83c420792da3542fd31179ed5f517" +checksum = "0c93a581058d957dc4176875aad04f82f81613e6611d64aa1a9c755bdfb16711" dependencies = [ "cfg-if", "rustix", @@ -684,21 +693,6 @@ name = "gc" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3edaac0f5832202ebc99520cb77c932248010c4645d20be1dc62d6579f5b3752" -dependencies = [ - "gc_derive", -] - -[[package]] -name = "gc_derive" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60df8444f094ff7885631d80e78eb7d88c3c2361a98daaabb06256e4500db941" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] [[package]] name = "getrandom" @@ -742,9 +736,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.51" +version = "0.1.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5a6ef98976b22b3b7f2f3a806f858cb862044cfa66805aa3ad84cb3d3b785ed" +checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -756,9 +750,9 @@ dependencies = [ [[package]] name = "iana-time-zone-haiku" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fde6edd6cef363e9359ed3c98ba64590ba9eecba2293eb5a723ab32aee8926aa" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" dependencies = [ "cxx", "cxx-build", @@ -914,9 +908,9 @@ dependencies = [ [[package]] name = "io-lifetimes" -version = "0.7.3" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ea37f355c05dde75b84bba2d767906ad522e97cd9e2eef2be7a4ab7fb442c06" +checksum = "59ce5ef949d49ee85593fc4d3f3f95ad61657076395cbbce23e2121fc5542074" [[package]] name = "itertools" @@ -971,9 +965,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.135" +version = "0.2.137" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c" +checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89" [[package]] name = "link-cplusplus" @@ -1111,9 +1105,9 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.13.1" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" +checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5" dependencies = [ "hermit-abi", "libc", @@ -1133,9 +1127,9 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] name = "os_str_bytes" -version = "6.3.0" +version = "6.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" +checksum = "3baf96e39c5359d2eb0dd6ccb42c62b91d9678aa68160d261b9e0ccbf9e9dea9" [[package]] name = "parking_lot" @@ -1259,9 +1253,9 @@ checksum = "7c68cb38ed13fd7bc9dd5db8f165b7c8d9c1a315104083a2b10f11354c2af97f" [[package]] name = "ppv-lite86" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro-error" @@ -1289,9 +1283,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.46" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b" +checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" dependencies = [ "unicode-ident", ] @@ -1402,9 +1396,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.27" +version = "0.6.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" +checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" [[package]] name = "regress" @@ -1423,9 +1417,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustix" -version = "0.35.11" +version = "0.35.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb2fda4666def1433b1b05431ab402e42a1084285477222b72d6c564c417cef" +checksum = "727a1a6d65f786ec22df8a81ca3121107f235970dc1705ed681d3e6e8b9cd5f9" dependencies = [ "bitflags", "errno", @@ -1644,9 +1638,9 @@ dependencies = [ [[package]] name = "textwrap" -version = "0.15.1" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "949517c0cf1bf4ee812e2e07e08ab448e3ae0d23472aee8a06c985f0c8815b16" +checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" @@ -1893,46 +1887,60 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-sys" -version = "0.36.1" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" dependencies = [ + "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_msvc", "windows_x86_64_gnu", + "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" + [[package]] name = "windows_aarch64_msvc" -version = "0.36.1" +version = "0.42.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" [[package]] name = "windows_i686_gnu" -version = "0.36.1" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" +checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" [[package]] name = "windows_i686_msvc" -version = "0.36.1" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" [[package]] name = "windows_x86_64_gnu" -version = "0.36.1" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" [[package]] name = "windows_x86_64_msvc" -version = "0.36.1" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" [[package]] name = "writeable" @@ -1975,9 +1983,9 @@ dependencies = [ [[package]] name = "zerofrom-derive" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8785f47d6062c1932866147f91297286a9f350b3070e9d9f0b6078e37d623c1a" +checksum = "2e8aa86add9ddbd2409c1ed01e033cd457d79b1b1229b64922c25095c595e829" dependencies = [ "proc-macro2", "quote", diff --git a/boa_engine/src/builtins/async_generator/mod.rs b/boa_engine/src/builtins/async_generator/mod.rs index e6ea7180641..768dfd48a62 100644 --- a/boa_engine/src/builtins/async_generator/mod.rs +++ b/boa_engine/src/builtins/async_generator/mod.rs @@ -18,7 +18,7 @@ use crate::{ vm::GeneratorResumeKind, Context, JsError, JsResult, }; -use boa_gc::{Cell, Finalize, Gc, Trace}; +use boa_gc::{Finalize, Gc, GcCell, Trace}; use boa_profiler::Profiler; use std::collections::VecDeque; @@ -56,7 +56,7 @@ pub struct AsyncGenerator { pub(crate) state: AsyncGeneratorState, /// The `[[AsyncGeneratorContext]]` internal slot. - pub(crate) context: Option>>, + pub(crate) context: Option>>, /// The `[[AsyncGeneratorQueue]]` internal slot. 
pub(crate) queue: VecDeque, @@ -511,7 +511,7 @@ impl AsyncGenerator { pub(crate) fn resume( generator: &JsObject, state: AsyncGeneratorState, - generator_context: &Gc>, + generator_context: &Gc>, completion: (JsResult, bool), context: &mut Context, ) { diff --git a/boa_engine/src/builtins/function/mod.rs b/boa_engine/src/builtins/function/mod.rs index a98a2fe1bc8..ebd927fd795 100644 --- a/boa_engine/src/builtins/function/mod.rs +++ b/boa_engine/src/builtins/function/mod.rs @@ -34,7 +34,7 @@ use boa_ast::{ operations::{bound_names, contains, lexically_declared_names, ContainsSymbol}, StatementList, }; -use boa_gc::{self, custom_trace, Finalize, Gc, Trace}; +use boa_gc::{self, custom_trace, Finalize, Gc, GcCell, Trace}; use boa_interner::Sym; use boa_parser::Parser; use boa_profiler::Profiler; @@ -178,7 +178,7 @@ unsafe impl Trace for ClassFieldDefinition { /// with `Any::downcast_ref` and `Any::downcast_mut` to recover the original /// type. #[derive(Clone, Debug, Trace, Finalize)] -pub struct Captures(Gc>>); +pub struct Captures(Gc>>); impl Captures { /// Creates a new capture context. @@ -186,7 +186,7 @@ impl Captures { where T: NativeObject, { - Self(Gc::new_cell(Box::new(captures))) + Self(Gc::new(GcCell::new(Box::new(captures)))) } /// Casts `Captures` to `Any` @@ -194,7 +194,7 @@ impl Captures { /// # Panics /// /// Panics if it's already borrowed as `&mut Any` - pub fn as_any(&self) -> boa_gc::Ref<'_, dyn Any> { + pub fn as_any(&self) -> boa_gc::GcCellRef<'_, dyn Any> { Ref::map(self.0.borrow(), |data| data.deref().as_any()) } @@ -203,7 +203,7 @@ impl Captures { /// # Panics /// /// Panics if it's already borrowed as `&mut Any` - pub fn as_mut_any(&self) -> boa_gc::RefMut<'_, Box, dyn Any> { + pub fn as_mut_any(&self) -> boa_gc::GcCellRefMut<'_, Box, dyn Any> { RefMut::map(self.0.borrow_mut(), |data| data.deref_mut().as_mut_any()) } } diff --git a/boa_engine/src/builtins/generator/mod.rs b/boa_engine/src/builtins/generator/mod.rs index 74eea06fc13..7599dd90742 100644 --- a/boa_engine/src/builtins/generator/mod.rs +++ b/boa_engine/src/builtins/generator/mod.rs @@ -20,7 +20,7 @@ use crate::{ vm::{CallFrame, GeneratorResumeKind, ReturnType}, Context, JsError, JsResult, }; -use boa_gc::{Cell, Finalize, Gc, Trace}; +use boa_gc::{Finalize, Gc, GcCell, Trace}; use boa_profiler::Profiler; /// Indicates the state of a generator. @@ -52,7 +52,7 @@ pub struct Generator { pub(crate) state: GeneratorState, /// The `[[GeneratorContext]]` internal slot. - pub(crate) context: Option>>, + pub(crate) context: Option>>, } impl BuiltIn for Generator { diff --git a/boa_engine/src/builtins/promise/mod.rs b/boa_engine/src/builtins/promise/mod.rs index 922ce57dd01..7bdf64cbc7a 100644 --- a/boa_engine/src/builtins/promise/mod.rs +++ b/boa_engine/src/builtins/promise/mod.rs @@ -21,7 +21,7 @@ use crate::{ value::JsValue, Context, JsError, JsResult, }; -use boa_gc::{Cell as GcCell, Finalize, Gc, Trace}; +use boa_gc::{Finalize, Gc, GcCell, Trace}; use boa_profiler::Profiler; use std::{cell::Cell, rc::Rc}; use tap::{Conv, Pipe}; @@ -118,10 +118,10 @@ impl PromiseCapability { // 2. NOTE: C is assumed to be a constructor function that supports the parameter conventions of the Promise constructor (see 27.2.3.1). // 3. Let promiseCapability be the PromiseCapability Record { [[Promise]]: undefined, [[Resolve]]: undefined, [[Reject]]: undefined }. 
- let promise_capability = Gc::new_cell(RejectResolve { + let promise_capability = Gc::new(GcCell::new(RejectResolve { reject: JsValue::undefined(), resolve: JsValue::undefined(), - }); + })); // 4. Let executorClosure be a new Abstract Closure with parameters (resolve, reject) that captures promiseCapability and performs the following steps when called: // 5. Let executor be CreateBuiltinFunction(executorClosure, 2, "", « »). @@ -436,7 +436,7 @@ impl Promise { } // 1. Let values be a new empty List. - let values = Gc::new_cell(Vec::new()); + let values = Gc::new(GcCell::new(Vec::new())); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. let remaining_elements_count = Rc::new(Cell::new(1)); @@ -677,7 +677,7 @@ impl Promise { } // 1. Let values be a new empty List. - let values = Gc::new_cell(Vec::new()); + let values = Gc::new(GcCell::new(Vec::new())); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. let remaining_elements_count = Rc::new(Cell::new(1)); @@ -1008,7 +1008,7 @@ impl Promise { } // 1. Let errors be a new empty List. - let errors = Gc::new_cell(Vec::new()); + let errors = Gc::new(GcCell::new(Vec::new())); // 2. Let remainingElementsCount be the Record { [[Value]]: 1 }. let remaining_elements_count = Rc::new(Cell::new(1)); diff --git a/boa_engine/src/bytecompiler/mod.rs b/boa_engine/src/bytecompiler/mod.rs index 081a6b2259a..60282bed538 100644 --- a/boa_engine/src/bytecompiler/mod.rs +++ b/boa_engine/src/bytecompiler/mod.rs @@ -30,7 +30,7 @@ use boa_ast::{ }, Declaration, Expression, Statement, StatementList, StatementListItem, }; -use boa_gc::Gc; +use boa_gc::{Gc, GcCell}; use boa_interner::{Interner, Sym}; use rustc_hash::FxHashMap; use std::mem::size_of; @@ -263,7 +263,7 @@ impl<'b> ByteCompiler<'b> { #[inline] fn push_compile_environment( &mut self, - environment: Gc>, + environment: Gc>, ) -> usize { let index = self.code_block.compile_environments.len(); self.code_block.compile_environments.push(environment); diff --git a/boa_engine/src/environments/compile.rs b/boa_engine/src/environments/compile.rs index 17129f324c5..1ff6f3faf78 100644 --- a/boa_engine/src/environments/compile.rs +++ b/boa_engine/src/environments/compile.rs @@ -2,7 +2,7 @@ use crate::{ environments::runtime::BindingLocator, property::PropertyDescriptor, Context, JsString, JsValue, }; use boa_ast::expression::Identifier; -use boa_gc::{Cell, Finalize, Gc, Trace}; +use boa_gc::{Finalize, Gc, GcCell, Trace}; use rustc_hash::FxHashMap; @@ -22,7 +22,7 @@ struct CompileTimeBinding { /// A compile time environment also indicates, if it is a function environment. #[derive(Debug, Finalize, Trace)] pub(crate) struct CompileTimeEnvironment { - outer: Option>>, + outer: Option>>, environment_index: usize, #[unsafe_ignore_trace] bindings: FxHashMap, @@ -223,12 +223,12 @@ impl Context { let environment_index = self.realm.compile_env.borrow().environment_index + 1; let outer = self.realm.compile_env.clone(); - self.realm.compile_env = Gc::new_cell(CompileTimeEnvironment { + self.realm.compile_env = Gc::new(GcCell::new(CompileTimeEnvironment { outer: Some(outer), environment_index, bindings: FxHashMap::default(), function_scope, - }); + })); } /// Pop the last compile time environment from the stack. 
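The hunks above all converge on one construction pattern: instead of a dedicated `Gc::new_cell` constructor, a traced, shared, mutable slot is built by composing `Gc::new` with `GcCell::new`, mirroring `Rc<RefCell<T>>`. A minimal sketch of that call-site pattern, assuming the crate's blanket `Trace` impls for std types (which its own tests already rely on); the `example` function and the `Vec<u8>` payload are illustrative only, not engine code:

    use boa_gc::{Gc, GcCell};

    fn example() {
        // Shared, collector-managed interior mutability: Gc<GcCell<T>>,
        // the traced analogue of Rc<RefCell<T>>.
        let shared: Gc<GcCell<Vec<u8>>> = Gc::new(GcCell::new(Vec::new()));

        // Borrows are checked dynamically, exactly as with RefCell;
        // a conflicting borrow_mut would panic at runtime.
        shared.borrow_mut().push(1);
        assert_eq!(shared.borrow().len(), 1);
    }

The same shape covers every converted site: `Gc::new(GcCell::new(RejectResolve { .. }))`, `Gc::new(GcCell::new(CompileTimeEnvironment { .. }))`, and so on.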
@@ -241,7 +241,7 @@ impl Context { #[inline] pub(crate) fn pop_compile_time_environment( &mut self, - ) -> (usize, Gc>) { + ) -> (usize, Gc>) { let current_env_borrow = self.realm.compile_env.borrow(); if let Some(outer) = ¤t_env_borrow.outer { let outer_clone = outer.clone(); diff --git a/boa_engine/src/environments/runtime.rs b/boa_engine/src/environments/runtime.rs index 3314eb75e51..c06ab965724 100644 --- a/boa_engine/src/environments/runtime.rs +++ b/boa_engine/src/environments/runtime.rs @@ -3,7 +3,7 @@ use std::cell::Cell; use crate::{ environments::CompileTimeEnvironment, error::JsNativeError, object::JsObject, Context, JsValue, }; -use boa_gc::{Cell as GcCell, Finalize, Gc, Trace}; +use boa_gc::{Finalize, Gc, GcCell, Trace}; use boa_ast::expression::Identifier; use rustc_hash::FxHashSet; diff --git a/boa_engine/src/object/jsobject.rs b/boa_engine/src/object/jsobject.rs index 6ee970796d8..6bcf077f9bd 100644 --- a/boa_engine/src/object/jsobject.rs +++ b/boa_engine/src/object/jsobject.rs @@ -10,7 +10,7 @@ use crate::{ value::PreferredType, Context, JsResult, JsValue, }; -use boa_gc::{self, Finalize, Gc, Trace}; +use boa_gc::{self, Finalize, Gc, GcCell, Trace}; use rustc_hash::FxHashMap; use std::{ cell::RefCell, @@ -21,15 +21,15 @@ use std::{ }; /// A wrapper type for an immutably borrowed type T. -pub type Ref<'a, T> = boa_gc::Ref<'a, T>; +pub type Ref<'a, T> = boa_gc::GcCellRef<'a, T>; /// A wrapper type for a mutably borrowed type T. -pub type RefMut<'a, T, U> = boa_gc::RefMut<'a, T, U>; +pub type RefMut<'a, T, U> = boa_gc::GcCellRefMut<'a, T, U>; /// Garbage collected `Object`. #[derive(Trace, Finalize, Clone, Default)] pub struct JsObject { - inner: Gc>, + inner: Gc>, } impl JsObject { @@ -37,7 +37,7 @@ impl JsObject { #[inline] fn from_object(object: Object) -> Self { Self { - inner: Gc::new_cell(object), + inner: Gc::new(GcCell::new(object)), } } @@ -738,9 +738,9 @@ Cannot both specify accessors and a value or writable attribute", } } -impl AsRef> for JsObject { +impl AsRef> for JsObject { #[inline] - fn as_ref(&self) -> &boa_gc::Cell { + fn as_ref(&self) -> &GcCell { &self.inner } } diff --git a/boa_engine/src/realm.rs b/boa_engine/src/realm.rs index c1ace1ef3fb..ab8cf8dfbd9 100644 --- a/boa_engine/src/realm.rs +++ b/boa_engine/src/realm.rs @@ -8,7 +8,7 @@ use crate::{ environments::{CompileTimeEnvironment, DeclarativeEnvironmentStack}, object::{GlobalPropertyMap, JsObject, JsPrototype, ObjectData, PropertyMap}, }; -use boa_gc::{Cell, Gc}; +use boa_gc::{Gc, GcCell}; use boa_profiler::Profiler; /// Representation of a Realm. 
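`jsobject.rs` above now stores the object as `Gc<GcCell<Object>>` and aliases `Ref`/`RefMut` to `GcCellRef`/`GcCellRefMut`. A minimal sketch of the projection idiom those aliases enable, the same `map` pattern `Captures::as_any` uses above to hand out a `dyn Any` view; the `Pair` type and `left_of` helper are hypothetical, and the sketch assumes the derive macros re-exported by `boa_gc`:

    use boa_gc::{Finalize, Gc, GcCell, GcCellRef, Trace};

    // Illustrative payload only; not an engine type.
    #[derive(Trace, Finalize)]
    struct Pair {
        left: u32,
        right: u32,
    }

    // Borrow the whole cell, then narrow the guard to a single field.
    // The dynamic borrow stays active for as long as the returned
    // GcCellRef is alive.
    fn left_of(pair: &Gc<GcCell<Pair>>) -> GcCellRef<'_, u32> {
        GcCellRef::map(pair.borrow(), |p| &p.left)
    }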
@@ -21,7 +21,7 @@ pub struct Realm { pub(crate) global_property_map: PropertyMap, pub(crate) global_prototype: JsPrototype, pub(crate) environments: DeclarativeEnvironmentStack, - pub(crate) compile_env: Gc>, + pub(crate) compile_env: Gc>, } impl Realm { @@ -33,7 +33,7 @@ impl Realm { // Allow identification of the global object easily let global_object = JsObject::from_proto_and_data(None, ObjectData::global()); - let global_compile_environment = Gc::new_cell(CompileTimeEnvironment::new_global()); + let global_compile_environment = Gc::new(GcCell::new(CompileTimeEnvironment::new_global())); Self { global_object, diff --git a/boa_engine/src/vm/code_block.rs b/boa_engine/src/vm/code_block.rs index 59f80a3ff6e..96e8bed9998 100644 --- a/boa_engine/src/vm/code_block.rs +++ b/boa_engine/src/vm/code_block.rs @@ -24,7 +24,7 @@ use crate::{ Context, JsResult, JsString, JsValue, }; use boa_ast::{expression::Identifier, function::FormalParameterList}; -use boa_gc::{Cell, Finalize, Gc, Trace}; +use boa_gc::{Finalize, Gc, GcCell, Trace}; use boa_interner::{Interner, Sym, ToInternedString}; use boa_profiler::Profiler; use std::{collections::VecDeque, convert::TryInto, mem::size_of}; @@ -103,7 +103,7 @@ pub struct CodeBlock { pub(crate) arguments_binding: Option, /// Compile time environments in this function. - pub(crate) compile_environments: Vec>>, + pub(crate) compile_environments: Vec>>, /// The `[[IsClassConstructor]]` internal slot. pub(crate) is_class_constructor: bool, @@ -1098,11 +1098,11 @@ impl JsObject { prototype, ObjectData::generator(Generator { state: GeneratorState::SuspendedStart, - context: Some(Gc::new_cell(GeneratorContext { + context: Some(Gc::new(GcCell::new(GeneratorContext { environments, call_frame, stack, - })), + }))), }), ); @@ -1241,11 +1241,11 @@ impl JsObject { prototype, ObjectData::async_generator(AsyncGenerator { state: AsyncGeneratorState::SuspendedStart, - context: Some(Gc::new_cell(GeneratorContext { + context: Some(Gc::new(GcCell::new(GeneratorContext { environments, call_frame, stack, - })), + }))), queue: VecDeque::new(), }), ); diff --git a/boa_gc/src/internals/cell.rs b/boa_gc/src/cell.rs similarity index 86% rename from boa_gc/src/internals/cell.rs rename to boa_gc/src/cell.rs index 5750ea218f1..e49ad18afaa 100644 --- a/boa_gc/src/internals/cell.rs +++ b/boa_gc/src/cell.rs @@ -5,10 +5,77 @@ use std::fmt::{self, Debug, Display}; use std::hash::Hash; use std::ops::{Deref, DerefMut}; -use crate::{ - internals::borrow_flag::{BorrowFlag, BorrowState, BORROWFLAG_INIT}, - trace::{Finalize, Trace}, -}; +use crate::trace::{Finalize, Trace}; + +#[derive(Copy, Clone)] +pub(crate) struct BorrowFlag(usize); + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub(crate) enum BorrowState { + Reading, + Writing, + Unused, +} + +const ROOT: usize = 1; +const WRITING: usize = !1; +const UNUSED: usize = 0; + +/// The base borrowflag init is rooted, and has no outstanding borrows. 
+pub(crate) const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(1); + +impl BorrowFlag { + pub(crate) fn borrowed(self) -> BorrowState { + match self.0 & !ROOT { + UNUSED => BorrowState::Unused, + WRITING => BorrowState::Writing, + _ => BorrowState::Reading, + } + } + + pub(crate) fn rooted(self) -> bool { + match self.0 & ROOT { + 0 => false, + _ => true, + } + } + + pub(crate) fn set_writing(self) -> Self { + // Set every bit other than the root bit, which is preserved + BorrowFlag(self.0 | WRITING) + } + + pub(crate) fn set_unused(self) -> Self { + // Clear every bit other than the root bit, which is preserved + BorrowFlag(self.0 & ROOT) + } + + pub(crate) fn add_reading(self) -> Self { + assert!(self.borrowed() != BorrowState::Writing); + // Add 1 to the integer starting at the second binary digit. As our + // borrowstate is not writing, we know that overflow cannot happen, so + // this is equivalent to the following, more complicated, expression: + // + // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) + 1) << 1)) + BorrowFlag(self.0 + 0b10) + } + + pub(crate) fn sub_reading(self) -> Self { + assert!(self.borrowed() == BorrowState::Reading); + // Subtract 1 from the integer starting at the second binary digit. As + // our borrowstate is not writing or unused, we know that overflow or + // undeflow cannot happen, so this is equivalent to the following, more + // complicated, expression: + // + // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) - 1) << 1)) + BorrowFlag(self.0 - 0b10) + } + + pub(crate) fn set_rooted(self, rooted: bool) -> Self { + // Preserve the non-root bits + BorrowFlag((self.0 & !ROOT) | (rooted as usize)) + } +} /// A mutable memory location with dynamically checked borrow rules /// that can be used inside of a garbage-collected pointer. diff --git a/boa_gc/src/internals/borrow_flag.rs b/boa_gc/src/internals/borrow_flag.rs deleted file mode 100644 index fa149952bbb..00000000000 --- a/boa_gc/src/internals/borrow_flag.rs +++ /dev/null @@ -1,69 +0,0 @@ -#[derive(Copy, Clone)] -pub(crate) struct BorrowFlag(usize); - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub(crate) enum BorrowState { - Reading, - Writing, - Unused, -} - -const ROOT: usize = 1; -const WRITING: usize = !1; -const UNUSED: usize = 0; - -/// The base borrowflag init is rooted, and has no outstanding borrows. -pub(crate) const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(1); - -impl BorrowFlag { - pub(crate) fn borrowed(self) -> BorrowState { - match self.0 & !ROOT { - UNUSED => BorrowState::Unused, - WRITING => BorrowState::Writing, - _ => BorrowState::Reading, - } - } - - pub(crate) fn rooted(self) -> bool { - match self.0 & ROOT { - 0 => false, - _ => true, - } - } - - pub(crate) fn set_writing(self) -> Self { - // Set every bit other than the root bit, which is preserved - BorrowFlag(self.0 | WRITING) - } - - pub(crate) fn set_unused(self) -> Self { - // Clear every bit other than the root bit, which is preserved - BorrowFlag(self.0 & ROOT) - } - - pub(crate) fn add_reading(self) -> Self { - assert!(self.borrowed() != BorrowState::Writing); - // Add 1 to the integer starting at the second binary digit. 
As our - // borrowstate is not writing, we know that overflow cannot happen, so - // this is equivalent to the following, more complicated, expression: - // - // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) + 1) << 1)) - BorrowFlag(self.0 + 0b10) - } - - pub(crate) fn sub_reading(self) -> Self { - assert!(self.borrowed() == BorrowState::Reading); - // Subtract 1 from the integer starting at the second binary digit. As - // our borrowstate is not writing or unused, we know that overflow or - // undeflow cannot happen, so this is equivalent to the following, more - // complicated, expression: - // - // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) - 1) << 1)) - BorrowFlag(self.0 - 0b10) - } - - pub(crate) fn set_rooted(self, rooted: bool) -> Self { - // Preserve the non-root bits - BorrowFlag((self.0 & !ROOT) | (rooted as usize)) - } -} diff --git a/boa_gc/src/internals/ephemeron_box.rs b/boa_gc/src/internals/eph_box.rs similarity index 70% rename from boa_gc/src/internals/ephemeron_box.rs rename to boa_gc/src/internals/eph_box.rs index 9fe1cb5ede8..effc0094f3f 100644 --- a/boa_gc/src/internals/ephemeron_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -1,37 +1,20 @@ //! This module will implement the internal types GcBox and Ephemeron use crate::trace::Trace; -use crate::Finalize; -use crate::{finalizer_safe, Gc, GcBox}; +use crate::{finalizer_safe, GcBox}; +use crate::{Finalize, Gc}; use std::cell::Cell; use std::ptr::NonNull; -/// Implementation of an Ephemeron structure -pub struct EphemeronBox { +/// Implementation of an Ephemeron cell +pub(crate) struct EphemeronBox { key: Cell>>>, value: V, } -impl EphemeronBox { - // This could panic if called in while dropping / !finalizer_safe() - pub unsafe fn new(value: &Gc) -> Self { - let ptr = NonNull::new_unchecked(value.clone().inner_ptr()); - // Clone increments root, so we need to decrement it - (*ptr.as_ptr()).unroot_inner(); - EphemeronBox { - key: Cell::new(Some(ptr)), - value: (), - } - } -} - impl EphemeronBox { - // This could panic if called while dropping / !finalizer_safe() - pub unsafe fn new_pair(key: &Gc, value: V) -> Self { - let ptr = NonNull::new_unchecked(key.clone().inner_ptr()); - // Clone increments root, so we need to decrement it - (*ptr.as_ptr()).unroot_inner(); + pub fn new_pair(key: &Gc, value: V) -> Self { EphemeronBox { - key: Cell::new(Some(ptr)), + key: Cell::new(Some(key.inner_ptr())), value, } } @@ -125,7 +108,7 @@ unsafe impl Trace for EphemeronBox { #[inline] unsafe fn unroot(&self) { - // An ephemeron is never rotted in the GcBoxHeader + // An ephemeron is never rooted in the GcBoxHeader } #[inline] diff --git a/boa_gc/src/gc_box.rs b/boa_gc/src/internals/gc_box.rs similarity index 93% rename from boa_gc/src/gc_box.rs rename to boa_gc/src/internals/gc_box.rs index f0eb6710718..f4f92837abd 100644 --- a/boa_gc/src/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -32,11 +32,6 @@ impl GcBoxHeader { } } - #[inline] - pub fn set_next(&self, next: Option>>) { - self.next.set(next); - } - #[inline] pub fn roots(&self) -> usize { self.roots.get() & ROOTS_MASK @@ -116,10 +111,6 @@ impl GcBox { ptr::eq(&this.header, &other.header) } - pub(crate) fn set_header_pointer(&self, next: Option>>) { - self.header.set_next(next) - } - /// Marks this `GcBox` and marks through its data. 
pub(crate) unsafe fn trace_inner(&self) { if !self.header.is_marked() && !self.header.is_ephemeron() { diff --git a/boa_gc/src/internals/mod.rs b/boa_gc/src/internals/mod.rs index 06dd355507f..d8c26c985b3 100644 --- a/boa_gc/src/internals/mod.rs +++ b/boa_gc/src/internals/mod.rs @@ -1,6 +1,5 @@ -pub(crate) mod borrow_flag; -pub mod cell; -pub mod ephemeron_box; +mod eph_box; +pub(crate) use eph_box::EphemeronBox; -pub use cell::*; -pub use ephemeron_box::EphemeronBox; +mod gc_box; +pub(crate) use gc_box::GcBox; diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index a0c1892c202..5c1e4b64b6d 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -11,31 +11,35 @@ clippy::borrow_deref_ref, )] +extern crate self as boa_gc; + use boa_profiler::Profiler; -use std::cell::{Cell as StdCell, RefCell as StdRefCell}; +use std::cell::{Cell, RefCell}; use std::mem; use std::ptr::NonNull; -mod gc_box; -mod internals; -mod pointers; pub mod trace; -pub use boa_gc_macros::{Finalize, Trace}; +pub(crate) mod internals; + +mod cell; +mod pointers; pub use crate::trace::{Finalize, Trace}; -pub(crate) use gc_box::GcBox; -pub use internals::{EphemeronBox, GcCell as Cell, GcCellRef as Ref, GcCellRefMut as RefMut}; +pub use boa_gc_macros::{Finalize, Trace}; +pub use cell::{GcCell, GcCellRef, GcCellRefMut}; pub use pointers::{Ephemeron, Gc, WeakGc}; -pub type GcPointer = NonNull>; +use internals::GcBox; -thread_local!(pub static EPHEMERON_QUEUE: StdCell>> = StdCell::new(None)); -thread_local!(pub static GC_DROPPING: StdCell = StdCell::new(false)); -thread_local!(static BOA_GC: StdRefCell = StdRefCell::new( BoaGc { +type GcPointer = NonNull>; + +thread_local!(static EPHEMERON_QUEUE: Cell>> = Cell::new(None)); +thread_local!(static GC_DROPPING: Cell = Cell::new(false)); +thread_local!(static BOA_GC: RefCell = RefCell::new( BoaGc { config: GcConfig::default(), runtime: GcRuntimeData::default(), - adult_start: StdCell::new(None), + adult_start: Cell::new(None), })); struct GcConfig { @@ -64,14 +68,12 @@ struct GcRuntimeData { struct BoaGc { config: GcConfig, runtime: GcRuntimeData, - adult_start: StdCell>, + adult_start: Cell>, } impl Drop for BoaGc { fn drop(&mut self) { - unsafe { - Collector::dump(self); - } + Collector::dump(self); } } // Whether or not the thread is currently in the sweep phase of garbage collection. @@ -96,98 +98,34 @@ pub fn finalizer_safe() -> bool { GC_DROPPING.with(|dropping| !dropping.get()) } -/// The GcAllocater handles initialization and allocation of garbage collected values. +/// The Allocator handles allocation of garbage collected values. 
/// /// The allocator can trigger a garbage collection -pub(crate) struct GcAlloc; +struct Allocator; -impl GcAlloc { - pub fn new(value: T) -> NonNull> { +impl Allocator { + fn new(value: GcBox) -> NonNull> { let _timer = Profiler::global().start_event("New Pointer", "BoaAlloc"); + let eph = value.header.is_ephemeron(); + let element_size = mem::size_of_val::>(&value); BOA_GC.with(|st| { let mut gc = st.borrow_mut(); - unsafe { - Self::manage_state(&mut gc); - - let gc_box = GcBox::new(value); - - let element_size = mem::size_of_val::>(&gc_box); - let element_pointer = Box::into_raw(Box::from(gc_box)); - - let old_start = gc.adult_start.take(); - (*element_pointer).set_header_pointer(old_start); - (*element_pointer).value().unroot(); - - gc.adult_start - .set(Some(NonNull::new_unchecked(element_pointer))); + Self::manage_state(&mut gc); + value.header.next.set(gc.adult_start.take()); + let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::from(value))) }; + gc.adult_start.set(Some(ptr)); + gc.runtime.total_bytes_allocated += element_size; + if !eph { gc.runtime.total_bytes_allocated += element_size; - - NonNull::new_unchecked(element_pointer) } - }) - } - - pub fn new_ephemeron( - key: &Gc, - value: V, - ) -> NonNull>> { - let _timer = Profiler::global().start_event("New Weak Pair", "BoaAlloc"); - BOA_GC.with(|internals| { - let mut gc = internals.borrow_mut(); - - unsafe { - Self::manage_state(&mut gc); - let ephem = EphemeronBox::new_pair(key, value); - let gc_box = GcBox::new_weak(ephem); - - let element_size = mem::size_of_val::>(&gc_box); - let element_pointer = Box::into_raw(Box::from(gc_box)); - - let old_start = gc.adult_start.take(); - (*element_pointer).set_header_pointer(old_start); - (*element_pointer).value().unroot(); - - gc.adult_start - .set(Some(NonNull::new_unchecked(element_pointer))); - - gc.runtime.total_bytes_allocated += element_size; - NonNull::new_unchecked(element_pointer) - } + ptr }) } - pub fn new_weak_box(value: &Gc) -> NonNull>> { - let _timer = Profiler::global().start_event("New Weak Pointer", "BoaAlloc"); - BOA_GC.with(|state| { - let mut gc = state.borrow_mut(); - - unsafe { - Self::manage_state(&mut gc); - - let ephemeron = EphemeronBox::new(value); - let gc_box = GcBox::new_weak(ephemeron); - - let element_size = mem::size_of_val::>(&gc_box); - let element_pointer = Box::into_raw(Box::from(gc_box)); - - let old_start = gc.adult_start.take(); - (*element_pointer).set_header_pointer(old_start); - (*element_pointer).value().unroot(); - - gc.adult_start - .set(Some(NonNull::new_unchecked(element_pointer))); - - gc.runtime.total_bytes_allocated += element_size; - - NonNull::new_unchecked(element_pointer) - } - }) - } - - unsafe fn manage_state(gc: &mut BoaGc) { + fn manage_state(gc: &mut BoaGc) { if gc.runtime.total_bytes_allocated > gc.config.adult_threshold { Collector::run_full_collection(gc); @@ -209,30 +147,35 @@ impl GcAlloc { // then needs to be retriggered as finalization can potentially resurrect dead // nodes. // -// A better appraoch in a more concurrent structure may be to reorder. +// A better approach in a more concurrent structure may be to reorder. 
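// The current ordering -- mark, finalize the unreachable nodes, mark again, then
// sweep -- is implemented by `Collector::run_full_collection` just below. It can be
// sketched with a safe, index-based heap; everything here is illustrative only, while
// the real collector walks the linked list of `GcBox` allocations with raw pointers.
fn collection_order_sketch() {
    struct Node {
        edges: Vec<usize>,
        marked: bool,
        finalized: bool,
    }

    fn mark(heap: &mut [Node], roots: &[usize]) {
        for node in heap.iter_mut() {
            node.marked = false;
        }
        let mut stack = roots.to_vec();
        while let Some(i) = stack.pop() {
            if !heap[i].marked {
                heap[i].marked = true;
                stack.extend(heap[i].edges.clone());
            }
        }
    }

    let mut heap = vec![
        Node { edges: vec![1], marked: false, finalized: false }, // reachable from the root
        Node { edges: vec![], marked: false, finalized: false },  // reachable through node 0
        Node { edges: vec![], marked: false, finalized: false },  // unreachable
    ];
    let roots = [0_usize];

    mark(&mut heap, &roots); // first mark pass finds the unreachable set
    for node in heap.iter_mut().filter(|node| !node.marked) {
        node.finalized = true; // finalizers may resurrect values, so ...
    }
    mark(&mut heap, &roots); // ... marking has to run once more before sweeping
    heap.retain(|node| node.marked); // sweep drops whatever is still unmarked

    assert_eq!(heap.len(), 2);
    assert!(heap.iter().all(|node| !node.finalized));
}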
// // Mark -> Sweep -> Finalize pub struct Collector; impl Collector { - pub(crate) unsafe fn run_full_collection(gc: &mut BoaGc) { + fn run_full_collection(gc: &mut BoaGc) { let _timer = Profiler::global().start_event("Gc Full Collection", "gc"); gc.runtime.collections += 1; - let unreachable_adults = Self::mark_heap(&gc.adult_start); + let unreachable_adults = unsafe { Self::mark_heap(&gc.adult_start) }; // Check if any unreachable nodes were found and finalize if !unreachable_adults.is_empty() { - Self::finalize(unreachable_adults); + unsafe { Self::finalize(unreachable_adults) }; } - let _final_unreachable_adults = Self::mark_heap(&gc.adult_start); + let _final_unreachable_adults = unsafe { Self::mark_heap(&gc.adult_start) }; // Sweep both without promoting any values - Self::sweep(&gc.adult_start, &mut gc.runtime.total_bytes_allocated); + unsafe { + Self::sweep( + &gc.adult_start, + &mut gc.runtime.total_bytes_allocated, + ); + } } - pub(crate) unsafe fn mark_heap( - head: &StdCell>>>, + unsafe fn mark_heap( + head: &Cell>>>, ) -> Vec>> { let _timer = Profiler::global().start_event("Gc Marking", "gc"); // Walk the list, tracing and marking the nodes @@ -270,28 +213,27 @@ impl Collector { ) -> Vec>> { let mut ephemeron_queue = initial_queue; loop { - let mut reachable_nodes = Vec::new(); - let mut other_nodes = Vec::new(); // iterate through ephemeron queue, sorting nodes by whether they // are reachable or unreachable - for node in ephemeron_queue { - if (*node.as_ptr()).value.is_marked_ephemeron() { - (*node.as_ptr()).header.mark(); - reachable_nodes.push(node); - } else { - other_nodes.push(node); - } - } + let (reachable, other): (Vec<_>, Vec<_>) = + ephemeron_queue.into_iter().partition(|node| { + if node.as_ref().value.is_marked_ephemeron() { + node.as_ref().header.mark(); + true + } else { + false + } + }); // Replace the old queue with the unreachable - ephemeron_queue = other_nodes; + ephemeron_queue = other; // If reachable nodes is not empty, trace values. If it is empty, // break from the loop - if !reachable_nodes.is_empty() { + if !reachable.is_empty() { EPHEMERON_QUEUE.with(|state| state.set(Some(Vec::new()))); // iterate through reachable nodes and trace their values, // enqueuing any ephemeron that is found during the trace - for node in reachable_nodes { + for node in reachable { // TODO: deal with fetch ephemeron_queue (*node.as_ptr()).weak_trace_inner() } @@ -321,7 +263,7 @@ impl Collector { } unsafe fn sweep( - heap_start: &StdCell>>>, + heap_start: &Cell>>>, total_allocated: &mut usize, ) { let _timer = Profiler::global().start_event("Gc Sweeping", "gc"); @@ -343,18 +285,14 @@ impl Collector { } // Clean up the heap when BoaGc is dropped - unsafe fn dump(gc: &mut BoaGc) { - Self::drop_heap(&gc.adult_start); - } - - unsafe fn drop_heap(heap_start: &StdCell>>>) { + fn dump(gc: &mut BoaGc) { // Not initializing a dropguard since this should only be invoked when BOA_GC is being dropped. 
let _guard = DropGuard::new(); - let sweep_head = heap_start; + let sweep_head = &gc.adult_start; while let Some(node) = sweep_head.get() { // Drops every node - let unmarked_node = Box::from_raw(node.as_ptr()); + let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) }; sweep_head.set(unmarked_node.header.next.take()); } } @@ -369,44 +307,11 @@ pub fn force_collect() { BOA_GC.with(|current| { let mut gc = current.borrow_mut(); - unsafe { - if gc.runtime.total_bytes_allocated > 0 { - Collector::run_full_collection(&mut *gc) - } + if gc.runtime.total_bytes_allocated > 0 { + Collector::run_full_collection(&mut *gc) } }) } -pub struct GcTester; - -impl GcTester { - pub fn assert_collections(o: usize) { - BOA_GC.with(|current| { - let gc = current.borrow(); - assert_eq!(gc.runtime.collections, o); - }) - } - - pub fn assert_collection_floor(floor: usize) { - BOA_GC.with(|current| { - let gc = current.borrow(); - assert!(gc.runtime.collections > floor); - }) - } - - pub fn assert_empty_gc() { - BOA_GC.with(|current| { - let gc = current.borrow(); - - assert!(gc.adult_start.get().is_none()); - assert!(gc.runtime.total_bytes_allocated == 0); - }) - } - - pub fn assert_bytes_allocated() { - BOA_GC.with(|current| { - let gc = current.borrow(); - assert!(gc.runtime.total_bytes_allocated > 0); - }) - } -} +#[cfg(test)] +mod test; diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index 939ba4f27ed..d34477e3e5f 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -2,7 +2,7 @@ use crate::{ finalizer_safe, internals::EphemeronBox, trace::{Finalize, Trace}, - Gc, GcAlloc, GcBox, EPHEMERON_QUEUE, + Allocator, Gc, GcBox, EPHEMERON_QUEUE, }; use std::cell::Cell; use std::ptr::NonNull; @@ -13,26 +13,26 @@ pub struct Ephemeron { impl Ephemeron { pub fn new(key: &Gc, value: V) -> Self { - let ephemeron_box = GcAlloc::new_ephemeron(key, value); - unsafe { - Self { - inner_ptr: Cell::new(NonNull::new_unchecked(ephemeron_box.as_ptr())), - } + unsafe { value.unroot() }; + Self { + inner_ptr: Cell::new(Allocator::new(GcBox::new_weak(EphemeronBox::new_pair( + key, value, + )))), } } } impl Ephemeron { #[inline] - fn inner_ptr(&self) -> *mut GcBox> { + fn inner_ptr(&self) -> NonNull>> { assert!(finalizer_safe()); - self.inner_ptr.get().as_ptr() + self.inner_ptr.get() } #[inline] - pub fn inner(&self) -> &GcBox> { - unsafe { &*self.inner_ptr() } + fn inner(&self) -> &GcBox> { + unsafe { &*self.inner_ptr().as_ptr() } } #[inline] @@ -61,7 +61,7 @@ unsafe impl Trace for Ephemeron { unsafe fn weak_trace(&self) { EPHEMERON_QUEUE.with(|q| { let mut queue = q.take().expect("queue is initialized by weak_trace"); - queue.push(NonNull::new_unchecked(self.inner_ptr())) + queue.push(self.inner_ptr()) }) } diff --git a/boa_gc/src/pointers/gc_ptr.rs b/boa_gc/src/pointers/gc.rs similarity index 86% rename from boa_gc/src/pointers/gc_ptr.rs rename to boa_gc/src/pointers/gc.rs index 4144c74b3bf..4d1594d3418 100644 --- a/boa_gc/src/pointers/gc_ptr.rs +++ b/boa_gc/src/pointers/gc.rs @@ -7,9 +7,9 @@ use std::ops::Deref; use std::ptr::{self, NonNull}; use std::rc::Rc; -use crate::gc_box::GcBox; +use crate::internals::GcBox; use crate::trace::{Finalize, Trace}; -use crate::{finalizer_safe, Cell as GcCell, GcAlloc}; +use crate::{finalizer_safe, Allocator}; pub(crate) unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); @@ -26,30 +26,15 @@ impl Gc { /// Constructs a new `Gc` with the given value. 
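// Rooting is tracked by tagging the stored `GcBox` address itself (note the
// `set_root` and `clear_root_bit` calls below), which works because the allocation
// is aligned and therefore has a spare low bit. A standalone sketch of that
// pointer-tagging trick, independent of `boa_gc`:
fn pointer_tag_sketch() {
    let raw = Box::into_raw(Box::new(0_u64));
    let addr = raw as usize;
    assert_eq!(addr & 1, 0); // alignment keeps the low bit free

    let tagged = addr | 1;      // "rooted": set the flag
    let untagged = tagged & !1; // clear it to recover the real address
    assert_eq!(untagged, addr);

    // Re-box through the untagged address to free the allocation.
    drop(unsafe { Box::from_raw(untagged as *mut u64) });
}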
pub fn new(value: T) -> Self { unsafe { - let pointer = GcAlloc::new(value); - - let gc = Gc { - inner_ptr: Cell::new(NonNull::new_unchecked(pointer.as_ptr())), - marker: PhantomData, - }; - gc.set_root(); - gc - } - } - - pub fn new_cell(value: T) -> Gc> { - unsafe { - let new_cell = GcCell::new(value); - - let pointer = GcAlloc::new(new_cell); - - let gc = Gc { - inner_ptr: Cell::new(NonNull::new_unchecked(pointer.as_ptr())), - marker: PhantomData, - }; - gc.set_root(); - gc + value.unroot(); } + let inner_ptr = Allocator::new(GcBox::new(value)); + let gc = Self { + inner_ptr: Cell::new(inner_ptr), + marker: PhantomData, + }; + unsafe { gc.set_root() }; + gc } } @@ -89,14 +74,14 @@ impl Gc { } #[inline] - pub(crate) fn inner_ptr(&self) -> *mut GcBox { + pub(crate) fn inner_ptr(&self) -> NonNull> { assert!(finalizer_safe()); - unsafe { clear_root_bit(self.inner_ptr.get()).as_ptr() } + unsafe { clear_root_bit(self.inner_ptr.get()) } } #[inline] fn inner(&self) -> &GcBox { - unsafe { &*self.inner_ptr() } + unsafe { self.inner_ptr().as_ref() } } } @@ -185,7 +170,7 @@ impl Drop for Gc { impl Default for Gc { #[inline] fn default() -> Self { - Self::new(Default::default()) + Gc::new(Default::default()) } } diff --git a/boa_gc/src/pointers/mod.rs b/boa_gc/src/pointers/mod.rs index bcfaac1d590..5c14182762d 100644 --- a/boa_gc/src/pointers/mod.rs +++ b/boa_gc/src/pointers/mod.rs @@ -1,9 +1,9 @@ //! Pointers represents the External types returned by the Boa Garbage Collector mod ephemeron; -mod gc_ptr; -mod weak_ptr; +mod gc; +mod weak; pub use ephemeron::Ephemeron; -pub use gc_ptr::Gc; -pub use weak_ptr::WeakGc; +pub use gc::Gc; +pub use weak::WeakGc; diff --git a/boa_gc/src/pointers/weak.rs b/boa_gc/src/pointers/weak.rs new file mode 100644 index 00000000000..7f88fba7f8a --- /dev/null +++ b/boa_gc/src/pointers/weak.rs @@ -0,0 +1,28 @@ +use crate::{Ephemeron, Finalize, Gc, Trace}; + +#[derive(Trace, Finalize)] +#[repr(transparent)] +pub struct WeakGc { + inner: Ephemeron, +} + +impl WeakGc { + pub fn new(value: &Gc) -> Self { + Self { + inner: Ephemeron::new(value, ()), + } + } +} + +impl WeakGc { + #[inline] + pub fn value(&self) -> Option<&T> { + self.inner.key() + } +} + +impl From> for WeakGc { + fn from(inner: Ephemeron) -> Self { + Self { inner } + } +} diff --git a/boa_gc/src/pointers/weak_ptr.rs b/boa_gc/src/pointers/weak_ptr.rs deleted file mode 100644 index 34d112205bd..00000000000 --- a/boa_gc/src/pointers/weak_ptr.rs +++ /dev/null @@ -1,73 +0,0 @@ -use crate::{ - finalizer_safe, - internals::EphemeronBox, - trace::{Finalize, Trace}, - Gc, GcAlloc, GcBox, EPHEMERON_QUEUE, -}; -use std::cell::Cell; -use std::ptr::NonNull; - -pub struct WeakGc { - inner_ptr: Cell>>>, -} - -impl WeakGc { - pub fn new(value: &Gc) -> Self { - let weak_box = GcAlloc::new_weak_box(value); - unsafe { - Self { - inner_ptr: Cell::new(NonNull::new_unchecked(weak_box.as_ptr())), - } - } - } -} - -impl WeakGc { - #[inline] - fn inner_ptr(&self) -> *mut GcBox> { - assert!(finalizer_safe()); - - self.inner_ptr.get().as_ptr() - } - - #[inline] - fn inner(&self) -> &GcBox> { - unsafe { &*self.inner_ptr() } - } - - #[inline] - pub fn value(&self) -> Option<&T> { - self.inner().value().key() - } -} - -impl Finalize for WeakGc {} - -unsafe impl Trace for WeakGc { - #[inline] - unsafe fn trace(&self) {} - - #[inline] - unsafe fn is_marked_ephemeron(&self) -> bool { - false - } - - #[inline] - unsafe fn weak_trace(&self) { - EPHEMERON_QUEUE.with(|q| { - let mut queue = q.take().expect("queue is initialized by weak_trace"); 
- queue.push(NonNull::new_unchecked(self.inner_ptr())) - }) - } - - #[inline] - unsafe fn root(&self) {} - - #[inline] - unsafe fn unroot(&self) {} - - #[inline] - fn run_finalizer(&self) { - Finalize::finalize(self) - } -} diff --git a/boa_gc/src/test/allocation.rs b/boa_gc/src/test/allocation.rs new file mode 100644 index 00000000000..11826fa7d38 --- /dev/null +++ b/boa_gc/src/test/allocation.rs @@ -0,0 +1,27 @@ +use super::Harness; +use crate::{force_collect, Gc, GcCell}; + +#[test] +fn gc_basic_cell_allocation() { + let gc_cell = Gc::new(GcCell::new(16_u16)); + + force_collect(); + Harness::assert_collections(1); + Harness::assert_bytes_allocated(); + assert_eq!(*gc_cell.borrow_mut(), 16); +} + +#[test] +fn gc_basic_pointer_alloc() { + let gc = Gc::new(16_u8); + + force_collect(); + Harness::assert_collections(1); + Harness::assert_bytes_allocated(); + assert_eq!(*gc, 16); + + drop(gc); + force_collect(); + Harness::assert_collections(2); + Harness::assert_empty_gc(); +} diff --git a/boa_gc/src/test/mod.rs b/boa_gc/src/test/mod.rs new file mode 100644 index 00000000000..b21eedc0ed9 --- /dev/null +++ b/boa_gc/src/test/mod.rs @@ -0,0 +1,30 @@ +use crate::BOA_GC; + +mod allocation; + +struct Harness; + +impl Harness { + pub fn assert_collections(o: usize) { + BOA_GC.with(|current| { + let gc = current.borrow(); + assert_eq!(gc.runtime.collections, o); + }) + } + + pub fn assert_empty_gc() { + BOA_GC.with(|current| { + let gc = current.borrow(); + + assert!(gc.adult_start.get().is_none()); + assert!(gc.runtime.total_bytes_allocated == 0); + }) + } + + pub fn assert_bytes_allocated() { + BOA_GC.with(|current| { + let gc = current.borrow(); + assert!(gc.runtime.total_bytes_allocated > 0); + }) + } +} diff --git a/boa_gc/src/trace.rs b/boa_gc/src/trace.rs index f63084e086b..36129bfd3aa 100644 --- a/boa_gc/src/trace.rs +++ b/boa_gc/src/trace.rs @@ -27,7 +27,9 @@ pub unsafe trait Trace: Finalize { /// Checks if an ephemeron's key is marked. /// /// Note: value should always be implemented to return false - unsafe fn is_marked_ephemeron(&self) -> bool; + unsafe fn is_marked_ephemeron(&self) -> bool { + false + } /// Returns true if a marked `Gc` is found unsafe fn weak_trace(&self); @@ -52,10 +54,6 @@ macro_rules! 
unsafe_empty_trace { #[inline] unsafe fn trace(&self) {} #[inline] - unsafe fn is_marked_ephemeron(&self) -> bool { - false - } - #[inline] unsafe fn weak_trace(&self) {} #[inline] unsafe fn root(&self) {} diff --git a/boa_gc/tests/allocation.rs b/boa_gc/tests/allocation.rs deleted file mode 100644 index 42e618973d4..00000000000 --- a/boa_gc/tests/allocation.rs +++ /dev/null @@ -1,26 +0,0 @@ -use boa_gc::{force_collect, Gc, GcTester}; - -#[test] -fn gc_basic_cell_allocation() { - let gc_cell = Gc::new_cell(16_u16); - - force_collect(); - GcTester::assert_collections(1); - GcTester::assert_bytes_allocated(); - assert_eq!(*gc_cell.borrow_mut(), 16); -} - -#[test] -fn gc_basic_pointer_alloc() { - let gc = Gc::new(16_u8); - - force_collect(); - GcTester::assert_collections(1); - GcTester::assert_bytes_allocated(); - assert_eq!(*gc, 16); - - drop(gc); - force_collect(); - GcTester::assert_collections(2); - GcTester::assert_empty_gc(); -} diff --git a/boa_gc/tests/boa.rs b/boa_gc/tests/boa.rs index 1e40a62aede..3c825887eab 100644 --- a/boa_gc/tests/boa.rs +++ b/boa_gc/tests/boa.rs @@ -1,11 +1,11 @@ -use boa_gc::Gc; +use boa_gc::{Gc, GcCell}; #[test] fn boa_borrow_mut_test() { - let v = Gc::new_cell(Vec::new()); + let v = Gc::new(GcCell::new(Vec::new())); for _ in 1..=259 { - let cell = Gc::new_cell([0u8; 10]); + let cell = Gc::new(GcCell::new([0u8; 10])); v.borrow_mut().push(cell); } } diff --git a/boa_tester/src/exec/mod.rs b/boa_tester/src/exec/mod.rs index 26e067bae89..dc948894646 100644 --- a/boa_tester/src/exec/mod.rs +++ b/boa_tester/src/exec/mod.rs @@ -12,7 +12,7 @@ use boa_engine::{ builtins::JsArgs, object::FunctionBuilder, property::Attribute, Context, JsNativeErrorKind, JsResult, JsValue, }; -use boa_gc::{Cell, Finalize, Gc, Trace}; +use boa_gc::{Finalize, Gc, GcCell, Trace}; use boa_parser::Parser; use colored::Colorize; use rayon::prelude::*; @@ -400,13 +400,13 @@ impl Test { /// Object which includes the result of the async operation. 
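// `AsyncResult` below is a typical consumer of the collector: it derives `Trace` and
// `Finalize` and keeps its shared state behind `Gc<GcCell<..>>`. A minimal sketch of
// the same pattern with a made-up type, assuming the derives re-exported from `boa_gc`:
fn derive_usage_sketch() {
    use boa_gc::{Finalize, Gc, GcCell, Trace};

    #[derive(Trace, Finalize, Clone)]
    struct Shared {
        inner: Gc<GcCell<i32>>,
    }

    let original = Shared { inner: Gc::new(GcCell::new(0)) };
    let alias = original.clone();
    *alias.inner.borrow_mut() += 1;
    assert_eq!(*original.inner.borrow(), 1);
}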
#[derive(Debug, Clone, Trace, Finalize)] struct AsyncResult { - inner: Gc>>, + inner: Gc>>, } impl Default for AsyncResult { fn default() -> Self { Self { - inner: Gc::new_cell(Ok(())), + inner: Gc::new(GcCell::new(Ok(()))), } } } From b7ae55b36c16db9f1c48d054f479f752993c1ed6 Mon Sep 17 00:00:00 2001 From: jedel1043 Date: Tue, 8 Nov 2022 00:48:30 -0600 Subject: [PATCH 32/55] Provide default for `is_marked_ephemeron` --- boa_gc/derive_macros/src/lib.rs | 3 --- boa_gc/src/cell.rs | 5 ----- boa_gc/src/lib.rs | 9 +-------- boa_gc/src/pointers/ephemeron.rs | 5 ----- boa_gc/src/pointers/gc.rs | 5 ----- boa_gc/src/trace.rs | 4 ---- 6 files changed, 1 insertion(+), 30 deletions(-) diff --git a/boa_gc/derive_macros/src/lib.rs b/boa_gc/derive_macros/src/lib.rs index 7f2c000710d..69b26115b2c 100644 --- a/boa_gc/derive_macros/src/lib.rs +++ b/boa_gc/derive_macros/src/lib.rs @@ -24,9 +24,6 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { } match *self { #trace_body } } - #[inline] unsafe fn is_marked_ephemeron(&self) -> bool { - false - } #[inline] unsafe fn weak_trace(&self) { #[allow(dead_code, unreachable_code)] #[inline] diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index e49ad18afaa..25b676d8cda 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -220,11 +220,6 @@ unsafe impl Trace for GcCell { } } - #[inline] - unsafe fn is_marked_ephemeron(&self) -> bool { - false - } - #[inline] unsafe fn weak_trace(&self) { match self.flags.get().borrowed() { diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 5c1e4b64b6d..98d32db502f 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -106,7 +106,6 @@ struct Allocator; impl Allocator { fn new(value: GcBox) -> NonNull> { let _timer = Profiler::global().start_event("New Pointer", "BoaAlloc"); - let eph = value.header.is_ephemeron(); let element_size = mem::size_of_val::>(&value); BOA_GC.with(|st| { let mut gc = st.borrow_mut(); @@ -117,9 +116,6 @@ impl Allocator { gc.adult_start.set(Some(ptr)); gc.runtime.total_bytes_allocated += element_size; - if !eph { - gc.runtime.total_bytes_allocated += element_size; - } ptr }) @@ -167,10 +163,7 @@ impl Collector { // Sweep both without promoting any values unsafe { - Self::sweep( - &gc.adult_start, - &mut gc.runtime.total_bytes_allocated, - ); + Self::sweep(&gc.adult_start, &mut gc.runtime.total_bytes_allocated); } } diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index d34477e3e5f..cd59b9c905d 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -52,11 +52,6 @@ unsafe impl Trace for Ephemeron { #[inline] unsafe fn trace(&self) {} - #[inline] - unsafe fn is_marked_ephemeron(&self) -> bool { - false - } - #[inline] unsafe fn weak_trace(&self) { EPHEMERON_QUEUE.with(|q| { diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index 4d1594d3418..602a181449b 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -93,11 +93,6 @@ unsafe impl Trace for Gc { self.inner().trace_inner(); } - #[inline] - unsafe fn is_marked_ephemeron(&self) -> bool { - false - } - #[inline] unsafe fn weak_trace(&self) { self.inner().weak_trace_inner(); diff --git a/boa_gc/src/trace.rs b/boa_gc/src/trace.rs index 36129bfd3aa..405b233b928 100644 --- a/boa_gc/src/trace.rs +++ b/boa_gc/src/trace.rs @@ -84,10 +84,6 @@ macro_rules! 
custom_trace { $body } #[inline] - unsafe fn is_marked_ephemeron(&self) -> bool { - false - } - #[inline] unsafe fn weak_trace(&self) { #[inline] unsafe fn mark(it: &T) { From bb5de24d54aa32ad10c9165380b812ab6c69b4cd Mon Sep 17 00:00:00 2001 From: nekevss Date: Tue, 8 Nov 2022 20:47:59 -0500 Subject: [PATCH 33/55] Address errors from merge --- boa_gc/src/internals/eph_box.rs | 2 +- boa_gc/src/internals/gc_box.rs | 3 +- boa_gc/src/lib.rs | 1 - boa_gc/src/pointers/ephemeron.rs | 3 +- boa_gc/src/pointers/gc.rs | 7 ++-- boa_gc/{tests/boa.rs => src/test/cell.rs} | 0 boa_gc/src/test/mod.rs | 2 ++ boa_gc/src/test/weak.rs | 44 +++++++++++++++++++++++ 8 files changed, 53 insertions(+), 9 deletions(-) rename boa_gc/{tests/boa.rs => src/test/cell.rs} (100%) create mode 100644 boa_gc/src/test/weak.rs diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/eph_box.rs index effc0094f3f..55969bbe597 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -12,7 +12,7 @@ pub(crate) struct EphemeronBox EphemeronBox { - pub fn new_pair(key: &Gc, value: V) -> Self { + pub fn new(key: &Gc, value: V) -> Self { EphemeronBox { key: Cell::new(Some(key.inner_ptr())), value, diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index f4f92837abd..a9720230318 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -16,7 +16,6 @@ pub(crate) struct GcBoxHeader { impl GcBoxHeader { #[inline] pub fn new() -> Self { - // TODO: implement a way for a cell to start out weak with WEAK_MASK GcBoxHeader { roots: Cell::new(1), next: Cell::new(None), @@ -27,7 +26,7 @@ impl GcBoxHeader { pub fn new_weak() -> Self { // Set weak_flag GcBoxHeader { - roots: Cell::new(0), + roots: Cell::new(WEAK_MASK), next: Cell::new(None), } } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 98d32db502f..90ba630d955 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -161,7 +161,6 @@ impl Collector { let _final_unreachable_adults = unsafe { Self::mark_heap(&gc.adult_start) }; - // Sweep both without promoting any values unsafe { Self::sweep(&gc.adult_start, &mut gc.runtime.total_bytes_allocated); } diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index cd59b9c905d..390b5c9ca6b 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -13,9 +13,8 @@ pub struct Ephemeron { impl Ephemeron { pub fn new(key: &Gc, value: V) -> Self { - unsafe { value.unroot() }; Self { - inner_ptr: Cell::new(Allocator::new(GcBox::new_weak(EphemeronBox::new_pair( + inner_ptr: Cell::new(Allocator::new(GcBox::new_weak(EphemeronBox::new( key, value, )))), } diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index 602a181449b..e5012b1b8fa 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -25,10 +25,11 @@ pub struct Gc { impl Gc { /// Constructs a new `Gc` with the given value. pub fn new(value: T) -> Self { - unsafe { - value.unroot(); - } + // Create GcBox and allocate it to heap. 
+ // + // Note: Allocator can cause Collector to run let inner_ptr = Allocator::new(GcBox::new(value)); + unsafe { (*inner_ptr.as_ptr()).value().unroot() } let gc = Self { inner_ptr: Cell::new(inner_ptr), marker: PhantomData, diff --git a/boa_gc/tests/boa.rs b/boa_gc/src/test/cell.rs similarity index 100% rename from boa_gc/tests/boa.rs rename to boa_gc/src/test/cell.rs diff --git a/boa_gc/src/test/mod.rs b/boa_gc/src/test/mod.rs index b21eedc0ed9..e926492d477 100644 --- a/boa_gc/src/test/mod.rs +++ b/boa_gc/src/test/mod.rs @@ -1,6 +1,8 @@ use crate::BOA_GC; mod allocation; +mod cell; +mod weak; struct Harness; diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs new file mode 100644 index 00000000000..880984d70dd --- /dev/null +++ b/boa_gc/src/test/weak.rs @@ -0,0 +1,44 @@ +use boa_gc::{force_collect, Ephemeron, Gc, WeakGc}; + +#[test] +fn eph_weak_gc_test() { + let gc_value = Gc::new(3); + + { + let cloned_gc = gc_value.clone(); + + let weak = WeakGc::new(&cloned_gc); + + assert_eq!(*weak.value().expect("Is live currently"), 3); + drop(cloned_gc); + force_collect(); + assert_eq!(*weak.value().expect("WeakGc is still live here"), 3); + + drop(gc_value); + force_collect(); + + assert!(weak.value().is_none()) + } +} + +#[test] +fn eph_ephemeron_test() { + let gc_value = Gc::new(3); + + { + let cloned_gc = gc_value.clone(); + + let ephemeron = Ephemeron::new(&cloned_gc, String::from("Hello World!")); + + assert_eq!(*ephemeron.key().expect("Ephemeron is live"), 3); + assert_eq!(*ephemeron.value(), String::from("Hello World!")); + drop(cloned_gc); + force_collect(); + assert_eq!(*ephemeron.key().expect("Ephemeron is still live here"), 3); + + drop(gc_value); + force_collect(); + + assert!(ephemeron.key().is_none()); + } +} From b0732a47766d00708f84f1caf7b5b4f4fd4224a1 Mon Sep 17 00:00:00 2001 From: nekevss Date: Tue, 8 Nov 2022 23:40:38 -0500 Subject: [PATCH 34/55] Address reviews add root for ephemeron --- boa_gc/Cargo.toml | 2 +- boa_gc/derive_macros/Cargo.toml | 16 ------ boa_gc/derive_macros/src/lib.rs | 87 -------------------------------- boa_gc/src/internals/gc_box.rs | 2 +- boa_gc/src/lib.rs | 10 +++- boa_gc/src/pointers/ephemeron.rs | 9 +++- boa_macros/Cargo.toml | 3 +- boa_macros/src/lib.rs | 86 +++++++++++++++++++++++++++++++ 8 files changed, 107 insertions(+), 108 deletions(-) delete mode 100644 boa_gc/derive_macros/Cargo.toml delete mode 100644 boa_gc/derive_macros/src/lib.rs diff --git a/boa_gc/Cargo.toml b/boa_gc/Cargo.toml index 4fd137aa351..ceaa195cdc6 100644 --- a/boa_gc/Cargo.toml +++ b/boa_gc/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true [dependencies] boa_profiler.workspace = true -boa_gc_macros = { path = "derive_macros", version = "0.16.0" } +boa_macros.workspace = true # Optional Dependencies measureme = { version = "10.1.0", optional = true } diff --git a/boa_gc/derive_macros/Cargo.toml b/boa_gc/derive_macros/Cargo.toml deleted file mode 100644 index c00174f2bfa..00000000000 --- a/boa_gc/derive_macros/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "boa_gc_macros" -version = "0.16.0" -description = "Garbage collector for the Boa JavaScript engine." 
-keywords = ["javascript", "js", "garbage", "memory", "derive"] -edition = "2021" - -[lib] -name = "boa_gc_macros" -proc-macro = true - -[dependencies] -syn = "1.0" -proc-macro2 = "1.0" -quote = "1.0" -synstructure = "0.12" diff --git a/boa_gc/derive_macros/src/lib.rs b/boa_gc/derive_macros/src/lib.rs deleted file mode 100644 index 69b26115b2c..00000000000 --- a/boa_gc/derive_macros/src/lib.rs +++ /dev/null @@ -1,87 +0,0 @@ -use quote::quote; -use synstructure::{decl_derive, AddBounds, Structure}; - -decl_derive!([Trace, attributes(unsafe_ignore_trace)] => derive_trace); - -fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { - s.filter(|bi| { - !bi.ast() - .attrs - .iter() - .any(|attr| attr.path.is_ident("unsafe_ignore_trace")) - }); - let trace_body = s.each(|bi| quote!(mark(#bi))); - - s.add_bounds(AddBounds::Fields); - let trace_impl = s.unsafe_bound_impl( - quote!(::boa_gc::Trace), - quote! { - #[inline] unsafe fn trace(&self) { - #[allow(dead_code)] - #[inline] - unsafe fn mark(it: &T) { - ::boa_gc::Trace::trace(it); - } - match *self { #trace_body } - } - #[inline] unsafe fn weak_trace(&self) { - #[allow(dead_code, unreachable_code)] - #[inline] - unsafe fn mark(it: &T) { - ::boa_gc::Trace::weak_trace(it) - } - match *self { #trace_body } - } - #[inline] unsafe fn root(&self) { - #[allow(dead_code)] - #[inline] - unsafe fn mark(it: &T) { - ::boa_gc::Trace::root(it); - } - match *self { #trace_body } - } - #[inline] unsafe fn unroot(&self) { - #[allow(dead_code)] - #[inline] - unsafe fn mark(it: &T) { - ::boa_gc::Trace::unroot(it); - } - match *self { #trace_body } - } - #[inline] fn run_finalizer(&self) { - ::boa_gc::Finalize::finalize(self); - #[allow(dead_code)] - #[inline] - fn mark(it: &T) { - ::boa_gc::Trace::run_finalizer(it); - } - match *self { #trace_body } - } - }, - ); - - // We also implement drop to prevent unsafe drop implementations on this - // type and encourage people to use Finalize. This implementation will - // call `Finalize::finalize` if it is safe to do so. - let drop_impl = s.unbound_impl( - quote!(::std::ops::Drop), - quote! { - fn drop(&mut self) { - if ::boa_gc::finalizer_safe() { - ::boa_gc::Finalize::finalize(self); - } - } - }, - ); - - quote! 
{ - #trace_impl - #drop_impl - } -} - -decl_derive!([Finalize] => derive_finalize); - -fn derive_finalize(s: Structure<'_>) -> proc_macro2::TokenStream { - s.unbound_impl(quote!(::boa_gc::Finalize), quote!()) -} diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index a9720230318..fbb3e271891 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -26,7 +26,7 @@ impl GcBoxHeader { pub fn new_weak() -> Self { // Set weak_flag GcBoxHeader { - roots: Cell::new(WEAK_MASK), + roots: Cell::new(WEAK_MASK | 1), next: Cell::new(None), } } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 90ba630d955..05626420b0d 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -5,6 +5,7 @@ clippy::should_implement_trait, clippy::match_like_matches_macro, clippy::new_ret_no_self, + clippy::needless_bool, // Putting the below on the allow list for now, but these should eventually be addressed clippy::missing_safety_doc, clippy::explicit_auto_deref, @@ -26,7 +27,7 @@ mod cell; mod pointers; pub use crate::trace::{Finalize, Trace}; -pub use boa_gc_macros::{Finalize, Trace}; +pub use boa_macros::{Finalize, Trace}; pub use cell::{GcCell, GcCellRef, GcCellRefMut}; pub use pointers::{Ephemeron, Gc, WeakGc}; @@ -212,6 +213,8 @@ impl Collector { if node.as_ref().value.is_marked_ephemeron() { node.as_ref().header.mark(); true + } else if node.as_ref().header.roots() > 0 { + true } else { false } @@ -266,6 +269,11 @@ impl Collector { if (*node.as_ptr()).is_marked() { (*node.as_ptr()).header.unmark(); sweep_head = &(*node.as_ptr()).header.next; + } else if (*node.as_ptr()).header.is_ephemeron() && (*node.as_ptr()).header.roots() > 0 + { + // Keep the ephemeron box's alive if rooted, but note that it's pointer is no longer safe + Trace::run_finalizer(&(*node.as_ptr()).value); + sweep_head = &(*node.as_ptr()).header.next; } else { // Drops occur here let unmarked_node = Box::from_raw(node.as_ptr()); diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index 390b5c9ca6b..3a45f3fc04c 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -7,7 +7,7 @@ use crate::{ use std::cell::Cell; use std::ptr::NonNull; -pub struct Ephemeron { +pub struct Ephemeron { inner_ptr: Cell>>>, } @@ -70,3 +70,10 @@ unsafe impl Trace for Ephemeron { Finalize::finalize(self) } } + +impl Drop for Ephemeron { + #[inline] + fn drop(&mut self) { + unsafe { self.inner().unroot_inner() } + } +} diff --git a/boa_macros/Cargo.toml b/boa_macros/Cargo.toml index d68aae7e972..048fd0f0d67 100644 --- a/boa_macros/Cargo.toml +++ b/boa_macros/Cargo.toml @@ -15,4 +15,5 @@ proc-macro = true [dependencies] quote = "1.0.21" syn = "1.0.103" - +proc-macro2 = "1.0" +synstructure = "0.12" diff --git a/boa_macros/src/lib.rs b/boa_macros/src/lib.rs index 205905786c9..f38c5d2b2fb 100644 --- a/boa_macros/src/lib.rs +++ b/boa_macros/src/lib.rs @@ -1,6 +1,7 @@ use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, LitStr}; +use synstructure::{decl_derive, AddBounds, Structure}; /// Construct a utf-16 array literal from a utf-8 [`str`] literal. 
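// A small usage sketch for the macro documented above (assuming it is invoked through
// the `boa_macros` crate as defined here): the call expands at compile time to a
// `[u16; N]` array literal of UTF-16 code units.
fn utf16_literal_sketch() {
    use boa_macros::utf16;

    let units: &[u16] = &utf16!("boa");
    assert_eq!(units, &['b' as u16, 'o' as u16, 'a' as u16]);
}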
#[proc_macro] @@ -13,3 +14,88 @@ pub fn utf16(input: TokenStream) -> TokenStream { } .into() } + +decl_derive!([Trace, attributes(unsafe_ignore_trace)] => derive_trace); + +fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { + s.filter(|bi| { + !bi.ast() + .attrs + .iter() + .any(|attr| attr.path.is_ident("unsafe_ignore_trace")) + }); + let trace_body = s.each(|bi| quote!(mark(#bi))); + + s.add_bounds(AddBounds::Fields); + let trace_impl = s.unsafe_bound_impl( + quote!(::boa_gc::Trace), + quote! { + #[inline] unsafe fn trace(&self) { + #[allow(dead_code)] + #[inline] + unsafe fn mark(it: &T) { + ::boa_gc::Trace::trace(it); + } + match *self { #trace_body } + } + #[inline] unsafe fn weak_trace(&self) { + #[allow(dead_code, unreachable_code)] + #[inline] + unsafe fn mark(it: &T) { + ::boa_gc::Trace::weak_trace(it) + } + match *self { #trace_body } + } + #[inline] unsafe fn root(&self) { + #[allow(dead_code)] + #[inline] + unsafe fn mark(it: &T) { + ::boa_gc::Trace::root(it); + } + match *self { #trace_body } + } + #[inline] unsafe fn unroot(&self) { + #[allow(dead_code)] + #[inline] + unsafe fn mark(it: &T) { + ::boa_gc::Trace::unroot(it); + } + match *self { #trace_body } + } + #[inline] fn run_finalizer(&self) { + ::boa_gc::Finalize::finalize(self); + #[allow(dead_code)] + #[inline] + fn mark(it: &T) { + ::boa_gc::Trace::run_finalizer(it); + } + match *self { #trace_body } + } + }, + ); + + // We also implement drop to prevent unsafe drop implementations on this + // type and encourage people to use Finalize. This implementation will + // call `Finalize::finalize` if it is safe to do so. + let drop_impl = s.unbound_impl( + quote!(::std::ops::Drop), + quote! { + fn drop(&mut self) { + if ::boa_gc::finalizer_safe() { + ::boa_gc::Finalize::finalize(self); + } + } + }, + ); + + quote! 
{ + #trace_impl + #drop_impl + } +} + +decl_derive!([Finalize] => derive_finalize); + +fn derive_finalize(s: Structure<'_>) -> proc_macro2::TokenStream { + s.unbound_impl(quote!(::boa_gc::Finalize), quote!()) +} From 0fe6c9a013a04aad5cb1293c63eae18a3060bee4 Mon Sep 17 00:00:00 2001 From: jedel1043 Date: Wed, 9 Nov 2022 09:23:50 -0600 Subject: [PATCH 35/55] Move tests to thread --- Cargo.lock | 14 ++------- boa_gc/src/test/allocation.rs | 34 +++++++++++---------- boa_gc/src/test/cell.rs | 14 +++++---- boa_gc/src/test/mod.rs | 5 ++++ boa_gc/src/test/weak.rs | 56 +++++++++++++++++++---------------- 5 files changed, 67 insertions(+), 56 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 29fa520fed1..719e0bb0d62 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -154,21 +154,11 @@ dependencies = [ name = "boa_gc" version = "0.16.0" dependencies = [ - "boa_gc_macros", + "boa_macros", "boa_profiler", "measureme", ] -[[package]] -name = "boa_gc_macros" -version = "0.16.0" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - [[package]] name = "boa_interner" version = "0.16.0" @@ -187,8 +177,10 @@ dependencies = [ name = "boa_macros" version = "0.16.0" dependencies = [ + "proc-macro2", "quote", "syn", + "synstructure", ] [[package]] diff --git a/boa_gc/src/test/allocation.rs b/boa_gc/src/test/allocation.rs index 11826fa7d38..d318c7e04ff 100644 --- a/boa_gc/src/test/allocation.rs +++ b/boa_gc/src/test/allocation.rs @@ -1,27 +1,31 @@ -use super::Harness; +use super::{run_test, Harness}; use crate::{force_collect, Gc, GcCell}; #[test] fn gc_basic_cell_allocation() { - let gc_cell = Gc::new(GcCell::new(16_u16)); + run_test(|| { + let gc_cell = Gc::new(GcCell::new(16_u16)); - force_collect(); - Harness::assert_collections(1); - Harness::assert_bytes_allocated(); - assert_eq!(*gc_cell.borrow_mut(), 16); + force_collect(); + Harness::assert_collections(1); + Harness::assert_bytes_allocated(); + assert_eq!(*gc_cell.borrow_mut(), 16); + }); } #[test] fn gc_basic_pointer_alloc() { - let gc = Gc::new(16_u8); + run_test(|| { + let gc = Gc::new(16_u8); - force_collect(); - Harness::assert_collections(1); - Harness::assert_bytes_allocated(); - assert_eq!(*gc, 16); + force_collect(); + Harness::assert_collections(1); + Harness::assert_bytes_allocated(); + assert_eq!(*gc, 16); - drop(gc); - force_collect(); - Harness::assert_collections(2); - Harness::assert_empty_gc(); + drop(gc); + force_collect(); + Harness::assert_collections(2); + Harness::assert_empty_gc(); + }) } diff --git a/boa_gc/src/test/cell.rs b/boa_gc/src/test/cell.rs index 3c825887eab..1e82acf1e7c 100644 --- a/boa_gc/src/test/cell.rs +++ b/boa_gc/src/test/cell.rs @@ -1,11 +1,15 @@ use boa_gc::{Gc, GcCell}; +use super::run_test; + #[test] fn boa_borrow_mut_test() { - let v = Gc::new(GcCell::new(Vec::new())); + run_test(|| { + let v = Gc::new(GcCell::new(Vec::new())); - for _ in 1..=259 { - let cell = Gc::new(GcCell::new([0u8; 10])); - v.borrow_mut().push(cell); - } + for _ in 1..=259 { + let cell = Gc::new(GcCell::new([0u8; 10])); + v.borrow_mut().push(cell); + } + }); } diff --git a/boa_gc/src/test/mod.rs b/boa_gc/src/test/mod.rs index e926492d477..df457505497 100644 --- a/boa_gc/src/test/mod.rs +++ b/boa_gc/src/test/mod.rs @@ -30,3 +30,8 @@ impl Harness { }) } } + +fn run_test(test: impl FnOnce() + Send + 'static) { + let handle = std::thread::spawn(test); + handle.join().unwrap(); +} diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs index 880984d70dd..a4f5ad0d82c 100644 --- a/boa_gc/src/test/weak.rs +++ 
b/boa_gc/src/test/weak.rs @@ -1,44 +1,50 @@ use boa_gc::{force_collect, Ephemeron, Gc, WeakGc}; +use super::run_test; + #[test] fn eph_weak_gc_test() { - let gc_value = Gc::new(3); + run_test(|| { + let gc_value = Gc::new(3); - { - let cloned_gc = gc_value.clone(); + { + let cloned_gc = gc_value.clone(); - let weak = WeakGc::new(&cloned_gc); + let weak = WeakGc::new(&cloned_gc); - assert_eq!(*weak.value().expect("Is live currently"), 3); - drop(cloned_gc); - force_collect(); - assert_eq!(*weak.value().expect("WeakGc is still live here"), 3); + assert_eq!(*weak.value().expect("Is live currently"), 3); + drop(cloned_gc); + force_collect(); + assert_eq!(*weak.value().expect("WeakGc is still live here"), 3); - drop(gc_value); - force_collect(); + drop(gc_value); + force_collect(); - assert!(weak.value().is_none()) - } + assert!(weak.value().is_none()) + } + }); } #[test] fn eph_ephemeron_test() { - let gc_value = Gc::new(3); + run_test(|| { + let gc_value = Gc::new(3); - { - let cloned_gc = gc_value.clone(); + { + let cloned_gc = gc_value.clone(); - let ephemeron = Ephemeron::new(&cloned_gc, String::from("Hello World!")); + let ephemeron = Ephemeron::new(&cloned_gc, String::from("Hello World!")); - assert_eq!(*ephemeron.key().expect("Ephemeron is live"), 3); - assert_eq!(*ephemeron.value(), String::from("Hello World!")); - drop(cloned_gc); - force_collect(); - assert_eq!(*ephemeron.key().expect("Ephemeron is still live here"), 3); + assert_eq!(*ephemeron.key().expect("Ephemeron is live"), 3); + assert_eq!(*ephemeron.value(), String::from("Hello World!")); + drop(cloned_gc); + force_collect(); + assert_eq!(*ephemeron.key().expect("Ephemeron is still live here"), 3); - drop(gc_value); - force_collect(); + drop(gc_value); + force_collect(); - assert!(ephemeron.key().is_none()); - } + assert!(ephemeron.key().is_none()); + } + }); } From e0df35c60504058100f81d9decfe6baebb33927d Mon Sep 17 00:00:00 2001 From: jedel1043 Date: Wed, 9 Nov 2022 13:13:49 -0600 Subject: [PATCH 36/55] Fix clippy lints --- boa_gc/src/cell.rs | 39 ++++++---- boa_gc/src/internals/eph_box.rs | 19 +++-- boa_gc/src/internals/gc_box.rs | 29 ++++--- boa_gc/src/lib.rs | 126 +++++++++++++++++++------------ boa_gc/src/pointers/ephemeron.rs | 18 ++++- boa_gc/src/pointers/gc.rs | 20 ++--- boa_gc/src/pointers/weak.rs | 8 +- boa_gc/src/test/allocation.rs | 2 +- boa_gc/src/test/mod.rs | 16 ++-- boa_gc/src/test/weak.rs | 2 +- boa_gc/src/trace.rs | 12 +-- 11 files changed, 172 insertions(+), 119 deletions(-) diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index 25b676d8cda..1f88d7c788d 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -34,10 +34,7 @@ impl BorrowFlag { } pub(crate) fn rooted(self) -> bool { - match self.0 & ROOT { - 0 => false, - _ => true, - } + self.0 & ROOT > 0 } pub(crate) fn set_writing(self) -> Self { @@ -57,7 +54,14 @@ impl BorrowFlag { // this is equivalent to the following, more complicated, expression: // // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) + 1) << 1)) - BorrowFlag(self.0 + 0b10) + let flags = BorrowFlag(self.0 + 0b10); + + // This will fail if the borrow count overflows, which shouldn't happen, + // but let's be safe + { + assert!(flags.borrowed() == BorrowState::Reading); + } + flags } pub(crate) fn sub_reading(self) -> Self { @@ -73,7 +77,7 @@ impl BorrowFlag { pub(crate) fn set_rooted(self, rooted: bool) -> Self { // Preserve the non-root bits - BorrowFlag((self.0 & !ROOT) | (rooted as usize)) + BorrowFlag((self.0 & !ROOT) | (usize::from(rooted))) } } @@ -144,16 +148,15 @@ impl GcCell 
{ /// /// This is the non-panicking variant of [`borrow`](#method.borrow). /// + /// # Errors + /// + /// Returns an `Err` if the value is currently mutably borrowed. pub fn try_borrow(&self) -> Result, BorrowError> { if self.flags.get().borrowed() == BorrowState::Writing { return Err(BorrowError); } self.flags.set(self.flags.get().add_reading()); - // This will fail if the borrow count overflows, which shouldn't happen, - // but let's be safe - assert!(self.flags.get().borrowed() == BorrowState::Reading); - unsafe { Ok(GcCellRef { flags: &self.flags, @@ -168,6 +171,10 @@ impl GcCell { /// The value cannot be borrowed while this borrow is active. /// /// This is the non-panicking variant of [`borrow_mut`](#method.borrow_mut). + /// + /// # Errors + /// + /// Returns an `Err` if the value is currently borrowed. pub fn try_borrow_mut(&self) -> Result, BorrowMutError> { if self.flags.get().borrowed() != BorrowState::Unused { return Err(BorrowMutError); @@ -193,7 +200,7 @@ impl GcCell { #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Default, Hash)] pub struct BorrowError; -impl std::fmt::Display for BorrowError { +impl Display for BorrowError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { Display::fmt("GcCell already mutably borrowed", f) } @@ -203,7 +210,7 @@ impl std::fmt::Display for BorrowError { #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Default, Hash)] pub struct BorrowMutError; -impl std::fmt::Display for BorrowMutError { +impl Display for BorrowMutError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { Display::fmt("GcCell already borrowed", f) } @@ -275,6 +282,8 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> { /// would interfere with the use of `c.borrow().clone()` to clone /// the contents of a `GcCell`. #[inline] + #[allow(clippy::should_implement_trait)] + #[must_use] pub fn clone(orig: &GcCellRef<'a, T>) -> GcCellRef<'a, T> { orig.flags.set(orig.flags.get().add_reading()); GcCellRef { @@ -312,7 +321,7 @@ impl<'a, T: ?Sized> GcCellRef<'a, T> { /// /// The `GcCell` is already immutably borrowed, so this cannot fail. /// - /// This is an associated function that needs to be used as GcCellRef::map_split(...). + /// This is an associated function that needs to be used as `GcCellRef::map_split(...)`. /// A method would interfere with methods of the same name on the contents of a `GcCellRef` used through `Deref`. #[inline] pub fn map_split(orig: Self, f: F) -> (GcCellRef<'a, U>, GcCellRef<'a, V>) @@ -443,7 +452,7 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> { impl<'a, T: Trace + ?Sized, U: Debug + ?Sized> Debug for GcCellRefMut<'a, T, U> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Debug::fmt(&*(self.deref()), f) + Debug::fmt(&**self, f) } } @@ -469,6 +478,7 @@ impl Default for GcCell { } } +#[allow(clippy::inline_always)] impl PartialEq for GcCell { #[inline(always)] fn eq(&self, other: &Self) -> bool { @@ -478,6 +488,7 @@ impl PartialEq for GcCell { impl Eq for GcCell {} +#[allow(clippy::inline_always)] impl PartialOrd for GcCell { #[inline(always)] fn partial_cmp(&self, other: &Self) -> Option { diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/eph_box.rs index 55969bbe597..a8f43735634 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -1,18 +1,17 @@ -//! 
This module will implement the internal types GcBox and Ephemeron use crate::trace::Trace; use crate::{finalizer_safe, GcBox}; use crate::{Finalize, Gc}; use std::cell::Cell; use std::ptr::NonNull; -/// Implementation of an Ephemeron cell +/// The inner allocation of an [`Ephemeron`][crate::Ephemeron] pointer. pub(crate) struct EphemeronBox { key: Cell>>>, value: V, } impl EphemeronBox { - pub fn new(key: &Gc, value: V) -> Self { + pub(crate) fn new(key: &Gc, value: V) -> Self { EphemeronBox { key: Cell::new(Some(key.inner_ptr())), value, @@ -33,7 +32,7 @@ impl EphemeronBox { #[inline] fn inner_key_ptr(&self) -> Option<*mut GcBox> { assert!(finalizer_safe()); - self.key.get().map(|key_node| key_node.as_ptr()) + self.key.get().map(NonNull::as_ptr) } #[inline] @@ -48,7 +47,7 @@ impl EphemeronBox { } #[inline] - pub fn key(&self) -> Option<&K> { + pub(crate) fn key(&self) -> Option<&K> { if let Some(key_box) = self.inner_key() { Some(key_box.value()) } else { @@ -57,27 +56,27 @@ impl EphemeronBox { } #[inline] - pub fn value(&self) -> &V { + pub(crate) fn value(&self) -> &V { &self.value } #[inline] unsafe fn weak_trace_key(&self) { if let Some(key) = self.inner_key() { - key.weak_trace_inner() + key.weak_trace_inner(); } } #[inline] unsafe fn weak_trace_value(&self) { - self.value().weak_trace() + self.value().weak_trace(); } } impl Finalize for EphemeronBox { #[inline] fn finalize(&self) { - self.key.set(None) + self.key.set(None); } } @@ -113,6 +112,6 @@ unsafe impl Trace for EphemeronBox { #[inline] fn run_finalizer(&self) { - Finalize::finalize(self) + Finalize::finalize(self); } } diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index fbb3e271891..71a00009b56 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -15,7 +15,7 @@ pub(crate) struct GcBoxHeader { impl GcBoxHeader { #[inline] - pub fn new() -> Self { + pub(crate) fn new() -> Self { GcBoxHeader { roots: Cell::new(1), next: Cell::new(None), @@ -23,7 +23,7 @@ impl GcBoxHeader { } #[inline] - pub fn new_weak() -> Self { + pub(crate) fn new_weak() -> Self { // Set weak_flag GcBoxHeader { roots: Cell::new(WEAK_MASK | 1), @@ -32,12 +32,12 @@ impl GcBoxHeader { } #[inline] - pub fn roots(&self) -> usize { + pub(crate) fn roots(&self) -> usize { self.roots.get() & ROOTS_MASK } #[inline] - pub fn inc_roots(&self) { + pub(crate) fn inc_roots(&self) { let roots = self.roots.get(); if (roots & ROOTS_MASK) < ROOTS_MAX { @@ -49,39 +49,38 @@ impl GcBoxHeader { } #[inline] - pub fn dec_roots(&self) { + pub(crate) fn dec_roots(&self) { // Underflow check as a stop gap for current issue when dropping if self.roots.get() > 0 { - self.roots.set(self.roots.get() - 1) + self.roots.set(self.roots.get() - 1); } } #[inline] - pub fn is_marked(&self) -> bool { + pub(crate) fn is_marked(&self) -> bool { self.roots.get() & MARK_MASK != 0 } #[inline] - pub fn mark(&self) { - self.roots.set(self.roots.get() | MARK_MASK) + pub(crate) fn mark(&self) { + self.roots.set(self.roots.get() | MARK_MASK); } #[inline] - pub fn unmark(&self) { - self.roots.set(self.roots.get() & !MARK_MASK) + pub(crate) fn unmark(&self) { + self.roots.set(self.roots.get() & !MARK_MASK); } #[inline] - pub fn is_ephemeron(&self) -> bool { + pub(crate) fn is_ephemeron(&self) -> bool { self.roots.get() & WEAK_MASK != 0 } } // NOTE: [repr(C)] is most likely unneeded here, but will keep it for now -/// The GcBox represents a box on `BoaGc`'s heap. 
The GcBox's creation and allocation is handled -/// by the allocator +/// A garbage collected allocation. #[repr(C)] -pub struct GcBox { +pub(crate) struct GcBox { pub(crate) header: GcBoxHeader, pub(crate) value: T, } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 05626420b0d..82dab3f2f22 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -1,15 +1,46 @@ //! Garbage collector for the Boa JavaScript engine. +#![warn( + clippy::perf, + clippy::single_match_else, + clippy::dbg_macro, + clippy::doc_markdown, + clippy::wildcard_imports, + clippy::struct_excessive_bools, + clippy::doc_markdown, + clippy::semicolon_if_nothing_returned, + clippy::pedantic +)] +#![deny( + clippy::all, + clippy::cast_lossless, + clippy::redundant_closure_for_method_calls, + clippy::unnested_or_patterns, + clippy::trivially_copy_pass_by_ref, + clippy::needless_pass_by_value, + clippy::match_wildcard_for_single_variants, + clippy::map_unwrap_or, + unused_qualifications, + unused_import_braces, + unused_lifetimes, + unreachable_pub, + trivial_numeric_casts, + rustdoc::broken_intra_doc_links, + missing_debug_implementations, + missing_copy_implementations, + deprecated_in_future, + meta_variable_misuse, + non_ascii_idents, + rust_2018_compatibility, + rust_2018_idioms, + future_incompatible, + nonstandard_style, + missing_docs +)] #![allow( clippy::let_unit_value, - clippy::should_implement_trait, - clippy::match_like_matches_macro, - clippy::new_ret_no_self, - clippy::needless_bool, - // Putting the below on the allow list for now, but these should eventually be addressed clippy::missing_safety_doc, - clippy::explicit_auto_deref, - clippy::borrow_deref_ref, + clippy::module_name_repetitions )] extern crate self as boa_gc; @@ -19,7 +50,7 @@ use std::cell::{Cell, RefCell}; use std::mem; use std::ptr::NonNull; -pub mod trace; +mod trace; pub(crate) mod internals; @@ -44,8 +75,8 @@ thread_local!(static BOA_GC: RefCell = RefCell::new( BoaGc { })); struct GcConfig { - adult_threshold: usize, - growth_ratio: f64, + threshold: usize, + used_space_percentage: usize, } // Setting the defaults to an arbitrary value currently. @@ -54,8 +85,8 @@ struct GcConfig { impl Default for GcConfig { fn default() -> Self { Self { - adult_threshold: 1024, - growth_ratio: 0.8, + threshold: 1024, + used_space_percentage: 80, } } } @@ -63,7 +94,7 @@ impl Default for GcConfig { #[derive(Default)] struct GcRuntimeData { collections: usize, - total_bytes_allocated: usize, + bytes_allocated: usize, } struct BoaGc { @@ -95,6 +126,8 @@ impl Drop for DropGuard { } } +/// Returns `true` if it is safe for a type to run [`Finalize::finalize`]. 
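The `DropGuard` and `GC_DROPPING` pieces in this hunk form a small RAII pattern: while the collector is dropping boxes, `finalizer_safe` (whose body follows just below) reports `false`, so finalizers know not to touch possibly-dead pointers. A self-contained sketch of that pattern, independent of the real collector and with the guard's set/reset points assumed for illustration:

    use std::cell::Cell;

    thread_local!(static GC_DROPPING: Cell<bool> = Cell::new(false));

    struct DropGuard;

    impl DropGuard {
        fn new() -> Self {
            GC_DROPPING.with(|dropping| dropping.set(true));
            DropGuard
        }
    }

    impl Drop for DropGuard {
        fn drop(&mut self) {
            GC_DROPPING.with(|dropping| dropping.set(false));
        }
    }

    fn finalizer_safe() -> bool {
        GC_DROPPING.with(|dropping| !dropping.get())
    }

    fn main() {
        assert!(finalizer_safe());
        {
            // While the guard is alive (i.e. the collector is tearing the heap down),
            // finalizers must not dereference Gc pointers.
            let _guard = DropGuard::new();
            assert!(!finalizer_safe());
        }
        assert!(finalizer_safe());
    }
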
+#[must_use] pub fn finalizer_safe() -> bool { GC_DROPPING.with(|dropping| !dropping.get()) } @@ -105,7 +138,7 @@ pub fn finalizer_safe() -> bool { struct Allocator; impl Allocator { - fn new(value: GcBox) -> NonNull> { + fn allocate(value: GcBox) -> NonNull> { let _timer = Profiler::global().start_event("New Pointer", "BoaAlloc"); let element_size = mem::size_of_val::>(&value); BOA_GC.with(|st| { @@ -116,21 +149,21 @@ impl Allocator { let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::from(value))) }; gc.adult_start.set(Some(ptr)); - gc.runtime.total_bytes_allocated += element_size; + gc.runtime.bytes_allocated += element_size; ptr }) } fn manage_state(gc: &mut BoaGc) { - if gc.runtime.total_bytes_allocated > gc.config.adult_threshold { + if gc.runtime.bytes_allocated > gc.config.threshold { Collector::run_full_collection(gc); - if gc.runtime.total_bytes_allocated as f64 - > gc.config.adult_threshold as f64 * gc.config.growth_ratio + if gc.runtime.bytes_allocated + > gc.config.threshold / 100 * gc.config.used_space_percentage { - gc.config.adult_threshold = - (gc.runtime.total_bytes_allocated as f64 / gc.config.growth_ratio) as usize + gc.config.threshold = + gc.runtime.bytes_allocated / gc.config.used_space_percentage * 100; } } } @@ -147,7 +180,7 @@ impl Allocator { // A better approach in a more concurrent structure may be to reorder. // // Mark -> Sweep -> Finalize -pub struct Collector; +struct Collector; impl Collector { fn run_full_collection(gc: &mut BoaGc) { @@ -163,7 +196,7 @@ impl Collector { let _final_unreachable_adults = unsafe { Self::mark_heap(&gc.adult_start) }; unsafe { - Self::sweep(&gc.adult_start, &mut gc.runtime.total_bytes_allocated); + Self::sweep(&gc.adult_start, &mut gc.runtime.bytes_allocated); } } @@ -181,7 +214,7 @@ impl Collector { } else if (*node.as_ptr()).header.roots() > 0 { (*node.as_ptr()).trace_inner(); } else { - finalize.push(node) + finalize.push(node); } mark_head = &(*node.as_ptr()).header.next; } @@ -213,10 +246,8 @@ impl Collector { if node.as_ref().value.is_marked_ephemeron() { node.as_ref().header.mark(); true - } else if node.as_ref().header.roots() > 0 { - true } else { - false + node.as_ref().header.roots() > 0 } }); // Replace the old queue with the unreachable @@ -224,23 +255,22 @@ impl Collector { // If reachable nodes is not empty, trace values. 
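To make the integer arithmetic in `manage_state` above concrete, here is the post-collection threshold-growth rule in isolation, using the defaults from this patch (`threshold: 1024`, `used_space_percentage: 80`). The helper name is ours, not part of the patch:

    // If live bytes still exceed 80% of the current threshold after a collection,
    // raise the threshold so that live bytes sit at roughly 80% of the new one.
    fn grow_threshold(threshold: usize, used_space_percentage: usize, bytes_allocated: usize) -> usize {
        if bytes_allocated > threshold / 100 * used_space_percentage {
            bytes_allocated / used_space_percentage * 100
        } else {
            threshold
        }
    }

    fn main() {
        // 900 live bytes > 800 (80% of 1024), so the threshold grows to 1100.
        assert_eq!(grow_threshold(1024, 80, 900), 1100);
        // 700 live bytes <= 800, so the threshold stays put.
        assert_eq!(grow_threshold(1024, 80, 700), 1024);
    }
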
If it is empty, // break from the loop - if !reachable.is_empty() { - EPHEMERON_QUEUE.with(|state| state.set(Some(Vec::new()))); - // iterate through reachable nodes and trace their values, - // enqueuing any ephemeron that is found during the trace - for node in reachable { - // TODO: deal with fetch ephemeron_queue - (*node.as_ptr()).weak_trace_inner() - } - - EPHEMERON_QUEUE.with(|st| { - if let Some(found_nodes) = st.take() { - ephemeron_queue.extend(found_nodes) - } - }) - } else { + if reachable.is_empty() { break; } + EPHEMERON_QUEUE.with(|state| state.set(Some(Vec::new()))); + // iterate through reachable nodes and trace their values, + // enqueuing any ephemeron that is found during the trace + for node in reachable { + // TODO: deal with fetch ephemeron_queue + (*node.as_ptr()).weak_trace_inner(); + } + + EPHEMERON_QUEUE.with(|st| { + if let Some(found_nodes) = st.take() { + ephemeron_queue.extend(found_nodes); + } + }); } ephemeron_queue } @@ -252,7 +282,7 @@ impl Collector { // prior to finalization as they could have been marked by a different // trace after initially being added to the queue if !(*node.as_ptr()).header.is_marked() { - Trace::run_finalizer(&(*node.as_ptr()).value) + Trace::run_finalizer(&(*node.as_ptr()).value); } } } @@ -298,19 +328,15 @@ impl Collector { } } -// A utility function that forces runs through Collector method based off the state. -// -// Note: -// - This method is meant solely for testing purposes only -// - `force_collect` will not extend threshold +/// Forcefully runs a garbage collection of all unaccessible nodes. pub fn force_collect() { BOA_GC.with(|current| { let mut gc = current.borrow_mut(); - if gc.runtime.total_bytes_allocated > 0 { - Collector::run_full_collection(&mut *gc) + if gc.runtime.bytes_allocated > 0 { + Collector::run_full_collection(&mut gc); } - }) + }); } #[cfg(test)] diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index 3a45f3fc04c..8508f92cd76 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -7,14 +7,21 @@ use crate::{ use std::cell::Cell; use std::ptr::NonNull; +#[derive(Debug)] +/// A key-value pair where the value becomes unaccesible when the key is garbage collected. +/// +/// See Racket's explanation on [**ephemerons**][eph] for a more detailed explanation. +/// +/// [eph]: https://docs.racket-lang.org/reference/ephemerons.html pub struct Ephemeron { inner_ptr: Cell>>>, } impl Ephemeron { + /// Creates a new `Ephemeron`. pub fn new(key: &Gc, value: V) -> Self { Self { - inner_ptr: Cell::new(Allocator::new(GcBox::new_weak(EphemeronBox::new( + inner_ptr: Cell::new(Allocator::allocate(GcBox::new_weak(EphemeronBox::new( key, value, )))), } @@ -35,11 +42,14 @@ impl Ephemeron { } #[inline] + /// Gets the weak key of this `Ephemeron`, or `None` if the key was already garbage + /// collected. pub fn key(&self) -> Option<&K> { self.inner().value().key() } #[inline] + /// Gets the stored value of this `Ephemeron`. 
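A minimal usage sketch of the `Ephemeron` API documented above. It assumes `Ephemeron`, `Gc`, and `force_collect` are reachable from the crate root, the way the tests later in this series use them:

    use boa_gc::{force_collect, Ephemeron, Gc};

    fn main() {
        let key = Gc::new(String::from("key"));
        let eph = Ephemeron::new(&key, 42);

        // While the key is alive, both halves of the pair are observable.
        assert_eq!(eph.key().map(String::as_str), Some("key"));
        assert_eq!(*eph.value(), 42);

        // Dropping the last strong pointer to the key and collecting makes the key
        // unobservable; the ephemeron alone does not keep it alive.
        drop(key);
        force_collect();
        assert!(eph.key().is_none());
    }
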
pub fn value(&self) -> &V { self.inner().value().value() } @@ -55,8 +65,8 @@ unsafe impl Trace for Ephemeron { unsafe fn weak_trace(&self) { EPHEMERON_QUEUE.with(|q| { let mut queue = q.take().expect("queue is initialized by weak_trace"); - queue.push(self.inner_ptr()) - }) + queue.push(self.inner_ptr()); + }); } #[inline] @@ -67,7 +77,7 @@ unsafe impl Trace for Ephemeron { #[inline] fn run_finalizer(&self) { - Finalize::finalize(self) + Finalize::finalize(self); } } diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index e5012b1b8fa..36279b47561 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -4,7 +4,7 @@ use std::fmt::{self, Debug, Display}; use std::hash::{Hash, Hasher}; use std::marker::PhantomData; use std::ops::Deref; -use std::ptr::{self, NonNull}; +use std::ptr::{self, addr_of_mut, NonNull}; use std::rc::Rc; use crate::internals::GcBox; @@ -12,7 +12,7 @@ use crate::trace::{Finalize, Trace}; use crate::{finalizer_safe, Allocator}; pub(crate) unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { - ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8); + ptr::write(addr_of_mut!(ptr).cast::<*mut u8>(), data.cast::()); ptr } @@ -28,7 +28,7 @@ impl Gc { // Create GcBox and allocate it to heap. // // Note: Allocator can cause Collector to run - let inner_ptr = Allocator::new(GcBox::new(value)); + let inner_ptr = Allocator::allocate(GcBox::new(value)); unsafe { (*inner_ptr.as_ptr()).value().unroot() } let gc = Self { inner_ptr: Cell::new(inner_ptr), @@ -51,7 +51,7 @@ pub(crate) unsafe fn clear_root_bit( ptr: NonNull>, ) -> NonNull> { let ptr = ptr.as_ptr(); - let data = ptr as *mut u8; + let data = ptr.cast::(); let addr = data as isize; let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr)); NonNull::new_unchecked(ptr) @@ -59,12 +59,12 @@ pub(crate) unsafe fn clear_root_bit( impl Gc { fn rooted(&self) -> bool { - self.inner_ptr.get().as_ptr() as *mut u8 as usize & 1 != 0 + self.inner_ptr.get().as_ptr().cast::() as usize & 1 != 0 } unsafe fn set_root(&self) { let ptr = self.inner_ptr.get().as_ptr(); - let data = ptr as *mut u8; + let data = ptr.cast::(); let addr = data as isize; let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr)); self.inner_ptr.set(NonNull::new_unchecked(ptr)); @@ -170,6 +170,7 @@ impl Default for Gc { } } +#[allow(clippy::inline_always)] impl PartialEq for Gc { #[inline(always)] fn eq(&self, other: &Self) -> bool { @@ -179,6 +180,7 @@ impl PartialEq for Gc { impl Eq for Gc {} +#[allow(clippy::inline_always)] impl PartialOrd for Gc { #[inline(always)] fn partial_cmp(&self, other: &Self) -> Option { @@ -239,12 +241,12 @@ impl fmt::Pointer for Gc { impl std::borrow::Borrow for Gc { fn borrow(&self) -> &T { - &**self + self } } -impl std::convert::AsRef for Gc { +impl AsRef for Gc { fn as_ref(&self) -> &T { - &**self + self } } diff --git a/boa_gc/src/pointers/weak.rs b/boa_gc/src/pointers/weak.rs index 7f88fba7f8a..f349ab262c6 100644 --- a/boa_gc/src/pointers/weak.rs +++ b/boa_gc/src/pointers/weak.rs @@ -1,12 +1,17 @@ use crate::{Ephemeron, Finalize, Gc, Trace}; -#[derive(Trace, Finalize)] +/// A weak reference to a [`Gc`]. +/// +/// This type allows keeping references to [`Gc`] managed values without keeping them alive for +/// garbage collections. However, this also means [`WeakGc::value`] can return `None` at any moment. 
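The `rooted`/`set_root`/`clear_root` methods in the `gc.rs` hunk above stash the root flag in the least-significant bit of the `GcBox` pointer, relying on the allocation's alignment to keep that bit free. A toy, address-only illustration that never dereferences a tagged pointer:

    fn main() {
        let value = Box::new(42u64);
        let addr = &*value as *const u64 as usize;
        // The allocation is at least 8-byte aligned, so the low bit starts clear.
        assert_eq!(addr & 1, 0);

        let rooted = addr | 1; // set_root: tag the pointer
        assert_eq!(rooted & 1, 1); // rooted(): read the low bit

        let cleared = rooted & !1; // clear_root_bit: strip the tag before use
        assert_eq!(cleared, addr);
    }
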
+#[derive(Debug, Trace, Finalize)] #[repr(transparent)] pub struct WeakGc { inner: Ephemeron, } impl WeakGc { + /// Creates a new weak pointer for a garbage collected value. pub fn new(value: &Gc) -> Self { Self { inner: Ephemeron::new(value, ()), @@ -16,6 +21,7 @@ impl WeakGc { impl WeakGc { #[inline] + /// Gets the value of this weak pointer, or `None` if the value was already garbage collected. pub fn value(&self) -> Option<&T> { self.inner.key() } diff --git a/boa_gc/src/test/allocation.rs b/boa_gc/src/test/allocation.rs index d318c7e04ff..27386999836 100644 --- a/boa_gc/src/test/allocation.rs +++ b/boa_gc/src/test/allocation.rs @@ -27,5 +27,5 @@ fn gc_basic_pointer_alloc() { force_collect(); Harness::assert_collections(2); Harness::assert_empty_gc(); - }) + }); } diff --git a/boa_gc/src/test/mod.rs b/boa_gc/src/test/mod.rs index df457505497..559fbb8d80f 100644 --- a/boa_gc/src/test/mod.rs +++ b/boa_gc/src/test/mod.rs @@ -7,27 +7,27 @@ mod weak; struct Harness; impl Harness { - pub fn assert_collections(o: usize) { + fn assert_collections(o: usize) { BOA_GC.with(|current| { let gc = current.borrow(); assert_eq!(gc.runtime.collections, o); - }) + }); } - pub fn assert_empty_gc() { + fn assert_empty_gc() { BOA_GC.with(|current| { let gc = current.borrow(); assert!(gc.adult_start.get().is_none()); - assert!(gc.runtime.total_bytes_allocated == 0); - }) + assert!(gc.runtime.bytes_allocated == 0); + }); } - pub fn assert_bytes_allocated() { + fn assert_bytes_allocated() { BOA_GC.with(|current| { let gc = current.borrow(); - assert!(gc.runtime.total_bytes_allocated > 0); - }) + assert!(gc.runtime.bytes_allocated > 0); + }); } } diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs index a4f5ad0d82c..fac2edabf60 100644 --- a/boa_gc/src/test/weak.rs +++ b/boa_gc/src/test/weak.rs @@ -20,7 +20,7 @@ fn eph_weak_gc_test() { drop(gc_value); force_collect(); - assert!(weak.value().is_none()) + assert!(weak.value().is_none()); } }); } diff --git a/boa_gc/src/trace.rs b/boa_gc/src/trace.rs index 405b233b928..fca89523fe6 100644 --- a/boa_gc/src/trace.rs +++ b/boa_gc/src/trace.rs @@ -13,9 +13,9 @@ use std::sync::atomic::{ AtomicU64, AtomicU8, AtomicUsize, }; -/// The Finalize trait, which needs to be implemented on -/// garbage-collected objects to define finalization logic. +/// Substitute for the [`Drop`] trait for garbage collected types. pub trait Finalize { + /// Cleanup logic for a type. fn finalize(&self) {} } @@ -40,12 +40,12 @@ pub unsafe trait Trace: Finalize { /// Decrements the root-count of all contained `Gc`s. unsafe fn unroot(&self); - /// Runs Finalize::finalize() on this object and all + /// Runs [`Finalize::finalize`] on this object and all /// contained subobjects fn run_finalizer(&self); } -/// This rule implements the trace methods with empty implementations. +/// Utility macro to define an empty implementation of [`Trace`]. /// /// Use this for marking types as not containing any `Trace` types. #[macro_export] @@ -66,7 +66,7 @@ macro_rules! unsafe_empty_trace { }; } -/// This rule implements the trace method. +/// Utility macro to manually implement [`Trace`] on a type. /// /// You define a `this` parameter name and pass in a body, which should call `mark` on every /// traceable element inside the body. The mark implementation will automatically delegate to the @@ -112,11 +112,11 @@ macro_rules! 
custom_trace { } #[inline] fn run_finalizer(&self) { - $crate::Finalize::finalize(self); #[inline] fn mark(it: &T) { $crate::Trace::run_finalizer(it); } + $crate::Finalize::finalize(self); let $this = self; $body } From 2f8c92e49eb0f3e6169a4d7423a9d73816e8c873 Mon Sep 17 00:00:00 2001 From: jedel1043 Date: Wed, 9 Nov 2022 15:48:49 -0600 Subject: [PATCH 37/55] Add safety lints and document some unsafe --- boa_engine/src/string/mod.rs | 4 +- boa_engine/src/symbol.rs | 4 +- boa_gc/src/cell.rs | 8 +- boa_gc/src/internals/eph_box.rs | 16 ++-- boa_gc/src/internals/gc_box.rs | 12 ++- boa_gc/src/lib.rs | 54 +++++++------ boa_gc/src/pointers/ephemeron.rs | 2 +- boa_gc/src/pointers/gc.rs | 54 +++++++------ boa_gc/src/trace.rs | 127 +++++++++++++++++++++---------- boa_macros/src/lib.rs | 43 +++++++---- 10 files changed, 205 insertions(+), 119 deletions(-) diff --git a/boa_engine/src/string/mod.rs b/boa_engine/src/string/mod.rs index 2b6ea514eb8..2932e76b9b7 100644 --- a/boa_engine/src/string/mod.rs +++ b/boa_engine/src/string/mod.rs @@ -24,7 +24,7 @@ mod common; use crate::{builtins::string::is_trimmable_whitespace, JsBigInt}; -use boa_gc::{unsafe_empty_trace, Finalize, Trace}; +use boa_gc::{empty_trace, Finalize, Trace}; pub use boa_macros::utf16; use std::{ @@ -292,7 +292,7 @@ sa::assert_eq_size!(JsString, *const ()); // Safety: `JsString` does not contain any objects which needs to be traced, so this is safe. unsafe impl Trace for JsString { - unsafe_empty_trace!(); + empty_trace!(); } impl JsString { diff --git a/boa_engine/src/symbol.rs b/boa_engine/src/symbol.rs index f7f44488294..c0b69c841f2 100644 --- a/boa_engine/src/symbol.rs +++ b/boa_engine/src/symbol.rs @@ -16,7 +16,7 @@ //! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol use crate::{js_string, string::utf16, JsString}; -use boa_gc::{unsafe_empty_trace, Finalize, Trace}; +use boa_gc::{empty_trace, Finalize, Trace}; use std::{ cell::Cell, hash::{Hash, Hasher}, @@ -255,7 +255,7 @@ pub struct JsSymbol { // Safety: JsSymbol does not contain any objects which needs to be traced, // so this is safe. 
unsafe impl Trace for JsSymbol { - unsafe_empty_trace!(); + empty_trace!(); } impl JsSymbol { diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index 1f88d7c788d..ac9f784db1b 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -223,7 +223,7 @@ unsafe impl Trace for GcCell { unsafe fn trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), - _ => (*self.cell.get()).trace(), + _ => unsafe { (*self.cell.get()).trace() }, } } @@ -231,7 +231,7 @@ unsafe impl Trace for GcCell { unsafe fn weak_trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), - _ => (*self.cell.get()).weak_trace(), + _ => unsafe { (*self.cell.get()).weak_trace() }, } } @@ -241,7 +241,7 @@ unsafe impl Trace for GcCell { match self.flags.get().borrowed() { BorrowState::Writing => (), - _ => (*self.cell.get()).root(), + _ => unsafe { (*self.cell.get()).root() }, } } @@ -252,7 +252,7 @@ unsafe impl Trace for GcCell { match self.flags.get().borrowed() { BorrowState::Writing => (), - _ => (*self.cell.get()).unroot(), + _ => unsafe { (*self.cell.get()).unroot() }, } } diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/eph_box.rs index a8f43735634..48218303fae 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -63,13 +63,17 @@ impl EphemeronBox { #[inline] unsafe fn weak_trace_key(&self) { if let Some(key) = self.inner_key() { - key.weak_trace_inner(); + unsafe { + key.weak_trace_inner(); + } } } #[inline] unsafe fn weak_trace_value(&self) { - self.value().weak_trace(); + unsafe { + self.value().weak_trace(); + } } } @@ -88,15 +92,17 @@ unsafe impl Trace for EphemeronBox { } #[inline] - unsafe fn is_marked_ephemeron(&self) -> bool { + fn is_marked_ephemeron(&self) -> bool { self.is_marked() } #[inline] unsafe fn weak_trace(&self) { if self.is_marked() { - self.weak_trace_key(); - self.weak_trace_value(); + unsafe { + self.weak_trace_key(); + self.weak_trace_value(); + } } } diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index 71a00009b56..fc0a43266b5 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -113,24 +113,28 @@ impl GcBox { pub(crate) unsafe fn trace_inner(&self) { if !self.header.is_marked() && !self.header.is_ephemeron() { self.header.mark(); - self.value.trace(); + unsafe { + self.value.trace(); + } } } /// Trace inner data pub(crate) unsafe fn weak_trace_inner(&self) { - self.value.weak_trace(); + unsafe { + self.value.weak_trace(); + } } /// Increases the root count on this `GcBox`. /// Roots prevent the `GcBox` from being destroyed by the garbage collector. - pub(crate) unsafe fn root_inner(&self) { + pub(crate) fn root_inner(&self) { self.header.inc_roots(); } /// Decreases the root count on this `GcBox`. /// Roots prevent the `GcBox` from being destroyed by the garbage collector. 
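Tying the `empty_trace!`/`custom_trace!` rename in this patch to how downstream code uses it: a leaf type with no `Gc` fields gets the empty implementation (as `JsString` and `JsSymbol` do above), while a type that owns `Gc` pointers must hand every one of them to `mark`. A hedged sketch, assuming the macros and pointer types are re-exported at the crate root:

    use boa_gc::{custom_trace, empty_trace, Finalize, Gc, Trace};

    struct Flag(bool);

    impl Finalize for Flag {}
    // SAFETY: `Flag` owns no `Gc` pointers, so there is nothing to trace.
    unsafe impl Trace for Flag {
        empty_trace!();
    }

    struct Pair {
        left: Gc<i32>,
        right: Gc<i32>,
    }

    impl Finalize for Pair {}
    // SAFETY: every owned `Gc` is handed to `mark`, as `custom_trace!` requires.
    unsafe impl Trace for Pair {
        custom_trace!(this, {
            mark(&this.left);
            mark(&this.right);
        });
    }

    fn main() {
        let _flag = Gc::new(Flag(true));
        let pair = Gc::new(Pair {
            left: Gc::new(1),
            right: Gc::new(2),
        });
        assert_eq!(*pair.left + *pair.right, 3);
    }
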
- pub(crate) unsafe fn unroot_inner(&self) { + pub(crate) fn unroot_inner(&self) { self.header.dec_roots(); } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 82dab3f2f22..04f4b27d4f6 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -20,6 +20,9 @@ clippy::needless_pass_by_value, clippy::match_wildcard_for_single_variants, clippy::map_unwrap_or, + clippy::undocumented_unsafe_blocks, + clippy::missing_safety_doc, + unsafe_op_in_unsafe_fn, unused_qualifications, unused_import_braces, unused_lifetimes, @@ -37,11 +40,7 @@ nonstandard_style, missing_docs )] -#![allow( - clippy::let_unit_value, - clippy::missing_safety_doc, - clippy::module_name_repetitions -)] +#![allow(clippy::let_unit_value, clippy::module_name_repetitions)] extern crate self as boa_gc; @@ -209,19 +208,22 @@ impl Collector { let mut ephemeron_queue = Vec::new(); let mut mark_head = head; while let Some(node) = mark_head.get() { - if (*node.as_ptr()).header.is_ephemeron() { + let node_ref = unsafe { node.as_ref() }; + if node_ref.header.is_ephemeron() { ephemeron_queue.push(node); - } else if (*node.as_ptr()).header.roots() > 0 { - (*node.as_ptr()).trace_inner(); + } else if node_ref.header.roots() > 0 { + unsafe { + node_ref.trace_inner(); + } } else { finalize.push(node); } - mark_head = &(*node.as_ptr()).header.next; + mark_head = &node_ref.header.next; } // Ephemeron Evaluation if !ephemeron_queue.is_empty() { - ephemeron_queue = Self::mark_ephemerons(ephemeron_queue); + ephemeron_queue = unsafe { Self::mark_ephemerons(ephemeron_queue) }; } // Any left over nodes in the ephemeron queue at this point are @@ -243,11 +245,12 @@ impl Collector { // are reachable or unreachable let (reachable, other): (Vec<_>, Vec<_>) = ephemeron_queue.into_iter().partition(|node| { - if node.as_ref().value.is_marked_ephemeron() { - node.as_ref().header.mark(); + let node = unsafe { node.as_ref() }; + if node.value.is_marked_ephemeron() { + node.header.mark(); true } else { - node.as_ref().header.roots() > 0 + node.header.roots() > 0 } }); // Replace the old queue with the unreachable @@ -263,7 +266,9 @@ impl Collector { // enqueuing any ephemeron that is found during the trace for node in reachable { // TODO: deal with fetch ephemeron_queue - (*node.as_ptr()).weak_trace_inner(); + unsafe { + node.as_ref().weak_trace_inner(); + } } EPHEMERON_QUEUE.with(|st| { @@ -281,8 +286,9 @@ impl Collector { // We double check that the unreachable nodes are actually unreachable // prior to finalization as they could have been marked by a different // trace after initially being added to the queue - if !(*node.as_ptr()).header.is_marked() { - Trace::run_finalizer(&(*node.as_ptr()).value); + let node = unsafe { node.as_ref() }; + if !node.header.is_marked() { + Trace::run_finalizer(&node.value); } } } @@ -296,17 +302,17 @@ impl Collector { let mut sweep_head = heap_start; while let Some(node) = sweep_head.get() { - if (*node.as_ptr()).is_marked() { - (*node.as_ptr()).header.unmark(); - sweep_head = &(*node.as_ptr()).header.next; - } else if (*node.as_ptr()).header.is_ephemeron() && (*node.as_ptr()).header.roots() > 0 - { + let node_ref = unsafe { node.as_ref() }; + if node_ref.is_marked() { + node_ref.header.unmark(); + sweep_head = &node_ref.header.next; + } else if node_ref.header.is_ephemeron() && node_ref.header.roots() > 0 { // Keep the ephemeron box's alive if rooted, but note that it's pointer is no longer safe - Trace::run_finalizer(&(*node.as_ptr()).value); - sweep_head = &(*node.as_ptr()).header.next; + 
Trace::run_finalizer(&node_ref.value); + sweep_head = &node_ref.header.next; } else { // Drops occur here - let unmarked_node = Box::from_raw(node.as_ptr()); + let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) }; let unallocated_bytes = mem::size_of_val::>(&*unmarked_node); *total_allocated -= unallocated_bytes; sweep_head.set(unmarked_node.header.next.take()); diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index 8508f92cd76..371dcf62288 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -84,6 +84,6 @@ unsafe impl Trace for Ephemeron { impl Drop for Ephemeron { #[inline] fn drop(&mut self) { - unsafe { self.inner().unroot_inner() } + self.inner().unroot_inner(); } } diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index 36279b47561..daf1841868e 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -11,8 +11,12 @@ use crate::internals::GcBox; use crate::trace::{Finalize, Trace}; use crate::{finalizer_safe, Allocator}; -pub(crate) unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { - ptr::write(addr_of_mut!(ptr).cast::<*mut u8>(), data.cast::()); +// Technically, this function is safe, since we're just modifying the address of a pointer without +// dereferencing it. +pub(crate) fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { + unsafe { + ptr::write(addr_of_mut!(ptr).cast::<*mut u8>(), data.cast::()); + } ptr } @@ -34,7 +38,7 @@ impl Gc { inner_ptr: Cell::new(inner_ptr), marker: PhantomData, }; - unsafe { gc.set_root() }; + gc.set_root(); gc } } @@ -54,7 +58,7 @@ pub(crate) unsafe fn clear_root_bit( let data = ptr.cast::(); let addr = data as isize; let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr)); - NonNull::new_unchecked(ptr) + unsafe { NonNull::new_unchecked(ptr) } } impl Gc { @@ -62,16 +66,20 @@ impl Gc { self.inner_ptr.get().as_ptr().cast::() as usize & 1 != 0 } - unsafe fn set_root(&self) { + fn set_root(&self) { let ptr = self.inner_ptr.get().as_ptr(); let data = ptr.cast::(); let addr = data as isize; let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr)); - self.inner_ptr.set(NonNull::new_unchecked(ptr)); + unsafe { + self.inner_ptr.set(NonNull::new_unchecked(ptr)); + } } - unsafe fn clear_root(&self) { - self.inner_ptr.set(clear_root_bit(self.inner_ptr.get())); + fn clear_root(&self) { + unsafe { + self.inner_ptr.set(clear_root_bit(self.inner_ptr.get())); + } } #[inline] @@ -91,12 +99,16 @@ impl Finalize for Gc {} unsafe impl Trace for Gc { #[inline] unsafe fn trace(&self) { - self.inner().trace_inner(); + unsafe { + self.inner().trace_inner(); + } } #[inline] unsafe fn weak_trace(&self) { - self.inner().weak_trace_inner(); + unsafe { + self.inner().weak_trace_inner(); + } } #[inline] @@ -106,7 +118,6 @@ unsafe impl Trace for Gc { // inaccessible due to this method being invoked during the sweeping // phase, and we don't want to modify our state before panicking. self.inner().root_inner(); - self.set_root(); } @@ -117,7 +128,6 @@ unsafe impl Trace for Gc { // inaccessible due to this method being invoked during the sweeping // phase, and we don't want to modify our state before panicking. 
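Stepping back to the `Collector` changes earlier in this patch, here is a deliberately simplified, safe model of the mark/sweep split, for intuition only; the real collector walks an intrusive linked list of `GcBox` headers and additionally handles ephemerons and finalization:

    struct ToyBox {
        roots: usize,
        marked: bool,
        bytes: usize,
    }

    fn collect(heap: &mut Vec<ToyBox>, bytes_allocated: &mut usize) {
        // Mark phase: anything with a positive root count is reachable.
        for node in heap.iter_mut() {
            if node.roots > 0 {
                node.marked = true;
            }
        }
        // Sweep phase: unmark survivors for the next cycle, drop everything else
        // and subtract its size from the running byte count.
        heap.retain_mut(|node| {
            if node.marked {
                node.marked = false;
                true
            } else {
                *bytes_allocated -= node.bytes;
                false
            }
        });
    }

    fn main() {
        let mut bytes_allocated = 48;
        let mut heap = vec![
            ToyBox { roots: 1, marked: false, bytes: 16 },
            ToyBox { roots: 0, marked: false, bytes: 32 },
        ];
        collect(&mut heap, &mut bytes_allocated);
        assert_eq!(heap.len(), 1);
        assert_eq!(bytes_allocated, 16);
    }
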
self.inner().unroot_inner(); - self.clear_root(); } @@ -130,15 +140,13 @@ unsafe impl Trace for Gc { impl Clone for Gc { #[inline] fn clone(&self) -> Self { - unsafe { - self.inner().root_inner(); - let gc = Gc { - inner_ptr: Cell::new(self.inner_ptr.get()), - marker: PhantomData, - }; - gc.set_root(); - gc - } + self.inner().root_inner(); + let gc = Gc { + inner_ptr: Cell::new(self.inner_ptr.get()), + marker: PhantomData, + }; + gc.set_root(); + gc } } @@ -156,9 +164,7 @@ impl Drop for Gc { fn drop(&mut self) { // If this pointer was a root, we should unroot it. if self.rooted() { - unsafe { - self.inner().unroot_inner(); - } + self.inner().unroot_inner(); } } } diff --git a/boa_gc/src/trace.rs b/boa_gc/src/trace.rs index fca89523fe6..354a0119d40 100644 --- a/boa_gc/src/trace.rs +++ b/boa_gc/src/trace.rs @@ -20,28 +20,51 @@ pub trait Finalize { } /// The Trace trait, which needs to be implemented on garbage-collected objects. +/// +/// # Safety +/// +/// - An incorrect implementation of the trait can result in heap overflows, data corruption, +/// use-after-free, or Undefined Behaviour in general. +/// +/// - Calling any of the functions marked as `unsafe` outside of the context of the garbage collector +/// can result in Undefined Behaviour. pub unsafe trait Trace: Finalize { /// Marks all contained `Gc`s. + /// + /// # Safety + /// + /// See [`Trace`]. unsafe fn trace(&self); - /// Checks if an ephemeron's key is marked. + /// Marks all contained weak references of a `Gc`. /// - /// Note: value should always be implemented to return false - unsafe fn is_marked_ephemeron(&self) -> bool { - false - } - - /// Returns true if a marked `Gc` is found + /// # Safety + /// + /// See [`Trace`]. unsafe fn weak_trace(&self); /// Increments the root-count of all contained `Gc`s. + /// + /// # Safety + /// + /// See [`Trace`]. unsafe fn root(&self); /// Decrements the root-count of all contained `Gc`s. + /// + /// # Safety + /// + /// See [`Trace`]. unsafe fn unroot(&self); + /// Checks if an ephemeron's key is marked. + #[doc(hidden)] + fn is_marked_ephemeron(&self) -> bool { + false + } + /// Runs [`Finalize::finalize`] on this object and all - /// contained subobjects + /// contained subobjects. fn run_finalizer(&self); } @@ -49,7 +72,7 @@ pub unsafe trait Trace: Finalize { /// /// Use this for marking types as not containing any `Trace` types. #[macro_export] -macro_rules! unsafe_empty_trace { +macro_rules! empty_trace { () => { #[inline] unsafe fn trace(&self) {} @@ -71,14 +94,21 @@ macro_rules! unsafe_empty_trace { /// You define a `this` parameter name and pass in a body, which should call `mark` on every /// traceable element inside the body. The mark implementation will automatically delegate to the /// correct method on the argument. +/// +/// # Safety +/// +/// Misusing the `mark` function may result in Undefined Behaviour. #[macro_export] macro_rules! custom_trace { ($this:ident, $body:expr) => { #[inline] unsafe fn trace(&self) { #[inline] - unsafe fn mark(it: &T) { - $crate::Trace::trace(it); + fn mark(it: &T) { + // SAFETY: The implementor must ensure that `trace` is correctly implemented. + unsafe { + $crate::Trace::trace(it); + } } let $this = self; $body @@ -86,8 +116,11 @@ macro_rules! custom_trace { #[inline] unsafe fn weak_trace(&self) { #[inline] - unsafe fn mark(it: &T) { - $crate::Trace::weak_trace(it) + fn mark(it: &T) { + // SAFETY: The implementor must ensure that `weak_trace` is correctly implemented. 
+ unsafe { + $crate::Trace::weak_trace(it); + } } let $this = self; $body @@ -95,8 +128,11 @@ macro_rules! custom_trace { #[inline] unsafe fn root(&self) { #[inline] - unsafe fn mark(it: &T) { - $crate::Trace::root(it); + fn mark(it: &T) { + // SAFETY: The implementor must ensure that `root` is correctly implemented. + unsafe { + $crate::Trace::root(it); + } } let $this = self; $body @@ -104,8 +140,11 @@ macro_rules! custom_trace { #[inline] unsafe fn unroot(&self) { #[inline] - unsafe fn mark(it: &T) { - $crate::Trace::unroot(it); + fn mark(it: &T) { + // SAFETY: The implementor must ensure that `unroot` is correctly implemented. + unsafe { + $crate::Trace::unroot(it); + } } let $this = self; $body @@ -124,15 +163,19 @@ macro_rules! custom_trace { } impl Finalize for &'static T {} +// SAFETY: 'static references don't need to be traced, since they live indefinitely. unsafe impl Trace for &'static T { - unsafe_empty_trace!(); + empty_trace!(); } macro_rules! simple_empty_finalize_trace { ($($T:ty),*) => { $( impl Finalize for $T {} - unsafe impl Trace for $T { unsafe_empty_trace!(); } + + // SAFETY: + // Primitive types and string types don't have inner nodes that need to be marked. + unsafe impl Trace for $T { empty_trace!(); } )* } } @@ -186,6 +229,8 @@ simple_empty_finalize_trace![ ]; impl Finalize for [T; N] {} +// SAFETY: +// All elements inside the array are correctly marked. unsafe impl Trace for [T; N] { custom_trace!(this, { for v in this { @@ -197,7 +242,9 @@ unsafe impl Trace for [T; N] { macro_rules! fn_finalize_trace_one { ($ty:ty $(,$args:ident)*) => { impl Finalize for $ty {} - unsafe impl Trace for $ty { unsafe_empty_trace!(); } + // SAFETY: + // Function pointers don't have inner nodes that need to be marked. + unsafe impl Trace for $ty { empty_trace!(); } } } macro_rules! fn_finalize_trace_group { @@ -221,10 +268,13 @@ macro_rules! tuple_finalize_trace { () => {}; // This case is handled above, by simple_finalize_empty_trace!(). ($($args:ident),*) => { impl<$($args),*> Finalize for ($($args,)*) {} + // SAFETY: + // All elements inside the tuple are correctly marked. unsafe impl<$($args: $crate::Trace),*> Trace for ($($args,)*) { custom_trace!(this, { #[allow(non_snake_case, unused_unsafe)] fn avoid_lints<$($args: $crate::Trace),*>(&($(ref $args,)*): &($($args,)*)) { + // SAFETY: The implementor must ensure a correct implementation. unsafe { $(mark($args);)* } } avoid_lints(this) @@ -258,23 +308,8 @@ type_arg_tuple_based_finalize_trace_impls![ (A, B, C, D, E, F, G, H, I, J, K, L); ]; -impl Finalize for Rc {} -unsafe impl Trace for Rc { - custom_trace!(this, { - mark(&**this); - }); -} - -impl Finalize for Rc<[T]> {} -unsafe impl Trace for Rc<[T]> { - custom_trace!(this, { - for e in this.iter() { - mark(e); - } - }); -} - impl Finalize for Box {} +// SAFETY: The inner value of the `Box` is correctly marked. unsafe impl Trace for Box { custom_trace!(this, { mark(&**this); @@ -282,6 +317,7 @@ unsafe impl Trace for Box { } impl Finalize for Box<[T]> {} +// SAFETY: All the inner elements of the `Box` array are correctly marked. unsafe impl Trace for Box<[T]> { custom_trace!(this, { for e in this.iter() { @@ -291,6 +327,7 @@ unsafe impl Trace for Box<[T]> { } impl Finalize for Vec {} +// SAFETY: All the inner elements of the `Vec` are correctly marked. unsafe impl Trace for Vec { custom_trace!(this, { for e in this { @@ -300,6 +337,7 @@ unsafe impl Trace for Vec { } impl Finalize for Option {} +// SAFETY: The inner value of the `Option` is correctly marked. 
unsafe impl Trace for Option { custom_trace!(this, { if let Some(ref v) = *this { @@ -309,6 +347,7 @@ unsafe impl Trace for Option { } impl Finalize for Result {} +// SAFETY: Both inner values of the `Result` are correctly marked. unsafe impl Trace for Result { custom_trace!(this, { match *this { @@ -319,6 +358,7 @@ unsafe impl Trace for Result { } impl Finalize for BinaryHeap {} +// SAFETY: All the elements of the `BinaryHeap` are correctly marked. unsafe impl Trace for BinaryHeap { custom_trace!(this, { for v in this.iter() { @@ -328,6 +368,7 @@ unsafe impl Trace for BinaryHeap { } impl Finalize for BTreeMap {} +// SAFETY: All the elements of the `BTreeMap` are correctly marked. unsafe impl Trace for BTreeMap { custom_trace!(this, { for (k, v) in this { @@ -338,6 +379,7 @@ unsafe impl Trace for BTreeMap { } impl Finalize for BTreeSet {} +// SAFETY: All the elements of the `BTreeSet` are correctly marked. unsafe impl Trace for BTreeSet { custom_trace!(this, { for v in this { @@ -347,6 +389,7 @@ unsafe impl Trace for BTreeSet { } impl Finalize for HashMap {} +// SAFETY: All the elements of the `HashMap` are correctly marked. unsafe impl Trace for HashMap { custom_trace!(this, { for (k, v) in this.iter() { @@ -357,6 +400,7 @@ unsafe impl Trace for HashMap Finalize for HashSet {} +// SAFETY: All the elements of the `HashSet` are correctly marked. unsafe impl Trace for HashSet { custom_trace!(this, { for v in this.iter() { @@ -366,6 +410,7 @@ unsafe impl Trace for HashSet { } impl Finalize for LinkedList {} +// SAFETY: All the elements of the `LinkedList` are correctly marked. unsafe impl Trace for LinkedList { custom_trace!(this, { for v in this.iter() { @@ -375,11 +420,13 @@ unsafe impl Trace for LinkedList { } impl Finalize for PhantomData {} +// SAFETY: A `PhantomData` doesn't have inner data that needs to be marked. unsafe impl Trace for PhantomData { - unsafe_empty_trace!(); + empty_trace!(); } impl Finalize for VecDeque {} +// SAFETY: All the elements of the `VecDeque` are correctly marked. unsafe impl Trace for VecDeque { custom_trace!(this, { for v in this.iter() { @@ -388,8 +435,10 @@ unsafe impl Trace for VecDeque { }); } -impl<'a, T: ToOwned + Trace + ?Sized> Finalize for Cow<'a, T> {} -unsafe impl<'a, T: ToOwned + Trace + ?Sized> Trace for Cow<'a, T> +impl Finalize for Cow<'static, T> {} +// SAFETY: 'static references don't need to be traced, since they live indefinitely, and the owned +// variant is correctly marked. +unsafe impl Trace for Cow<'static, T> where T::Owned: Trace, { diff --git a/boa_macros/src/lib.rs b/boa_macros/src/lib.rs index f38c5d2b2fb..c1fc87fa6dd 100644 --- a/boa_macros/src/lib.rs +++ b/boa_macros/src/lib.rs @@ -30,44 +30,59 @@ fn derive_trace(mut s: Structure<'_>) -> proc_macro2::TokenStream { let trace_impl = s.unsafe_bound_impl( quote!(::boa_gc::Trace), quote! 
{ - #[inline] unsafe fn trace(&self) { + #[inline] + unsafe fn trace(&self) { #[allow(dead_code)] #[inline] - unsafe fn mark(it: &T) { - ::boa_gc::Trace::trace(it); + fn mark(it: &T) { + unsafe { + ::boa_gc::Trace::trace(it); + } } match *self { #trace_body } } - #[inline] unsafe fn weak_trace(&self) { + #[inline] + unsafe fn weak_trace(&self) { #[allow(dead_code, unreachable_code)] #[inline] - unsafe fn mark(it: &T) { - ::boa_gc::Trace::weak_trace(it) + fn mark(it: &T) { + unsafe { + ::boa_gc::Trace::weak_trace(it) + } } match *self { #trace_body } } - #[inline] unsafe fn root(&self) { + #[inline] + unsafe fn root(&self) { #[allow(dead_code)] #[inline] - unsafe fn mark(it: &T) { - ::boa_gc::Trace::root(it); + fn mark(it: &T) { + unsafe { + ::boa_gc::Trace::root(it); + } } match *self { #trace_body } } - #[inline] unsafe fn unroot(&self) { + #[inline] + unsafe fn unroot(&self) { #[allow(dead_code)] #[inline] - unsafe fn mark(it: &T) { - ::boa_gc::Trace::unroot(it); + fn mark(it: &T) { + unsafe { + ::boa_gc::Trace::unroot(it); + } } match *self { #trace_body } } - #[inline] fn run_finalizer(&self) { + #[inline] + fn run_finalizer(&self) { ::boa_gc::Finalize::finalize(self); #[allow(dead_code)] #[inline] fn mark(it: &T) { - ::boa_gc::Trace::run_finalizer(it); + unsafe { + ::boa_gc::Trace::run_finalizer(it); + } } match *self { #trace_body } } From 336ff58a8f8b47055399ec1cc5f8a206eea222df Mon Sep 17 00:00:00 2001 From: jedel1043 Date: Wed, 9 Nov 2022 17:11:59 -0600 Subject: [PATCH 38/55] Document some unsafe pt. 2 --- boa_gc/src/lib.rs | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 04f4b27d4f6..abe95e28bac 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -133,7 +133,7 @@ pub fn finalizer_safe() -> bool { /// The Allocator handles allocation of garbage collected values. /// -/// The allocator can trigger a garbage collection +/// The allocator can trigger a garbage collection. struct Allocator; impl Allocator { @@ -280,12 +280,17 @@ impl Collector { ephemeron_queue } + /// # Safety + /// + /// Passing a vec with invalid pointers will result in Undefined Behaviour. unsafe fn finalize(finalize_vec: Vec>>) { let _timer = Profiler::global().start_event("Gc Finalization", "gc"); for node in finalize_vec { // We double check that the unreachable nodes are actually unreachable // prior to finalization as they could have been marked by a different // trace after initially being added to the queue + // + // SAFETY: The caller must ensure all pointers inside `finalize_vec` are valid. let node = unsafe { node.as_ref() }; if !node.header.is_marked() { Trace::run_finalizer(&node.value); @@ -293,6 +298,12 @@ impl Collector { } } + /// # Safety + /// + /// - Providing an invalid pointer in the `heap_start` or in any of the headers of each + /// node will result in Undefined Behaviour. + /// - Providing a list of pointers that weren't allocated by `Box::into_raw(Box::new(..))` + /// will result in Undefined Behaviour. unsafe fn sweep( heap_start: &Cell>>>, total_allocated: &mut usize, @@ -302,6 +313,7 @@ impl Collector { let mut sweep_head = heap_start; while let Some(node) = sweep_head.get() { + // SAFETY: The caller must ensure the validity of every node of `heap_start`. 
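From the user's side, the `boa_macros` changes above keep `#[derive(Trace, Finalize)]` working as before; the derived `Trace` simply hands every field to `mark`. A hedged usage sketch, assuming the derive macros are re-exported so they can be named through `boa_gc` (the `WeakGc` definition elsewhere in this series derives them the same way):

    use boa_gc::{Finalize, Gc, Trace};

    #[derive(Trace, Finalize)]
    struct Node {
        label: String,
        next: Option<Gc<Node>>,
    }

    fn main() {
        let tail = Gc::new(Node { label: "tail".into(), next: None });
        let head = Gc::new(Node { label: "head".into(), next: Some(tail) });
        assert_eq!(head.next.as_ref().map(|n| n.label.as_str()), Some("tail"));
    }
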
let node_ref = unsafe { node.as_ref() }; if node_ref.is_marked() { node_ref.header.unmark(); @@ -311,7 +323,8 @@ impl Collector { Trace::run_finalizer(&node_ref.value); sweep_head = &node_ref.header.next; } else { - // Drops occur here + // SAFETY: The algorithm ensures only unmarked/unreachable pointers are dropped. + // The caller must ensure all pointers were allocated by `Box::into_raw(Box::new(..))`. let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) }; let unallocated_bytes = mem::size_of_val::>(&*unmarked_node); *total_allocated -= unallocated_bytes; @@ -327,7 +340,9 @@ impl Collector { let sweep_head = &gc.adult_start; while let Some(node) = sweep_head.get() { - // Drops every node + // SAFETY: + // The `Allocator` must always ensure its start node is a valid, non-null pointer that + // was allocated by `Box::from_raw(Box::new(..))`. let unmarked_node = unsafe { Box::from_raw(node.as_ptr()) }; sweep_head.set(unmarked_node.header.next.take()); } From 6e00c4adaddc303c03c7c09f3c2713e55f1d24d1 Mon Sep 17 00:00:00 2001 From: nekevss Date: Wed, 9 Nov 2022 22:29:19 -0500 Subject: [PATCH 39/55] Safety documentation and basic allocation and dump test --- boa_gc/src/cell.rs | 13 ++++++++ boa_gc/src/internals/eph_box.rs | 57 ++++++++++++++++++++++++++------ boa_gc/src/internals/gc_box.rs | 12 +++++-- boa_gc/src/lib.rs | 21 ++++++++---- boa_gc/src/pointers/ephemeron.rs | 7 +++- boa_gc/src/pointers/gc.rs | 14 ++++++-- boa_gc/src/test/weak.rs | 13 ++++++++ 7 files changed, 114 insertions(+), 23 deletions(-) diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index ac9f784db1b..e96eb650e71 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -157,6 +157,7 @@ impl GcCell { } self.flags.set(self.flags.get().add_reading()); + // SAFETY: calling value on a rooted value may cause Undefined Behavior unsafe { Ok(GcCellRef { flags: &self.flags, @@ -181,6 +182,8 @@ impl GcCell { } self.flags.set(self.flags.get().set_writing()); + // SAFETY: This is safe as the value is rooted if it was not previously rooted, + // so it cannot be dropped. unsafe { // Force the val_ref's contents to be rooted for the duration of the // mutable borrow @@ -218,11 +221,13 @@ impl Display for BorrowMutError { impl Finalize for GcCell {} +// SAFETY: Please see [`Trace`]. 
Borrowed unsafe impl Trace for GcCell { #[inline] unsafe fn trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).trace() }, } } @@ -231,6 +236,7 @@ unsafe impl Trace for GcCell { unsafe fn weak_trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).weak_trace() }, } } @@ -241,6 +247,7 @@ unsafe impl Trace for GcCell { match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).root() }, } } @@ -252,6 +259,7 @@ unsafe impl Trace for GcCell { match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).unroot() }, } } @@ -261,6 +269,7 @@ unsafe impl Trace for GcCell { Finalize::finalize(self); match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).run_finalizer() }, } } @@ -402,6 +411,7 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> GcCellRefMut<'a, T, U> { V: ?Sized, F: FnOnce(&mut U) -> &mut V, { + // SAFETY: This is safe as `GcCellRefMut` is already borrowed, so the value is rooted. let value = unsafe { &mut *(orig.value as *mut U) }; let ret = GcCellRefMut { @@ -440,6 +450,8 @@ impl<'a, T: Trace + ?Sized, U: ?Sized> Drop for GcCellRefMut<'a, T, U> { // Restore the rooted state of the GcCell's contents to the state of the GcCell. // During the lifetime of the GcCellRefMut, the GcCell's contents are rooted. if !self.gc_cell.flags.get().rooted() { + // SAFETY: If `GcCell` is no longer rooted, then unroot it. This should be safe + // as the internal `GcBox` should be guaranteed to have at least 1 root. unsafe { (*self.gc_cell.cell.get()).unroot(); } @@ -462,6 +474,7 @@ impl<'a, T: Trace + ?Sized, U: Display + ?Sized> Display for GcCellRefMut<'a, T, } } +// SAFETY: GcCell tracks it's `BorrowState` is `Writing` unsafe impl Send for GcCell {} impl Clone for GcCell { diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/eph_box.rs index 48218303fae..a46cbd79df7 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -20,6 +20,7 @@ impl EphemeronBox { } impl EphemeronBox { + /// Checks if the key pointer is marked by Trace #[inline] pub(crate) fn is_marked(&self) -> bool { if let Some(key) = self.inner_key() { @@ -29,14 +30,22 @@ impl EphemeronBox { } } + /// Returns some pointer to the `key`'s `GcBox` or None + /// # Panics + /// This method will panic if called while the garbage collector is dropping. #[inline] fn inner_key_ptr(&self) -> Option<*mut GcBox> { assert!(finalizer_safe()); self.key.get().map(NonNull::as_ptr) } + /// Returns some reference to `key`'s `GcBox` or None #[inline] fn inner_key(&self) -> Option<&GcBox> { + // SAFETY: This is safe as `EphemeronBox::inner_key_ptr()` will + // fetch either a live `GcBox` or None. The value of `key` is set + // to None in the case where `EphemeronBox` and `key`'s `GcBox` + // entered into `Collector::sweep()` as unmarked. 
unsafe { if let Some(inner_key) = self.inner_key_ptr() { Some(&*inner_key) @@ -46,6 +55,7 @@ impl EphemeronBox { } } + /// Returns a reference to the value of `key`'s `GcBox` #[inline] pub(crate) fn key(&self) -> Option<&K> { if let Some(key_box) = self.inner_key() { @@ -55,28 +65,36 @@ impl EphemeronBox { } } + /// Returns a reference to `value` #[inline] pub(crate) fn value(&self) -> &V { &self.value } + /// Calls [`Trace::weak_trace()`][crate::Trace] on key #[inline] - unsafe fn weak_trace_key(&self) { + fn weak_trace_key(&self) { if let Some(key) = self.inner_key() { - unsafe { - key.weak_trace_inner(); - } + key.weak_trace_inner(); } } + /// Calls [`Trace::weak_trace()`][crate::Trace] on value + /// #[inline] - unsafe fn weak_trace_value(&self) { + fn weak_trace_value(&self) { + // SAFETY: Value is a sized element that must implement trace. The + // operation is safe as EphemeronBox owns value and `Trace::weak_trace` + // must be implemented on it unsafe { self.value().weak_trace(); } } } +// `EphemeronBox`'s Finalize is special in that if it is determined to be unreachable +// and therefore so has the `GcBox` that `key`stores the pointer to, then we set `key` +// to None to guarantee that we do not access freed memory. impl Finalize for EphemeronBox { #[inline] fn finalize(&self) { @@ -84,38 +102,57 @@ impl Finalize for EphemeronBox { } } +// SAFETY: Please see [`Trace] unsafe impl Trace for EphemeronBox { + /// # Safety + /// + /// Please see [`Trace`]. #[inline] unsafe fn trace(&self) { /* An ephemeron is never traced with Phase One Trace */ - /* May be traced in phase 3, so this still may need to be implemented */ } + /// Checks if the `key`'s `GcBox` has been marked by `Trace::trace()` or `Trace::weak_trace`. + /// + /// # Safety + /// + /// Please see [`Trace`]. #[inline] fn is_marked_ephemeron(&self) -> bool { self.is_marked() } + /// Checks if this `EphemeronBox` has already been determined reachable. If so, continue to trace + /// value in `key` and `value` + /// + /// # Safety + /// + /// Please see [`Traced`]. #[inline] unsafe fn weak_trace(&self) { if self.is_marked() { - unsafe { - self.weak_trace_key(); - self.weak_trace_value(); - } + self.weak_trace_key(); + self.weak_trace_value(); } } + /// # Safety + /// + /// Please see [`Trace`]. #[inline] unsafe fn root(&self) { // An ephemeron here should probably not be rooted. } + /// # Safety + /// + /// Please see [`Trace`]. #[inline] unsafe fn unroot(&self) { // An ephemeron is never rooted in the GcBoxHeader } + // SAFETY: Please see [`Trace`] #[inline] fn run_finalizer(&self) { Finalize::finalize(self); diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index fc0a43266b5..36078e1b143 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -113,6 +113,10 @@ impl GcBox { pub(crate) unsafe fn trace_inner(&self) { if !self.header.is_marked() && !self.header.is_ephemeron() { self.header.mark(); + // SAFETY: if `GcBox::trace_inner()` has been called, then, + // this box must have been deemed as reachable via tracing + // from a root, which by extension means that value has not + // been dropped either. unsafe { self.value.trace(); } @@ -120,7 +124,9 @@ impl GcBox { } /// Trace inner data - pub(crate) unsafe fn weak_trace_inner(&self) { + pub(crate) fn weak_trace_inner(&self) { + // SAFETY: if a `GcBox` has `weak_trace_inner` called, then the inner. + // value must have been deemed as reachable. 
unsafe { self.value.weak_trace(); } @@ -135,7 +141,9 @@ impl GcBox { /// Decreases the root count on this `GcBox`. /// Roots prevent the `GcBox` from being destroyed by the garbage collector. pub(crate) fn unroot_inner(&self) { - self.header.dec_roots(); + if !self.header.is_ephemeron() { + self.header.dec_roots(); + } } /// Returns a reference to the `GcBox`'s value. diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index abe95e28bac..8f054623201 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -145,6 +145,7 @@ impl Allocator { Self::manage_state(&mut gc); value.header.next.set(gc.adult_start.take()); + // Safety: Value Cannot be a null as it must be a GcBox let ptr = unsafe { NonNull::new_unchecked(Box::into_raw(Box::from(value))) }; gc.adult_start.set(Some(ptr)); @@ -183,35 +184,39 @@ struct Collector; impl Collector { fn run_full_collection(gc: &mut BoaGc) { + println!("run collect"); let _timer = Profiler::global().start_event("Gc Full Collection", "gc"); gc.runtime.collections += 1; - let unreachable_adults = unsafe { Self::mark_heap(&gc.adult_start) }; + let unreachable_adults = Self::mark_heap(&gc.adult_start); // Check if any unreachable nodes were found and finalize if !unreachable_adults.is_empty() { + // SAFETY: Please see `Collector::finalize()` unsafe { Self::finalize(unreachable_adults) }; } - let _final_unreachable_adults = unsafe { Self::mark_heap(&gc.adult_start) }; + let _final_unreachable_adults = Self::mark_heap(&gc.adult_start); + // SAFETY: Please see `Collector::sweep()` unsafe { Self::sweep(&gc.adult_start, &mut gc.runtime.bytes_allocated); } } - unsafe fn mark_heap( - head: &Cell>>>, - ) -> Vec>> { + fn mark_heap(head: &Cell>>>) -> Vec>> { let _timer = Profiler::global().start_event("Gc Marking", "gc"); // Walk the list, tracing and marking the nodes let mut finalize = Vec::new(); let mut ephemeron_queue = Vec::new(); let mut mark_head = head; while let Some(node) = mark_head.get() { + // SAFETY: node must be valid as it is coming directly from the heap. let node_ref = unsafe { node.as_ref() }; if node_ref.header.is_ephemeron() { ephemeron_queue.push(node); } else if node_ref.header.roots() > 0 { + // SAFETY: the reference to node must be valid as it is rooted. 
Passing + // invalid references can result in Undefined Behavior unsafe { node_ref.trace_inner(); } @@ -223,7 +228,7 @@ impl Collector { // Ephemeron Evaluation if !ephemeron_queue.is_empty() { - ephemeron_queue = unsafe { Self::mark_ephemerons(ephemeron_queue) }; + ephemeron_queue = Self::mark_ephemerons(ephemeron_queue); } // Any left over nodes in the ephemeron queue at this point are @@ -236,7 +241,7 @@ impl Collector { // Tracing Ephemerons/Weak is always requires tracing the inner nodes in case it ends up marking unmarked node // // Time complexity should be something like O(nd) where d is the longest chain of epehemerons - unsafe fn mark_ephemerons( + fn mark_ephemerons( initial_queue: Vec>>, ) -> Vec>> { let mut ephemeron_queue = initial_queue; @@ -245,6 +250,7 @@ impl Collector { // are reachable or unreachable let (reachable, other): (Vec<_>, Vec<_>) = ephemeron_queue.into_iter().partition(|node| { + // SAFETY: Any node on the eph_queue or the heap must be non null let node = unsafe { node.as_ref() }; if node.value.is_marked_ephemeron() { node.header.mark(); @@ -266,6 +272,7 @@ impl Collector { // enqueuing any ephemeron that is found during the trace for node in reachable { // TODO: deal with fetch ephemeron_queue + // SAFETY: Node must be a valid pointer or else it would not be deemed reachable. unsafe { node.as_ref().weak_trace_inner(); } diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index 371dcf62288..a795c5bb90e 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -10,9 +10,12 @@ use std::ptr::NonNull; #[derive(Debug)] /// A key-value pair where the value becomes unaccesible when the key is garbage collected. /// -/// See Racket's explanation on [**ephemerons**][eph] for a more detailed explanation. +/// See Racket's explanation on [**ephemerons**][eph] for a brief overview or read Barry Hayes' +/// [_Ephemerons_: a new finalization mechanism][acm]. +/// /// /// [eph]: https://docs.racket-lang.org/reference/ephemerons.html +/// [acm]: https://dl.acm.org/doi/10.1145/263700.263733 pub struct Ephemeron { inner_ptr: Cell>>>, } @@ -38,6 +41,7 @@ impl Ephemeron { #[inline] fn inner(&self) -> &GcBox> { + // SAFETY: GcBox> must live until it is unrooted by Drop unsafe { &*self.inner_ptr().as_ptr() } } @@ -57,6 +61,7 @@ impl Ephemeron { impl Finalize for Ephemeron {} +// SAFETY: Please see [`Trace] unsafe impl Trace for Ephemeron { #[inline] unsafe fn trace(&self) {} diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index daf1841868e..2634438f9dc 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -14,6 +14,7 @@ use crate::{finalizer_safe, Allocator}; // Technically, this function is safe, since we're just modifying the address of a pointer without // dereferencing it. 
pub(crate) fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { + // SAFETY: this should be safe as ptr must be a valid nonnull unsafe { ptr::write(addr_of_mut!(ptr).cast::<*mut u8>(), data.cast::()); } @@ -33,6 +34,7 @@ impl Gc { // // Note: Allocator can cause Collector to run let inner_ptr = Allocator::allocate(GcBox::new(value)); + // SAFETY: inner_ptr was just allocated, so it must be a valid value that implements [`Trace`] unsafe { (*inner_ptr.as_ptr()).value().unroot() } let gc = Self { inner_ptr: Cell::new(inner_ptr), @@ -58,6 +60,7 @@ pub(crate) unsafe fn clear_root_bit( let data = ptr.cast::(); let addr = data as isize; let ptr = set_data_ptr(ptr, data.wrapping_offset((addr & !1) - addr)); + // SAFETY: ptr must be a non null value unsafe { NonNull::new_unchecked(ptr) } } @@ -71,12 +74,14 @@ impl Gc { let data = ptr.cast::(); let addr = data as isize; let ptr = set_data_ptr(ptr, data.wrapping_offset((addr | 1) - addr)); + // SAFETY: ptr must be a non null value. unsafe { self.inner_ptr.set(NonNull::new_unchecked(ptr)); } } fn clear_root(&self) { + // SAFETY: inner_ptr must be a valid non-null pointer to a live GcBox. unsafe { self.inner_ptr.set(clear_root_bit(self.inner_ptr.get())); } @@ -85,20 +90,25 @@ impl Gc { #[inline] pub(crate) fn inner_ptr(&self) -> NonNull> { assert!(finalizer_safe()); + // SAFETY: inner_ptr must be a live GcBox. Calling this on a dropped GcBox + // can result in Undefined Behavior. unsafe { clear_root_bit(self.inner_ptr.get()) } } #[inline] fn inner(&self) -> &GcBox { + // SAFETY: Please see Gc::inner_ptr() unsafe { self.inner_ptr().as_ref() } } } impl Finalize for Gc {} +// SAFETY: Please see [`Trace`]. unsafe impl Trace for Gc { #[inline] unsafe fn trace(&self) { + // SAFETY: Inner must be live and allocated GcBox. unsafe { self.inner().trace_inner(); } @@ -106,9 +116,7 @@ unsafe impl Trace for Gc { #[inline] unsafe fn weak_trace(&self) { - unsafe { - self.inner().weak_trace_inner(); - } + self.inner().weak_trace_inner(); } #[inline] diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs index fac2edabf60..adaed251859 100644 --- a/boa_gc/src/test/weak.rs +++ b/boa_gc/src/test/weak.rs @@ -48,3 +48,16 @@ fn eph_ephemeron_test() { } }); } + +#[test] +fn eph_basic_alloc_dump_test() { + run_test(|| { + let gc_value = Gc::new(String::from("gc here")); + let _gc_two = Gc::new("hmmm"); + + let eph = Ephemeron::new(&gc_value, 4); + let _fourth = Gc::new("tail"); + + assert_eq!(*eph.key().expect("must be live"), String::from("gc here")); + }) +} From 4de6a58b2eab5856fbfb1a77084c887a77545cf8 Mon Sep 17 00:00:00 2001 From: nekevss Date: Wed, 9 Nov 2022 22:36:31 -0500 Subject: [PATCH 40/55] Fix doc error --- boa_gc/src/internals/eph_box.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/eph_box.rs index a46cbd79df7..5e7b30eafb3 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -127,7 +127,7 @@ unsafe impl Trace for EphemeronBox { /// /// # Safety /// - /// Please see [`Traced`]. + /// Please see [`Trace`]. 
#[inline] unsafe fn weak_trace(&self) { if self.is_marked() { From 8d629be412be32e914e326e856bd4d0112946cfa Mon Sep 17 00:00:00 2001 From: nekevss Date: Thu, 10 Nov 2022 00:41:17 -0500 Subject: [PATCH 41/55] Updated Trace docs and comments --- boa_gc/src/internals/eph_box.rs | 34 ++++++++------------------------ boa_gc/src/pointers/ephemeron.rs | 4 +++- boa_gc/src/pointers/gc.rs | 3 ++- 3 files changed, 13 insertions(+), 28 deletions(-) diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/eph_box.rs index 5e7b30eafb3..50f4a3f46be 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -102,32 +102,23 @@ impl Finalize for EphemeronBox { } } -// SAFETY: Please see [`Trace] +// SAFETY: EphemeronBox implements primarly two methods of trace `Trace::is_marked_ephemeron` +// to determine whether the key field is stored and `Trace;:weak_trace` which continues the `Trace::weak_trace()` +// into `key` and `value`. unsafe impl Trace for EphemeronBox { - /// # Safety - /// - /// Please see [`Trace`]. #[inline] unsafe fn trace(&self) { /* An ephemeron is never traced with Phase One Trace */ } /// Checks if the `key`'s `GcBox` has been marked by `Trace::trace()` or `Trace::weak_trace`. - /// - /// # Safety - /// - /// Please see [`Trace`]. #[inline] fn is_marked_ephemeron(&self) -> bool { self.is_marked() } /// Checks if this `EphemeronBox` has already been determined reachable. If so, continue to trace - /// value in `key` and `value` - /// - /// # Safety - /// - /// Please see [`Trace`]. + /// value in `key` and `value`. #[inline] unsafe fn weak_trace(&self) { if self.is_marked() { @@ -136,23 +127,14 @@ unsafe impl Trace for EphemeronBox { } } - /// # Safety - /// - /// Please see [`Trace`]. + // EphemeronBox does not implement root. #[inline] - unsafe fn root(&self) { - // An ephemeron here should probably not be rooted. - } + unsafe fn root(&self) {} - /// # Safety - /// - /// Please see [`Trace`]. + // EphemeronBox does not implement unroot #[inline] - unsafe fn unroot(&self) { - // An ephemeron is never rooted in the GcBoxHeader - } + unsafe fn unroot(&self) {} - // SAFETY: Please see [`Trace`] #[inline] fn run_finalizer(&self) { Finalize::finalize(self); diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index a795c5bb90e..8b5738547d4 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -61,11 +61,13 @@ impl Ephemeron { impl Finalize for Ephemeron {} -// SAFETY: Please see [`Trace] +// SAFETY: Ephemerons trace implementation is standard for everything except `Trace::weak_trace()`, +// which pushes the GcBox> onto the EphemeronQueue unsafe impl Trace for Ephemeron { #[inline] unsafe fn trace(&self) {} + // Push this Ephemeron's pointer onto the EphemeronQueue #[inline] unsafe fn weak_trace(&self) { EPHEMERON_QUEUE.with(|q| { diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index 2634438f9dc..a50b7c40ac2 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -104,7 +104,8 @@ impl Gc { impl Finalize for Gc {} -// SAFETY: Please see [`Trace`]. +// SAFETY: `Gc` maintains it's own rootedness and implements all methods of +// Trace. It is not possible to root an already rooted `Gc` and vice versa. 
unsafe impl Trace for Gc { #[inline] unsafe fn trace(&self) { From 36e6aa870309efc43d51705c5a5ad3f4ecaa0944 Mon Sep 17 00:00:00 2001 From: nekevss Date: Thu, 10 Nov 2022 18:08:47 -0500 Subject: [PATCH 42/55] Safety doc on GcCell's Trace implementation --- boa_gc/src/cell.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index e96eb650e71..cda7ea18454 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -221,13 +221,15 @@ impl Display for BorrowMutError { impl Finalize for GcCell {} -// SAFETY: Please see [`Trace`]. Borrowed +// SAFETY: GcCell maintains it's own BorrowState and rootedness. GcCell's implementation +// focuses on only continuing Trace based methods while the cell state is not written. +// Implementing a Trace while the cell is being written to or incorrectly implementing Trace +// on GcCell's value may cause Undefined Behavior unsafe impl Trace for GcCell { #[inline] unsafe fn trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), - // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).trace() }, } } @@ -236,7 +238,6 @@ unsafe impl Trace for GcCell { unsafe fn weak_trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), - // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).weak_trace() }, } } @@ -247,7 +248,6 @@ unsafe impl Trace for GcCell { match self.flags.get().borrowed() { BorrowState::Writing => (), - // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).root() }, } } @@ -259,7 +259,6 @@ unsafe impl Trace for GcCell { match self.flags.get().borrowed() { BorrowState::Writing => (), - // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).unroot() }, } } @@ -269,7 +268,6 @@ unsafe impl Trace for GcCell { Finalize::finalize(self); match self.flags.get().borrowed() { BorrowState::Writing => (), - // SAFETY: Please see [`Trace`] _ => unsafe { (*self.cell.get()).run_finalizer() }, } } From ec25eacd57e391d6d6c6da4fe172aef9bad35bf3 Mon Sep 17 00:00:00 2001 From: nekevss Date: Thu, 10 Nov 2022 18:47:57 -0500 Subject: [PATCH 43/55] Add test and check eph drop call is not from Collector --- boa_gc/src/lib.rs | 1 - boa_gc/src/pointers/ephemeron.rs | 8 +++++--- boa_gc/src/test/weak.rs | 27 +++++++++++++++++++++++++++ 3 files changed, 32 insertions(+), 4 deletions(-) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 8f054623201..ab357f5d762 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -184,7 +184,6 @@ struct Collector; impl Collector { fn run_full_collection(gc: &mut BoaGc) { - println!("run collect"); let _timer = Profiler::global().start_event("Gc Full Collection", "gc"); gc.runtime.collections += 1; let unreachable_adults = Self::mark_heap(&gc.adult_start); diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index 8b5738547d4..dc9345c54d9 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -34,8 +34,6 @@ impl Ephemeron { impl Ephemeron { #[inline] fn inner_ptr(&self) -> NonNull>> { - assert!(finalizer_safe()); - self.inner_ptr.get() } @@ -91,6 +89,10 @@ unsafe impl Trace for Ephemeron { impl Drop for Ephemeron { #[inline] fn drop(&mut self) { - self.inner().unroot_inner(); + // NOTE: We assert that this drop call is not a + // drop from `Collector::dump` or `Collector::sweep` + if finalizer_safe() { + self.inner().unroot_inner(); + } } } diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs index adaed251859..6cf8f1ad27e 
100644 --- a/boa_gc/src/test/weak.rs +++ b/boa_gc/src/test/weak.rs @@ -49,6 +49,33 @@ fn eph_ephemeron_test() { }); } +#[test] +fn eph_allocation_chains() { + run_test(|| { + let gc_value = Gc::new(String::from("foo")); + + { + let cloned_gc = gc_value.clone(); + let weak = WeakGc::new(&cloned_gc); + let wrap = Gc::new(weak); + + assert_eq!(wrap.value().expect("weak is live"), &String::from("foo")); + + let eph = Ephemeron::new(&wrap, 3); + + drop(cloned_gc); + force_collect(); + + assert_eq!(eph.key().expect("eph is still live").value().expect("weak is still live"), &String::from("foo")); + + drop(gc_value); + force_collect(); + + assert!(eph.key().expect("eph is still live").value().is_none()); + } + }) +} + #[test] fn eph_basic_alloc_dump_test() { run_test(|| { From 465c8bd0c00d72acaca757fab7c0980f5fef21a0 Mon Sep 17 00:00:00 2001 From: nekevss Date: Thu, 10 Nov 2022 18:52:05 -0500 Subject: [PATCH 44/55] Fmt and clippy lints on GcCell Trace methods --- boa_gc/src/cell.rs | 7 ++++++- boa_gc/src/pointers/gc.rs | 2 +- boa_gc/src/test/weak.rs | 12 +++++++++--- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index cda7ea18454..2849617f71e 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -222,7 +222,7 @@ impl Display for BorrowMutError { impl Finalize for GcCell {} // SAFETY: GcCell maintains it's own BorrowState and rootedness. GcCell's implementation -// focuses on only continuing Trace based methods while the cell state is not written. +// focuses on only continuing Trace based methods while the cell state is not written. // Implementing a Trace while the cell is being written to or incorrectly implementing Trace // on GcCell's value may cause Undefined Behavior unsafe impl Trace for GcCell { @@ -230,6 +230,7 @@ unsafe impl Trace for GcCell { unsafe fn trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see GcCell's Trace impl Safety note. _ => unsafe { (*self.cell.get()).trace() }, } } @@ -238,6 +239,7 @@ unsafe impl Trace for GcCell { unsafe fn weak_trace(&self) { match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see GcCell's Trace impl Safety note. _ => unsafe { (*self.cell.get()).weak_trace() }, } } @@ -248,6 +250,7 @@ unsafe impl Trace for GcCell { match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see GcCell's Trace impl Safety note. _ => unsafe { (*self.cell.get()).root() }, } } @@ -259,6 +262,7 @@ unsafe impl Trace for GcCell { match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see GcCell's Trace impl Safety note. _ => unsafe { (*self.cell.get()).unroot() }, } } @@ -268,6 +272,7 @@ unsafe impl Trace for GcCell { Finalize::finalize(self); match self.flags.get().borrowed() { BorrowState::Writing => (), + // SAFETY: Please see GcCell's Trace impl Safety note. _ => unsafe { (*self.cell.get()).run_finalizer() }, } } diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index a50b7c40ac2..6b42e085360 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -104,7 +104,7 @@ impl Gc { impl Finalize for Gc {} -// SAFETY: `Gc` maintains it's own rootedness and implements all methods of +// SAFETY: `Gc` maintains it's own rootedness and implements all methods of // Trace. It is not possible to root an already rooted `Gc` and vice versa. 
unsafe impl Trace for Gc { #[inline] diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs index 6cf8f1ad27e..30f97a434ca 100644 --- a/boa_gc/src/test/weak.rs +++ b/boa_gc/src/test/weak.rs @@ -58,15 +58,21 @@ fn eph_allocation_chains() { let cloned_gc = gc_value.clone(); let weak = WeakGc::new(&cloned_gc); let wrap = Gc::new(weak); - + assert_eq!(wrap.value().expect("weak is live"), &String::from("foo")); - + let eph = Ephemeron::new(&wrap, 3); drop(cloned_gc); force_collect(); - assert_eq!(eph.key().expect("eph is still live").value().expect("weak is still live"), &String::from("foo")); + assert_eq!( + eph.key() + .expect("eph is still live") + .value() + .expect("weak is still live"), + &String::from("foo") + ); drop(gc_value); force_collect(); From 37d263f5f85a90f9141c791569463bc619927bd8 Mon Sep 17 00:00:00 2001 From: nekevss Date: Thu, 10 Nov 2022 21:14:30 -0500 Subject: [PATCH 45/55] Upgrade and clone methods with more tests --- boa_gc/src/internals/eph_box.rs | 4 ++-- boa_gc/src/internals/gc_box.rs | 4 +--- boa_gc/src/pointers/ephemeron.rs | 28 ++++++++++++++++++++++++++ boa_gc/src/pointers/gc.rs | 28 ++++++++++++++++---------- boa_gc/src/pointers/weak.rs | 15 ++++++++++++++ boa_gc/src/test/weak.rs | 34 ++++++++++++++++++++++++++++++++ 6 files changed, 98 insertions(+), 15 deletions(-) diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/eph_box.rs index 50f4a3f46be..ee304264eb8 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -34,14 +34,14 @@ impl EphemeronBox { /// # Panics /// This method will panic if called while the garbage collector is dropping. #[inline] - fn inner_key_ptr(&self) -> Option<*mut GcBox> { + pub(crate) fn inner_key_ptr(&self) -> Option<*mut GcBox> { assert!(finalizer_safe()); self.key.get().map(NonNull::as_ptr) } /// Returns some reference to `key`'s `GcBox` or None #[inline] - fn inner_key(&self) -> Option<&GcBox> { + pub(crate) fn inner_key(&self) -> Option<&GcBox> { // SAFETY: This is safe as `EphemeronBox::inner_key_ptr()` will // fetch either a live `GcBox` or None. The value of `key` is set // to None in the case where `EphemeronBox` and `key`'s `GcBox` diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index 36078e1b143..12cdd2141e1 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -141,9 +141,7 @@ impl GcBox { /// Decreases the root count on this `GcBox`. /// Roots prevent the `GcBox` from being destroyed by the garbage collector. pub(crate) fn unroot_inner(&self) { - if !self.header.is_ephemeron() { - self.header.dec_roots(); - } + self.header.dec_roots(); } /// Returns a reference to the `GcBox`'s value. diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index dc9345c54d9..4ac5bda3915 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -55,6 +55,16 @@ impl Ephemeron { pub fn value(&self) -> &V { self.inner().value().value() } + + #[inline] + /// Gets a `Gc` for the stored key of this `Ephemeron`. + pub fn upgrade_key(&self) -> Option> { + // SAFETY: ptr must be a valid pointer or None would have been returned. 
+ self.inner().value().inner_key_ptr().map(|ptr| unsafe { + let inner_ptr = NonNull::new_unchecked(ptr); + Gc::from_ptr(inner_ptr) + }) + } } impl Finalize for Ephemeron {} @@ -86,12 +96,30 @@ unsafe impl Trace for Ephemeron { } } +impl Clone for Ephemeron { + #[inline] + fn clone(&self) -> Ephemeron { + // SAFETY: This is safe because the inner_ptr must live as long as it's roots. + // Mismanagement of roots can cause inner_ptr to use after free or Undefined + // Behavior. + unsafe { + let eph = Ephemeron { + inner_ptr: Cell::new(NonNull::new_unchecked(self.inner_ptr().as_ptr())), + }; + // Increment the Ephemeron's GcBox roots by 1 + self.inner().root_inner(); + eph + } + } +} + impl Drop for Ephemeron { #[inline] fn drop(&mut self) { // NOTE: We assert that this drop call is not a // drop from `Collector::dump` or `Collector::sweep` if finalizer_safe() { + println!("drop was run"); self.inner().unroot_inner(); } } diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index 6b42e085360..4213ea3ff34 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -23,8 +23,8 @@ pub(crate) fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut /// A garbage-collected pointer type over an immutable value. pub struct Gc { - inner_ptr: Cell>>, - marker: PhantomData>, + pub(crate) inner_ptr: Cell>>, + pub(crate) marker: PhantomData>, } impl Gc { @@ -50,6 +50,20 @@ impl Gc { pub fn ptr_eq(this: &Gc, other: &Gc) -> bool { GcBox::ptr_eq(this.inner(), other.inner()) } + + /// Will return a new rooted `Gc` from a `GcBox` pointer + pub(crate) fn from_ptr(ptr: NonNull>) -> Gc { + // SAFETY: the value provided as a pointer MUST be a valid GcBox. + unsafe { + ptr.as_ref().root_inner(); + let gc = Gc { + inner_ptr: Cell::new(ptr), + marker: PhantomData, + }; + gc.set_root(); + gc + } + } } /// Returns the given pointer with its root bit cleared. @@ -69,7 +83,7 @@ impl Gc { self.inner_ptr.get().as_ptr().cast::() as usize & 1 != 0 } - fn set_root(&self) { + pub(crate) fn set_root(&self) { let ptr = self.inner_ptr.get().as_ptr(); let data = ptr.cast::(); let addr = data as isize; @@ -149,13 +163,7 @@ unsafe impl Trace for Gc { impl Clone for Gc { #[inline] fn clone(&self) -> Self { - self.inner().root_inner(); - let gc = Gc { - inner_ptr: Cell::new(self.inner_ptr.get()), - marker: PhantomData, - }; - gc.set_root(); - gc + Gc::from_ptr(self.inner_ptr()) } } diff --git a/boa_gc/src/pointers/weak.rs b/boa_gc/src/pointers/weak.rs index f349ab262c6..398ad39632e 100644 --- a/boa_gc/src/pointers/weak.rs +++ b/boa_gc/src/pointers/weak.rs @@ -25,6 +25,21 @@ impl WeakGc { pub fn value(&self) -> Option<&T> { self.inner.key() } + + #[inline] + /// Upgrade returns a `Gc` pointer for the internal value if valid, or None if the value was already garbage collected. 
+ pub fn upgrade(&self) -> Option> { + self.inner.upgrade_key() + } +} + +impl Clone for WeakGc { + #[inline] + fn clone(&self) -> Self { + WeakGc { + inner: self.inner.clone(), + } + } } impl From> for WeakGc { diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs index 30f97a434ca..9504799fec7 100644 --- a/boa_gc/src/test/weak.rs +++ b/boa_gc/src/test/weak.rs @@ -94,3 +94,37 @@ fn eph_basic_alloc_dump_test() { assert_eq!(*eph.key().expect("must be live"), String::from("gc here")); }) } + +#[test] +fn eph_basic_upgrade_test() { + run_test(|| { + let init_gc = Gc::new(String::from("foo")); + + let weak = WeakGc::new(&init_gc); + + let new_gc = weak.upgrade().expect("Weak is still live"); + + drop(weak); + force_collect(); + + assert_eq!(*init_gc, *new_gc); + }) +} + +#[test] +fn eph_basic_clone_test() { + run_test(|| { + let init_gc = Gc::new(String::from("bar")); + + let weak = WeakGc::new(&init_gc); + + let new_gc = weak.upgrade().expect("Weak is live"); + let new_weak = weak.clone(); + + drop(weak); + force_collect(); + + assert_eq!(*new_gc, *new_weak.value().expect("weak should be live")); + assert_eq!(*init_gc, *new_weak.value().expect("weak_should be live still")); + }) +} \ No newline at end of file From 5ffe1b3bd775cacafb4088c4c3256423c8e2ee77 Mon Sep 17 00:00:00 2001 From: nekevss Date: Thu, 10 Nov 2022 21:19:27 -0500 Subject: [PATCH 46/55] cargo fmt --- boa_gc/src/test/weak.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs index 9504799fec7..8defb47c22d 100644 --- a/boa_gc/src/test/weak.rs +++ b/boa_gc/src/test/weak.rs @@ -99,7 +99,7 @@ fn eph_basic_alloc_dump_test() { fn eph_basic_upgrade_test() { run_test(|| { let init_gc = Gc::new(String::from("foo")); - + let weak = WeakGc::new(&init_gc); let new_gc = weak.upgrade().expect("Weak is still live"); @@ -125,6 +125,9 @@ fn eph_basic_clone_test() { force_collect(); assert_eq!(*new_gc, *new_weak.value().expect("weak should be live")); - assert_eq!(*init_gc, *new_weak.value().expect("weak_should be live still")); + assert_eq!( + *init_gc, + *new_weak.value().expect("weak_should be live still") + ); }) -} \ No newline at end of file +} From 51d0ca5b3318132bf3ed2300d1f5b07a2b64d13f Mon Sep 17 00:00:00 2001 From: nekevss Date: Fri, 11 Nov 2022 00:33:27 -0500 Subject: [PATCH 47/55] Removed print, oops --- boa_gc/src/pointers/ephemeron.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index 4ac5bda3915..48e99823e2f 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -119,7 +119,6 @@ impl Drop for Ephemeron { // NOTE: We assert that this drop call is not a // drop from `Collector::dump` or `Collector::sweep` if finalizer_safe() { - println!("drop was run"); self.inner().unroot_inner(); } } From a68e5b5d835b93ba0808f3b9c279d51a99bf5fe7 Mon Sep 17 00:00:00 2001 From: nekevss Date: Fri, 11 Nov 2022 09:16:08 -0500 Subject: [PATCH 48/55] Review updates - debug for GcBoxHeader, more documentation --- boa_gc/src/internals/gc_box.rs | 51 ++++++++++++++++++++++++++++++---- boa_gc/src/lib.rs | 5 ++++ 2 files changed, 50 insertions(+), 6 deletions(-) diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index 12cdd2141e1..2a5b5786dc6 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -1,5 +1,6 @@ use crate::Trace; use std::cell::Cell; +use std::fmt; use std::ptr::{self, NonNull}; // Age and Weak Flags @@ 
-8,12 +9,23 @@ const WEAK_MASK: usize = 1 << (usize::BITS - 1); const ROOTS_MASK: usize = !(MARK_MASK | WEAK_MASK); const ROOTS_MAX: usize = ROOTS_MASK; +/// The `GcBoxheader` contains the `GcBox`'s current state for the `Collector`'s +/// Mark/Sweep as well as a pointer to the next node in the heap. +/// +/// These flags include: +/// - Root Count +/// - Mark Flag Bit +/// - Weak Flag Bit +/// +/// The next node is set by the `Allocator` during initialization and by the +/// `Collector` during the sweep phase. pub(crate) struct GcBoxHeader { roots: Cell, pub(crate) next: Cell>>>, } impl GcBoxHeader { + /// Creates a new `GcBoxHeader` with a root of 1 and next set to None. #[inline] pub(crate) fn new() -> Self { GcBoxHeader { @@ -22,6 +34,7 @@ impl GcBoxHeader { } } + /// Creates a new `GcBoxHeader` with the Weak bit at 1 and roots of 1. #[inline] pub(crate) fn new_weak() -> Self { // Set weak_flag @@ -31,11 +44,13 @@ impl GcBoxHeader { } } + /// Returns the `GcBoxHeader`'s current root count #[inline] pub(crate) fn roots(&self) -> usize { self.roots.get() & ROOTS_MASK } + /// Increments `GcBoxHeader`'s root count. #[inline] pub(crate) fn inc_roots(&self) { let roots = self.roots.get(); @@ -43,58 +58,73 @@ impl GcBoxHeader { if (roots & ROOTS_MASK) < ROOTS_MAX { self.roots.set(roots + 1); } else { - // TODO: implement a better way to handle root overload + // TODO: implement a better way to handle root overload. panic!("roots counter overflow"); } } + /// Decreases `GcBoxHeader`'s current root count. #[inline] pub(crate) fn dec_roots(&self) { - // Underflow check as a stop gap for current issue when dropping + // Underflow check as a stop gap for current issue when dropping. if self.roots.get() > 0 { self.roots.set(self.roots.get() - 1); } } + /// Returns a bool for whether `GcBoxHeader`'s mark bit is 1. #[inline] pub(crate) fn is_marked(&self) -> bool { self.roots.get() & MARK_MASK != 0 } + /// Sets `GcBoxHeader`'s mark bit to 1. #[inline] pub(crate) fn mark(&self) { self.roots.set(self.roots.get() | MARK_MASK); } + /// Sets `GcBoxHeader`'s mark bit to 0. #[inline] pub(crate) fn unmark(&self) { self.roots.set(self.roots.get() & !MARK_MASK); } + /// Returns a bool for whether the `GcBoxHeader`'s weak bit is 1. #[inline] pub(crate) fn is_ephemeron(&self) -> bool { self.roots.get() & WEAK_MASK != 0 } } -// NOTE: [repr(C)] is most likely unneeded here, but will keep it for now +impl fmt::Debug for GcBoxHeader { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("GcBoxHeader") + .field("Roots", &self.roots()) + .field("Weak", &self.is_ephemeron()) + .field("Marked", &self.is_marked()) + .finish() + } +} + /// A garbage collected allocation. -#[repr(C)] pub(crate) struct GcBox { pub(crate) header: GcBoxHeader, pub(crate) value: T, } impl GcBox { + /// Returns a new `GcBox` with a rooted `GcBoxHeader`. pub(crate) fn new(value: T) -> Self { - GcBox { + Self { header: GcBoxHeader::new(), value, } } + /// Returns a new `GcBox` with a rooted and weak `GcBoxHeader`. pub(crate) fn new_weak(value: T) -> Self { - GcBox { + Self { header: GcBoxHeader::new_weak(), value, } @@ -110,6 +140,7 @@ impl GcBox { } /// Marks this `GcBox` and marks through its data. + #[inline] pub(crate) unsafe fn trace_inner(&self) { if !self.header.is_marked() && !self.header.is_ephemeron() { self.header.mark(); @@ -124,6 +155,7 @@ impl GcBox { } /// Trace inner data + #[inline] pub(crate) fn weak_trace_inner(&self) { // SAFETY: if a `GcBox` has `weak_trace_inner` called, then the inner. 
// value must have been deemed as reachable. @@ -133,22 +165,29 @@ impl GcBox { } /// Increases the root count on this `GcBox`. + /// /// Roots prevent the `GcBox` from being destroyed by the garbage collector. + #[inline] pub(crate) fn root_inner(&self) { self.header.inc_roots(); } /// Decreases the root count on this `GcBox`. + /// /// Roots prevent the `GcBox` from being destroyed by the garbage collector. + #[inline] pub(crate) fn unroot_inner(&self) { self.header.dec_roots(); } /// Returns a reference to the `GcBox`'s value. + #[inline] pub(crate) fn value(&self) -> &T { &self.value } + /// Returns a bool for whether the header is marked. + #[inline] pub(crate) fn is_marked(&self) -> bool { self.header.is_marked() } diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index ab357f5d762..c27ad9ea837 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -73,6 +73,7 @@ thread_local!(static BOA_GC: RefCell = RefCell::new( BoaGc { adult_start: Cell::new(None), })); +#[derive(Debug, Clone, Copy)] struct GcConfig { threshold: usize, used_space_percentage: usize, @@ -137,6 +138,7 @@ pub fn finalizer_safe() -> bool { struct Allocator; impl Allocator { + /// Allocate a new garbage collected value to the Garbage Collector's heap. fn allocate(value: GcBox) -> NonNull> { let _timer = Profiler::global().start_event("New Pointer", "BoaAlloc"); let element_size = mem::size_of_val::>(&value); @@ -183,6 +185,7 @@ impl Allocator { struct Collector; impl Collector { + /// Run a collection on the full heap. fn run_full_collection(gc: &mut BoaGc) { let _timer = Profiler::global().start_event("Gc Full Collection", "gc"); gc.runtime.collections += 1; @@ -202,6 +205,7 @@ impl Collector { } } + /// Walk the heap and mark any nodes deemed reachable fn mark_heap(head: &Cell>>>) -> Vec>> { let _timer = Profiler::global().start_event("Gc Marking", "gc"); // Walk the list, tracing and marking the nodes @@ -240,6 +244,7 @@ impl Collector { // Tracing Ephemerons/Weak is always requires tracing the inner nodes in case it ends up marking unmarked node // // Time complexity should be something like O(nd) where d is the longest chain of epehemerons + /// Mark any ephemerons that are deemed live and trace their fields. 
fn mark_ephemerons( initial_queue: Vec>>, ) -> Vec>> { From 4603cce74fce6f4a8c9df87ec63dfc813fdbc114 Mon Sep 17 00:00:00 2001 From: jedel1043 Date: Fri, 11 Nov 2022 12:55:29 -0600 Subject: [PATCH 49/55] Remove `gc` from `boa_examples` --- Cargo.lock | 7 ------- boa_examples/Cargo.toml | 1 - 2 files changed, 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 719e0bb0d62..e84d0886a99 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -147,7 +147,6 @@ dependencies = [ "boa_gc", "boa_interner", "boa_parser", - "gc", ] [[package]] @@ -680,12 +679,6 @@ dependencies = [ "byteorder", ] -[[package]] -name = "gc" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3edaac0f5832202ebc99520cb77c932248010c4645d20be1dc62d6579f5b3752" - [[package]] name = "getrandom" version = "0.2.8" diff --git a/boa_examples/Cargo.toml b/boa_examples/Cargo.toml index 28b1be49752..b6135a7dc82 100644 --- a/boa_examples/Cargo.toml +++ b/boa_examples/Cargo.toml @@ -17,4 +17,3 @@ boa_ast.workspace = true boa_interner.workspace = true boa_gc.workspace = true boa_parser.workspace = true -gc = "0.4.1" From f85f746815e7e859c9ae1381e581c5d9774ea119 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 13 Nov 2022 09:07:34 -0500 Subject: [PATCH 50/55] Address review and new clippy lints --- boa_gc/src/cell.rs | 13 +++++++------ boa_gc/src/internals/eph_box.rs | 4 ++-- boa_gc/src/internals/gc_box.rs | 6 ++++-- boa_gc/src/lib.rs | 15 +++++++++++---- boa_gc/src/pointers/ephemeron.rs | 4 ++-- boa_gc/src/pointers/gc.rs | 9 +++++---- boa_gc/src/pointers/weak.rs | 2 +- boa_gc/src/test/weak.rs | 8 ++++---- 8 files changed, 36 insertions(+), 25 deletions(-) diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index 2849617f71e..03643c521be 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -39,12 +39,12 @@ impl BorrowFlag { pub(crate) fn set_writing(self) -> Self { // Set every bit other than the root bit, which is preserved - BorrowFlag(self.0 | WRITING) + Self(self.0 | WRITING) } pub(crate) fn set_unused(self) -> Self { // Clear every bit other than the root bit, which is preserved - BorrowFlag(self.0 & ROOT) + Self(self.0 & ROOT) } pub(crate) fn add_reading(self) -> Self { @@ -54,7 +54,7 @@ impl BorrowFlag { // this is equivalent to the following, more complicated, expression: // // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) + 1) << 1)) - let flags = BorrowFlag(self.0 + 0b10); + let flags = Self(self.0 + 0b10); // This will fail if the borrow count overflows, which shouldn't happen, // but let's be safe @@ -72,12 +72,12 @@ impl BorrowFlag { // complicated, expression: // // BorrowFlag((self.0 & ROOT) | (((self.0 >> 1) - 1) << 1)) - BorrowFlag(self.0 - 0b10) + Self(self.0 - 0b10) } pub(crate) fn set_rooted(self, rooted: bool) -> Self { // Preserve the non-root bits - BorrowFlag((self.0 & !ROOT) | (usize::from(rooted))) + Self((self.0 & !ROOT) | (usize::from(rooted))) } } @@ -94,7 +94,7 @@ impl GcCell { /// Creates a new `GcCell` containing `value`. 
#[inline] pub fn new(value: T) -> Self { - GcCell { + Self { flags: Cell::new(BORROWFLAG_INIT), cell: UnsafeCell::new(value), } @@ -534,6 +534,7 @@ impl PartialOrd for GcCell { impl Ord for GcCell { #[inline] + #[allow(clippy::use_self)] fn cmp(&self, other: &GcCell) -> Ordering { (*self.borrow()).cmp(&*other.borrow()) } diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/eph_box.rs index ee304264eb8..e80145d0774 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/eph_box.rs @@ -12,7 +12,7 @@ pub(crate) struct EphemeronBox EphemeronBox { pub(crate) fn new(key: &Gc, value: V) -> Self { - EphemeronBox { + Self { key: Cell::new(Some(key.inner_ptr())), value, } @@ -103,7 +103,7 @@ impl Finalize for EphemeronBox { } // SAFETY: EphemeronBox implements primarly two methods of trace `Trace::is_marked_ephemeron` -// to determine whether the key field is stored and `Trace;:weak_trace` which continues the `Trace::weak_trace()` +// to determine whether the key field is stored and `Trace::weak_trace` which continues the `Trace::weak_trace()` // into `key` and `value`. unsafe impl Trace for EphemeronBox { #[inline] diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index 2a5b5786dc6..6018c3f430d 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -28,7 +28,7 @@ impl GcBoxHeader { /// Creates a new `GcBoxHeader` with a root of 1 and next set to None. #[inline] pub(crate) fn new() -> Self { - GcBoxHeader { + Self { roots: Cell::new(1), next: Cell::new(None), } @@ -38,7 +38,7 @@ impl GcBoxHeader { #[inline] pub(crate) fn new_weak() -> Self { // Set weak_flag - GcBoxHeader { + Self { roots: Cell::new(WEAK_MASK | 1), next: Cell::new(None), } @@ -108,6 +108,7 @@ impl fmt::Debug for GcBoxHeader { } /// A garbage collected allocation. +#[derive(Debug)] pub(crate) struct GcBox { pub(crate) header: GcBoxHeader, pub(crate) value: T, @@ -133,6 +134,7 @@ impl GcBox { impl GcBox { /// Returns `true` if the two references refer to the same `GcBox`. + #[allow(clippy::use_self)] pub(crate) fn ptr_eq(this: &GcBox, other: &GcBox) -> bool { // Use .header to ignore fat pointer vtables, to work around // https://github.com/rust-lang/rust/issues/46139 diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index c27ad9ea837..7604fb2e10c 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -15,6 +15,7 @@ clippy::all, clippy::cast_lossless, clippy::redundant_closure_for_method_calls, + clippy::use_self, clippy::unnested_or_patterns, clippy::trivially_copy_pass_by_ref, clippy::needless_pass_by_value, @@ -91,12 +92,13 @@ impl Default for GcConfig { } } -#[derive(Default)] +#[derive(Default, Debug, Clone, Copy)] struct GcRuntimeData { collections: usize, bytes_allocated: usize, } +#[derive(Debug)] struct BoaGc { config: GcConfig, runtime: GcRuntimeData, @@ -108,15 +110,19 @@ impl Drop for BoaGc { Collector::dump(self); } } + // Whether or not the thread is currently in the sweep phase of garbage collection. // During this phase, attempts to dereference a `Gc` pointer will trigger a panic. 
- +/// `DropGuard` flags whether the Collector is currently running `Collector::sweep()` or `Collector::dump()` +/// +/// While the `DropGuard` is active, all `GcBox`s must not be dereferenced or accessed as it could cause Undefined Behavior +#[derive(Debug, Clone)] struct DropGuard; impl DropGuard { - fn new() -> DropGuard { + fn new() -> Self { GC_DROPPING.with(|dropping| dropping.set(true)); - DropGuard + Self } } @@ -128,6 +134,7 @@ impl Drop for DropGuard { /// Returns `true` if it is safe for a type to run [`Finalize::finalize`]. #[must_use] +#[inline] pub fn finalizer_safe() -> bool { GC_DROPPING.with(|dropping| !dropping.get()) } diff --git a/boa_gc/src/pointers/ephemeron.rs b/boa_gc/src/pointers/ephemeron.rs index 48e99823e2f..35702a7b119 100644 --- a/boa_gc/src/pointers/ephemeron.rs +++ b/boa_gc/src/pointers/ephemeron.rs @@ -98,12 +98,12 @@ unsafe impl Trace for Ephemeron { impl Clone for Ephemeron { #[inline] - fn clone(&self) -> Ephemeron { + fn clone(&self) -> Self { // SAFETY: This is safe because the inner_ptr must live as long as it's roots. // Mismanagement of roots can cause inner_ptr to use after free or Undefined // Behavior. unsafe { - let eph = Ephemeron { + let eph = Self { inner_ptr: Cell::new(NonNull::new_unchecked(self.inner_ptr().as_ptr())), }; // Increment the Ephemeron's GcBox roots by 1 diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index 4213ea3ff34..cf904215eca 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -47,16 +47,17 @@ impl Gc { impl Gc { /// Returns `true` if the two `Gc`s point to the same allocation. + #[allow(clippy::use_self)] pub fn ptr_eq(this: &Gc, other: &Gc) -> bool { GcBox::ptr_eq(this.inner(), other.inner()) } /// Will return a new rooted `Gc` from a `GcBox` pointer - pub(crate) fn from_ptr(ptr: NonNull>) -> Gc { + pub(crate) fn from_ptr(ptr: NonNull>) -> Self { // SAFETY: the value provided as a pointer MUST be a valid GcBox. 
unsafe { ptr.as_ref().root_inner(); - let gc = Gc { + let gc = Self { inner_ptr: Cell::new(ptr), marker: PhantomData, }; @@ -163,7 +164,7 @@ unsafe impl Trace for Gc { impl Clone for Gc { #[inline] fn clone(&self) -> Self { - Gc::from_ptr(self.inner_ptr()) + Self::from_ptr(self.inner_ptr()) } } @@ -189,7 +190,7 @@ impl Drop for Gc { impl Default for Gc { #[inline] fn default() -> Self { - Gc::new(Default::default()) + Self::new(Default::default()) } } diff --git a/boa_gc/src/pointers/weak.rs b/boa_gc/src/pointers/weak.rs index 398ad39632e..26a1945efa2 100644 --- a/boa_gc/src/pointers/weak.rs +++ b/boa_gc/src/pointers/weak.rs @@ -36,7 +36,7 @@ impl WeakGc { impl Clone for WeakGc { #[inline] fn clone(&self) -> Self { - WeakGc { + Self { inner: self.inner.clone(), } } diff --git a/boa_gc/src/test/weak.rs b/boa_gc/src/test/weak.rs index 8defb47c22d..5f1e33ccc19 100644 --- a/boa_gc/src/test/weak.rs +++ b/boa_gc/src/test/weak.rs @@ -79,7 +79,7 @@ fn eph_allocation_chains() { assert!(eph.key().expect("eph is still live").value().is_none()); } - }) + }); } #[test] @@ -92,7 +92,7 @@ fn eph_basic_alloc_dump_test() { let _fourth = Gc::new("tail"); assert_eq!(*eph.key().expect("must be live"), String::from("gc here")); - }) + }); } #[test] @@ -108,7 +108,7 @@ fn eph_basic_upgrade_test() { force_collect(); assert_eq!(*init_gc, *new_gc); - }) + }); } #[test] @@ -129,5 +129,5 @@ fn eph_basic_clone_test() { *init_gc, *new_weak.value().expect("weak_should be live still") ); - }) + }); } From 2ca8320fae11daefe2f7a37e7d5fb322b2f2cec7 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 13 Nov 2022 09:12:09 -0500 Subject: [PATCH 51/55] Forgot derive on Allocator --- boa_gc/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/boa_gc/src/lib.rs b/boa_gc/src/lib.rs index 7604fb2e10c..4b26855ea85 100644 --- a/boa_gc/src/lib.rs +++ b/boa_gc/src/lib.rs @@ -142,6 +142,7 @@ pub fn finalizer_safe() -> bool { /// The Allocator handles allocation of garbage collected values. /// /// The allocator can trigger a garbage collection. +#[derive(Debug, Clone, Copy)] struct Allocator; impl Allocator { From 3d5005d2dbe719736c7ab351f4da495014e85e68 Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 13 Nov 2022 10:14:44 -0500 Subject: [PATCH 52/55] Remove use_self allows --- boa_gc/src/cell.rs | 3 +-- boa_gc/src/internals/gc_box.rs | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index 03643c521be..84abaad209c 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -534,8 +534,7 @@ impl PartialOrd for GcCell { impl Ord for GcCell { #[inline] - #[allow(clippy::use_self)] - fn cmp(&self, other: &GcCell) -> Ordering { + fn cmp(&self, other: &Self) -> Ordering { (*self.borrow()).cmp(&*other.borrow()) } } diff --git a/boa_gc/src/internals/gc_box.rs b/boa_gc/src/internals/gc_box.rs index 6018c3f430d..eaca4b48f79 100644 --- a/boa_gc/src/internals/gc_box.rs +++ b/boa_gc/src/internals/gc_box.rs @@ -134,8 +134,7 @@ impl GcBox { impl GcBox { /// Returns `true` if the two references refer to the same `GcBox`. 
- #[allow(clippy::use_self)] - pub(crate) fn ptr_eq(this: &GcBox, other: &GcBox) -> bool { + pub(crate) fn ptr_eq(this: &Self, other: &Self) -> bool { // Use .header to ignore fat pointer vtables, to work around // https://github.com/rust-lang/rust/issues/46139 ptr::eq(&this.header, &other.header) From 973d11771a5fac324a56483dd18580d9840b5dfa Mon Sep 17 00:00:00 2001 From: nekevss Date: Sun, 13 Nov 2022 10:16:12 -0500 Subject: [PATCH 53/55] Missed one --- boa_gc/src/pointers/gc.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/boa_gc/src/pointers/gc.rs b/boa_gc/src/pointers/gc.rs index cf904215eca..df44a8b327e 100644 --- a/boa_gc/src/pointers/gc.rs +++ b/boa_gc/src/pointers/gc.rs @@ -47,8 +47,7 @@ impl Gc { impl Gc { /// Returns `true` if the two `Gc`s point to the same allocation. - #[allow(clippy::use_self)] - pub fn ptr_eq(this: &Gc, other: &Gc) -> bool { + pub fn ptr_eq(this: &Self, other: &Self) -> bool { GcBox::ptr_eq(this.inner(), other.inner()) } From 10d5d26976eb78d27c1d55ef0be2cd0ce0527664 Mon Sep 17 00:00:00 2001 From: nekevss Date: Mon, 14 Nov 2022 13:25:42 -0500 Subject: [PATCH 54/55] Add inline and docs to BorrowFlag & other review changes --- boa_gc/src/cell.rs | 41 ++++++++++++++++++- .../{eph_box.rs => ephemeron_box.rs} | 6 ++- boa_gc/src/internals/mod.rs | 4 +- 3 files changed, 47 insertions(+), 4 deletions(-) rename boa_gc/src/internals/{eph_box.rs => ephemeron_box.rs} (94%) diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index 84abaad209c..6f4afbe44e8 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -7,9 +7,16 @@ use std::ops::{Deref, DerefMut}; use crate::trace::{Finalize, Trace}; +/// `BorrowFlag` represent the internal state of a `GcCell` and +/// keeps track of the amount of current borrows. #[derive(Copy, Clone)] pub(crate) struct BorrowFlag(usize); +/// `BorrowState` represents the various states of a `BorrowFlag` +/// +/// - Reading: the value is currently being read/borrowed. +/// - Writing: the value is currently being written/borrowed mutably. +/// - Unused: the value is currently unrooted. #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub(crate) enum BorrowState { Reading, @@ -22,9 +29,11 @@ const WRITING: usize = !1; const UNUSED: usize = 0; /// The base borrowflag init is rooted, and has no outstanding borrows. -pub(crate) const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(1); +pub(crate) const BORROWFLAG_INIT: BorrowFlag = BorrowFlag(ROOT); impl BorrowFlag { + /// Check the current `BorrowState` of `BorrowFlag`. + #[inline] pub(crate) fn borrowed(self) -> BorrowState { match self.0 & !ROOT { UNUSED => BorrowState::Unused, @@ -33,20 +42,32 @@ impl BorrowFlag { } } + /// Check whether the borrow bit is flagged. + #[inline] pub(crate) fn rooted(self) -> bool { self.0 & ROOT > 0 } + /// Set the `BorrowFlag`'s state to writing. + #[inline] pub(crate) fn set_writing(self) -> Self { // Set every bit other than the root bit, which is preserved Self(self.0 | WRITING) } + /// Remove the root flag on `BorrowFlag` + #[inline] pub(crate) fn set_unused(self) -> Self { // Clear every bit other than the root bit, which is preserved Self(self.0 & ROOT) } + /// Increments the counter for a new borrow. + /// + /// # Panic + /// - This method will panic if the current `BorrowState` is writing. + /// - This method will panic after incrementing if the borrow count overflows. + #[inline] pub(crate) fn add_reading(self) -> Self { assert!(self.borrowed() != BorrowState::Writing); // Add 1 to the integer starting at the second binary digit. 
As our @@ -64,6 +85,11 @@ impl BorrowFlag { flags } + /// Decrements the counter to remove a borrow. + /// + /// # Panic + /// - This method will panic if the current `BorrowState` is not reading. + #[inline] pub(crate) fn sub_reading(self) -> Self { assert!(self.borrowed() == BorrowState::Reading); // Subtract 1 from the integer starting at the second binary digit. As @@ -75,12 +101,23 @@ impl BorrowFlag { Self(self.0 - 0b10) } + /// Set the root flag on the `BorrowFlag`. + #[inline] pub(crate) fn set_rooted(self, rooted: bool) -> Self { // Preserve the non-root bits Self((self.0 & !ROOT) | (usize::from(rooted))) } } +impl Debug for BorrowFlag { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("BorrowFlag") + .field("Rooted", &self.rooted()) + .field("State", &self.borrowed()) + .finish() + } +} + /// A mutable memory location with dynamically checked borrow rules /// that can be used inside of a garbage-collected pointer. /// @@ -544,10 +581,12 @@ impl Debug for GcCell { match self.flags.get().borrowed() { BorrowState::Unused | BorrowState::Reading => f .debug_struct("GcCell") + .field("flags", &self.flags.get()) .field("value", &self.borrow()) .finish(), BorrowState::Writing => f .debug_struct("GcCell") + .field("flags", &self.flags.get()) .field("value", &"") .finish(), } diff --git a/boa_gc/src/internals/eph_box.rs b/boa_gc/src/internals/ephemeron_box.rs similarity index 94% rename from boa_gc/src/internals/eph_box.rs rename to boa_gc/src/internals/ephemeron_box.rs index e80145d0774..bbb4427790f 100644 --- a/boa_gc/src/internals/eph_box.rs +++ b/boa_gc/src/internals/ephemeron_box.rs @@ -80,7 +80,6 @@ impl EphemeronBox { } /// Calls [`Trace::weak_trace()`][crate::Trace] on value - /// #[inline] fn weak_trace_value(&self) { // SAFETY: Value is a sized element that must implement trace. The @@ -135,6 +134,11 @@ unsafe impl Trace for EphemeronBox { #[inline] unsafe fn unroot(&self) {} + // An `EphemeronBox`'s key is set to None once it has been finalized. + // + // NOTE: while it is possible for the `key`'s pointer value to be + // resurrected, we should still consider the finalize the ephemeron + // box and set the `key` to None. #[inline] fn run_finalizer(&self) { Finalize::finalize(self); diff --git a/boa_gc/src/internals/mod.rs b/boa_gc/src/internals/mod.rs index d8c26c985b3..005c000ade5 100644 --- a/boa_gc/src/internals/mod.rs +++ b/boa_gc/src/internals/mod.rs @@ -1,5 +1,5 @@ -mod eph_box; -pub(crate) use eph_box::EphemeronBox; +mod ephemeron_box; +pub(crate) use ephemeron_box::EphemeronBox; mod gc_box; pub(crate) use gc_box::GcBox; From 7b477b6292c4253fdabe7b84dedf33cf2154a7b8 Mon Sep 17 00:00:00 2001 From: nekevss Date: Mon, 14 Nov 2022 13:27:26 -0500 Subject: [PATCH 55/55] Rustfmt --- boa_gc/src/cell.rs | 6 +++--- boa_gc/src/internals/ephemeron_box.rs | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/boa_gc/src/cell.rs b/boa_gc/src/cell.rs index 6f4afbe44e8..36607629347 100644 --- a/boa_gc/src/cell.rs +++ b/boa_gc/src/cell.rs @@ -13,7 +13,7 @@ use crate::trace::{Finalize, Trace}; pub(crate) struct BorrowFlag(usize); /// `BorrowState` represents the various states of a `BorrowFlag` -/// +/// /// - Reading: the value is currently being read/borrowed. /// - Writing: the value is currently being written/borrowed mutably. /// - Unused: the value is currently unrooted. @@ -63,7 +63,7 @@ impl BorrowFlag { } /// Increments the counter for a new borrow. 
- /// + /// /// # Panic /// - This method will panic if the current `BorrowState` is writing. /// - This method will panic after incrementing if the borrow count overflows. @@ -86,7 +86,7 @@ impl BorrowFlag { } /// Decrements the counter to remove a borrow. - /// + /// /// # Panic /// - This method will panic if the current `BorrowState` is not reading. #[inline] diff --git a/boa_gc/src/internals/ephemeron_box.rs b/boa_gc/src/internals/ephemeron_box.rs index bbb4427790f..420b7fb36c6 100644 --- a/boa_gc/src/internals/ephemeron_box.rs +++ b/boa_gc/src/internals/ephemeron_box.rs @@ -136,8 +136,8 @@ unsafe impl Trace for EphemeronBox { // An `EphemeronBox`'s key is set to None once it has been finalized. // - // NOTE: while it is possible for the `key`'s pointer value to be - // resurrected, we should still consider the finalize the ephemeron + // NOTE: while it is possible for the `key`'s pointer value to be + // resurrected, we should still consider the finalize the ephemeron // box and set the `key` to None. #[inline] fn run_finalizer(&self) {