diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..73cef05 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,7 @@ +language: rust +sudo: true +rust: nightly +script: +- cd gc +- cargo build +- cargo test diff --git a/gc/Cargo.toml b/gc/Cargo.toml index 9b8f1bd..7b9681a 100644 --- a/gc/Cargo.toml +++ b/gc/Cargo.toml @@ -8,6 +8,9 @@ readme = "../README.md" license = "MPL-2.0" keywords = ["garbage", "plugin", "memory"] +[dependencies] +lazy_static = "0.1" + [dependencies.gc_plugin] path = "../gc_plugin" version = "0.0.1" diff --git a/gc/src/cgc.rs b/gc/src/cgc.rs new file mode 100644 index 0000000..1f3e3b2 --- /dev/null +++ b/gc/src/cgc.rs @@ -0,0 +1,116 @@ +//! Concurrently garbage-collected boxes (The `Cgc` type). +//! +//! The `Cgc` type provides shared ownership of an immutable value. +//! Unlike `Gc`, `Cgc` can be sent across threads, because collection +//! occurs in a thread-safe way. + +use std::cell::Cell; +use std::ops::{Deref, CoerceUnsized}; +use std::marker; +use cgc_internals::GcBox; +use trace::{Trace, Tracer}; + +// We expose the force_collect method from the gc internals +pub use cgc_internals::force_collect; + +///////// +// Cgc // +///////// + +/// A garbage-collected pointer type over an immutable value. +/// +/// See the [module level documentation](./) for more details. +pub struct Cgc { + // XXX We can probably take advantage of alignment to store this + root: Cell, + _ptr: *mut GcBox, +} + +impl, U: Trace + ?Sized> CoerceUnsized> for Cgc {} + +impl Cgc { + /// Constructs a new `Cgc`. + /// + /// # Collection + /// + /// This method could trigger a Garbage Collection. + /// + /// # Examples + /// + /// ``` + /// use gc::Cgc; + /// + /// let five = Cgc::new(5); + /// ``` + pub fn new(value: T) -> Cgc { + unsafe { + // Allocate the memory for the object + let ptr = GcBox::new(value); + + // When we create a Cgc, all pointers which have been moved to the + // heap no longer need to be rooted, so we unroot them. + (*ptr).value()._cgc_unroot(); + Cgc { _ptr: ptr, root: Cell::new(true) } + } + } +} + +impl Cgc { + #[inline] + fn inner(&self) -> &GcBox { + unsafe { &*self._ptr } + } +} + +impl Trace for Cgc { + #[inline] + unsafe fn _trace(&self, _: U) { /* do nothing */ } + + #[inline] + unsafe fn _cgc_mark(&self, mark: bool) { + self.inner().mark(mark); + } + + #[inline] + unsafe fn _cgc_root(&self) { + assert!(!self.root.get(), "Can't double-root a Cgc"); + self.root.set(true); + + self.inner().root(); + } + + #[inline] + unsafe fn _cgc_unroot(&self) { + assert!(self.root.get(), "Can't double-unroot a Cgc"); + self.root.set(false); + + self.inner().unroot(); + } +} + +impl Clone for Cgc { + #[inline] + fn clone(&self) -> Cgc { + unsafe { self.inner().root(); } + Cgc { _ptr: self._ptr, root: Cell::new(true) } + } +} + +impl Deref for Cgc { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + &self.inner().value() + } +} + +impl Drop for Cgc { + #[inline] + fn drop(&mut self) { + // If this pointer was a root, we should unroot it. + if self.root.get() { + unsafe { self.inner().unroot(); } + } + } +} diff --git a/gc/src/cgc_internals.rs b/gc/src/cgc_internals.rs new file mode 100644 index 0000000..3eef714 --- /dev/null +++ b/gc/src/cgc_internals.rs @@ -0,0 +1,255 @@ +use std::mem; +use std::sync::atomic::{AtomicUsize, AtomicBool, Ordering, ATOMIC_USIZE_INIT, ATOMIC_BOOL_INIT}; +use std::sync::{RwLock, Mutex, TryLockError}; +use std::sync::mpsc::{channel, Sender, Receiver}; +use trace::Trace; + +// XXX Obviously not 100 bytes GC threshold - choose a number +const GC_THRESHOLD: usize = 100; + +/// The current usage of the heap +static GC_HEAP_USAGE: AtomicUsize = ATOMIC_USIZE_INIT; + +/// When this value is true, newly created objects should be marked `true`, +/// and values should be sent to senders.1. +/// When this value is false, newly created objects should be marked `false`, +/// and values should be sent to senders.0. +static GC_CHANNEL: AtomicBool = ATOMIC_BOOL_INIT; + +/// True if the GC is currently sweeping. When this is true, attempts to +/// dereference gc-ed pointers will panic! +static GC_SWEEPING: AtomicBool = ATOMIC_BOOL_INIT; + +struct GcBoxChans { + /// This is held before we modify roots, to ensure that we don't + /// modify these roots during the garbage collection process. + /// It will be held when the garbage collector is running + rootlock: RwLock<()>, + senders: Mutex<(Sender>, + Sender>)>, + + // XXX We only access when we hold the write lock on rootlock + // We could probably use an unsafe system for this, which could save + // us the extra overhead of the second mutex lock. + // XXX OPTIMIZE + receivers: Mutex<(Receiver>, + Receiver>)>, +} + +/// The GCBOX channel queue +lazy_static! { + static ref GCBOX_CHANS: GcBoxChans = { + let (txa, rxa) = channel(); + let (txb, rxb) = channel(); + + GcBoxChans { + senders: Mutex::new((txa, txb)), + receivers: Mutex::new((rxa, rxb)), + rootlock: RwLock::new(()), + } + }; +} + +/// Thread-local cache of the senders +thread_local! { + static GCBOX_SENDERS: (Sender>, + Sender>) + = GCBOX_CHANS.senders.lock().unwrap().clone() +} + +struct GcBoxHeader { + roots: AtomicUsize, + marked: AtomicBool, +} + +/// Internal trait - must be implemented by every garbage collected allocation +/// GcBoxTraits form a linked list of allocations. +trait GcBoxTrait { + /// Get a reference to the internal GcBoxHeader + fn header(&self) -> &GcBoxHeader; + + /// Initiate a trace through the GcBoxTrait + unsafe fn mark_value(&self, mark: bool); + + /// Get the size of the allocationr required to create the GcBox + fn size_of(&self) -> usize; +} + +pub struct GcBox { + header: GcBoxHeader, + data: T, +} + +impl GcBox { + /// Allocate a garbage collected GcBox on the heap, + /// + /// The GcBox allocated this way starts it's life rooted. + pub fn new(value: T) -> *mut GcBox { + // Check if we should collect! + let usage = GC_HEAP_USAGE.fetch_add(mem::size_of::>(), Ordering::SeqCst); + + if usage > GC_THRESHOLD { + collect_garbage(); + } + + GCBOX_SENDERS.with(|senders| { + let chan_sel = GC_CHANNEL.load(Ordering::SeqCst); + + // Copy the data onto the heap + let mut gcbox = Box::new(GcBox { + header: GcBoxHeader { + roots: AtomicUsize::new(1), + marked: AtomicBool::new(chan_sel), + }, + data: value, + }); + let ptr: *mut _ = &mut *gcbox; + + // Save the gcbox on the gc queue + // + // There is a chance that chan_sel has changed by now, this chance + // is very low, and if it has, then the worst that will happen is + // that the newly allocated object will miss the next collection + // cycle, and only be collected in the cycle after that. + if chan_sel { + senders.1.send(gcbox).unwrap(); + } else { + senders.0.send(gcbox).unwrap(); + } + + ptr + }) + } +} + +impl GcBox { + /// Mark this GcBox, and trace through it's data + pub unsafe fn mark(&self, mark: bool) { + // Mark this node + let marked = self.header.marked.swap(mark, Ordering::Relaxed); + + // If we weren't already marked, trace through child nodes + if marked != mark { self.data._cgc_mark(mark); } + } + + /// Increase the root count on this GcBox. + /// Roots prevent the GcBox from being destroyed by + /// the garbage collector. + pub unsafe fn root(&self) { + // XXX we may be able to avoid blocking here in some cases + let _modifyroots_ok = GCBOX_CHANS.rootlock.read(); + self.header.roots.fetch_add(1, Ordering::SeqCst); + } + + /// Decrease the root count on this GcBox. + /// Roots prevent the GcBox from being destroyed by + /// the garbage collector. + pub unsafe fn unroot(&self) { + // XXX we may be able to avoid blocking here in some cases + let _modifyroots_ok = GCBOX_CHANS.rootlock.read(); + self.header.roots.fetch_sub(1, Ordering::SeqCst); + } + + /// Get the value form the GcBox + pub fn value(&self) -> &T { + assert!(!GC_SWEEPING.load(Ordering::Relaxed), + "Gc pointers may be invalid when Gc is running, don't deref Gc pointers in drop()"); + + &self.data + } +} + +impl GcBoxTrait for GcBox { + fn header(&self) -> &GcBoxHeader { &self.header } + + unsafe fn mark_value(&self, mark: bool) { self.mark(mark) } + + fn size_of(&self) -> usize { mem::size_of::() } +} + +/// Collect some garbage! +fn collect_garbage() -> bool { + // Try and gain access to the garbage collecting lock - + match GCBOX_CHANS.rootlock.try_write() { + Ok(_) => { + // This is only locked when the write block from GCBOX_CHANS.rootlock, + // so the mutex is unnecessary. Unfortunately, as the Receivers inside + // the mutex don't implement sync, we can't put them directly inside of + // the RwLock, so instead we are acquiring them in a seperate lock. + // + // It may make sense to do some unsafe code here to avoid this extra lock. + let receivs = GCBOX_CHANS.receivers.lock().unwrap(); + + // Toggle GC_CHANNEL - after this point, nothing more will be added + // to the input queue + let old_chan_sel = GC_CHANNEL.fetch_xor(true, Ordering::SeqCst); + + GCBOX_SENDERS.with(|sends| { + let (in_chan, out_chan) = if old_chan_sel { + (&receivs.1, &sends.0) + } else { + (&receivs.0, &sends.1) + }; + + let mut sweep_list = Vec::new(); + + // Mark items off - if they are marked, we can already + // put them on the out_chan for the next garbage collection + loop { + match in_chan.try_recv() { + Ok(gcbox) => { + let (roots, marked) = { + let header = gcbox.header(); + + (header.roots.load(Ordering::Relaxed), + header.marked.load(Ordering::Relaxed)) + }; + + if roots > 0 { + unsafe { gcbox.mark_value(!old_chan_sel); } + out_chan.send(gcbox).unwrap(); + } else { + if marked == old_chan_sel { + // This may not be marked - add it to sweep_list + sweep_list.push(gcbox); + } else { + // Already marked - just send it + out_chan.send(gcbox).unwrap(); + } + } + } + Err(_) => break + } + } + + // Go through the remaining nodes and send them on the channel if + // they are marked. If they are not, drop them. + for gcbox in sweep_list { + if gcbox.header().marked.load(Ordering::Relaxed) != old_chan_sel { + out_chan.send(gcbox).unwrap(); + } else { + drop(gcbox); + } + } + }); + + true + } + Err(TryLockError::Poisoned(_)) => + panic!("The garbage collector lock is poisoned"), + Err(TryLockError::WouldBlock) => false, + } +} + +/// Immediately trigger a garbage collection +pub fn force_collect() { + // XXX: We want to always collect garbage, no matter what. + // otherwise, running force_collect doesn't guarantee that previously + // unrooted values in the current thread will be collected as we expect. + // Currently, we may not actually collect garbage when we run force_collect + + if !collect_garbage() { + println!("Already Collecting Garbage!"); + let _read = GCBOX_CHANS.rootlock.read().unwrap(); + } +} diff --git a/gc/src/gc.rs b/gc/src/gc.rs index 9e1e6b0..4102d84 100644 --- a/gc/src/gc.rs +++ b/gc/src/gc.rs @@ -1,211 +1,255 @@ -use std::ptr; -use std::mem; -use std::cell::{Cell, RefCell}; -use trace::Trace; - -// XXX Obviously not 100 bytes GC threshold - choose a number -const GC_THRESHOLD: usize = 100; - -struct GcState { - bytes_allocated: usize, - boxes_start: Option>, - boxes_end: *mut Option>, +//! Thread-local garbage-collected boxes (The `Gc` type). +//! +//! The `Gc` type provides shared ownership of an immutable value. +//! It is marked as non-sendable because the garbage collection only occurs +//! thread locally. + +use std::cell::{self, Cell, RefCell, BorrowState}; +use std::ops::{Deref, DerefMut, CoerceUnsized}; +use std::marker; +use gc_internals::GcBox; +use trace::{Trace, Tracer}; + +// We expose the force_collect method from the gc internals +pub use gc_internals::force_collect; + +//////// +// Gc // +//////// + +/// A garbage-collected pointer type over an immutable value. +/// +/// See the [module level documentation](./) for more details. +pub struct Gc { + // XXX We can probably take advantage of alignment to store this + root: Cell, + _ptr: *mut GcBox, } -/// Whether or not the thread is currently in the sweep phase of garbage collection -/// During this phase, attempts to dereference a Gc pointer will trigger a panic -thread_local!(static GC_SWEEPING: Cell = Cell::new(false)); - -/// The garbage collector's internal state. -thread_local!(static GC_STATE: RefCell = RefCell::new(GcState { - bytes_allocated: 0, - boxes_start: None, - boxes_end: ptr::null_mut(), -})); - -pub struct GcBoxHeader { - // XXX This is horribly space inefficient - not sure if we care - // We are using a word word bool - there is a full 63 bits of unused data :( - roots: Cell, - next: Option>, - marked: Cell, +impl !marker::Send for Gc {} + +impl !marker::Sync for Gc {} + +impl, U: Trace + ?Sized> CoerceUnsized> for Gc {} + +impl Gc { + /// Constructs a new `Gc`. + /// + /// # Collection + /// + /// This method could trigger a Garbage Collection. + /// + /// # Examples + /// + /// ``` + /// use gc::Gc; + /// + /// let five = Gc::new(5); + /// ``` + pub fn new(value: T) -> Gc { + unsafe { + // Allocate the memory for the object + let ptr = GcBox::new(value); + + // When we create a Gc, all pointers which have been moved to the + // heap no longer need to be rooted, so we unroot them. + (*ptr).value()._gc_unroot(); + Gc { _ptr: ptr, root: Cell::new(true) } + } + } } -/// Internal trait - must be implemented by every garbage collected allocation -/// GcBoxTraits form a linked list of allocations. -trait GcBoxTrait { - /// Get a reference to the internal GcBoxHeader - fn header(&self) -> &GcBoxHeader; +impl Gc { + #[inline] + fn inner(&self) -> &GcBox { + unsafe { &*self._ptr } + } +} + +impl Trace for Gc { + #[inline] + unsafe fn _trace(&self, _: U) { /* do nothing */ } - /// Get a mutable reference to the internal GcBoxHeader - fn header_mut(&mut self) -> &mut GcBoxHeader; + #[inline] + unsafe fn _gc_mark(&self) { + self.inner().mark(); + } - /// Initiate a trace through the GcBoxTrait - unsafe fn trace_value(&self); + #[inline] + unsafe fn _gc_root(&self) { + assert!(!self.root.get(), "Can't double-root a Gc"); + self.root.set(true); - /// Get the size of the allocationr required to create the GcBox - fn size_of(&self) -> usize; + self.inner().root(); + } + + #[inline] + unsafe fn _gc_unroot(&self) { + assert!(self.root.get(), "Can't double-unroot a Gc"); + self.root.set(false); + + self.inner().unroot(); + } } -pub struct GcBox { - header: GcBoxHeader, - data: T, +impl Clone for Gc { + #[inline] + fn clone(&self) -> Gc { + unsafe { self.inner().root(); } + Gc { _ptr: self._ptr, root: Cell::new(true) } + } } -impl GcBox { - /// Allocate a garbage collected GcBox on the heap, - /// and append it to the thread local GcBox chain. - /// - /// The GcBox allocated this way starts it's life - /// rooted. - pub fn new(value: T) -> *mut GcBox { - GC_STATE.with(|_st| { - let mut st = _st.borrow_mut(); - - // XXX We should probably be more clever about collecting - if st.bytes_allocated > GC_THRESHOLD { - collect_garbage(&mut *st); - } - - let mut gcbox = Box::new(GcBox { - header: GcBoxHeader { - roots: Cell::new(1), - marked: Cell::new(false), - next: None, - }, - data: value, - }); - - let gcbox_ptr = &mut *gcbox as *mut _; - - let next_boxes_end = &mut gcbox.header.next as *mut _; - if st.boxes_end.is_null() { - assert!(st.boxes_start.is_none(), - "If something had been allocated, boxes_end would be set"); - // The next place we're going to add something! - st.boxes_end = next_boxes_end; - st.boxes_start = Some(gcbox); - } else { - unsafe { - *st.boxes_end = Some(gcbox); - } - st.boxes_end = next_boxes_end; - } - - // We allocated some bytes! Let's record it - st.bytes_allocated += mem::size_of::>(); - - // Return the pointer to the newly allocated data - gcbox_ptr - }) +impl Deref for Gc { + type Target = T; + + #[inline] + fn deref(&self) -> &T { + &self.inner().value() } } -impl GcBox { - /// Mark this GcBox, and trace through it's data - pub unsafe fn trace_inner(&self) { - let marked = self.header.marked.get(); - if !marked { - self.header.marked.set(true); - self.data.trace(); +impl Drop for Gc { + #[inline] + fn drop(&mut self) { + // If this pointer was a root, we should unroot it. + if self.root.get() { + unsafe { self.inner().unroot(); } } } +} + +//////////// +// GcCell // +//////////// + +/// A mutable memory location with dynamically checked borrow rules +/// which can be used inside of a garbage collected pointer. +/// +/// This object is a RefCell which can be used inside of a Gc. +pub struct GcCell { + rooted: Cell, + cell: RefCell, +} - /// Increase the root count on this GcBox. - /// Roots prevent the GcBox from being destroyed by - /// the garbage collector. - pub unsafe fn root_inner(&self) { - self.header.roots.set(self.header.roots.get() + 1); +impl GcCell { + /// Creates a new `GcCell` containing `value`. + #[inline] + pub fn new(value: T) -> GcCell { + GcCell { + rooted: Cell::new(true), + cell: RefCell::new(value), + } } - /// Decrease the root count on this GcBox. - /// Roots prevent the GcBox from being destroyed by - /// the garbage collector. - pub unsafe fn unroot_inner(&self) { - self.header.roots.set(self.header.roots.get() - 1); + /// Consumes the `GcCell`, returning the wrapped value. + #[inline] + pub fn into_inner(self) -> T { + self.cell.into_inner() } +} - /// Get the value form the GcBox - pub fn value(&self) -> &T { - // XXX This may be too expensive, but will help catch errors with - // accessing Gc values in destructors. - GC_SWEEPING.with(|sweeping| assert!(!sweeping.get(), - "Gc pointers may be invalid when GC is running")); - &self.data +impl GcCell { + /// Immutably borrows the wrapped value. + /// + /// The borrow lasts until the returned `GcCellRef` exits scope. + /// Multiple immutable borrows can be taken out at the same time. + /// + /// # Panics + /// + /// Panics if the value is currently mutably borrowed. + #[inline] + pub fn borrow(&self) -> GcCellRef { + self.cell.borrow() + } + + /// Mutably borrows the wrapped value. + /// + /// The borrow lasts until the returned `GcCellRefMut` exits scope. + /// The value cannot be borrowed while this borrow is active. + /// + /// #Panics + /// + /// Panics if the value is currently borrowed. + #[inline] + pub fn borrow_mut(&self) -> GcCellRefMut { + let val_ref = self.cell.borrow_mut(); + + // Force the val_ref's contents to be rooted for the duration of the mutable borrow + if !self.rooted.get() { + unsafe { val_ref._gc_root(); } + } + + GcCellRefMut { + _ref: val_ref, + _rooted: &self.rooted, + } + } +} + +impl Trace for GcCell { + #[inline] + unsafe fn _trace(&self, _: U) { /* do nothing */ } + + #[inline] + unsafe fn _gc_mark(&self) { + match self.cell.borrow_state() { + BorrowState::Writing => (), + _ => self.cell.borrow()._gc_mark(), + } + } + + #[inline] + unsafe fn _gc_root(&self) { + assert!(!self.rooted.get(), "Can't root a GcCell Twice!"); + self.rooted.set(true); + + match self.cell.borrow_state() { + BorrowState::Writing => (), + _ => self.cell.borrow()._gc_root(), + } + } + + #[inline] + unsafe fn _gc_unroot(&self) { + assert!(self.rooted.get(), "Can't unroot a GcCell Twice!"); + self.rooted.set(false); + + match self.cell.borrow_state() { + BorrowState::Writing => (), + _ => self.cell.borrow()._gc_unroot(), + } } } -impl GcBoxTrait for GcBox { - fn header(&self) -> &GcBoxHeader { &self.header } +/// A wrapper type for an immutably borrowed value from a GcCell +pub type GcCellRef<'a, T> = cell::Ref<'a, T>; - fn header_mut(&mut self) -> &mut GcBoxHeader { &mut self.header } +/// A wrapper type for a mutably borrowed value from a GcCell +pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static> { + _ref: ::std::cell::RefMut<'a, T>, + _rooted: &'a Cell, +} - unsafe fn trace_value(&self) { self.trace_inner() } +impl<'a, T: Trace + ?Sized> Deref for GcCellRefMut<'a, T> { + type Target = T; - fn size_of(&self) -> usize { mem::size_of::() } + #[inline] + fn deref(&self) -> &T { &*self._ref } } -/// Collect some garbage! -fn collect_garbage(st: &mut GcState) { - let mut next_node = &mut st.boxes_start - as *mut Option>; - - // Mark - loop { - if let Some(ref mut node) = *unsafe { &mut *next_node } { - { - // XXX This virtual method call is nasty :( - let header = node.header_mut(); - next_node = &mut header.next as *mut _; - - // If it doesn't have roots - we can abort now - if header.roots.get() == 0 { continue } - } - // We trace in a different scope such that node isn't - // mutably borrowed anymore - unsafe { node.trace_value(); } - } else { break } - } - - GC_SWEEPING.with(|collecting| collecting.set(true)); - - let mut next_node = &mut st.boxes_start - as *mut Option>; - - // Sweep - loop { - if let Some(ref mut node) = *unsafe { &mut *next_node } { - // XXX This virtual method call is nasty :( - let size = node.size_of(); - let header = node.header_mut(); - - if header.marked.get() { - // This node has already been marked - we're done! - header.marked.set(false); - next_node = &mut header.next; - } else { - // The node wasn't marked - we need to delete it - st.bytes_allocated -= size; - let mut tmp = None; - mem::swap(&mut tmp, &mut header.next); - mem::swap(&mut tmp, unsafe { &mut *next_node }); - - // At this point, the node is destroyed if it exists due to tmp dropping - } - } else { break } - } - - // Update the end pointer to point to the correct location - st.boxes_end = next_node; - - // XXX This should probably be done with some kind of finally guard - GC_SWEEPING.with(|collecting| collecting.set(false)); +impl<'a, T: Trace + ?Sized> DerefMut for GcCellRefMut<'a, T> { + #[inline] + fn deref_mut(&mut self) -> &mut T { &mut *self._ref } } -/// Immediately trigger a garbage collection on the current thread. -pub fn force_collect() { - GC_STATE.with(|_st| { - let mut st = _st.borrow_mut(); - collect_garbage(&mut *st); - }); +impl<'a, T: Trace + ?Sized> Drop for GcCellRefMut<'a, T> { + #[inline] + fn drop(&mut self) { + // Restore the rooted state of the GcCell's contents to the state of the GcCell. + // During the lifetime of the GcCellRefMut, the GcCell's contents are rooted. + if !self._rooted.get() { + unsafe { self._ref._gc_unroot(); } + } + } } diff --git a/gc/src/gc_internals.rs b/gc/src/gc_internals.rs new file mode 100644 index 0000000..4575b78 --- /dev/null +++ b/gc/src/gc_internals.rs @@ -0,0 +1,211 @@ +use std::ptr; +use std::mem; +use std::cell::{Cell, RefCell}; +use trace::Trace; + +// XXX Obviously not 100 bytes GC threshold - choose a number +const GC_THRESHOLD: usize = 100; + +struct GcState { + bytes_allocated: usize, + boxes_start: Option>, + boxes_end: *mut Option>, +} + +/// Whether or not the thread is currently in the sweep phase of garbage collection +/// During this phase, attempts to dereference a Gc pointer will trigger a panic +thread_local!(static GC_SWEEPING: Cell = Cell::new(false)); + +/// The garbage collector's internal state. +thread_local!(static GC_STATE: RefCell = RefCell::new(GcState { + bytes_allocated: 0, + boxes_start: None, + boxes_end: ptr::null_mut(), +})); + +pub struct GcBoxHeader { + // XXX This is horribly space inefficient - not sure if we care + // We are using a word word bool - there is a full 63 bits of unused data :( + roots: Cell, + next: Option>, + marked: Cell, +} + +/// Internal trait - must be implemented by every garbage collected allocation +/// GcBoxTraits form a linked list of allocations. +trait GcBoxTrait { + /// Get a reference to the internal GcBoxHeader + fn header(&self) -> &GcBoxHeader; + + /// Get a mutable reference to the internal GcBoxHeader + fn header_mut(&mut self) -> &mut GcBoxHeader; + + /// Initiate a marking trace through the GcBoxTrait + unsafe fn mark_value(&self); + + /// Get the size of the allocationr required to create the GcBox + fn size_of(&self) -> usize; +} + +pub struct GcBox { + header: GcBoxHeader, + data: T, +} + +impl GcBox { + /// Allocate a garbage collected GcBox on the heap, + /// and append it to the thread local GcBox chain. + /// + /// The GcBox allocated this way starts it's life + /// rooted. + pub fn new(value: T) -> *mut GcBox { + GC_STATE.with(|_st| { + let mut st = _st.borrow_mut(); + + // XXX We should probably be more clever about collecting + if st.bytes_allocated > GC_THRESHOLD { + collect_garbage(&mut *st); + } + + let mut gcbox = Box::new(GcBox { + header: GcBoxHeader { + roots: Cell::new(1), + marked: Cell::new(false), + next: None, + }, + data: value, + }); + + let gcbox_ptr = &mut *gcbox as *mut _; + + let next_boxes_end = &mut gcbox.header.next as *mut _; + if st.boxes_end.is_null() { + assert!(st.boxes_start.is_none(), + "If something had been allocated, boxes_end would be set"); + // The next place we're going to add something! + st.boxes_end = next_boxes_end; + st.boxes_start = Some(gcbox); + } else { + unsafe { + *st.boxes_end = Some(gcbox); + } + st.boxes_end = next_boxes_end; + } + + // We allocated some bytes! Let's record it + st.bytes_allocated += mem::size_of::>(); + + // Return the pointer to the newly allocated data + gcbox_ptr + }) + } +} + +impl GcBox { + /// Mark this GcBox, and trace through it's data + pub unsafe fn mark(&self) { + let marked = self.header.marked.get(); + if !marked { + self.header.marked.set(true); + self.data._gc_mark(); + } + } + + /// Increase the root count on this GcBox. + /// Roots prevent the GcBox from being destroyed by + /// the garbage collector. + pub unsafe fn root(&self) { + self.header.roots.set(self.header.roots.get() + 1); + } + + /// Decrease the root count on this GcBox. + /// Roots prevent the GcBox from being destroyed by + /// the garbage collector. + pub unsafe fn unroot(&self) { + self.header.roots.set(self.header.roots.get() - 1); + } + + /// Get the value form the GcBox + pub fn value(&self) -> &T { + // XXX This may be too expensive, but will help catch errors with + // accessing Gc values in destructors. + GC_SWEEPING.with(|sweeping| assert!(!sweeping.get(), + "Gc pointers may be invalid when GC is running")); + &self.data + } +} + +impl GcBoxTrait for GcBox { + fn header(&self) -> &GcBoxHeader { &self.header } + + fn header_mut(&mut self) -> &mut GcBoxHeader { &mut self.header } + + unsafe fn mark_value(&self) { self.mark() } + + fn size_of(&self) -> usize { mem::size_of::() } +} + +/// Collect some garbage! +fn collect_garbage(st: &mut GcState) { + let mut next_node = &mut st.boxes_start + as *mut Option>; + + // Mark + loop { + if let Some(ref mut node) = *unsafe { &mut *next_node } { + { + // XXX This virtual method call is nasty :( + let header = node.header_mut(); + next_node = &mut header.next as *mut _; + + // If it doesn't have roots - we can abort now + if header.roots.get() == 0 { continue } + } + // We trace in a different scope such that node isn't + // mutably borrowed anymore + unsafe { node.mark_value(); } + } else { break } + } + + GC_SWEEPING.with(|collecting| collecting.set(true)); + + let mut next_node = &mut st.boxes_start + as *mut Option>; + + // Sweep + loop { + if let Some(ref mut node) = *unsafe { &mut *next_node } { + // XXX This virtual method call is nasty :( + let size = node.size_of(); + let header = node.header_mut(); + + if header.marked.get() { + // This node has already been marked - we're done! + header.marked.set(false); + next_node = &mut header.next; + } else { + // The node wasn't marked - we need to delete it + st.bytes_allocated -= size; + let mut tmp = None; + mem::swap(&mut tmp, &mut header.next); + mem::swap(&mut tmp, unsafe { &mut *next_node }); + + // At this point, the node is destroyed if it exists due to tmp dropping + } + } else { break } + } + + // Update the end pointer to point to the correct location + st.boxes_end = next_node; + + // XXX This should probably be done with some kind of finally guard + GC_SWEEPING.with(|collecting| collecting.set(false)); +} + +/// Immediately trigger a garbage collection on the current thread. +pub fn force_collect() { + GC_STATE.with(|_st| { + let mut st = _st.borrow_mut(); + collect_garbage(&mut *st); + }); +} diff --git a/gc/src/lib.rs b/gc/src/lib.rs index 456ff9e..6f74da9 100644 --- a/gc/src/lib.rs +++ b/gc/src/lib.rs @@ -1,255 +1,19 @@ -//! Thread-local garbage-collected boxes (The `Gc` type). -//! -//! The `Gc` type provides shared ownership of an immutable value. -//! It is marked as non-sendable because the garbage collection only occurs -//! thread locally. - #![feature(optin_builtin_traits, unsize, coerce_unsized, borrow_state)] -use std::cell::{self, Cell, RefCell, BorrowState}; -use std::ops::{Deref, DerefMut, CoerceUnsized}; -use std::marker; -use gc::GcBox; +#[macro_use] +extern crate lazy_static; -mod gc; mod trace; -// We re-export the Trace method, as well as some useful internal methods for -// managing collections or configuring the garbage collector. -pub use trace::Trace; -pub use gc::force_collect; - -//////// -// Gc // -//////// - -/// A garbage-collected pointer type over an immutable value. -/// -/// See the [module level documentation](./) for more details. -pub struct Gc { - // XXX We can probably take advantage of alignment to store this - root: Cell, - _ptr: *mut GcBox, -} - -impl !marker::Send for Gc {} - -impl !marker::Sync for Gc {} - -impl, U: Trace + ?Sized> CoerceUnsized> for Gc {} - -impl Gc { - /// Constructs a new `Gc`. - /// - /// # Collection - /// - /// This method could trigger a Garbage Collection. - /// - /// # Examples - /// - /// ``` - /// use gc::Gc; - /// - /// let five = Gc::new(5); - /// ``` - pub fn new(value: T) -> Gc { - unsafe { - // Allocate the memory for the object - let ptr = GcBox::new(value); - - // When we create a Gc, all pointers which have been moved to the - // heap no longer need to be rooted, so we unroot them. - (*ptr).value().unroot(); - Gc { _ptr: ptr, root: Cell::new(true) } - } - } -} - -impl Gc { - #[inline] - fn inner(&self) -> &GcBox { - unsafe { &*self._ptr } - } -} - -impl Trace for Gc { - #[inline] - unsafe fn trace(&self) { - self.inner().trace_inner(); - } - - #[inline] - unsafe fn root(&self) { - assert!(!self.root.get(), "Can't double-root a Gc"); - self.root.set(true); - - self.inner().root_inner(); - } - - #[inline] - unsafe fn unroot(&self) { - assert!(self.root.get(), "Can't double-unroot a Gc"); - self.root.set(false); - - self.inner().unroot_inner(); - } -} - -impl Clone for Gc { - #[inline] - fn clone(&self) -> Gc { - unsafe { self.inner().root_inner(); } - Gc { _ptr: self._ptr, root: Cell::new(true) } - } -} - -impl Deref for Gc { - type Target = T; - - #[inline] - fn deref(&self) -> &T { - &self.inner().value() - } -} - -impl Drop for Gc { - #[inline] - fn drop(&mut self) { - // If this pointer was a root, we should unroot it. - if self.root.get() { - unsafe { self.inner().unroot_inner(); } - } - } -} - -//////////// -// GcCell // -//////////// - -/// A mutable memory location with dynamically checked borrow rules -/// which can be used inside of a garbage collected pointer. -/// -/// This object is a RefCell which can be used inside of a Gc. -pub struct GcCell { - rooted: Cell, - cell: RefCell, -} - -impl GcCell { - /// Creates a new `GcCell` containing `value`. - #[inline] - pub fn new(value: T) -> GcCell { - GcCell { - rooted: Cell::new(true), - cell: RefCell::new(value), - } - } - - /// Consumes the `GcCell`, returning the wrapped value. - #[inline] - pub fn into_inner(self) -> T { - self.cell.into_inner() - } -} - -impl GcCell { - /// Immutably borrows the wrapped value. - /// - /// The borrow lasts until the returned `GcCellRef` exits scope. - /// Multiple immutable borrows can be taken out at the same time. - /// - /// # Panics - /// - /// Panics if the value is currently mutably borrowed. - #[inline] - pub fn borrow(&self) -> GcCellRef { - self.cell.borrow() - } - - /// Mutably borrows the wrapped value. - /// - /// The borrow lasts until the returned `GcCellRefMut` exits scope. - /// The value cannot be borrowed while this borrow is active. - /// - /// #Panics - /// - /// Panics if the value is currently borrowed. - #[inline] - pub fn borrow_mut(&self) -> GcCellRefMut { - let val_ref = self.cell.borrow_mut(); - - // Force the val_ref's contents to be rooted for the duration of the mutable borrow - if !self.rooted.get() { - unsafe { val_ref.root(); } - } - - GcCellRefMut { - _ref: val_ref, - _rooted: &self.rooted, - } - } -} - -impl Trace for GcCell { - #[inline] - unsafe fn trace(&self) { - match self.cell.borrow_state() { - BorrowState::Writing => (), - _ => self.cell.borrow().trace(), - } - } - - #[inline] - unsafe fn root(&self) { - assert!(!self.rooted.get(), "Can't root a GcCell Twice!"); - self.rooted.set(true); - - match self.cell.borrow_state() { - BorrowState::Writing => (), - _ => self.cell.borrow().root(), - } - } - - #[inline] - unsafe fn unroot(&self) { - assert!(self.rooted.get(), "Can't unroot a GcCell Twice!"); - self.rooted.set(false); - - match self.cell.borrow_state() { - BorrowState::Writing => (), - _ => self.cell.borrow().unroot(), - } - } -} - -/// A wrapper type for an immutably borrowed value from a GcCell -pub type GcCellRef<'a, T> = cell::Ref<'a, T>; - -/// A wrapper type for a mutably borrowed value from a GcCell -pub struct GcCellRefMut<'a, T: Trace + ?Sized + 'static> { - _ref: ::std::cell::RefMut<'a, T>, - _rooted: &'a Cell, -} - -impl<'a, T: Trace + ?Sized> Deref for GcCellRefMut<'a, T> { - type Target = T; +pub mod gc; +mod gc_internals; - #[inline] - fn deref(&self) -> &T { &*self._ref } -} +pub mod cgc; +mod cgc_internals; -impl<'a, T: Trace + ?Sized> DerefMut for GcCellRefMut<'a, T> { - #[inline] - fn deref_mut(&mut self) -> &mut T { &mut *self._ref } -} +pub use trace::{Trace, Tracer}; +pub use gc::{Gc, GcCell}; +pub use cgc::{Cgc}; -impl<'a, T: Trace + ?Sized> Drop for GcCellRefMut<'a, T> { - #[inline] - fn drop(&mut self) { - // Restore the rooted state of the GcCell's contents to the state of the GcCell. - // During the lifetime of the GcCellRefMut, the GcCell's contents are rooted. - if !self._rooted.get() { - unsafe { self._ref.unroot(); } - } - } -} +pub fn gc_force_collect() { gc_internals::force_collect(); } +pub fn cgc_force_collect() { cgc_internals::force_collect(); } diff --git a/gc/src/trace.rs b/gc/src/trace.rs index 6e84818..654ef06 100644 --- a/gc/src/trace.rs +++ b/gc/src/trace.rs @@ -1,100 +1,144 @@ -/// The Trace trait which needs to be implemented on garbage collected objects -pub trait Trace { - /// Mark all contained Gcs - unsafe fn trace(&self); - /// Increment the root-count of all contained Gcs - unsafe fn root(&self); - /// Decrement the root-count of all contained Gcs - unsafe fn unroot(&self); +pub trait Tracer { + unsafe fn traverse(&self, obj: &T); } -/// This simple rule implements the trace methods such with empty -/// implementations - use this for marking types as not containing any Trace types! -#[macro_export] -macro_rules! empty_trace { - () => { - #[inline] - unsafe fn trace(&self) {} - #[inline] - unsafe fn root(&self) {} - #[inline] - unsafe fn unroot(&self) {} - } -} +/// The Trace trait must be implemented for every garbage-collectable object +/// Only the _trace method should be overridden, unless you are doing something +/// super super weird. +/// +/// This trait can be auto-derived using #[derive(Trace)] if you are using the +/// gc_plugin compiler plugin in your program. +pub trait Trace { + /// This method should be overridden for every implementer of Trace. + /// It is called by the default implementations of the other methods. + /// + /// tracer.traverse() should be called on every collectable element of + /// the object in question implementing Trace. + /// + /// Generally avoid implementing this yourself, and prefer using #[derive(Trace)] + /// to avoid unsafety. + unsafe fn _trace(&self, tracer: T); -/// This rule implements the trace method. You define a this parameter name, and -/// pass in a body, the body should call `mark` on every traceable element inside -/// the body, and the mark implementation will automatically delegate to the correct -/// method on the argument. -#[macro_export] -macro_rules! custom_trace { - ($this:ident, $body:expr) => { - #[inline] - unsafe fn trace(&self) { - #[inline] - unsafe fn mark(it: &T) { - (*it).trace(); + unsafe fn _gc_mark(&self) { + struct MarkTracer; + impl Tracer for MarkTracer { + #[inline(always)] + unsafe fn traverse(&self, obj: &T) { + obj._gc_mark() } - let $this = self; - $body } - #[inline] - unsafe fn root(&self) { - #[inline] - unsafe fn mark(it: &T) { - (*it).root(); + self._trace(MarkTracer); + } + unsafe fn _gc_root(&self) { + struct RootTracer; + impl Tracer for RootTracer { + #[inline(always)] + unsafe fn traverse(&self, obj: &T) { + obj._gc_root() + } + } + self._trace(RootTracer); + } + unsafe fn _gc_unroot(&self) { + struct UnrootTracer; + impl Tracer for UnrootTracer { + #[inline(always)] + unsafe fn traverse(&self, obj: &T) { + obj._gc_unroot() } - let $this = self; - $body } - #[inline] - unsafe fn unroot(&self) { - #[inline] - unsafe fn mark(it: &T) { - (*it).unroot(); + self._trace(UnrootTracer); + } + unsafe fn _cgc_mark(&self, mark: bool) { + struct MarkTracer(bool); + impl Tracer for MarkTracer { + #[inline(always)] + unsafe fn traverse(&self, obj: &T) { + obj._cgc_mark(self.0) + } + } + self._trace(MarkTracer(mark)); + } + unsafe fn _cgc_root(&self) { + struct RootTracer; + impl Tracer for RootTracer { + #[inline(always)] + unsafe fn traverse(&self, obj: &T) { + obj._cgc_root() + } + } + self._trace(RootTracer); + } + unsafe fn _cgc_unroot(&self) { + struct UnrootTracer; + impl Tracer for UnrootTracer { + #[inline(always)] + unsafe fn traverse(&self, obj: &T) { + obj._cgc_unroot() } - let $this = self; - $body } + self._trace(UnrootTracer); } } -impl Trace for &'static T { - empty_trace!(); +impl Trace for &'static U { + unsafe fn _trace(&self, _: T) {} } -impl Trace for i8 { empty_trace!(); } -impl Trace for u8 { empty_trace!(); } -impl Trace for i16 { empty_trace!(); } -impl Trace for u16 { empty_trace!(); } -impl Trace for i32 { empty_trace!(); } -impl Trace for u32 { empty_trace!(); } -impl Trace for i64 { empty_trace!(); } -impl Trace for u64 { empty_trace!(); } +impl Trace for i8 { + unsafe fn _trace(&self, _: T) {} +} +impl Trace for u8 { + unsafe fn _trace(&self, _: T) {} +} +impl Trace for i16 { + unsafe fn _trace(&self, _: T) {} +} +impl Trace for u16 { + unsafe fn _trace(&self, _: T) {} +} +impl Trace for i32 { + unsafe fn _trace(&self, _: T) {} +} +impl Trace for u32 { + unsafe fn _trace(&self, _: T) {} +} +impl Trace for i64 { + unsafe fn _trace(&self, _: T) {} +} +impl Trace for u64 { + unsafe fn _trace(&self, _: T) {} +} -impl Trace for f32 { empty_trace!(); } -impl Trace for f64 { empty_trace!(); } +impl Trace for f32 { + unsafe fn _trace(&self, _: T) {} +} +impl Trace for f64 { + unsafe fn _trace(&self, _: T) {} +} -impl Trace for String { empty_trace!(); } +impl Trace for String { + unsafe fn _trace(&self, _: T) {} +} -impl Trace for Box { - custom_trace!(this, { - mark(&**this); - }); +impl Trace for Box { + unsafe fn _trace(&self, t: T) { + t.traverse(&**self); + } } -impl Trace for Vec { - custom_trace!(this, { - for e in this { - mark(e); +impl Trace for Vec { + unsafe fn _trace(&self, t: T) { + for e in self { + t.traverse(e); } - }); + } } -impl Trace for Option { - custom_trace!(this, { - if let Some(ref v) = *this { - mark(v); +impl Trace for Option { + unsafe fn _trace(&self, t: T) { + if let Some(ref v) = *self { + t.traverse(v); } - }); + } } diff --git a/gc/tests/cgc_semantics.rs b/gc/tests/cgc_semantics.rs new file mode 100644 index 0000000..4086f4c --- /dev/null +++ b/gc/tests/cgc_semantics.rs @@ -0,0 +1,84 @@ +#![feature(plugin, custom_derive, test)] + +#![plugin(gc_plugin)] +extern crate gc; + +extern crate test; + +use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; +use gc::{Trace, Cgc, cgc_force_collect}; + +// Helper method for comparing the fields of GcWatchFlags instances +macro_rules! assert_gcwf { + ($flags:expr, $root:expr, $unroot:expr, $drop:expr) => { + { + let flgs = & $flags; + let got = (flgs.root.load(Ordering::Relaxed), + flgs.unroot.load(Ordering::Relaxed), + flgs.drop.load(Ordering::Relaxed)); + let expected = ($root, $unroot, $drop); + + assert_eq!(got, expected); + } + } +} + +// Utility methods for the tests +struct GcWatchFlags { + root: AtomicUsize, + unroot: AtomicUsize, + drop: AtomicUsize, +} + +const GC_WATCH_FLAGS_INIT: GcWatchFlags = GcWatchFlags { + root: ATOMIC_USIZE_INIT, + unroot: ATOMIC_USIZE_INIT, + drop: ATOMIC_USIZE_INIT, +}; + +struct GcWatch(&'static GcWatchFlags); + +impl Drop for GcWatch { + fn drop(&mut self) { + self.0.drop.fetch_add(1, Ordering::SeqCst); + } +} + +impl Trace for GcWatch { + unsafe fn _trace(&self, _: T) { + unimplemented!(); + } + unsafe fn _cgc_mark(&self, _: bool) { + // As multiple tests can be running at the same time, + // mark events can happen at times when we wouldn't expect. + // + // It is pretty meaningless to measure mark events, as + // they are non-deterministic. + } + unsafe fn _cgc_root(&self) { + self.0.root.fetch_add(1, Ordering::SeqCst); + } + unsafe fn _cgc_unroot(&self) { + self.0.unroot.fetch_add(1, Ordering::SeqCst); + } +} + +// Tests + +#[test] +fn basic_allocate() { + static FLAGS: GcWatchFlags = GC_WATCH_FLAGS_INIT; + + { + let _gced_val = Cgc::new(GcWatch(&FLAGS)); + assert_gcwf!(FLAGS, 0, 1, 0); + cgc_force_collect(); + assert_gcwf!(FLAGS, 0, 1, 0); + } + + // A collection could have happened on a seperate thread here + cgc_force_collect(); + assert_gcwf!(FLAGS, 0, 1, 1); +} + +// XXX FIXME more tests diff --git a/gc/tests/gc_semantics.rs b/gc/tests/gc_semantics.rs index a5b4756..365aac0 100644 --- a/gc/tests/gc_semantics.rs +++ b/gc/tests/gc_semantics.rs @@ -5,7 +5,7 @@ extern crate gc; use std::cell::Cell; use std::thread::LocalKey; -use gc::{Trace, GcCell, Gc, force_collect}; +use gc::{Trace, GcCell, Gc, gc_force_collect}; // Utility methods for the tests #[derive(PartialEq, Eq, Debug, Clone, Copy)] @@ -49,21 +49,24 @@ impl Drop for GcWatch { } impl Trace for GcWatch { - unsafe fn trace(&self) { + unsafe fn _trace(&self, _: T) { + unimplemented!(); + } + unsafe fn _gc_mark(&self) { self.0.with(|f| { let mut of = f.get(); of.trace += 1; f.set(of); }); } - unsafe fn root(&self) { + unsafe fn _gc_root(&self) { self.0.with(|f| { let mut of = f.get(); of.root += 1; f.set(of); }); } - unsafe fn unroot(&self) { + unsafe fn _gc_unroot(&self) { self.0.with(|f| { let mut of = f.get(); of.unroot += 1; @@ -87,12 +90,12 @@ fn basic_allocate() { { let _gced_val = Gc::new(GcWatch(&FLAGS)); FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0))); - force_collect(); + gc_force_collect(); FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0))); } FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0))); - force_collect(); + gc_force_collect(); FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 1))); } @@ -116,7 +119,7 @@ fn basic_cycle_allocate() { FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0))); FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0))); - force_collect(); + gc_force_collect(); FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0))); FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0))); @@ -128,7 +131,7 @@ fn basic_cycle_allocate() { FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0))); FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0))); - force_collect(); + gc_force_collect(); FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0))); FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0))); @@ -137,7 +140,7 @@ fn basic_cycle_allocate() { FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0))); FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0))); - force_collect(); + gc_force_collect(); FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 0))); FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 0))); @@ -146,7 +149,7 @@ fn basic_cycle_allocate() { FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 0))); FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 0))); - force_collect(); + gc_force_collect(); FLAGS1.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 1))); FLAGS2.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 0, 1, 1))); @@ -186,7 +189,7 @@ fn gccell_rooting() { FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(0, 0, 1, 0))); // It should be traced by the GC - force_collect(); + gc_force_collect(); FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0))); { @@ -199,7 +202,7 @@ fn gccell_rooting() { FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(1, 0, 1, 0))); // It should be traced by the GC - force_collect(); + gc_force_collect(); FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 0, 1, 0))); } @@ -212,7 +215,7 @@ fn gccell_rooting() { // It shouldn't be traced by the GC (as it's owned by the GcCell) // If it had rootable members, they would be traced by the GC - force_collect(); + gc_force_collect(); FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 1, 1, 0))); } @@ -220,15 +223,19 @@ fn gccell_rooting() { FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(2, 1, 2, 0))); // It should be traced by the GC - force_collect(); + gc_force_collect(); FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 1, 2, 0))) } // It should be collected by the GC - force_collect(); + gc_force_collect(); FLAGS.with(|f| assert_eq!(f.get(), GcWatchFlags::new(3, 1, 2, 1))) } +/* +// XXX Broken by new changes to Trace (method _trace has generic type parameters) +// XXX Object Safety - FIXME + #[test] fn trait_gc() { #[derive(Trace)] @@ -247,3 +254,4 @@ fn trait_gc() { use_trait_gc(gc_foo); use_trait_gc(gc_bar); } +*/ diff --git a/gc/tests/gymnastics_cycle.rs b/gc/tests/gymnastics_cycle.rs index 424f937..9fbe255 100644 --- a/gc/tests/gymnastics_cycle.rs +++ b/gc/tests/gymnastics_cycle.rs @@ -4,7 +4,7 @@ extern crate gc; use std::cell::Cell; -use gc::{Trace, GcCell, Gc, force_collect}; +use gc::{GcCell, Gc, gc_force_collect}; thread_local!(static COUNTER: Cell = Cell::new(0u8)); @@ -41,7 +41,7 @@ fn test_cycle() { } println!("Before collection: {:?}", COUNTER.with(|s| s.get())); - force_collect(); + gc_force_collect(); println!("After collection: {:?}", COUNTER.with(|s| s.get())); assert_eq!(COUNTER.with(|s| s.get()), 4); -} \ No newline at end of file +} diff --git a/gc/tests/trace_impl.rs b/gc/tests/trace_impl.rs index 8a1abef..e9b36fd 100644 --- a/gc/tests/trace_impl.rs +++ b/gc/tests/trace_impl.rs @@ -6,20 +6,18 @@ use std::cell::RefCell; thread_local!(static X: RefCell = RefCell::new(0)); -use gc::Trace; +use gc::{Trace, Tracer}; #[derive(Copy, Clone)] struct Foo; impl Trace for Foo { - unsafe fn trace(&self) { + unsafe fn _trace(&self, _: T) { X.with(|x| { let mut m = x.borrow_mut(); *m = *m + 1; }) } - unsafe fn root(&self){} - unsafe fn unroot(&self){} } #[derive(Trace, Copy, Clone)] @@ -36,7 +34,7 @@ struct Baz { #[test] fn test() { let bar = Bar{inner: Foo}; - unsafe { bar.trace(); } + unsafe { bar._gc_mark(); } X.with(|x| { assert!(*x.borrow() == 1) }); @@ -44,7 +42,7 @@ fn test() { a: bar, b: bar }; - unsafe { baz.trace(); } + unsafe { baz._gc_mark(); } X.with(|x| { assert!(*x.borrow() == 3) }); diff --git a/gc_plugin/src/lib.rs b/gc_plugin/src/lib.rs index bc24ee1..ec78dd0 100644 --- a/gc_plugin/src/lib.rs +++ b/gc_plugin/src/lib.rs @@ -3,12 +3,13 @@ #[macro_use] extern crate syntax; +extern crate syntax_ext; #[macro_use] extern crate rustc; +extern crate rustc_plugin; - -use rustc::plugin::Registry; +use rustc_plugin::Registry; use syntax::parse::token::intern; use syntax::ext::base::{Annotatable, ExtCtxt, MultiDecorator}; @@ -16,7 +17,7 @@ use syntax::codemap::Span; use syntax::ptr::P; use syntax::ast::{MetaItem, Expr}; use syntax::ext::build::AstBuilder; -use syntax::ext::deriving::generic::{combine_substructure, EnumMatching, FieldInfo, MethodDef, Struct, Substructure, TraitDef, ty}; +use syntax_ext::deriving::generic::{combine_substructure, EnumMatching, FieldInfo, MethodDef, Struct, Substructure, TraitDef, ty}; #[plugin_registrar] @@ -33,36 +34,22 @@ pub fn expand_trace(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Annot generics: ty::LifetimeBounds::empty(), methods: vec![ MethodDef { - name: "trace", - generics: ty::LifetimeBounds::empty(), + name: "_trace", + generics: ty::LifetimeBounds { + lifetimes: Vec::new(), + bounds: vec![("__T", + vec![ty::Path::new(vec!["gc", "Tracer"])])], + }, explicit_self: ty::borrowed_explicit_self(), - args: vec!(), - ret_ty: ty::nil_ty(), - attributes: vec![], // todo: handle inlining - is_unsafe: true, - combine_substructure: combine_substructure(box trace_substructure) - }, - MethodDef { - name: "root", - generics: ty::LifetimeBounds::empty(), - explicit_self: ty::borrowed_explicit_self(), - args: vec!(), - ret_ty: ty::nil_ty(), - attributes: vec![], - is_unsafe: true, - combine_substructure: combine_substructure(box trace_substructure) - }, - MethodDef { - name: "unroot", - generics: ty::LifetimeBounds::empty(), - explicit_self: ty::borrowed_explicit_self(), - args: vec!(), + args: vec![ty::Literal(ty::Path::new_local("__T"))], ret_ty: ty::nil_ty(), attributes: vec![], is_unsafe: true, + unify_fieldless_variants: false, combine_substructure: combine_substructure(box trace_substructure) } ], + is_unsafe: false, associated_types: vec![], }; trait_def.expand(cx, mitem, item, push) @@ -70,20 +57,26 @@ pub fn expand_trace(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Annot // Mostly copied from syntax::ext::deriving::hash and Servo's #[jstraceable] fn trace_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { - let trace_ident = substr.method_ident; - let call_trace = |span, thing_expr| { - let expr = cx.expr_method_call(span, thing_expr, trace_ident, vec!()); + let tracer_expr = match (substr.nonself_args.len(), substr.nonself_args.get(0)) { + (1, Some(o_f)) => o_f, + _ => cx.span_bug(trait_span, "incorrect number of arguments in #[derive(Trace)]") + }; + let call_traverse = |span, thing_expr| { + let expr = cx.expr_method_call(span, + tracer_expr.clone(), + cx.ident_of("traverse"), + vec![cx.expr_addr_of(span, thing_expr)]); cx.stmt_expr(expr) }; let mut stmts = Vec::new(); let fields = match *substr.fields { - Struct(ref fs) | EnumMatching(_, _, ref fs) => fs, + Struct(_, ref fs) | EnumMatching(_, _, ref fs) => fs, _ => cx.span_bug(trait_span, "impossible substructure in `#[derive(Trace)]`") }; - for &FieldInfo { ref self_, span, .. } in fields.iter() { - stmts.push(call_trace(span, self_.clone())); + for &FieldInfo { ref self_, span, .. } in fields { + stmts.push(call_traverse(span, self_.clone())); } cx.expr_block(cx.block(trait_span, stmts, None))