From 229cefe028665e759c75a5be8cf07602ebb79186 Mon Sep 17 00:00:00 2001 From: bad Date: Mon, 16 May 2022 15:11:41 +0200 Subject: [PATCH] Create a GcRef type --- Cargo.lock | 1 + Cargo.toml | 1 + gc/src/allocator.rs | 75 +++++++++++++++++++++++++++ gc/src/gc_ref.rs | 25 +++++++++ gc/src/lib.rs | 121 +++++--------------------------------- gc/src/trace.rs | 55 ++++++++++++++++++++ 6 files changed, 171 insertions(+), 107 deletions(-) create mode 100644 gc/src/allocator.rs create mode 100644 gc/src/gc_ref.rs create mode 100644 gc/src/trace.rs diff --git a/Cargo.lock b/Cargo.lock index 28d81fc..8318b40 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -150,6 +150,7 @@ dependencies = [ "color-eyre", "dotenv", "from_variants", + "gc", "itertools", "match_any", "tracing", diff --git a/Cargo.toml b/Cargo.toml index c5ef5fb..3f33d6f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,7 @@ members = [ ] [dependencies] +gc = { path = "./gc" } clap = { version = "3.1.9", features = ["derive"] } color-eyre = "0.6.1" dotenv = "0.15.0" diff --git a/gc/src/allocator.rs b/gc/src/allocator.rs new file mode 100644 index 0000000..ad14de4 --- /dev/null +++ b/gc/src/allocator.rs @@ -0,0 +1,75 @@ +use std::ptr::NonNull; + +use super::gc_ref::GcRef; +use super::trace; + +#[derive(Default)] +pub struct GCAllocator { + allocations: Vec<Allocation>, +} + +impl GCAllocator { + #[inline(always)] + pub fn alloc<T>(&mut self, x: T) -> GcRef<T> { + let alloc = Allocation::new(x); + let ptr = alloc.ptr as *mut T; + self.allocations.push(alloc); + unsafe { GcRef::new(NonNull::new(ptr).unwrap()) } + } + + /// # Safety + /// Root needs to contain all the accessible gc allocated references + /// In case root itself is gc allocated use [`gc_ref_root`] + /// + /// [`gc_ref_root`]: GCAllocator::gc_ref_root + pub unsafe fn gc<T: trace::GCTrace>(&mut self, root: &T) { + // Mark + let mut tracer = trace::GCTracer::with_capacity(self.allocations.len()); + root.trace(&mut tracer); + + // And sweep + self.allocations + .drain_filter(|a| 
!tracer.is_accessible(a.ptr)); + } + + // Specialization when ;-; + /// [`gc`] but for roots that are allocated on the heap + /// # Safety + /// See [`gc`] + /// + /// [`gc`]: GCAllocator::gc + pub unsafe fn gc_ref_root<T: trace::GCTrace>(&mut self, root: &GcRef<T>) { + // Mark + let mut tracer = trace::GCTracer::with_capacity(self.allocations.len()); + tracer.mark_reachable(root); + root.trace(&mut tracer); + + // And sweep + self.allocations + .drain_filter(|a| !tracer.is_accessible(a.ptr)); + } +} + +struct Allocation { + ptr: *mut (), + drop: unsafe fn(*mut ()), +} + +impl Allocation { + fn new<T>(x: T) -> Self { + let alloc = Box::new(x); + let ptr = Box::into_raw(alloc) as *mut (); + + let drop = |ptr| unsafe { + Box::from_raw(ptr as *mut T); + }; + + Self { ptr, drop } + } +} + +impl Drop for Allocation { + fn drop(&mut self) { + unsafe { (self.drop)(self.ptr) }; + } +} diff --git a/gc/src/gc_ref.rs b/gc/src/gc_ref.rs new file mode 100644 index 0000000..53f4a2d --- /dev/null +++ b/gc/src/gc_ref.rs @@ -0,0 +1,25 @@ +use std::{ops::Deref, ptr::NonNull}; + +pub struct GcRef<T>(pub(crate) NonNull<T>); + +impl<T> Deref for GcRef<T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + unsafe { self.0.as_ref() } + } +} + +impl<T> GcRef<T> { + pub(crate) unsafe fn new(ptr: NonNull<T>) -> Self { + Self(ptr) + } + + /// # Safety + /// Ensure that this is the only instance of a pointer to the underlying value. 
+ /// You might want to instead use one of various [cell][`std::cell`] types as the allocated + /// type + pub unsafe fn get_mut(this: &mut Self) -> &mut T { + this.0.as_mut() + } +} diff --git a/gc/src/lib.rs b/gc/src/lib.rs index 97fea36..f1c4c7b 100644 --- a/gc/src/lib.rs +++ b/gc/src/lib.rs @@ -1,112 +1,21 @@ #![feature(drain_filter)] -use std::collections::HashSet; - -struct Allocation { - ptr: *mut (), - drop: unsafe fn(*mut ()), -} - -impl Allocation { - fn new<T>(x: T) -> Self { - let alloc = Box::new(x); - let ptr = Box::into_raw(alloc) as *mut (); - - let drop = |ptr| unsafe { - Box::from_raw(ptr as *mut T); - }; - - Self { ptr, drop } - } -} - -impl Drop for Allocation { - fn drop(&mut self) { - unsafe { (self.drop)(self.ptr) }; - } -} - -#[derive(Default)] -pub struct GCAllocator { - allocations: Vec<Allocation>, -} - -impl GCAllocator { - #[inline(always)] - pub fn alloc<T>(&mut self, x: T) -> *mut T { - let alloc = Allocation::new(x); - let ptr = alloc.ptr as *mut T; - self.allocations.push(alloc); - ptr - } - - pub fn gc<T: GCTrace>(&mut self, root: &T) { - // Mark - let mut tracer = GCTracer::with_capacity(self.allocations.len()); - tracer.mark_reachable_rec(root); - - // And sweep - self.allocations - .drain_filter(|a| !tracer.accessible.contains(&(a.ptr as *const ()))); - } -} - -pub struct GCTracer { - accessible: HashSet<*const ()>, -} - -impl GCTracer { - fn with_capacity(cap: usize) -> Self { - Self { - accessible: HashSet::with_capacity(cap), - } - } - - pub fn mark_reachable<T>(&mut self, obj: &T) { - self.accessible.insert(obj as *const T as *const ()); - } - - pub fn mark_reachable_rec<T: GCTrace>(&mut self, obj: &T) { - if !self.accessible.contains(&(obj as *const T as *const ())) { - self.accessible.insert(obj as *const T as *const ()); - obj.trace(self); - } - } -} - -/// -/// # Safety -/// Implementors of the trait *need* to ensure that every reachable reference gets marked as -/// reachable with mark_reachable or mark_reachable_rec. 
-pub unsafe trait GCTrace { - fn trace(&self, tracer: &mut GCTracer); -} - -unsafe impl<T> GCTrace for [T] -where - T: GCTrace, -{ - fn trace(&self, tracer: &mut GCTracer) { - for item in self { - item.trace(tracer) - } - } -} - -unsafe impl GCTrace for () { - fn trace(&self, _tracer: &mut GCTracer) {} -} +pub mod allocator; +pub mod gc_ref; +pub mod trace; #[cfg(test)] mod tests { - use super::*; + use super::allocator::GCAllocator; + use super::gc_ref::GcRef; + use super::trace; use std::sync::atomic::{AtomicBool, Ordering}; - struct GCRoot<'a>(*mut GotDropped<'a>); + struct GCRoot<'a>(GcRef<GotDropped<'a>>); - unsafe impl<'a> GCTrace for GCRoot<'a> { - fn trace(&self, tracer: &mut GCTracer) { - tracer.mark_reachable(unsafe { &*self.0 }) + unsafe impl<'a> trace::GCTrace for GCRoot<'a> { + fn trace(&self, tracer: &mut trace::GCTracer) { + tracer.mark_reachable(&self.0) } } @@ -124,20 +33,18 @@ mod tests { let mut gc = GCAllocator::default(); gc.alloc(got_dropped); - gc.gc(&()); + unsafe { gc.gc(&()) }; assert!(dropped.load(Ordering::Acquire)); let dropped = AtomicBool::from(false); let got_dropped = gc.alloc(GotDropped(&dropped)); let gc_root = gc.alloc(GCRoot(got_dropped)); + unsafe { - gc.gc(&*gc_root); - gc.gc(&*gc_root); - gc.gc(&*gc_root); - gc.gc(&*gc_root); + gc.gc_ref_root(&gc_root); }; assert!(!dropped.load(Ordering::Acquire)); - gc.gc(&()); + unsafe { gc.gc(&()) }; assert!(dropped.load(Ordering::Acquire)); } } diff --git a/gc/src/trace.rs b/gc/src/trace.rs new file mode 100644 index 0000000..c2bc79f --- /dev/null +++ b/gc/src/trace.rs @@ -0,0 +1,55 @@ +use std::collections::HashSet; + +use crate::gc_ref::GcRef; + +pub struct GCTracer { + accessible: HashSet<*const ()>, +} + +impl GCTracer { + pub(super) fn with_capacity(cap: usize) -> Self { + Self { + accessible: HashSet::with_capacity(cap), + } + } + + pub fn is_accessible(&self, ptr: *const ()) -> bool { + self.accessible.contains(&ptr) + } + + pub fn mark_reachable<T>(&mut self, obj: &GcRef<T>) { + let ptr = obj.0.as_ptr() 
as *const (); + self.accessible.insert(ptr); + } + + pub fn mark_reachable_rec<T: GCTrace>(&mut self, obj: &GcRef<T>) { + let ptr = obj.0.as_ptr() as *const (); + if !self.accessible.contains(&ptr) { + self.accessible.insert(ptr); + obj.trace(self); + } + } +} + +/// +/// # Safety +/// Implementors of the trait *need* to ensure that every reachable reference gets marked as +/// reachable with mark_reachable or mark_reachable_rec. +pub unsafe trait GCTrace { + fn trace(&self, tracer: &mut GCTracer); +} + +unsafe impl<T> GCTrace for &[T] +where + T: GCTrace, +{ + fn trace(&self, tracer: &mut GCTracer) { + for item in self.iter() { + item.trace(tracer) + } + } +} + +unsafe impl GCTrace for () { + fn trace(&self, _tracer: &mut GCTracer) {} +}