Create a GcRef type

This commit is contained in:
bad 2022-05-16 15:11:41 +02:00
parent ba8f854b28
commit 229cefe028
6 changed files with 171 additions and 107 deletions

1
Cargo.lock generated
View File

@ -150,6 +150,7 @@ dependencies = [
"color-eyre",
"dotenv",
"from_variants",
"gc",
"itertools",
"match_any",
"tracing",

View File

@ -9,6 +9,7 @@ members = [
]
[dependencies]
gc = { path = "./gc" }
clap = { version = "3.1.9", features = ["derive"] }
color-eyre = "0.6.1"
dotenv = "0.15.0"

75
gc/src/allocator.rs Normal file
View File

@ -0,0 +1,75 @@
use std::ptr::NonNull;
use super::gc_ref::GcRef;
use super::trace;
/// A mark-and-sweep garbage collector: owns every allocation it hands out
/// and drops the ones that are no longer reachable when asked to collect.
#[derive(Default)]
pub struct GCAllocator {
    // Every live allocation; sweeping (`gc`/`gc_ref_root`) removes entries,
    // and removing an `Allocation` runs its destructor.
    allocations: Vec<Allocation>,
}
impl GCAllocator {
    /// Allocates `x` on the GC heap and returns a handle to it.
    ///
    /// The allocation stays alive until a collection ([`gc`] / [`gc_ref_root`])
    /// finds it unreachable from the supplied root.
    ///
    /// [`gc`]: GCAllocator::gc
    /// [`gc_ref_root`]: GCAllocator::gc_ref_root
    #[inline(always)]
    pub fn alloc<T>(&mut self, x: T) -> GcRef<T> {
        let alloc = Allocation::new(x);
        // The erased pointer came from a `Box<T>`, so casting back to
        // `*mut T` is valid and the pointer is never null.
        let ptr = alloc.ptr as *mut T;
        self.allocations.push(alloc);
        unsafe { GcRef::new(NonNull::new(ptr).unwrap()) }
    }
    /// # Safety
    /// Root needs to contain all the accessible gc allocated references
    /// In case root itself is gc allocated use [`gc_ref_root`]
    ///
    /// [`gc_ref_root`]: GCAllocator::gc_ref_root
    pub unsafe fn gc<T: trace::GCTrace>(&mut self, root: &T) {
        // Mark: everything reachable from the (stack-held) root.
        let mut tracer = trace::GCTracer::with_capacity(self.allocations.len());
        root.trace(&mut tracer);
        // And sweep
        self.sweep(&tracer);
    }
    // Specialization when ;-;
    /// [`gc`] but for roots that are allocated on the heap
    /// # Safety
    /// See [`gc`]
    ///
    /// [`gc`]: GCAllocator::gc
    pub unsafe fn gc_ref_root<T: trace::GCTrace>(&mut self, root: &GcRef<T>) {
        // Mark: the root itself lives on the GC heap, so mark it as well
        // before tracing its contents.
        let mut tracer = trace::GCTracer::with_capacity(self.allocations.len());
        tracer.mark_reachable(root);
        root.trace(&mut tracer);
        // And sweep
        self.sweep(&tracer);
    }
    /// Drops every allocation the tracer did not mark as accessible.
    ///
    /// Uses stable `Vec::retain` instead of the unstable `drain_filter`
    /// feature; the set of surviving allocations (and the in-order drop of
    /// the removed ones) is identical.
    fn sweep(&mut self, tracer: &trace::GCTracer) {
        self.allocations.retain(|a| tracer.is_accessible(a.ptr));
    }
}
/// A single type-erased GC allocation: the raw heap pointer plus a
/// destructor monomorphized for the original `T`.
struct Allocation {
    ptr: *mut (),
    // Set in `Allocation::new`; reconstructs the `Box<T>` from `ptr` and drops it.
    drop: unsafe fn(*mut ()),
}
impl Allocation {
    /// Moves `x` onto the heap and type-erases it.
    ///
    /// Also captures a non-capturing closure (coerced to a fn pointer) that
    /// knows how to rebuild the `Box<T>` and drop it, so the value can be
    /// destroyed later without knowing its concrete type.
    fn new<T>(x: T) -> Self {
        let alloc = Box::new(x);
        let ptr = Box::into_raw(alloc) as *mut ();
        // Explicit `drop(...)` call: the bare `Box::from_raw(ptr);` statement
        // form trips the `must_use` lint on `Box::from_raw`.
        let drop_impl: unsafe fn(*mut ()) = |ptr| unsafe {
            drop(Box::from_raw(ptr as *mut T));
        };
        Self {
            ptr,
            drop: drop_impl,
        }
    }
}
impl Drop for Allocation {
    fn drop(&mut self) {
        // SAFETY: `ptr` was produced by `Box::into_raw` in `Allocation::new`,
        // and this destructor is the only place the stored drop function is
        // invoked, so the value is freed exactly once.
        unsafe { (self.drop)(self.ptr) };
    }
}

25
gc/src/gc_ref.rs Normal file
View File

@ -0,0 +1,25 @@
use std::{ops::Deref, ptr::NonNull};
/// A `Deref`-able handle to a value owned by the GC allocator.
///
/// NOTE(review): the handle itself does not keep the value alive — the
/// allocator frees the allocation during a collection unless it is reachable
/// from the traced root; confirm callers never hold a `GcRef` past that.
pub struct GcRef<T>(pub(crate) NonNull<T>);
impl<T> Deref for GcRef<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // SAFETY: the pointer was created from a live heap allocation by the
        // allocator; this is sound as long as the allocation has not been
        // swept by a collection.
        unsafe { self.0.as_ref() }
    }
}
impl<T> GcRef<T> {
    /// Wraps a raw pointer handed out by the allocator.
    ///
    /// # Safety
    /// `ptr` must point to a live value owned by the GC allocator.
    pub(crate) unsafe fn new(ptr: NonNull<T>) -> Self {
        Self(ptr)
    }
    /// Returns a mutable reference to the pointed-to value.
    ///
    /// # Safety
    /// Ensure that this is the only instance of a pointer to the underlying value.
    /// You might want to instead use one of various [cell][`std::cell`] types as the allocated
    /// type
    pub unsafe fn get_mut(this: &mut Self) -> &mut T {
        this.0.as_mut()
    }
}

View File

@ -1,112 +1,21 @@
#![feature(drain_filter)]
use std::collections::HashSet;
struct Allocation {
ptr: *mut (),
drop: unsafe fn(*mut ()),
}
impl Allocation {
fn new<T>(x: T) -> Self {
let alloc = Box::new(x);
let ptr = Box::into_raw(alloc) as *mut ();
let drop = |ptr| unsafe {
Box::from_raw(ptr as *mut T);
};
Self { ptr, drop }
}
}
impl Drop for Allocation {
fn drop(&mut self) {
unsafe { (self.drop)(self.ptr) };
}
}
#[derive(Default)]
pub struct GCAllocator {
allocations: Vec<Allocation>,
}
impl GCAllocator {
#[inline(always)]
pub fn alloc<T>(&mut self, x: T) -> *mut T {
let alloc = Allocation::new(x);
let ptr = alloc.ptr as *mut T;
self.allocations.push(alloc);
ptr
}
pub fn gc<T: GCTrace>(&mut self, root: &T) {
// Mark
let mut tracer = GCTracer::with_capacity(self.allocations.len());
tracer.mark_reachable_rec(root);
// And sweep
self.allocations
.drain_filter(|a| !tracer.accessible.contains(&(a.ptr as *const ())));
}
}
pub struct GCTracer {
accessible: HashSet<*const ()>,
}
impl GCTracer {
fn with_capacity(cap: usize) -> Self {
Self {
accessible: HashSet::with_capacity(cap),
}
}
pub fn mark_reachable<T>(&mut self, obj: &T) {
self.accessible.insert(obj as *const T as *const ());
}
pub fn mark_reachable_rec<T: GCTrace>(&mut self, obj: &T) {
if !self.accessible.contains(&(obj as *const T as *const ())) {
self.accessible.insert(obj as *const T as *const ());
obj.trace(self);
}
}
}
///
/// # Safety
/// Implementors of the trait *need* to ensure that every reachable reference gets marked as
/// reachable with mark_reachable or mark_reachable_rec.
pub unsafe trait GCTrace {
fn trace(&self, tracer: &mut GCTracer);
}
unsafe impl<T> GCTrace for [T]
where
T: GCTrace,
{
fn trace(&self, tracer: &mut GCTracer) {
for item in self {
item.trace(tracer)
}
}
}
unsafe impl GCTrace for () {
fn trace(&self, _tracer: &mut GCTracer) {}
}
pub mod allocator;
pub mod gc_ref;
pub mod trace;
#[cfg(test)]
mod tests {
use super::*;
use super::allocator::GCAllocator;
use super::gc_ref::GcRef;
use super::trace;
use std::sync::atomic::{AtomicBool, Ordering};
struct GCRoot<'a>(*mut GotDropped<'a>);
struct GCRoot<'a>(GcRef<GotDropped<'a>>);
unsafe impl<'a> GCTrace for GCRoot<'a> {
fn trace(&self, tracer: &mut GCTracer) {
tracer.mark_reachable(unsafe { &*self.0 })
unsafe impl<'a> trace::GCTrace for GCRoot<'a> {
fn trace(&self, tracer: &mut trace::GCTracer) {
tracer.mark_reachable(&self.0)
}
}
@ -124,20 +33,18 @@ mod tests {
let mut gc = GCAllocator::default();
gc.alloc(got_dropped);
gc.gc(&());
unsafe { gc.gc(&()) };
assert!(dropped.load(Ordering::Acquire));
let dropped = AtomicBool::from(false);
let got_dropped = gc.alloc(GotDropped(&dropped));
let gc_root = gc.alloc(GCRoot(got_dropped));
unsafe {
gc.gc(&*gc_root);
gc.gc(&*gc_root);
gc.gc(&*gc_root);
gc.gc(&*gc_root);
gc.gc_ref_root(&gc_root);
};
assert!(!dropped.load(Ordering::Acquire));
gc.gc(&());
unsafe { gc.gc(&()) };
assert!(dropped.load(Ordering::Acquire));
}
}

55
gc/src/trace.rs Normal file
View File

@ -0,0 +1,55 @@
use std::collections::HashSet;
use crate::gc_ref::GcRef;
/// Records which GC allocations were found reachable during a mark phase.
pub struct GCTracer {
    // Type-erased addresses of every allocation marked reachable so far.
    accessible: HashSet<*const ()>,
}
impl GCTracer {
    /// Creates a tracer with room for `cap` pointers, so marking a full heap
    /// does not need to regrow the set.
    pub(super) fn with_capacity(cap: usize) -> Self {
        Self {
            accessible: HashSet::with_capacity(cap),
        }
    }
    /// Returns whether `ptr` was marked reachable during the mark phase.
    pub fn is_accessible(&self, ptr: *const ()) -> bool {
        self.accessible.contains(&ptr)
    }
    /// Marks `obj` reachable without tracing its contents.
    pub fn mark_reachable<T>(&mut self, obj: &GcRef<T>) {
        let ptr = obj.0.as_ptr() as *const ();
        self.accessible.insert(ptr);
    }
    /// Marks `obj` reachable and recursively traces what it references.
    pub fn mark_reachable_rec<T: GCTrace>(&mut self, obj: &GcRef<T>) {
        let ptr = obj.0.as_ptr() as *const ();
        // `HashSet::insert` returns `false` for an already-present entry:
        // one lookup instead of `contains` + `insert`, and recursion
        // terminates on reference cycles.
        if self.accessible.insert(ptr) {
            obj.trace(self);
        }
    }
}
/// Trait for types whose GC-allocated references can be discovered by the
/// collector's mark phase.
/// # Safety
/// Implementors of the trait *need* to ensure that every reachable reference gets marked as
/// reachable with mark_reachable or mark_reachable_rec.
pub unsafe trait GCTrace {
    fn trace(&self, tracer: &mut GCTracer);
}
// SAFETY: every element of the slice is traced, so every reference reachable
// through the slice gets marked — provided each element's impl upholds the
// same contract.
/// A slice of traceable values is traced by tracing each element in turn.
unsafe impl<T> GCTrace for &[T]
where
    T: GCTrace,
{
    fn trace(&self, tracer: &mut GCTracer) {
        (*self).iter().for_each(|element| element.trace(tracer));
    }
}
// SAFETY: unit holds no GC references, so there is nothing to mark.
unsafe impl GCTrace for () {
    fn trace(&self, _tracer: &mut GCTracer) {}
}