//! crftng-intrprtrs/gc/src/lib.rs — a minimal type-erased mark-and-sweep
//! garbage collector: `GCAllocator` owns allocations, `GCTracer` records the
//! reachable set, and the unsafe `GCTrace` trait reports held references.
#![feature(drain_filter)]
use std::collections::HashSet;
/// A type-erased owned heap allocation tracked by the GC.
struct Allocation {
    // Raw pointer to the boxed value, erased to `*mut ()`.
    ptr: *mut (),
    // Type-erased destructor: rebuilds the `Box<T>` from `ptr` and drops it.
    drop: unsafe fn(*mut ()),
}
impl Allocation {
    /// Moves `x` onto the heap and pairs the pointer with a type-erased
    /// destructor that knows how to free it as a `T`.
    fn new<T>(x: T) -> Self {
        let ptr = Box::into_raw(Box::new(x)) as *mut ();
        // Non-capturing closure, coerced to an `unsafe fn(*mut ())`.
        let destroy: unsafe fn(*mut ()) = |raw| {
            // SAFETY: callers pass the `Box::into_raw` pointer stored above,
            // exactly once; rebuilding the Box runs `T`'s destructor.
            unsafe { drop(Box::from_raw(raw as *mut T)) };
        };
        Self { ptr, drop: destroy }
    }
}
impl Drop for Allocation {
    fn drop(&mut self) {
        // SAFETY: `ptr` came from `Box::into_raw` in `Allocation::new`, and the
        // stored fn pointer frees it exactly once — here.
        unsafe { (self.drop)(self.ptr) };
    }
}
/// A mark-and-sweep allocator: owns every allocation it hands out and frees
/// the ones not reachable from the root passed to `gc`.
#[derive(Default)]
pub struct GCAllocator {
    // Every live allocation made through `alloc`; swept by `gc`.
    allocations: Vec<Allocation>,
}
impl GCAllocator {
    /// Moves `x` onto the GC heap and returns a raw pointer to it.
    ///
    /// The pointer stays valid until a call to [`GCAllocator::gc`] whose root
    /// does not reach it.
    // `#[inline]` is enough of a hint; `always` would only bloat call sites.
    #[inline]
    pub fn alloc<T>(&mut self, x: T) -> *mut T {
        let alloc = Allocation::new(x);
        let ptr = alloc.ptr as *mut T;
        self.allocations.push(alloc);
        ptr
    }

    /// Runs a collection: every allocation not reachable from `root` is freed.
    pub fn gc<T: GCTrace>(&mut self, root: &T) {
        // Mark: record the address of every object reachable from `root`.
        let mut tracer = GCTracer::with_capacity(self.allocations.len());
        tracer.mark_reachable_rec(root);
        // Sweep: keep only marked allocations; the removed `Allocation`s are
        // dropped in place, which frees their payloads. `retain` is stable,
        // unlike the removed nightly `drain_filter` API (renamed `extract_if`),
        // so the `#![feature(drain_filter)]` gate is no longer needed.
        self.allocations
            .retain(|a| tracer.accessible.contains(&(a.ptr as *const ())));
    }
}
/// Accumulates the set of object addresses seen during the mark phase.
pub struct GCTracer {
    // Type-erased addresses of every object marked reachable so far.
    accessible: HashSet<*const ()>,
}
impl GCTracer {
    /// Creates a tracer pre-sized for up to `cap` reachable objects.
    fn with_capacity(cap: usize) -> Self {
        Self {
            accessible: HashSet::with_capacity(cap),
        }
    }

    /// Marks `obj` reachable without tracing anything it references.
    pub fn mark_reachable<T>(&mut self, obj: &T) {
        self.accessible.insert(obj as *const T as *const ());
    }

    /// Marks `obj` reachable and recursively traces the objects it references.
    ///
    /// Inserting before tracing makes cycles terminate: each address is
    /// traced only the first time it is seen.
    pub fn mark_reachable_rec<T: GCTrace>(&mut self, obj: &T) {
        // `HashSet::insert` returns true only for a newly inserted value, so
        // a single hash lookup replaces the previous `contains` + `insert`.
        if self.accessible.insert(obj as *const T as *const ()) {
            obj.trace(self);
        }
    }
}
/// Objects that can report the GC-managed references they hold.
///
/// # Safety
/// Implementors of the trait *need* to ensure that every reachable reference gets marked as
/// reachable with mark_reachable or mark_reachable_rec.
pub unsafe trait GCTrace {
    /// Report every object directly reachable from `self` to `tracer`.
    fn trace(&self, tracer: &mut GCTracer);
}
// SAFETY: a slice holds exactly its elements, and each one is traced below.
unsafe impl<T: GCTrace> GCTrace for [T] {
    /// A slice is reachable element-wise: trace every item it contains.
    fn trace(&self, tracer: &mut GCTracer) {
        self.iter().for_each(|element| element.trace(tracer));
    }
}
// SAFETY: the unit type holds no references, so tracing nothing is correct.
// Handy as an "empty root" that makes `gc` sweep every allocation.
unsafe impl GCTrace for () {
    fn trace(&self, _tracer: &mut GCTracer) {}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::{AtomicBool, Ordering};

    /// A root object holding one raw pointer to a `GotDropped`.
    struct GCRoot<'a>(*mut GotDropped<'a>);

    // SAFETY: the pointee of `self.0` is the only reference held, and it is
    // always marked.
    unsafe impl<'a> GCTrace for GCRoot<'a> {
        fn trace(&self, tracer: &mut GCTracer) {
            tracer.mark_reachable(unsafe { &*self.0 })
        }
    }

    /// Raises its flag when dropped, so the tests can observe the sweep.
    struct GotDropped<'a>(&'a AtomicBool);

    impl<'a> Drop for GotDropped<'a> {
        fn drop(&mut self) {
            self.0.store(true, Ordering::Release);
        }
    }

    #[test]
    fn unreachable_allocation_is_swept() {
        let dropped = AtomicBool::new(false);
        let mut gc = GCAllocator::default();
        gc.alloc(GotDropped(&dropped));
        // `()` traces nothing, so the allocation is unreachable and freed.
        gc.gc(&());
        assert!(dropped.load(Ordering::Acquire));
    }

    #[test]
    fn rooted_allocation_survives_until_unrooted() {
        let dropped = AtomicBool::new(false);
        let mut gc = GCAllocator::default();
        let got_dropped = gc.alloc(GotDropped(&dropped));
        let gc_root = gc.alloc(GCRoot(got_dropped));
        // While the root is used, repeated collections must not free either
        // allocation.
        // SAFETY: `gc_root` is kept alive across the calls by being the root.
        unsafe {
            for _ in 0..4 {
                gc.gc(&*gc_root);
            }
        }
        assert!(!dropped.load(Ordering::Acquire));
        // Collect with an empty root: now both allocations are unreachable.
        gc.gc(&());
        assert!(dropped.load(Ordering::Acquire));
    }
}