diff --git a/src/lib.rs b/src/lib.rs
index 52af6cb..0bc1d06 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -29,32 +29,6 @@ const BLOCK_REF_BLOCK_AMOUNT: usize = BLOCK_REF_BLOCK_SIZE / std::mem::size_of::
 /// a terrible allocator that mmaps every single allocation. it's horrible. yeah.
 pub struct Awwoc;
 
-unsafe fn alloc_block_ref_block() -> Option<NonNull<BlockRef>> {
-    let new_ptr = map::map(BLOCK_REF_BLOCK_SIZE)?;
-
-    // we have to allocate some space for the BlockRefs themselves
-
-    let block = new_ptr.cast::<BlockRef>();
-    Some(block)
-}
-
-unsafe impl GlobalAlloc for Awwoc {
-    unsafe fn alloc(&self, layout: std::alloc::Layout) -> *mut u8 {
-        let mut root = lock(&ROOT);
-
-        match root.alloc_inner(layout) {
-            Some(ptr) => ptr.as_ptr(),
-            None => null_mut(),
-        }
-    }
-
-    unsafe fn dealloc(&self, ptr: *mut u8, _layout: std::alloc::Layout) {
-        let mut root = lock(&ROOT);
-
-        root.dealloc(ptr);
-    }
-}
-
 static ROOT: Mutex<RootNode> = Mutex::new(RootNode::new());
 
 /// ┌──────────────────────────────────────────────────────────────────────────┐
@@ -248,6 +222,32 @@ impl RootNode {
     }
 }
 
+unsafe fn alloc_block_ref_block() -> Option<NonNull<BlockRef>> {
+    let new_ptr = map::map(BLOCK_REF_BLOCK_SIZE)?;
+
+    // we have to allocate some space for the BlockRefs themselves
+
+    let block = new_ptr.cast::<BlockRef>();
+    Some(block)
+}
+
+unsafe impl GlobalAlloc for Awwoc {
+    unsafe fn alloc(&self, layout: std::alloc::Layout) -> *mut u8 {
+        let mut root = lock(&ROOT);
+
+        match root.alloc_inner(layout) {
+            Some(ptr) => ptr.as_ptr(),
+            None => null_mut(),
+        }
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, _layout: std::alloc::Layout) {
+        let mut root = lock(&ROOT);
+
+        root.dealloc(ptr);
+    }
+}
+
 // SAFETY: I guess
 unsafe impl Send for RootNode {}
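For context, here is a minimal sketch of how an allocator like this gets wired in once the `GlobalAlloc` impl above exists: the standard `#[global_allocator]` attribute makes every heap allocation in the binary go through `Awwoc::alloc` and `Awwoc::dealloc`. Nothing below appears in the diff itself, and the `use awwoc::Awwoc;` path is only an assumption about the crate name.

// hypothetical consumer crate; assumes the library above is importable as `awwoc`
use awwoc::Awwoc;

// register Awwoc as the global allocator for this binary
#[global_allocator]
static GLOBAL: Awwoc = Awwoc;

fn main() {
    // these heap allocations are now routed through Awwoc::alloc / Awwoc::dealloc
    let v = vec![1u8, 2, 3];
    let s = String::from("mmapped, horribly");
    println!("{v:?} {s}");
}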