mirror of https://github.com/Noratrieb/awwoc.git, synced 2026-01-14 11:45:03 +01:00
commit 763b057fe3
yeah

5 changed files with 241 additions and 0 deletions
.gitignore (vendored, Normal file, 2 additions)
@@ -0,0 +1,2 @@
/target
/Cargo.lock
Cargo.toml (Normal file, 9 additions)
@@ -0,0 +1,9 @@
[package]
name = "awwoc"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
libc = "0.2.132"
src/lib.rs (Normal file, 163 additions)
@@ -0,0 +1,163 @@
#![feature(strict_provenance)]
#![allow(dead_code)]

use std::{
    alloc::GlobalAlloc,
    io::Write,
    mem,
    ptr::{addr_of_mut, null_mut, NonNull},
    sync::{Mutex, MutexGuard},
};

mod map;

fn lock<T>(mutex: &Mutex<T>) -> MutexGuard<'_, T> {
    // ignore lock poisoning: either arm hands back the guard
    match mutex.lock().map_err(|e| e.into_inner()) {
        Ok(t) => t,
        Err(t) => t,
    }
}

// BlockRef metadata lives in page-sized chunks holding this many entries each
const BLOCK_REF_BLOCK_SIZE: usize = 4096;
const BLOCK_REF_BLOCK_AMOUNT: usize = BLOCK_REF_BLOCK_SIZE / std::mem::size_of::<BlockRef>();

pub struct Awwoc;

unsafe fn allow_block_ref_block() -> Option<NonNull<BlockRef>> {
    let new_ptr = map::map(BLOCK_REF_BLOCK_SIZE)?;

    // we have to allocate some space for the BlockRefs themselves

    let block = new_ptr.cast::<BlockRef>();
    Some(block)
}

unsafe fn allow_inner(layout: std::alloc::Layout) -> Option<NonNull<u8>> {
    // SAFETY: soup

    let mut root = lock(&BLOCK);

    // first, try to find something in the free list
    if let Some(mut free_block) = root.next_free_block {
        let mut prev_next_ptr = addr_of_mut!(root.next_free_block);
        loop {
            let block_ref_ptr = free_block.as_ptr();
            let block_ref = block_ref_ptr.read();

            // only reuse a freed block that is at least as large as this request
            if block_ref.size >= layout.size() {
                // unlink it from the free list and hand out its data pointer
                prev_next_ptr.write(block_ref.next_free_block);
                (*block_ref_ptr).next_free_block = None;
                return NonNull::new(block_ref.start);
            }

            // remember where this node's next-pointer lives so its successor can be unlinked
            prev_next_ptr = addr_of_mut!((*block_ref_ptr).next_free_block);
            match block_ref.next_free_block {
                Some(block) => free_block = block,
                None => break,
            }
        }
    }

    // nothing free, we have to allocate
    let first_block = match root.first_block {
        Some(block) => block,
        None => {
            let block_ref_block = allow_block_ref_block()?;
            root.first_block = Some(block_ref_block);

            block_ref_block
        }
    };

    let prev_last_block = root.last_block;

    let new_block_ptr = if root.block_count < BLOCK_REF_BLOCK_AMOUNT {
        // just append another block
        let ptr = first_block.as_ptr().add(root.block_count);
        root.last_block = NonNull::new(ptr);
        ptr
    } else {
        // the first BlockRef page is full; chain a fresh page after the current last block
        let new_block_ref_block = allow_block_ref_block()?;
        let last_ptr = root.last_block?;

        (*last_ptr.as_ptr()).next = Some(new_block_ref_block);

        root.last_block = Some(new_block_ref_block);

        new_block_ref_block.as_ptr()
    };

    // the user data gets its own mapping; mmap returns page-aligned memory,
    // so any alignment up to a page is satisfied
    let size = layout.size();
    let new_data_ptr = map::map(size)?;

    root.block_count += 1;

    if let Some(prev_last_block) = prev_last_block {
        (*prev_last_block.as_ptr()).next = NonNull::new(new_block_ptr);
    }

    new_block_ptr.write(BlockRef {
        start: new_data_ptr.as_ptr(),
        size,
        next: None,
        next_free_block: None,
    });

    Some(new_data_ptr)
}

unsafe impl GlobalAlloc for Awwoc {
    unsafe fn alloc(&self, layout: std::alloc::Layout) -> *mut u8 {
        match allow_inner(layout) {
            Some(ptr) => ptr.as_ptr(),
            None => null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, _layout: std::alloc::Layout) {
        // freeing is currently disabled: this early return makes dealloc a no-op,
        // so the free-list code below is never reached and memory is simply leaked
        return;

        let mut root = lock(&BLOCK);

        let mut option_block = root.first_block;
        while let Some(block) = option_block {
            let block_ptr = block.as_ptr();

            if (*block_ptr).start == ptr {
                // push this block onto the front of the free list
                let free = mem::replace(&mut root.next_free_block, Some(block));
                (*block_ptr).next_free_block = free;
                return;
            }

            option_block = (*block_ptr).next;
        }

        let _ = std::io::stderr().write_all("invalid pointer passed to dealloc\n".as_bytes());
        libc::abort();
    }
}

static BLOCK: Mutex<RootNode> = Mutex::new(RootNode {
    first_block: None,
    last_block: None,
    block_count: 0,
    next_free_block: None,
});

struct RootNode {
    first_block: Option<NonNull<BlockRef>>,
    last_block: Option<NonNull<BlockRef>>,
    block_count: usize,
    next_free_block: Option<NonNull<BlockRef>>,
}

unsafe impl Send for RootNode {}
unsafe impl Sync for RootNode {}

#[repr(C)]
struct BlockRef {
    start: *mut u8,
    size: usize,
    next: Option<NonNull<BlockRef>>,
    /// only present on freed blocks
    next_free_block: Option<NonNull<BlockRef>>,
}
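For context on the GlobalAlloc impl above: a downstream crate opts in with the #[global_allocator] attribute, exactly as tests/collections.rs does later in this commit. A minimal standalone sketch of that usage (the binary and its names are assumed here for illustration and are not part of the commit; building it needs a nightly toolchain because lib.rs enables #![feature(strict_provenance)]):

use awwoc::Awwoc;

// Route every heap allocation in this program through Awwoc.
#[global_allocator]
static ALLOC: Awwoc = Awwoc;

fn main() {
    // Box, Vec and String now call Awwoc::alloc / Awwoc::dealloc under the hood.
    let boxed = Box::new(42u32);
    let mut names = Vec::new();
    names.push(String::from("awwoc"));
    assert_eq!(*boxed, 42);
    assert_eq!(names[0], "awwoc");
}

Since dealloc above is currently a no-op, such a program never returns memory to the OS before it exits.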
src/map.rs (Normal file, 39 additions)
@@ -0,0 +1,39 @@
use std::ptr::{self, NonNull};

pub unsafe fn map(len: usize) -> Option<NonNull<u8>> {
    let prot = libc::PROT_READ | libc::PROT_WRITE;
    let flags = libc::MAP_PRIVATE | libc::MAP_ANONYMOUS;
    // the fd is ignored for anonymous mappings, but portable code passes -1
    let ptr = libc::mmap(ptr::null_mut(), len, prot, flags, -1, 0).cast();

    if is_invalid(ptr) {
        None
    } else {
        Some(NonNull::new_unchecked(ptr))
    }
}

pub unsafe fn unmap(addr: *mut u8, len: usize) {
    libc::munmap(addr.cast(), len);
}

pub fn is_invalid(ptr: *mut u8) -> bool {
    // mmap reports failure by returning MAP_FAILED, the all-ones address
    ptr.is_null() || ptr.addr() == 0xffffffffffffffff
}

#[cfg(test)]
mod tests {
    #[test]
    fn map_write_unmap() {
        unsafe {
            let ptr = super::map(1000).unwrap();
            let ptr = ptr.as_ptr();

            // anonymous mappings start out zero-filled
            assert_eq!(ptr.read(), 0);

            ptr.write_volatile(5);
            assert_eq!(ptr.read_volatile(), 5);

            super::unmap(ptr, 1000);
        }
    }
}
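A possible alternative for the sentinel comparison in is_invalid, sketched here under the assumption that the crate keeps its libc dependency: the libc crate exposes MAP_FAILED (defined as (void*)-1), which avoids spelling out the 64-bit all-ones address by hand.

// Hypothetical variant of `is_invalid` (not in this commit): rely on libc's
// MAP_FAILED constant rather than hard-coding the all-ones address.
pub fn is_map_failed(ptr: *mut u8) -> bool {
    ptr.is_null() || ptr.cast::<libc::c_void>() == libc::MAP_FAILED
}

Comparing against MAP_FAILED also keeps the check correct on targets where usize is not 64 bits wide.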
tests/collections.rs (Normal file, 28 additions)
@@ -0,0 +1,28 @@
use std::collections::BTreeMap;

use awwoc::Awwoc;

#[global_allocator]
static AWWOC: Awwoc = Awwoc;

#[test]
fn vec() {
    let mut vec = Vec::new();

    for i in 0..10_000 {
        vec.push(i);
    }

    assert!(vec.iter().enumerate().all(|(i, &item)| i == item));
}

#[test]
fn btree_map() {
    let mut map = BTreeMap::new();

    for i in (0..1000).map(|i| i * 3) {
        map.insert(i, i + 10);
    }

    assert!(map.iter().all(|(k, v)| *v == *k + 10));
}