make raw vec to share code

nora 2021-12-21 19:49:25 +01:00
parent 180228337d
commit 0550582d03
4 changed files with 362 additions and 320 deletions


@@ -1,18 +1,9 @@
use crate::Vechonk;
use crate::{RawVechonk, Vechonk};
use core::marker::PhantomData;
use core::ptr::NonNull;
/// An iterator over the elements of a [`Vechonk`]
pub struct Iter<'a, T: ?Sized> {
/// A pointer to the first element
ptr: NonNull<u8>,
/// How many elements the Vechonk has
len: usize,
/// How much memory the Vechonk owns
cap: usize,
/// How much memory has been used by the elements; this is where the next element starts
elem_size: usize,
/// The next element the iterator will return
raw: RawVechonk<T>,
current_index: usize,
_marker: PhantomData<&'a T>,
}
@@ -20,10 +11,7 @@ pub struct Iter<'a, T: ?Sized> {
impl<'a, T: ?Sized> Iter<'a, T> {
pub(super) fn new(chonk: &'a Vechonk<T>) -> Iter<'a, T> {
Self {
ptr: chonk.ptr,
len: chonk.len,
cap: chonk.cap,
elem_size: chonk.elem_size,
raw: chonk.raw.copy(),
current_index: 0,
_marker: PhantomData,
}
@@ -34,11 +22,21 @@ impl<'a, T: ?Sized> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
todo!()
if self.current_index == self.raw.len {
return None;
}
// SAFETY: We just did a bounds check above
let ptr = unsafe { self.raw.get_unchecked_ptr(self.current_index) };
self.current_index += 1;
// SAFETY: We rely on `get_unchecked_ptr` returning a valid pointer, which it does, see its SAFETY comments
unsafe { Some(&*ptr) }
}
fn size_hint(&self) -> (usize, Option<usize>) {
let count = self.len - self.current_index;
let count = self.raw.len - self.current_index;
(count, Some(count))
}
@@ -46,6 +44,6 @@ impl<'a, T: ?Sized> Iterator for Iter<'a, T> {
impl<'a, T: ?Sized> ExactSizeIterator for Iter<'a, T> {
fn len(&self) -> usize {
self.len - self.current_index
self.raw.len - self.current_index
}
}
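For orientation, a minimal usage sketch of the refactored iterator (assuming the crate is named `vechonk`; the capacity value is arbitrary):

use vechonk::Vechonk;

fn main() {
    let mut chonk: Vechonk<str> = Vechonk::with_capacity(96);
    chonk.push("uwu".into());
    chonk.push("owo".into());

    // `Iter` yields `&str` in insertion order; `len` comes from `ExactSizeIterator`
    assert_eq!(chonk.iter().len(), 2);
    for s in chonk.iter() {
        assert!(!s.is_empty());
    }
}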


@@ -36,17 +36,14 @@
//! ```
mod iter;
mod raw;
mod test;
extern crate alloc;
use crate::raw::RawVechonk;
use alloc::boxed::Box;
use core::alloc::Layout;
use core::marker::PhantomData;
use core::num::NonZeroUsize;
use core::ops::{Index, IndexMut};
use core::ptr::{NonNull, Pointee};
use core::{mem, ptr};
pub use iter::Iter;
@@ -54,34 +51,13 @@ pub use iter::Iter;
///
/// note: it does not run destructors for now, thankfully that is 100% safe :))))
pub struct Vechonk<T: ?Sized> {
/// A pointer to the first element
ptr: NonNull<u8>,
/// How many elements the Vechonk has
len: usize,
/// How much memory the Vechonk owns
cap: usize,
/// How much memory has been used by the elements; this is where the next element starts
elem_size: usize,
_marker: PhantomData<T>,
}
/// The offset + metadata for each element, stored at the end
struct PtrData<T: ?Sized> {
offset: usize,
meta: <T as Pointee>::Metadata,
}
impl<T: ?Sized> Copy for PtrData<T> {}
impl<T: ?Sized> Clone for PtrData<T> {
fn clone(&self) -> Self {
*self
}
raw: RawVechonk<T>,
}
impl<T: ?Sized> Vechonk<T> {
/// The number of elements in the `Vechonk`, O(1)
pub const fn len(&self) -> usize {
self.len
self.raw.len
}
/// Whether the `Vechonk` is empty, O(1)
@@ -92,151 +68,27 @@ impl<T: ?Sized> Vechonk<T> {
/// Create a new empty Vechonk that doesn't allocate anything
pub const fn new() -> Self {
Self {
ptr: NonNull::dangling(),
len: 0,
cap: 0,
elem_size: 0,
_marker: PhantomData,
raw: RawVechonk::new(),
}
}
/// Create a new Vechonk that allocates `capacity` bytes. `capacity` gets shrunk down
/// to the previous multiple of the alignment of the `usize` offset + the metadata of `T`
pub fn with_capacity(capacity: usize) -> Self {
let capacity = force_align(capacity, Self::data_align());
let mut vechonk = Self::new();
if capacity == 0 {
return vechonk;
Self {
raw: RawVechonk::with_capacity(capacity),
}
// SAFETY: capacity has been checked to not be 0 and the len is 0
unsafe {
vechonk.realloc(NonZeroUsize::new_unchecked(capacity));
}
vechonk
}
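`force_align` itself is defined elsewhere in the crate and is not part of this diff; going by the doc comment above, a plausible sketch (an assumption, not the crate's actual definition):

// Hypothetical reconstruction of `force_align`: round `size` down to the
// previous multiple of `align`
const fn force_align(size: usize, align: usize) -> usize {
    size - (size % align)
}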
/// Pushes a new element into the [`Vechonk`], growing the allocation if it runs out of capacity
/// todo: don't take a box but some U that can be unsized into T
pub fn push(&mut self, element: Box<T>) {
let elem_size = mem::size_of_val(element.as_ref());
let elem_align = mem::align_of_val(element.as_ref());
let elem_ptr = Box::into_raw(element);
let meta = ptr::metadata(elem_ptr);
let data_size = mem::size_of::<PtrData<T>>();
let elem_offset = self.elem_size;
// SAFETY: `self.elem_size` can't be longer than the allocation, because `PtrData<T>` needs space as well
let required_align_offset =
unsafe { self.ptr.as_ptr().add(elem_offset).align_offset(elem_align) };
if required_align_offset == usize::MAX {
panic!(
"Cannot align pointer for element with size: {}, alignment: {}",
elem_size, elem_align
);
}
// grow if there is not enough space left
if self.needs_grow(elem_size + data_size + required_align_offset) {
self.regrow(self.cap + elem_size + data_size);
}
// Copy the element to the new location
// Calculate the dest pointer again because we might have realloced
// SAFETY: `self.elem_size` can't be longer than the allocation, because `PtrData<T>` needs space as well
let dest_ptr = unsafe { self.ptr.as_ptr().add(elem_offset) };
let dest_align_offset = dest_ptr.align_offset(elem_align);
let dest_ptr = unsafe { dest_ptr.add(dest_align_offset) };
let data = PtrData {
offset: elem_offset + dest_align_offset,
meta,
};
// SAFETY: `elem_ptr` comes from `Box`, and is therefore valid to read from for the size
// We have made sure above that we have more than `elem_size` bytes free
// The two allocations cannot overlap, since the `Box` owned its contents, and so do we
// `dest_ptr` has been aligned above
unsafe {
ptr::copy_nonoverlapping::<u8>(elem_ptr as _, dest_ptr, elem_size);
}
let data_offset = self.offset_for_data(self.len);
// SAFETY: The offset will always be less than `self.cap`, because we can't have more than `self.len` `PtrData`
let data_ptr = unsafe { self.ptr.as_ptr().add(data_offset) };
let data_ptr = data_ptr as *mut PtrData<T>;
// SAFETY: The pointer is aligned, because `self.ptr` and `self.cap` are
// It's not out of bounds for our allocation, see above
unsafe {
*data_ptr = data;
}
self.elem_size += elem_size;
self.len += 1;
// SAFETY: This was allocated by `Box`, so we know that it is valid.
// The ownership of the value was transferred to `Vechonk` by copying it out
unsafe {
alloc::alloc::dealloc(
elem_ptr as _,
Layout::from_size_align(elem_size, mem::align_of_val(&*elem_ptr)).unwrap(),
)
}
self.raw.push(element)
}
/// Get the last element, returns `None` if the `Vechonk` is empty
pub fn pop(&mut self) -> Option<Box<T>> {
if self.is_empty() {
return None;
}
// SAFETY: `self.len - 1` is the last element, and therefore not out of bounds
let data = unsafe { self.get_data(self.len - 1) };
// SAFETY: We can assume that the `offset` from `data` is not out of bounds
let elem_ptr = unsafe { self.ptr.as_ptr().add(data.offset) };
// allocate a new `Box` for the return value
let elem_fat_ptr = ptr::from_raw_parts_mut::<T>(elem_ptr as *mut (), data.meta);
// SAFETY: The metadata has been preserved, and the pointer has been properly aligned and initialized
// when the element was added
let elem_fat_ref = unsafe { &*elem_fat_ptr };
let element_box_layout = Layout::for_value(elem_fat_ref);
// SAFETY: TODO does not work with ZST
let box_ptr = unsafe { alloc::alloc::alloc(element_box_layout) };
if box_ptr.is_null() {
alloc::alloc::handle_alloc_error(element_box_layout);
}
let elem_size = mem::size_of_val(elem_fat_ref);
// SAFETY: The new allocation doesn't overlap, `box_ptr` was just allocated and is non-null
// For `elem_ptr`, see safety comments above, the size was obtained above as well
unsafe {
ptr::copy_nonoverlapping(elem_ptr, box_ptr, elem_size);
}
// SAFETY: See above for both variables. `data.meta` is the valid metadata for the element
let box_fat_ptr = ptr::from_raw_parts_mut(box_ptr as *mut (), data.meta);
// We don't need to touch our own memory; just decrement the `len` and leave the old bytes be,
// they are now semantically uninitialized
self.len -= 1;
// SAFETY: We decremented the `len`, so no one else can get access to the element,
// therefore it's safe to transfer ownership to the Box here
let return_box = unsafe { Box::from_raw(box_fat_ptr) };
Some(return_box)
self.raw.pop()
}
pub fn iter(&self) -> Iter<T> {
@@ -245,7 +97,7 @@ impl<T: ?Sized> Vechonk<T> {
/// Get a reference to an element at the index. Returns `None` if the index is out of bounds
pub fn get(&self, index: usize) -> Option<&T> {
if index < self.len {
if index < self.len() {
// SAFETY: The index has been checked above
unsafe { Some(self.get_unchecked(index)) }
} else {
@@ -255,7 +107,7 @@ impl<T: ?Sized> Vechonk<T> {
/// Get a mutable reference to an element at the index. Returns `None` if the index is out of bounds
pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
if index < self.len {
if index < self.len() {
// SAFETY: The index has been checked above
unsafe { Some(self.get_unchecked_mut(index)) }
} else {
@@ -269,7 +121,7 @@ impl<T: ?Sized> Vechonk<T> {
// SAFETY: The metadata is only assigned directly from the pointer metadata of the original object and therefore valid
// The pointer is calculated from the offset, which is also valid
// The pointer is aligned, because it has been aligned manually in `Self::push`
unsafe { &*self.get_unchecked_ptr(index) }
unsafe { &*self.raw.get_unchecked_ptr(index) }
}
/// # Safety
@@ -279,146 +131,16 @@ impl<T: ?Sized> Vechonk<T> {
// The pointer is calculated from the offset, which is also valid
// The pointer is aligned, because it has been aligned manually in `Self::push`
// This function takes `&mut self`, so we have exclusive access to ourselves
unsafe { &mut *self.get_unchecked_ptr(index) }
}
// private helper methods
fn regrow(&mut self, min_size: usize) {
// new_cap must be properly "aligned" for `PtrData<T>`
let new_cap = force_align(min_size * 2, Self::data_align());
let old_ptr = self.ptr.as_ptr();
let old_cap = self.cap;
let last_data_index = self.len.saturating_sub(1);
let old_data_offset = self.offset_for_data(last_data_index);
// SAFETY: `new_cap` can't be 0, since `min_size` always includes the non-zero size of a `PtrData<T>`
// We will copy the elements over
unsafe {
self.realloc(NonZeroUsize::new_unchecked(new_cap));
}
// copy the elements first
// SAFETY: both pointers point to the start of owned allocations that are at least `self.elem_size` bytes large
unsafe {
ptr::copy_nonoverlapping(old_ptr, self.ptr.as_ptr(), self.elem_size);
}
// then copy the data
// SAFETY: both pointers have been offset by less than `self.cap`, and the `data_section_size` fills the allocation perfectly
unsafe {
let new_data_ptr = self.ptr.as_ptr().add(self.offset_for_data(last_data_index));
ptr::copy_nonoverlapping(
old_ptr.add(old_data_offset),
new_data_ptr,
self.data_section_size(),
)
}
// now free the old data
// SAFETY: This was previously allocated and is not used anymore
unsafe {
Self::dealloc(old_cap, old_ptr);
}
}
/// Reallocs the `Vechonk`, setting its capacity to `size`. This will not copy any elements. This will put the `Vechonk`
/// into an invalid state, since the `len` is still the length of the elements in the old allocation.
///
/// This doesn't free any memory
///
/// # Safety
/// The caller must either set the `len` to zero, or copy the elements to the new allocation by saving
/// `self.ptr` before calling this function.
unsafe fn realloc(&mut self, size: NonZeroUsize) {
let layout = Layout::from_size_align(size.get(), Self::data_align()).unwrap();
// SAFETY: layout is guaranteed to have a non-zero size
let alloced_ptr;
// we only care about the memory being zeroed for debugging, since it makes inspecting it easier
#[cfg(debug_assertions)]
unsafe {
alloced_ptr = alloc::alloc::alloc_zeroed(layout)
}
#[cfg(not(debug_assertions))]
unsafe {
alloced_ptr = alloc::alloc::alloc(layout)
}
self.ptr =
NonNull::new(alloced_ptr).unwrap_or_else(|| alloc::alloc::handle_alloc_error(layout));
self.cap = size.get();
}
/// Get the data for the index
/// # Safety
/// `index` must not be out of bounds
unsafe fn get_data(&self, index: usize) -> PtrData<T> {
let data_offset = self.offset_for_data(index);
// SAFETY: The offset will always be less than `self.cap`, because we can't have more than `self.len` `PtrData`
let data_ptr = unsafe { self.ptr.as_ptr().add(data_offset) };
let data_ptr = data_ptr as *mut PtrData<T>;
// SAFETY: The pointer is aligned because `self.ptr` is aligned and `data_offset` is a multiple of the alignment
// The value behind it is always a `PtrData<T>`
unsafe { *data_ptr }
}
/// Get a raw ptr to an element. Be careful about casting this into a `&mut T`
/// # Safety
/// The index must be in bounds
unsafe fn get_unchecked_ptr(&self, index: usize) -> *mut T {
// SAFETY: We can assume that `index` is valid
let data = unsafe { self.get_data(index) };
let elem_ptr = unsafe { self.ptr.as_ptr().add(data.offset) };
ptr::from_raw_parts_mut(elem_ptr as *mut (), data.meta)
}
// SAFETY: The allocation must be owned by `ptr` and have the length `cap`
unsafe fn dealloc(cap: usize, ptr: *mut u8) {
if cap == 0 {
return;
}
// SAFETY: Align must be valid since it's obtained using `align_of`
let layout =
unsafe { Layout::from_size_align_unchecked(cap, mem::align_of::<PtrData<T>>()) };
unsafe { alloc::alloc::dealloc(ptr, layout) };
}
/// Returns a multiple of the alignment of `PtrData<T>`, since `self.cap` is such a multiple and so is the size of `PtrData<T>`
const fn offset_for_data(&self, index: usize) -> usize {
self.cap
.saturating_sub(mem::size_of::<PtrData<T>>() * (index + 1))
}
fn needs_grow(&self, additional_size: usize) -> bool {
additional_size > self.cap - (self.elem_size + self.data_section_size())
}
const fn data_section_size(&self) -> usize {
self.len * mem::size_of::<PtrData<T>>()
}
const fn data_align() -> usize {
mem::align_of::<PtrData<T>>()
unsafe { &mut *self.raw.get_unchecked_ptr(index) }
}
/// used for debugging memory layout
/// safety: cap must be 96
#[allow(dead_code)]
#[doc(hidden)]
#[cfg(debug_assertions)]
pub unsafe fn debug_chonk(&self) {
let array = unsafe { *(self.ptr.as_ptr() as *mut [u8; 96]) };
let array = unsafe { *(self.raw.ptr.as_ptr() as *mut [u8; 96]) };
panic!("{:?}", array)
}
@@ -428,8 +150,8 @@ impl<T: ?Sized> Index<usize> for Vechonk<T> {
type Output = T;
fn index(&self, index: usize) -> &Self::Output {
if index >= self.len {
panic!("Out of bounds, index {} for len {}", index, self.len);
if index >= self.len() {
panic!("Out of bounds, index {} for len {}", index, self.len());
}
// SAFETY: The index is not out of bounds
@@ -439,8 +161,8 @@ impl<T: ?Sized> Index<usize> for Vechonk<T> {
impl<T: ?Sized> IndexMut<usize> for Vechonk<T> {
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
if index >= self.len {
panic!("Out of bounds, index {} for len {}", index, self.len);
if index >= self.len() {
panic!("Out of bounds, index {} for len {}", index, self.len());
}
// SAFETY: The index is not out of bounds
@@ -451,8 +173,9 @@ impl<T: ?Sized> IndexMut<usize> for Vechonk<T> {
/// don't bother with destructors for now
impl<T: ?Sized> Drop for Vechonk<T> {
fn drop(&mut self) {
// SAFETY: The `Vechonk` owns its allocation, and it has the length `self.raw.cap`
unsafe {
Self::dealloc(self.cap, self.ptr.as_ptr());
RawVechonk::<T>::dealloc(self.raw.cap, self.raw.ptr.as_ptr());
}
}
}
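Together with `Index` above, a short sketch of the ownership round-trip that `push` and `pop` provide (destructors aside; capacity value arbitrary):

let mut chonk: Vechonk<str> = Vechonk::with_capacity(96);
chonk.push("hi".into());
assert_eq!(&chonk[0], "hi"); // `Index` panics for indices >= len()
let owned: Box<str> = chonk.pop().unwrap(); // `pop` re-boxes the element
assert_eq!(&*owned, "hi");
assert!(chonk.is_empty());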

src/raw.rs (new file, 321 lines)

@@ -0,0 +1,321 @@
use crate::force_align;
use alloc::boxed::Box;
use core::alloc::Layout;
use core::marker::PhantomData;
use core::num::NonZeroUsize;
use core::ptr::{NonNull, Pointee};
use core::{mem, ptr};
/// The offset + metadata for each element, stored at the end
pub struct PtrData<T: ?Sized> {
offset: usize,
meta: <T as Pointee>::Metadata,
}
impl<T: ?Sized> Copy for PtrData<T> {}
impl<T: ?Sized> Clone for PtrData<T> {
fn clone(&self) -> Self {
*self
}
}
/// `RawVechonk` is a view onto a heap memory layout like the one shown in the crate-level docs ([`crate`])
///
/// It can be owned or borrowed; ownership semantics are added by the wrapper
pub struct RawVechonk<T: ?Sized> {
/// A pointer to the first element
pub ptr: NonNull<u8>,
/// How many elements the Vechonk has
pub len: usize,
/// How much memory the Vechonk owns
pub cap: usize,
/// How much memory has been used by the elements; this is where the next element starts
pub elem_size: usize,
pub _marker: PhantomData<T>,
}
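Reading the fields together with `push` and `offset_for_data` below, the layout a `RawVechonk` views is roughly this (widths illustrative):

// ptr                                                    ptr + cap
// v                                                      v
// [ elem 0 | elem 1 | ..    free    .. | data[1] | data[0] ]
//  '----- elem_size -----'              '-- data section --'
//
// Elements grow upward from the front; `PtrData` entries grow downward from
// the back, so the entry for index i sits at offset
// `cap - size_of::<PtrData<T>>() * (i + 1)`. A standalone statement of the
// invariant that `needs_grow` below maintains (a sketch, not in the diff):
fn invariant_holds<T: ?Sized>(raw: &RawVechonk<T>) -> bool {
    raw.elem_size + raw.len * core::mem::size_of::<PtrData<T>>() <= raw.cap
}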
impl<T: ?Sized> RawVechonk<T> {
/// Create a shallow copy of this `RawVechonk`
pub const fn copy(&self) -> Self {
Self {
ptr: self.ptr,
len: self.len,
cap: self.cap,
elem_size: self.elem_size,
_marker: PhantomData,
}
}
pub const fn new() -> Self {
Self {
ptr: NonNull::dangling(),
len: 0,
cap: 0,
elem_size: 0,
_marker: PhantomData,
}
}
pub fn with_capacity(capacity: usize) -> Self {
let capacity = force_align(capacity, Self::data_align());
let mut vechonk = Self::new();
if capacity == 0 {
return vechonk;
}
// SAFETY: capacity has been checked to not be 0 and the len is 0
unsafe {
vechonk.realloc(NonZeroUsize::new_unchecked(capacity));
}
vechonk
}
pub fn push(&mut self, element: Box<T>) {
let elem_size = mem::size_of_val(element.as_ref());
let elem_align = mem::align_of_val(element.as_ref());
let elem_ptr = Box::into_raw(element);
let meta = ptr::metadata(elem_ptr);
let data_size = mem::size_of::<PtrData<T>>();
let elem_offset = self.elem_size;
// SAFETY: `self.elem_size` can't be longer than the allocation, because `PtrData<T>` needs space as well
let required_align_offset =
unsafe { self.ptr.as_ptr().add(elem_offset).align_offset(elem_align) };
if required_align_offset == usize::MAX {
panic!(
"Cannot align pointer for element with size: {}, alignment: {}",
elem_size, elem_align
);
}
// grow if there is not enough space left
if self.needs_grow(elem_size + data_size + required_align_offset) {
self.regrow(self.cap + elem_size + data_size);
}
// Copy the element to the new location
// Calculate the dest pointer again because we might have realloced
// SAFETY: `self.elem_size` can't be longer than the allocation, because `PtrData<T>` needs space as well
let dest_ptr = unsafe { self.ptr.as_ptr().add(elem_offset) };
let dest_align_offset = dest_ptr.align_offset(elem_align);
let dest_ptr = unsafe { dest_ptr.add(dest_align_offset) };
let data = PtrData {
offset: elem_offset + dest_align_offset,
meta,
};
// SAFETY: `elem_ptr` comes from `Box`, and is therefore valid to read from for the size
// We have made sure above that we have more than `elem_size` bytes free
// The two allocations cannot overlap, since the `Box` owned its contents, and so do we
// `dest_ptr` has been aligned above
unsafe {
ptr::copy_nonoverlapping::<u8>(elem_ptr as _, dest_ptr, elem_size);
}
let data_offset = self.offset_for_data(self.len);
// SAFETY: The offset will always be less than `self.cap`, because we can't have more than `self.len` `PtrData`
let data_ptr = unsafe { self.ptr.as_ptr().add(data_offset) };
let data_ptr = data_ptr as *mut PtrData<T>;
// SAFETY: The pointer is aligned, because `self.ptr` and `self.cap` are
// It's not out of bounds for our allocation, see above
unsafe {
*data_ptr = data;
}
self.elem_size = data.offset + elem_size; // `data.offset` includes the alignment padding
self.len += 1;
// SAFETY: This was allocated by `Box`, so we know that it is valid.
// The ownership of the value was transferred to `Vechonk` by copying it out
unsafe {
alloc::alloc::dealloc(
elem_ptr as _,
Layout::from_size_align(elem_size, mem::align_of_val(&*elem_ptr)).unwrap(),
)
}
}
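A worked trace of the bookkeeping above (an illustration, assuming a 64-bit target, `T = dyn core::any::Any`, and spare capacity):

// push(Box::new(1u8)):  elem_offset = 0, align 1 -> stored at offset 0,
//                       PtrData { offset: 0, .. }, elem_size becomes 1
// push(Box::new(2u64)): elem_offset = 1, align 8 -> dest_align_offset = 7,
//                       stored at offset 8, PtrData { offset: 8, .. },
//                       elem_size becomes 16, so the padding stays accounted for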
pub fn pop(&mut self) -> Option<Box<T>> {
if self.len == 0 {
return None;
}
// SAFETY: `self.len - 1` is the last element, and therefore not out of bounds
let data = unsafe { self.get_data(self.len - 1) };
// SAFETY: We can assume that the `offset` from `data` is not out of bounds
let elem_ptr = unsafe { self.ptr.as_ptr().add(data.offset) };
// allocate a new `Box` for the return value
let elem_fat_ptr = ptr::from_raw_parts_mut::<T>(elem_ptr as *mut (), data.meta);
// SAFETY: The metadata has been preserved, and the pointer has been properly aligned and initialized
// when the element was added
let elem_fat_ref = unsafe { &*elem_fat_ptr };
let element_box_layout = Layout::for_value(elem_fat_ref);
// SAFETY: TODO does not work with ZST
let box_ptr = unsafe { alloc::alloc::alloc(element_box_layout) };
if box_ptr.is_null() {
alloc::alloc::handle_alloc_error(element_box_layout);
}
let elem_size = mem::size_of_val(elem_fat_ref);
// SAFETY: The new allocation doesn't overlap, `box_ptr` was just allocated and is non-null
// For `elem_ptr`, see safety comments above, the size was obtained above as well
unsafe {
ptr::copy_nonoverlapping(elem_ptr, box_ptr, elem_size);
}
// SAFETY: See above for both variables. `data.meta` is the valid metadata for the element
let box_fat_ptr = ptr::from_raw_parts_mut(box_ptr as *mut (), data.meta);
// We don't need to touch our own memory; just decrement the `len` and leave the old bytes be,
// they are now semantically uninitialized
self.len -= 1;
// SAFETY: We decremented the `len`, so no one else can get access to the element,
// therefore it's safe to transfer ownership to the Box here
let return_box = unsafe { Box::from_raw(box_fat_ptr) };
Some(return_box)
}
/// Get a raw ptr to an element. Be careful about casting this into a `&mut T`
/// # Safety
/// The index must be in bounds
pub unsafe fn get_unchecked_ptr(&self, index: usize) -> *mut T {
// SAFETY: We can assume that `index` is valid
let data = unsafe { self.get_data(index) };
let elem_ptr = unsafe { self.ptr.as_ptr().add(data.offset) };
ptr::from_raw_parts_mut(elem_ptr as *mut (), data.meta)
}
fn regrow(&mut self, min_size: usize) {
// new_cap must be properly "aligned" for `PtrData<T>`
let new_cap = force_align(min_size * 2, Self::data_align());
let old_ptr = self.ptr.as_ptr();
let old_cap = self.cap;
let last_data_index = self.len.saturating_sub(1);
let old_data_offset = self.offset_for_data(last_data_index);
// SAFETY: `new_cap` can't be 0, since `min_size` always includes the non-zero size of a `PtrData<T>`
// We will copy the elements over
unsafe {
self.realloc(NonZeroUsize::new_unchecked(new_cap));
}
// copy the elements first
// SAFETY: both pointers point to the start of owned allocations that are at least `self.elem_size` bytes large
unsafe {
ptr::copy_nonoverlapping(old_ptr, self.ptr.as_ptr(), self.elem_size);
}
// then copy the data
// SAFETY: both pointers have been offset by less than `self.cap`, and the `data_section_size` fills the allocation perfectly
unsafe {
let new_data_ptr = self.ptr.as_ptr().add(self.offset_for_data(last_data_index));
ptr::copy_nonoverlapping(
old_ptr.add(old_data_offset),
new_data_ptr,
self.data_section_size(),
)
}
// now free the old data
// SAFETY: This was previously allocated and is not used anymore
unsafe {
Self::dealloc(old_cap, old_ptr);
}
}
/// Reallocs the `Vechonk`, setting its capacity to `size`. This will not copy any elements. This will put the `Vechonk`
/// into an invalid state, since the `len` is still the length of the elements in the old allocation.
///
/// This doesn't free any memory
///
/// # Safety
/// The caller must either set the `len` to zero, or copy the elements to the new allocation by saving
/// `self.ptr` before calling this function.
unsafe fn realloc(&mut self, size: NonZeroUsize) {
let layout = Layout::from_size_align(size.get(), Self::data_align()).unwrap();
// SAFETY: layout is guaranteed to have a non-zero size
let alloced_ptr;
// we only care about the memory being zeroed for debugging, since it makes inspecting it easier
#[cfg(debug_assertions)]
unsafe {
alloced_ptr = alloc::alloc::alloc_zeroed(layout)
}
#[cfg(not(debug_assertions))]
unsafe {
alloced_ptr = alloc::alloc::alloc(layout)
}
self.ptr =
NonNull::new(alloced_ptr).unwrap_or_else(|| alloc::alloc::handle_alloc_error(layout));
self.cap = size.get();
}
/// Get the data for the index
/// # Safety
/// `index` must not be out of bounds
unsafe fn get_data(&self, index: usize) -> PtrData<T> {
let data_offset = self.offset_for_data(index);
// SAFETY: The offset will always be less than `self.cap`, because we can't have more than `self.len` `PtrData`
let data_ptr = unsafe { self.ptr.as_ptr().add(data_offset) };
let data_ptr = data_ptr as *mut PtrData<T>;
// SAFETY: The pointer is aligned because `self.ptr` is aligned and `data_offset` is a multiple of the alignment
// The value behind it is always a `PtrData<T>`
unsafe { *data_ptr }
}
/// # Safety
/// The allocation must be owned by `ptr` and have the length `cap`
pub unsafe fn dealloc(cap: usize, ptr: *mut u8) {
if cap == 0 {
return;
}
// SAFETY: Align must be valid since it's obtained using `align_of`
let layout =
unsafe { Layout::from_size_align_unchecked(cap, mem::align_of::<PtrData<T>>()) };
unsafe { alloc::alloc::dealloc(ptr, layout) };
}
/// Returns a multiple of the alignment of `PtrData<T>`, since `self.cap` is such a multiple and so is the size of `PtrData<T>`
const fn offset_for_data(&self, index: usize) -> usize {
self.cap
.saturating_sub(mem::size_of::<PtrData<T>>() * (index + 1))
}
fn needs_grow(&self, additional_size: usize) -> bool {
additional_size > self.cap - (self.elem_size + self.data_section_size())
}
pub const fn data_section_size(&self) -> usize {
self.len * mem::size_of::<PtrData<T>>()
}
const fn data_align() -> usize {
mem::align_of::<PtrData<T>>()
}
}
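The data-section sizes asserted in the tests below fall out of `data_section_size` (64-bit target assumed): for a sized element type the metadata is `()`, so a `PtrData` is just the `usize` offset, while for `str` it also stores the `usize` length:

use core::mem::size_of;

// Sketch of the arithmetic behind the test assertions below
assert_eq!(size_of::<PtrData<u8>>(), 8);   // offset only, metadata is ()
assert_eq!(size_of::<PtrData<str>>(), 16); // offset + length metadata
// two sized elements   -> data section = 2 * 8  = 16 bytes
// two unsized elements -> data section = 2 * 16 = 32 bytes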


@@ -53,8 +53,8 @@ fn push_two_sized_elem() {
chonk.push(Box::new(2));
assert_eq!(chonk.len(), 2);
assert_eq!(chonk.elem_size, 2);
assert_eq!(chonk.data_section_size(), 16); // two indices
assert_eq!(chonk.raw.elem_size, 2);
assert_eq!(chonk.raw.data_section_size(), 16); // two indices
}
#[test]
@@ -65,8 +65,8 @@ fn push_two_unsized_elem() {
chonk.push("uwu".into());
assert_eq!(chonk.len(), 2);
assert_eq!(chonk.elem_size, 8);
assert_eq!(chonk.data_section_size(), 32); // two indices + lengths
assert_eq!(chonk.raw.elem_size, 8);
assert_eq!(chonk.raw.data_section_size(), 32); // two indices + lengths
}
#[test]