mut is back

nora 2021-12-22 14:39:17 +01:00
parent dbd07bb4c8
commit cd3182a056
4 changed files with 218 additions and 10 deletions

View file

@@ -1,4 +1,4 @@
use crate::{MutGuard, RawVechonk, Vechonk};
use alloc::boxed::Box;
use core::marker::PhantomData;
use core::mem;
@@ -50,6 +50,52 @@ impl<'a, T: ?Sized> ExactSizeIterator for Iter<'a, T> {
}
}
/// An iterator over the elements of a [`Vechonk`], yielding [`MutGuard`]s
pub struct IterMut<'a, T: ?Sized> {
raw: RawVechonk<T>,
current_index: usize,
_marker: PhantomData<&'a T>,
}
impl<'a, T: ?Sized> IterMut<'a, T> {
pub(super) fn new(chonk: &'a mut Vechonk<T>) -> IterMut<'a, T> {
Self {
raw: chonk.raw.copy(),
current_index: 0,
_marker: PhantomData,
}
}
}
impl<'a, T: ?Sized> Iterator for IterMut<'a, T> {
type Item = MutGuard<T>;
fn next(&mut self) -> Option<Self::Item> {
if self.current_index == self.raw.len {
return None;
}
let old_index = self.current_index;
self.current_index += 1;
// SAFETY: We did a bounds check above, and `Self::new` took a `&mut Vechonk`
unsafe { Some(MutGuard::new(self.raw.copy(), old_index)) }
}
fn size_hint(&self) -> (usize, Option<usize>) {
let count = self.raw.len - self.current_index;
(count, Some(count))
}
}
impl<'a, T: ?Sized> ExactSizeIterator for IterMut<'a, T> {
fn len(&self) -> usize {
self.raw.len - self.current_index
}
}
/// An iterator over the elements of a [`Vechonk`]
pub struct IntoIter<T: ?Sized> {
raw: RawVechonk<T>,

View file

@@ -46,10 +46,10 @@ use alloc::boxed::Box;
use core::cmp;
use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
use core::ops::{Deref, Index};
use crate::iter::IterMut;
pub use iter::{IntoIter, Iter};
/// chonky af
///
@@ -100,6 +100,11 @@ impl<T: ?Sized> Vechonk<T> {
Iter::new(self)
}
/// An iterator over the elements yielding [`MutGuard`]s
pub fn iter_mut(&mut self) -> IterMut<T> {
IterMut::new(self)
}
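For orientation, a minimal usage sketch (not part of this commit) of what `iter_mut` is meant to enable, assuming the `vechonk!` macro from the test suite; note that `MutGuard::write` still bottoms out in a `todo!()` at this point, so the loop body is only illustrative:

let mut chonk: Vechonk<str> = vechonk!["hello".into(), "uwu".into()];
for mut guard in chonk.iter_mut() {
    // reading goes through `Deref`, so the guard can be used like a `&str`
    if guard.len() >= 3 {
        // writing is fallible: the replacement must fit the old element's slot
        let _ = guard.write("owo".into());
    }
}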
/// Get a reference to an element at the index. Returns `None` if the index is out of bounds
pub fn get(&self, index: usize) -> Option<&T> {
if index < self.len() {
@@ -110,6 +115,23 @@ impl<T: ?Sized> Vechonk<T> {
}
}
/// Get a mutable guard to an element at the index. Returns `None` if the index is out of bounds
pub fn get_mut(&mut self, index: usize) -> Option<MutGuard<T>> {
if index < self.len() {
// SAFETY: The index has been checked above
unsafe { Some(self.get_unchecked_mut(index)) }
} else {
None
}
}
/// # Safety
/// The index must be in bounds
pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> MutGuard<T> {
// SAFETY: We can assume that `index` is not out of bounds
unsafe { MutGuard::new(self.raw.copy(), index) }
}
/// # Safety
/// The index must be in bounds
pub unsafe fn get_unchecked(&self, index: usize) -> &T {
@@ -131,6 +153,42 @@ impl<T: ?Sized> Vechonk<T> {
}
}
/// A guard that acts similarly to a `&mut T`, but does not allow arbitrary values to be written;
/// instead it checks whether the new element has the correct size/alignment to fit into the space of the old element.
pub struct MutGuard<T: ?Sized> {
raw: RawVechonk<T>,
/// Must always be in bounds
index: usize,
}
impl<T: ?Sized> MutGuard<T> {
/// # Safety
/// The index must not be out of bounds, and `raw` must have been obtained from a `&mut Vechonk` (i.e. the caller has exclusive access)
pub(crate) unsafe fn new(raw: RawVechonk<T>, index: usize) -> Self {
Self { raw, index }
}
/// Write a new element to this location.
/// * If the element fits in the space, the old element is returned in `Ok`
/// * If the element does not fit in the space, the new element is handed back in `Err`
pub fn write(&mut self, element: Box<T>) -> Result<Box<T>, Box<T>> {
// SAFETY: We can assume that `index` is in bounds
unsafe { self.raw.insert_elem_unchecked(element, self.index) }
}
}
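A hedged sketch (not part of the commit) of how a caller might branch on `write`'s `Result`, following the doc comment above:

let mut chonk: Vechonk<str> = vechonk!["hello".into()];
let mut guard = chonk.get_mut(0).unwrap();
match guard.write("owo".into()) {
    // the replacement fit: the displaced old element is handed back
    Ok(old) => assert_eq!(&*old, "hello"),
    // too big for the slot (or unalignable): the rejected new element comes back
    Err(rejected) => assert_eq!(&*rejected, "owo"),
}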
impl<T: ?Sized> Deref for MutGuard<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
// SAFETY: The metadata is only assigned directly from the pointer metadata of the original object and therefore valid
// The pointer is calculated from the offset, which is also valid
// The pointer is aligned, because it has been aligned manually in `Self::push`
// We can assume that the index is in bounds
unsafe { &*self.raw.get_unchecked_ptr(self.index) }
}
}
impl<T: ?Sized> Index<usize> for Vechonk<T> {
type Output = T;

View file

@@ -143,6 +143,61 @@ impl<T: ?Sized> RawVechonk<T> {
}
}
/// Insert an element at an index.
///
/// If the insertion was successful, the old element is returned.
/// If the new element doesn't fit the gap or can't be aligned, it is returned back.
/// # Safety
/// * The index must be in bounds
pub unsafe fn insert_elem_unchecked(
&mut self,
element: Box<T>,
index: usize,
) -> Result<Box<T>, Box<T>> {
// this is where the free space in which we could place the element starts
// since there might be alignment padding after the previous element, this can be before the old element's offset
let free_space_start_offset = if index == 0 {
self.cap
} else {
// SAFETY: `index` is not 0
unsafe {
let data_element_before = self.get_data(index - 1);
data_element_before.offset + self.sizeof_elem(index - 1)
}
};
let next_element_start_offset = if index == self.len - 1 {
self.cap - self.data_section_size()
} else {
// SAFETY: We have checked that `index` is not the last element
unsafe { self.get_data(index + 1).offset }
};
let elem_size = mem::size_of_val::<T>(element.as_ref());
let elem_align = mem::align_of_val::<T>(element.as_ref());
let required_align_offset = self
.ptr
.as_ptr()
.wrapping_add(free_space_start_offset)
.align_offset(elem_align);
if required_align_offset == usize::MAX {
// we could not align the element, so just return it
return Err(element);
}
let new_elem_starting_offset = free_space_start_offset + required_align_offset;
let actual_free_space = next_element_start_offset.saturating_sub(new_elem_starting_offset);
if actual_free_space < elem_size {
// We don't have enough space for the element.
return Err(element);
}
todo!()
}
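To make the space check above concrete, here is a small self-contained sketch (hypothetical helper, not part of the crate) of the same `align_offset`-based fit test against a raw byte buffer:

/// Does a value of `size`/`align` fit between `free_start` and `next_start`,
/// measured as offsets from the buffer base pointer `base`?
fn fits(base: *const u8, free_start: usize, next_start: usize, size: usize, align: usize) -> bool {
    let align_offset = base.wrapping_add(free_start).align_offset(align);
    if align_offset == usize::MAX {
        // the pointer can never be aligned for this type
        return false;
    }
    let elem_start = free_start + align_offset;
    // saturating_sub: if aligning pushed us past the next element, there is no space at all
    next_start.saturating_sub(elem_start) >= size
}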
pub fn pop(&mut self) -> Option<Box<T>> {
if self.len == 0 {
return None;
@@ -184,7 +239,9 @@ impl<T: ?Sized> RawVechonk<T> {
alloc::alloc::handle_alloc_error(element_box_layout);
}
// SAFETY: We can rely on `index` not being out of bounds
let elem_size = unsafe { self.sizeof_elem(index) };
// SAFETY: The new allocation doesn't overlap, `box_ptr` was just allocated and is non_null
// For `elem_ptr`, see safety comments above, the size was obtained above as well
unsafe {
@@ -286,17 +343,42 @@ impl<T: ?Sized> RawVechonk<T> {
/// # Safety
/// `index` must not be out of bounds
unsafe fn get_data(&self, index: usize) -> PtrData<T> {
// SAFETY: We can assume that `index` is not out of bounds
let data_ptr = unsafe { self.get_data_ptr(index) };
// SAFETY: The pointer is aligned because `self.ptr` is aligned and the data offset is a multiple of the alignment
// The value behind it is always a `PtrData<T>`
unsafe { *data_ptr }
}
/// # Safety
/// `index` must not be out of bounds
unsafe fn sizeof_elem(&self, index: usize) -> usize {
// SAFETY: We can rely on `index` not being out of bounds
let data = unsafe { self.get_data(index) };
// SAFETY: We can assume that the `offset` from `data` is not out of bounds
let elem_ptr = unsafe { self.ptr.as_ptr().add(data.offset) };
// construct a fat pointer to the element so its size can be measured
let elem_fat_ptr = ptr::from_raw_parts_mut::<T>(elem_ptr as *mut (), data.meta);
// SAFETY: The metadata has been preserved, and the pointer has been properly aligned and initialized
// when the element was added
let elem_fat_ref = unsafe { &*elem_fat_ptr };
mem::size_of_val(elem_fat_ref)
}
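For context, a standalone sketch of the fat-pointer reconstruction that `sizeof_elem` relies on, shown here for `str` (this uses the nightly `ptr_metadata` feature, which the crate itself already depends on):

let s: &str = "hello";
let thin = s as *const str as *mut ();
let meta = core::ptr::metadata(s);
let fat = core::ptr::from_raw_parts_mut::<str>(thin, meta);
// SAFETY: `fat` still points at the original, live "hello"
let size = unsafe { core::mem::size_of_val(&*fat) };
assert_eq!(size, 5);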
/// # Safety
/// `index` must not be out of bounds
unsafe fn get_data_ptr(&self, index: usize) -> *mut PtrData<T> {
let data_offset = self.offset_for_data(index);
// SAFETY: The offset will always be less than `self.cap`, because we can't have more than `self.len` `PtrData`
let data_ptr = unsafe { self.ptr.as_ptr().add(data_offset) };
data_ptr as *mut PtrData<T>
}
/// SAFETY: The allocation must be owned by `ptr` and have the length `cap`
pub unsafe fn dealloc(cap: usize, ptr: *mut u8) {
if cap == 0 {

View file

@@ -235,6 +235,28 @@ fn eq_ne() {
assert!(!chonk1.eq(&chonk2));
}
#[test]
fn get_mut_deref() {
let mut chonk1: Vechonk<str> = vechonk!["hello".into(), "uwu".into()];
let hello = chonk1.get_mut(0).unwrap();
assert_eq!(&*hello, "hello");
}
#[test]
#[ignore]
fn get_mut_mutating() {
let mut chonk1: Vechonk<str> = vechonk!["hello".into(), "uwu".into()];
let mut hello = chonk1.get_mut(0).unwrap();
hello.write("owo".into()).unwrap();
hello.write("hi, I'm wayyyyy too long".into()).unwrap_err();
assert_eq!(&*hello, "owo");
}
#[test]
#[ignore]
fn zst() {