diff --git a/src/iter.rs b/src/iter.rs
index b430557..e19c105 100644
--- a/src/iter.rs
+++ b/src/iter.rs
@@ -1,4 +1,4 @@
-use crate::{RawVechonk, Vechonk};
+use crate::{MutGuard, RawVechonk, Vechonk};
 use alloc::boxed::Box;
 use core::marker::PhantomData;
 use core::mem;
@@ -50,6 +50,52 @@ impl<'a, T: ?Sized> ExactSizeIterator for Iter<'a, T> {
     }
 }

+/// An iterator over the elements of a [`Vechonk`], yielding [`MutGuard`]s
+pub struct IterMut<'a, T: ?Sized> {
+    raw: RawVechonk<T>,
+    current_index: usize,
+    _marker: PhantomData<&'a T>,
+}
+
+impl<'a, T: ?Sized> IterMut<'a, T> {
+    pub(super) fn new(chonk: &'a mut Vechonk<T>) -> IterMut<'a, T> {
+        Self {
+            raw: chonk.raw.copy(),
+            current_index: 0,
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<'a, T: ?Sized> Iterator for IterMut<'a, T> {
+    type Item = MutGuard<T>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.current_index == self.raw.len {
+            return None;
+        }
+
+        let old_index = self.current_index;
+
+        self.current_index += 1;
+
+        // SAFETY: We did a bounds check above, and have taken a `&mut Vechonk`
+        unsafe { Some(MutGuard::new(self.raw.copy(), old_index)) }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let count = self.raw.len - self.current_index;
+
+        (count, Some(count))
+    }
+}
+
+impl<'a, T: ?Sized> ExactSizeIterator for IterMut<'a, T> {
+    fn len(&self) -> usize {
+        self.raw.len - self.current_index
+    }
+}
+
 /// An iterator over the elements of a [`Vechonk`]
 pub struct IntoIter<T: ?Sized> {
     raw: RawVechonk<T>,
diff --git a/src/lib.rs b/src/lib.rs
index fae7ee3..7cca83b 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -46,10 +46,10 @@ use alloc::boxed::Box;
 use core::cmp;
 use core::cmp::Ordering;
 use core::hash::{Hash, Hasher};
-use core::ops::Index;
+use core::ops::{Deref, Index};

-use crate::iter::IntoIter;
-pub use iter::Iter;
+use crate::iter::IterMut;
+pub use iter::{IntoIter, Iter};

 /// chonky af
 ///
@@ -100,6 +100,11 @@ impl<T: ?Sized> Vechonk<T> {
         Iter::new(self)
     }

+    /// An iterator over the elements yielding [`MutGuard`]s
+    pub fn iter_mut(&mut self) -> IterMut<T> {
+        IterMut::new(self)
+    }
+
     /// Get a reference to an element at the index. Returns `None` if the index is out of bounds
     pub fn get(&self, index: usize) -> Option<&T> {
         if index < self.len() {
@@ -110,6 +115,23 @@
         }
     }

+    /// Get a mutable guard to an element at the index. Returns `None` if the index is out of bounds
+    pub fn get_mut(&mut self, index: usize) -> Option<MutGuard<T>> {
+        if index < self.len() {
+            // SAFETY: The index has been checked above
+            unsafe { Some(self.get_unchecked_mut(index)) }
+        } else {
+            None
+        }
+    }
+
+    /// # Safety
+    /// The index must be in bounds
+    pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> MutGuard<T> {
+        // SAFETY: We can assume that `index` is not out of bounds
+        unsafe { MutGuard::new(self.raw.copy(), index) }
+    }
+
     /// # Safety
     /// The index must be in bounds
     pub unsafe fn get_unchecked(&self, index: usize) -> &T {
@@ -131,6 +153,42 @@
     }
 }

+/// A guard that acts similarly to a `&mut T`, but does not allow any arbitrary value to be written,
+/// instead checking whether the element has the correct size/alignment to fit the space of the old element.
+pub struct MutGuard<T: ?Sized> {
+    raw: RawVechonk<T>,
+    /// Must always be in bounds
+    index: usize,
+}
+
+impl<T: ?Sized> MutGuard<T> {
+    /// # Safety
+    /// The index must not be out of bounds, and `raw` must be mutable
+    pub(crate) unsafe fn new(raw: RawVechonk<T>, index: usize) -> Self {
+        Self { raw, index }
+    }
+
+    /// Write a new element to the location.
+    /// * If the element fits in the space, the old element is returned in `Ok`
+    /// * If the element does not fit in the space, the new element is returned again in `Err`
+    pub fn write(&mut self, element: Box<T>) -> Result<Box<T>, Box<T>> {
+        // SAFETY: We can assume that `index` is in bounds
+        unsafe { self.raw.insert_elem_unchecked(element, self.index) }
+    }
+}
+
+impl<T: ?Sized> Deref for MutGuard<T> {
+    type Target = T;
+
+    fn deref(&self) -> &Self::Target {
+        // SAFETY: The metadata is only assigned directly from the pointer metadata of the original object and is therefore valid
+        // The pointer is calculated from the offset, which is also valid
+        // The pointer is aligned, because it has been aligned manually in `Self::push`
+        // We can assume that the index is in bounds
+        unsafe { &*self.raw.get_unchecked_ptr(self.index) }
+    }
+}
+
 impl<T: ?Sized> Index<usize> for Vechonk<T> {
     type Output = T;

diff --git a/src/raw.rs b/src/raw.rs
index b4d7ad0..74eed4f 100644
--- a/src/raw.rs
+++ b/src/raw.rs
@@ -143,6 +143,61 @@ impl<T: ?Sized> RawVechonk<T> {
         }
     }

+    /// Insert an element at an index.
+    ///
+    /// If the insertion was successful, the old element is returned.
+    /// If the new element doesn't fit the gap or can't be aligned, it is returned again.
+    /// # Safety
+    /// * The index must be in bounds
+    pub unsafe fn insert_elem_unchecked(
+        &mut self,
+        element: Box<T>,
+        index: usize,
+    ) -> Result<Box<T>, Box<T>> {
+        // this is where the free space where we could place the element starts
+        // since there might be padding for the previous element, this is sometimes before `elem_offset`
+        let free_space_start_offset = if index == 0 {
+            0
+        } else {
+            // SAFETY: `index` is not 0
+            unsafe {
+                let data_element_before = self.get_data(index - 1);
+                data_element_before.offset + self.sizeof_elem(index - 1)
+            }
+        };
+
+        let next_element_start_offset = if index == self.len - 1 {
+            self.cap - self.data_section_size()
+        } else {
+            // SAFETY: We have checked that `index` is not the last element
+            unsafe { self.get_data(index + 1).offset }
+        };
+
+        let elem_size = mem::size_of_val::<T>(element.as_ref());
+        let elem_align = mem::align_of_val::<T>(element.as_ref());
+
+        let required_align_offset = self
+            .ptr
+            .as_ptr()
+            .wrapping_add(free_space_start_offset)
+            .align_offset(elem_align);
+
+        if required_align_offset == usize::MAX {
+            // we could not align the element, so just return it
+            return Err(element);
+        }
+
+        let new_elem_starting_offset = free_space_start_offset + required_align_offset;
+        let actual_free_space = next_element_start_offset.saturating_sub(new_elem_starting_offset);
+
+        if actual_free_space < elem_size {
+            // We don't have enough space for the element.
+            return Err(element);
+        }
+
+        todo!()
+    }
+
     pub fn pop(&mut self) -> Option<Box<T>> {
         if self.len == 0 {
             return None;
@@ -184,7 +239,9 @@
             alloc::alloc::handle_alloc_error(element_box_layout);
         }

-        let elem_size = mem::size_of_val(elem_fat_ref);
+        // SAFETY: We can rely on `index` not being out of bounds
+        let elem_size = unsafe { self.sizeof_elem(index) };
+
         // SAFETY: The new allocation doesn't overlap, `box_ptr` was just allocated and is non-null
         // For `elem_ptr`, see safety comments above, the size was obtained above as well
         unsafe {
@@ -286,17 +343,42 @@
     /// # Safety
     /// `index` must not be out of bounds
     unsafe fn get_data(&self, index: usize) -> PtrData<T> {
-        let data_offset = self.offset_for_data(index);
-
-        // SAFETY: The offset will always be less than `self.cap`, because we can't have more than `self.len` `PtrData`
-        let data_ptr = unsafe { self.ptr.as_ptr().add(data_offset) };
-        let data_ptr = data_ptr as *mut PtrData<T>;
+        // SAFETY: We can assume that `index` is not out of bounds
+        let data_ptr = unsafe { self.get_data_ptr(index) };

         // SAFETY: The pointer is aligned because `self.ptr` is aligned and `data_offset` is a multiple of the alignment
         // The value behind it is always a `PtrData`
         unsafe { *data_ptr }
     }

+    /// # Safety
+    /// `index` must not be out of bounds
+    unsafe fn sizeof_elem(&self, index: usize) -> usize {
+        // SAFETY: We can rely on `index` not being out of bounds
+        let data = unsafe { self.get_data(index) };
+
+        // SAFETY: We can assume that the `offset` from `data` is not out of bounds
+        let elem_ptr = unsafe { self.ptr.as_ptr().add(data.offset) };
+
+        // construct a fat pointer to the element so we can measure its size
+        let elem_fat_ptr = ptr::from_raw_parts_mut::<T>(elem_ptr as *mut (), data.meta);
+        // SAFETY: The metadata has been preserved, and the pointer has been properly aligned and initialized
+        // when the element was added
+        let elem_fat_ref = unsafe { &*elem_fat_ptr };
+
+        mem::size_of_val(elem_fat_ref)
+    }
+
+    /// # Safety
+    /// `index` must not be out of bounds
+    unsafe fn get_data_ptr(&self, index: usize) -> *mut PtrData<T> {
+        let data_offset = self.offset_for_data(index);
+
+        // SAFETY: The offset will always be less than `self.cap`, because we can't have more than `self.len` `PtrData`
+        let data_ptr = unsafe { self.ptr.as_ptr().add(data_offset) };
+        data_ptr as *mut PtrData<T>
+    }
+
     /// SAFETY: The allocation must be owned by `ptr` and have the length `cap`
     pub unsafe fn dealloc(cap: usize, ptr: *mut u8) {
         if cap == 0 {
diff --git a/src/test.rs b/src/test.rs
index fb5fbfe..3c9dd54 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -235,6 +235,28 @@ fn eq_ne() {
     assert!(!chonk1.eq(&chonk2));
 }

+#[test]
+fn get_mut_deref() {
+    let mut chonk1: Vechonk<str> = vechonk!["hello".into(), "uwu".into()];
+
+    let hello = chonk1.get_mut(0).unwrap();
+
+    assert_eq!(&*hello, "hello");
+}
+
+#[test]
+#[ignore]
+fn get_mut_mutating() {
+    let mut chonk1: Vechonk<str> = vechonk!["hello".into(), "uwu".into()];
+
+    let mut hello = chonk1.get_mut(0).unwrap();
+
+    hello.write("owo".into()).unwrap();
+    hello.write("hi, I'm wayyyyy too long".into()).unwrap_err();
+
+    assert_eq!(&*hello, "owo");
+}
+
 #[test]
 #[ignore]
 fn zst() {
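
Taken together, the new API reads like this. This is a minimal sketch mirroring the `get_mut_deref` and `get_mut_mutating` tests; it assumes the `vechonk!` macro is exported from the crate root, and the `write` calls only succeed once `insert_elem_unchecked` loses its `todo!()`, which is also why `get_mut_mutating` stays `#[ignore]`d for now:

```rust
use vechonk::{vechonk, Vechonk}; // assumes `vechonk!` is #[macro_export]ed

fn main() {
    let mut chonk: Vechonk<str> = vechonk!["hello".into(), "uwu".into()];

    // `MutGuard` derefs to the element, so reads work as through a `&T`
    let hello = chonk.get_mut(0).unwrap();
    assert_eq!(&*hello, "hello");

    // writes are checked: `Ok` hands back the displaced old element,
    // `Err` hands back the rejected new one
    let mut hello = chonk.get_mut(0).unwrap();
    let _old: Box<str> = hello.write("owo".into()).unwrap();
    let _rejected: Box<str> = hello.write("hi, I'm wayyyyy too long".into()).unwrap_err();

    // `iter_mut` yields one `MutGuard` per element
    for guard in chonk.iter_mut() {
        assert!(!guard.is_empty());
    }
}
```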
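The core of `insert_elem_unchecked` is offset arithmetic over the gap left by the old element: the gap starts where the previous element ends (offset 0 for the first element), ends where the next element starts (or where the data section begins, for the last element), and the candidate pointer must first be padded into alignment with `align_offset`. A standalone model of that fit check (the `fits` helper is hypothetical, not part of the crate):

```rust
/// Can a value with `size`/`align` be placed in the gap
/// `[free_start, next_start)` of the allocation based at `base`?
/// Returns the offset the value would start at.
fn fits(base: *const u8, free_start: usize, next_start: usize, size: usize, align: usize) -> Option<usize> {
    // how many padding bytes until the pointer is aligned;
    // `usize::MAX` means it can never be aligned
    let pad = base.wrapping_add(free_start).align_offset(align);
    if pad == usize::MAX {
        return None; // corresponds to the first `Err(element)` in the diff
    }
    let start = free_start + pad;
    // `saturating_sub`, since the padding alone may overshoot the gap
    if next_start.saturating_sub(start) < size {
        return None; // corresponds to the second `Err(element)`
    }
    Some(start)
}

fn main() {
    let buf = [0u8; 64];
    // an 8-byte gap fits a 4-byte, 4-aligned value even after padding...
    assert!(fits(buf.as_ptr(), 9, 17, 4, 4).is_some());
    // ...but never a 16-byte one
    assert!(fits(buf.as_ptr(), 9, 17, 16, 4).is_none());
}
```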