This commit is contained in:
nora 2024-06-27 18:59:08 +02:00
parent 1721d6a45a
commit 9bab547bcf
20 changed files with 1305 additions and 0 deletions

54
old-stuff/Cargo.lock generated Normal file
View file

@ -0,0 +1,54 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "old-stuff"
version = "0.1.0"
dependencies = [
"pm",
]
[[package]]
name = "pm"
version = "0.1.0"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"

12
old-stuff/Cargo.toml Normal file
View file

@ -0,0 +1,12 @@
[workspace]
members = [".", "./pm"]
[package]
name = "old-stuff"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
pm = { path = "./pm" }

3
old-stuff/README.md Normal file
View file

@ -0,0 +1,3 @@
# the-good-stuff
Random Rust experiments.

View file

@ -0,0 +1,14 @@
use uwu::scratch::{
actual_scratch_read, actual_scratch_write, define_scratch, scratch_space, Scratch,
};
// Demo of the `scratch` experiment: the `#[scratch_space]` attribute rewrites
// the `scratch_write!`/`scratch_read!` invocations below into their
// `actual_scratch_*` forms, threading a hidden tracking local through so that
// a read without a preceding write fails to compile.
#[scratch_space]
fn has_scratch_space(mut scratch: Scratch<'_>) {
    scratch_write!(scratch, 10u32);
    let _: u32 = scratch_read!(scratch);
}
fn main() {
    // Stack-allocate a 10-byte scratch buffer and hand it to the function.
    define_scratch!(scratch, 10);
    has_scratch_space(scratch);
}

14
old-stuff/pm/Cargo.toml Normal file
View file

@ -0,0 +1,14 @@
[package]
name = "pm"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
proc-macro2 = "1.0.49"
quote = "1.0.23"
syn = { version = "1.0.107", features = ["full", "fold"] }

46
old-stuff/pm/src/lib.rs Normal file
View file

@ -0,0 +1,46 @@
use proc_macro::TokenStream;
mod safe_extern;
mod scratch;
/// Attribute macro backing `scratch_space`; see the `scratch` module for the
/// macro-invocation rewrite it performs on the annotated function.
#[proc_macro_attribute]
pub fn scratch_space(attr: TokenStream, input: TokenStream) -> TokenStream {
    scratch::scratch_space(attr, input)
}
/// # safe-extern
///
/// Mark foreign functions as to be safe to call.
///
/// ```ignore
/// #[safe_extern]
/// extern "Rust" {
///     fn add(a: u8, b: u8) -> u8;
/// }
///
/// fn main() {
///     assert_eq!(add(1, 2), 3);
/// }
/// ```
///
/// It works by expanding the above to this
///
/// ```ignore
/// extern "Rust" {
///     #[link_name = "add"]
///     fn _safe_extern_inner_add(a: u8, b: u8) -> u8;
/// }
/// fn add(a: u8, b: u8) -> u8 {
///     unsafe { _safe_extern_inner_add(a, b) }
/// }
///
/// fn main() {
///     assert_eq!(add(1, 2), 3);
/// }
/// ```
///
/// This is of course unsound and the macro needs to be `unsafe` somehow but I can't be bothered with that right now lol.
#[proc_macro_attribute]
pub fn safe_extern(attr: TokenStream, input: TokenStream) -> TokenStream {
    safe_extern::safe_extern(attr, input)
}

View file

@ -0,0 +1,98 @@
use proc_macro::TokenStream;
use proc_macro2::Ident;
use quote::{quote, quote_spanned};
use syn::{
parse_macro_input, ForeignItem, ForeignItemFn, ItemFn, Pat, PatIdent, PatType, Visibility,
};
/// Implementation of `#[safe_extern]`: rewrites every `fn` declaration inside
/// the foreign block (see `mangle_ident_and_add_link_name`) and emits the
/// generated safe wrapper fns after it. Any non-fn foreign item becomes a
/// compile error.
pub fn safe_extern(_: TokenStream, input: TokenStream) -> TokenStream {
    let mut foreign = parse_macro_input!(input as syn::ItemForeignMod);
    let mut safe_wrappers = Vec::new();
    // Take the items out so the list can be rebuilt with the mangled fns.
    let src_items = std::mem::take(&mut foreign.items);
    for item in src_items {
        match item {
            ForeignItem::Fn(item_fn) => {
                let (replacement, safe_wrapper) = mangle_ident_and_add_link_name(item_fn);
                foreign.items.push(ForeignItem::Fn(replacement));
                safe_wrappers.push(safe_wrapper);
            }
            item => match head_span_foreign_item(&item) {
                // Point the error at the offending item when a span is available.
                Some(span) => {
                    return quote_spanned! {
                        span => compile_error! { "only foreign functions are allowed" }
                    }
                    .into();
                }
                // No usable span: emit the error at the call site.
                None => {
                    return quote! {
                        compile_error! { "only foreign functions are allowed" }
                    }
                    .into();
                }
            },
        }
    }
    quote! { #foreign #(#safe_wrappers)* }.into()
}
/// Rewrites one foreign fn: renames it to `_safe_extern_inner_<name>`, attaches
/// a `#[link_name]` pointing at the original symbol, and returns a safe wrapper
/// fn (carrying the original name and visibility) that forwards to it.
///
/// # Panics
/// Panics on an existing `#[link_name]`, raw identifiers, receivers, and
/// non-identifier argument patterns.
fn mangle_ident_and_add_link_name(mut item: ForeignItemFn) -> (ForeignItemFn, ItemFn) {
    // An explicit #[link_name] would conflict with the one added below.
    // (`Ident: PartialEq<str>`, so no `to_string` allocation is needed.)
    if item.attrs.iter().any(|attr| {
        attr.path
            .get_ident()
            .map_or(false, |ident| ident == "link_name")
    }) {
        panic!("a #[link_name] attribute is already present")
    }
    // The wrapper takes over the original visibility; the raw extern fn
    // becomes private.
    let vis = std::mem::replace(&mut item.vis, Visibility::Inherited);
    let name = item.sig.ident;
    let name_str = name.to_string();
    if name_str.starts_with("r#") {
        panic!("raw identifiers are not supported");
    }
    let mangled = format!("_safe_extern_inner_{name_str}");
    let new_name = Ident::new(&mangled, name.span());
    item.sig.ident = new_name.clone();
    item.attrs
        .push(syn::parse_quote! { #[link_name = #name_str] });
    // Forward each parameter by name; only plain identifier patterns are
    // accepted, and receivers are impossible in foreign fns anyway.
    let args = item.sig.inputs.iter().map(|param| match param {
        syn::FnArg::Receiver(_) => panic!("cannot have receiver in foreign function"),
        syn::FnArg::Typed(PatType { pat, .. }) => match &**pat {
            Pat::Ident(PatIdent { ident, .. }) => quote! { #ident },
            _ => panic!("invalid argument in foreign function"),
        },
    });
    // The wrapper shares the exact signature, just under the original name.
    let mut safe_sig = item.sig.clone();
    safe_sig.ident = name;
    let safe_wrapper = ItemFn {
        attrs: Vec::new(),
        vis,
        sig: safe_sig,
        block: syn::parse_quote! {
            {
                unsafe { #new_name(#(#args),*) }
            }
        },
    };
    (item, safe_wrapper)
}
/// Best-effort span for the "only foreign functions" error: points at the
/// item's leading token. Returns `None` for item kinds no span can be
/// extracted from (e.g. verbatim items).
fn head_span_foreign_item(item: &ForeignItem) -> Option<proc_macro2::Span> {
    Some(match item {
        // Fn items are handled by the caller before this is reached.
        ForeignItem::Fn(_) => unreachable!(),
        ForeignItem::Static(s) => s.static_token.span,
        ForeignItem::Type(ty) => ty.type_token.span,
        ForeignItem::Macro(m) => m.mac.path.segments[0].ident.span(),
        // ForeignItem is #[non_exhaustive]; anything else gets no span.
        _ => return None,
    })
}

View file

@ -0,0 +1,49 @@
use proc_macro::TokenStream;
use proc_macro2::{Ident, Span};
use quote::quote;
use syn::{fold::Fold, parse_macro_input, parse_quote, ItemFn, Stmt};
/// Implementation of `#[scratch_space]`: folds over the function body,
/// rewriting `scratch_write!`/`scratch_read!` invocations (see
/// `LocalInitFolder`), then declares a unit-typed tracking local at the top of
/// the function. Reads use the local before any write initializes it, so a
/// `scratch_read!` without a preceding `scratch_write!` is rejected by the
/// compiler's initialization check.
pub fn scratch_space(_: TokenStream, input: TokenStream) -> TokenStream {
    let fn_def = parse_macro_input!(input as ItemFn);
    // mixed_site hygiene: user code cannot name or collide with this local.
    let track_ident = Ident::new("scratch_local", Span::mixed_site());
    let mut fn_def = LocalInitFolder {
        track_ident: track_ident.clone(),
    }
    .fold_item_fn(fn_def);
    let init: Stmt = parse_quote! { let #track_ident: (); };
    fn_def.block.stmts.insert(0, init);
    quote! { #fn_def }.into()
}
struct LocalInitFolder {
track_ident: Ident,
}
impl syn::fold::Fold for LocalInitFolder {
fn fold_macro(&mut self, mut mac: syn::Macro) -> syn::Macro {
if let Some(last_path) = mac.path.segments.iter().next_back() {
match last_path.ident.to_string().as_str() {
"scratch_write" => {
let track_ident = &self.track_ident.clone();
mac.path = parse_quote! { actual_scratch_write };
mac.tokens.extend(quote! { ; #track_ident });
}
"scratch_read" => {
let mut track_ident = self.track_ident.clone();
track_ident.set_span(track_ident.span().located_at(last_path.ident.span()));
mac.path = parse_quote! { actual_scratch_read };
mac.tokens.extend(quote! { ; #track_ident });
}
_ => {}
}
mac
} else {
mac
}
}
}

67
old-stuff/src/assert.rs Normal file
View file

@ -0,0 +1,67 @@
use std::fmt::Debug;
/// Entry point of a tiny fluent assertion helper: wraps a value so checks read
/// as `assert(x).equals(y)`.
pub fn assert<T>(v: T) -> Assert<T> {
    Assert { v }
}
/// Wrapper holding the value under test; every assertion consumes `self`.
pub struct Assert<T> {
    v: T,
}
impl Assert<bool> {
    /// Panics unless the wrapped value is `true`.
    #[track_caller]
    pub fn is_true(self) {
        assert!(self.v);
    }
    /// Panics unless the wrapped value is `false`.
    #[track_caller]
    pub fn is_false(self) {
        assert!(!self.v);
    }
}
impl<T: Debug> Assert<T> {
    /// Panics unless `self.v == other`. The separate `U` parameter permits
    /// mixed-type comparisons via `PartialEq<U>`.
    #[track_caller]
    pub fn equals<U: Debug>(self, other: U)
    where
        T: PartialEq<U>,
    {
        assert_eq!(self.v, other);
    }
    /// Panics unless `self.v != other`.
    #[track_caller]
    pub fn not_equals<U: Debug>(self, other: U)
    where
        T: PartialEq<U>,
    {
        assert_ne!(self.v, other);
    }
}
impl<T: AsRef<str>> Assert<T> {
    /// Panics unless the wrapped string contains `other` as a substring.
    #[track_caller]
    pub fn contains(self, other: &str) {
        let haystack = self.v.as_ref();
        assert!(
            haystack.contains(other),
            "pattern '{other}' not found in string: {haystack}"
        );
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn assert_bool() {
        assert(true).is_true();
        assert(false).is_false();
    }
    #[test]
    fn assert_equal() {
        assert(1).equals(1);
        assert(2).not_equals(1);
    }
    #[test]
    fn assert_str() {
        // Works for both &str and owned String via the AsRef<str> bound.
        assert("uwu owo").contains("uwu");
        assert("uwu owo".to_owned()).contains("uwu");
    }
}

View file

@ -0,0 +1,58 @@
/// `cfg_match!`-style conditional compilation: the first arm whose `#[cfg]`
/// predicate holds is expanded; `_` is the unconditional fallback.
///
/// Works by recursion: the head arm's body is emitted under `#[cfg(pred)]`,
/// and the remaining arms are re-expanded under `#[cfg(not(pred))]`.
#[macro_export]
macro_rules! cfg_match {
    // No arms left: expand to nothing.
    () => {};
    // Fallback arm: expand the body unconditionally.
    (_ => { $($tt:tt)* }) => {
        $($tt)*
    };
    // Head arm + tail: gate the head, recurse on the tail under not(pred).
    (
        $head_pattern:meta => { $($head_body:tt)* }
        $($rest:tt)*
    ) => {
        #[cfg($head_pattern)]
        $crate::cfg_match! { _ => { $($head_body)* } }
        #[cfg(not($head_pattern))]
        $crate::cfg_match! {
            $($rest)*
        }
    };
}
#[cfg(test)]
mod tests {
    #[test]
    fn correct_one_selected() {
        // `any()` is always false and `all()` is always true, so only the
        // middle (empty) arm may be expanded.
        crate::cfg_match! {
            any() => {
                panic!();
            }
            all() => {
            }
            any() => {
                panic!();
            }
        }
    }
    #[test]
    fn underscore() {
        // A lone `_` arm expands unconditionally.
        crate::cfg_match! {
            _ => {}
        }
    }
    #[test]
    fn fallback() {
        // Falls through two false arms to the `_` fallback.
        crate::cfg_match! {
            any() => {
                panic!();
            }
            any() => {
                panic!();
            }
            _ => {}
        }
    }
}

View file

@ -0,0 +1,94 @@
use std::hash::{BuildHasher, Hash};
pub mod simple_open_addressing;
/// Family (HKT-style) trait so the shared tests can be written once and
/// instantiated for each hash-map implementation in this module.
pub trait HashMapFamily {
    type Map<K, V, S>: HashMap<K, V, S>;
}
/// Minimal hash-map interface implemented by the experiments in this module.
pub trait HashMap<K, V, S>: IntoIterator<Item = (K, V)> {
    /// Creates an empty map that uses `state` to hash keys.
    fn with_hasher(state: S) -> Self;
    /// Number of key-value pairs currently stored.
    fn len(&self) -> usize;
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Looks up `key`, returning a reference to its value if present.
    fn get(&self, key: &K) -> Option<&V>
    where
        K: Eq + Hash,
        S: BuildHasher;
    /// Inserts `key -> value`, returning the value previously stored under
    /// `key`, if any.
    fn insert(&mut self, key: K, value: V) -> Option<V>
    where
        K: Eq + Hash,
        S: BuildHasher;
}
#[cfg(test)]
mod tests {
    use std::hash::{BuildHasher, BuildHasherDefault, Hasher, RandomState};
    use super::{HashMap, HashMapFamily};
    // Hasher that maps everything to 0, forcing worst-case collisions.
    #[derive(Default)]
    struct CollidingHasher;
    impl Hasher for CollidingHasher {
        fn finish(&self) -> u64 {
            0
        }
        fn write(&mut self, _bytes: &[u8]) {}
    }
    /// Shared conformance suite, run against each `HashMapFamily` impl.
    pub(super) fn run_tests<M>()
    where
        M: HashMapFamily,
    {
        let mk_str = || M::Map::<&str, &str, _>::with_hasher(RandomState::new());
        // Lookups on an empty map.
        let m = mk_str();
        assert_eq!(m.get(&"uwu"), None);
        assert_eq!(m.get(&"uwu"), None);
        // Basic insert + get.
        let mut m = mk_str();
        m.insert("hello", "world");
        assert_eq!(m.get(&"hello"), Some(&"world"));
        assert_eq!(m.len(), 1);
        m.insert("aaa", "yes");
        assert_eq!(m.get(&"hello"), Some(&"world"));
        assert_eq!(m.get(&"aaa"), Some(&"yes"));
        assert_eq!(m.len(), 2);
        // Overwriting an existing key must replace, not duplicate.
        let mut m = mk_str();
        m.insert("hello", "world");
        assert_eq!(m.get(&"hello"), Some(&"world"));
        assert_eq!(m.len(), 1);
        m.insert("hello", "no");
        assert_eq!(m.get(&"hello"), Some(&"no"));
        assert_eq!(m.len(), 1);
        // Volume tests across several sizes, then with total collisions.
        for count in [1, 10, 100, 1000, 10_000, 100_000] {
            test_many::<M, _>(count, RandomState::new());
        }
        test_many::<M, _>(1000, BuildHasherDefault::<CollidingHasher>::default());
    }
    /// Inserts `0..count` as identity pairs and checks that iteration yields
    /// each pair exactly once.
    fn test_many<M: HashMapFamily, H: BuildHasher>(count: usize, h: H) {
        let mut m = M::Map::with_hasher(h);
        for i in 0..count {
            m.insert(i, i);
        }
        let mut found = vec![false; count];
        for (k, v) in m.into_iter() {
            assert_eq!(k, v);
            assert!(!found[k], "duplicate element");
            found[k] = true;
        }
        for (i, found) in found.iter().enumerate() {
            assert!(found, "element {i} was lost");
        }
    }
}

View file

@ -0,0 +1,151 @@
use super::{HashMap, HashMapFamily};
use std::{
hash::{BuildHasher, Hash, RandomState},
vec,
};
// A bucket: `None` = empty, `Some((k, v))` = occupied. No tombstones are
// needed because removal is not supported.
type Entry<K, V> = Option<(K, V)>;
/// Linear-probing open-addressing hash map (teaching implementation).
pub struct SimpleOAHashMap<K, V, S = RandomState> {
    buckets: Vec<Entry<K, V>>,
    // Number of occupied buckets.
    filled: usize,
    // Hasher state.
    s: S,
}
impl<K: Eq + Hash, V> SimpleOAHashMap<K, V, RandomState> {
pub fn new() -> Self {
Self::with_hasher(RandomState::new())
}
}
impl<K: Eq + Hash, V, S: BuildHasher> SimpleOAHashMap<K, V, S> {
    /// Home bucket index for `key`: hash modulo table size.
    fn bucket_of_elem(&self, key: &K) -> usize {
        assert_ne!(self.buckets.len(), 0, "cannot compute bucket of empty map");
        let hash = self.s.hash_one(&key) as usize;
        hash % self.buckets.len()
    }
    /// Doubles the table (8 buckets initially) and re-inserts every entry,
    /// since home buckets depend on the table size.
    fn grow(&mut self) {
        let len = self.buckets.len();
        let new = if len == 0 { 8 } else { len * 2 };
        // Drain the old table through the occupied-bucket iterator.
        let old = IntoIter::new(std::mem::take(&mut self.buckets));
        let new_buckets = (0..new).map(|_| None).collect();
        self.buckets = new_buckets;
        self.extend(old);
    }
}
impl<K: Eq + Hash, V, S: BuildHasher> Extend<(K, V)> for SimpleOAHashMap<K, V, S> {
    /// Inserts every pair from `iter`, discarding any replaced values.
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        for (key, value) in iter {
            self.insert(key, value);
        }
    }
}
impl<K, V, S> super::HashMap<K, V, S> for SimpleOAHashMap<K, V, S> {
    fn with_hasher(state: S) -> Self {
        // Allocation is deferred until the first insert triggers `grow`.
        Self {
            buckets: Vec::new(),
            filled: 0,
            s: state,
        }
    }
    fn len(&self) -> usize {
        self.filled
    }
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    fn get(&self, key: &K) -> Option<&V>
    where
        K: Eq + Hash,
        S: BuildHasher,
    {
        if self.is_empty() {
            return None;
        }
        let bucket = self.bucket_of_elem(&key);
        // Linear probe from the home bucket, stopping at the first empty
        // bucket. Probing never wraps around: `insert` grows the table
        // instead of wrapping, so a present key always appears before the
        // first gap after its home slot.
        let result = self.buckets[bucket..]
            .iter()
            .take_while(|elem| elem.is_some())
            .find(|elem| matches!(elem, Some((elem_key, _)) if elem_key == key));
        if let Some(Some((_, value))) = result {
            Some(value)
        } else {
            None
        }
    }
    fn insert(&mut self, key: K, value: V) -> Option<V>
    where
        K: Eq + Hash,
        S: BuildHasher,
    {
        // Keep the load factor below 1 so probing can terminate.
        if self.filled >= self.buckets.len() {
            self.grow();
        }
        loop {
            let bucket = self.bucket_of_elem(&key);
            // First empty bucket, or the bucket already holding `key`,
            // scanning from the home bucket to the end of the table.
            let bucket = self.buckets[bucket..].iter_mut().find(|bucket| {
                bucket.is_none() || matches!(bucket, Some((elem_key, _)) if *elem_key == key)
            });
            if let Some(bucket) = bucket {
                if bucket.is_none() {
                    self.filled += 1;
                }
                let before = std::mem::replace(bucket, Some((key, value)));
                return before.map(|(_, v)| v);
            } else {
                // Ran off the end of the table without finding a slot:
                // grow (which rehashes everything) and retry.
                self.grow();
            }
        }
    }
}
/// Owning iterator over the occupied buckets.
pub struct IntoIter<K, V> {
    // Uses a plain `fn` pointer (not a closure type) so this type stays
    // nameable.
    buckets: std::iter::FilterMap<vec::IntoIter<Entry<K, V>>, fn(Entry<K, V>) -> Option<(K, V)>>,
}
impl<K, V> IntoIter<K, V> {
    fn new(buckets: Vec<Entry<K, V>>) -> Self {
        IntoIter {
            // identity as the filter function: keep `Some` entries, drop
            // empty buckets.
            buckets: buckets.into_iter().filter_map(std::convert::identity),
        }
    }
}
impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (K, V);
    fn next(&mut self) -> Option<Self::Item> {
        self.buckets.next()
    }
}
impl<K, V, S> IntoIterator for SimpleOAHashMap<K, V, S> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self.buckets)
    }
}
/// `HashMapFamily` instance so the shared suite can exercise this map.
pub struct SimpleOAHashMapFamily;
impl HashMapFamily for SimpleOAHashMapFamily {
    type Map<K, V, S> = SimpleOAHashMap<K, V, S>;
}
#[cfg(test)]
mod tests {
    #[test]
    fn do_tests() {
        // Runs the module-wide conformance suite against this implementation.
        crate::hashmaps::tests::run_tests::<super::SimpleOAHashMapFamily>();
    }
}

View file

@ -0,0 +1,37 @@
#![allow(warnings)]
/// Stack-allocated linked-list experiment: each node (optionally) borrows the
/// node "outside" it. Unfinished — `push` still contains a `todo!()`.
pub struct Node<'a, 'n, T> {
    item: T,
    // NOTE(review): the 'n lifetime parameter is currently unused by any field.
    outer: Option<&'a mut Node<'a, 'n, T>>,
}
impl<'a, 'n, T> Node<'a, 'n, T> {
    /// Creates a head node with no outer node.
    pub fn new(item: T) -> Self {
        Self { item, outer: None }
    }
    /// Pushes a node that should link back to `self` and runs `with_func` on
    /// it. Constructing the back-link is the unsolved part (`todo!()`).
    pub fn push<R>(&mut self, item: T, with_func: impl FnOnce(&mut Self) -> R) -> R {
        let mut inner = Node {
            item,
            outer: Some(todo!()),
        };
        with_func(&mut inner)
    }
}
#[cfg(test)]
mod tests {
    use super::Node;
    #[test]
    #[ignore = "todo"]
    fn push() {
        let mut list = Node::<u8>::new(0);
        inner(&mut list);
        fn inner(list: &mut Node<u8>) {
            list.push(1, |list| {});
        }
    }
}

17
old-stuff/src/lib.rs Normal file
View file

@ -0,0 +1,17 @@
#![feature(ptr_metadata)]
#![feature(strict_provenance)]
pub mod cfg_match;
pub mod hashmaps;
pub mod innocent_linked_list;
pub mod scratch;
// `FALSE` is never a set cfg, so this module is permanently compiled out
// (it uses unstable auto traits / negative impls and redefines Send/Sync).
#[cfg(FALSE)]
pub mod sendsync;
pub mod thin_u128;
pub mod unroll_int;
pub mod unsized_clone;
pub mod assert;
// Re-export the proc macro as `uwu::safe_extern::safe_extern`.
pub mod safe_extern {
    pub use pm::safe_extern;
}

83
old-stuff/src/scratch.rs Normal file
View file

@ -0,0 +1,83 @@
use std::mem::{self, MaybeUninit};
pub use pm::scratch_space;
/// Byte scratch buffer handed out by `define_scratch!`.
///
/// NOTE(review): `write`/`read` are placeholders — `write` only checks that
/// the value would fit and `read` returns `T::default()`; no bytes actually
/// go through the buffer yet.
pub struct Scratch<'a>(&'a mut [MaybeUninit<u8>]);
impl<'a> Scratch<'a> {
    pub fn new(buf: &'a mut [MaybeUninit<u8>]) -> Self {
        Self(buf)
    }
    /// Asserts that a `T` fits into the buffer; does not store the value.
    pub fn write<T>(&mut self, _value: T) {
        let size = mem::size_of::<T>();
        assert!(size <= self.0.len());
    }
    /// Stub read: returns `T::default()` rather than previously written bytes.
    pub fn read<T: Default>(&mut self) -> T {
        T::default()
    }
}
/// Marker macro: `#[scratch_space]` rewrites invocations of this into
/// `actual_scratch_write!`. Reaching this definition means the attribute was
/// missing, so it expands to a compile error.
#[macro_export]
macro_rules! scratch_write {
    ($scratch:ident, $value:expr) => {
        /* transformed to a call to actual_scratch_write */
        compile_error!("Failed to transform macro invocation");
    };
}
/// Marker macro: rewritten into `actual_scratch_read!` by `#[scratch_space]`.
#[macro_export]
macro_rules! scratch_read {
    ($scratch:ident) => {
        /* transformed to a call to actual_scratch_read */
        compile_error!("Failed to transform macro invocation");
    };
}
/// Expansion target of `scratch_write!`: assigns the unit tracking local
/// (making later reads pass the initialization check), then performs the
/// write.
#[macro_export]
macro_rules! actual_scratch_write {
    ($scratch:ident, $value:expr ; $track_local:ident) => {
        $track_local = ();
        $scratch.write($value);
    };
}
/// Expansion target of `scratch_read!`: reads the tracking local first, so a
/// scratch that was never written is a "use of possibly uninitialized
/// variable" error.
#[macro_export]
macro_rules! actual_scratch_read {
    ($scratch:ident ; $track_local:ident) => {{
        let _read = $track_local;
        $scratch.read()
    }};
}
/// Declares `$name`: a `Scratch` backed by a `$size`-byte uninitialized
/// buffer living on the caller's stack.
#[macro_export]
macro_rules! define_scratch {
    ($name:ident, $size:expr) => {
        // `MaybeUninit<u8>` is `Copy`, so the array-repeat form builds the
        // uninitialized buffer safely — no need for the previous
        // `unsafe { MaybeUninit::uninit().assume_init() }` dance.
        let mut __buffer: [::core::mem::MaybeUninit<u8>; $size] =
            [::core::mem::MaybeUninit::uninit(); $size];
        #[allow(unused_mut)]
        let mut $name = $crate::scratch::Scratch::new(&mut __buffer);
    };
}
pub use {actual_scratch_read, actual_scratch_write, define_scratch, scratch_read, scratch_write};
#[cfg(test)]
mod tests {
    use pm::scratch_space;
    use super::Scratch;
    // The attribute rewrites the scratch_write!/scratch_read! calls below;
    // see `scratch_space` in the `pm` crate.
    #[scratch_space]
    fn has_scratch_space(mut scratch: Scratch<'_>) {
        scratch_write!(scratch, 10u32);
        let _: u32 = scratch_read!(scratch);
    }
    #[test]
    fn simple_scratch() {
        define_scratch!(scratch, 100);
        has_scratch_space(scratch);
    }
}

148
old-stuff/src/sendsync.rs Normal file
View file

@ -0,0 +1,148 @@
#![cfg_attr(not(test), allow(unused))]
#![allow(dropping_copy_types)]
use std::{
cell::{Cell, UnsafeCell},
rc::Rc,
sync::Arc,
};
// Today we want to design the safe `std::thread::spawn` function and the traits around that.
// First we have the following signature. <maybe start without 'static?>
/// The "safe" spawn whose required bounds this walkthrough derives.
pub fn spawn<F: FnOnce() + 'static>(f: F) {
    // SAFETY: Well, that's what we're here for today.
    unsafe { magic_unchecked_spawn_for_our_convenience(f) }
}
#[test]
fn send_over_integer() {
    // This is perfectly safe. No data is shared. Our function allows this, which is very nice.
    let x = 0;
    spawn(move || drop(dbg!(x)));
}
#[test]
fn rc_the_new_contender() {
    // Now, let's send over a more complex type like an Rc.
    // (Intentionally unsound example — this module is cfg'd out in lib.rs and
    // never actually compiled.)
    let x = Rc::new(0);
    let x2 = x.clone();
    spawn(move || {
        let _ = x2.clone();
    });
    let _ = x.clone(); // DATA RACE
}
// Oh no, we have a data race. This is not exactly good, in fact it's really bad.
// So, how can we forbid Rc from being sent over?
// We need some kind of "this can be sent to other threads" trait. Let's call it "Send".
// NOTE(review): shadows the prelude's Send; auto traits and negative impls
// are nightly-only features.
pub unsafe auto trait Send {}
// It's an auto trait because we really don't want everyone having to implement this manually.
// It's also unsafe because the safety of our spawn function relies on it.
// Why exactly was Rc able to trigger a data race here? The key lies in interior mutability.
// Interior mutability like Cells but also raw pointers should therefore be forbidden by default.
impl<T> !Send for *const T {}
impl<T> !Send for *mut T {}
impl<T> !Send for UnsafeCell<T> {}
// When we now add an F: Send bound to our spawn function, the Rc example stops compiling.
#[test]
fn but_arc_is_fine() {
    // Now, let's send over an Arc instead of an Rc.
    let x = Arc::new(0);
    let x2 = x.clone();
    spawn(move || {
        let _ = x2.clone();
    });
    let _ = x.clone();
}
// Arc is fine here because it uses atomics internally. But it fails to compile! Here, Arc (or us in this case)
// needs to assert that it's fine:
unsafe impl<T> Send for Arc<T> {}
// So now, everything is good.
#[test]
fn an_arc_of_sadness() {
    // Sharing a Cell through Arc: both threads mutate the same memory.
    let x = Arc::new(Cell::new(0));
    let x2 = x.clone();
    spawn(move || {
        x2.set(0);
    });
    x.set(1); // DATA RACE
}
// Oh, not quite. We have an issue. Luckily it's a simple one, we just forgot to put a `T: Send` bound
// on the impl.
// unsafe impl<T: Send> Send for Arc<T> {}
// After we fix this, it fails to compile as desired.
#[test]
fn i_am_just_sending_over_a_cell() {
    // We just send the Cell over and only ever use it from the other thread.
    // This is perfectly sound. We want to allow this.
    // (Motivates splitting "sendable" from "sharable" — Sync — below.)
    let x = Cell::new(0);
    spawn(move || {
        let x = x;
        x.set(1)
    });
}
// The example above fails to compile. But there is no unsoundness here, we want to allow this.
// But as we've seen above, we cannot make `Cell: Send`.
// Really, we have two concepts at play here
// - Something that we can send, owned, to another thread.
// - Something that we can send a reference of to another thread
// Rc can support neither of those, as its possible unsoundness (clone) can be triggered just
// with a shared reference to it, but also with an owned Rc because two owned Rcs can point to the same memory.
// Cell is different. Having a &Cell across threads can lead to the data race. But having an owned Cell cannot
// trigger the unsoundness, as it will just mutate the local value.
// Let's add a new trait for types that support being shared behind a reference.
// NOTE(review): shadows the prelude's Sync, like the Send trait above.
pub unsafe auto trait Sync {}
// UnsafeCell is the key here and will make sure that types like Cell are !Sync.
impl<T> !Sync for UnsafeCell<T> {}
// Also forbid pointers to make sure that unsafe datastructures have to manually assert syncness.
impl<T> !Sync for *const T {}
impl<T> !Sync for *mut T {}
// Now we can actually implement Send for UnsafeCell again. Sending a Cell-like type to another thread
// is not problematic, only sharing it is.
// -impl<T> !Send for UnsafeCell<T> {}
// Now we just need one last piece, the interactions of Send and Sync.
// Sync means that we can share a reference across a thread, so let's represent that in an impl.
// SAFETY (review note): sound because &T only permits the shared access that
// T: Sync declares thread-safe.
unsafe impl<T: Sync> Send for &T {}
// The same "reference like behavior" applies to Arc. We are only allowed to Send an Arc to another thread
// if the thing it holds is Sync. Arc<Cell<u8>> is therefore not Send, as this type is not thread-safe.
// unsafe impl<T: Sync> Send for Arc<T> {}
// In general, anything that provides shared access to T needs a T: Sync bound on its Send impl.
// Bonus: The cursed impl of magic_unchecked_spawn_for_our_convenience.
/// Unchecked spawn used as the foundation of the walkthrough.
///
/// # Safety
/// The caller must guarantee that running `f` on another thread (with no
/// Send/Sync or 'static checking whatsoever) is sound.
pub unsafe fn magic_unchecked_spawn_for_our_convenience<F: FnOnce()>(f: F) {
    // Pretend that we're Send.
    struct AssertSend<T>(T);
    unsafe impl<T> std::marker::Send for AssertSend<T> {}
    // Get over the annoying 'static requirement by just sending over an erased pointer and reading from it.
    // NOTE(review): this module is cfg'd out and never compiled; the
    // `*mut F` cast inside the closure may still impose F: 'static — verify
    // before ever enabling this.
    let s = Box::into_raw(Box::new(f));
    let p = AssertSend(s.cast::<()>());
    std::thread::spawn(|| {
        // `{ p }` moves the wrapper into the closure by value.
        let p = unsafe { Box::from_raw({ p }.0 as *mut F) };
        (p)();
    });
}

146
old-stuff/src/thin_u128.rs Normal file
View file

@ -0,0 +1,146 @@
use std::{
fmt::{Debug, Display},
num::NonZeroUsize,
ptr::{self, NonNull},
};
/// thin.
/// ```text
/// 000000 ... 000000 0000000
///                         ^-always 1 for niche
///                        ^- tag, 1 for inline u62, 0 for box
/// ```
/// Values up to `usize::MAX >> 2` are stored inline (shifted left past the
/// two tag bits); larger values are boxed, with the niche bit set in the
/// pointer so the word is never zero.
pub struct ThinU128(NonNull<u128>);
// Decoded form of the tagged word.
enum Repr {
    Inline(u128),
    Boxed(NonNull<u128>),
}
// Largest value that fits inline (two low bits are reserved for niche + tag).
const USIZE_TWO_BIT_LESS_MAX: u128 = (usize::MAX as u128) >> 2;
// Bit 0: always set, giving NonNull its non-zero guarantee.
const ALWAYS_ONE_NICHE: usize = 0b1;
// Bit 1: set for inline values, clear for boxed ones.
const TAG_MASK: usize = 0b10;
impl ThinU128 {
    /// Encodes `int`: small values inline (shifted past the two tag bits),
    /// large values boxed with the niche bit set in the pointer.
    pub fn new(int: u128) -> Self {
        if int > USIZE_TWO_BIT_LESS_MAX {
            let ptr = Box::into_raw(Box::new(int));
            // A Box<u128> allocation is aligned, so bit 0 is free for the niche.
            let repr = ptr.map_addr(|addr| addr | ALWAYS_ONE_NICHE);
            // SAFETY: the address has bit 0 set, so it is never zero.
            unsafe { Self(NonNull::new_unchecked(repr)) }
        } else {
            let value = (int as usize) << 2;
            let repr = value | TAG_MASK | ALWAYS_ONE_NICHE;
            // The inline word carries no provenance; it is never dereferenced.
            Self(NonNull::new(ptr::without_provenance_mut(repr)).unwrap())
        }
    }
    fn is_inline(&self) -> bool {
        (self.addr() & TAG_MASK) != 0
    }
    fn addr(&self) -> usize {
        self.0.addr().get()
    }
    /// Decodes the tagged word without giving up ownership.
    fn repr(&self) -> Repr {
        if self.is_inline() {
            let value = self.addr() >> 2;
            Repr::Inline(value as u128)
        } else {
            // Strip the niche bit to recover the real box pointer.
            let ptr = self.0.map_addr(|addr| unsafe {
                // SAFETY: the address is a Box allocation with bit 0 ORed in;
                // clearing bit 0 therefore cannot produce zero.
                NonZeroUsize::new_unchecked(addr.get() & !ALWAYS_ONE_NICHE)
            });
            Repr::Boxed(ptr)
        }
    }
    pub fn value(&self) -> u128 {
        match self.repr() {
            Repr::Inline(value) => value,
            // SAFETY: the boxed pointer is valid for reads; u128 is Copy so a
            // plain read cannot cause a double drop.
            Repr::Boxed(ptr) => unsafe { ptr.as_ptr().read() },
        }
    }
}
impl Drop for ThinU128 {
    fn drop(&mut self) {
        // Inline values need no cleanup; boxed values free their allocation.
        if let Repr::Boxed(ptr) = self.repr() {
            unsafe {
                drop(Box::from_raw(ptr.as_ptr()));
            }
        }
    }
}
impl Debug for ThinU128 {
    /// Delegates to the decoded value, forwarding all formatter flags.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        <u128 as Debug>::fmt(&self.value(), f)
    }
}
impl Display for ThinU128 {
    /// Delegates to the decoded value, forwarding all formatter flags.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        <u128 as Display>::fmt(&self.value(), f)
    }
}
impl PartialEq for ThinU128 {
    /// Compares the decoded values, so a boxed and an inline representation
    /// of the same number are equal.
    fn eq(&self, other: &Self) -> bool {
        self.value() == other.value()
    }
}
impl Eq for ThinU128 {}
impl PartialOrd for ThinU128 {
    /// Canonical delegation to `Ord` (u128's order is total), as Clippy's
    /// `non_canonical_partial_ord_impl` recommends; keeps the two orderings
    /// from ever drifting apart.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for ThinU128 {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.value().cmp(&other.value())
    }
}
impl Clone for ThinU128 {
    // Re-encodes from the decoded value, so a boxed value gets a fresh
    // allocation (no sharing between clones).
    fn clone(&self) -> Self {
        Self::new(self.value())
    }
}
// SAFETY: ThinU128 exclusively owns its (possibly boxed) u128 and exposes no
// interior mutability, so moving or sharing it across threads is sound.
unsafe impl Send for ThinU128 {}
unsafe impl Sync for ThinU128 {}
#[cfg(test)]
mod tests {
    use super::ThinU128;
    // Checks encode/decode, equality, and formatting for one value.
    fn roundtrip(a: u128) {
        let thin = ThinU128::new(a);
        assert_eq!(thin.value(), a);
        let other = ThinU128::new(a);
        assert_eq!(thin, other);
        let dbg_a = format!("{a:?}{a}");
        let dbg_thin = format!("{thin:?}{thin}");
        assert_eq!(dbg_a, dbg_thin);
    }
    #[test]
    fn small() {
        // Everything up to the inline limit stays unboxed.
        roundtrip(0);
        roundtrip(1);
        roundtrip(100);
        roundtrip((usize::MAX >> 2) as u128);
    }
    #[test]
    fn big() {
        // First value past the inline limit, plus the extremes.
        roundtrip(((usize::MAX >> 2) as u128) + 1);
        roundtrip(usize::MAX as u128);
        roundtrip(u128::MAX);
    }
}

View file

@ -0,0 +1,37 @@
/// Builds the `unroll_int!` macro: for each number N in the (descending)
/// input list, it generates an arm `(N) => [0, 0, ...]` with one zero per
/// number *after* N — i.e. exactly N zeros when the list counts down to 0.
macro_rules! create_unroll_int {
    // (_, 5) => 5
    (replace@ ($a:tt, $($b:tt)*)) => { $($b)* };
    // 2, 1, 0 => [0, 0, 0]
    (turn_into_zero_array@ $($num:literal)*) => {
        [$( create_unroll_int!(replace@ ($num, 0)) ),*]
    };
    // Recursive step: peel the head number, emit its arm into the accumulator.
    ([$first:tt $($rest:tt)*] | $($acc:tt)*) => {
        create_unroll_int! {
            [$($rest)*]
            |
            ($first) => { create_unroll_int!(turn_into_zero_array@ $($rest)*) };
            $($acc)*
        }
    };
    // Done: define unroll_int! from the accumulated arms.
    ([] | $($acc:tt)*) => {
        macro_rules! unroll_int {
            $($acc)*
        }
    };
    // Entry point: wrap the list and start with an empty accumulator.
    ($($num:tt)*) => {
        create_unroll_int! { [$($num)*] | }
    };
}
// The list must count down strictly by one: the arm for N expands to one zero
// per remaining entry, so N must be followed by exactly N numbers.
// (Previously "... 4 3 1 2 0" — the swapped 1 and 2 made unroll_int!(1)
// produce two zeros and unroll_int!(2) only one.)
create_unroll_int! {
    20 19 18 17 16 15 14 13 12 11
    10 9 8 7 6 5 4 3 2 1 0
}
// Keeps the generated macro exercised: unroll_int!(20) expands to an array
// of 20 zeros.
pub fn x() {
    let _ = unroll_int!(20);
}

View file

@ -0,0 +1,159 @@
// TODO: This should probably be fallible instead of panic
// TODO: Needs more safety docs around alignment
use std::{marker::PhantomData, ptr::Pointee};
/// a replacement for Clone (ignoring the old methods)
pub trait NewClone {
    /// Clones `self` into `place`, returning proof of initialization.
    fn clone_unsized<P>(&self, place: ClonePlace<P, Self>) -> InitClonePlace<P, Self>;
}
/// a replacement for Copy
pub trait NewCopy: NewClone {}
/// A trait which denotes a pointer to a place
pub trait Pointer<T: ?Sized> {
    /// Create a pointer from a raw pointer
    /// # Safety
    /// The pointer needs to be valid to create a `Self`. This method can't really be called
    /// generically, but `ClonePlace` provides a safe interface over it.
    unsafe fn from_raw(ptr: *mut T) -> Self;
}
impl<T: ?Sized> Pointer<T> for Box<T> {
    unsafe fn from_raw(ptr: *mut T) -> Self {
        // Resolves to the inherent `Box::from_raw` (inherent associated fns
        // take precedence over trait methods), so this is not self-recursion.
        Self::from_raw(ptr)
    }
}
impl<T: ?Sized> Pointer<T> for &mut T {
    unsafe fn from_raw(ptr: *mut T) -> Self {
        // The borrow's lifetime is unconstrained here; the caller must ensure
        // it does not outlive the underlying storage.
        &mut *ptr
    }
}
// more impls...
/// Denotes a place which something can be cloned into.
pub struct ClonePlace<P, T: ?Sized> {
    // Start of the writable region.
    ptr: *mut u8,
    // Capacity of the region in bytes.
    max_size: usize,
    // P is the pointer type eventually produced; *const T keeps T "used"
    // without implying ownership.
    _boo: PhantomData<(P, *const T)>,
}
/// Denotes a place where something has been cloned into successfully.
pub struct InitClonePlace<P, T: ?Sized> {
    ptr: *mut u8,
    // Pointer metadata (e.g. the length for str/slices) recorded at init time.
    metadata: <T as Pointee>::Metadata,
    _boo: PhantomData<P>,
}
impl<P, T: ?Sized> ClonePlace<P, T> {
    /// Get the raw pointer of the place to write things yourself
    pub fn as_ptr(&self) -> *const u8 {
        self.ptr
    }
    /// Get the maximum allocation size of the place
    pub fn max_size(&self) -> usize {
        self.max_size
    }
    /// Create a new ClonePlace from a pointer and maximum size
    /// # Safety
    /// `ptr` has to be valid for writes of size `max_size`
    unsafe fn from_raw(ptr: *mut u8, max_size: usize) -> Self {
        Self {
            ptr,
            max_size,
            _boo: PhantomData,
        }
    }
    /// Unsafely assert that the place has been initialized for as many bytes as covered
    /// by the metadata. This is done by using `as_ptr` and writing to it before
    /// # Safety
    /// `self.ptr` must be valid for reads of at least as many bytes as denoted by the `metadata`
    unsafe fn assert_init_with_meta(
        self,
        metadata: <T as Pointee>::Metadata,
    ) -> InitClonePlace<P, T> {
        // Only constructor of InitClonePlace: it carries the caller's
        // initialization promise forward.
        InitClonePlace {
            ptr: self.ptr,
            metadata,
            _boo: PhantomData,
        }
    }
}
impl<P, T: ?Sized + NewCopy> ClonePlace<P, T> {
    /// Safe convenience function for implementing Clone via Copy
    pub fn copy_trivially(self, data: &T) -> InitClonePlace<P, T> {
        let size = std::mem::size_of_val(data);
        // Guard the raw copy below: the place must be large enough.
        assert!(self.max_size() >= size);
        // SAFETY: `data` is valid for reads of `sizeof(data)`
        // `self.ptr` must be writable for at least as many bytes as `self.max_size`, which we just asserted
        // We have initialized `self.ptr` by `sizeof(data)` bytes, meaning it's fine to assert it as init
        unsafe {
            std::ptr::copy_nonoverlapping(data as *const T as *const u8, self.ptr, size);
            ClonePlace::assert_init_with_meta(self, std::ptr::metadata(data))
        }
    }
}
impl<P: Pointer<T>, T: ?Sized> InitClonePlace<P, T> {
    /// Turn the initialized place into the safe pointer type
    pub fn into_init_value(self) -> P {
        // SAFETY: an InitClonePlace is only created via assert_init_with_meta,
        // whose contract guarantees `ptr` + `metadata` denote initialized data
        // suitable for the pointer type `P`.
        unsafe { P::from_raw(std::ptr::from_raw_parts_mut(self.ptr, self.metadata)) }
    }
}
// convenience function
impl<T: ?Sized> ClonePlace<Box<T>, T> {
    /// Creates a new boxed ClonePlace and allocates as many bytes as required
    /// for `value`.
    pub fn boxed(value: &T) -> Self {
        let layout = std::alloc::Layout::for_value(value);
        if layout.size() == 0 {
            // `std::alloc::alloc` with a zero-sized layout is undefined
            // behavior (e.g. `value` being ""); the canonical stand-in is a
            // dangling, suitably aligned pointer, which `Box::from_raw`
            // accepts for zero-sized values.
            // SAFETY: zero bytes are "valid for writes" at any aligned address.
            return unsafe {
                Self::from_raw(std::ptr::without_provenance_mut(layout.align()), 0)
            };
        }
        // SAFETY: We checked the pointer for null (aborting via
        // handle_alloc_error), so it's valid for `layout.size()` bytes —
        // which is what `from_raw`, and later `Box::from_raw`, require.
        unsafe {
            let allocated = std::alloc::alloc(layout);
            if allocated.is_null() {
                std::alloc::handle_alloc_error(layout);
            }
            Self::from_raw(allocated, layout.size())
        }
    }
}
impl NewClone for str {
    // str is plain bytes, so cloning it is a trivial byte copy.
    fn clone_unsized<P>(&self, place: ClonePlace<P, Self>) -> InitClonePlace<P, Self> {
        place.copy_trivially(self)
    }
}
impl NewCopy for str {}
#[test]
fn boxit() {
    // Clone a str into a freshly allocated box-backed place.
    let str = "aaaa";
    let place = ClonePlace::boxed(str);
    let init_place = str.clone_unsized(place);
    let the_box = init_place.into_init_value();
    assert_eq!(&*the_box, "aaaa");
}
#[test]
fn on_the_stack() {
    // Same clone, but into caller-provided stack storage: the result is a
    // `&mut str` borrowing `storage` — no allocation involved.
    let mut storage = [std::mem::MaybeUninit::<u8>::uninit(); 10];
    let str = "aaaa";
    // SAFETY: `storage` is valid for writes of 10 bytes.
    let place: ClonePlace<&mut str, _> =
        unsafe { ClonePlace::from_raw(storage.as_mut_ptr().cast::<u8>(), 10) };
    let init_place = str.clone_unsized(place);
    let the_box = init_place.into_init_value();
    assert_eq!(&*the_box, "aaaa");
}

View file

@ -0,0 +1,18 @@
use uwu::safe_extern::safe_extern;
// `#[safe_extern]` renames this declaration and generates a safe `add`
// wrapper; see the proc macro's docs in the `pm` crate.
#[safe_extern]
extern "Rust" {
    fn add(a: u8, b: u8) -> u8;
}
mod _impl {
    // #[no_mangle] exports the symbol as `add`, which the renamed foreign
    // declaration above links against via its generated #[link_name = "add"].
    #[no_mangle]
    pub(super) fn add(a: u8, b: u8) -> u8 {
        a + b
    }
}
#[test]
fn adding() {
    assert_eq!(add(1, 2), 3);
}