diff --git a/examples/stores/Cargo.toml b/examples/stores/Cargo.toml index 5d17c320d..748298769 100644 --- a/examples/stores/Cargo.toml +++ b/examples/stores/Cargo.toml @@ -13,6 +13,9 @@ leptos = { path = "../../leptos", features = ["csr"] } reactive_stores = { path = "../../reactive_stores" } reactive_stores_macro = { path = "../../reactive_stores_macro" } console_error_panic_hook = "0.1.7" +chrono = { version = "0.4.38", features = ["serde"] } +serde = { version = "1.0.210", features = ["derive"] } +serde_json = "1.0.128" [dev-dependencies] wasm-bindgen = "0.2.93" diff --git a/examples/stores/index.html b/examples/stores/index.html index 75fa1f12a..f248365c5 100644 --- a/examples/stores/index.html +++ b/examples/stores/index.html @@ -3,6 +3,11 @@ + - \ No newline at end of file + diff --git a/examples/stores/src/lib.rs b/examples/stores/src/lib.rs index 1827ae74f..0d0b7e930 100644 --- a/examples/stores/src/lib.rs +++ b/examples/stores/src/lib.rs @@ -1,24 +1,57 @@ -use leptos::prelude::*; -use reactive_stores::{Field, Store, StoreFieldIterator}; -use reactive_stores_macro::Store; +use std::sync::atomic::{AtomicUsize, Ordering}; -#[derive(Debug, Store)] +use chrono::{Local, NaiveDate}; +use leptos::prelude::*; +use reactive_stores::{Field, Store}; +use reactive_stores_macro::Store; +use serde::{Deserialize, Serialize}; + +// ID starts higher than 0 because we have a few starting todos by default +static NEXT_ID: AtomicUsize = AtomicUsize::new(3); + +#[derive(Debug, Store, Serialize, Deserialize)] struct Todos { user: String, + #[store(key: usize = |todo| todo.id)] todos: Vec, } -#[derive(Debug, Store)] +#[derive(Debug, Store, Serialize, Deserialize)] struct Todo { + id: usize, label: String, - completed: bool, + status: Status, +} + +#[derive(Debug, Default, Clone, Store, Serialize, Deserialize)] +enum Status { + #[default] + Pending, + Scheduled, + ScheduledFor { + date: NaiveDate, + }, + Done, +} + +impl Status { + pub fn next_step(&mut self) { + *self = match 
self { + Status::Pending => Status::ScheduledFor { + date: Local::now().naive_local().into(), + }, + Status::Scheduled | Status::ScheduledFor { .. } => Status::Done, + Status::Done => Status::Done, + }; + } } impl Todo { pub fn new(label: impl ToString) -> Self { Self { + id: NEXT_ID.fetch_add(1, Ordering::Relaxed), label: label.to_string(), - completed: false, + status: Status::Pending, } } } @@ -28,16 +61,19 @@ fn data() -> Todos { user: "Bob".to_string(), todos: vec![ Todo { + id: 0, label: "Create reactive store".to_string(), - completed: true, + status: Status::Pending, }, Todo { + id: 1, label: "???".to_string(), - completed: false, + status: Status::Pending, }, Todo { + id: 2, label: "Profit".to_string(), - completed: false, + status: Status::Pending, }, ], } @@ -49,15 +85,6 @@ pub fn App() -> impl IntoView { let input_ref = NodeRef::new(); - let rows = move || { - store - .todos() - .iter() - .enumerate() - .map(|(idx, todo)| view! { }) - .collect_view() - }; - view! {

"Hello, " {move || store.user().get()}

impl IntoView {
-
    {rows}
-
+
    + + + + +
+
{move || serde_json::to_string_pretty(&*store.read())}
} } #[component] fn TodoRow( store: Store, - idx: usize, #[prop(into)] todo: Field, ) -> impl IntoView { - let completed = todo.completed(); + let status = todo.status(); let title = todo.label(); - let editing = RwSignal::new(false); + let editing = RwSignal::new(true); view! { -
  • - class:foo=move || completed.get() - >

    - + + + { + date.set(new_date); + } + Err(e) => warn!("{e}"), + } + } + } + /> +

  • } } diff --git a/reactive_graph/src/lib.rs b/reactive_graph/src/lib.rs index 6dc23cd60..5aa32601d 100644 --- a/reactive_graph/src/lib.rs +++ b/reactive_graph/src/lib.rs @@ -99,7 +99,8 @@ pub mod prelude { // TODO remove this, it's just useful while developing #[allow(unused)] -fn log_warning(text: Arguments) { +#[doc(hidden)] +pub fn log_warning(text: Arguments) { #[cfg(feature = "tracing")] { tracing::warn!(text); diff --git a/reactive_stores/src/arc_field.rs b/reactive_stores/src/arc_field.rs index f318758b3..c6bc04b93 100644 --- a/reactive_stores/src/arc_field.rs +++ b/reactive_stores/src/arc_field.rs @@ -1,6 +1,6 @@ use crate::{ path::{StorePath, StorePathSegment}, - AtIndex, StoreField, Subfield, + AtIndex, AtKeyed, KeyMap, KeyedSubfield, StoreField, Subfield, }; use reactive_graph::{ signal::ArcTrigger, @@ -9,6 +9,8 @@ use reactive_graph::{ }, }; use std::{ + fmt::Debug, + hash::Hash, ops::{Deref, DerefMut, IndexMut}, panic::Location, sync::Arc, @@ -25,6 +27,7 @@ where get_trigger: Arc ArcTrigger + Send + Sync>, read: Arc Option> + Send + Sync>, write: Arc Option> + Send + Sync>, + keys: Arc Option + Send + Sync>, } pub struct StoreFieldReader(Box>); @@ -75,6 +78,7 @@ impl StoreField for ArcField { type Value = T; type Reader = StoreFieldReader; type Writer = StoreFieldWriter; + type UntrackedWriter = StoreFieldWriter; fn get_trigger(&self, path: StorePath) -> ArcTrigger { (self.get_trigger)(path) @@ -91,6 +95,16 @@ impl StoreField for ArcField { fn writer(&self) -> Option { (self.write)().map(StoreFieldWriter::new) } + + fn untracked_writer(&self) -> Option { + let mut writer = (self.write)().map(StoreFieldWriter::new)?; + writer.untrack(); + Some(writer) + } + + fn keys(&self) -> Option { + (self.keys)() + } } impl From> for ArcField @@ -119,6 +133,10 @@ where let value = value.clone(); move || value.writer().map(StoreFieldWriter::new) }), + keys: Arc::new({ + let value = value.clone(); + move || value.keys() + }), } } } @@ -149,6 +167,48 @@ where let 
value = value.clone(); move || value.writer().map(StoreFieldWriter::new) }), + keys: Arc::new({ + let value = value.clone(); + move || value.keys() + }), + } + } +} + +impl From> for ArcField +where + AtKeyed: Clone, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField + Send + Sync + 'static, + Prev: 'static, + T: IndexMut + 'static, + T::Output: Sized, +{ + #[track_caller] + fn from(value: AtKeyed) -> Self { + ArcField { + #[cfg(debug_assertions)] + defined_at: Location::caller(), + path: value.path().into_iter().collect(), + trigger: value.get_trigger(value.path().into_iter().collect()), + get_trigger: Arc::new({ + let value = value.clone(); + move |path| value.get_trigger(path) + }), + read: Arc::new({ + let value = value.clone(); + move || value.reader().map(StoreFieldReader::new) + }), + write: Arc::new({ + let value = value.clone(); + move || value.writer().map(StoreFieldWriter::new) + }), + keys: Arc::new({ + let value = value.clone(); + move || value.keys() + }), } } } @@ -163,6 +223,7 @@ impl Clone for ArcField { get_trigger: Arc::clone(&self.get_trigger), read: Arc::clone(&self.read), write: Arc::clone(&self.write), + keys: Arc::clone(&self.keys), } } } diff --git a/reactive_stores/src/field.rs b/reactive_stores/src/field.rs index fb4fcb6f9..65cb53bc9 100644 --- a/reactive_stores/src/field.rs +++ b/reactive_stores/src/field.rs @@ -1,7 +1,7 @@ use crate::{ arc_field::{StoreFieldReader, StoreFieldWriter}, path::{StorePath, StorePathSegment}, - ArcField, AtIndex, StoreField, Subfield, + ArcField, AtIndex, AtKeyed, KeyMap, KeyedSubfield, StoreField, Subfield, }; use reactive_graph::{ owner::{Storage, StoredValue, SyncStorage}, @@ -9,7 +9,7 @@ use reactive_graph::{ traits::{DefinedAt, IsDisposed, Notify, ReadUntracked, Track}, unwrap_signal, }; -use std::{ops::IndexMut, panic::Location}; +use std::{fmt::Debug, hash::Hash, ops::IndexMut, panic::Location}; pub struct Field where 
@@ -27,6 +27,7 @@ where type Value = T; type Reader = StoreFieldReader; type Writer = StoreFieldWriter; + type UntrackedWriter = StoreFieldWriter; fn get_trigger(&self, path: StorePath) -> ArcTrigger { self.inner @@ -49,6 +50,16 @@ where fn writer(&self) -> Option { self.inner.try_get_value().and_then(|inner| inner.writer()) } + + fn untracked_writer(&self) -> Option { + self.inner + .try_get_value() + .and_then(|inner| inner.untracked_writer()) + } + + fn keys(&self) -> Option { + self.inner.try_get_value().and_then(|n| n.keys()) + } } impl From> for Field @@ -87,6 +98,29 @@ where } } +impl From> + for Field +where + S: Storage>, + AtKeyed: Clone, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField + Send + Sync + 'static, + Prev: 'static, + T: IndexMut + 'static, + T::Output: Sized, +{ + #[track_caller] + fn from(value: AtKeyed) -> Self { + Field { + #[cfg(debug_assertions)] + defined_at: Location::caller(), + inner: StoredValue::new_with_storage(value.into()), + } + } +} + impl Clone for Field { fn clone(&self) -> Self { *self diff --git a/reactive_stores/src/iter.rs b/reactive_stores/src/iter.rs index 0959c3d59..dbd3daace 100644 --- a/reactive_stores/src/iter.rs +++ b/reactive_stores/src/iter.rs @@ -1,6 +1,7 @@ use crate::{ path::{StorePath, StorePathSegment}, store_field::StoreField, + KeyMap, }; use reactive_graph::{ signal::{ @@ -20,10 +21,7 @@ use std::{ }; #[derive(Debug)] -pub struct AtIndex -where - Inner: StoreField, -{ +pub struct AtIndex { #[cfg(debug_assertions)] defined_at: &'static Location<'static>, inner: Inner, @@ -33,7 +31,7 @@ where impl Clone for AtIndex where - Inner: StoreField + Clone, + Inner: Clone, { fn clone(&self) -> Self { Self { @@ -46,15 +44,9 @@ where } } -impl Copy for AtIndex where - Inner: StoreField + Copy -{ -} +impl Copy for AtIndex where Inner: Copy {} -impl AtIndex -where - Inner: StoreField, -{ +impl AtIndex { #[track_caller] pub fn 
new(inner: Inner, index: usize) -> Self { Self { @@ -77,6 +69,8 @@ where type Reader = MappedMutArc; type Writer = MappedMutArc, Prev::Output>; + type UntrackedWriter = + MappedMutArc, Prev::Output>; fn path(&self) -> impl IntoIterator { self.inner @@ -109,6 +103,17 @@ where move |n| &mut n[index], )) } + + fn untracked_writer(&self) -> Option { + let mut guard = self.writer()?; + guard.untrack(); + Some(guard) + } + + #[inline(always)] + fn keys(&self) -> Option { + self.inner.keys() + } } impl DefinedAt for AtIndex @@ -195,16 +200,27 @@ where } } -pub trait StoreFieldIterator: Sized { +pub trait StoreFieldIterator +where + Self: StoreField, +{ + fn at(self, index: usize) -> AtIndex; + fn iter(self) -> StoreFieldIter; } impl StoreFieldIterator for Inner where - Inner: StoreField, + Inner: StoreField + Clone, Prev::Output: Sized, Prev: IndexMut + AsRef<[Prev::Output]>, { + #[track_caller] + fn at(self, index: usize) -> AtIndex { + AtIndex::new(self.clone(), index) + } + + #[track_caller] fn iter(self) -> StoreFieldIter { // reactively track changes to this field let trigger = self.get_trigger(self.path().into_iter().collect()); @@ -240,13 +256,7 @@ where fn next(&mut self) -> Option { if self.idx < self.len { - let field = AtIndex { - #[cfg(debug_assertions)] - defined_at: Location::caller(), - index: self.idx, - inner: self.inner.clone(), - ty: PhantomData, - }; + let field = AtIndex::new(self.inner.clone(), self.idx); self.idx += 1; Some(field) } else { diff --git a/reactive_stores/src/keyed.rs b/reactive_stores/src/keyed.rs new file mode 100644 index 000000000..b59709ea4 --- /dev/null +++ b/reactive_stores/src/keyed.rs @@ -0,0 +1,698 @@ +use crate::{ + path::{StorePath, StorePathSegment}, + store_field::StoreField, + KeyMap, +}; +use reactive_graph::{ + signal::{ + guards::{Mapped, MappedMut, MappedMutArc, WriteGuard}, + ArcTrigger, + }, + traits::{ + DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard, + Writeable, + }, +}; +use std::{ + 
collections::VecDeque, + fmt::Debug, + hash::Hash, + iter, + ops::{Deref, DerefMut, IndexMut}, + panic::Location, +}; + +#[derive(Debug)] +pub struct KeyedSubfield +where + for<'a> &'a T: IntoIterator, +{ + #[cfg(debug_assertions)] + defined_at: &'static Location<'static>, + path_segment: StorePathSegment, + inner: Inner, + read: fn(&Prev) -> &T, + write: fn(&mut Prev) -> &mut T, + key_fn: fn(<&T as IntoIterator>::Item) -> K, +} + +impl Clone for KeyedSubfield +where + for<'a> &'a T: IntoIterator, + Inner: Clone, +{ + fn clone(&self) -> Self { + Self { + #[cfg(debug_assertions)] + defined_at: self.defined_at, + path_segment: self.path_segment, + inner: self.inner.clone(), + read: self.read, + write: self.write, + key_fn: self.key_fn, + } + } +} + +impl Copy for KeyedSubfield +where + for<'a> &'a T: IntoIterator, + Inner: Copy, +{ +} + +impl KeyedSubfield +where + for<'a> &'a T: IntoIterator, +{ + #[track_caller] + pub fn new( + inner: Inner, + path_segment: StorePathSegment, + key_fn: fn(<&T as IntoIterator>::Item) -> K, + read: fn(&Prev) -> &T, + write: fn(&mut Prev) -> &mut T, + ) -> Self { + Self { + #[cfg(debug_assertions)] + defined_at: Location::caller(), + inner, + path_segment, + read, + write, + key_fn, + } + } +} + +impl StoreField for KeyedSubfield +where + Self: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + type Value = T; + type Reader = Mapped; + type Writer = KeyedSubfieldWriteGuard< + Inner, + Prev, + K, + T, + MappedMut, T>, + >; + type UntrackedWriter = + MappedMut, T>; + + fn path(&self) -> impl IntoIterator { + self.inner + .path() + .into_iter() + .chain(iter::once(self.path_segment)) + } + + fn get_trigger(&self, path: StorePath) -> ArcTrigger { + self.inner.get_trigger(path) + } + + fn reader(&self) -> Option { + let inner = self.inner.reader()?; + Some(Mapped::new_with_guard(inner, self.read)) + } + + fn writer(&self) -> Option { + let path = 
self.path().into_iter().collect::(); + let trigger = self.get_trigger(path.clone()); + let guard = WriteGuard::new(trigger, self.inner.writer()?); + let guard = MappedMut::new(guard, self.read, self.write); + Some(KeyedSubfieldWriteGuard { + inner: self.clone(), + guard: Some(guard), + }) + } + + fn untracked_writer(&self) -> Option { + let trigger = self.get_trigger(self.path().into_iter().collect()); + let inner = WriteGuard::new(trigger, self.inner.untracked_writer()?); + Some(MappedMut::new(inner, self.read, self.write)) + } + + #[inline(always)] + fn keys(&self) -> Option { + self.inner.keys() + } +} + +impl KeyedSubfield +where + Self: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + fn latest_keys(&self) -> Vec { + self.reader() + .expect("trying to update keys") + .deref() + .into_iter() + .map(|n| (self.key_fn)(n)) + .collect() + } +} + +pub struct KeyedSubfieldWriteGuard +where + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + inner: KeyedSubfield, + guard: Option, +} + +impl Deref + for KeyedSubfieldWriteGuard +where + Guard: Deref, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + type Target = Guard::Target; + + fn deref(&self) -> &Self::Target { + self.guard + .as_ref() + .expect("should be Some(_) until dropped") + .deref() + } +} + +impl DerefMut + for KeyedSubfieldWriteGuard +where + Guard: DerefMut, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + fn deref_mut(&mut self) -> &mut Self::Target { + self.guard + .as_mut() + .expect("should be Some(_) until dropped") + .deref_mut() + } +} + +impl UntrackableGuard + for KeyedSubfieldWriteGuard 
+where + Guard: UntrackableGuard, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + fn untrack(&mut self) { + if let Some(inner) = self.guard.as_mut() { + inner.untrack(); + } + } +} + +impl Drop + for KeyedSubfieldWriteGuard +where + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + fn drop(&mut self) { + // dropping the inner guard will + // 1) synchronously release its write lock on the store's value + // 2) trigger an (asynchronous) reactive update + drop(self.guard.take()); + + // now that the write lock is released, we can get a read lock to refresh this keyed field + // based on the new value + self.inner.update_keys(); + + // reactive updates happen on the next tick + } +} + +impl DefinedAt for KeyedSubfield +where + for<'a> &'a T: IntoIterator, + Inner: StoreField, +{ + fn defined_at(&self) -> Option<&'static Location<'static>> { + #[cfg(debug_assertions)] + { + Some(self.defined_at) + } + #[cfg(not(debug_assertions))] + { + None + } + } +} + +impl IsDisposed for KeyedSubfield +where + for<'a> &'a T: IntoIterator, + Inner: IsDisposed, +{ + fn is_disposed(&self) -> bool { + self.inner.is_disposed() + } +} + +impl Notify for KeyedSubfield +where + Self: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + fn notify(&self) { + let trigger = self.get_trigger(self.path().into_iter().collect()); + trigger.notify(); + } +} + +impl Track for KeyedSubfield +where + Self: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField + Track + 'static, + Prev: 'static, + T: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + fn track(&self) { + self.inner.track(); + let trigger = self.get_trigger(self.path().into_iter().collect()); + trigger.track(); + }
+} + +impl ReadUntracked for KeyedSubfield +where + Self: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + type Value = ::Reader; + + fn try_read_untracked(&self) -> Option { + self.reader() + } +} + +impl Writeable for KeyedSubfield +where + Self: Clone, + for<'a> &'a T: IntoIterator, + T: 'static, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + type Value = T; + + fn try_write(&self) -> Option> { + self.writer() + } + + fn try_write_untracked( + &self, + ) -> Option> { + self.writer().map(|mut writer| { + writer.untrack(); + writer + }) + } +} + +#[derive(Debug)] +pub struct AtKeyed +where + for<'a> &'a T: IntoIterator, +{ + #[cfg(debug_assertions)] + defined_at: &'static Location<'static>, + inner: KeyedSubfield, + key: K, +} + +impl Clone for AtKeyed +where + for<'a> &'a T: IntoIterator, + KeyedSubfield: Clone, + K: Debug + Clone, +{ + fn clone(&self) -> Self { + Self { + #[cfg(debug_assertions)] + defined_at: self.defined_at, + inner: self.inner.clone(), + key: self.key.clone(), + } + } +} + +impl Copy for AtKeyed +where + for<'a> &'a T: IntoIterator, + KeyedSubfield: Copy, + K: Debug + Copy, +{ +} + +impl AtKeyed +where + for<'a> &'a T: IntoIterator, +{ + #[track_caller] + pub fn new(inner: KeyedSubfield, key: K) -> Self { + Self { + #[cfg(debug_assertions)] + defined_at: Location::caller(), + inner, + key, + } + } +} + +impl StoreField for AtKeyed +where + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + T: IndexMut, + T::Output: Sized, +{ + type Value = T::Output; + type Reader = MappedMutArc< + as StoreField>::Reader, + T::Output, + >; + type Writer = WriteGuard< + ArcTrigger, + MappedMutArc< + as StoreField>::Writer, + T::Output, + >, + >; + type UntrackedWriter = WriteGuard< + ArcTrigger, + MappedMutArc< + 
as StoreField>::Writer, + T::Output, + >, + >; + + fn path(&self) -> impl IntoIterator { + let inner = self.inner.path().into_iter().collect::(); + let keys = self + .inner + .keys() + .expect("using keys on a store with no keys"); + let this = keys + .with_field_keys( + inner.clone(), + |keys| keys.get(&self.key), + || self.inner.latest_keys(), + ) + .flatten() + .map(|(path, _)| path); + inner.into_iter().chain(this) + } + + fn get_trigger(&self, path: StorePath) -> ArcTrigger { + self.inner.get_trigger(path) + } + + fn reader(&self) -> Option { + let inner = self.inner.reader()?; + + let inner_path = self.inner.path().into_iter().collect(); + let keys = self + .inner + .keys() + .expect("using keys on a store with no keys"); + let index = keys + .with_field_keys( + inner_path, + |keys| keys.get(&self.key), + || self.inner.latest_keys(), + ) + .flatten() + .map(|(_, idx)| idx) + .expect("reading from a keyed field that has not yet been created"); + + Some(MappedMutArc::new( + inner, + move |n| &n[index], + move |n| &mut n[index], + )) + } + + fn writer(&self) -> Option { + let inner = self.inner.writer()?; + let trigger = self.get_trigger(self.path().into_iter().collect()); + + let inner_path = self.inner.path().into_iter().collect::(); + let keys = self + .inner + .keys() + .expect("using keys on a store with no keys"); + let index = keys + .with_field_keys( + inner_path.clone(), + |keys| keys.get(&self.key), + || self.inner.latest_keys(), + ) + .flatten() + .map(|(_, idx)| idx) + .expect("reading from a keyed field that has not yet been created"); + + Some(WriteGuard::new( + trigger, + MappedMutArc::new( + inner, + move |n| &n[index], + move |n| &mut n[index], + ), + )) + } + + fn untracked_writer(&self) -> Option { + let mut guard = self.writer()?; + guard.untrack(); + Some(guard) + } + + #[inline(always)] + fn keys(&self) -> Option { + self.inner.keys() + } +} + +impl DefinedAt for AtKeyed +where + for<'a> &'a T: IntoIterator, +{ + fn defined_at(&self) -> 
Option<&'static Location<'static>> { + #[cfg(debug_assertions)] + { + Some(self.defined_at) + } + #[cfg(not(debug_assertions))] + { + None + } + } +} + +impl IsDisposed for AtKeyed +where + for<'a> &'a T: IntoIterator, + Inner: IsDisposed, +{ + fn is_disposed(&self) -> bool { + self.inner.is_disposed() + } +} + +impl Notify for AtKeyed +where + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + T: IndexMut, + T::Output: Sized, +{ + fn notify(&self) { + let trigger = self.get_trigger(self.path().into_iter().collect()); + trigger.notify(); + } +} + +impl Track for AtKeyed +where + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + T: IndexMut, + T::Output: Sized, +{ + fn track(&self) { + let trigger = self.get_trigger(self.path().into_iter().collect()); + trigger.track(); + } +} + +impl ReadUntracked for AtKeyed +where + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + T: IndexMut, + T::Output: Sized, +{ + type Value = ::Reader; + + fn try_read_untracked(&self) -> Option { + self.reader() + } +} + +impl Writeable for AtKeyed +where + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, + KeyedSubfield: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + T: IndexMut, + T::Output: Sized + 'static, +{ + type Value = T::Output; + + fn try_write(&self) -> Option> { + self.writer() + } + + fn try_write_untracked( + &self, + ) -> Option> { + self.writer().map(|mut writer| { + writer.untrack(); + writer + }) + } +} + +impl KeyedSubfield +where + Self: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, +{ + pub fn update_keys(&self) { + let inner_path = 
self.path().into_iter().collect(); + let keys = self + .inner + .keys() + .expect("updating keys on a store with no keys"); + + keys.with_field_keys( + inner_path, + |keys| { + keys.update(self.latest_keys()); + }, + || self.latest_keys(), + ); + } +} + +impl KeyedSubfield +where + Self: Clone, + for<'a> &'a T: IntoIterator, + Inner: StoreField, + Prev: 'static, + K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static, + T: IndexMut, + T::Output: Sized, +{ + #[track_caller] + pub fn iter_keyed(self) -> StoreFieldKeyedIter { + // reactively track changes to this field + let trigger = self.get_trigger(self.path().into_iter().collect()); + trigger.track(); + + // get the current length of the field by accessing slice + let reader = self + .reader() + .expect("creating iterator from unavailable store field"); + let keys = reader + .into_iter() + .map(|item| (self.key_fn)(item)) + .collect::>(); + + // return the iterator + StoreFieldKeyedIter { inner: self, keys } + } +} + +pub struct StoreFieldKeyedIter +where + for<'a> &'a T: IntoIterator, + T: IndexMut, +{ + inner: KeyedSubfield, + keys: VecDeque, +} + +impl Iterator for StoreFieldKeyedIter +where + Inner: StoreField + Clone + 'static, + T: IndexMut + 'static, + T::Output: Sized + 'static, + for<'a> &'a T: IntoIterator, +{ + type Item = AtKeyed; + + fn next(&mut self) -> Option { + self.keys + .pop_front() + .map(|key| AtKeyed::new(self.inner.clone(), key)) + } +} diff --git a/reactive_stores/src/lib.rs b/reactive_stores/src/lib.rs index 64419aebe..816636b84 100644 --- a/reactive_stores/src/lib.rs +++ b/reactive_stores/src/lib.rs @@ -1,3 +1,4 @@ +use or_poisoned::OrPoisoned; use reactive_graph::{ owner::{LocalStorage, Storage, StoredValue, SyncStorage}, signal::{ @@ -8,7 +9,10 @@ use reactive_graph::{ }; use rustc_hash::FxHashMap; use std::{ + any::Any, + collections::HashMap, fmt::Debug, + hash::Hash, panic::Location, sync::{Arc, RwLock}, }; @@ -16,6 +20,8 @@ use std::{ mod arc_field; mod field; mod iter; +mod 
keyed; +mod option; mod patch; mod path; mod store_field; @@ -24,9 +30,11 @@ mod subfield; pub use arc_field::ArcField; pub use field::Field; pub use iter::*; +pub use keyed::*; +pub use option::*; pub use patch::*; -use path::StorePath; -pub use store_field::StoreField; +use path::{StorePath, StorePathSegment}; +pub use store_field::{StoreField, Then}; pub use subfield::Subfield; #[derive(Debug, Default)] @@ -49,11 +57,118 @@ impl TriggerMap { } } +pub struct FieldKeys { + spare_keys: Vec, + current_key: usize, + keys: HashMap, +} + +impl FieldKeys +where + K: Debug + Hash + PartialEq + Eq, +{ + pub fn new(from_iter: impl IntoIterator) -> Self { + let mut current_key = 0; + let mut keys = HashMap::new(); + for (idx, key) in from_iter.into_iter().enumerate() { + let segment = current_key.into(); + keys.insert(key, (segment, idx)); + current_key += 1; + } + + Self { + spare_keys: Vec::new(), + current_key: 0, + keys, + } + } +} + +impl FieldKeys +where + K: Hash + PartialEq + Eq, +{ + pub fn get(&self, key: &K) -> Option<(StorePathSegment, usize)> { + self.keys.get(key).copied() + } + + fn next_key(&mut self) -> StorePathSegment { + self.spare_keys.pop().unwrap_or_else(|| { + self.current_key += 1; + self.current_key.into() + }) + } + + pub fn update(&mut self, iter: impl IntoIterator) { + let new_keys = iter + .into_iter() + .enumerate() + .map(|(idx, key)| (key, idx)) + .collect::>(); + + // remove old keys and recycle the slots + self.keys.retain(|key, old_entry| match new_keys.get(key) { + Some(idx) => { + old_entry.1 = *idx; + true + } + None => { + self.spare_keys.push(old_entry.0); + false + } + }); + + // add new keys + for (key, idx) in new_keys { + // the suggestion doesn't compile because we need &mut for self.next_key(), + // and we don't want to call that until after the check + #[allow(clippy::map_entry)] + if !self.keys.contains_key(&key) { + let path = self.next_key(); + self.keys.insert(key, (path, idx)); + } + } + } +} + +impl Default for FieldKeys 
{ + fn default() -> Self { + Self { + spare_keys: Default::default(), + current_key: Default::default(), + keys: Default::default(), + } + } +} + +#[derive(Default, Clone)] +pub struct KeyMap(Arc>>>); + +impl KeyMap { + pub fn with_field_keys( + &self, + path: StorePath, + fun: impl FnOnce(&mut FieldKeys) -> T, + initialize: impl FnOnce() -> Vec, + ) -> Option + where + K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static, + { + let mut guard = self.0.write().or_poisoned(); + let entry = guard + .entry(path) + .or_insert_with(|| Box::new(FieldKeys::new(initialize()))); + let entry = entry.downcast_mut::>()?; + Some(fun(entry)) + } +} + pub struct ArcStore { #[cfg(debug_assertions)] defined_at: &'static Location<'static>, pub(crate) value: Arc>, signals: Arc>, + keys: KeyMap, } impl ArcStore { @@ -63,7 +178,7 @@ impl ArcStore { defined_at: Location::caller(), value: Arc::new(RwLock::new(value)), signals: Default::default(), - /* inner: Arc::new(RwLock::new(SubscriberSet::new())), */ + keys: Default::default(), } } } @@ -86,6 +201,7 @@ impl Clone for ArcStore { defined_at: self.defined_at, value: Arc::clone(&self.value), signals: Arc::clone(&self.signals), + keys: self.keys.clone(), } } } @@ -123,7 +239,7 @@ where impl Track for ArcStore { fn track(&self) { - self.get_trigger(Default::default()).notify(); + self.get_trigger(Default::default()).track(); } } @@ -248,7 +364,9 @@ where #[cfg(test)] mod tests { - use crate::{self as reactive_stores, Patch, Store, StoreFieldIterator}; + use crate::{ + self as reactive_stores, Patch, Store, StoreField, StoreFieldIterator, + }; use reactive_graph::{ effect::Effect, traits::{Read, ReadUntracked, Set, Update, Writeable}, @@ -462,4 +580,9 @@ mod tests { tick().await; assert_eq!(combined_count.load(Ordering::Relaxed), 2); } + + #[derive(Debug, Store)] + pub struct StructWithOption { + opt_field: Option, + } } diff --git a/reactive_stores/src/option.rs b/reactive_stores/src/option.rs new file mode 100644 index 
000000000..32a00db64 --- /dev/null +++ b/reactive_stores/src/option.rs @@ -0,0 +1,280 @@ +use crate::{StoreField, Subfield}; +use reactive_graph::traits::Read; +use std::ops::Deref; + +pub trait OptionStoreExt +where + Self: StoreField>, +{ + type Output; + + fn unwrap(self) -> Subfield, Self::Output>; + + fn map( + self, + map_fn: impl FnOnce(Subfield, Self::Output>) -> U, + ) -> Option; +} + +impl OptionStoreExt for S +where + S: StoreField> + Read, + ::Value: Deref>, +{ + type Output = T; + + fn unwrap(self) -> Subfield, Self::Output> { + Subfield::new( + self, + 0.into(), + |t| t.as_ref().unwrap(), + |t| t.as_mut().unwrap(), + ) + } + + fn map( + self, + map_fn: impl FnOnce(Subfield, T>) -> U, + ) -> Option { + if self.read().is_some() { + Some(map_fn(self.unwrap())) + } else { + None + } + } +} + +#[cfg(test)] +mod tests { + use crate::{self as reactive_stores, Store}; + use reactive_graph::{ + effect::Effect, + traits::{Get, Read, ReadUntracked, Set, Writeable}, + }; + use reactive_stores_macro::Store; + use std::sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, + }; + + pub async fn tick() { + tokio::time::sleep(std::time::Duration::from_micros(1)).await; + } + + #[derive(Debug, Clone, Store)] + pub struct User { + pub name: Option, + } + + #[derive(Debug, Clone, Store)] + pub struct Name { + pub first_name: Option, + } + + #[tokio::test] + async fn substores_reachable_through_option() { + use crate::OptionStoreExt; + + _ = any_spawner::Executor::init_tokio(); + + let combined_count = Arc::new(AtomicUsize::new(0)); + + let store = Store::new(User { name: None }); + + Effect::new_sync({ + let combined_count = Arc::clone(&combined_count); + move |prev: Option<()>| { + if prev.is_none() { + println!("first run"); + } else { + println!("next run"); + } + + if store.name().read().is_some() { + println!( + "inner value = {:?}", + *store.name().unwrap().first_name().read() + ); + } else { + println!("no inner value"); + } + + combined_count.fetch_add(1, 
Ordering::Relaxed); + } + }); + + tick().await; + store.name().set(Some(Name { + first_name: Some("Greg".into()), + })); + tick().await; + store.name().set(None); + tick().await; + store.name().set(Some(Name { + first_name: Some("Bob".into()), + })); + tick().await; + store + .name() + .unwrap() + .first_name() + .write() + .as_mut() + .unwrap() + .push_str("!!!"); + tick().await; + assert_eq!(combined_count.load(Ordering::Relaxed), 5); + assert_eq!( + store + .name() + .read_untracked() + .as_ref() + .unwrap() + .first_name + .as_ref() + .unwrap(), + "Bob!!!" + ); + } + #[tokio::test] + async fn updating_option_unwrap_subfield_doesnt_notify_option() { + use crate::OptionStoreExt; + + _ = any_spawner::Executor::init_tokio(); + + let parent_count = Arc::new(AtomicUsize::new(0)); + let inner_count = Arc::new(AtomicUsize::new(0)); + + let store = Store::new(User { name: None }); + + Effect::new_sync({ + let parent_count = Arc::clone(&parent_count); + move |prev: Option<()>| { + if prev.is_none() { + println!("parent: first run"); + } else { + println!("parent: next run"); + } + + println!(" is_some = {}", store.name().read().is_some()); + parent_count.fetch_add(1, Ordering::Relaxed); + } + }); + Effect::new_sync({ + let inner_count = Arc::clone(&inner_count); + move |prev: Option<()>| { + if prev.is_none() { + println!("inner: first run"); + } else { + println!("inner: next run"); + } + + if store.name().read().is_some() { + println!( + " inner label = {:?}", + *store.name().unwrap().first_name().read() + ); + } else { + println!(" no inner value"); + } + inner_count.fetch_add(1, Ordering::Relaxed); + } + }); + + tick().await; + assert_eq!(parent_count.load(Ordering::Relaxed), 1); + assert_eq!(inner_count.load(Ordering::Relaxed), 1); + + store.name().set(Some(Name { + first_name: Some("Alice".into()), + })); + tick().await; + assert_eq!(parent_count.load(Ordering::Relaxed), 2); + assert_eq!(inner_count.load(Ordering::Relaxed), 2); + + println!("\nUpdating first name 
only"); + store + .name() + .unwrap() + .first_name() + .write() + .as_mut() + .unwrap() + .push_str("!!!"); + + tick().await; + assert_eq!(parent_count.load(Ordering::Relaxed), 2); + assert_eq!(inner_count.load(Ordering::Relaxed), 3); + } + + #[tokio::test] + async fn mapping_over_optional_store_field() { + use crate::OptionStoreExt; + + _ = any_spawner::Executor::init_tokio(); + + let parent_count = Arc::new(AtomicUsize::new(0)); + let inner_count = Arc::new(AtomicUsize::new(0)); + + let store = Store::new(User { name: None }); + + Effect::new_sync({ + let parent_count = Arc::clone(&parent_count); + move |prev: Option<()>| { + if prev.is_none() { + println!("parent: first run"); + } else { + println!("parent: next run"); + } + + println!(" is_some = {}", store.name().read().is_some()); + parent_count.fetch_add(1, Ordering::Relaxed); + } + }); + Effect::new_sync({ + let inner_count = Arc::clone(&inner_count); + move |prev: Option<()>| { + if prev.is_none() { + println!("inner: first run"); + } else { + println!("inner: next run"); + } + + println!( + "store inner value length = {:?}", + store.name().map(|inner| inner + .first_name() + .get() + .unwrap_or_default() + .len()) + ); + inner_count.fetch_add(1, Ordering::Relaxed); + } + }); + + tick().await; + assert_eq!(parent_count.load(Ordering::Relaxed), 1); + assert_eq!(inner_count.load(Ordering::Relaxed), 1); + + store.name().set(Some(Name { + first_name: Some("Greg".into()), + })); + tick().await; + assert_eq!(parent_count.load(Ordering::Relaxed), 2); + assert_eq!(inner_count.load(Ordering::Relaxed), 2); + + println!("\nUpdating first name only"); + store + .name() + .unwrap() + .first_name() + .write() + .as_mut() + .unwrap() + .push_str("!!!"); + + tick().await; + assert_eq!(parent_count.load(Ordering::Relaxed), 2); + assert_eq!(inner_count.load(Ordering::Relaxed), 3); + } +} diff --git a/reactive_stores/src/store_field.rs b/reactive_stores/src/store_field.rs index 5822ec5e3..90700a33f 100644 --- 
a/reactive_stores/src/store_field.rs +++ b/reactive_stores/src/store_field.rs @@ -1,32 +1,65 @@ use crate::{ path::{StorePath, StorePathSegment}, - ArcStore, Store, + ArcStore, KeyMap, Store, }; -use guardian::ArcRwLockWriteGuardian; use or_poisoned::OrPoisoned; use reactive_graph::{ owner::Storage, signal::{ - guards::{Plain, WriteGuard}, + guards::{Mapped, MappedMut, Plain, UntrackedWriteGuard, WriteGuard}, ArcTrigger, }, - traits::{DefinedAt, UntrackableGuard}, + traits::{ + DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard, + Writeable, + }, unwrap_signal, }; -use std::{iter, ops::Deref, sync::Arc}; +use std::{ + iter, + ops::{Deref, DerefMut}, + panic::Location, + sync::Arc, +}; pub trait StoreField: Sized { type Value; type Reader: Deref; type Writer: UntrackableGuard; + type UntrackedWriter: DerefMut; fn get_trigger(&self, path: StorePath) -> ArcTrigger; fn path(&self) -> impl IntoIterator; + fn track_field(&self) { + let path = self.path().into_iter().collect(); + let trigger = self.get_trigger(path); + trigger.track(); + } + fn reader(&self) -> Option; fn writer(&self) -> Option; + + fn untracked_writer(&self) -> Option; + + fn keys(&self) -> Option; + + #[track_caller] + fn then( + self, + map_fn: fn(&Self::Value) -> &T, + map_fn_mut: fn(&mut Self::Value) -> &mut T, + ) -> Then { + Then { + #[cfg(debug_assertions)] + defined_at: Location::caller(), + inner: self, + map_fn, + map_fn_mut, + } + } } impl StoreField for ArcStore @@ -35,7 +68,8 @@ where { type Value = T; type Reader = Plain; - type Writer = WriteGuard>; + type Writer = WriteGuard>; + type UntrackedWriter = UntrackedWriteGuard; fn get_trigger(&self, path: StorePath) -> ArcTrigger { let triggers = &self.signals; @@ -53,10 +87,17 @@ where fn writer(&self) -> Option { let trigger = self.get_trigger(Default::default()); - let guard = - ArcRwLockWriteGuardian::take(Arc::clone(&self.value)).ok()?; + let guard = self.untracked_writer()?; Some(WriteGuard::new(trigger, guard)) } + + 
fn untracked_writer(&self) -> Option { + UntrackedWriteGuard::try_new(Arc::clone(&self.value)) + } + + fn keys(&self) -> Option { + Some(self.keys.clone()) + } } impl StoreField for Store @@ -66,7 +107,8 @@ where { type Value = T; type Reader = Plain; - type Writer = WriteGuard>; + type Writer = WriteGuard>; + type UntrackedWriter = UntrackedWriteGuard; fn get_trigger(&self, path: StorePath) -> ArcTrigger { self.inner @@ -89,4 +131,161 @@ where fn writer(&self) -> Option { self.inner.try_get_value().and_then(|n| n.writer()) } + + fn untracked_writer(&self) -> Option { + self.inner + .try_get_value() + .and_then(|n| n.untracked_writer()) + } + + fn keys(&self) -> Option { + self.inner.try_get_value().and_then(|inner| inner.keys()) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct Then +where + S: StoreField, +{ + inner: S, + map_fn: fn(&S::Value) -> &T, + map_fn_mut: fn(&mut S::Value) -> &mut T, + #[cfg(debug_assertions)] + defined_at: &'static Location<'static>, +} + +impl Then +where + S: StoreField, +{ + #[track_caller] + pub fn new( + inner: S, + map_fn: fn(&S::Value) -> &T, + map_fn_mut: fn(&mut S::Value) -> &mut T, + ) -> Self { + Self { + inner, + map_fn, + map_fn_mut, + #[cfg(debug_assertions)] + defined_at: Location::caller(), + } + } +} + +impl StoreField for Then +where + S: StoreField, +{ + type Value = T; + type Reader = Mapped; + type Writer = MappedMut; + type UntrackedWriter = MappedMut; + + fn get_trigger(&self, path: StorePath) -> ArcTrigger { + self.inner.get_trigger(path) + } + + fn path(&self) -> impl IntoIterator { + self.inner.path() + } + + fn reader(&self) -> Option { + let inner = self.inner.reader()?; + Some(Mapped::new_with_guard(inner, self.map_fn)) + } + + fn writer(&self) -> Option { + let inner = self.inner.writer()?; + Some(MappedMut::new(inner, self.map_fn, self.map_fn_mut)) + } + + fn untracked_writer(&self) -> Option { + let inner = self.inner.untracked_writer()?; + Some(MappedMut::new(inner, self.map_fn, self.map_fn_mut)) + } + 
+ #[inline(always)] + fn keys(&self) -> Option { + self.inner.keys() + } +} + +impl DefinedAt for Then +where + S: StoreField, +{ + fn defined_at(&self) -> Option<&'static Location<'static>> { + #[cfg(debug_assertions)] + { + Some(self.defined_at) + } + #[cfg(not(debug_assertions))] + { + None + } + } +} + +impl IsDisposed for Then +where + S: StoreField + IsDisposed, +{ + fn is_disposed(&self) -> bool { + self.inner.is_disposed() + } +} + +impl Notify for Then +where + S: StoreField, +{ + fn notify(&self) { + let trigger = self.get_trigger(self.path().into_iter().collect()); + trigger.notify(); + } +} + +impl Track for Then +where + S: StoreField, +{ + fn track(&self) { + let trigger = self.get_trigger(self.path().into_iter().collect()); + trigger.track(); + } +} + +impl ReadUntracked for Then +where + S: StoreField, +{ + type Value = ::Reader; + + fn try_read_untracked(&self) -> Option { + self.reader() + } +} + +impl Writeable for Then +where + T: 'static, + S: StoreField, +{ + type Value = T; + + fn try_write(&self) -> Option> { + self.writer() + } + + fn try_write_untracked( + &self, + ) -> Option> { + self.writer().map(|mut writer| { + writer.untrack(); + writer + }) + } } diff --git a/reactive_stores/src/subfield.rs b/reactive_stores/src/subfield.rs index f073e3550..4c01fa522 100644 --- a/reactive_stores/src/subfield.rs +++ b/reactive_stores/src/subfield.rs @@ -1,6 +1,7 @@ use crate::{ path::{StorePath, StorePathSegment}, store_field::StoreField, + KeyMap, }; use reactive_graph::{ signal::{ @@ -15,10 +16,7 @@ use reactive_graph::{ use std::{iter, marker::PhantomData, ops::DerefMut, panic::Location}; #[derive(Debug)] -pub struct Subfield -where - Inner: StoreField, -{ +pub struct Subfield { #[cfg(debug_assertions)] defined_at: &'static Location<'static>, path_segment: StorePathSegment, @@ -30,7 +28,7 @@ where impl Clone for Subfield where - Inner: StoreField + Clone, + Inner: Clone, { fn clone(&self) -> Self { Self { @@ -45,15 +43,9 @@ where } } -impl Copy 
for Subfield where - Inner: StoreField + Copy -{ -} +impl Copy for Subfield where Inner: Copy {} -impl Subfield -where - Inner: StoreField, -{ +impl Subfield { #[track_caller] pub fn new( inner: Inner, @@ -81,6 +73,8 @@ where type Value = T; type Reader = Mapped; type Writer = MappedMut, T>; + type UntrackedWriter = + MappedMut, T>; fn path(&self) -> impl IntoIterator { self.inner @@ -103,6 +97,17 @@ where let inner = WriteGuard::new(trigger, self.inner.writer()?); Some(MappedMut::new(inner, self.read, self.write)) } + + fn untracked_writer(&self) -> Option { + let trigger = self.get_trigger(self.path().into_iter().collect()); + let inner = WriteGuard::new(trigger, self.inner.untracked_writer()?); + Some(MappedMut::new(inner, self.read, self.write)) + } + + #[inline(always)] + fn keys(&self) -> Option { + self.inner.keys() + } } impl DefinedAt for Subfield @@ -123,7 +128,7 @@ where impl IsDisposed for Subfield where - Inner: StoreField + IsDisposed, + Inner: IsDisposed, { fn is_disposed(&self) -> bool { self.inner.is_disposed() @@ -143,11 +148,12 @@ where impl Track for Subfield where - Inner: StoreField + 'static, + Inner: StoreField + Track + 'static, Prev: 'static, T: 'static, { fn track(&self) { + self.inner.track(); let trigger = self.get_trigger(self.path().into_iter().collect()); trigger.track(); } diff --git a/reactive_stores_macro/Cargo.toml b/reactive_stores_macro/Cargo.toml index 713dbd720..fdc6bd911 100644 --- a/reactive_stores_macro/Cargo.toml +++ b/reactive_stores_macro/Cargo.toml @@ -8,6 +8,7 @@ edition.workspace = true proc-macro = true [dependencies] +convert_case = "0.6" proc-macro-error = "1.0" proc-macro2 = "1.0" quote = "1.0" diff --git a/reactive_stores_macro/src/lib.rs b/reactive_stores_macro/src/lib.rs index 3acc33075..6ad45b59e 100644 --- a/reactive_stores_macro/src/lib.rs +++ b/reactive_stores_macro/src/lib.rs @@ -1,12 +1,13 @@ -use proc_macro2::Span; +use convert_case::{Case, Casing}; +use proc_macro2::{Span, TokenStream}; use 
proc_macro_error::{abort, abort_call_site, proc_macro_error}; use quote::{quote, ToTokens}; use syn::{ parse::{Parse, ParseStream, Parser}, punctuated::Punctuated, token::Comma, - Field, Generics, Ident, Index, Meta, Result, Token, Type, Visibility, - WhereClause, + Field, Fields, Generics, Ident, Index, Meta, Result, Token, Type, Variant, + Visibility, WhereClause, ExprClosure, }; #[proc_macro_error] @@ -26,54 +27,69 @@ pub fn derive_patch(input: proc_macro::TokenStream) -> proc_macro::TokenStream { } struct Model { - pub vis: Visibility, - pub struct_name: Ident, - pub generics: Generics, - pub fields: Vec, + vis: Visibility, + name: Ident, + generics: Generics, + ty: ModelTy, +} + +enum ModelTy { + Struct { fields: Vec }, + Enum { variants: Vec }, } impl Parse for Model { fn parse(input: ParseStream) -> Result { let input = syn::DeriveInput::parse(input)?; - let syn::Data::Struct(s) = input.data else { - abort_call_site!("only structs can be used with `Store`"); - }; + let ty = match input.data { + syn::Data::Struct(s) => { + let fields = match s.fields { + syn::Fields::Unit => { + abort!(s.semi_token, "unit structs are not supported"); + } + syn::Fields::Named(fields) => { + fields.named.into_iter().collect::>() + } + syn::Fields::Unnamed(fields) => { + fields.unnamed.into_iter().collect::>() + } + }; - let fields = match s.fields { - syn::Fields::Unit => { - abort!(s.semi_token, "unit structs are not supported"); + ModelTy::Struct { fields } } - syn::Fields::Named(fields) => { - fields.named.into_iter().collect::>() - } - syn::Fields::Unnamed(fields) => { - fields.unnamed.into_iter().collect::>() + syn::Data::Enum(e) => ModelTy::Enum { + variants: e.variants.into_iter().collect(), + }, + _ => { + abort_call_site!( + "only structs and enums can be used with `Store`" + ); } }; Ok(Self { vis: input.vis, - struct_name: input.ident, generics: input.generics, - fields, + name: input.ident, + ty, }) } } #[derive(Clone)] enum SubfieldMode { - Keyed(Ident, Type), + 
Keyed(ExprClosure, Type), } impl Parse for SubfieldMode { fn parse(input: syn::parse::ParseStream) -> syn::Result { let mode: Ident = input.parse()?; if mode == "key" { - let _eq: Token!(=) = input.parse()?; - let ident: Ident = input.parse()?; let _col: Token!(:) = input.parse()?; let ty: Type = input.parse()?; + let _eq: Token!(=) = input.parse()?; + let ident: ExprClosure = input.parse()?; Ok(SubfieldMode::Keyed(ident, ty)) } else { Err(input.error("expected `key = : `")) @@ -81,6 +97,161 @@ impl Parse for SubfieldMode { } } +impl ToTokens for Model { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + let library_path = quote! { reactive_stores }; + let Model { + vis, + name, + generics, + ty, + } = &self; + let any_store_field = Ident::new("AnyStoreField", Span::call_site()); + let trait_name = Ident::new(&format!("{name}StoreFields"), name.span()); + let generics_with_orig = { + let params = &generics.params; + quote! { <#any_store_field, #params> } + }; + let where_with_orig = { + generics + .where_clause + .as_ref() + .map(|w| { + let WhereClause { + where_token, + predicates, + } = &w; + quote! { + #where_token + #any_store_field: #library_path::StoreField, + #predicates + } + }) + .unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField }) + }; + + // define an extension trait that matches this struct + // and implement that trait for all StoreFields + let (trait_fields, read_fields): (Vec<_>, Vec<_>) = + ty.to_field_data(&library_path, generics, &any_store_field, name); + + // read access + tokens.extend(quote! 
{ + #vis trait #trait_name + #where_with_orig + { + #(#trait_fields)* + } + + impl #generics_with_orig #trait_name for AnyStoreField + #where_with_orig + { + #(#read_fields)* + } + }); + } +} + +impl ModelTy { + fn to_field_data( + &self, + library_path: &TokenStream, + generics: &Generics, + any_store_field: &Ident, + name: &Ident, + ) -> (Vec, Vec) { + match self { + ModelTy::Struct { fields } => fields + .iter() + .enumerate() + .map(|(idx, field)| { + let Field { + ident, ty, attrs, .. + } = &field; + let modes = attrs + .iter() + .find_map(|attr| { + attr.meta.path().is_ident("store").then(|| { + match &attr.meta { + Meta::List(list) => { + match Punctuated::< + SubfieldMode, + Comma, + >::parse_terminated + .parse2(list.tokens.clone()) + { + Ok(modes) => Some( + modes + .iter() + .cloned() + .collect::>(), + ), + Err(e) => abort!(list, e), + } + } + _ => None, + } + }) + }) + .flatten(); + + ( + field_to_tokens( + idx, + false, + modes.as_deref(), + library_path, + ident.as_ref(), + generics, + any_store_field, + name, + ty, + ), + field_to_tokens( + idx, + true, + modes.as_deref(), + library_path, + ident.as_ref(), + generics, + any_store_field, + name, + ty, + ), + ) + }) + .unzip(), + ModelTy::Enum { variants } => variants + .iter() + .map(|variant| { + let Variant { ident, fields, .. 
} = variant; + + ( + variant_to_tokens( + false, + library_path, + ident, + generics, + any_store_field, + name, + fields, + ), + variant_to_tokens( + true, + library_path, + ident, + generics, + any_store_field, + name, + fields, + ), + ) + }) + .unzip(), + } + } +} + #[allow(clippy::too_many_arguments)] fn field_to_tokens( idx: usize, @@ -90,7 +261,7 @@ fn field_to_tokens( orig_ident: Option<&Ident>, generics: &Generics, any_store_field: &Ident, - struct_name: &Ident, + name: &Ident, ty: &Type, ) -> proc_macro2::TokenStream { let ident = if orig_ident.is_none() { @@ -110,15 +281,20 @@ fn field_to_tokens( if modes.len() == 1 { let mode = &modes[0]; // Can replace with a match if additional modes added - // TODO keyed_by - let SubfieldMode::Keyed(_keyed_by, key_ty) = mode; + let SubfieldMode::Keyed(keyed_by, key_ty) = mode; let signature = quote! { - fn #ident(self) -> #library_path::KeyedField<#any_store_field, #struct_name #generics, #ty, #key_ty> + fn #ident(self) -> #library_path::KeyedSubfield<#any_store_field, #name #generics, #key_ty, #ty> }; return if include_body { quote! { #signature { - todo!() + #library_path::KeyedSubfield::new( + self, + #idx.into(), + #keyed_by, + |prev| &prev.#locator, + |prev| &mut prev.#locator, + ) } } } else { @@ -137,7 +313,7 @@ fn field_to_tokens( // default subfield if include_body { quote! { - fn #ident(self) -> #library_path::Subfield<#any_store_field, #struct_name #generics, #ty> { + fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty> { #library_path::Subfield::new( self, #idx.into(), @@ -148,93 +324,212 @@ fn field_to_tokens( } } else { quote! { - fn #ident(self) -> #library_path::Subfield<#any_store_field, #struct_name #generics, #ty>; + fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty>; } } } -impl ToTokens for Model { - fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { - let library_path = quote! 
{ reactive_stores }; - let Model { - vis, - struct_name, - generics, - fields, - } = &self; - let any_store_field = Ident::new("AnyStoreField", Span::call_site()); - let trait_name = Ident::new( - &format!("{struct_name}StoreFields"), - struct_name.span(), - ); - let generics_with_orig = { - let params = &generics.params; - quote! { <#any_store_field, #params> } - }; - let where_with_orig = { - generics - .where_clause - .as_ref() - .map(|w| { - let WhereClause { - where_token, - predicates, - } = &w; - quote! { - #where_token - #any_store_field: #library_path::StoreField, - #predicates - } - }) - .unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField }) - }; +#[allow(clippy::too_many_arguments)] +fn variant_to_tokens( + include_body: bool, + library_path: &proc_macro2::TokenStream, + ident: &Ident, + generics: &Generics, + any_store_field: &Ident, + name: &Ident, + fields: &Fields, +) -> proc_macro2::TokenStream { + // the method name will always be the snake_cased ident + let orig_ident = &ident; + let ident = + Ident::new(&ident.to_string().to_case(Case::Snake), ident.span()); - // define an extension trait that matches this struct - let all_field_data = fields.iter().enumerate().map(|(idx, field)| { - let Field { ident, ty, attrs, .. } = &field; - let modes = attrs.iter().find_map(|attr| { - attr.meta.path().is_ident("store").then(|| { - match &attr.meta { - Meta::List(list) => { - match Punctuated::::parse_terminated.parse2(list.tokens.clone()) { - Ok(modes) => Some(modes.iter().cloned().collect::>()), - Err(e) => abort!(list, e) + match fields { + // For unit enum fields, we will just return a `bool` subfield, which is + // true when this field matches + Fields::Unit => { + // default subfield + if include_body { + quote! 
{ + fn #ident(self) -> bool { + match #library_path::StoreField::reader(&self) { + Some(reader) => { + #library_path::StoreField::track_field(&self); + matches!(&*reader, #name::#orig_ident) + }, + None => false + } + } + } + } else { + quote! { + fn #ident(self) -> bool; + } + } + } + // If an enum branch has named fields, we create N + 1 methods: + // 1 `bool` subfield, which is true when this field matches + // N `Option` subfields for each of the named fields + Fields::Named(fields) => { + let mut tokens = if include_body { + quote! { + fn #ident(self) -> bool { + match #library_path::StoreField::reader(&self) { + Some(reader) => { + #library_path::StoreField::track_field(&self); + matches!(&*reader, #name::#orig_ident { .. }) + }, + None => false + } + } + } + } else { + quote! { + fn #ident(self) -> bool; + } + }; + + tokens.extend(fields + .named + .iter() + .map(|field| { + let field_ident = field.ident.as_ref().unwrap(); + let field_ty = &field.ty; + let combined_ident = Ident::new( + &format!("{}_{}", ident, field_ident), + field_ident.span(), + ); + + // default subfield + if include_body { + quote! { + fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> { + #library_path::StoreField::track_field(&self); + let reader = #library_path::StoreField::reader(&self); + let matches = reader + .map(|reader| matches!(&*reader, #name::#orig_ident { .. })) + .unwrap_or(false); + if matches { + Some(#library_path::Subfield::new( + self, + 0.into(), + |prev| { + match prev { + #name::#orig_ident { #field_ident, .. } => Some(#field_ident), + _ => None, + } + .expect("accessed an enum field that is no longer matched") + }, + |prev| { + match prev { + #name::#orig_ident { #field_ident, .. } => Some(#field_ident), + _ => None, + } + .expect("accessed an enum field that is no longer matched") + }, + )) + } else { + None + } } - }, - _ => None + } + } else { + quote! 
{ + fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>; + } } - }) - }).flatten(); + })); - ( - field_to_tokens(idx, false, modes.as_deref(), &library_path, ident.as_ref(), generics, &any_store_field, struct_name, ty), - field_to_tokens(idx, true, modes.as_deref(), &library_path, ident.as_ref(), generics, &any_store_field, struct_name, ty), - ) - }); + tokens + } + // If an enum branch has unnamed fields, we create N + 1 methods: + // 1 `bool` subfield, which is true when this field matches + // N `Option` subfields for each of the unnamed fields + Fields::Unnamed(fields) => { + let mut tokens = if include_body { + quote! { + fn #ident(self) -> bool { + match #library_path::StoreField::reader(&self) { + Some(reader) => { + #library_path::StoreField::track_field(&self); + matches!(&*reader, #name::#orig_ident { .. }) + }, + None => false + } + } + } + } else { + quote! { + fn #ident(self) -> bool; + } + }; - // implement that trait for all StoreFields - let (trait_fields, read_fields): (Vec<_>, Vec<_>) = - all_field_data.unzip(); + let number_of_fields = fields.unnamed.len(); - // read access - tokens.extend(quote! { - #vis trait #trait_name - #where_with_orig - { - #(#trait_fields)* - } + tokens.extend(fields + .unnamed + .iter() + .enumerate() + .map(|(idx, field)| { + let field_ident = idx; + let field_ty = &field.ty; + let combined_ident = Ident::new( + &format!("{}_{}", ident, field_ident), + ident.span(), + ); - impl #generics_with_orig #trait_name for AnyStoreField - #where_with_orig - { - #(#read_fields)* - } - }); + let ignore_before = (0..idx).map(|_| quote! { _, }); + let ignore_before2 = ignore_before.clone(); + let ignore_after = (idx..number_of_fields.saturating_sub(1)).map(|_| quote !{_, }); + let ignore_after2 = ignore_after.clone(); + + // default subfield + if include_body { + quote! 
{ + fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> { + #library_path::StoreField::track_field(&self); + let reader = #library_path::StoreField::reader(&self); + let matches = reader + .map(|reader| matches!(&*reader, #name::#orig_ident(..))) + .unwrap_or(false); + if matches { + Some(#library_path::Subfield::new( + self, + 0.into(), + |prev| { + match prev { + #name::#orig_ident(#(#ignore_before)* this, #(#ignore_after)*) => Some(this), + _ => None, + } + .expect("accessed an enum field that is no longer matched") + }, + |prev| { + match prev { + #name::#orig_ident(#(#ignore_before2)* this, #(#ignore_after2)*) => Some(this), + _ => None, + } + .expect("accessed an enum field that is no longer matched") + }, + )) + } else { + None + } + } + } + } else { + quote! { + fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>; + } + } + })); + + tokens + } } } struct PatchModel { - pub struct_name: Ident, + pub name: Ident, pub generics: Generics, pub fields: Vec, } @@ -260,7 +555,7 @@ impl Parse for PatchModel { }; Ok(Self { - struct_name: input.ident, + name: input.ident, generics: input.generics, fields, }) @@ -271,7 +566,7 @@ impl ToTokens for PatchModel { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { let library_path = quote! { reactive_stores }; let PatchModel { - struct_name, + name, generics, fields, } = &self; @@ -294,7 +589,7 @@ impl ToTokens for PatchModel { // read access tokens.extend(quote! { - impl #library_path::PatchField for #struct_name #generics + impl #library_path::PatchField for #name #generics { fn patch_field( &mut self,