Merge pull request #2871 from leptos-rs/more-stores

(draft) More work on stores
This commit is contained in:
Greg Johnston 2024-09-14 17:34:13 -04:00 committed by GitHub
commit f0c60f6ef6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
14 changed files with 1970 additions and 200 deletions

View file

@ -13,6 +13,9 @@ leptos = { path = "../../leptos", features = ["csr"] }
reactive_stores = { path = "../../reactive_stores" }
reactive_stores_macro = { path = "../../reactive_stores_macro" }
console_error_panic_hook = "0.1.7"
chrono = { version = "0.4.38", features = ["serde"] }
serde = { version = "1.0.210", features = ["derive"] }
serde_json = "1.0.128"
[dev-dependencies]
wasm-bindgen = "0.2.93"

View file

@ -3,6 +3,11 @@
<head>
<link data-trunk rel="rust" data-wasm-opt="z"/>
<link data-trunk rel="icon" type="image/ico" href="/public/favicon.ico"/>
<style>
.hidden {
display: none;
}
</style>
</head>
<body></body>
</html>

View file

@ -1,24 +1,57 @@
use leptos::prelude::*;
use reactive_stores::{Field, Store, StoreFieldIterator};
use reactive_stores_macro::Store;
use std::sync::atomic::{AtomicUsize, Ordering};
#[derive(Debug, Store)]
use chrono::{Local, NaiveDate};
use leptos::prelude::*;
use reactive_stores::{Field, Store};
use reactive_stores_macro::Store;
use serde::{Deserialize, Serialize};
// ID starts higher than 0 because we have a few starting todos by default
static NEXT_ID: AtomicUsize = AtomicUsize::new(3);
#[derive(Debug, Store, Serialize, Deserialize)]
struct Todos {
user: String,
#[store(key: usize = |todo| todo.id)]
todos: Vec<Todo>,
}
#[derive(Debug, Store)]
#[derive(Debug, Store, Serialize, Deserialize)]
struct Todo {
id: usize,
label: String,
completed: bool,
status: Status,
}
#[derive(Debug, Default, Clone, Store, Serialize, Deserialize)]
enum Status {
#[default]
Pending,
Scheduled,
ScheduledFor {
date: NaiveDate,
},
Done,
}
impl Status {
pub fn next_step(&mut self) {
*self = match self {
Status::Pending => Status::ScheduledFor {
date: Local::now().naive_local().into(),
},
Status::Scheduled | Status::ScheduledFor { .. } => Status::Done,
Status::Done => Status::Done,
};
}
}
impl Todo {
pub fn new(label: impl ToString) -> Self {
Self {
id: NEXT_ID.fetch_add(1, Ordering::Relaxed),
label: label.to_string(),
completed: false,
status: Status::Pending,
}
}
}
@ -28,16 +61,19 @@ fn data() -> Todos {
user: "Bob".to_string(),
todos: vec![
Todo {
id: 0,
label: "Create reactive store".to_string(),
completed: true,
status: Status::Pending,
},
Todo {
id: 1,
label: "???".to_string(),
completed: false,
status: Status::Pending,
},
Todo {
id: 2,
label: "Profit".to_string(),
completed: false,
status: Status::Pending,
},
],
}
@ -49,15 +85,6 @@ pub fn App() -> impl IntoView {
let input_ref = NodeRef::new();
let rows = move || {
store
.todos()
.iter()
.enumerate()
.map(|(idx, todo)| view! { <TodoRow store idx todo/> })
.collect_view()
};
view! {
<p>"Hello, " {move || store.user().get()}</p>
<form on:submit=move |ev| {
@ -67,30 +94,31 @@ pub fn App() -> impl IntoView {
<label>"Add a Todo" <input type="text" node_ref=input_ref/></label>
<input type="submit"/>
</form>
<ol>{rows}</ol>
<div style="display: flex"></div>
<ol>
<For each=move || store.todos().iter_keyed() key=|row| row.id().get() let:todo>
<TodoRow store todo/>
</For>
</ol>
<pre>{move || serde_json::to_string_pretty(&*store.read())}</pre>
}
}
#[component]
fn TodoRow(
store: Store<Todos>,
idx: usize,
#[prop(into)] todo: Field<Todo>,
) -> impl IntoView {
let completed = todo.completed();
let status = todo.status();
let title = todo.label();
let editing = RwSignal::new(false);
let editing = RwSignal::new(true);
view! {
<li
style:text-decoration=move || {
completed.get().then_some("line-through").unwrap_or_default()
}
<li style:text-decoration=move || {
status.done().then_some("line-through").unwrap_or_default()
}>
class:foo=move || completed.get()
>
<p
class:hidden=move || editing.get()
on:click=move |_| {
@ -106,25 +134,51 @@ fn TodoRow(
prop:value=move || title.get()
on:change=move |ev| {
title.set(event_target_value(&ev));
editing.set(false);
}
on:blur=move |_| editing.set(false)
autofocus
/>
<input
type="checkbox"
prop:checked=move || completed.get()
on:click=move |_| { completed.update(|n| *n = !*n) }
/>
<button on:click=move |_| {
status.write().next_step()
}>
{move || {
if todo.status().done() {
"Done"
} else if status.scheduled() || status.scheduled_for() {
"Scheduled"
} else {
"Pending"
}
}}
</button>
<button on:click=move |_| {
let id = todo.id().get();
store
.todos()
.update(|todos| {
todos.remove(idx);
todos.retain(|todo| todo.id != id);
});
}>"X"</button>
<input
type="date"
prop:value=move || {
todo.status().scheduled_for_date().map(|n| n.get().to_string())
}
class:hidden=move || !todo.status().scheduled_for()
on:change:target=move |ev| {
if let Some(date) = todo.status().scheduled_for_date() {
let value = ev.target().value();
match NaiveDate::parse_from_str(&value, "%Y-%m-%d") {
Ok(new_date) => {
date.set(new_date);
}
Err(e) => warn!("{e}"),
}
}
}
/>
</li>
}
}
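A minimal sketch (not part of this diff) of using the keyed accessor generated by `#[store(key: usize = |todo| todo.id)]` outside the view. It assumes the `Todos`/`Todo`/`Status` types and the `leptos::prelude::*` imports from the example above; `advance_by_id` is a hypothetical helper, not something this commit adds:

fn advance_by_id(store: Store<Todos>, id: usize) {
    // `store.todos()` is now a KeyedSubfield keyed by `todo.id`;
    // `iter_keyed()` yields one AtKeyed entry per row, addressed by key
    // rather than by position in the Vec.
    for row in store.todos().iter_keyed() {
        if row.id().get() == id {
            // writing through the keyed row notifies that row's subscribers
            row.status().write().next_step();
        }
    }
}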

View file

@ -99,7 +99,8 @@ pub mod prelude {
// TODO remove this, it's just useful while developing
#[allow(unused)]
fn log_warning(text: Arguments) {
#[doc(hidden)]
pub fn log_warning(text: Arguments) {
#[cfg(feature = "tracing")]
{
tracing::warn!(text);

View file

@ -1,6 +1,6 @@
use crate::{
path::{StorePath, StorePathSegment},
AtIndex, StoreField, Subfield,
AtIndex, AtKeyed, KeyMap, KeyedSubfield, StoreField, Subfield,
};
use reactive_graph::{
signal::ArcTrigger,
@ -9,6 +9,8 @@ use reactive_graph::{
},
};
use std::{
fmt::Debug,
hash::Hash,
ops::{Deref, DerefMut, IndexMut},
panic::Location,
sync::Arc,
@ -25,6 +27,7 @@ where
get_trigger: Arc<dyn Fn(StorePath) -> ArcTrigger + Send + Sync>,
read: Arc<dyn Fn() -> Option<StoreFieldReader<T>> + Send + Sync>,
write: Arc<dyn Fn() -> Option<StoreFieldWriter<T>> + Send + Sync>,
keys: Arc<dyn Fn() -> Option<KeyMap> + Send + Sync>,
}
pub struct StoreFieldReader<T>(Box<dyn Deref<Target = T>>);
@ -75,6 +78,7 @@ impl<T> StoreField for ArcField<T> {
type Value = T;
type Reader = StoreFieldReader<T>;
type Writer = StoreFieldWriter<T>;
type UntrackedWriter = StoreFieldWriter<T>;
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
(self.get_trigger)(path)
@ -91,6 +95,16 @@ impl<T> StoreField for ArcField<T> {
fn writer(&self) -> Option<Self::Writer> {
(self.write)().map(StoreFieldWriter::new)
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
let mut writer = (self.write)().map(StoreFieldWriter::new)?;
writer.untrack();
Some(writer)
}
fn keys(&self) -> Option<KeyMap> {
(self.keys)()
}
}
impl<Inner, Prev, T> From<Subfield<Inner, Prev, T>> for ArcField<T>
@ -119,6 +133,10 @@ where
let value = value.clone();
move || value.writer().map(StoreFieldWriter::new)
}),
keys: Arc::new({
let value = value.clone();
move || value.keys()
}),
}
}
}
@ -149,6 +167,48 @@ where
let value = value.clone();
move || value.writer().map(StoreFieldWriter::new)
}),
keys: Arc::new({
let value = value.clone();
move || value.keys()
}),
}
}
}
impl<Inner, Prev, K, T> From<AtKeyed<Inner, Prev, K, T>> for ArcField<T::Output>
where
AtKeyed<Inner, Prev, K, T>: Clone,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev> + Send + Sync + 'static,
Prev: 'static,
T: IndexMut<usize> + 'static,
T::Output: Sized,
{
#[track_caller]
fn from(value: AtKeyed<Inner, Prev, K, T>) -> Self {
ArcField {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
path: value.path().into_iter().collect(),
trigger: value.get_trigger(value.path().into_iter().collect()),
get_trigger: Arc::new({
let value = value.clone();
move |path| value.get_trigger(path)
}),
read: Arc::new({
let value = value.clone();
move || value.reader().map(StoreFieldReader::new)
}),
write: Arc::new({
let value = value.clone();
move || value.writer().map(StoreFieldWriter::new)
}),
keys: Arc::new({
let value = value.clone();
move || value.keys()
}),
}
}
}
@ -163,6 +223,7 @@ impl<T> Clone for ArcField<T> {
get_trigger: Arc::clone(&self.get_trigger),
read: Arc::clone(&self.read),
write: Arc::clone(&self.write),
keys: Arc::clone(&self.keys),
}
}
}
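As an illustration (not part of this diff): the new From<AtKeyed<…>> conversion is what lets a keyed row be type-erased into ArcField/Field, which is how the example app can accept each row as `#[prop(into)] todo: Field<Todo>`. A hypothetical sketch, assuming the example app's Todos/Todo types:

use reactive_stores::{AtKeyed, Field, Store};

// erase a concretely-typed keyed row into the type-erased Field wrapper
fn erase_row(row: AtKeyed<Store<Todos>, Todos, usize, Vec<Todo>>) -> Field<Todo> {
    row.into()
}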

View file

@ -1,7 +1,7 @@
use crate::{
arc_field::{StoreFieldReader, StoreFieldWriter},
path::{StorePath, StorePathSegment},
ArcField, AtIndex, StoreField, Subfield,
ArcField, AtIndex, AtKeyed, KeyMap, KeyedSubfield, StoreField, Subfield,
};
use reactive_graph::{
owner::{Storage, StoredValue, SyncStorage},
@ -9,7 +9,7 @@ use reactive_graph::{
traits::{DefinedAt, IsDisposed, Notify, ReadUntracked, Track},
unwrap_signal,
};
use std::{ops::IndexMut, panic::Location};
use std::{fmt::Debug, hash::Hash, ops::IndexMut, panic::Location};
pub struct Field<T, S = SyncStorage>
where
@ -27,6 +27,7 @@ where
type Value = T;
type Reader = StoreFieldReader<T>;
type Writer = StoreFieldWriter<T>;
type UntrackedWriter = StoreFieldWriter<T>;
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner
@ -49,6 +50,16 @@ where
fn writer(&self) -> Option<Self::Writer> {
self.inner.try_get_value().and_then(|inner| inner.writer())
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
self.inner
.try_get_value()
.and_then(|inner| inner.untracked_writer())
}
fn keys(&self) -> Option<KeyMap> {
self.inner.try_get_value().and_then(|n| n.keys())
}
}
impl<Inner, Prev, T, S> From<Subfield<Inner, Prev, T>> for Field<T, S>
@ -87,6 +98,29 @@ where
}
}
impl<Inner, Prev, K, T, S> From<AtKeyed<Inner, Prev, K, T>>
for Field<T::Output, S>
where
S: Storage<ArcField<T::Output>>,
AtKeyed<Inner, Prev, K, T>: Clone,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev> + Send + Sync + 'static,
Prev: 'static,
T: IndexMut<usize> + 'static,
T::Output: Sized,
{
#[track_caller]
fn from(value: AtKeyed<Inner, Prev, K, T>) -> Self {
Field {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: StoredValue::new_with_storage(value.into()),
}
}
}
impl<T, S> Clone for Field<T, S> {
fn clone(&self) -> Self {
*self

View file

@ -1,6 +1,7 @@
use crate::{
path::{StorePath, StorePathSegment},
store_field::StoreField,
KeyMap,
};
use reactive_graph::{
signal::{
@ -20,10 +21,7 @@ use std::{
};
#[derive(Debug)]
pub struct AtIndex<Inner, Prev>
where
Inner: StoreField<Value = Prev>,
{
pub struct AtIndex<Inner, Prev> {
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: Inner,
@ -33,7 +31,7 @@ where
impl<Inner, Prev> Clone for AtIndex<Inner, Prev>
where
Inner: StoreField<Value = Prev> + Clone,
Inner: Clone,
{
fn clone(&self) -> Self {
Self {
@ -46,15 +44,9 @@ where
}
}
impl<Inner, Prev> Copy for AtIndex<Inner, Prev> where
Inner: StoreField<Value = Prev> + Copy
{
}
impl<Inner, Prev> Copy for AtIndex<Inner, Prev> where Inner: Copy {}
impl<Inner, Prev> AtIndex<Inner, Prev>
where
Inner: StoreField<Value = Prev>,
{
impl<Inner, Prev> AtIndex<Inner, Prev> {
#[track_caller]
pub fn new(inner: Inner, index: usize) -> Self {
Self {
@ -77,6 +69,8 @@ where
type Reader = MappedMutArc<Inner::Reader, Prev::Output>;
type Writer =
MappedMutArc<WriteGuard<ArcTrigger, Inner::Writer>, Prev::Output>;
type UntrackedWriter =
MappedMutArc<WriteGuard<ArcTrigger, Inner::Writer>, Prev::Output>;
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner
@ -109,6 +103,17 @@ where
move |n| &mut n[index],
))
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
let mut guard = self.writer()?;
guard.untrack();
Some(guard)
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<Inner, Prev> DefinedAt for AtIndex<Inner, Prev>
@ -195,16 +200,27 @@ where
}
}
pub trait StoreFieldIterator<Prev>: Sized {
pub trait StoreFieldIterator<Prev>
where
Self: StoreField<Value = Prev>,
{
fn at(self, index: usize) -> AtIndex<Self, Prev>;
fn iter(self) -> StoreFieldIter<Self, Prev>;
}
impl<Inner, Prev> StoreFieldIterator<Prev> for Inner
where
Inner: StoreField<Value = Prev>,
Inner: StoreField<Value = Prev> + Clone,
Prev::Output: Sized,
Prev: IndexMut<usize> + AsRef<[Prev::Output]>,
{
#[track_caller]
fn at(self, index: usize) -> AtIndex<Inner, Prev> {
AtIndex::new(self.clone(), index)
}
#[track_caller]
fn iter(self) -> StoreFieldIter<Inner, Prev> {
// reactively track changes to this field
let trigger = self.get_trigger(self.path().into_iter().collect());
@ -240,13 +256,7 @@ where
fn next(&mut self) -> Option<Self::Item> {
if self.idx < self.len {
let field = AtIndex {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
index: self.idx,
inner: self.inner.clone(),
ty: PhantomData,
};
let field = AtIndex::new(self.inner.clone(), self.idx);
self.idx += 1;
Some(field)
} else {

View file

@ -0,0 +1,698 @@
use crate::{
path::{StorePath, StorePathSegment},
store_field::StoreField,
KeyMap,
};
use reactive_graph::{
signal::{
guards::{Mapped, MappedMut, MappedMutArc, WriteGuard},
ArcTrigger,
},
traits::{
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
Writeable,
},
};
use std::{
collections::VecDeque,
fmt::Debug,
hash::Hash,
iter,
ops::{Deref, DerefMut, IndexMut},
panic::Location,
};
#[derive(Debug)]
pub struct KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
path_segment: StorePathSegment,
inner: Inner,
read: fn(&Prev) -> &T,
write: fn(&mut Prev) -> &mut T,
key_fn: fn(<&T as IntoIterator>::Item) -> K,
}
impl<Inner, Prev, K, T> Clone for KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: Clone,
{
fn clone(&self) -> Self {
Self {
#[cfg(debug_assertions)]
defined_at: self.defined_at,
path_segment: self.path_segment,
inner: self.inner.clone(),
read: self.read,
write: self.write,
key_fn: self.key_fn,
}
}
}
impl<Inner, Prev, K, T> Copy for KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: Copy,
{
}
impl<Inner, Prev, K, T> KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
#[track_caller]
pub fn new(
inner: Inner,
path_segment: StorePathSegment,
key_fn: fn(<&T as IntoIterator>::Item) -> K,
read: fn(&Prev) -> &T,
write: fn(&mut Prev) -> &mut T,
) -> Self {
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner,
path_segment,
read,
write,
key_fn,
}
}
}
impl<Inner, Prev, K, T> StoreField for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
type Value = T;
type Reader = Mapped<Inner::Reader, T>;
type Writer = KeyedSubfieldWriteGuard<
Inner,
Prev,
K,
T,
MappedMut<WriteGuard<ArcTrigger, Inner::Writer>, T>,
>;
type UntrackedWriter =
MappedMut<WriteGuard<ArcTrigger, Inner::UntrackedWriter>, T>;
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner
.path()
.into_iter()
.chain(iter::once(self.path_segment))
}
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner.get_trigger(path)
}
fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
Some(Mapped::new_with_guard(inner, self.read))
}
fn writer(&self) -> Option<Self::Writer> {
let path = self.path().into_iter().collect::<StorePath>();
let trigger = self.get_trigger(path.clone());
let guard = WriteGuard::new(trigger, self.inner.writer()?);
let guard = MappedMut::new(guard, self.read, self.write);
Some(KeyedSubfieldWriteGuard {
inner: self.clone(),
guard: Some(guard),
})
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
let trigger = self.get_trigger(self.path().into_iter().collect());
let inner = WriteGuard::new(trigger, self.inner.untracked_writer()?);
Some(MappedMut::new(inner, self.read, self.write))
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<Inner, Prev, K, T> KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn latest_keys(&self) -> Vec<K> {
self.reader()
.expect("trying to update keys")
.deref()
.into_iter()
.map(|n| (self.key_fn)(n))
.collect()
}
}
pub struct KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
inner: KeyedSubfield<Inner, Prev, K, T>,
guard: Option<Guard>,
}
impl<Inner, Prev, K, T, Guard> Deref
for KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
Guard: Deref,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
type Target = Guard::Target;
fn deref(&self) -> &Self::Target {
self.guard
.as_ref()
.expect("should be Some(_) until dropped")
.deref()
}
}
impl<Inner, Prev, K, T, Guard> DerefMut
for KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
Guard: DerefMut,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn deref_mut(&mut self) -> &mut Self::Target {
self.guard
.as_mut()
.expect("should be Some(_) until dropped")
.deref_mut()
}
}
impl<Inner, Prev, K, T, Guard> UntrackableGuard
for KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
Guard: UntrackableGuard,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn untrack(&mut self) {
if let Some(inner) = self.guard.as_mut() {
inner.untrack();
}
}
}
impl<Inner, Prev, K, T, Guard> Drop
for KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn drop(&mut self) {
// dropping the inner guard will
// 1) synchronously release its write lock on the store's value
// 2) trigger an (asynchronous) reactive update
drop(self.guard.take());
// now that the write lock is released, we can get a read lock to refresh this keyed field
// based on the new value
self.inner.update_keys();
// reactive updates happen on the next tick
}
}
impl<Inner, Prev, K, T> DefinedAt for KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
{
fn defined_at(&self) -> Option<&'static Location<'static>> {
#[cfg(debug_assertions)]
{
Some(self.defined_at)
}
#[cfg(not(debug_assertions))]
{
None
}
}
}
impl<Inner, Prev, K, T> IsDisposed for KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: IsDisposed,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
}
}
impl<Inner, Prev, K, T> Notify for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.notify();
}
}
impl<Inner, Prev, K, T> Track for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev> + Track + 'static,
Prev: 'static,
T: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn track(&self) {
self.inner.track();
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.track();
}
}
impl<Inner, Prev, K, T> ReadUntracked for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
type Value = <Self as StoreField>::Reader;
fn try_read_untracked(&self) -> Option<Self::Value> {
self.reader()
}
}
impl<Inner, Prev, K, T> Writeable for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
T: 'static,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
type Value = T;
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
self.writer()
}
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
self.writer().map(|mut writer| {
writer.untrack();
writer
})
}
}
#[derive(Debug)]
pub struct AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: KeyedSubfield<Inner, Prev, K, T>,
key: K,
}
impl<Inner, Prev, K, T> Clone for AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
KeyedSubfield<Inner, Prev, K, T>: Clone,
K: Debug + Clone,
{
fn clone(&self) -> Self {
Self {
#[cfg(debug_assertions)]
defined_at: self.defined_at,
inner: self.inner.clone(),
key: self.key.clone(),
}
}
}
impl<Inner, Prev, K, T> Copy for AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
KeyedSubfield<Inner, Prev, K, T>: Copy,
K: Debug + Copy,
{
}
impl<Inner, Prev, K, T> AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
#[track_caller]
pub fn new(inner: KeyedSubfield<Inner, Prev, K, T>, key: K) -> Self {
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner,
key,
}
}
}
impl<Inner, Prev, K, T> StoreField for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
type Value = T::Output;
type Reader = MappedMutArc<
<KeyedSubfield<Inner, Prev, K, T> as StoreField>::Reader,
T::Output,
>;
type Writer = WriteGuard<
ArcTrigger,
MappedMutArc<
<KeyedSubfield<Inner, Prev, K, T> as StoreField>::Writer,
T::Output,
>,
>;
type UntrackedWriter = WriteGuard<
ArcTrigger,
MappedMutArc<
<KeyedSubfield<Inner, Prev, K, T> as StoreField>::Writer,
T::Output,
>,
>;
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
let inner = self.inner.path().into_iter().collect::<StorePath>();
let keys = self
.inner
.keys()
.expect("using keys on a store with no keys");
let this = keys
.with_field_keys(
inner.clone(),
|keys| keys.get(&self.key),
|| self.inner.latest_keys(),
)
.flatten()
.map(|(path, _)| path);
inner.into_iter().chain(this)
}
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner.get_trigger(path)
}
fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
let inner_path = self.inner.path().into_iter().collect();
let keys = self
.inner
.keys()
.expect("using keys on a store with no keys");
let index = keys
.with_field_keys(
inner_path,
|keys| keys.get(&self.key),
|| self.inner.latest_keys(),
)
.flatten()
.map(|(_, idx)| idx)
.expect("reading from a keyed field that has not yet been created");
Some(MappedMutArc::new(
inner,
move |n| &n[index],
move |n| &mut n[index],
))
}
fn writer(&self) -> Option<Self::Writer> {
let inner = self.inner.writer()?;
let trigger = self.get_trigger(self.path().into_iter().collect());
let inner_path = self.inner.path().into_iter().collect::<StorePath>();
let keys = self
.inner
.keys()
.expect("using keys on a store with no keys");
let index = keys
.with_field_keys(
inner_path.clone(),
|keys| keys.get(&self.key),
|| self.inner.latest_keys(),
)
.flatten()
.map(|(_, idx)| idx)
.expect("reading from a keyed field that has not yet been created");
Some(WriteGuard::new(
trigger,
MappedMutArc::new(
inner,
move |n| &n[index],
move |n| &mut n[index],
),
))
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
let mut guard = self.writer()?;
guard.untrack();
Some(guard)
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<Inner, Prev, K, T> DefinedAt for AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
fn defined_at(&self) -> Option<&'static Location<'static>> {
#[cfg(debug_assertions)]
{
Some(self.defined_at)
}
#[cfg(not(debug_assertions))]
{
None
}
}
}
impl<Inner, Prev, K, T> IsDisposed for AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: IsDisposed,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
}
}
impl<Inner, Prev, K, T> Notify for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.notify();
}
}
impl<Inner, Prev, K, T> Track for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
fn track(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.track();
}
}
impl<Inner, Prev, K, T> ReadUntracked for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
type Value = <Self as StoreField>::Reader;
fn try_read_untracked(&self) -> Option<Self::Value> {
self.reader()
}
}
impl<Inner, Prev, K, T> Writeable for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized + 'static,
{
type Value = T::Output;
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
self.writer()
}
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
self.writer().map(|mut writer| {
writer.untrack();
writer
})
}
}
impl<Inner, Prev, K, T> KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
pub fn update_keys(&self) {
let inner_path = self.path().into_iter().collect();
let keys = self
.inner
.keys()
.expect("updating keys on a store with no keys");
keys.with_field_keys(
inner_path,
|keys| {
keys.update(self.latest_keys());
},
|| self.latest_keys(),
);
}
}
impl<Inner, Prev, K, T> KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
#[track_caller]
pub fn iter_keyed(self) -> StoreFieldKeyedIter<Inner, Prev, K, T> {
// reactively track changes to this field
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.track();
// read the field and capture its current keys
let reader = self
.reader()
.expect("creating iterator from unavailable store field");
let keys = reader
.into_iter()
.map(|item| (self.key_fn)(item))
.collect::<VecDeque<_>>();
// return the iterator
StoreFieldKeyedIter { inner: self, keys }
}
}
pub struct StoreFieldKeyedIter<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
T: IndexMut<usize>,
{
inner: KeyedSubfield<Inner, Prev, K, T>,
keys: VecDeque<K>,
}
impl<Inner, Prev, K, T> Iterator for StoreFieldKeyedIter<Inner, Prev, K, T>
where
Inner: StoreField<Value = Prev> + Clone + 'static,
T: IndexMut<usize> + 'static,
T::Output: Sized + 'static,
for<'a> &'a T: IntoIterator,
{
type Item = AtKeyed<Inner, Prev, K, T>;
fn next(&mut self) -> Option<Self::Item> {
self.keys
.pop_front()
.map(|key| AtKeyed::new(self.inner.clone(), key))
}
}
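A short sketch (not part of this diff) of the guard behavior documented in the Drop impl above: because KeyedSubfieldWriteGuard refreshes the key map when it is dropped, key-addressed rows stay consistent after the underlying Vec is mutated. It assumes the example app's Todos/Todo/Status types and its `leptos::prelude::*` imports:

fn remove_done(store: Store<Todos>) {
    // when the write guard is dropped at the end of this statement it
    // releases its lock, notifies subscribers, and re-runs the key function
    // over the remaining rows
    store
        .todos()
        .write()
        .retain(|todo| !matches!(todo.status, Status::Done));
}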

View file

@ -1,3 +1,4 @@
use or_poisoned::OrPoisoned;
use reactive_graph::{
owner::{LocalStorage, Storage, StoredValue, SyncStorage},
signal::{
@ -8,7 +9,10 @@ use reactive_graph::{
};
use rustc_hash::FxHashMap;
use std::{
any::Any,
collections::HashMap,
fmt::Debug,
hash::Hash,
panic::Location,
sync::{Arc, RwLock},
};
@ -16,6 +20,8 @@ use std::{
mod arc_field;
mod field;
mod iter;
mod keyed;
mod option;
mod patch;
mod path;
mod store_field;
@ -24,9 +30,11 @@ mod subfield;
pub use arc_field::ArcField;
pub use field::Field;
pub use iter::*;
pub use keyed::*;
pub use option::*;
pub use patch::*;
use path::StorePath;
pub use store_field::StoreField;
use path::{StorePath, StorePathSegment};
pub use store_field::{StoreField, Then};
pub use subfield::Subfield;
#[derive(Debug, Default)]
@ -49,11 +57,118 @@ impl TriggerMap {
}
}
pub struct FieldKeys<K> {
spare_keys: Vec<StorePathSegment>,
current_key: usize,
keys: HashMap<K, (StorePathSegment, usize)>,
}
impl<K> FieldKeys<K>
where
K: Debug + Hash + PartialEq + Eq,
{
pub fn new(from_iter: impl IntoIterator<Item = K>) -> Self {
let mut current_key = 0;
let mut keys = HashMap::new();
for (idx, key) in from_iter.into_iter().enumerate() {
let segment = current_key.into();
keys.insert(key, (segment, idx));
current_key += 1;
}
Self {
spare_keys: Vec::new(),
current_key,
keys,
}
}
}
impl<K> FieldKeys<K>
where
K: Hash + PartialEq + Eq,
{
pub fn get(&self, key: &K) -> Option<(StorePathSegment, usize)> {
self.keys.get(key).copied()
}
fn next_key(&mut self) -> StorePathSegment {
self.spare_keys.pop().unwrap_or_else(|| {
self.current_key += 1;
self.current_key.into()
})
}
pub fn update(&mut self, iter: impl IntoIterator<Item = K>) {
let new_keys = iter
.into_iter()
.enumerate()
.map(|(idx, key)| (key, idx))
.collect::<HashMap<K, usize>>();
// remove old keys and recycle the slots
self.keys.retain(|key, old_entry| match new_keys.get(key) {
Some(idx) => {
old_entry.1 = *idx;
true
}
None => {
self.spare_keys.push(old_entry.0);
false
}
});
// add new keys
for (key, idx) in new_keys {
// the clippy::map_entry suggestion doesn't compile, because we need `&mut self`
// for `self.next_key()`, and we don't want to call that until after the check
#[allow(clippy::map_entry)]
if !self.keys.contains_key(&key) {
let path = self.next_key();
self.keys.insert(key, (path, idx));
}
}
}
}
impl<K> Default for FieldKeys<K> {
fn default() -> Self {
Self {
spare_keys: Default::default(),
current_key: Default::default(),
keys: Default::default(),
}
}
}
#[derive(Default, Clone)]
pub struct KeyMap(Arc<RwLock<HashMap<StorePath, Box<dyn Any + Send + Sync>>>>);
impl KeyMap {
pub fn with_field_keys<K, T>(
&self,
path: StorePath,
fun: impl FnOnce(&mut FieldKeys<K>) -> T,
initialize: impl FnOnce() -> Vec<K>,
) -> Option<T>
where
K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static,
{
let mut guard = self.0.write().or_poisoned();
let entry = guard
.entry(path)
.or_insert_with(|| Box::new(FieldKeys::new(initialize())));
let entry = entry.downcast_mut::<FieldKeys<K>>()?;
Some(fun(entry))
}
}
pub struct ArcStore<T> {
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
pub(crate) value: Arc<RwLock<T>>,
signals: Arc<RwLock<TriggerMap>>,
keys: KeyMap,
}
impl<T> ArcStore<T> {
@ -63,7 +178,7 @@ impl<T> ArcStore<T> {
defined_at: Location::caller(),
value: Arc::new(RwLock::new(value)),
signals: Default::default(),
/* inner: Arc::new(RwLock::new(SubscriberSet::new())), */
keys: Default::default(),
}
}
}
@ -86,6 +201,7 @@ impl<T> Clone for ArcStore<T> {
defined_at: self.defined_at,
value: Arc::clone(&self.value),
signals: Arc::clone(&self.signals),
keys: self.keys.clone(),
}
}
}
@ -123,7 +239,7 @@ where
impl<T: 'static> Track for ArcStore<T> {
fn track(&self) {
self.get_trigger(Default::default()).notify();
self.get_trigger(Default::default()).track();
}
}
@ -248,7 +364,9 @@ where
#[cfg(test)]
mod tests {
use crate::{self as reactive_stores, Patch, Store, StoreFieldIterator};
use crate::{
self as reactive_stores, Patch, Store, StoreField, StoreFieldIterator,
};
use reactive_graph::{
effect::Effect,
traits::{Read, ReadUntracked, Set, Update, Writeable},
@ -462,4 +580,9 @@ mod tests {
tick().await;
assert_eq!(combined_count.load(Ordering::Relaxed), 2);
}
#[derive(Debug, Store)]
pub struct StructWithOption {
opt_field: Option<Todo>,
}
}
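A standalone sketch (not part of this diff) of the bookkeeping FieldKeys performs for a keyed field: each key maps to a stable StorePathSegment plus the item's current index, and `update` re-indexes surviving keys while recycling the segments of removed ones. The assertions below only inspect the index half of the pair:

use reactive_stores::FieldKeys;

fn main() {
    let mut keys = FieldKeys::new(["a", "b", "c"]);
    assert_eq!(keys.get(&"b").map(|(_, idx)| idx), Some(1));

    // "b" is removed and the remaining items are reordered
    keys.update(["c", "a"]);
    assert_eq!(keys.get(&"c").map(|(_, idx)| idx), Some(0));
    assert_eq!(keys.get(&"a").map(|(_, idx)| idx), Some(1));
    assert!(keys.get(&"b").is_none());
}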

View file

@ -0,0 +1,280 @@
use crate::{StoreField, Subfield};
use reactive_graph::traits::Read;
use std::ops::Deref;
pub trait OptionStoreExt
where
Self: StoreField<Value = Option<Self::Output>>,
{
type Output;
fn unwrap(self) -> Subfield<Self, Option<Self::Output>, Self::Output>;
fn map<U>(
self,
map_fn: impl FnOnce(Subfield<Self, Option<Self::Output>, Self::Output>) -> U,
) -> Option<U>;
}
impl<T, S> OptionStoreExt for S
where
S: StoreField<Value = Option<T>> + Read,
<S as Read>::Value: Deref<Target = Option<T>>,
{
type Output = T;
fn unwrap(self) -> Subfield<Self, Option<Self::Output>, Self::Output> {
Subfield::new(
self,
0.into(),
|t| t.as_ref().unwrap(),
|t| t.as_mut().unwrap(),
)
}
fn map<U>(
self,
map_fn: impl FnOnce(Subfield<S, Option<T>, T>) -> U,
) -> Option<U> {
if self.read().is_some() {
Some(map_fn(self.unwrap()))
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use crate::{self as reactive_stores, Store};
use reactive_graph::{
effect::Effect,
traits::{Get, Read, ReadUntracked, Set, Writeable},
};
use reactive_stores_macro::Store;
use std::sync::{
atomic::{AtomicUsize, Ordering},
Arc,
};
pub async fn tick() {
tokio::time::sleep(std::time::Duration::from_micros(1)).await;
}
#[derive(Debug, Clone, Store)]
pub struct User {
pub name: Option<Name>,
}
#[derive(Debug, Clone, Store)]
pub struct Name {
pub first_name: Option<String>,
}
#[tokio::test]
async fn substores_reachable_through_option() {
use crate::OptionStoreExt;
_ = any_spawner::Executor::init_tokio();
let combined_count = Arc::new(AtomicUsize::new(0));
let store = Store::new(User { name: None });
Effect::new_sync({
let combined_count = Arc::clone(&combined_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("first run");
} else {
println!("next run");
}
if store.name().read().is_some() {
println!(
"inner value = {:?}",
*store.name().unwrap().first_name().read()
);
} else {
println!("no inner value");
}
combined_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
store.name().set(Some(Name {
first_name: Some("Greg".into()),
}));
tick().await;
store.name().set(None);
tick().await;
store.name().set(Some(Name {
first_name: Some("Bob".into()),
}));
tick().await;
store
.name()
.unwrap()
.first_name()
.write()
.as_mut()
.unwrap()
.push_str("!!!");
tick().await;
assert_eq!(combined_count.load(Ordering::Relaxed), 5);
assert_eq!(
store
.name()
.read_untracked()
.as_ref()
.unwrap()
.first_name
.as_ref()
.unwrap(),
"Bob!!!"
);
}
#[tokio::test]
async fn updating_option_unwrap_subfield_doesnt_notify_option() {
use crate::OptionStoreExt;
_ = any_spawner::Executor::init_tokio();
let parent_count = Arc::new(AtomicUsize::new(0));
let inner_count = Arc::new(AtomicUsize::new(0));
let store = Store::new(User { name: None });
Effect::new_sync({
let parent_count = Arc::clone(&parent_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("parent: first run");
} else {
println!("parent: next run");
}
println!(" is_some = {}", store.name().read().is_some());
parent_count.fetch_add(1, Ordering::Relaxed);
}
});
Effect::new_sync({
let inner_count = Arc::clone(&inner_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("inner: first run");
} else {
println!("inner: next run");
}
if store.name().read().is_some() {
println!(
" inner label = {:?}",
*store.name().unwrap().first_name().read()
);
} else {
println!(" no inner value");
}
inner_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 1);
assert_eq!(inner_count.load(Ordering::Relaxed), 1);
store.name().set(Some(Name {
first_name: Some("Alice".into()),
}));
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 2);
assert_eq!(inner_count.load(Ordering::Relaxed), 2);
println!("\nUpdating first name only");
store
.name()
.unwrap()
.first_name()
.write()
.as_mut()
.unwrap()
.push_str("!!!");
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 2);
assert_eq!(inner_count.load(Ordering::Relaxed), 3);
}
#[tokio::test]
async fn mapping_over_optional_store_field() {
use crate::OptionStoreExt;
_ = any_spawner::Executor::init_tokio();
let parent_count = Arc::new(AtomicUsize::new(0));
let inner_count = Arc::new(AtomicUsize::new(0));
let store = Store::new(User { name: None });
Effect::new_sync({
let parent_count = Arc::clone(&parent_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("parent: first run");
} else {
println!("parent: next run");
}
println!(" is_some = {}", store.name().read().is_some());
parent_count.fetch_add(1, Ordering::Relaxed);
}
});
Effect::new_sync({
let inner_count = Arc::clone(&inner_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("inner: first run");
} else {
println!("inner: next run");
}
println!(
"store inner value length = {:?}",
store.name().map(|inner| inner
.first_name()
.get()
.unwrap_or_default()
.len())
);
inner_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 1);
assert_eq!(inner_count.load(Ordering::Relaxed), 1);
store.name().set(Some(Name {
first_name: Some("Greg".into()),
}));
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 2);
assert_eq!(inner_count.load(Ordering::Relaxed), 2);
println!("\nUpdating first name only");
store
.name()
.unwrap()
.first_name()
.write()
.as_mut()
.unwrap()
.push_str("!!!");
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 2);
assert_eq!(inner_count.load(Ordering::Relaxed), 3);
}
}

View file

@ -1,32 +1,65 @@
use crate::{
path::{StorePath, StorePathSegment},
ArcStore, Store,
ArcStore, KeyMap, Store,
};
use guardian::ArcRwLockWriteGuardian;
use or_poisoned::OrPoisoned;
use reactive_graph::{
owner::Storage,
signal::{
guards::{Plain, WriteGuard},
guards::{Mapped, MappedMut, Plain, UntrackedWriteGuard, WriteGuard},
ArcTrigger,
},
traits::{DefinedAt, UntrackableGuard},
traits::{
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
Writeable,
},
unwrap_signal,
};
use std::{iter, ops::Deref, sync::Arc};
use std::{
iter,
ops::{Deref, DerefMut},
panic::Location,
sync::Arc,
};
pub trait StoreField: Sized {
type Value;
type Reader: Deref<Target = Self::Value>;
type Writer: UntrackableGuard<Target = Self::Value>;
type UntrackedWriter: DerefMut<Target = Self::Value>;
fn get_trigger(&self, path: StorePath) -> ArcTrigger;
fn path(&self) -> impl IntoIterator<Item = StorePathSegment>;
fn track_field(&self) {
let path = self.path().into_iter().collect();
let trigger = self.get_trigger(path);
trigger.track();
}
fn reader(&self) -> Option<Self::Reader>;
fn writer(&self) -> Option<Self::Writer>;
fn untracked_writer(&self) -> Option<Self::UntrackedWriter>;
fn keys(&self) -> Option<KeyMap>;
#[track_caller]
fn then<T>(
self,
map_fn: fn(&Self::Value) -> &T,
map_fn_mut: fn(&mut Self::Value) -> &mut T,
) -> Then<T, Self> {
Then {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: self,
map_fn,
map_fn_mut,
}
}
}
impl<T> StoreField for ArcStore<T>
@ -35,7 +68,8 @@ where
{
type Value = T;
type Reader = Plain<T>;
type Writer = WriteGuard<ArcTrigger, ArcRwLockWriteGuardian<T>>;
type Writer = WriteGuard<ArcTrigger, UntrackedWriteGuard<T>>;
type UntrackedWriter = UntrackedWriteGuard<T>;
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
let triggers = &self.signals;
@ -53,10 +87,17 @@ where
fn writer(&self) -> Option<Self::Writer> {
let trigger = self.get_trigger(Default::default());
let guard =
ArcRwLockWriteGuardian::take(Arc::clone(&self.value)).ok()?;
let guard = self.untracked_writer()?;
Some(WriteGuard::new(trigger, guard))
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
UntrackedWriteGuard::try_new(Arc::clone(&self.value))
}
fn keys(&self) -> Option<KeyMap> {
Some(self.keys.clone())
}
}
impl<T, S> StoreField for Store<T, S>
@ -66,7 +107,8 @@ where
{
type Value = T;
type Reader = Plain<T>;
type Writer = WriteGuard<ArcTrigger, ArcRwLockWriteGuardian<T>>;
type Writer = WriteGuard<ArcTrigger, UntrackedWriteGuard<T>>;
type UntrackedWriter = UntrackedWriteGuard<T>;
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner
@ -89,4 +131,161 @@ where
fn writer(&self) -> Option<Self::Writer> {
self.inner.try_get_value().and_then(|n| n.writer())
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
self.inner
.try_get_value()
.and_then(|n| n.untracked_writer())
}
fn keys(&self) -> Option<KeyMap> {
self.inner.try_get_value().and_then(|inner| inner.keys())
}
}
#[derive(Debug, Copy, Clone)]
pub struct Then<T, S>
where
S: StoreField,
{
inner: S,
map_fn: fn(&S::Value) -> &T,
map_fn_mut: fn(&mut S::Value) -> &mut T,
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
}
impl<T, S> Then<T, S>
where
S: StoreField,
{
#[track_caller]
pub fn new(
inner: S,
map_fn: fn(&S::Value) -> &T,
map_fn_mut: fn(&mut S::Value) -> &mut T,
) -> Self {
Self {
inner,
map_fn,
map_fn_mut,
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
}
}
impl<T, S> StoreField for Then<T, S>
where
S: StoreField,
{
type Value = T;
type Reader = Mapped<S::Reader, T>;
type Writer = MappedMut<S::Writer, T>;
type UntrackedWriter = MappedMut<S::UntrackedWriter, T>;
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner.get_trigger(path)
}
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner.path()
}
fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
Some(Mapped::new_with_guard(inner, self.map_fn))
}
fn writer(&self) -> Option<Self::Writer> {
let inner = self.inner.writer()?;
Some(MappedMut::new(inner, self.map_fn, self.map_fn_mut))
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
let inner = self.inner.untracked_writer()?;
Some(MappedMut::new(inner, self.map_fn, self.map_fn_mut))
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<T, S> DefinedAt for Then<T, S>
where
S: StoreField,
{
fn defined_at(&self) -> Option<&'static Location<'static>> {
#[cfg(debug_assertions)]
{
Some(self.defined_at)
}
#[cfg(not(debug_assertions))]
{
None
}
}
}
impl<T, S> IsDisposed for Then<T, S>
where
S: StoreField + IsDisposed,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
}
}
impl<T, S> Notify for Then<T, S>
where
S: StoreField,
{
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.notify();
}
}
impl<T, S> Track for Then<T, S>
where
S: StoreField,
{
fn track(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.track();
}
}
impl<T, S> ReadUntracked for Then<T, S>
where
S: StoreField,
{
type Value = <Self as StoreField>::Reader;
fn try_read_untracked(&self) -> Option<Self::Value> {
self.reader()
}
}
impl<T, S> Writeable for Then<T, S>
where
T: 'static,
S: StoreField,
{
type Value = T;
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
self.writer()
}
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
self.writer().map(|mut writer| {
writer.untrack();
writer
})
}
}
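A sketch (not part of this diff) of the new `then` combinator: it builds a Then lens over a store by hand, without the derive macro, assuming the Todos struct from the example app. Note that Then reuses its inner field's path, so it tracks and notifies at the granularity of that inner field:

use leptos::prelude::*; // Read/Writeable traits, as in the example app
use reactive_stores::{Store, StoreField};

fn rename_user() {
    let store = Store::new(Todos { user: "Bob".to_string(), todos: Vec::new() });

    // a hand-rolled lens from Todos down to its `user` field
    let user = store.then(|todos| &todos.user, |todos| &mut todos.user);

    assert_eq!(user.read().as_str(), "Bob");
    *user.write() = "Alice".to_string();
    assert_eq!(store.read().user, "Alice");
}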

View file

@ -1,6 +1,7 @@
use crate::{
path::{StorePath, StorePathSegment},
store_field::StoreField,
KeyMap,
};
use reactive_graph::{
signal::{
@ -15,10 +16,7 @@ use reactive_graph::{
use std::{iter, marker::PhantomData, ops::DerefMut, panic::Location};
#[derive(Debug)]
pub struct Subfield<Inner, Prev, T>
where
Inner: StoreField<Value = Prev>,
{
pub struct Subfield<Inner, Prev, T> {
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
path_segment: StorePathSegment,
@ -30,7 +28,7 @@ where
impl<Inner, Prev, T> Clone for Subfield<Inner, Prev, T>
where
Inner: StoreField<Value = Prev> + Clone,
Inner: Clone,
{
fn clone(&self) -> Self {
Self {
@ -45,15 +43,9 @@ where
}
}
impl<Inner, Prev, T> Copy for Subfield<Inner, Prev, T> where
Inner: StoreField<Value = Prev> + Copy
{
}
impl<Inner, Prev, T> Copy for Subfield<Inner, Prev, T> where Inner: Copy {}
impl<Inner, Prev, T> Subfield<Inner, Prev, T>
where
Inner: StoreField<Value = Prev>,
{
impl<Inner, Prev, T> Subfield<Inner, Prev, T> {
#[track_caller]
pub fn new(
inner: Inner,
@ -81,6 +73,8 @@ where
type Value = T;
type Reader = Mapped<Inner::Reader, T>;
type Writer = MappedMut<WriteGuard<ArcTrigger, Inner::Writer>, T>;
type UntrackedWriter =
MappedMut<WriteGuard<ArcTrigger, Inner::UntrackedWriter>, T>;
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner
@ -103,6 +97,17 @@ where
let inner = WriteGuard::new(trigger, self.inner.writer()?);
Some(MappedMut::new(inner, self.read, self.write))
}
fn untracked_writer(&self) -> Option<Self::UntrackedWriter> {
let trigger = self.get_trigger(self.path().into_iter().collect());
let inner = WriteGuard::new(trigger, self.inner.untracked_writer()?);
Some(MappedMut::new(inner, self.read, self.write))
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<Inner, Prev, T> DefinedAt for Subfield<Inner, Prev, T>
@ -123,7 +128,7 @@ where
impl<Inner, Prev, T> IsDisposed for Subfield<Inner, Prev, T>
where
Inner: StoreField<Value = Prev> + IsDisposed,
Inner: IsDisposed,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
@ -143,11 +148,12 @@ where
impl<Inner, Prev, T> Track for Subfield<Inner, Prev, T>
where
Inner: StoreField<Value = Prev> + 'static,
Inner: StoreField<Value = Prev> + Track + 'static,
Prev: 'static,
T: 'static,
{
fn track(&self) {
self.inner.track();
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.track();
}

View file

@ -8,6 +8,7 @@ edition.workspace = true
proc-macro = true
[dependencies]
convert_case = "0.6"
proc-macro-error = "1.0"
proc-macro2 = "1.0"
quote = "1.0"

View file

@ -1,12 +1,13 @@
use proc_macro2::Span;
use convert_case::{Case, Casing};
use proc_macro2::{Span, TokenStream};
use proc_macro_error::{abort, abort_call_site, proc_macro_error};
use quote::{quote, ToTokens};
use syn::{
parse::{Parse, ParseStream, Parser},
punctuated::Punctuated,
token::Comma,
Field, Generics, Ident, Index, Meta, Result, Token, Type, Visibility,
WhereClause,
Field, Fields, Generics, Ident, Index, Meta, Result, Token, Type, Variant,
Visibility, WhereClause, ExprClosure,
};
#[proc_macro_error]
@ -26,54 +27,69 @@ pub fn derive_patch(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
}
struct Model {
pub vis: Visibility,
pub struct_name: Ident,
pub generics: Generics,
pub fields: Vec<Field>,
vis: Visibility,
name: Ident,
generics: Generics,
ty: ModelTy,
}
enum ModelTy {
Struct { fields: Vec<Field> },
Enum { variants: Vec<Variant> },
}
impl Parse for Model {
fn parse(input: ParseStream) -> Result<Self> {
let input = syn::DeriveInput::parse(input)?;
let syn::Data::Struct(s) = input.data else {
abort_call_site!("only structs can be used with `Store`");
};
let ty = match input.data {
syn::Data::Struct(s) => {
let fields = match s.fields {
syn::Fields::Unit => {
abort!(s.semi_token, "unit structs are not supported");
}
syn::Fields::Named(fields) => {
fields.named.into_iter().collect::<Vec<_>>()
}
syn::Fields::Unnamed(fields) => {
fields.unnamed.into_iter().collect::<Vec<_>>()
}
};
let fields = match s.fields {
syn::Fields::Unit => {
abort!(s.semi_token, "unit structs are not supported");
ModelTy::Struct { fields }
}
syn::Fields::Named(fields) => {
fields.named.into_iter().collect::<Vec<_>>()
}
syn::Fields::Unnamed(fields) => {
fields.unnamed.into_iter().collect::<Vec<_>>()
syn::Data::Enum(e) => ModelTy::Enum {
variants: e.variants.into_iter().collect(),
},
_ => {
abort_call_site!(
"only structs and enums can be used with `Store`"
);
}
};
Ok(Self {
vis: input.vis,
struct_name: input.ident,
generics: input.generics,
fields,
name: input.ident,
ty,
})
}
}
#[derive(Clone)]
enum SubfieldMode {
Keyed(Ident, Type),
Keyed(ExprClosure, Type),
}
impl Parse for SubfieldMode {
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
let mode: Ident = input.parse()?;
if mode == "key" {
let _eq: Token!(=) = input.parse()?;
let ident: Ident = input.parse()?;
let _col: Token!(:) = input.parse()?;
let ty: Type = input.parse()?;
let _eq: Token!(=) = input.parse()?;
let ident: ExprClosure = input.parse()?;
Ok(SubfieldMode::Keyed(ident, ty))
} else {
Err(input.error("expected `key = <ident>: <Type>`"))
@ -81,6 +97,161 @@ impl Parse for SubfieldMode {
}
}
impl ToTokens for Model {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let library_path = quote! { reactive_stores };
let Model {
vis,
name,
generics,
ty,
} = &self;
let any_store_field = Ident::new("AnyStoreField", Span::call_site());
let trait_name = Ident::new(&format!("{name}StoreFields"), name.span());
let generics_with_orig = {
let params = &generics.params;
quote! { <#any_store_field, #params> }
};
let where_with_orig = {
generics
.where_clause
.as_ref()
.map(|w| {
let WhereClause {
where_token,
predicates,
} = &w;
quote! {
#where_token
#any_store_field: #library_path::StoreField<Value = #name #generics>,
#predicates
}
})
.unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #name #generics> })
};
// define an extension trait that mirrors this struct or enum
// and implement that trait for all StoreFields
let (trait_fields, read_fields): (Vec<_>, Vec<_>) =
ty.to_field_data(&library_path, generics, &any_store_field, name);
// read access
tokens.extend(quote! {
#vis trait #trait_name <AnyStoreField>
#where_with_orig
{
#(#trait_fields)*
}
impl #generics_with_orig #trait_name <AnyStoreField> for AnyStoreField
#where_with_orig
{
#(#read_fields)*
}
});
}
}
impl ModelTy {
fn to_field_data(
&self,
library_path: &TokenStream,
generics: &Generics,
any_store_field: &Ident,
name: &Ident,
) -> (Vec<TokenStream>, Vec<TokenStream>) {
match self {
ModelTy::Struct { fields } => fields
.iter()
.enumerate()
.map(|(idx, field)| {
let Field {
ident, ty, attrs, ..
} = &field;
let modes = attrs
.iter()
.find_map(|attr| {
attr.meta.path().is_ident("store").then(|| {
match &attr.meta {
Meta::List(list) => {
match Punctuated::<
SubfieldMode,
Comma,
>::parse_terminated
.parse2(list.tokens.clone())
{
Ok(modes) => Some(
modes
.iter()
.cloned()
.collect::<Vec<_>>(),
),
Err(e) => abort!(list, e),
}
}
_ => None,
}
})
})
.flatten();
(
field_to_tokens(
idx,
false,
modes.as_deref(),
library_path,
ident.as_ref(),
generics,
any_store_field,
name,
ty,
),
field_to_tokens(
idx,
true,
modes.as_deref(),
library_path,
ident.as_ref(),
generics,
any_store_field,
name,
ty,
),
)
})
.unzip(),
ModelTy::Enum { variants } => variants
.iter()
.map(|variant| {
let Variant { ident, fields, .. } = variant;
(
variant_to_tokens(
false,
library_path,
ident,
generics,
any_store_field,
name,
fields,
),
variant_to_tokens(
true,
library_path,
ident,
generics,
any_store_field,
name,
fields,
),
)
})
.unzip(),
}
}
}
#[allow(clippy::too_many_arguments)]
fn field_to_tokens(
idx: usize,
@ -90,7 +261,7 @@ fn field_to_tokens(
orig_ident: Option<&Ident>,
generics: &Generics,
any_store_field: &Ident,
struct_name: &Ident,
name: &Ident,
ty: &Type,
) -> proc_macro2::TokenStream {
let ident = if orig_ident.is_none() {
@ -110,15 +281,20 @@ fn field_to_tokens(
if modes.len() == 1 {
let mode = &modes[0];
// Can replace with a match if additional modes added
// TODO keyed_by
let SubfieldMode::Keyed(_keyed_by, key_ty) = mode;
let SubfieldMode::Keyed(keyed_by, key_ty) = mode;
let signature = quote! {
fn #ident(self) -> #library_path::KeyedField<#any_store_field, #struct_name #generics, #ty, #key_ty>
fn #ident(self) -> #library_path::KeyedSubfield<#any_store_field, #name #generics, #key_ty, #ty>
};
return if include_body {
quote! {
#signature {
todo!()
#library_path::KeyedSubfield::new(
self,
#idx.into(),
#keyed_by,
|prev| &prev.#locator,
|prev| &mut prev.#locator,
)
}
}
} else {
@ -137,7 +313,7 @@ fn field_to_tokens(
// default subfield
if include_body {
quote! {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #struct_name #generics, #ty> {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty> {
#library_path::Subfield::new(
self,
#idx.into(),
@ -148,93 +324,212 @@ fn field_to_tokens(
}
} else {
quote! {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #struct_name #generics, #ty>;
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty>;
}
}
}
impl ToTokens for Model {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let library_path = quote! { reactive_stores };
let Model {
vis,
struct_name,
generics,
fields,
} = &self;
let any_store_field = Ident::new("AnyStoreField", Span::call_site());
let trait_name = Ident::new(
&format!("{struct_name}StoreFields"),
struct_name.span(),
);
let generics_with_orig = {
let params = &generics.params;
quote! { <#any_store_field, #params> }
};
let where_with_orig = {
generics
.where_clause
.as_ref()
.map(|w| {
let WhereClause {
where_token,
predicates,
} = &w;
quote! {
#where_token
#any_store_field: #library_path::StoreField<Value = #struct_name #generics>,
#predicates
}
})
.unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #struct_name #generics> })
};
#[allow(clippy::too_many_arguments)]
fn variant_to_tokens(
include_body: bool,
library_path: &proc_macro2::TokenStream,
ident: &Ident,
generics: &Generics,
any_store_field: &Ident,
name: &Ident,
fields: &Fields,
) -> proc_macro2::TokenStream {
// the method name will always be the snake_cased ident
let orig_ident = &ident;
let ident =
Ident::new(&ident.to_string().to_case(Case::Snake), ident.span());
// define an extension trait that matches this struct
let all_field_data = fields.iter().enumerate().map(|(idx, field)| {
let Field { ident, ty, attrs, .. } = &field;
let modes = attrs.iter().find_map(|attr| {
attr.meta.path().is_ident("store").then(|| {
match &attr.meta {
Meta::List(list) => {
match Punctuated::<SubfieldMode, Comma>::parse_terminated.parse2(list.tokens.clone()) {
Ok(modes) => Some(modes.iter().cloned().collect::<Vec<_>>()),
Err(e) => abort!(list, e)
match fields {
// For unit enum variants, we just generate a `bool` getter, which is
// true when this variant currently matches
Fields::Unit => {
// default subfield
if include_body {
quote! {
fn #ident(self) -> bool {
match #library_path::StoreField::reader(&self) {
Some(reader) => {
#library_path::StoreField::track_field(&self);
matches!(&*reader, #name::#orig_ident)
},
None => false
}
}
}
} else {
quote! {
fn #ident(self) -> bool;
}
}
}
// If an enum variant has named fields, we create N + 1 methods:
// 1 `bool` getter, which is true when this variant matches
// N `Option<Subfield<_, _, T>>` getters, one for each named field
Fields::Named(fields) => {
let mut tokens = if include_body {
quote! {
fn #ident(self) -> bool {
match #library_path::StoreField::reader(&self) {
Some(reader) => {
#library_path::StoreField::track_field(&self);
matches!(&*reader, #name::#orig_ident { .. })
},
None => false
}
}
}
} else {
quote! {
fn #ident(self) -> bool;
}
};
tokens.extend(fields
.named
.iter()
.map(|field| {
let field_ident = field.ident.as_ref().unwrap();
let field_ty = &field.ty;
let combined_ident = Ident::new(
&format!("{}_{}", ident, field_ident),
field_ident.span(),
);
// default subfield
if include_body {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> {
#library_path::StoreField::track_field(&self);
let reader = #library_path::StoreField::reader(&self);
let matches = reader
.map(|reader| matches!(&*reader, #name::#orig_ident { .. }))
.unwrap_or(false);
if matches {
Some(#library_path::Subfield::new(
self,
0.into(),
|prev| {
match prev {
#name::#orig_ident { #field_ident, .. } => Some(#field_ident),
_ => None,
}
.expect("accessed an enum field that is no longer matched")
},
|prev| {
match prev {
#name::#orig_ident { #field_ident, .. } => Some(#field_ident),
_ => None,
}
.expect("accessed an enum field that is no longer matched")
},
))
} else {
None
}
}
},
_ => None
}
} else {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>;
}
}
})
}).flatten();
}));
(
field_to_tokens(idx, false, modes.as_deref(), &library_path, ident.as_ref(), generics, &any_store_field, struct_name, ty),
field_to_tokens(idx, true, modes.as_deref(), &library_path, ident.as_ref(), generics, &any_store_field, struct_name, ty),
)
});
tokens
}
// If an enum variant has unnamed fields, we create N + 1 methods:
// 1 `bool` getter, which is true when this variant matches
// N `Option<Subfield<_, _, T>>` getters, one for each unnamed field
Fields::Unnamed(fields) => {
let mut tokens = if include_body {
quote! {
fn #ident(self) -> bool {
match #library_path::StoreField::reader(&self) {
Some(reader) => {
#library_path::StoreField::track_field(&self);
matches!(&*reader, #name::#orig_ident { .. })
},
None => false
}
}
}
} else {
quote! {
fn #ident(self) -> bool;
}
};
// implement that trait for all StoreFields
let (trait_fields, read_fields): (Vec<_>, Vec<_>) =
all_field_data.unzip();
let number_of_fields = fields.unnamed.len();
// read access
tokens.extend(quote! {
#vis trait #trait_name <AnyStoreField>
#where_with_orig
{
#(#trait_fields)*
}
tokens.extend(fields
.unnamed
.iter()
.enumerate()
.map(|(idx, field)| {
let field_ident = idx;
let field_ty = &field.ty;
let combined_ident = Ident::new(
&format!("{}_{}", ident, field_ident),
ident.span(),
);
impl #generics_with_orig #trait_name <AnyStoreField> for AnyStoreField
#where_with_orig
{
#(#read_fields)*
}
});
let ignore_before = (0..idx).map(|_| quote! { _, });
let ignore_before2 = ignore_before.clone();
let ignore_after = (idx..number_of_fields.saturating_sub(1)).map(|_| quote! { _, });
let ignore_after2 = ignore_after.clone();
// default subfield
if include_body {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> {
#library_path::StoreField::track_field(&self);
let reader = #library_path::StoreField::reader(&self);
let matches = reader
.map(|reader| matches!(&*reader, #name::#orig_ident(..)))
.unwrap_or(false);
if matches {
Some(#library_path::Subfield::new(
self,
0.into(),
|prev| {
match prev {
#name::#orig_ident(#(#ignore_before)* this, #(#ignore_after)*) => Some(this),
_ => None,
}
.expect("accessed an enum field that is no longer matched")
},
|prev| {
match prev {
#name::#orig_ident(#(#ignore_before2)* this, #(#ignore_after2)*) => Some(this),
_ => None,
}
.expect("accessed an enum field that is no longer matched")
},
))
} else {
None
}
}
}
} else {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>;
}
}
}));
tokens
}
}
}
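To make the generated surface concrete, here is a usage sketch (not emitted verbatim by the macro) of the enum accessors derived for the example app's Status. The method names follow the snake_casing and `{variant}_{field}` scheme above; the sketch assumes Status, its generated StatusStoreFields trait, and `leptos::prelude::*` are in scope:

fn describe(status: reactive_stores::Field<Status>) -> String {
    // unit variant `Done` -> a `done()` method returning bool
    if status.done() {
        return "done".to_string();
    }
    // struct variant `ScheduledFor { date }` -> `scheduled_for()` returning bool,
    // plus `scheduled_for_date()` returning Option<Subfield<_, _, NaiveDate>>
    if let Some(date) = status.scheduled_for_date() {
        return format!("scheduled for {}", date.get());
    }
    "pending or scheduled".to_string()
}
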
struct PatchModel {
pub struct_name: Ident,
pub name: Ident,
pub generics: Generics,
pub fields: Vec<Field>,
}
@ -260,7 +555,7 @@ impl Parse for PatchModel {
};
Ok(Self {
struct_name: input.ident,
name: input.ident,
generics: input.generics,
fields,
})
@ -271,7 +566,7 @@ impl ToTokens for PatchModel {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let library_path = quote! { reactive_stores };
let PatchModel {
struct_name,
name,
generics,
fields,
} = &self;
@ -294,7 +589,7 @@ impl ToTokens for PatchModel {
// read access
tokens.extend(quote! {
impl #library_path::PatchField for #struct_name #generics
impl #library_path::PatchField for #name #generics
{
fn patch_field(
&mut self,