Mirror of https://github.com/DioxusLabs/dioxus (synced 2025-02-17 06:08:26 +00:00)

Commit 0a0be95c3e (parent 1749eba8eb)

    wip: get keyed diffing compiling

6 changed files with 485 additions and 180 deletions
@@ -261,12 +261,10 @@ impl<const AS: HTML_OR_RSX> ToTokens for Component<AS> {
         let key_token = match has_key {
             Some(field) => {
-                let inners = field.content.to_token_stream();
-                quote! {
-                    Some(#inners)
-                }
+                let inners = &field.content;
+                quote! { Some(format_args_f!(#inners)) }
             }
-            None => quote! {None},
+            None => quote! { None },
         };

         let childs = &self.children;

@@ -279,7 +277,7 @@ impl<const AS: HTML_OR_RSX> ToTokens for Component<AS> {
             #name,
             #builder,
             #key_token,
-            __cx.bump().alloc(#children)
+            #children
         )
     })
 }

@@ -13,7 +13,7 @@ use syn::{
 // =======================================
 pub struct Element<const AS: HTML_OR_RSX> {
     name: Ident,
-    key: Option<AttrType>,
+    key: Option<LitStr>,
     attributes: Vec<ElementAttr<AS>>,
     listeners: Vec<ElementAttr<AS>>,
     children: Vec<BodyNode<AS>>,

@@ -195,6 +195,10 @@ impl<const AS: HTML_OR_RSX> ToTokens for Element<AS> {
         let attr = &self.attributes;
         let childs = &self.children;
         let listeners = &self.listeners;
+        let key = match &self.key {
+            Some(ty) => quote! { Some(format_args_f!(#ty)) },
+            None => quote! { None },
+        };

         tokens.append_all(quote! {
             __cx.element(

@@ -202,10 +206,7 @@ impl<const AS: HTML_OR_RSX> ToTokens for Element<AS> {
                 [ #(#listeners),* ],
                 [ #(#attr),* ],
                 [ #(#childs),* ],
-                // __cx.bump().alloc([ #(#listeners),* ]),
-                // __cx.bump().alloc([ #(#attr),* ]),
-                // __cx.bump().alloc([ #(#childs),* ]),
-                None,
+                #key,
             )
         });
     }

@@ -235,7 +236,7 @@ fn parse_rsx_element_field(
     stream: ParseStream,
     attrs: &mut Vec<ElementAttr<AS_RSX>>,
     listeners: &mut Vec<ElementAttr<AS_RSX>>,
-    key: &mut Option<AttrType>,
+    key: &mut Option<LitStr>,
     element_name: Ident,
 ) -> Result<()> {
     let name = Ident::parse_any(stream)?;

@@ -301,7 +302,7 @@ fn parse_rsx_element_field(
             return Ok(());
         }
         "key" => {
-            *key = Some(AttrType::BumpText(stream.parse::<LitStr>()?));
+            *key = Some(stream.parse::<LitStr>()?);
            return Ok(());
         }
         "classes" => {

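For context on the macro-side hunks above: an element or component key is now parsed as a plain LitStr and emitted as Some(format_args_f!(#key)), so a key written as key: "{id}" can interpolate locals at runtime. Below is a minimal, self-contained sketch of that token-generation pattern; it assumes the quote, syn, and proc-macro2 crates, and key_tokens is an illustrative helper, not a function in this codebase:

    use proc_macro2::{Span, TokenStream};
    use quote::quote;
    use syn::LitStr;

    // Turn an optional string-literal key into the tokens the generated code will run.
    // `format_args_f!` is only interpolated into the output here, never invoked.
    fn key_tokens(key: &Option<LitStr>) -> TokenStream {
        match key {
            Some(lit) => quote! { Some(format_args_f!(#lit)) },
            None => quote! { None },
        }
    }

    fn main() {
        let key = Some(LitStr::new("{id}", Span::call_site()));
        // Prints roughly: Some (format_args_f ! ("{id}"))
        println!("{}", key_tokens(&key));
    }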
@@ -334,7 +334,7 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
             | VNodeKind::Element(_)
             | VNodeKind::Anchor(_),
         ) => {
-            self.replace_many_with_many([old_node], new_node);
+            self.replace_many_with_many([old_node], [new_node]);
         }

         // TODO

@@ -615,7 +615,7 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
         // Replace whatever nodes are sitting there with the anchor
         (_, VNodeKind::Anchor(anchor)) => {
-            self.replace_many_with_many(old, first_new);
+            self.replace_many_with_many(old, [first_new]);
         }

         // Use the complex diff algorithm to diff the nodes

@@ -665,6 +665,7 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
     //
     // Upon exiting, the change list stack is in the same state.
     fn diff_keyed_children(&mut self, old: &'bump [VNode<'bump>], new: &'bump [VNode<'bump>]) {
+        println!("diffing keyed children!");
         if cfg!(debug_assertions) {
             let mut keys = fxhash::FxHashSet::default();
             let mut assert_unique_keys = |children: &'bump [VNode<'bump>]| {

@@ -749,7 +750,7 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
     ) -> KeyedPrefixResult {
         let mut shared_prefix_count = 0;

-        for (i, (old, new)) in old.iter().zip(new.iter()).enumerate() {
+        for (old, new) in old.iter().zip(new.iter()) {
             // abort early if we finally run into nodes with different keys
             if old.key() != new.key() {
                 break;

@@ -770,8 +771,6 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
         // And if that was all of the new children, then remove all of the remaining
         // old children and we're finished.
         if shared_prefix_count == new.len() {
-            // self.go_to_sibling(shared_prefix_count);
-            // self.commit_traversal();
             self.remove_self_and_next_siblings(&old[shared_prefix_count..]);
             return KeyedPrefixResult::Finished;
         }
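The prefix/suffix hunks above belong to the usual three-phase keyed diff: walk the shared prefix, walk the shared suffix, then hand whatever is left in the middle to the LIS-based pass. A standalone sketch of that decomposition over plain key slices (illustrative only; the real code walks VNodes and emits DomEdits instead of returning counts):

    // Standalone sketch of the keyed-diff decomposition, not Dioxus code.
    fn keyed_phases<'a>(old: &[&'a str], new: &[&'a str]) -> (usize, usize) {
        // 1. Shared prefix: children whose keys already line up are diffed in place.
        let prefix = old
            .iter()
            .zip(new.iter())
            .take_while(|(o, n)| o == n)
            .count();

        // 2. Shared suffix: same idea, walking from the back of what remains.
        let suffix = old[prefix..]
            .iter()
            .rev()
            .zip(new[prefix..].iter().rev())
            .take_while(|(o, n)| o == n)
            .count();

        // 3. The middle gets the expensive treatment: reuse keyed nodes, move the
        //    minimum number of them (via an LIS), and create/remove the rest.
        (prefix, suffix)
    }

    fn main() {
        let old = ["a", "b", "c", "d", "e"];
        let new = ["a", "c", "b", "e"];
        assert_eq!(keyed_phases(&old, &new), (1, 1)); // prefix "a", suffix "e"
    }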
@@ -801,15 +800,17 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
     // this subsequence will remain in place, minimizing the number of DOM moves we
     // will have to do.
     //
-    // Upon entry to this function, the change list stack must be:
+    // Upon entry to this function, the change list stack must be empty.
+    //
+    // This function will load the appropriate nodes onto the stack and do diffing in place.
+    //
+    // used to be:
     // [... parent]
     //
     // Upon exit from this function, it will be restored to that same state.
     fn diff_keyed_middle(
         &mut self,
         old: &'bump [VNode<'bump>],
-        new: &'bump [VNode<'bump>],
+        mut new: &'bump [VNode<'bump>],
         shared_prefix_count: usize,
         shared_suffix_count: usize,
         old_shared_suffix_start: usize,

@@ -855,20 +856,23 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
         // remove all the remaining old children and create the new children
         // afresh.
         if shared_suffix_count == 0 && shared_keys.is_empty() {
-            // if shared_prefix_count == 0 {
-            //     self.remove_all_children(old);
-            // } else {
-            //     self.remove_self_and_next_siblings(&old[shared_prefix_count..]);
-            // }
-
-            // self.create_and_append_children(new);
+            if shared_prefix_count == 0 {
+                // self.commit_traversal();
+                self.remove_all_children(old);
+            } else {
+                // self.go_down_to_child(shared_prefix_count);
+                // self.commit_traversal();
+                self.remove_self_and_next_siblings(&old[shared_prefix_count..]);
+            }
+            // create all the children anew
+            // load all the old childrne
+            // replace n to m
+
+            self.create_and_append_children(new);

             return;
         }

+        dbg!("hello!");
+
         // Save each of the old children whose keys are reused in the new
         // children.
         let mut old_index_to_temp = vec![u32::MAX; old.len()];

@@ -883,14 +887,15 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {

         if end - start > 0 {
             // self.commit_traversal();
+            let mut t = 5;
             // let mut t = self.save_children_to_temporaries(
             //     shared_prefix_count + start,
             //     shared_prefix_count + end,
             // );
-            // for i in start..end {
-            //     old_index_to_temp[i] = t;
-            //     t += 1;
-            // }
+            for i in start..end {
+                old_index_to_temp[i] = t;
+                t += 1;
+            }
         }

         debug_assert!(end <= old.len());
@@ -901,132 +906,133 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
             }
         }

-        // // Remove any old children whose keys were not reused in the new
-        // // children. Remove from the end first so that we don't mess up indices.
-        // let mut removed_count = 0;
-        // for (i, old_child) in old.iter().enumerate().rev() {
-        //     if !shared_keys.contains(&old_child.key()) {
-        //         // registry.remove_subtree(old_child);
-        //         // todo
-        //         // self.commit_traversal();
-        //         self.remove(old_child.dom_id.get());
-        //         self.remove_child(i + shared_prefix_count);
-        //         removed_count += 1;
-        //     }
-        // }
+        // Remove any old children whose keys were not reused in the new
+        // children. Remove from the end first so that we don't mess up indices.
+        let mut removed_count = 0;
+        for (i, old_child) in old.iter().enumerate().rev() {
+            if !shared_keys.contains(&old_child.key()) {
+                // self.remove(old_child.dom_id.get());
+                // self.remove_child(i + shared_prefix_count);
+                removed_count += 1;
+            }
+        }

-        // // If there aren't any more new children, then we are done!
-        // if new.is_empty() {
-        //     return;
-        // }
+        // If there aren't any more new children, then we are done!
+        if new.is_empty() {
+            return;
+        }

-        // // The longest increasing subsequence within `new_index_to_old_index`. This
-        // // is the longest sequence on DOM nodes in `old` that are relatively ordered
-        // // correctly within `new`. We will leave these nodes in place in the DOM,
-        // // and only move nodes that are not part of the LIS. This results in the
-        // // maximum number of DOM nodes left in place, AKA the minimum number of DOM
-        // // nodes moved.
-        // let mut new_index_is_in_lis = FxHashSet::default();
-        // new_index_is_in_lis.reserve(new_index_to_old_index.len());
-        // let mut predecessors = vec![0; new_index_to_old_index.len()];
-        // let mut starts = vec![0; new_index_to_old_index.len()];
-        // longest_increasing_subsequence::lis_with(
-        //     &new_index_to_old_index,
-        //     &mut new_index_is_in_lis,
-        //     |a, b| a < b,
-        //     &mut predecessors,
-        //     &mut starts,
-        // );
+        // The longest increasing subsequence within `new_index_to_old_index`. This
+        // is the longest sequence on DOM nodes in `old` that are relatively ordered
+        // correctly within `new`. We will leave these nodes in place in the DOM,
+        // and only move nodes that are not part of the LIS. This results in the
+        // maximum number of DOM nodes left in place, AKA the minimum number of DOM
+        // nodes moved.
+        let mut new_index_is_in_lis = FxHashSet::default();
+        new_index_is_in_lis.reserve(new_index_to_old_index.len());
+        let mut predecessors = vec![0; new_index_to_old_index.len()];
+        let mut starts = vec![0; new_index_to_old_index.len()];
+        longest_increasing_subsequence::lis_with(
+            &new_index_to_old_index,
+            &mut new_index_is_in_lis,
+            |a, b| a < b,
+            &mut predecessors,
+            &mut starts,
+        );

-        // // Now we will iterate from the end of the new children back to the
-        // // beginning, diffing old children we are reusing and if they aren't in the
-        // // LIS moving them to their new destination, or creating new children. Note
-        // // that iterating in reverse order lets us use `Node.prototype.insertBefore`
-        // // to move/insert children.
-        // //
-        // // But first, we ensure that we have a child on the change list stack that
-        // // we can `insertBefore`. We handle this once before looping over `new`
-        // // children, so that we don't have to keep checking on every loop iteration.
-        // if shared_suffix_count > 0 {
-        //     // There is a shared suffix after these middle children. We will be
-        //     // inserting before that shared suffix, so add the first child of that
-        //     // shared suffix to the change list stack.
-        //     //
-        //     // [... parent]
-        //     self.edits
-        //         .go_down_to_child(old_shared_suffix_start - removed_count);
-        //     // [... parent first_child_of_shared_suffix]
-        // } else {
-        //     // There is no shared suffix coming after these middle children.
-        //     // Therefore we have to process the last child in `new` and move it to
-        //     // the end of the parent's children if it isn't already there.
-        //     let last_index = new.len() - 1;
-        //     // uhhhh why an unwrap?
-        //     let last = new.last().unwrap();
-        //     // let last = new.last().unwrap_throw();
-        //     new = &new[..new.len() - 1];
-        //     if shared_keys.contains(&last.key()) {
-        //         let old_index = new_index_to_old_index[last_index];
-        //         let temp = old_index_to_temp[old_index];
-        //         // [... parent]
-        //         self.go_down_to_temp_child(temp);
-        //         // [... parent last]
-        //         self.diff_node(&old[old_index], last);
-        //         if new_index_is_in_lis.contains(&last_index) {
-        //             // Don't move it, since it is already where it needs to be.
-        //         } else {
-        //             // self.commit_traversal();
-        //             // [... parent last]
-        //             self.append_child();
-        //             // [... parent]
-        //             self.go_down_to_temp_child(temp);
-        //             // [... parent last]
-        //         }
-        //     } else {
-        //         // self.commit_traversal();
-        //         // [... parent]
-        //         self.create(last);
-        //         // [... parent last]
-        //         self.append_child();
-        //         // [... parent]
-        //         self.go_down_to_reverse_child(0);
-        //         // [... parent last]
-        //     }
-        // }
+        // Now we will iterate from the end of the new children back to the
+        // beginning, diffing old children we are reusing and if they aren't in the
+        // LIS moving them to their new destination, or creating new children. Note
+        // that iterating in reverse order lets us use `Node.prototype.insertBefore`
+        // to move/insert children.
+        //
+        // But first, we ensure that we have a child on the change list stack that
+        // we can `insertBefore`. We handle this once before looping over `new`
+        // children, so that we don't have to keep checking on every loop iteration.
+        if shared_suffix_count > 0 {
+            // There is a shared suffix after these middle children. We will be
+            // inserting before that shared suffix, so add the first child of that
+            // shared suffix to the change list stack.
+            //
+            // [... parent]

+            // TODO

+            // self.edits
+            //     .go_down_to_child(old_shared_suffix_start - removed_count);
+            // [... parent first_child_of_shared_suffix]
+        } else {
+            // There is no shared suffix coming after these middle children.
+            // Therefore we have to process the last child in `new` and move it to
+            // the end of the parent's children if it isn't already there.
+            let last_index = new.len() - 1;
+            // uhhhh why an unwrap?
+            let last = new.last().unwrap();
+            // let last = new.last().unwrap_throw();
+            new = &new[..new.len() - 1];
+            if shared_keys.contains(&last.key()) {
+                let old_index = new_index_to_old_index[last_index];
+                let temp = old_index_to_temp[old_index];
+                // [... parent]
+                // self.go_down_to_temp_child(temp);
+                // [... parent last]
+                self.diff_node(&old[old_index], last);

+                if new_index_is_in_lis.contains(&last_index) {
+                    // Don't move it, since it is already where it needs to be.
+                } else {
+                    // self.commit_traversal();
+                    // [... parent last]
+                    // self.append_child();
+                    // [... parent]
+                    // self.go_down_to_temp_child(temp);
+                    // [... parent last]
+                }
+            } else {
+                // self.commit_traversal();
+                // [... parent]
+                let meta = self.create_vnode(last);

+                // [... parent last]
+                // self.append_child();
+                // [... parent]
+                // self.go_down_to_reverse_child(0);
+                // [... parent last]
+            }
+        }

-        //     for (new_index, new_child) in new.iter().enumerate().rev() {
-        //         let old_index = new_index_to_old_index[new_index];
-        //         if old_index == u32::MAX as usize {
-        //             debug_assert!(!shared_keys.contains(&new_child.key()));
-        //             // self.commit_traversal();
-        //             // [... parent successor]
-        //             self.create(new_child);
-        //             // [... parent successor new_child]
-        //             self.insert_before();
-        //             // [... parent new_child]
-        //         } else {
-        //             debug_assert!(shared_keys.contains(&new_child.key()));
-        //             let temp = old_index_to_temp[old_index];
-        //             debug_assert_ne!(temp, u32::MAX);
-        //             if new_index_is_in_lis.contains(&new_index) {
-        //                 // [... parent successor]
-        //                 self.go_to_temp_sibling(temp);
-        //                 // [... parent new_child]
-        //             } else {
-        //                 // self.commit_traversal();
-        //                 // [... parent successor]
-        //                 self.push_temporary(temp);
-        //                 // [... parent successor new_child]
-        //                 self.insert_before();
-        //                 // [... parent new_child]
-        //             }
-        //             self.diff_node(&old[old_index], new_child);
-        //         }
-        //     }
+        for (new_index, new_child) in new.iter().enumerate().rev() {
+            let old_index = new_index_to_old_index[new_index];
+            if old_index == u32::MAX as usize {
+                debug_assert!(!shared_keys.contains(&new_child.key()));
+                // self.commit_traversal();
+                // [... parent successor]
+                let meta = self.create_vnode(new_child);
+                // [... parent successor new_child]
+                self.edit_insert_after(meta.added_to_stack);
+                // self.insert_before();
+                // [... parent new_child]
+            } else {
+                debug_assert!(shared_keys.contains(&new_child.key()));
+                let temp = old_index_to_temp[old_index];
+                debug_assert_ne!(temp, u32::MAX);
+
+                if new_index_is_in_lis.contains(&new_index) {
+                    // [... parent successor]
+                    // self.go_to_temp_sibling(temp);
+                    // [... parent new_child]
+                } else {
+                    // self.commit_traversal();
+                    // [... parent successor]
+                    // self.push_temporary(temp);
+                    // [... parent successor new_child]
+                    // self.insert_before();
+                    // [... parent new_child]
+                }
+
+                self.diff_node(&old[old_index], new_child);
+            }
+        }
     }

     // Diff the suffix of keyed children that share the same keys in the same order.
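The block re-enabled above maps each new child to its old position (new_index_to_old_index) and asks the longest_increasing_subsequence crate for the LIS of that mapping; children whose indices lie on the LIS keep their relative order and never move, so only the remaining children get repositioned. Here is a self-contained illustration of the same idea using a small hand-rolled O(n²) LIS rather than the crate the code above calls:

    // Standalone illustration of why the LIS marks the nodes that can stay put.
    // `xs[i]` = where new child i lived in the old list (sentinels for brand-new
    // children are omitted for simplicity).
    fn lis_indices(xs: &[usize]) -> Vec<usize> {
        // O(n^2) LIS: lens[i] = length of the best increasing run ending at i.
        let mut lens = vec![1usize; xs.len()];
        let mut prev = vec![usize::MAX; xs.len()];
        for i in 0..xs.len() {
            for j in 0..i {
                if xs[j] < xs[i] && lens[j] + 1 > lens[i] {
                    lens[i] = lens[j] + 1;
                    prev[i] = j;
                }
            }
        }
        // Walk back from the best endpoint to recover the LIS positions.
        let mut i = (0..xs.len()).max_by_key(|&i| lens[i]).unwrap();
        let mut out = vec![i];
        while prev[i] != usize::MAX {
            i = prev[i];
            out.push(i);
        }
        out.reverse();
        out
    }

    fn main() {
        // New order [c, a, b, d] of old children [a, b, c, d] => old indices [2, 0, 1, 3].
        let new_index_to_old_index = [2, 0, 1, 3];
        // The LIS sits at new positions 1, 2, 3 (a, b, d stay put); only c is moved.
        assert_eq!(lis_indices(&new_index_to_old_index), vec![1, 2, 3]);
    }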
@@ -1061,16 +1067,19 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
     fn diff_non_keyed_children(&mut self, old: &'bump [VNode<'bump>], new: &'bump [VNode<'bump>]) {
         // Handled these cases in `diff_children` before calling this function.
-        //
-        // debug_assert!(!new.is_empty());
-        // debug_assert!(!old.is_empty());
-
-        for (_i, (new_child, old_child)) in new.iter().zip(old.iter()).enumerate() {
-            self.diff_node(old_child, new_child);
-        }
+        debug_assert!(!new.is_empty());
+        debug_assert!(!old.is_empty());

         match old.len().cmp(&new.len()) {
             // old.len > new.len -> removing some nodes
             Ordering::Greater => {
+                // diff them together
+                for (new_child, old_child) in new.iter().zip(old.iter()) {
+                    self.diff_node(old_child, new_child);
+                }
+
+                // todo: we would emit fewer instructions if we just did a replace many
+                // remove whatever is still dangling
+                for item in &old[new.len()..] {
+                    for i in RealChildIterator::new(item, self.vdom) {
+                        self.edit_push_root(i.direct_id());

@@ -1078,12 +1087,34 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
                     }
                 }
             }

             // old.len < new.len -> adding some nodes
             // this is wrong in the case where we're diffing fragments
             //
             // we need to save the last old element and then replace it with all the new ones
             Ordering::Less => {
-                self.create_and_append_children(&new[old.len()..]);
+                // Add the new elements to the last old element while it still exists
+                let last = self.find_last_element(old.last().unwrap());
+                self.edit_push_root(last.direct_id());
+
+                // create the rest and insert them
+                let meta = self.create_children(&new[old.len()..]);
+                self.edit_insert_after(meta.added_to_stack);
+
+                self.edit_pop();
+
+                // diff the rest
+                new.iter()
+                    .zip(old.iter())
+                    .for_each(|(new_child, old_child)| self.diff_node(old_child, new_child));
             }

             // old.len == new.len -> no nodes added/removed, but perhaps changed
-            Ordering::Equal => {}
+            Ordering::Equal => {
+                for (new_child, old_child) in new.iter().zip(old.iter()) {
+                    self.diff_node(old_child, new_child);
+                }
+            }
         }
     }
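The rewritten diff_non_keyed_children above pairs children positionally and then branches on old.len().cmp(&new.len()): extra old children are removed, extra new children are created after the last old element, and equal lengths simply diff in place. A toy model of that decision, independent of the DiffMachine API (the Edit enum below is purely illustrative):

    use std::cmp::Ordering;

    #[derive(Debug, PartialEq)]
    enum Edit {
        Diff(usize),       // diff old[i] against new[i] in place
        CreateTail(usize), // create this many new children after the last old one
        RemoveTail(usize), // remove this many dangling old children
    }

    fn diff_non_keyed(old_len: usize, new_len: usize) -> Vec<Edit> {
        // Diff the overlapping prefix positionally, then handle the length mismatch.
        let mut edits: Vec<Edit> = (0..old_len.min(new_len)).map(Edit::Diff).collect();
        match old_len.cmp(&new_len) {
            Ordering::Greater => edits.push(Edit::RemoveTail(old_len - new_len)),
            Ordering::Less => edits.push(Edit::CreateTail(new_len - old_len)),
            Ordering::Equal => {}
        }
        edits
    }

    fn main() {
        assert_eq!(
            diff_non_keyed(5, 2),
            vec![Edit::Diff(0), Edit::Diff(1), Edit::RemoveTail(3)]
        );
    }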
@@ -1147,11 +1178,58 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
         // self.remove_self_and_next_siblings();
     }

+    fn find_last_element(&mut self, vnode: &'bump VNode<'bump>) -> &'bump VNode<'bump> {
+        let mut search_node = Some(vnode);
+
+        loop {
+            let node = search_node.take().unwrap();
+            match &node.kind {
+                // the ones that have a direct id
+                VNodeKind::Text(_)
+                | VNodeKind::Element(_)
+                | VNodeKind::Anchor(_)
+                | VNodeKind::Suspended(_) => break node,
+
+                VNodeKind::Fragment(frag) => {
+                    search_node = frag.children.last();
+                }
+                VNodeKind::Component(el) => {
+                    let scope_id = el.ass_scope.get().unwrap();
+                    let scope = self.get_scope(&scope_id).unwrap();
+                    search_node = Some(scope.root());
+                }
+            }
+        }
+    }
+
+    fn find_first_element(&mut self, vnode: &'bump VNode<'bump>) -> &'bump VNode<'bump> {
+        let mut search_node = Some(vnode);
+
+        loop {
+            let node = search_node.take().unwrap();
+            match &node.kind {
+                // the ones that have a direct id
+                VNodeKind::Text(_)
+                | VNodeKind::Element(_)
+                | VNodeKind::Anchor(_)
+                | VNodeKind::Suspended(_) => break node,
+
+                VNodeKind::Fragment(frag) => {
+                    search_node = Some(&frag.children[0]);
+                }
+                VNodeKind::Component(el) => {
+                    let scope_id = el.ass_scope.get().unwrap();
+                    let scope = self.get_scope(&scope_id).unwrap();
+                    search_node = Some(scope.root());
+                }
+            }
+        }
+    }
+
     fn replace_many_with_many(
         &mut self,
         old_node: impl IntoIterator<Item = &'bump VNode<'bump>>,
         // old_node: &'bump VNode<'bump>,
-        new_node: &'bump VNode<'bump>,
+        new_node: impl IntoIterator<Item = &'bump VNode<'bump>>,
     ) {
         let mut nodes_to_replace = Vec::new();
         let mut nodes_to_search = old_node.into_iter().collect::<Vec<_>>();

@@ -1184,9 +1262,13 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
             self.edit_push_root(node);
         }

-        let meta = self.create_vnode(new_node);
+        let mut nodes_created = 0;
+        for node in new_node {
+            let meta = self.create_vnode(node);
+            nodes_created += meta.added_to_stack;
+        }

-        self.edit_replace_with(n as u32, meta.added_to_stack);
+        self.edit_replace_with(n as u32, nodes_created);

         // obliterate!
         for scope in scopes_obliterated {

@@ -1195,10 +1277,15 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
     }

     fn create_garbage(&mut self, node: &'bump VNode<'bump>) {
-        let cur_scope = self.current_scope().unwrap();
-        let scope = self.get_scope(&cur_scope).unwrap();
-        let garbage: &'bump VNode<'static> = unsafe { std::mem::transmute(node) };
-        scope.pending_garbage.borrow_mut().push(garbage);
+        match self.current_scope().and_then(|id| self.get_scope(&id)) {
+            Some(scope) => {
+                let garbage: &'bump VNode<'static> = unsafe { std::mem::transmute(node) };
+                scope.pending_garbage.borrow_mut().push(garbage);
+            }
+            None => {
+                log::info!("No scope to collect garbage into")
+            }
+        }
     }

     fn immediately_dispose_garabage(&mut self, node: ElementId) {

@@ -1292,6 +1379,10 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {
         self.edits.push(ReplaceWith { n, m });
     }

+    pub(crate) fn edit_insert_after(&mut self, n: u32) {
+        self.edits.push(InsertAfter { n });
+    }
+
     // Remove Nodesfrom the dom
     pub(crate) fn edit_remove(&mut self) {
         self.edits.push(Remove);

@@ -1374,6 +1465,7 @@ impl<'real, 'bump> DiffMachine<'real, 'bump> {

     // When we create new nodes, we need to propagate some information back up the call chain.
     // This gives the caller some information on how to handle things like insertins, appending, and subtree discarding.
+    #[derive(Debug)]
     pub struct CreateMeta {
         pub is_static: bool,
         pub added_to_stack: u32,

@@ -30,6 +30,9 @@ pub enum DomEdit<'bump> {
         // the last m elements
         m: u32,
     },
+    InsertAfter {
+        n: u32,
+    },
     Remove,
     RemoveAllChildren,
     CreateTextNode {

@@ -71,6 +74,7 @@ pub enum DomEdit<'bump> {
 impl DomEdit<'_> {
     pub fn is(&self, id: &'static str) -> bool {
         match self {
+            DomEdit::InsertAfter { .. } => id == "InsertAfter",
             DomEdit::PushRoot { .. } => id == "PushRoot",
             DomEdit::PopRoot => id == "PopRoot",
             DomEdit::AppendChildren { .. } => id == "AppendChildren",

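The new InsertAfter { n } variant extends the edit stream that a renderer replays against the real DOM. A rough sketch of how a consumer might read these edits; it assumes the DomEdit enum from the hunk above is in scope, and only the variants visible in this diff are matched by name:

    // Illustrative only: print what each edit asks the renderer to do.
    fn log_edit(edit: &DomEdit) {
        match edit {
            DomEdit::InsertAfter { n } => println!("insert {} node(s) after the current node", n),
            DomEdit::ReplaceWith { n, m } => println!("replace {} node(s) with the last {} created", n, m),
            DomEdit::Remove => println!("remove the current node"),
            DomEdit::RemoveAllChildren => println!("remove all children of the current node"),
            _ => println!("other edit (PushRoot, PopRoot, AppendChildren, CreateTextNode, ...)"),
        }
    }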
@@ -252,7 +252,7 @@ impl<'a> NodeFactory<'a> {
         listeners: L,
         attributes: A,
         children: V,
-        key: Option<&'a str>,
+        key: Option<Arguments>,
     ) -> VNode<'a>
     where
         L: 'a + AsRef<[Listener<'a>]>,

@@ -276,7 +276,7 @@ impl<'a> NodeFactory<'a> {
         listeners: L,
         attributes: A,
         children: V,
-        key: Option<&'a str>,
+        key: Option<Arguments>,
     ) -> VNode<'a>
     where
         L: 'a + AsRef<[Listener<'a>]>,

@@ -292,6 +292,8 @@ impl<'a> NodeFactory<'a> {
         let children: &'a V = self.bump().alloc(children);
         let children = children.as_ref();

+        let key = key.map(|f| self.raw_text(f).0);
+
         VNode {
             key,
             kind: VNodeKind::Element(self.bump().alloc(VElement {

@@ -352,21 +354,26 @@ impl<'a> NodeFactory<'a> {
         }
     }

-    pub fn component<P>(
+    pub fn component<P, V>(
         &self,
         component: FC<P>,
         props: P,
-        key: Option<&'a str>,
-        children: &'a [VNode<'a>],
+        key: Option<Arguments>,
+        // key: Option<&'a str>,
+        children: V,
     ) -> VNode<'a>
     where
         P: Properties + 'a,
+        V: 'a + AsRef<[VNode<'a>]>,
     {
         // TODO
         // It's somewhat wrong to go about props like this

+        // We don't want the fat part of the fat pointer
+        // This function does static dispatch so we don't need any VTable stuff
+        let children: &'a V = self.bump().alloc(children);
+        let children = children.as_ref();
+
         let props = self.bump().alloc(props);

         let raw_props = props as *mut P as *mut ();

@@ -408,6 +415,8 @@ impl<'a> NodeFactory<'a> {

         let is_static = children.len() == 0 && P::IS_STATIC && key.is_none();

+        let key = key.map(|f| self.raw_text(f).0);
+
         VNode {
             key,
             kind: VNodeKind::Component(self.bump().alloc_with(|| VComponent {

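Both factory methods now take the key as std::fmt::Arguments instead of &'a str, and render it into bump-allocated text themselves (key.map(|f| self.raw_text(f).0) above). A small standalone reminder of what passing Arguments buys; the stash helper below is an illustrative stand-in, not the Dioxus API:

    use std::fmt::Arguments;

    // Accept a deferred format, like the factory methods above do, and only pay
    // for string formatting when (and where) the callee decides to.
    fn stash(key: Option<Arguments>) -> Option<String> {
        // In Dioxus this would be written into the bump arena via `raw_text`;
        // here we just materialize it into an owned String.
        key.map(|args| args.to_string())
    }

    fn main() {
        let id = 42;
        // The caller never builds an intermediate String; `format_args!` borrows `id`.
        assert_eq!(stash(Some(format_args!("user-{}", id))), Some("user-42".into()));
        assert_eq!(stash(None), None);
    }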
@@ -1,3 +1,9 @@
+//! Diffing Tests
+//! -------------
+//!
+//! These should always compile and run, but the result is not validated for each test.
+//! TODO: Validate the results beyond visual inspection.
+
 use bumpalo::Bump;

 use anyhow::{Context, Result};

@@ -58,7 +64,7 @@ impl TestDom {
         &'a self,
         left: LazyNodes<'a, F1>,
         right: LazyNodes<'a, F2>,
-    ) -> Vec<DomEdit<'a>>
+    ) -> (Vec<DomEdit<'a>>, Vec<DomEdit<'a>>)
     where
         F1: FnOnce(NodeFactory<'a>) -> VNode<'a>,
         F2: FnOnce(NodeFactory<'a>) -> VNode<'a>,

@@ -67,26 +73,26 @@ impl TestDom {

         let new = self.bump.alloc(self.render(right));

-        let mut edits = Vec::new();
+        let mut create_edits = Vec::new();
         let dom = DebugDom::new();

-        let mut machine = DiffMachine::new_headless(&mut edits, &dom, &self.resources);
+        let mut machine = DiffMachine::new_headless(&mut create_edits, &dom, &self.resources);
         machine.create_vnode(old);
-        edits.clear();

+        let mut edits = Vec::new();
         let mut machine = DiffMachine::new_headless(&mut edits, &dom, &self.resources);
         machine.diff_node(old, new);
-        edits
+        (create_edits, edits)
     }
 }

 #[test]
 fn diffing_works() {}

 /// Should push the text node onto the stack and modify it
 #[test]
 fn html_and_rsx_generate_the_same_output() {
     let dom = TestDom::new();

     let edits = dom.lazy_diff(
         rsx! ( div { "Hello world" } ),
         rsx! ( div { "Goodbye world" } ),
@@ -94,6 +100,7 @@ fn html_and_rsx_generate_the_same_output() {
     dbg!(edits);
 }

+/// Should result in 3 elements on the stack
 #[test]
 fn fragments_create_properly() {
     let dom = TestDom::new();

@@ -109,3 +116,197 @@ fn fragments_create_properly() {
     assert_eq!(meta.added_to_stack, 3);
     dbg!(edits);
 }
+
+/// Should result in the creation of an anchor (placeholder) and then a replacewith
+#[test]
+fn empty_fragments_create_anchors() {
+    let dom = TestDom::new();
+
+    let left = rsx!({ (0..0).map(|f| rsx! { div {}}) });
+    let right = rsx!({ (0..1).map(|f| rsx! { div {}}) });
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(edits);
+}
+
+/// Should result in the creation of an anchor (placeholder) and then a replacewith m=5
+#[test]
+fn empty_fragments_create_many_anchors() {
+    let dom = TestDom::new();
+
+    let left = rsx!({ (0..0).map(|f| rsx! { div {}}) });
+    let right = rsx!({ (0..5).map(|f| rsx! { div {}}) });
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(edits);
+}
+
+/// Should result in the creation of an anchor (placeholder) and then a replacewith
+/// Includes child nodes inside the fragment
+#[test]
+fn empty_fragments_create_anchors_with_many_children() {
+    let dom = TestDom::new();
+
+    let left = rsx!({ (0..0).map(|f| rsx! { div {} }) });
+    let right = rsx!({
+        (0..5).map(|f| {
+            rsx! { div { "hello" }}
+        })
+    });
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+    let last_edit = edits.1.last().unwrap();
+    assert!(last_edit.is("ReplaceWith"));
+}
+
+/// Should result in every node being pushed and then replaced with an anchor
+#[test]
+fn many_items_become_fragment() {
+    let dom = TestDom::new();
+
+    let left = rsx!({
+        (0..2).map(|f| {
+            rsx! { div { "hello" }}
+        })
+    });
+    let right = rsx!({ (0..0).map(|f| rsx! { div {} }) });
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+}
+
+/// Should result in no edits
+#[test]
+fn two_equal_fragments_are_equal() {
+    let dom = TestDom::new();
+
+    let left = rsx!({
+        (0..2).map(|f| {
+            rsx! { div { "hello" }}
+        })
+    });
+    let right = rsx!({
+        (0..2).map(|f| {
+            rsx! { div { "hello" }}
+        })
+    });
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+    assert!(edits.1.is_empty());
+}
+
+/// Should result the creation of more nodes appended after the old last node
+#[test]
+fn two_fragments_with_differrent_elements_are_differet() {
+    let dom = TestDom::new();
+
+    let left = rsx!(
+        {(0..2).map(|f| {rsx! { div { }}})}
+        p {}
+    );
+    let right = rsx!(
+        {(0..5).map(|f| {rsx! { h1 { }}})}
+        p {}
+    );
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+}
+
+/// Should result in multiple nodes destroyed - with changes to the first nodes
+#[test]
+fn two_fragments_with_differrent_elements_are_differet_shorter() {
+    let dom = TestDom::new();
+
+    let left = rsx!(
+        {(0..5).map(|f| {rsx! { div { }}})}
+        p {}
+    );
+    let right = rsx!(
+        {(0..2).map(|f| {rsx! { h1 { }}})}
+        p {}
+    );
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+}
+
+/// Should result in multiple nodes destroyed - with no changes
+#[test]
+fn two_fragments_with_same_elements_are_differet() {
+    let dom = TestDom::new();
+
+    let left = rsx!(
+        {(0..2).map(|f| {rsx! { div { }}})}
+        p {}
+    );
+    let right = rsx!(
+        {(0..5).map(|f| {rsx! { div { }}})}
+        p {}
+    );
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+}
+
+// Similar test from above, but with extra child nodes
+#[test]
+fn two_fragments_with_same_elements_are_differet_shorter() {
+    let dom = TestDom::new();
+
+    let left = rsx!(
+        {(0..5).map(|f| {rsx! { div { }}})}
+        p {"e"}
+    );
+    let right = rsx!(
+        {(0..2).map(|f| {rsx! { div { }}})}
+        p {"e"}
+    );
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+}
+
+/// should result in the removal of elements
+#[test]
+fn keyed_diffing_order() {
+    let dom = TestDom::new();
+
+    let left = rsx!(
+        {(0..5).map(|f| {rsx! { div { key: "{f}" }}})}
+        p {"e"}
+    );
+    let right = rsx!(
+        {(0..2).map(|f| {rsx! { div { key: "{f}" }}})}
+        p {"e"}
+    );
+
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+}
+
+#[test]
+fn fragment_keys() {
+    let r = 1;
+    let p = rsx! {
+        Fragment { key: "asd {r}" }
+    };
+}
+
+/// Should result in moves, but not removals or additions
+#[test]
+fn keyed_diffing_out_of_order() {
+    let dom = TestDom::new();
+    let left = rsx!(
+        {(0..5).map(|f| {rsx! { div { key: "{f}" }}})}
+        p {"e"}
+    );
+    let right = rsx!(
+        {(0..5).rev().map(|f| {rsx! { div { key: "{f}" }}})}
+        p {"e"}
+    );
+    let edits = dom.lazy_diff(left, right);
+    dbg!(&edits);
+}
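With the change earlier in this file, lazy_diff now returns a pair: the edits that build the left tree from scratch and the edits that mutate it into the right tree (the tests above index this tuple, e.g. edits.1). A hedged sketch of destructuring it directly, as a hypothetical test body using the same helpers:

    let dom = TestDom::new();
    let (create_edits, change_edits) = dom.lazy_diff(
        rsx! ( div { "Hello world" } ),
        rsx! ( div { "Goodbye world" } ),
    );
    // `create_edits` builds the first tree; `change_edits` morphs it into the second.
    assert!(!change_edits.is_empty());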