use std::any::Any;

use crate::factory::RenderReturn;
use crate::innerlude::{Mutations, VComponent, VFragment, VText};
use crate::virtual_dom::VirtualDom;
use crate::{Attribute, AttributeValue, TemplateNode};

use crate::any_props::VProps;
use DynamicNode::*;

use crate::mutations::Mutation;
use crate::nodes::{DynamicNode, Template, TemplateId};
use crate::scopes::Scope;
use crate::{
    any_props::AnyProps,
    arena::ElementId,
    bump_frame::BumpFrame,
    nodes::VNode,
    scopes::{ScopeId, ScopeState},
};
use fxhash::{FxHashMap, FxHashSet};
use slab::Slab;

impl<'b> VirtualDom {
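    /// Diff the current render of `scope` against its previous render, pushing any edits it
    /// produces into `mutations`.
    ///
    /// The scope must have been built (via rebuild) at least once before diffing; see the
    /// asserts below.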
    pub fn diff_scope(&mut self, mutations: &mut Mutations<'b>, scope: ScopeId) {
        let scope_state = &mut self.scopes[scope.0];

        // Load the old and new bump arenas
        let cur_arena = scope_state.current_frame();
        let prev_arena = scope_state.previous_frame();

        // Make sure the nodes aren't null (i.e. that they've been set properly).
        // This is a rough check to make sure we're not entering any UB.
        assert_ne!(
            cur_arena.node.get(),
            std::ptr::null_mut(),
            "Call rebuild before diffing"
        );
        assert_ne!(
            prev_arena.node.get(),
            std::ptr::null_mut(),
            "Call rebuild before diffing"
        );

        self.scope_stack.push(scope);
        unsafe {
            let cur_arena = cur_arena.load_node();
            let prev_arena = prev_arena.load_node();
            self.diff_maybe_node(mutations, prev_arena, cur_arena);
        }
        self.scope_stack.pop();
    }

    fn diff_maybe_node(
        &mut self,
        m: &mut Mutations<'b>,
        left: &'b RenderReturn<'b>,
        right: &'b RenderReturn<'b>,
    ) {
        use RenderReturn::{Async, Sync};
        match (left, right) {
            // diff
            (Sync(Ok(l)), Sync(Ok(r))) => self.diff_vnode(m, l, r),

            _ => todo!("handle diffing nonstandard nodes"),
            // // remove old with placeholder
            // (Sync(Ok(l)), Sync(None)) | (Sync(Ok(l)), Async(_)) => {
            //     //
            //     let id = self.next_element(l, &[]); // todo!
            //     m.push(Mutation::CreatePlaceholder { id });
            //     self.drop_template(m, l, true);
            // }

            // // remove placeholder with nodes
            // (Sync(None), Sync(Ok(_))) => {}
            // (Async(_), Sync(Ok(v))) => {}

            // // nothing... just transfer the placeholders over
            // (Async(_), Async(_))
            // | (Sync(None), Sync(None))
            // | (Sync(None), Async(_))
            // | (Async(_), Sync(None)) => {}
        }
    }

    pub fn diff_vnode(
        &mut self,
        muts: &mut Mutations<'b>,
        left_template: &'b VNode<'b>,
        right_template: &'b VNode<'b>,
    ) {
        if left_template.template.id != right_template.template.id {
            return self.light_diff_templates(muts, left_template, right_template);
        }
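
        // The two nodes share the same template, so their dynamic attributes line up
        // one-to-one and can be diffed pairwise.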
        for (left_attr, right_attr) in left_template
            .dynamic_attrs
            .iter()
            .zip(right_template.dynamic_attrs.iter())
        {
            // Move over the ID from the old to the new
            right_attr
                .mounted_element
                .set(left_attr.mounted_element.get());

            if left_attr.value != right_attr.value || left_attr.volatile {
                // todo: add more types of attribute values
                match right_attr.value {
                    AttributeValue::Text(text) => {
                        muts.push(Mutation::SetAttribute {
                            id: left_attr.mounted_element.get(),
                            name: left_attr.name,
                            value: text,
                            ns: right_attr.namespace,
                        });
                    }
                    // todo: more types of attribute values
                    _ => (),
                }
            }
        }
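
        // Likewise, the dynamic nodes line up one-to-one; dispatch on each pair of node kinds,
        // replacing the node entirely if the kinds differ.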
        for (left_node, right_node) in left_template
            .dynamic_nodes
            .iter()
            .zip(right_template.dynamic_nodes.iter())
        {
            match (left_node, right_node) {
                (Text(left), Text(right)) => self.diff_vtext(muts, left, right),
                (Fragment(left), Fragment(right)) => self.diff_vfragment(muts, left, right),
                (Component(left), Component(right)) => self.diff_vcomponent(muts, left, right),
                _ => self.replace(muts, left_template, right_template, left_node, right_node),
            };
        }
    }

    fn replace(
        &mut self,
        muts: &mut Mutations<'b>,
        left_template: &'b VNode<'b>,
        right_template: &'b VNode<'b>,
        left: &'b DynamicNode<'b>,
        right: &'b DynamicNode<'b>,
    ) {
    }

    fn diff_vcomponent(
        &mut self,
        muts: &mut Mutations<'b>,
        left: &'b VComponent<'b>,
        right: &'b VComponent<'b>,
    ) {
        // Due to how templates work, we should never get two different components. The only way we could enter
        // this codepath is through "light_diff", but we check there that the pointers are the same
        assert_eq!(left.render_fn, right.render_fn);

        // Make sure the new vcomponent has the right scope id associated with it
        let scope_id = left.scope.get().unwrap();
        right.scope.set(Some(scope_id));

        // copy out the box for both
        let old = left.props.replace(None).unwrap();
        let new = right.props.replace(None).unwrap();

        // If the props are static, then we try to memoize by setting the new with the old
        // The target scopestate still has the reference to the old props, so there's no need to update anything
        // This also implicitly drops the new props since they're not used
        if left.static_props && unsafe { old.memoize(new.as_ref()) } {
            return right.props.set(Some(old));
        }

        // If the props are dynamic *or* the memoization failed, then we need to diff the props

        // First, move over the props from the old to the new, dropping old props in the process
        self.scopes[scope_id.0].props = unsafe { std::mem::transmute(new.as_ref()) };
        right.props.set(Some(new));

        // Now run the component and diff it
        self.run_scope(scope_id);
        self.diff_scope(muts, scope_id);
    }

    /// Lightly diff the two templates, checking only their roots.
    ///
    /// The goal here is to preserve any existing component state that might exist. This is to preserve some React-like
    /// behavior where the component state is preserved when the component is re-rendered.
    ///
    /// This is implemented by iterating each root, checking if the component is the same, and if it is, diffing it.
    ///
    /// We then pass the new template through "create" which should be smart enough to skip roots.
    ///
    /// Currently, we only handle the case where the roots are the same component list. If there's any sort of deviation,
    /// IE more nodes, fewer nodes, different nodes, or expressions, then we just replace the whole thing.
    ///
    /// This is mostly implemented to help solve the issue where the same component is rendered under two different
    /// conditions:
    ///
    /// ```rust
    /// if enabled {
    ///     rsx!{ Component { enabled_sign: "abc" } }
    /// } else {
    ///     rsx!{ Component { enabled_sign: "xyz" } }
    /// }
    /// ```
    ///
    /// However, we should note that it's explicit in the docs that this is not a guarantee. If you need to preserve state,
    /// then you should be passing in separate props instead.
    ///
    /// ```
    ///
    /// let props = if enabled {
    ///     ComponentProps { enabled_sign: "abc" }
    /// } else {
    ///     ComponentProps { enabled_sign: "xyz" }
    /// };
    ///
    /// rsx! {
    ///     Component { ..props }
    /// }
    /// ```
    fn light_diff_templates(
        &mut self,
        muts: &mut Mutations<'b>,
        left: &'b VNode<'b>,
        right: &'b VNode<'b>,
    ) {
        if let Some(components) = matching_components(left, right) {
            components
                .into_iter()
                .for_each(|(l, r)| self.diff_vcomponent(muts, l, r))
        }
    }

    /// Diff the two text nodes
    ///
    /// This just moves the ID of the old node over to the new node, and then sets the text of the new node if it's
    /// different.
    fn diff_vtext(&mut self, muts: &mut Mutations<'b>, left: &'b VText<'b>, right: &'b VText<'b>) {
        right.id.set(left.id.get());
        if left.value != right.value {
            muts.push(Mutation::SetText {
                id: left.id.get(),
                value: right.value,
            });
        }
    }

    fn diff_vfragment(
        &mut self,
        muts: &mut Mutations<'b>,
        left: &'b VFragment<'b>,
        right: &'b VFragment<'b>,
    ) {
        use VFragment::*;
        match (left, right) {
            (Empty(l), Empty(r)) => r.set(l.get()),
            (Empty(l), NonEmpty(r)) => self.replace_placeholder_with_nodes(muts, l, r),
            (NonEmpty(l), Empty(r)) => self.replace_nodes_with_placeholder(muts, l, r),
            (NonEmpty(old), NonEmpty(new)) => self.diff_non_empty_fragment(new, old, muts),
        }
    }

    fn replace_placeholder_with_nodes(
        &mut self,
        muts: &mut Mutations<'b>,
        l: &'b std::cell::Cell<ElementId>,
        r: &'b [VNode<'b>],
    ) {
        let created = r
            .iter()
            .fold(0, |acc, child| acc + self.create(muts, child));
        muts.push(Mutation::ReplaceWith {
            id: l.get(),
            m: created,
        })
    }

    fn replace_nodes_with_placeholder(
        &mut self,
        muts: &mut Mutations<'b>,
        l: &'b [VNode<'b>],
        r: &'b std::cell::Cell<ElementId>,
    ) {
        //

        // Remove the old nodes, except for one
        self.remove_nodes(muts, &l[1..]);
    }
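
    /// Diff two fragments that both have at least one child, dispatching to keyed or non-keyed
    /// diffing depending on whether the children carry keys.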
    fn diff_non_empty_fragment(
        &mut self,
        new: &'b [VNode<'b>],
        old: &'b [VNode<'b>],
        muts: &mut Mutations<'b>,
    ) {
        let new_is_keyed = new[0].key.is_some();
        let old_is_keyed = old[0].key.is_some();
        debug_assert!(
            new.iter().all(|n| n.key.is_some() == new_is_keyed),
            "all siblings must be keyed or all siblings must be non-keyed"
        );
        debug_assert!(
            old.iter().all(|o| o.key.is_some() == old_is_keyed),
            "all siblings must be keyed or all siblings must be non-keyed"
        );
        if new_is_keyed && old_is_keyed {
            // self.diff_keyed_children(muts, old, new);
        } else {
            self.diff_non_keyed_children(muts, old, new);
        }
    }

    // Diff children that are not keyed.
    //
    // The parent must be on the top of the change list stack when entering this
    // function:
    //
    //     [... parent]
    //
    // the change list stack is in the same state when this function returns.
    fn diff_non_keyed_children(
        &mut self,
        muts: &mut Mutations<'b>,
        old: &'b [VNode<'b>],
        new: &'b [VNode<'b>],
    ) {
        use std::cmp::Ordering;

        // Handled these cases in `diff_children` before calling this function.
        debug_assert!(!new.is_empty());
        debug_assert!(!old.is_empty());

        match old.len().cmp(&new.len()) {
            Ordering::Greater => self.remove_nodes(muts, &old[new.len()..]),
            Ordering::Less => {
                self.create_and_insert_after(muts, &new[old.len()..], old.last().unwrap())
            }
            Ordering::Equal => {}
        }

        for (new, old) in new.iter().zip(old.iter()) {
            self.diff_vnode(muts, old, new);
        }
    }

    // // Diffing "keyed" children.
    // //
    // // With keyed children, we care about whether we delete, move, or create nodes
    // // versus mutate existing nodes in place. Presumably there is some sort of CSS
    // // transition animation that makes the virtual DOM diffing algorithm
    // // observable. By specifying keys for nodes, we know which virtual DOM nodes
    // // must reuse (or not reuse) the same physical DOM nodes.
    // //
    // // This is loosely based on Inferno's keyed patching implementation. However, we
    // // have to modify the algorithm since we are compiling the diff down into change
    // // list instructions that will be executed later, rather than applying the
    // // changes to the DOM directly as we compare virtual DOMs.
    // //
    // // https://github.com/infernojs/inferno/blob/36fd96/packages/inferno/src/DOM/patching.ts#L530-L739
    // //
    // // The stack is empty upon entry.
    // fn diff_keyed_children(
    //     &mut self,
    //     muts: &mut Mutations<'b>,
    //     old: &'b [VNode<'b>],
    //     new: &'b [VNode<'b>],
    // ) {
    //     if cfg!(debug_assertions) {
    //         let mut keys = fxhash::FxHashSet::default();
    //         let mut assert_unique_keys = |children: &'b [VNode<'b>]| {
    //             keys.clear();
    //             for child in children {
    //                 let key = child.key;
    //                 debug_assert!(
    //                     key.is_some(),
    //                     "if any sibling is keyed, all siblings must be keyed"
    //                 );
    //                 keys.insert(key);
    //             }
    //             debug_assert_eq!(
    //                 children.len(),
    //                 keys.len(),
    //                 "keyed siblings must each have a unique key"
    //             );
    //         };
    //         assert_unique_keys(old);
    //         assert_unique_keys(new);
    //     }

    //     // First up, we diff all the nodes with the same key at the beginning of the
    //     // children.
    //     //
    //     // `shared_prefix_count` is the count of how many nodes at the start of
    //     // `new` and `old` share the same keys.
    //     let (left_offset, right_offset) = match self.diff_keyed_ends(muts, old, new) {
    //         Some(count) => count,
    //         None => return,
    //     };

    //     // Ok, we now hopefully have a smaller range of children in the middle
    //     // within which to re-order nodes with the same keys, remove old nodes with
    //     // now-unused keys, and create new nodes with fresh keys.

    //     let old_middle = &old[left_offset..(old.len() - right_offset)];
    //     let new_middle = &new[left_offset..(new.len() - right_offset)];

    //     debug_assert!(
    //         !((old_middle.len() == new_middle.len()) && old_middle.is_empty()),
    //         "keyed children must have the same number of children"
    //     );

    //     if new_middle.is_empty() {
    //         // remove the old elements
    //         self.remove_nodes(muts, old_middle);
    //     } else if old_middle.is_empty() {
    //         // there were no old elements, so just create the new elements
    //         // we need to find the right "foothold" though - we shouldn't use the "append" at all
    //         if left_offset == 0 {
    //             // insert at the beginning of the old list
    //             let foothold = &old[old.len() - right_offset];
    //             self.create_and_insert_before(muts, new_middle, foothold);
    //         } else if right_offset == 0 {
    //             // insert at the end of the old list
    //             let foothold = old.last().unwrap();
    //             self.create_and_insert_after(muts, new_middle, foothold);
    //         } else {
    //             // inserting in the middle
    //             let foothold = &old[left_offset - 1];
    //             self.create_and_insert_after(muts, new_middle, foothold);
    //         }
    //     } else {
    //         self.diff_keyed_middle(muts, old_middle, new_middle);
    //     }
    // }

    // /// Diff both ends of the children that share keys.
    // ///
    // /// Returns a left offset and a right offset that indicate a smaller section to pass on to the middle diffing.
    // ///
    // /// If there is no offset, then this function returns None and the diffing is complete.
    // fn diff_keyed_ends(
    //     &mut self,
    //     muts: &mut Mutations<'b>,
    //     old: &'b [VNode<'b>],
    //     new: &'b [VNode<'b>],
    // ) -> Option<(usize, usize)> {
    //     let mut left_offset = 0;

    //     for (old, new) in old.iter().zip(new.iter()) {
    //         // abort early if we finally run into nodes with different keys
    //         if old.key != new.key {
    //             break;
    //         }
    //         self.diff_node(muts, old, new);
    //         left_offset += 1;
    //     }

    //     // If that was all of the old children, then create and append the remaining
    //     // new children and we're finished.
    //     if left_offset == old.len() {
    //         self.create_and_insert_after(&new[left_offset..], old.last().unwrap());
    //         return None;
    //     }

    //     // And if that was all of the new children, then remove all of the remaining
    //     // old children and we're finished.
    //     if left_offset == new.len() {
    //         self.remove_nodes(muts, &old[left_offset..]);
    //         return None;
    //     }

    //     // if the shared prefix is less than either length, then we need to walk backwards
    //     let mut right_offset = 0;
    //     for (old, new) in old.iter().rev().zip(new.iter().rev()) {
    //         // abort early if we finally run into nodes with different keys
    //         if old.key != new.key {
    //             break;
    //         }
    //         self.diff_node(muts, old, new);
    //         right_offset += 1;
    //     }

    //     Some((left_offset, right_offset))
    // }

    // // The most-general, expensive code path for keyed children diffing.
    // //
    // // We find the longest subsequence within `old` of children that are relatively
    // // ordered the same way in `new` (via finding a longest-increasing-subsequence
    // // of the old child's index within `new`). The children that are elements of
    // // this subsequence will remain in place, minimizing the number of DOM moves we
    // // will have to do.
    // //
    // // Upon entry to this function, the change list stack must be empty.
    // //
    // // This function will load the appropriate nodes onto the stack and do diffing in place.
    // //
    // // Upon exit from this function, it will be restored to that same self.
    // #[allow(clippy::too_many_lines)]
    // fn diff_keyed_middle(
    //     &mut self,
    //     muts: &mut Mutations<'b>,
    //     old: &'b [VNode<'b>],
    //     new: &'b [VNode<'b>],
    // ) {
    //     /*
    //     1. Map the old keys into a numerical ordering based on indices.
    //     2. Create a map of old key to its index
    //     3. Map each new key to the old key, carrying over the old index.
    //         - IE if we have ABCD becomes BACD, our sequence would be 1,0,2,3
    //         - if we have ABCD to ABDE, our sequence would be 0,1,3,MAX because E doesn't exist

    //     now, we should have a list of integers that indicates where in the old list the new items map to.

    //     4. Compute the LIS of this list
    //         - this indicates the longest list of new children that won't need to be moved.

    //     5. Identify which nodes need to be removed
    //     6. Identify which nodes will need to be diffed

    //     7. Going along each item in the new list, create it and insert it before the next closest item in the LIS.
    //         - if the item already existed, just move it to the right place.

    //     8. Finally, generate instructions to remove any old children.
    //     9. Generate instructions to finally diff children that are the same between both
    //     */
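    //     // A worked example (hypothetical keys): old = [A, B, C, D], new = [B, C, D, A].
    //     // The old index map is {A: 0, B: 1, C: 2, D: 3}, so new_index_to_old_index = [1, 2, 3, 0].
    //     // The longest increasing subsequence of that list is [1, 2, 3], meaning B, C, and D can
    //     // stay in place and only A needs to be moved (to the end); nothing is created or removed.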

    //     // 0. Debug sanity checks
    //     // Should have already diffed the shared-key prefixes and suffixes.
    //     debug_assert_ne!(new.first().map(|i| i.key), old.first().map(|i| i.key));
    //     debug_assert_ne!(new.last().map(|i| i.key), old.last().map(|i| i.key));

    //     // 1. Map the old keys into a numerical ordering based on indices.
    //     // 2. Create a map of old key to its index
    //     // IE if the keys were A B C, then we would have (A, 0) (B, 1) (C, 2).
    //     let old_key_to_old_index = old
    //         .iter()
    //         .enumerate()
    //         .map(|(i, o)| (o.key.unwrap(), i))
    //         .collect::<FxHashMap<_, _>>();

    //     let mut shared_keys = FxHashSet::default();

    //     // 3. Map each new key to the old key, carrying over the old index.
    //     let new_index_to_old_index = new
    //         .iter()
    //         .map(|node| {
    //             let key = node.key.unwrap();
    //             if let Some(&index) = old_key_to_old_index.get(&key) {
    //                 shared_keys.insert(key);
    //                 index
    //             } else {
    //                 u32::MAX as usize
    //             }
    //         })
    //         .collect::<Vec<_>>();

    //     // If none of the old keys are reused by the new children, then we remove all the remaining old children and
    //     // create the new children afresh.
    //     if shared_keys.is_empty() {
    //         if let Some(first_old) = old.get(0) {
    //             self.remove_nodes(muts, &old[1..]);
    //             let nodes_created = self.create_children(new);
    //             self.replace_inner(first_old, nodes_created);
    //         } else {
    //             // I think this is wrong - why are we appending?
    //             // only valid if there are no trailing elements
    //             self.create_and_append_children(new);
    //         }
    //         return;
    //     }

    //     // remove any old children that are not shared
    //     // todo: make this an iterator
    //     for child in old {
    //         let key = child.key.unwrap();
    //         if !shared_keys.contains(&key) {
    //             todo!("remove node");
    //             // self.remove_nodes(muts, [child]);
    //         }
    //     }

    //     // 4. Compute the LIS of this list
    //     let mut lis_sequence = Vec::default();
    //     lis_sequence.reserve(new_index_to_old_index.len());

    //     let mut predecessors = vec![0; new_index_to_old_index.len()];
    //     let mut starts = vec![0; new_index_to_old_index.len()];

    //     longest_increasing_subsequence::lis_with(
    //         &new_index_to_old_index,
    //         &mut lis_sequence,
    //         |a, b| a < b,
    //         &mut predecessors,
    //         &mut starts,
    //     );

    //     // the lis comes out backwards, I think. can't quite tell.
    //     lis_sequence.sort_unstable();

    //     // if a new node gets u32 max and is at the end, then it might be part of our LIS (because u32 max is a valid LIS)
    //     if lis_sequence.last().map(|f| new_index_to_old_index[*f]) == Some(u32::MAX as usize) {
    //         lis_sequence.pop();
    //     }

    //     for idx in &lis_sequence {
    //         self.diff_node(muts, &old[new_index_to_old_index[*idx]], &new[*idx]);
    //     }

    //     let mut nodes_created = 0;

    //     // add mount instruction for the first items not covered by the lis
    //     let last = *lis_sequence.last().unwrap();
    //     if last < (new.len() - 1) {
    //         for (idx, new_node) in new[(last + 1)..].iter().enumerate() {
    //             let new_idx = idx + last + 1;
    //             let old_index = new_index_to_old_index[new_idx];
    //             if old_index == u32::MAX as usize {
    //                 nodes_created += self.create(muts, new_node);
    //             } else {
    //                 self.diff_node(muts, &old[old_index], new_node);
    //                 nodes_created += self.push_all_real_nodes(new_node);
    //             }
    //         }

    //         self.mutations.insert_after(
    //             self.find_last_element(&new[last]).unwrap(),
    //             nodes_created as u32,
    //         );
    //         nodes_created = 0;
    //     }

    //     // for each spacing, generate a mount instruction
    //     let mut lis_iter = lis_sequence.iter().rev();
    //     let mut last = *lis_iter.next().unwrap();
    //     for next in lis_iter {
    //         if last - next > 1 {
    //             for (idx, new_node) in new[(next + 1)..last].iter().enumerate() {
    //                 let new_idx = idx + next + 1;
    //                 let old_index = new_index_to_old_index[new_idx];
    //                 if old_index == u32::MAX as usize {
    //                     nodes_created += self.create(muts, new_node);
    //                 } else {
    //                     self.diff_node(muts, &old[old_index], new_node);
    //                     nodes_created += self.push_all_real_nodes(new_node);
    //                 }
    //             }

    //             self.mutations.insert_before(
    //                 self.find_first_element(&new[last]).unwrap(),
    //                 nodes_created as u32,
    //             );

    //             nodes_created = 0;
    //         }
    //         last = *next;
    //     }

    //     // add mount instruction for the last items not covered by the lis
    //     let first_lis = *lis_sequence.first().unwrap();
    //     if first_lis > 0 {
    //         for (idx, new_node) in new[..first_lis].iter().enumerate() {
    //             let old_index = new_index_to_old_index[idx];
    //             if old_index == u32::MAX as usize {
    //                 nodes_created += self.create_node(new_node);
    //             } else {
    //                 self.diff_node(muts, &old[old_index], new_node);
    //                 nodes_created += self.push_all_real_nodes(new_node);
    //             }
    //         }

    //         self.mutations.insert_before(
    //             self.find_first_element(&new[first_lis]).unwrap(),
    //             nodes_created as u32,
    //         );
    //     }
    // }

    /// Remove these nodes from the DOM
    /// Won't generate mutations for the inner nodes
    fn remove_nodes(&mut self, muts: &mut Mutations<'b>, nodes: &'b [VNode<'b>]) {
        //
    }

    /// Push all the real nodes on the stack
    fn push_elements_onto_stack(&mut self, node: &VNode) -> usize {
        todo!()
    }

    pub(crate) fn create_and_insert_before(
        &self,
        mutations: &mut Mutations<'b>,
        new: &[VNode],
        after: &VNode,
    ) {
        let id = self.get_last_real_node(after);
    }
    pub(crate) fn create_and_insert_after(
        &self,
        mutations: &mut Mutations<'b>,
        new: &[VNode],
        after: &VNode,
    ) {
        let id = self.get_last_real_node(after);
    }

    fn get_last_real_node(&self, node: &VNode) -> ElementId {
        match node.template.roots.last().unwrap() {
            TemplateNode::Element { .. } => todo!(),
            TemplateNode::Text(t) => todo!(),
            TemplateNode::Dynamic(_) => todo!(),
            TemplateNode::DynamicText(_) => todo!(),
        }
    }
}
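
/// Check whether the roots of both templates are the same list of components, returning the
/// matching pairs if so.
///
/// Returns `None` if the templates have a different number of roots, if any root is not a
/// dynamic component node, or if any pair of components has a different `render_fn`.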
fn matching_components<'a>(
    left: &'a VNode<'a>,
    right: &'a VNode<'a>,
) -> Option<Vec<(&'a VComponent<'a>, &'a VComponent<'a>)>> {
    if left.template.roots.len() != right.template.roots.len() {
        return None;
    }

    // run through the components, ensuring they're the same
    left.template
        .roots
        .iter()
        .zip(right.template.roots.iter())
        .map(|(l, r)| {
            let (l, r) = match (l, r) {
                (TemplateNode::Dynamic(l), TemplateNode::Dynamic(r)) => (l, r),
                _ => return None,
            };

            let (l, r) = match (&left.dynamic_nodes[*l], &right.dynamic_nodes[*r]) {
                (Component(l), Component(r)) => (l, r),
                _ => return None,
            };

            (l.render_fn == r.render_fn).then(|| (l, r))
        })
        .collect()
}

/// We can apply various optimizations to dynamic nodes that are the single child of their parent.
///
/// IE
/// - for text - we can use SetTextContent
/// - for clearing children we can use RemoveChildren
/// - for appending children we can use AppendChildren
fn is_dyn_node_only_child(node: &VNode, idx: usize) -> bool {
    let path = node.template.node_paths[idx];

    // use a loop to index every static node's children until the path has run out
    // only break if the last path index is a dynamic node
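    // (For example, a path of [0, 2, 1] roughly means: start at root 0, descend into its child 2,
    // and the dynamic node is child 1 of that element; every index except the last is walked below.)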
    let mut static_node = &node.template.roots[path[0] as usize];

    for i in 1..path.len() - 1 {
        match static_node {
            TemplateNode::Element { children, .. } => static_node = &children[path[i] as usize],
            _ => return false,
        }
    }

    match static_node {
        TemplateNode::Element { children, .. } => children.len() == 1,
        _ => false,
    }
}