2022-10-22 01:54:14 +00:00
|
|
|
use crate::{
|
2022-10-28 04:58:47 +00:00
|
|
|
arena::ElementId,
|
2022-12-13 02:31:30 +00:00
|
|
|
innerlude::{DirtyScope, VComponent, VPlaceholder, VText},
|
2022-11-24 07:15:01 +00:00
|
|
|
mutations::Mutation,
|
2022-12-03 00:24:49 +00:00
|
|
|
nodes::RenderReturn,
|
2022-11-24 07:15:01 +00:00
|
|
|
nodes::{DynamicNode, VNode},
|
|
|
|
scopes::ScopeId,
|
|
|
|
virtual_dom::VirtualDom,
|
2022-12-13 02:31:30 +00:00
|
|
|
Attribute, AttributeValue, TemplateNode,
|
2022-01-31 17:49:21 +00:00
|
|
|
};
|
2022-11-24 07:15:01 +00:00
|
|
|
|
2022-12-10 02:56:48 +00:00
|
|
|
use rustc_hash::{FxHashMap, FxHashSet};
|
2022-11-24 07:15:01 +00:00
|
|
|
use DynamicNode::*;
|
2022-10-22 01:54:14 +00:00
|
|
|
|
2022-11-29 21:31:04 +00:00
|
|
|
impl<'b> VirtualDom {
|
2022-11-30 15:31:44 +00:00
|
|
|
/// Diff a single scope's previous render output against its current output,
/// pushing the resulting DOM edits into `self.mutations`.
///
/// Panics (via `expect`) if the scope has never been rebuilt, since both
/// frames must contain a rendered node before diffing makes sense.
pub(super) fn diff_scope(&mut self, scope: ScopeId) {
    let scope_state = &mut self.scopes[scope.0];

    // Track the scope being diffed so downstream element bookkeeping can
    // attribute new nodes to the correct scope.
    self.scope_stack.push(scope);
    unsafe {
        // Load the old and new bump arenas
        //
        // NOTE(review): `try_load_node` hands back nodes whose lifetimes are
        // extended past the borrow checker's view — presumably sound because
        // the bump frames outlive this diff; TODO confirm frame invariants.
        let old = scope_state
            .previous_frame()
            .try_load_node()
            .expect("Call rebuild before diffing");

        let new = scope_state
            .current_frame()
            .try_load_node()
            .expect("Call rebuild before diffing");

        use RenderReturn::{Async, Sync};

        match (old, new) {
            // Both renders succeeded synchronously — the common path.
            (Sync(Ok(l)), Sync(Ok(r))) => self.diff_node(l, r),

            // Err cases
            (Sync(Ok(l)), Sync(Err(e))) => self.diff_ok_to_err(l, e),
            (Sync(Err(e)), Sync(Ok(r))) => self.diff_err_to_ok(e, r),
            (Sync(Err(_eo)), Sync(Err(_en))) => { /* nothing */ }

            // Async — transitions to/from suspended renders are unimplemented.
            (Sync(Ok(_l)), Async(_)) => todo!(),
            (Sync(Err(_e)), Async(_)) => todo!(),
            (Async(_), Sync(Ok(_r))) => todo!(),
            (Async(_), Sync(Err(_e))) => { /* nothing */ }
            (Async(_), Async(_)) => { /* nothing */ }
        };
    }
    self.scope_stack.pop();
}
|
2022-11-03 09:11:04 +00:00
|
|
|
|
2022-12-07 01:44:29 +00:00
|
|
|
// Currently a no-op stub: transitioning a successful render into an error
// state produces no mutations yet.
fn diff_ok_to_err(&mut self, _l: &'b VNode<'b>, _e: &anyhow::Error) {}
|
|
|
|
// Currently a no-op stub: transitioning an errored render back to a
// successful one produces no mutations yet.
fn diff_err_to_ok(&mut self, _e: &anyhow::Error, _l: &'b VNode<'b>) {}
|
2022-11-24 14:11:27 +00:00
|
|
|
|
2022-11-30 15:31:44 +00:00
|
|
|
fn diff_node(&mut self, left_template: &'b VNode<'b>, right_template: &'b VNode<'b>) {
|
2022-12-13 02:31:30 +00:00
|
|
|
// If the templates are the same, we don't need to do anything, nor do we want to
|
|
|
|
if templates_are_the_same(left_template, right_template) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// If the templates are different by name, we need to replace the entire template
|
|
|
|
if templates_are_different(left_template, right_template) {
|
2022-11-23 03:59:56 +00:00
|
|
|
return self.light_diff_templates(left_template, right_template);
|
2022-11-04 00:34:42 +00:00
|
|
|
}
|
|
|
|
|
2022-12-13 02:31:30 +00:00
|
|
|
// If the templates are the same, we can diff the attributes and children
|
|
|
|
// Start with the attributes
|
|
|
|
left_template
|
2022-11-04 00:34:42 +00:00
|
|
|
.dynamic_attrs
|
|
|
|
.iter()
|
|
|
|
.zip(right_template.dynamic_attrs.iter())
|
2022-12-13 02:31:30 +00:00
|
|
|
.for_each(|(left_attr, right_attr)| {
|
|
|
|
// Move over the ID from the old to the new
|
|
|
|
right_attr
|
|
|
|
.mounted_element
|
|
|
|
.set(left_attr.mounted_element.get());
|
|
|
|
|
|
|
|
// We want to make sure anything listener that gets pulled is valid
|
|
|
|
if let AttributeValue::Listener(_) = right_attr.value {
|
|
|
|
self.update_template(left_attr.mounted_element.get(), right_template);
|
|
|
|
}
|
2022-11-30 22:21:10 +00:00
|
|
|
|
2022-12-13 02:31:30 +00:00
|
|
|
// If the attributes are different (or volatile), we need to update them
|
|
|
|
if left_attr.value != right_attr.value || left_attr.volatile {
|
|
|
|
self.update_attribute(right_attr, left_attr);
|
2022-11-20 01:07:29 +00:00
|
|
|
}
|
2022-12-13 02:31:30 +00:00
|
|
|
});
|
2022-11-04 00:34:42 +00:00
|
|
|
|
2022-12-13 02:31:30 +00:00
|
|
|
// Now diff the dynamic nodes
|
|
|
|
left_template
|
2022-11-04 00:34:42 +00:00
|
|
|
.dynamic_nodes
|
|
|
|
.iter()
|
|
|
|
.zip(right_template.dynamic_nodes.iter())
|
2022-11-29 21:31:04 +00:00
|
|
|
.enumerate()
|
2022-12-13 02:31:30 +00:00
|
|
|
.for_each(|(idx, (left_node, right_node))| {
|
|
|
|
self.diff_dynamic_node(left_node, right_node, right_template, idx);
|
|
|
|
});
|
2022-11-24 07:15:01 +00:00
|
|
|
|
2022-12-13 02:31:30 +00:00
|
|
|
// Make sure the roots get transferred over while we're here
|
|
|
|
left_template
|
2022-11-24 07:15:01 +00:00
|
|
|
.root_ids
|
|
|
|
.iter()
|
|
|
|
.zip(right_template.root_ids.iter())
|
2022-12-13 02:31:30 +00:00
|
|
|
.for_each(|(left, right)| right.set(left.get()));
|
|
|
|
}
|
|
|
|
|
|
|
|
fn diff_dynamic_node(
|
|
|
|
&mut self,
|
|
|
|
left_node: &'b DynamicNode<'b>,
|
|
|
|
right_node: &'b DynamicNode<'b>,
|
|
|
|
node: &'b VNode<'b>,
|
|
|
|
idx: usize,
|
|
|
|
) {
|
|
|
|
match (left_node, right_node) {
|
2022-12-13 22:44:47 +00:00
|
|
|
(Text(left), Text(right)) => self.diff_vtext(left, right, node),
|
2022-12-13 02:31:30 +00:00
|
|
|
(Fragment(left), Fragment(right)) => self.diff_non_empty_fragment(left, right),
|
|
|
|
(Placeholder(left), Placeholder(right)) => right.id.set(left.id.get()),
|
|
|
|
(Component(left), Component(right)) => self.diff_vcomponent(left, right, node, idx),
|
|
|
|
(Placeholder(left), Fragment(right)) => self.replace_placeholder(left, right),
|
|
|
|
(Fragment(left), Placeholder(right)) => self.node_to_placeholder(left, right),
|
|
|
|
_ => todo!("This is an usual custom case for dynamic nodes. We don't know how to handle it yet."),
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Push a `SetAttribute` mutation for `right_attr`, targeting the element
/// that `left_attr` is mounted on (the id was transferred by the caller).
fn update_attribute(&mut self, right_attr: &Attribute, left_attr: &Attribute) {
    // todo: add more types of attribute values
    match right_attr.value {
        AttributeValue::Text(text) => {
            // NOTE(review): these transmutes extend the borrowed &str
            // lifetimes so they can be stored in the mutation list —
            // presumably sound because the bump arena outlives the
            // mutations; TODO confirm.
            let name = unsafe { std::mem::transmute(left_attr.name) };
            let value = unsafe { std::mem::transmute(text) };
            self.mutations.push(Mutation::SetAttribute {
                id: left_attr.mounted_element.get(),
                ns: right_attr.namespace,
                name,
                value,
            });
        }
        // todo: more types of attribute values
        _ => todo!("other attribute types"),
    }
}
|
|
|
|
|
2022-12-13 02:31:30 +00:00
|
|
|
fn replace_placeholder(&mut self, l: &'b VPlaceholder, r: &'b [VNode<'b>]) {
|
2022-11-30 22:21:10 +00:00
|
|
|
let m = self.create_children(r);
|
2022-12-13 02:31:30 +00:00
|
|
|
let id = l.id.get().unwrap();
|
2022-11-30 22:21:10 +00:00
|
|
|
self.mutations.push(Mutation::ReplaceWith { id, m });
|
|
|
|
self.reclaim(id);
|
|
|
|
}
|
|
|
|
|
2022-12-13 02:31:30 +00:00
|
|
|
fn node_to_placeholder(&mut self, l: &'b [VNode<'b>], r: &'b VPlaceholder) {
|
2022-12-05 22:16:54 +00:00
|
|
|
// Remove the old nodes, except for one
|
|
|
|
self.remove_nodes(&l[1..]);
|
|
|
|
|
|
|
|
// Now create the new one
|
|
|
|
let first = self.replace_inner(&l[0]);
|
|
|
|
|
2022-11-30 22:21:10 +00:00
|
|
|
// Create the placeholder first, ensuring we get a dedicated ID for the placeholder
|
|
|
|
let placeholder = self.next_element(&l[0], &[]);
|
2022-12-13 02:31:30 +00:00
|
|
|
r.id.set(Some(placeholder));
|
2022-11-30 22:21:10 +00:00
|
|
|
self.mutations
|
|
|
|
.push(Mutation::CreatePlaceholder { id: placeholder });
|
|
|
|
|
|
|
|
self.mutations
|
|
|
|
.push(Mutation::ReplaceWith { id: first, m: 1 });
|
|
|
|
|
|
|
|
self.try_reclaim(first);
|
|
|
|
}
|
|
|
|
|
2022-11-29 21:31:04 +00:00
|
|
|
/// Diff two component nodes occupying the same dynamic-node slot.
///
/// Fast-paths pointer-identical nodes, replaces components whose render
/// functions differ, attempts memoization when props are static, and
/// otherwise moves the new props in, re-runs the scope, and diffs its output.
fn diff_vcomponent(
    &mut self,
    left: &'b VComponent<'b>,
    right: &'b VComponent<'b>,
    right_template: &'b VNode<'b>,
    idx: usize,
) {
    // Pointer-equal components are literally the same allocation — skip.
    if std::ptr::eq(left, right) {
        return;
    }

    // Replace components that have different render fns
    if left.render_fn != right.render_fn {
        // Create the replacement first so we know how many nodes (`m`) the
        // ReplaceWith below splices in.
        let created = self.create_component_node(right_template, right, idx);
        // NOTE(review): extends the borrow of the old scope's root node —
        // relies on the scope staying alive until `drop_scope` below;
        // TODO confirm.
        let head = unsafe {
            self.scopes[left.scope.get().unwrap().0]
                .root_node()
                .extend_lifetime_ref()
        };
        let id = match head {
            RenderReturn::Sync(Ok(node)) => self.replace_inner(node),
            _ => todo!(),
        };
        self.mutations
            .push(Mutation::ReplaceWith { id, m: created });
        self.drop_scope(left.scope.get().unwrap());
        return;
    }

    // Make sure the new vcomponent has the right scopeid associated to it
    let Some(scope_id) = left.scope.get() else {
        return;
    };
    // let scope_id = left.scope.get().unwrap_or_else(|| {
    //     panic!(
    //         "A component should always have a scope associated to it. {:?}\n {:#?}",
    //         right.name,
    //         std::backtrace::Backtrace::force_capture()
    //     )
    // });

    right.scope.set(Some(scope_id));

    // copy out the box for both
    let old = self.scopes[scope_id.0].props.as_ref();
    let new = right.props.take().unwrap();

    // If the props are static, then we try to memoize by setting the new with the old
    // The target scopestate still has the reference to the old props, so there's no need to update anything
    // This also implicitly drops the new props since they're not used
    if left.static_props && unsafe { old.as_ref().unwrap().memoize(new.as_ref()) } {
        return;
    }

    // First, move over the props from the old to the new, dropping old props in the process
    // NOTE(review): the transmute presumably erases a lifetime so the props
    // box can be stored in the scope — TODO confirm.
    self.scopes[scope_id.0].props = unsafe { std::mem::transmute(new) };

    // Now run the component and diff it
    self.run_scope(scope_id);
    self.diff_scope(scope_id);

    // The scope was just rendered, so drop it from the dirty set to avoid a
    // redundant re-render this pass.
    self.dirty_scopes.remove(&DirtyScope {
        height: self.scopes[scope_id.0].height,
        id: scope_id,
    });
}
|
2022-11-04 00:34:42 +00:00
|
|
|
|
2022-11-20 01:07:29 +00:00
|
|
|
/// Lightly diff the two templates, checking only their roots.
|
|
|
|
///
|
|
|
|
/// The goal here is to preserve any existing component state that might exist. This is to preserve some React-like
|
|
|
|
/// behavior where the component state is preserved when the component is re-rendered.
|
|
|
|
///
|
|
|
|
/// This is implemented by iterating each root, checking if the component is the same, if it is, then diff it.
|
|
|
|
///
|
|
|
|
/// We then pass the new template through "create" which should be smart enough to skip roots.
|
|
|
|
///
|
|
|
|
/// Currently, we only handle the case where the roots are the same component list. If there's any sort of deviation,
|
|
|
|
/// IE more nodes, less nodes, different nodes, or expressions, then we just replace the whole thing.
|
|
|
|
///
|
|
|
|
/// This is mostly implemented to help solve the issue where the same component is rendered under two different
|
|
|
|
/// conditions:
|
|
|
|
///
|
2022-11-30 15:31:44 +00:00
|
|
|
/// ```rust, ignore
|
2022-11-20 01:07:29 +00:00
|
|
|
/// if enabled {
|
|
|
|
/// rsx!{ Component { enabled_sign: "abc" } }
|
|
|
|
/// } else {
|
|
|
|
/// rsx!{ Component { enabled_sign: "xyz" } }
|
|
|
|
/// }
|
|
|
|
/// ```
|
|
|
|
///
|
|
|
|
/// However, we should note that it's explicit in the docs that this is not a guarantee. If you need to preserve state,
|
|
|
|
/// then you should be passing in separate props instead.
|
|
|
|
///
|
2022-11-30 15:31:44 +00:00
|
|
|
/// ```rust, ignore
|
2022-11-20 01:07:29 +00:00
|
|
|
/// let props = if enabled {
|
|
|
|
/// ComponentProps { enabled_sign: "abc" }
|
|
|
|
/// } else {
|
|
|
|
/// ComponentProps { enabled_sign: "xyz" }
|
|
|
|
/// };
|
|
|
|
///
|
|
|
|
/// rsx! {
|
|
|
|
/// Component { ..props }
|
|
|
|
/// }
|
|
|
|
/// ```
|
2022-11-23 03:59:56 +00:00
|
|
|
fn light_diff_templates(&mut self, left: &'b VNode<'b>, right: &'b VNode<'b>) {
|
2022-12-13 23:18:20 +00:00
|
|
|
match matching_components(left, right) {
|
|
|
|
None => self.replace(left, right),
|
|
|
|
Some(components) => components
|
|
|
|
.into_iter()
|
|
|
|
.enumerate()
|
|
|
|
.for_each(|(idx, (l, r))| self.diff_vcomponent(l, r, right, idx)),
|
|
|
|
}
|
2022-11-20 01:07:29 +00:00
|
|
|
}
|
|
|
|
|
2022-11-20 23:58:05 +00:00
|
|
|
/// Diff the two text nodes
|
|
|
|
///
|
|
|
|
/// This just moves the ID of the old node over to the new node, and then sets the text of the new node if it's
|
|
|
|
/// different.
|
2022-12-13 22:44:47 +00:00
|
|
|
fn diff_vtext(&mut self, left: &'b VText<'b>, right: &'b VText<'b>, node: &'b VNode<'b>) {
|
|
|
|
let id = left
|
|
|
|
.id
|
|
|
|
.get()
|
|
|
|
.unwrap_or_else(|| self.next_element(node, &[0]));
|
|
|
|
|
2022-12-13 02:31:30 +00:00
|
|
|
right.id.set(Some(id));
|
2022-11-20 01:07:29 +00:00
|
|
|
if left.value != right.value {
|
2022-11-29 21:31:04 +00:00
|
|
|
let value = unsafe { std::mem::transmute(right.value) };
|
|
|
|
self.mutations.push(Mutation::SetText { id, value });
|
2022-11-04 00:34:42 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-11-29 21:31:04 +00:00
|
|
|
/// Remove all the top-level nodes, returning the firstmost root ElementId
///
/// All IDs will be garbage collected
fn replace_inner(&mut self, node: &'b VNode<'b>) -> ElementId {
    // Resolve the element id of root 0, recursing through fragments and
    // components until a concrete element is found.
    let id = match node.dynamic_root(0) {
        // Static root: its id was stored in root_ids when mounted.
        None => node.root_ids[0].get().unwrap(),
        Some(Text(t)) => t.id.get().unwrap(),
        Some(Placeholder(e)) => e.id.get().unwrap(),
        Some(Fragment(nodes)) => {
            // The fragment's first node anchors the replacement...
            let id = self.replace_inner(&nodes[0]);
            // ...and the remaining siblings are simply removed.
            self.remove_nodes(&nodes[1..]);
            id
        }
        Some(Component(comp)) => {
            let scope = comp.scope.get().unwrap();
            // NOTE(review): lifetime extension as elsewhere in this module —
            // relies on the scope outliving this call; TODO confirm.
            match unsafe { self.scopes[scope.0].root_node().extend_lifetime_ref() } {
                RenderReturn::Sync(Ok(t)) => self.replace_inner(t),
                _ => todo!("cannot handle nonstandard nodes"),
            }
        }
    };

    // Just remove the rest from the dom
    for (idx, _) in node.template.roots.iter().enumerate().skip(1) {
        self.remove_root_node(node, idx);
    }

    // Garbage collect all of the nodes since this gets used in replace
    self.clean_up_node(node);

    id
}
|
|
|
|
|
|
|
|
/// Clean up the node, not generating mutations
///
/// Simply walks through the dynamic nodes
fn clean_up_node(&mut self, node: &'b VNode<'b>) {
    for (idx, dyn_node) in node.dynamic_nodes.iter().enumerate() {
        // Roots are cleaned up automatically?
        // (a node path of length 1 means this dynamic node IS a root)
        if node.template.node_paths[idx].len() == 1 {
            continue;
        }

        match dyn_node {
            Component(comp) => {
                // `take` clears the stored scope, marking the component as
                // unmounted; recurse into whatever it rendered.
                if let Some(scope) = comp.scope.take() {
                    match unsafe { self.scopes[scope.0].root_node().extend_lifetime_ref() } {
                        RenderReturn::Sync(Ok(t)) => self.clean_up_node(t),
                        _ => todo!("cannot handle nonstandard nodes"),
                    };
                }
            }
            Text(t) => {
                if let Some(id) = t.id.take() {
                    self.reclaim(id)
                }
            }
            Placeholder(t) => self.reclaim(t.id.take().unwrap()),
            Fragment(nodes) => nodes.iter().for_each(|node| self.clean_up_node(node)),
        };
    }

    // we clean up nodes with dynamic attributes, provided the node is unique and not a root node
    let mut id = None;
    for (idx, attr) in node.dynamic_attrs.iter().enumerate() {
        // We'll clean up the root nodes either way, so don't worry
        if node.template.attr_paths[idx].len() == 1 {
            continue;
        }

        let next_id = attr.mounted_element.get();

        // Several attributes can share one element — reclaim each element
        // only once. NOTE(review): this only dedupes *consecutive* attrs on
        // the same element; presumably attr ordering guarantees grouping —
        // TODO confirm.
        if id == Some(next_id) {
            continue;
        }

        id = Some(next_id);

        self.reclaim(next_id);
    }
}
|
|
|
|
|
|
|
|
/// Remove the `idx`th root of `node` from the DOM, emitting `Remove`
/// mutations and reclaiming the element ids involved.
fn remove_root_node(&mut self, node: &'b VNode<'b>, idx: usize) {
    match node.dynamic_root(idx) {
        Some(Text(i)) => {
            // `take` clears the stored id so the node reads as unmounted.
            let id = i.id.take().unwrap();
            self.mutations.push(Mutation::Remove { id });
            self.reclaim(id);
        }
        Some(Placeholder(e)) => {
            let id = e.id.take().unwrap();
            self.mutations.push(Mutation::Remove { id });
            self.reclaim(id);
        }
        Some(Fragment(nodes)) => self.remove_nodes(nodes),
        Some(Component(comp)) => {
            // Unmount the component's scope and remove whatever it rendered.
            let scope = comp.scope.take().unwrap();
            match unsafe { self.scopes[scope.0].root_node().extend_lifetime_ref() } {
                RenderReturn::Sync(Ok(t)) => self.remove_node(t),
                _ => todo!("cannot handle nonstandard nodes"),
            };
        }
        // Static root: its id lives in the template's root_ids slot.
        None => {
            let id = node.root_ids[idx].get().unwrap();
            self.mutations.push(Mutation::Remove { id });
            self.reclaim(id);
        }
    };
}
|
|
|
|
|
2022-11-30 22:21:10 +00:00
|
|
|
/// Diff two non-empty sibling lists, dispatching to the keyed or non-keyed
/// algorithm based on whether the children carry keys.
fn diff_non_empty_fragment(&mut self, old: &'b [VNode<'b>], new: &'b [VNode<'b>]) {
    // Keyed-ness is decided by the first child; all siblings must agree.
    let new_is_keyed = new[0].key.is_some();
    let old_is_keyed = old[0].key.is_some();
    debug_assert!(
        new.iter().all(|n| n.key.is_some() == new_is_keyed),
        "all siblings must be keyed or all siblings must be non-keyed"
    );
    debug_assert!(
        old.iter().all(|o| o.key.is_some() == old_is_keyed),
        "all siblings must be keyed or all siblings must be non-keyed"
    );

    match (old_is_keyed, new_is_keyed) {
        // Only a fully keyed old+new pair gets the keyed algorithm.
        (true, true) => self.diff_keyed_children(old, new),
        _ => self.diff_non_keyed_children(old, new),
    }
}
|
|
|
|
|
2022-11-04 00:34:42 +00:00
|
|
|
// Diff children that are not keyed.
|
|
|
|
//
|
|
|
|
// The parent must be on the top of the change list stack when entering this
|
|
|
|
// function:
|
|
|
|
//
|
|
|
|
// [... parent]
|
|
|
|
//
|
|
|
|
// the change list stack is in the same state when this function returns.
|
2022-11-23 03:59:56 +00:00
|
|
|
fn diff_non_keyed_children(&mut self, old: &'b [VNode<'b>], new: &'b [VNode<'b>]) {
|
2022-11-04 00:34:42 +00:00
|
|
|
use std::cmp::Ordering;
|
|
|
|
|
|
|
|
// Handled these cases in `diff_children` before calling this function.
|
|
|
|
debug_assert!(!new.is_empty());
|
|
|
|
debug_assert!(!old.is_empty());
|
|
|
|
|
2022-12-06 20:24:35 +00:00
|
|
|
match old.len().cmp(&new.len()) {
|
2022-11-23 03:59:56 +00:00
|
|
|
Ordering::Greater => self.remove_nodes(&old[new.len()..]),
|
|
|
|
Ordering::Less => self.create_and_insert_after(&new[old.len()..], old.last().unwrap()),
|
2022-11-04 00:34:42 +00:00
|
|
|
Ordering::Equal => {}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (new, old) in new.iter().zip(old.iter()) {
|
2022-11-23 03:59:56 +00:00
|
|
|
self.diff_node(old, new);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Diffing "keyed" children.
|
|
|
|
//
|
|
|
|
// With keyed children, we care about whether we delete, move, or create nodes
|
|
|
|
// versus mutate existing nodes in place. Presumably there is some sort of CSS
|
|
|
|
// transition animation that makes the virtual DOM diffing algorithm
|
|
|
|
// observable. By specifying keys for nodes, we know which virtual DOM nodes
|
|
|
|
// must reuse (or not reuse) the same physical DOM nodes.
|
|
|
|
//
|
|
|
|
// This is loosely based on Inferno's keyed patching implementation. However, we
|
|
|
|
// have to modify the algorithm since we are compiling the diff down into change
|
|
|
|
// list instructions that will be executed later, rather than applying the
|
|
|
|
// changes to the DOM directly as we compare virtual DOMs.
|
|
|
|
//
|
|
|
|
// https://github.com/infernojs/inferno/blob/36fd96/packages/inferno/src/DOM/patching.ts#L530-L739
|
|
|
|
//
|
|
|
|
// The stack is empty upon entry.
|
|
|
|
/// Entry point for keyed-children diffing: trims the shared key prefix and
/// suffix via `diff_keyed_ends`, then handles the remaining middle range
/// (pure removals, pure insertions, or the full reorder in
/// `diff_keyed_middle`).
fn diff_keyed_children(&mut self, old: &'b [VNode<'b>], new: &'b [VNode<'b>]) {
    if cfg!(debug_assertions) {
        let mut keys = rustc_hash::FxHashSet::default();
        let mut assert_unique_keys = |children: &'b [VNode<'b>]| {
            keys.clear();
            for child in children {
                let key = child.key;
                debug_assert!(
                    key.is_some(),
                    "if any sibling is keyed, all siblings must be keyed"
                );
                keys.insert(key);
            }
            debug_assert_eq!(
                children.len(),
                keys.len(),
                "keyed siblings must each have a unique key"
            );
        };
        assert_unique_keys(old);
        assert_unique_keys(new);
    }

    // First up, we diff all the nodes with the same key at the beginning of the
    // children.
    //
    // `shared_prefix_count` is the count of how many nodes at the start of
    // `new` and `old` share the same keys.
    let (left_offset, right_offset) = match self.diff_keyed_ends(old, new) {
        Some(count) => count,
        // None means one side was exhausted — diff_keyed_ends already
        // finished the whole job.
        None => return,
    };

    // Ok, we now hopefully have a smaller range of children in the middle
    // within which to re-order nodes with the same keys, remove old nodes with
    // now-unused keys, and create new nodes with fresh keys.

    let old_middle = &old[left_offset..(old.len() - right_offset)];
    let new_middle = &new[left_offset..(new.len() - right_offset)];

    debug_assert!(
        !((old_middle.len() == new_middle.len()) && old_middle.is_empty()),
        "keyed children must have the same number of children"
    );

    if new_middle.is_empty() {
        // remove the old elements
        self.remove_nodes(old_middle);
    } else if old_middle.is_empty() {
        // there were no old elements, so just create the new elements
        // we need to find the right "foothold" though - we shouldn't use the "append" at all
        if left_offset == 0 {
            // insert at the beginning of the old list
            let foothold = &old[old.len() - right_offset];
            self.create_and_insert_before(new_middle, foothold);
        } else if right_offset == 0 {
            // insert at the end the old list
            let foothold = old.last().unwrap();
            self.create_and_insert_after(new_middle, foothold);
        } else {
            // inserting in the middle
            let foothold = &old[left_offset - 1];
            self.create_and_insert_after(new_middle, foothold);
        }
    } else {
        // Both middles are non-empty: run the full LIS-based reorder.
        self.diff_keyed_middle(old_middle, new_middle);
    }
}
|
|
|
|
|
|
|
|
/// Diff both ends of the children that share keys.
|
|
|
|
///
|
|
|
|
/// Returns a left offset and right offset of that indicates a smaller section to pass onto the middle diffing.
|
|
|
|
///
|
|
|
|
/// If there is no offset, then this function returns None and the diffing is complete.
|
|
|
|
fn diff_keyed_ends(
|
|
|
|
&mut self,
|
|
|
|
old: &'b [VNode<'b>],
|
|
|
|
new: &'b [VNode<'b>],
|
|
|
|
) -> Option<(usize, usize)> {
|
|
|
|
let mut left_offset = 0;
|
|
|
|
|
|
|
|
for (old, new) in old.iter().zip(new.iter()) {
|
|
|
|
// abort early if we finally run into nodes with different keys
|
|
|
|
if old.key != new.key {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
self.diff_node(old, new);
|
|
|
|
left_offset += 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
// If that was all of the old children, then create and append the remaining
|
|
|
|
// new children and we're finished.
|
|
|
|
if left_offset == old.len() {
|
|
|
|
self.create_and_insert_after(&new[left_offset..], old.last().unwrap());
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
|
|
|
|
// And if that was all of the new children, then remove all of the remaining
|
|
|
|
// old children and we're finished.
|
|
|
|
if left_offset == new.len() {
|
|
|
|
self.remove_nodes(&old[left_offset..]);
|
|
|
|
return None;
|
2022-11-04 00:34:42 +00:00
|
|
|
}
|
2022-11-23 03:59:56 +00:00
|
|
|
|
|
|
|
// if the shared prefix is less than either length, then we need to walk backwards
|
|
|
|
let mut right_offset = 0;
|
|
|
|
for (old, new) in old.iter().rev().zip(new.iter().rev()) {
|
|
|
|
// abort early if we finally run into nodes with different keys
|
|
|
|
if old.key != new.key {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
self.diff_node(old, new);
|
|
|
|
right_offset += 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
Some((left_offset, right_offset))
|
2022-11-04 00:34:42 +00:00
|
|
|
}
|
|
|
|
|
2022-11-23 03:59:56 +00:00
|
|
|
// The most-general, expensive code path for keyed children diffing.
|
|
|
|
//
|
|
|
|
// We find the longest subsequence within `old` of children that are relatively
|
|
|
|
// ordered the same way in `new` (via finding a longest-increasing-subsequence
|
|
|
|
// of the old child's index within `new`). The children that are elements of
|
|
|
|
// this subsequence will remain in place, minimizing the number of DOM moves we
|
|
|
|
// will have to do.
|
|
|
|
//
|
|
|
|
// Upon entry to this function, the change list stack must be empty.
|
|
|
|
//
|
|
|
|
// This function will load the appropriate nodes onto the stack and do diffing in place.
|
|
|
|
//
|
|
|
|
// Upon exit from this function, it will be restored to that same self.
|
|
|
|
#[allow(clippy::too_many_lines)]
|
|
|
|
fn diff_keyed_middle(&mut self, old: &'b [VNode<'b>], new: &'b [VNode<'b>]) {
|
|
|
|
/*
|
|
|
|
1. Map the old keys into a numerical ordering based on indices.
|
|
|
|
2. Create a map of old key to its index
|
|
|
|
3. Map each new key to the old key, carrying over the old index.
|
|
|
|
- IE if we have ABCD becomes BACD, our sequence would be 1,0,2,3
|
|
|
|
- if we have ABCD to ABDE, our sequence would be 0,1,3,MAX because E doesn't exist
|
|
|
|
|
|
|
|
now, we should have a list of integers that indicates where in the old list the new items map to.
|
|
|
|
|
|
|
|
4. Compute the LIS of this list
|
|
|
|
- this indicates the longest list of new children that won't need to be moved.
|
|
|
|
|
|
|
|
5. Identify which nodes need to be removed
|
|
|
|
6. Identify which nodes will need to be diffed
|
|
|
|
|
|
|
|
7. Going along each item in the new list, create it and insert it before the next closest item in the LIS.
|
|
|
|
- if the item already existed, just move it to the right place.
|
|
|
|
|
|
|
|
8. Finally, generate instructions to remove any old children.
|
|
|
|
9. Generate instructions to finally diff children that are the same between both
|
|
|
|
*/
|
|
|
|
// 0. Debug sanity checks
|
|
|
|
// Should have already diffed the shared-key prefixes and suffixes.
|
|
|
|
debug_assert_ne!(new.first().map(|i| i.key), old.first().map(|i| i.key));
|
|
|
|
debug_assert_ne!(new.last().map(|i| i.key), old.last().map(|i| i.key));
|
|
|
|
|
|
|
|
// 1. Map the old keys into a numerical ordering based on indices.
|
|
|
|
// 2. Create a map of old key to its index
|
|
|
|
// IE if the keys were A B C, then we would have (A, 1) (B, 2) (C, 3).
|
|
|
|
let old_key_to_old_index = old
|
|
|
|
.iter()
|
|
|
|
.enumerate()
|
|
|
|
.map(|(i, o)| (o.key.unwrap(), i))
|
|
|
|
.collect::<FxHashMap<_, _>>();
|
|
|
|
|
|
|
|
let mut shared_keys = FxHashSet::default();
|
|
|
|
|
|
|
|
// 3. Map each new key to the old key, carrying over the old index.
|
|
|
|
let new_index_to_old_index = new
|
|
|
|
.iter()
|
|
|
|
.map(|node| {
|
|
|
|
let key = node.key.unwrap();
|
|
|
|
if let Some(&index) = old_key_to_old_index.get(&key) {
|
|
|
|
shared_keys.insert(key);
|
|
|
|
index
|
|
|
|
} else {
|
|
|
|
u32::MAX as usize
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect::<Vec<_>>();
|
|
|
|
|
|
|
|
// If none of the old keys are reused by the new children, then we remove all the remaining old children and
|
|
|
|
// create the new children afresh.
|
|
|
|
if shared_keys.is_empty() {
|
2022-11-29 21:46:25 +00:00
|
|
|
if old.get(0).is_some() {
|
2022-11-23 03:59:56 +00:00
|
|
|
self.remove_nodes(&old[1..]);
|
2022-11-29 21:31:04 +00:00
|
|
|
self.replace_many(&old[0], new);
|
2022-11-23 03:59:56 +00:00
|
|
|
} else {
|
|
|
|
// I think this is wrong - why are we appending?
|
|
|
|
// only valid of the if there are no trailing elements
|
|
|
|
// self.create_and_append_children(new);
|
|
|
|
|
2022-11-27 07:06:04 +00:00
|
|
|
todo!("we should never be appending - just creating N");
|
2022-11-23 03:59:56 +00:00
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// remove any old children that are not shared
|
|
|
|
// todo: make this an iterator
|
|
|
|
for child in old {
|
|
|
|
let key = child.key.unwrap();
|
|
|
|
if !shared_keys.contains(&key) {
|
2022-11-27 07:06:04 +00:00
|
|
|
self.remove_node(child);
|
2022-11-23 03:59:56 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// 4. Compute the LIS of this list
|
|
|
|
let mut lis_sequence = Vec::default();
|
|
|
|
lis_sequence.reserve(new_index_to_old_index.len());
|
|
|
|
|
|
|
|
let mut predecessors = vec![0; new_index_to_old_index.len()];
|
|
|
|
let mut starts = vec![0; new_index_to_old_index.len()];
|
|
|
|
|
|
|
|
longest_increasing_subsequence::lis_with(
|
|
|
|
&new_index_to_old_index,
|
|
|
|
&mut lis_sequence,
|
|
|
|
|a, b| a < b,
|
|
|
|
&mut predecessors,
|
|
|
|
&mut starts,
|
|
|
|
);
|
|
|
|
|
|
|
|
// the lis comes out backwards, I think. can't quite tell.
|
|
|
|
lis_sequence.sort_unstable();
|
|
|
|
|
|
|
|
// if a new node gets u32 max and is at the end, then it might be part of our LIS (because u32 max is a valid LIS)
|
|
|
|
if lis_sequence.last().map(|f| new_index_to_old_index[*f]) == Some(u32::MAX as usize) {
|
|
|
|
lis_sequence.pop();
|
|
|
|
}
|
|
|
|
|
|
|
|
for idx in &lis_sequence {
|
|
|
|
self.diff_node(&old[new_index_to_old_index[*idx]], &new[*idx]);
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut nodes_created = 0;
|
|
|
|
|
|
|
|
// add mount instruction for the first items not covered by the lis
|
|
|
|
let last = *lis_sequence.last().unwrap();
|
|
|
|
if last < (new.len() - 1) {
|
|
|
|
for (idx, new_node) in new[(last + 1)..].iter().enumerate() {
|
|
|
|
let new_idx = idx + last + 1;
|
|
|
|
let old_index = new_index_to_old_index[new_idx];
|
|
|
|
if old_index == u32::MAX as usize {
|
|
|
|
nodes_created += self.create(new_node);
|
|
|
|
} else {
|
|
|
|
self.diff_node(&old[old_index], new_node);
|
|
|
|
nodes_created += self.push_all_real_nodes(new_node);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let id = self.find_last_element(&new[last]);
|
|
|
|
self.mutations.push(Mutation::InsertAfter {
|
|
|
|
id,
|
|
|
|
m: nodes_created,
|
|
|
|
});
|
|
|
|
nodes_created = 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
// for each spacing, generate a mount instruction
|
|
|
|
let mut lis_iter = lis_sequence.iter().rev();
|
|
|
|
let mut last = *lis_iter.next().unwrap();
|
|
|
|
for next in lis_iter {
|
|
|
|
if last - next > 1 {
|
|
|
|
for (idx, new_node) in new[(next + 1)..last].iter().enumerate() {
|
|
|
|
let new_idx = idx + next + 1;
|
|
|
|
let old_index = new_index_to_old_index[new_idx];
|
|
|
|
if old_index == u32::MAX as usize {
|
|
|
|
nodes_created += self.create(new_node);
|
|
|
|
} else {
|
|
|
|
self.diff_node(&old[old_index], new_node);
|
|
|
|
nodes_created += self.push_all_real_nodes(new_node);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let id = self.find_first_element(&new[last]);
|
|
|
|
self.mutations.push(Mutation::InsertBefore {
|
|
|
|
id,
|
|
|
|
m: nodes_created,
|
|
|
|
});
|
|
|
|
|
|
|
|
nodes_created = 0;
|
|
|
|
}
|
|
|
|
last = *next;
|
|
|
|
}
|
|
|
|
|
|
|
|
// add mount instruction for the last items not covered by the lis
|
|
|
|
let first_lis = *lis_sequence.first().unwrap();
|
|
|
|
if first_lis > 0 {
|
|
|
|
for (idx, new_node) in new[..first_lis].iter().enumerate() {
|
|
|
|
let old_index = new_index_to_old_index[idx];
|
|
|
|
if old_index == u32::MAX as usize {
|
|
|
|
nodes_created += self.create(new_node);
|
|
|
|
} else {
|
|
|
|
self.diff_node(&old[old_index], new_node);
|
|
|
|
nodes_created += self.push_all_real_nodes(new_node);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let id = self.find_first_element(&new[first_lis]);
|
|
|
|
self.mutations.push(Mutation::InsertBefore {
|
|
|
|
id,
|
|
|
|
m: nodes_created,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-11-04 00:34:42 +00:00
|
|
|
/// Remove these nodes from the dom
|
|
|
|
/// Wont generate mutations for the inner nodes
|
2022-11-23 03:59:56 +00:00
|
|
|
fn remove_nodes(&mut self, nodes: &'b [VNode<'b>]) {
|
2022-12-05 22:16:54 +00:00
|
|
|
// note that we iterate in reverse to unlink lists of nodes in their rough index order
|
|
|
|
nodes.iter().rev().for_each(|node| self.remove_node(node));
|
2022-11-24 07:15:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn remove_node(&mut self, node: &'b VNode<'b>) {
|
2022-11-25 07:12:29 +00:00
|
|
|
for (idx, _) in node.template.roots.iter().enumerate() {
|
|
|
|
let id = match node.dynamic_root(idx) {
|
2022-12-13 02:31:30 +00:00
|
|
|
Some(Text(t)) => t.id.take(),
|
|
|
|
Some(Placeholder(t)) => t.id.take(),
|
2022-11-30 16:24:13 +00:00
|
|
|
Some(Fragment(t)) => return self.remove_nodes(t),
|
2022-12-13 02:31:30 +00:00
|
|
|
Some(Component(comp)) => {
|
|
|
|
comp.scope.set(None);
|
|
|
|
return self.remove_component(comp.scope.get().unwrap());
|
|
|
|
}
|
2022-11-25 07:12:29 +00:00
|
|
|
None => node.root_ids[idx].get(),
|
2022-12-13 02:31:30 +00:00
|
|
|
}
|
|
|
|
.unwrap();
|
2022-11-24 07:15:01 +00:00
|
|
|
|
|
|
|
self.mutations.push(Mutation::Remove { id })
|
|
|
|
}
|
2022-11-25 07:12:29 +00:00
|
|
|
|
|
|
|
self.clean_up_node(node);
|
|
|
|
|
|
|
|
for root in node.root_ids {
|
2022-12-13 02:31:30 +00:00
|
|
|
let id = root.get().unwrap();
|
2022-11-25 07:12:29 +00:00
|
|
|
if id.0 != 0 {
|
|
|
|
self.reclaim(id);
|
|
|
|
}
|
|
|
|
}
|
2022-11-24 07:15:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn remove_component(&mut self, scope_id: ScopeId) {
|
|
|
|
let height = self.scopes[scope_id.0].height;
|
|
|
|
self.dirty_scopes.remove(&DirtyScope {
|
|
|
|
height,
|
|
|
|
id: scope_id,
|
|
|
|
});
|
|
|
|
|
|
|
|
// I promise, since we're descending down the tree, this is safe
|
|
|
|
match unsafe { self.scopes[scope_id.0].root_node().extend_lifetime_ref() } {
|
|
|
|
RenderReturn::Sync(Ok(t)) => self.remove_node(t),
|
|
|
|
_ => todo!("cannot handle nonstandard nodes"),
|
|
|
|
}
|
2022-11-03 09:11:04 +00:00
|
|
|
}
|
2022-11-23 02:38:27 +00:00
|
|
|
|
|
|
|
/// Push all the real nodes on the stack
|
2022-11-29 21:31:04 +00:00
|
|
|
fn push_all_real_nodes(&mut self, node: &'b VNode<'b>) -> usize {
|
2022-12-13 02:31:30 +00:00
|
|
|
node.template
|
|
|
|
.roots
|
|
|
|
.iter()
|
|
|
|
.enumerate()
|
|
|
|
.map(|(idx, _)| {
|
|
|
|
match node.dynamic_root(idx) {
|
|
|
|
Some(Text(t)) => {
|
|
|
|
self.mutations.push(Mutation::PushRoot {
|
|
|
|
id: t.id.get().unwrap(),
|
|
|
|
});
|
|
|
|
1
|
2022-11-27 07:06:04 +00:00
|
|
|
}
|
2022-12-13 02:31:30 +00:00
|
|
|
Some(Placeholder(t)) => {
|
|
|
|
self.mutations.push(Mutation::PushRoot {
|
|
|
|
id: t.id.get().unwrap(),
|
|
|
|
});
|
|
|
|
1
|
|
|
|
}
|
|
|
|
Some(Fragment(nodes)) => nodes
|
|
|
|
.iter()
|
|
|
|
.map(|node| self.push_all_real_nodes(node))
|
|
|
|
.count(),
|
|
|
|
|
|
|
|
Some(Component(comp)) => {
|
|
|
|
let scope = comp.scope.get().unwrap();
|
2022-11-29 21:31:04 +00:00
|
|
|
match unsafe { self.scopes[scope.0].root_node().extend_lifetime_ref() } {
|
|
|
|
RenderReturn::Sync(Ok(node)) => self.push_all_real_nodes(node),
|
|
|
|
_ => todo!(),
|
|
|
|
}
|
2022-12-13 02:31:30 +00:00
|
|
|
}
|
|
|
|
None => {
|
|
|
|
self.mutations.push(Mutation::PushRoot {
|
|
|
|
id: node.root_ids[idx].get().unwrap(),
|
|
|
|
});
|
|
|
|
1
|
|
|
|
}
|
|
|
|
};
|
|
|
|
})
|
|
|
|
.count()
|
2022-11-23 02:38:27 +00:00
|
|
|
}
|
|
|
|
|
2022-11-23 03:59:56 +00:00
|
|
|
fn create_children(&mut self, nodes: &'b [VNode<'b>]) -> usize {
|
2022-11-24 07:15:01 +00:00
|
|
|
nodes.iter().fold(0, |acc, child| acc + self.create(child))
|
2022-11-23 02:38:27 +00:00
|
|
|
}
|
2022-11-23 03:59:56 +00:00
|
|
|
|
2022-11-24 07:15:01 +00:00
|
|
|
fn create_and_insert_before(&mut self, new: &'b [VNode<'b>], before: &'b VNode<'b>) {
|
|
|
|
let m = self.create_children(new);
|
|
|
|
let id = self.find_first_element(before);
|
|
|
|
self.mutations.push(Mutation::InsertBefore { id, m })
|
2022-11-23 03:59:56 +00:00
|
|
|
}
|
2022-11-24 07:15:01 +00:00
|
|
|
|
|
|
|
fn create_and_insert_after(&mut self, new: &'b [VNode<'b>], after: &'b VNode<'b>) {
|
|
|
|
let m = self.create_children(new);
|
2022-11-23 03:59:56 +00:00
|
|
|
let id = self.find_last_element(after);
|
2022-11-24 07:15:01 +00:00
|
|
|
self.mutations.push(Mutation::InsertAfter { id, m })
|
2022-11-23 02:38:27 +00:00
|
|
|
}
|
|
|
|
|
2022-11-29 21:31:04 +00:00
|
|
|
    /// Find the ElementId of the first *real* (renderer-visible) node of this vnode,
    /// recursing through fragments and components until a concrete element is found.
    ///
    /// Panics (unwrap) if the node has not been mounted yet — callers must only use
    /// this on nodes that have already been created.
    fn find_first_element(&self, node: &'b VNode<'b>) -> ElementId {
        // Only the first root matters for "first element"
        match node.dynamic_root(0) {
            // Static root: the id was assigned at mount time
            None => node.root_ids[0].get().unwrap(),
            Some(Text(t)) => t.id.get().unwrap(),
            // Fragments: recurse into the first child
            Some(Fragment(t)) => self.find_first_element(&t[0]),
            Some(Placeholder(t)) => t.id.get().unwrap(),
            Some(Component(comp)) => {
                let scope = comp.scope.get().unwrap();
                // SAFETY-style note: lifetime extension is justified by the caller's
                // invariant that we're descending the live tree (see remove_component)
                match unsafe { self.scopes[scope.0].root_node().extend_lifetime_ref() } {
                    RenderReturn::Sync(Ok(t)) => self.find_first_element(t),
                    _ => todo!("cannot handle nonstandard nodes"),
                }
            }
        }
    }
|
|
|
|
|
2022-11-29 21:31:04 +00:00
|
|
|
    /// Find the ElementId of the last *real* (renderer-visible) node of this vnode —
    /// the mirror of `find_first_element`, starting from the final template root.
    ///
    /// Panics (unwrap) if the node has not been mounted yet.
    fn find_last_element(&self, node: &'b VNode<'b>) -> ElementId {
        // Start at the final root of the template
        match node.dynamic_root(node.template.roots.len() - 1) {
            // Static root: use the id assigned at mount time
            None => node.root_ids.last().unwrap().get().unwrap(),
            Some(Text(t)) => t.id.get().unwrap(),
            // Fragments: recurse into the last child
            Some(Fragment(t)) => self.find_last_element(t.last().unwrap()),
            Some(Placeholder(t)) => t.id.get().unwrap(),
            Some(Component(comp)) => {
                let scope = comp.scope.get().unwrap();
                // Lifetime extension justified by descending the live tree
                match unsafe { self.scopes[scope.0].root_node().extend_lifetime_ref() } {
                    RenderReturn::Sync(Ok(t)) => self.find_last_element(t),
                    _ => todo!("cannot handle nonstandard nodes"),
                }
            }
        }
    }
|
2022-11-23 03:59:56 +00:00
|
|
|
|
2022-11-24 14:11:27 +00:00
|
|
|
fn replace(&mut self, left: &'b VNode<'b>, right: &'b VNode<'b>) {
|
|
|
|
let first = self.find_first_element(left);
|
|
|
|
let id = self.replace_inner(left);
|
|
|
|
let created = self.create(right);
|
|
|
|
self.mutations.push(Mutation::ReplaceWith {
|
|
|
|
id: first,
|
|
|
|
m: created,
|
|
|
|
});
|
2022-11-29 21:31:04 +00:00
|
|
|
self.try_reclaim(id);
|
|
|
|
}
|
|
|
|
|
|
|
|
fn replace_many(&mut self, left: &'b VNode<'b>, right: &'b [VNode<'b>]) {
|
|
|
|
let first = self.find_first_element(left);
|
|
|
|
let id = self.replace_inner(left);
|
|
|
|
let created = self.create_children(right);
|
|
|
|
self.mutations.push(Mutation::ReplaceWith {
|
|
|
|
id: first,
|
|
|
|
m: created,
|
|
|
|
});
|
|
|
|
self.try_reclaim(id);
|
2022-11-23 03:59:56 +00:00
|
|
|
}
|
2022-11-02 01:42:29 +00:00
|
|
|
}
|
2022-11-20 23:58:05 +00:00
|
|
|
|
2022-12-13 02:31:30 +00:00
|
|
|
/// Are the templates the same?
|
|
|
|
///
|
|
|
|
/// We need to check for the obvious case, and the non-obvious case where the template as cloned
|
|
|
|
///
|
|
|
|
/// We use the pointer of the dynamic_node list in this case
|
|
|
|
fn templates_are_the_same<'b>(left_template: &'b VNode<'b>, right_template: &'b VNode<'b>) -> bool {
|
|
|
|
std::ptr::eq(left_template, right_template)
|
|
|
|
|| std::ptr::eq(left_template.dynamic_nodes, right_template.dynamic_nodes)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Do the two vnodes come from different templates? Templates are keyed by name,
/// with a pointer-equality fast path for the common same-`&'static str` case.
fn templates_are_different(left_template: &VNode, right_template: &VNode) -> bool {
    let left_name = left_template.template.name;
    let right_name = right_template.template.name;
    // Same pointer (fast path) or same string contents means the same template
    let same_template = std::ptr::eq(left_name, right_name) || left_name == right_name;
    !same_template
}
|
|
|
|
|
2022-11-20 23:58:05 +00:00
|
|
|
/// If every root of `left` and `right` is a dynamic component node, return the
/// paired-up components; otherwise return `None`.
///
/// Used to decide whether two differing templates can still be diffed
/// component-by-component instead of being replaced wholesale.
fn matching_components<'a>(
    left: &'a VNode<'a>,
    right: &'a VNode<'a>,
) -> Option<Vec<(&'a VComponent<'a>, &'a VComponent<'a>)>> {
    // Different root counts can never pair up
    if left.template.roots.len() != right.template.roots.len() {
        return None;
    }

    // run through the components, ensuring they're the same
    // Note: collecting an iterator of Option<T> into Option<Vec<T>> short-circuits
    // to None on the first non-component root.
    left.template
        .roots
        .iter()
        .zip(right.template.roots.iter())
        .map(|(l, r)| {
            // Both roots must be dynamic placeholders in the template...
            let (l, r) = match (l, r) {
                (TemplateNode::Dynamic { id: l }, TemplateNode::Dynamic { id: r }) => (l, r),
                _ => return None,
            };

            // ...and both dynamic nodes must be components
            let (l, r) = match (&left.dynamic_nodes[*l], &right.dynamic_nodes[*r]) {
                (Component(l), Component(r)) => (l, r),
                _ => return None,
            };

            Some((l, r))
        })
        .collect()
}
|
|
|
|
|
|
|
|
/// We can apply various optimizations to dynamic nodes that are the single child of their parent.
///
/// IE
/// - for text - we can use SetTextContent
/// - for clearning children we can use RemoveChildren
/// - for appending children we can use AppendChildren
#[allow(dead_code)]
fn is_dyn_node_only_child(node: &VNode, idx: usize) -> bool {
    // The template path locates dynamic node `idx`: path[0] picks the root,
    // subsequent entries index into children.
    let path = node.template.node_paths[idx];

    // use a loop to index every static node's children until the path has run out
    // only break if the last path index is a dynamic node
    let mut static_node = &node.template.roots[path[0] as usize];

    // Walk to the dynamic node's *parent* (the last path entry indexes the dynamic
    // node itself, so it is excluded here).
    // NOTE(review): for a length-1 path the dynamic node IS the root and this check
    // inspects the node's own children — confirm callers never pass root-level paths.
    for i in 1..path.len() - 1 {
        match static_node {
            TemplateNode::Element { children, .. } => static_node = &children[path[i] as usize],
            // Path descends through a non-element: cannot be an only child
            _ => return false,
        }
    }

    // The dynamic node is an only child iff its parent element has exactly one child
    match static_node {
        TemplateNode::Element { children, .. } => children.len() == 1,
        _ => false,
    }
}
|