perf: remove global allocation for props

Jonathan Kelley 2021-09-13 00:59:08 -04:00
parent 369b36b2c6
commit 8b3ac0b57c
15 changed files with 352 additions and 105 deletions

View file

@ -15,7 +15,7 @@ Dioxus-core builds off the many frameworks that came before it. Notably, Dioxus
- Preact: approach for normalization and ref
- Yew: passion and inspiration ❤️
Dioxus-core leverages some really cool techniques and hits a very high level of parity with mature frameworks. Some unique features include:
Dioxus-core leverages some really cool techniques and hits a very high level of parity with mature frameworks. However, Dioxus also brings some new unique features:
- managed lifetimes for borrowed data
- suspended nodes (task/fiber endpoints) for asynchronous vnodes
@ -24,7 +24,7 @@ Dioxus-core leverages some really cool techniques and hits a very high level of
- slab allocator for scopes
- mirrored-slab approach for remote vdoms
There's certainly more to the story, but these optimizations make Dioxus memory use and allocation count extremely minimal. For an average application, it is likely that zero allocations will need to be performed once the app has been mounted. Only when new components are added to the dom will allocations occur - and only en masse. The space of old VNodes is dynamically recycled as new nodes are added. Additionally, Dioxus tracks the average memory footprint of previous components to estimate how much memory to allocate for future components.
There's certainly more to the story, but these optimizations make Dioxus memory use and allocation count extremely minimal. For an average application, it is possible that zero allocations will need to be performed once the app has been mounted. Only when new components are added to the dom will allocations occur - and only en masse. The space of old VNodes is dynamically recycled as new nodes are added. Additionally, Dioxus tracks the average memory footprint of previous components to estimate how much memory to allocate for future components.
All in all, Dioxus treats memory as an incredibly valuable resource. Combined with the memory-efficient footprint of WASM compilation, Dioxus apps can scale to thousands of components and still stay snappy and respect your RAM usage.
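As a rough illustration of the footprint-estimation idea, the sketch below pre-sizes a bump arena from the average of previous renders. The type and method names are hypothetical, not Dioxus internals, and the arena type is assumed to be bumpalo's Bump:

use bumpalo::Bump;

// Hypothetical bookkeeping, not a Dioxus type: bytes used by past renders of a component.
struct FootprintTracker {
    samples: Vec<usize>,
}

impl FootprintTracker {
    fn record(&mut self, bytes_used: usize) {
        self.samples.push(bytes_used);
    }

    // Average of previous frames, falling back to a small default for the first render.
    fn estimate(&self) -> usize {
        if self.samples.is_empty() {
            512
        } else {
            self.samples.iter().sum::<usize>() / self.samples.len()
        }
    }
}

// Pre-sizing the next arena from the estimate avoids mid-render growth allocations.
fn arena_for_next_render(tracker: &FootprintTracker) -> Bump {
    Bump::with_capacity(tracker.estimate())
}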

View file

@ -109,6 +109,18 @@ pub(crate) struct DiffMachine<'bump> {
pub mutations: Mutations<'bump>,
pub stack: DiffStack<'bump>,
pub seen_scopes: FxHashSet<ScopeId>,
pub cfg: DiffCfg,
}
pub(crate) struct DiffCfg {
force_diff: bool,
}
impl Default for DiffCfg {
fn default() -> Self {
Self {
force_diff: Default::default(),
}
}
}
/// a "saved" form of a diff machine
@ -129,6 +141,7 @@ impl<'a> SavedDiffWork<'a> {
let extended: SavedDiffWork<'b> = std::mem::transmute(self);
DiffMachine {
vdom,
cfg: DiffCfg::default(),
mutations: extended.mutations,
stack: extended.stack,
seen_scopes: extended.seen_scopes,
@ -140,6 +153,7 @@ impl<'bump> DiffMachine<'bump> {
pub(crate) fn new(mutations: Mutations<'bump>, shared: &'bump ResourcePool) -> Self {
Self {
mutations,
cfg: DiffCfg::default(),
stack: DiffStack::new(),
vdom: shared,
seen_scopes: FxHashSet::default(),

View file

@ -28,6 +28,7 @@ pub mod scheduler;
pub mod scope;
pub mod test_dom;
pub mod util;
pub mod vdomdisplay;
pub mod virtual_dom;
pub(crate) mod innerlude {

View file

@ -267,7 +267,7 @@ pub struct VComponent<'src> {
// Function pointer to the FC that was used to generate this component
pub user_fc: *const (),
pub(crate) caller: Rc<dyn Fn(&Scope) -> DomTree>,
pub(crate) caller: &'src dyn for<'b> Fn(&'b Scope) -> DomTree<'b>,
pub(crate) children: &'src [VNode<'src>],
@ -501,11 +501,16 @@ impl<'a> NodeFactory<'a> {
let key = key.map(|f| self.raw_text(f).0);
let caller = NodeFactory::create_component_caller(component, raw_props);
let caller: &'a mut dyn for<'b> Fn(&'b Scope) -> DomTree<'b> =
bump.alloc(move |scope: &Scope| -> DomTree {
let props: &'_ P = unsafe { &*(raw_props as *const P) };
let res = component(Context { props, scope });
unsafe { std::mem::transmute(res) }
});
let can_memoize = children.len() == 0 && P::IS_STATIC;
VNode::Component(bump.alloc_with(|| VComponent {
VNode::Component(bump.alloc(VComponent {
user_fc,
comparator,
raw_props,
@ -519,24 +524,6 @@ impl<'a> NodeFactory<'a> {
}))
}
pub(crate) fn create_component_caller<'g, P: 'g>(
component: FC<P>,
raw_props: *const (),
) -> Rc<dyn for<'r> Fn(&'r Scope) -> DomTree<'r>> {
type Captured<'a> = Rc<dyn for<'r> Fn(&'r Scope) -> DomTree<'r> + 'a>;
let caller: Captured = Rc::new(move |scp: &Scope| -> DomTree {
// cast back into the right lifetime
let safe_props: &'_ P = unsafe { &*(raw_props as *const P) };
let cx: Context<P> = Context {
props: safe_props,
scope: scp,
};
let res = component(cx);
unsafe { std::mem::transmute(res) }
});
unsafe { std::mem::transmute::<_, Captured<'static>>(caller) }
}
pub fn fragment_from_iter(self, node_iter: impl IntoVNodeList<'a>) -> VNode<'a> {
let children = node_iter.into_vnode_list(self);

View file

@ -157,7 +157,7 @@ pub(crate) struct Scheduler {
}
impl Scheduler {
pub fn new() -> Self {
pub(crate) fn new() -> Self {
/*
Preallocate 2000 elements and 100 scopes to avoid dynamic allocation.
Perhaps this should be configurable from some external config?
@ -214,12 +214,14 @@ impl Scheduler {
channel,
};
let mut async_tasks = FuturesUnordered::new();
Self {
pool,
receiver,
async_tasks: FuturesUnordered::new(),
async_tasks,
pending_garbage: FxHashSet::default(),
@ -283,6 +285,8 @@ impl Scheduler {
| "drop" | "mousedown" | "mouseenter" | "mouseleave" | "mousemove"
| "mouseout" | "mouseover" | "mouseup" => EventPriority::Low,
"mousemove" => EventPriority::Medium,
// Pointer
"pointerdown" | "pointermove" | "pointerup" | "pointercancel"
| "gotpointercapture" | "lostpointercapture" | "pointerenter"
@ -360,10 +364,10 @@ impl Scheduler {
fn load_current_lane(&mut self) -> &mut PriorityLane {
match self.current_priority {
EventPriority::Immediate => todo!(),
EventPriority::High => todo!(),
EventPriority::Medium => todo!(),
EventPriority::Low => todo!(),
EventPriority::Immediate => &mut self.lanes[0],
EventPriority::High => &mut self.lanes[1],
EventPriority::Medium => &mut self.lanes[2],
EventPriority::Low => &mut self.lanes[3],
}
}
@ -426,6 +430,14 @@ impl Scheduler {
- If there are no pending discrete events, then check for continuous events. These can be completely batched
- we batch completely until we run into a discrete event
- all continuous events are batched together
- so D C C C C C would be two separate phases - D and one batched C, i.e. onclick and onscroll
- D C C C C C C D C C C D would be D C D C D in 5 distinct phases.
- listener bubbling is not currently implemented properly and will need to be implemented in the future
- we need to keep track of element parents to be able to traverse properly
Open questions:
- what if we get two clicks from the component during the same slice?
@ -434,18 +446,19 @@ impl Scheduler {
- but if we received both - then we don't need to diff, do we? run as many as we can and then finally diff?
*/
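
To make the phase grouping concrete, here is a small illustrative sketch with a toy event enum; the real scheduler works over its own trigger and priority types, so this is only a model of the batching rule described above:

#[derive(PartialEq)]
enum Ev {
    Discrete,   // e.g. onclick
    Continuous, // e.g. onscroll, onmousemove
}

// Each discrete event is its own phase; a run of continuous events between
// discrete events collapses into a single batched phase.
fn phases(events: &[Ev]) -> Vec<Vec<&Ev>> {
    let mut out: Vec<Vec<&Ev>> = Vec::new();
    for ev in events {
        match ev {
            Ev::Discrete => out.push(vec![ev]),
            Ev::Continuous => match out.last_mut() {
                Some(batch) if batch[0] == &Ev::Continuous => batch.push(ev),
                _ => out.push(vec![ev]),
            },
        }
    }
    out
}

#[test]
fn d_c_grouping() {
    use Ev::*;
    // D C C C C C C D C C C D  ->  D, C-batch, D, C-batch, D  (5 phases)
    let queue = [
        Discrete, Continuous, Continuous, Continuous, Continuous, Continuous,
        Continuous, Discrete, Continuous, Continuous, Continuous, Discrete,
    ];
    assert_eq!(phases(&queue).len(), 5);
}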
let mut committed_mutations = Vec::<Mutations<'static>>::new();
let mut deadline = Box::pin(deadline.fuse());
pin_mut!(deadline);
loop {
// Internalize any pending work since the last time we ran
self.manually_poll_events();
// Wait for any new events if we have nothing to do
// todo: poll the events once even if there is work to do to prevent starvation
if !self.has_any_work() {
let deadline_expired = self.wait_for_any_trigger(&mut deadline).await;
if deadline_expired {
return committed_mutations;
futures_util::select! {
msg = self.async_tasks.next() => {}
msg = self.receiver.next() => self.handle_channel_msg(msg.unwrap()),
_ = (&mut deadline).fuse() => return committed_mutations,
}
}
@ -522,34 +535,6 @@ impl Scheduler {
}
}
// waits for a trigger, canceling early if the deadline is reached
// returns true if the deadline was reached
// does not return the trigger, but caches it in the scheduler
pub async fn wait_for_any_trigger(
&mut self,
deadline: &mut Pin<Box<impl FusedFuture<Output = ()>>>,
) -> bool {
use futures_util::future::{select, Either};
let event_fut = async {
match select(self.receiver.next(), self.async_tasks.next()).await {
Either::Left((msg, _other)) => {
self.handle_channel_msg(msg.unwrap());
}
Either::Right((task, _other)) => {
// do nothing, async task will likely generate a set of scheduler messages
}
}
};
pin_mut!(event_fut);
match select(event_fut, deadline).await {
Either::Left((msg, _other)) => false,
Either::Right((deadline, _)) => true,
}
}
pub fn current_lane(&mut self) -> &mut PriorityLane {
match self.current_priority {
EventPriority::Immediate => &mut self.lanes[0],
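
The loop above now waits inline with futures_util::select! instead of the removed wait_for_any_trigger helper. A self-contained sketch of that waiting pattern, with generic stand-ins for the scheduler's task and message types:

use std::future::Future;
use futures_util::stream::{FusedStream, FuturesUnordered};
use futures_util::{pin_mut, select, FutureExt, Stream, StreamExt};

// Returns true if the deadline expired before any task progress or channel message.
async fn wait_for_work<T, Task: Future<Output = ()>>(
    tasks: &mut FuturesUnordered<Task>,
    receiver: &mut (impl Stream<Item = T> + FusedStream + Unpin),
    deadline: impl Future<Output = ()>,
) -> bool {
    // pin and fuse the deadline so select! can poll it alongside the two streams
    let deadline = deadline.fuse();
    pin_mut!(deadline);

    select! {
        // an internal async task made progress; it will enqueue its own scheduler messages
        _ = tasks.next() => false,
        // an external trigger (UI event, set_state, ...) arrived on the channel
        _ = receiver.next() => false,
        // out of time: the caller returns whatever mutations were committed so far
        _ = deadline => true,
    }
}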

View file

@ -4,6 +4,7 @@ use std::{
any::{Any, TypeId},
cell::RefCell,
collections::HashMap,
fmt::Formatter,
future::Future,
pin::Pin,
rc::Rc,
@ -29,7 +30,7 @@ pub struct Scope {
// an internal, highly efficient storage of vnodes
// lots of safety considerations
pub(crate) frames: ActiveFrame,
pub(crate) caller: Rc<WrappedCaller>,
pub(crate) caller: *const dyn for<'b> Fn(&'b Scope) -> DomTree<'b>,
pub(crate) child_nodes: ScopeChildren<'static>,
// Listeners
@ -47,7 +48,6 @@ pub struct Scope {
}
// The type of closure that wraps calling components
pub type WrappedCaller = dyn for<'b> Fn(&'b Scope) -> DomTree<'b>;
/// The type of task that gets sent to the task scheduler
/// Submitting a fiber task returns a handle to that task, which can be used to wake up suspended nodes
@ -62,7 +62,7 @@ impl Scope {
// Scopes cannot be made anywhere else except for this file
// Therefore, their lifetimes are connected exclusively to the virtual dom
pub(crate) fn new<'creator_node>(
caller: Rc<WrappedCaller>,
caller: &'creator_node dyn for<'b> Fn(&'b Scope) -> DomTree<'b>,
arena_idx: ScopeId,
parent: Option<ScopeId>,
height: u32,
@ -74,6 +74,9 @@ impl Scope {
let up = shared.schedule_any_immediate.clone();
let memoized_updater = Rc::new(move || up(arena_idx));
let caller = caller as *const _;
let caller = unsafe { std::mem::transmute(caller) };
Self {
memoized_updater,
shared,
@ -94,10 +97,12 @@ impl Scope {
pub(crate) fn update_scope_dependencies<'creator_node>(
&mut self,
caller: Rc<WrappedCaller>,
caller: &'creator_node dyn for<'b> Fn(&'b Scope) -> DomTree<'b>,
child_nodes: ScopeChildren,
) {
self.caller = caller;
let caller = caller as *const _;
self.caller = unsafe { std::mem::transmute(caller) };
let child_nodes = unsafe { child_nodes.extend_lifetime() };
self.child_nodes = child_nodes;
}
@ -120,7 +125,7 @@ impl Scope {
unsafe { self.frames.reset_wip_frame() };
// Cast the caller ptr from static to one with our own reference
let render: &WrappedCaller = self.caller.as_ref();
let render: &dyn for<'b> Fn(&'b Scope) -> DomTree<'b> = unsafe { &*self.caller };
match render(self) {
None => false,
@ -143,8 +148,6 @@ impl Scope {
/// References to hook data can only be stored in listeners and component props. During diffing, we make sure to log
/// all listeners and borrowed props so we can clear them here.
pub(crate) fn ensure_drop_safety(&mut self, pool: &ResourcePool) {
// todo!("arch changes");
// make sure we drop all borrowed props manually to guarantee that their drop implementation is called before we
// run the hooks (which hold an &mut reference)
// right now, we don't drop

View file

@ -0,0 +1,131 @@
use crate::innerlude::*;
// this is more or less a debug tool, but it'll render the entire tree to the terminal
impl std::fmt::Display for VirtualDom {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
struct ScopeRenderer<'a> {
scope: &'a Scope,
cfg: Cfg,
}
struct Cfg {
pre_render: bool,
newline: bool,
indent: bool,
max_depth: usize,
skip_components: bool,
show_fragments: bool,
}
impl<'a> ScopeRenderer<'a> {
fn html_render(
&self,
vdom: &VirtualDom,
node: &VNode,
f: &mut std::fmt::Formatter,
il: u16,
) -> std::fmt::Result {
const INDENT: &str = " ";
let write_indent = |_f: &mut std::fmt::Formatter, le| {
for _ in 0..le {
write!(_f, "{}", INDENT).unwrap();
}
};
match &node {
VNode::Text(text) => {
write_indent(f, il);
write!(f, "\"{}\"\n", text.text)?
}
VNode::Anchor(anchor) => {
write_indent(f, il);
write!(f, "Anchor {{}}\n")?;
}
VNode::Element(el) => {
write_indent(f, il);
write!(f, "{} {{\n", el.tag_name)?;
// write!(f, "element: {}", el.tag_name)?;
let mut attr_iter = el.attributes.iter().peekable();
while let Some(attr) = attr_iter.next() {
match attr.namespace {
None => {
//
write_indent(f, il + 1);
write!(f, "{}: \"{}\"\n", attr.name, attr.value)?
}
Some(ns) => {
// write the opening tag
write_indent(f, il + 1);
write!(f, " {}:\"", ns)?;
let mut cur_ns_el = attr;
'ns_parse: loop {
write!(f, "{}:{};", cur_ns_el.name, cur_ns_el.value)?;
match attr_iter.peek() {
Some(next_attr) if next_attr.namespace == Some(ns) => {
cur_ns_el = attr_iter.next().unwrap();
}
_ => break 'ns_parse,
}
}
// write the closing tag
write!(f, "\"")?;
}
}
}
for child in el.children {
self.html_render(vdom, child, f, il + 1)?;
}
write_indent(f, il);
write!(f, "}}\n")?;
}
VNode::Fragment(frag) => {
if self.cfg.show_fragments {
write_indent(f, il);
write!(f, "Fragment {{\n")?;
for child in frag.children {
self.html_render(vdom, child, f, il + 1)?;
}
write_indent(f, il);
write!(f, "}}\n")?;
} else {
for child in frag.children {
self.html_render(vdom, child, f, il)?;
}
}
}
VNode::Component(vcomp) => {
let idx = vcomp.associated_scope.get().unwrap();
if !self.cfg.skip_components {
let new_node = vdom.get_scope(idx).unwrap().root();
self.html_render(vdom, new_node, f, il)?;
}
}
VNode::Suspended { .. } => {
// we can't do anything with suspended nodes
}
}
Ok(())
}
}
let base = self.base_scope();
let root = base.root();
let renderer = ScopeRenderer {
scope: base,
cfg: Cfg {
show_fragments: false,
pre_render: false,
newline: true,
indent: true,
max_depth: usize::MAX,
skip_components: false,
},
};
renderer.html_render(self, root, f, 0)
}
}
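
Using the new Display impl amounts to building a dom, rebuilding it once, and printing it. The snippet below mirrors the please_work test added later in this commit:

use dioxus::prelude::*;
use dioxus_core as dioxus;
use dioxus_html as dioxus_elements;

static App: FC<()> = |cx| {
    cx.render(rsx! {
        div { "hello" }
    })
};

fn main() {
    let mut dom = VirtualDom::new(App);
    dom.rebuild();
    // walks the scope tree through the Display impl above and prints it indented
    println!("{}", dom);
}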

View file

@ -20,7 +20,6 @@
//! Additional functionality is defined in the respective files.
use crate::innerlude::*;
use futures_util::{Future, FutureExt};
use std::{any::Any, pin::Pin};
/// An integrated virtual node system that progresses events and diffs UI trees.
@ -60,6 +59,8 @@ pub struct VirtualDom {
root_fc: Box<dyn Any>,
root_caller: Box<dyn for<'b> Fn(&'b Scope) -> DomTree<'b> + 'static>,
root_props: Pin<Box<dyn Any>>,
}
@ -124,10 +125,14 @@ impl VirtualDom {
let root_props: Pin<Box<dyn Any>> = Box::pin(root_props);
let props_ptr = root_props.downcast_ref::<P>().unwrap() as *const P;
let root_caller: Box<dyn Fn(&Scope) -> DomTree> = Box::new(move |scope: &Scope| unsafe {
let props: &'_ P = &*(props_ptr as *const P);
std::mem::transmute(root(Context { props, scope }))
});
let base_scope = scheduler.pool.insert_scope_with_key(|myidx| {
let caller = NodeFactory::create_component_caller(root, props_ptr as *const _);
Scope::new(
caller,
root_caller.as_ref(),
myidx,
None,
0,
@ -137,6 +142,7 @@ impl VirtualDom {
});
Self {
root_caller,
root_fc: Box::new(root),
base_scope,
scheduler,
@ -186,9 +192,10 @@ impl VirtualDom {
let root = *self.root_fc.downcast_ref::<FC<P>>().unwrap();
let new_caller = NodeFactory::create_component_caller(root, props_ptr as *const _);
todo!();
root_scope.update_scope_dependencies(new_caller, ScopeChildren(&[]));
// let new_caller = NodeFactory::create_component_caller(root, props_ptr as *const _);
// root_scope.update_scope_dependencies(new_caller, ScopeChildren(&[]));
Some(self.rebuild())
} else {
@ -203,6 +210,8 @@ impl VirtualDom {
/// Tasks will not be polled with this method, nor will any events be processed from the event queue. Instead, the
/// root component will be run once and then diffed. All updates will flow out as mutations.
///
/// All state stored in components will be completely wiped away.
///
/// # Example
/// ```
/// static App: FC<()> = |cx| cx.render(rsx!{ "hello world" });
@ -318,7 +327,7 @@ impl VirtualDom {
/// Mutations are the only link between the RealDOM and the VirtualDOM.
pub async fn run_with_deadline<'s>(
&'s mut self,
deadline: impl Future<Output = ()>,
deadline: impl std::future::Future<Output = ()>,
) -> Vec<Mutations<'s>> {
self.scheduler.work_with_deadline(deadline).await
}
@ -327,13 +336,19 @@ impl VirtualDom {
self.scheduler.pool.channel.sender.clone()
}
pub fn has_work(&self) -> bool {
true
}
/// Waits for the scheduler to have work
/// This lets us poll async tasks during idle periods without blocking the main thread.
pub async fn wait_for_work(&mut self) {
if self.scheduler.has_any_work() {
return;
}
pub async fn wait_for_any_work(&mut self) {
let mut timeout = Box::pin(futures_util::future::pending().fuse());
self.scheduler.wait_for_any_trigger(&mut timeout).await;
use futures_util::StreamExt;
futures_util::select! {
// hmm - will this resolve to none if there are no async tasks?
_ = self.scheduler.async_tasks.next() => {}
msg = self.scheduler.receiver.next() => self.scheduler.handle_channel_msg(msg.unwrap()),
}
}
}

View file

@ -1,4 +1,5 @@
//! Prove that the dom works normally through virtualdom methods.
//!
//! These methods all use "rebuild", which completely bypasses the scheduler.
//! Hard rebuilds don't consume any events from the event queue.
@ -45,8 +46,8 @@ fn test_original_diff() {
);
}
#[async_std::test]
async fn create() {
#[test]
fn create() {
static APP: FC<()> = |cx| {
cx.render(rsx! {
div {
@ -96,8 +97,8 @@ async fn create() {
);
}
#[async_std::test]
async fn create_list() {
#[test]
fn create_list() {
static APP: FC<()> = |cx| {
cx.render(rsx! {
{(0..3).map(|f| rsx!{ div {
@ -136,8 +137,8 @@ async fn create_list() {
);
}
#[async_std::test]
async fn create_simple() {
#[test]
fn create_simple() {
static APP: FC<()> = |cx| {
cx.render(rsx! {
div {}
@ -162,9 +163,8 @@ async fn create_simple() {
]
);
}
#[async_std::test]
async fn create_components() {
#[test]
fn create_components() {
static App: FC<()> = |cx| {
cx.render(rsx! {
Child { "abc1" }
@ -215,9 +215,8 @@ async fn create_components() {
]
);
}
#[async_std::test]
async fn anchors() {
#[test]
fn anchors() {
static App: FC<()> = |cx| {
cx.render(rsx! {
{true.then(|| rsx!{ div { "hello" } })}
@ -242,8 +241,8 @@ async fn anchors() {
);
}
#[async_std::test]
async fn suspended() {
#[test]
fn suspended() {
static App: FC<()> = |cx| {
let val = use_suspense(
cx,

View file

@ -0,0 +1,42 @@
//! test that we can display the virtualdom properly
//!
//!
//!
use std::{cell::RefCell, rc::Rc};
use anyhow::{Context, Result};
use dioxus::prelude::*;
use dioxus_core as dioxus;
use dioxus_html as dioxus_elements;
mod test_logging;
const IS_LOGGING_ENABLED: bool = true;
#[test]
fn please_work() {
static App: FC<()> = |cx| {
cx.render(rsx! {
div {
hidden: "true"
"hello"
div { "hello" }
Child {}
Child {}
Child {}
}
div { "hello" }
})
};
static Child: FC<()> = |cx| {
cx.render(rsx! {
div { "child" }
})
};
let mut dom = VirtualDom::new(App);
dom.rebuild();
println!("{}", dom);
}

View file

@ -1,10 +1,15 @@
use std::{cell::RefCell, rc::Rc};
use anyhow::{Context, Result};
use dioxus::prelude::*;
use dioxus_core as dioxus;
use dioxus_html as dioxus_elements;
type Shared<T> = Rc<RefCell<T>>;
#[test]
fn sample_refs() {
// static App: FC<()> = |cx| {
// let div_ref = use_node_ref::<MyRef, _>(cx);

View file

@ -1 +1,43 @@
//! Tests for the lifecycle of components.
use std::{cell::RefCell, rc::Rc};
use anyhow::{Context, Result};
use dioxus::prelude::*;
use dioxus_core as dioxus;
use dioxus_html as dioxus_elements;
mod test_logging;
const IS_LOGGING_ENABLED: bool = true;
type Shared<T> = Rc<RefCell<T>>;
#[test]
fn manual_diffing() {
#[derive(PartialEq, Props)]
struct AppProps {
value: Shared<&'static str>,
}
static App: FC<AppProps> = |cx| {
let val = cx.value.borrow();
cx.render(rsx! { div { "{val}" } })
};
test_logging::set_up_logging(IS_LOGGING_ENABLED);
let mut value = Rc::new(RefCell::new("Hello"));
let mut dom = VirtualDom::new_with_props(
App,
AppProps {
value: value.clone(),
},
);
let _ = dom.rebuild();
*value.borrow_mut() = "goodbye";
let edits = dom.diff();
log::debug!("edits: {:?}", edits);
}

View file

@ -1 +1,6 @@
//! Tests for the scheduler.
//!
//! TODO
//! - priority lanes
//! - periodic checking
//!

View file

@ -3,7 +3,7 @@
/// We intern all the HTML tags and attributes, making most operations much faster.
///
/// Interning takes about 1ms at the start of the app, but saves a *ton* of time later on.
pub fn intern_cache() {
pub fn intern_cached_strings() {
let cached_words = [
// All the HTML Tags
"a",
@ -222,13 +222,11 @@ pub fn intern_cache() {
"onmousewheel",
"onoffline",
"ononline",
"<body>",
"onpageshow",
"onpaste",
"onpause",
"onplay",
"onplaying",
"<body>",
"onprogress",
"onratechange",
"onreset",
@ -239,7 +237,6 @@ pub fn intern_cache() {
"onseeking",
"onselect",
"onstalled",
"<body>",
"onsubmit",
"onsuspend",
"ontimeupdate",
@ -284,6 +281,21 @@ pub fn intern_cache() {
"value",
"width",
"wrap",
"0",
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9",
"10",
"11",
"12",
"13",
"14",
];
for s in cached_words {
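
The hunk cuts off before the loop body. For illustration only (the actual body is not shown in this diff), a wasm-bindgen interning pass usually looks like the following sketch, using wasm_bindgen::intern:

fn intern_all(words: &[&str]) {
    for s in words {
        // wasm_bindgen::intern caches the JS copy of the string, so later
        // conversions of this &str into a JsValue can reuse the cached object
        wasm_bindgen::intern(s);
    }
}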

View file

@ -56,7 +56,7 @@ use std::rc::Rc;
pub use crate::cfg::WebConfig;
use crate::dom::load_document;
use cache::intern_cache;
use cache::intern_cached_strings;
use dioxus::prelude::Properties;
use dioxus::virtual_dom::VirtualDom;
pub use dioxus_core as dioxus;
@ -115,7 +115,7 @@ where
pub async fn run_with_props<T: Properties + 'static>(root: FC<T>, root_props: T, cfg: WebConfig) {
let mut dom = VirtualDom::new_with_props(root, root_props);
intern_cache();
intern_cached_strings();
let hydrating = cfg.hydrate;
@ -134,16 +134,22 @@ pub async fn run_with_props<T: Properties + 'static>(root: FC<T>, root_props: T,
}
let work_loop = ric_raf::RafLoop::new();
loop {
// if virtualdom has nothing, wait for it to have something before requesting idle time
if !dom.has_work() {
dom.wait_for_any_work().await;
}
// if there is work then this future resolves immediately.
dom.wait_for_work().await;
// wait for the mainthread to schedule us in
let deadline = work_loop.wait_for_idle_time().await;
// run the virtualdom work phase until the frame deadline is reached
let mut mutations = dom.run_with_deadline(deadline).await;
// wait for the animation frame to fire so we can apply our changes
work_loop.wait_for_raf().await;
// actually apply our changes during the animation frame
websys_dom.process_edits(&mut mutations[0].edits);
}
}