diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 718922a57..6ff2b1e9c 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -1,6 +1,17 @@ name: macOS tests on: + push: + branches: + - master + paths: + - packages/** + - examples/** + - src/** + - .github/** + - lib.rs + - Cargo.toml + pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 49ed00901..27116f17d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,6 +1,17 @@ name: Rust CI on: + push: + branches: + - master + paths: + - packages/** + - examples/** + - src/** + - .github/** + - lib.rs + - Cargo.toml + pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 75953807a..961adee97 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -1,6 +1,17 @@ name: windows on: + push: + branches: + - master + paths: + - packages/** + - examples/** + - src/** + - .github/** + - lib.rs + - Cargo.toml + pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: diff --git a/Cargo.toml b/Cargo.toml index d4e41af61..d4d3f874e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,8 +16,8 @@ dioxus-core = { path = "./packages/core", version = "^0.1.9" } dioxus-html = { path = "./packages/html", version = "^0.1.6", optional = true } dioxus-core-macro = { path = "./packages/core-macro", version = "^0.1.7", optional = true } dioxus-hooks = { path = "./packages/hooks", version = "^0.1.7", optional = true } -dioxus-rsx = { path = "./packages/rsx", optional = true } fermi = { path = "./packages/fermi", version = "^0.1.0", optional = true } +# dioxus-rsx = { path = "./packages/rsx", optional = true } dioxus-web = { path = "./packages/web", version = "^0.0.5", optional = true } dioxus-desktop = { path = "./packages/desktop", version = "^0.1.6", optional = true } @@ -31,7 +31,8 @@ dioxus-interpreter-js = { path = "./packages/interpreter", version = "^0.0.0", o [features] default = ["macro", "hooks", "html"] -macro = ["dioxus-core-macro", "dioxus-rsx"] +macro = ["dioxus-core-macro"] +# macro = ["dioxus-core-macro", "dioxus-rsx"] hooks = ["dioxus-hooks"] html = ["dioxus-html"] ssr = ["dioxus-ssr"] diff --git a/README.md b/README.md index 23cd9c516..4ff7c0bed 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,5 @@

   Dioxus

-  Frontend that scales.

@@ -160,9 +157,9 @@ You shouldn't use Dioxus if: ## Comparison with other Rust UI frameworks Dioxus primarily emphasizes **developer experience** and **familiarity with React principles**. -- [Yew](https://github.com/yewstack/yew): prefers the elm pattern instead of React-hooks, no borrowed props, supports SSR (no hydration). +- [Yew](https://github.com/yewstack/yew): prefers the elm pattern instead, no borrowed props, supports SSR (no hydration), no direct desktop/mobile support. - [Percy](https://github.com/chinedufn/percy): Supports SSR but with less emphasis on state management and event handling. -- [Sycamore](https://github.com/sycamore-rs/sycamore): VDOM-less using fine-grained reactivity, but lacking in ergonomics. +- [Sycamore](https://github.com/sycamore-rs/sycamore): VDOM-less using fine-grained reactivity, but no direct support for desktop/mobile. - [Dominator](https://github.com/Pauan/rust-dominator): Signal-based zero-cost alternative, less emphasis on community and docs. - [Azul](https://azul.rs): Fully native HTML/CSS renderer for desktop applications, no support for web/ssr diff --git a/examples/login_form.rs b/examples/login_form.rs index 7fd91262d..0a0c77e87 100644 --- a/examples/login_form.rs +++ b/examples/login_form.rs @@ -12,7 +12,7 @@ fn app(cx: Scope) -> Element { let onsubmit = move |evt: FormEvent| { cx.spawn(async move { let resp = reqwest::Client::new() - .post("http://localhost/login") + .post("http://localhost:8080/login") .form(&[ ("username", &evt.values["username"]), ("password", &evt.values["password"]), @@ -22,10 +22,12 @@ fn app(cx: Scope) -> Element { match resp { // Parse data from here, such as storing a response token - Ok(_data) => println!("Login successful"), + Ok(_data) => println!("Login successful!"), //Handle any errors from the fetch here - Err(_err) => println!("Login failed"), + Err(_err) => { + println!("Login failed - you need a login server running on localhost:8080.") + } } }); }; @@ -36,10 +38,10 @@ fn app(cx: Scope) -> Element { onsubmit: onsubmit, prevent_default: "onsubmit", // Prevent the default behavior of
to post - input { "type": "text" } + input { "type": "text", id: "username", name: "username" } label { "Username" } br {} - input { "type": "password" } + input { "type": "password", id: "password", name: "password" } label { "Password" } br {} button { "Login" } diff --git a/examples/textarea.rs b/examples/textarea.rs new file mode 100644 index 000000000..5988149ba --- /dev/null +++ b/examples/textarea.rs @@ -0,0 +1,23 @@ +// How to use textareas + +use dioxus::prelude::*; + +fn main() { + dioxus::desktop::launch(app); +} + +fn app(cx: Scope) -> Element { + let (model, set_model) = use_state(&cx, || String::from("asd")); + + println!("{}", model); + + cx.render(rsx! { + textarea { + class: "border", + rows: "10", + cols: "80", + value: "{model}", + oninput: move |e| set_model(e.value.clone()), + } + }) +} diff --git a/examples/todomvc.rs b/examples/todomvc.rs index 937d6992c..f0109eb4b 100644 --- a/examples/todomvc.rs +++ b/examples/todomvc.rs @@ -129,7 +129,6 @@ pub fn todo_entry<'a>(cx: Scope<'a, TodoEntryProps<'a>>) -> Element { label { r#for: "cbg-{todo.id}", onclick: move |_| set_is_editing(true), - onfocusout: move |_| set_is_editing(false), "{todo.contents}" } } @@ -139,6 +138,7 @@ pub fn todo_entry<'a>(cx: Scope<'a, TodoEntryProps<'a>>) -> Element { value: "{todo.contents}", oninput: move |evt| cx.props.set_todos.make_mut()[&cx.props.id].contents = evt.value.clone(), autofocus: "true", + onfocusout: move |_| set_is_editing(false), onkeydown: move |evt| { match evt.key.as_str() { "Enter" | "Escape" | "Tab" => set_is_editing(false), diff --git a/packages/core-macro/Cargo.toml b/packages/core-macro/Cargo.toml index ed5d34e53..e6ff29a22 100644 --- a/packages/core-macro/Cargo.toml +++ b/packages/core-macro/Cargo.toml @@ -15,8 +15,8 @@ keywords = ["dom", "ui", "gui", "react", "wasm"] proc-macro = true [dependencies] -dioxus-rsx = { path = "../rsx" } -proc-macro-error = "1.0.4" +# dioxus-rsx = { path = "../rsx" } +proc-macro-error = "1" proc-macro2 = { version = "1.0.6" } quote = "1.0" syn = { version = "1.0.11", features = ["full", "extra-traits"] } diff --git a/packages/core-macro/src/lib.rs b/packages/core-macro/src/lib.rs index 96ea0ce87..931933386 100644 --- a/packages/core-macro/src/lib.rs +++ b/packages/core-macro/src/lib.rs @@ -5,6 +5,7 @@ use syn::parse_macro_input; mod ifmt; mod inlineprops; mod props; +mod rsx; #[proc_macro] pub fn format_args_f(input: TokenStream) -> TokenStream { @@ -178,7 +179,7 @@ pub fn derive_typed_builder(input: proc_macro::TokenStream) -> proc_macro::Token #[proc_macro_error::proc_macro_error] #[proc_macro] pub fn rsx(s: TokenStream) -> TokenStream { - match syn::parse::(s) { + match syn::parse::(s) { Err(err) => err.to_compile_error().into(), Ok(stream) => stream.to_token_stream().into(), } diff --git a/packages/core-macro/src/rsx/component.rs b/packages/core-macro/src/rsx/component.rs new file mode 100644 index 000000000..17006e6ac --- /dev/null +++ b/packages/core-macro/src/rsx/component.rs @@ -0,0 +1,234 @@ +//! Parse components into the VComponent VNode +//! ========================================== +//! +//! This parsing path emerges from [`AmbiguousElement`] which supports validation of the vcomponent format. +//! We can be reasonably sure that whatever enters this parsing path is in the right format. +//! This feature must support +//! - [x] Namespaced components +//! - [x] Fields +//! - [x] Componentbuilder synax +//! - [x] Optional commas +//! - [ ] Children +//! - [ ] Keys +//! 
- [ ] Properties spreading with with `..` syntax + +use super::*; + +use proc_macro2::TokenStream as TokenStream2; +use quote::{quote, ToTokens, TokenStreamExt}; +use syn::{ + ext::IdentExt, + parse::{Parse, ParseBuffer, ParseStream}, + token, Expr, Ident, LitStr, Result, Token, +}; + +pub struct Component { + pub name: syn::Path, + pub body: Vec, + pub children: Vec, + pub manual_props: Option, +} + +impl Parse for Component { + fn parse(stream: ParseStream) -> Result { + let name = syn::Path::parse_mod_style(stream)?; + + let content: ParseBuffer; + + // if we see a `{` then we have a block + // else parse as a function-like call + if stream.peek(token::Brace) { + syn::braced!(content in stream); + } else { + syn::parenthesized!(content in stream); + } + + let mut body = Vec::new(); + let mut children = Vec::new(); + let mut manual_props = None; + + while !content.is_empty() { + // if we splat into a component then we're merging properties + if content.peek(Token![..]) { + content.parse::()?; + manual_props = Some(content.parse::()?); + } else if content.peek(Ident) && content.peek2(Token![:]) && !content.peek3(Token![:]) { + body.push(content.parse::()?); + } else { + children.push(content.parse::()?); + } + + if content.peek(Token![,]) { + let _ = content.parse::(); + } + } + + Ok(Self { + name, + body, + children, + manual_props, + }) + } +} + +impl ToTokens for Component { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let name = &self.name; + + let mut has_key = None; + + let builder = match &self.manual_props { + Some(manual_props) => { + let mut toks = quote! { + let mut __manual_props = #manual_props; + }; + for field in &self.body { + if field.name == "key" { + has_key = Some(field); + } else { + let name = &field.name; + let val = &field.content; + toks.append_all(quote! { + __manual_props.#name = #val; + }); + } + } + toks.append_all(quote! { + __manual_props + }); + quote! {{ + #toks + }} + } + None => { + let mut toks = quote! { fc_to_builder(#name) }; + for field in &self.body { + match field.name.to_string().as_str() { + "key" => { + // + has_key = Some(field); + } + _ => toks.append_all(quote! {#field}), + } + } + + if !self.children.is_empty() { + let childs = &self.children; + toks.append_all(quote! { + .children(__cx.create_children([ #( #childs ),* ])) + }); + } + + toks.append_all(quote! { + .build() + }); + toks + } + }; + + let key_token = match has_key { + Some(field) => { + let inners = &field.content; + quote! { Some(format_args_f!(#inners)) } + } + None => quote! { None }, + }; + + let fn_name = self.name.segments.last().unwrap().ident.to_string(); + + tokens.append_all(quote! { + __cx.component( + #name, + #builder, + #key_token, + #fn_name + ) + }) + } +} + +// the struct's fields info +pub struct ComponentField { + name: Ident, + content: ContentField, +} + +enum ContentField { + ManExpr(Expr), + Formatted(LitStr), + OnHandlerRaw(Expr), +} + +impl ToTokens for ContentField { + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + ContentField::ManExpr(e) => e.to_tokens(tokens), + ContentField::Formatted(s) => tokens.append_all(quote! { + __cx.raw_text(format_args_f!(#s)).0 + }), + ContentField::OnHandlerRaw(e) => tokens.append_all(quote! 
{ + __cx.event_handler(#e) + }), + } + } +} + +impl Parse for ComponentField { + fn parse(input: ParseStream) -> Result { + let name = Ident::parse_any(input)?; + input.parse::()?; + + if name.to_string().starts_with("on") { + let content = ContentField::OnHandlerRaw(input.parse()?); + return Ok(Self { name, content }); + } + + if name == "key" { + let content = ContentField::ManExpr(input.parse()?); + return Ok(Self { name, content }); + } + + if input.peek(LitStr) && input.peek2(Token![,]) { + let t: LitStr = input.fork().parse()?; + + if is_literal_foramtted(&t) { + let content = ContentField::Formatted(input.parse()?); + return Ok(Self { name, content }); + } + } + + if input.peek(LitStr) && input.peek2(LitStr) { + let item = input.parse::().unwrap(); + proc_macro_error::emit_error!(item, "This attribute is missing a trailing comma") + } + + let content = ContentField::ManExpr(input.parse()?); + Ok(Self { name, content }) + } +} + +impl ToTokens for ComponentField { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let ComponentField { name, content, .. } = self; + tokens.append_all(quote! { + .#name(#content) + }) + } +} + +fn is_literal_foramtted(lit: &LitStr) -> bool { + let s = lit.value(); + let mut chars = s.chars(); + + while let Some(next) = chars.next() { + if next == '{' { + let nen = chars.next(); + if nen != Some('{') { + return true; + } + } + } + + false +} diff --git a/packages/core-macro/src/rsx/element.rs b/packages/core-macro/src/rsx/element.rs new file mode 100644 index 000000000..fa8fc9f51 --- /dev/null +++ b/packages/core-macro/src/rsx/element.rs @@ -0,0 +1,282 @@ +use super::*; + +use proc_macro2::TokenStream as TokenStream2; +use quote::{quote, ToTokens, TokenStreamExt}; +use syn::{ + parse::{Parse, ParseBuffer, ParseStream}, + Expr, Ident, LitStr, Result, Token, +}; + +// ======================================= +// Parse the VNode::Element type +// ======================================= +pub struct Element { + pub name: Ident, + pub key: Option, + pub attributes: Vec, + pub children: Vec, + pub _is_static: bool, +} + +impl Parse for Element { + fn parse(stream: ParseStream) -> Result { + let el_name = Ident::parse(stream)?; + + // parse the guts + let content: ParseBuffer; + syn::braced!(content in stream); + + let mut attributes: Vec = vec![]; + let mut children: Vec = vec![]; + let mut key = None; + let mut _el_ref = None; + + // parse fields with commas + // break when we don't get this pattern anymore + // start parsing bodynodes + // "def": 456, + // abc: 123, + loop { + // Parse the raw literal fields + if content.peek(LitStr) && content.peek2(Token![:]) && !content.peek3(Token![:]) { + let name = content.parse::()?; + let ident = name.clone(); + + content.parse::()?; + + if content.peek(LitStr) && content.peek2(Token![,]) { + let value = content.parse::()?; + attributes.push(ElementAttrNamed { + el_name: el_name.clone(), + attr: ElementAttr::CustomAttrText { name, value }, + }); + } else { + let value = content.parse::()?; + + attributes.push(ElementAttrNamed { + el_name: el_name.clone(), + attr: ElementAttr::CustomAttrExpression { name, value }, + }); + } + + if content.is_empty() { + break; + } + + // todo: add a message saying you need to include commas between fields + if content.parse::().is_err() { + proc_macro_error::emit_error!( + ident, + "This attribute is missing a trailing comma" + ) + } + continue; + } + + if content.peek(Ident) && content.peek2(Token![:]) && !content.peek3(Token![:]) { + let name = content.parse::()?; + let ident = 
name.clone(); + + let name_str = name.to_string(); + content.parse::()?; + + if name_str.starts_with("on") { + attributes.push(ElementAttrNamed { + el_name: el_name.clone(), + attr: ElementAttr::EventTokens { + name, + tokens: content.parse()?, + }, + }); + } else { + match name_str.as_str() { + "key" => { + key = Some(content.parse()?); + } + "classes" => todo!("custom class list not supported yet"), + // "namespace" => todo!("custom namespace not supported yet"), + "node_ref" => { + _el_ref = Some(content.parse::()?); + } + _ => { + if content.peek(LitStr) { + attributes.push(ElementAttrNamed { + el_name: el_name.clone(), + attr: ElementAttr::AttrText { + name, + value: content.parse()?, + }, + }); + } else { + attributes.push(ElementAttrNamed { + el_name: el_name.clone(), + attr: ElementAttr::AttrExpression { + name, + value: content.parse()?, + }, + }); + } + } + } + } + + if content.is_empty() { + break; + } + + // todo: add a message saying you need to include commas between fields + if content.parse::().is_err() { + proc_macro_error::emit_error!( + ident, + "This attribute is missing a trailing comma" + ) + } + continue; + } + + break; + } + + while !content.is_empty() { + if (content.peek(LitStr) && content.peek2(Token![:])) && !content.peek3(Token![:]) { + let ident = content.parse::().unwrap(); + let name = ident.value(); + proc_macro_error::emit_error!( + ident, "This attribute `{}` is in the wrong place.", name; + help = +"All attribute fields must be placed above children elements. + + div { + attr: \"...\", <---- attribute is above children + div { } <---- children are below attributes + }"; + ) + } + + if (content.peek(Ident) && content.peek2(Token![:])) && !content.peek3(Token![:]) { + let ident = content.parse::().unwrap(); + let name = ident.to_string(); + proc_macro_error::emit_error!( + ident, "This attribute `{}` is in the wrong place.", name; + help = +"All attribute fields must be placed above children elements. + + div { + attr: \"...\", <---- attribute is above children + div { } <---- children are below attributes + }"; + ) + } + + children.push(content.parse::()?); + // consume comma if it exists + // we don't actually care if there *are* commas after elements/text + if content.peek(Token![,]) { + let _ = content.parse::(); + } + } + + Ok(Self { + key, + name: el_name, + attributes, + children, + _is_static: false, + }) + } +} + +impl ToTokens for Element { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let name = &self.name; + let children = &self.children; + + let key = match &self.key { + Some(ty) => quote! { Some(format_args_f!(#ty)) }, + None => quote! { None }, + }; + + let listeners = self + .attributes + .iter() + .filter(|f| matches!(f.attr, ElementAttr::EventTokens { .. })); + + let attr = self + .attributes + .iter() + .filter(|f| !matches!(f.attr, ElementAttr::EventTokens { .. })); + + tokens.append_all(quote! 
{ + __cx.element( + dioxus_elements::#name, + __cx.bump().alloc([ #(#listeners),* ]), + __cx.bump().alloc([ #(#attr),* ]), + __cx.bump().alloc([ #(#children),* ]), + #key, + ) + }); + } +} + +pub enum ElementAttr { + /// attribute: "valuee {}" + AttrText { name: Ident, value: LitStr }, + + /// attribute: true, + AttrExpression { name: Ident, value: Expr }, + + /// "attribute": "value {}" + CustomAttrText { name: LitStr, value: LitStr }, + + /// "attribute": true, + CustomAttrExpression { name: LitStr, value: Expr }, + + // /// onclick: move |_| {} + // EventClosure { name: Ident, closure: ExprClosure }, + /// onclick: {} + EventTokens { name: Ident, tokens: Expr }, +} + +pub struct ElementAttrNamed { + pub el_name: Ident, + pub attr: ElementAttr, +} + +impl ToTokens for ElementAttrNamed { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let ElementAttrNamed { el_name, attr } = self; + + tokens.append_all(match attr { + ElementAttr::AttrText { name, value } => { + quote! { + dioxus_elements::#el_name.#name(__cx, format_args_f!(#value)) + } + } + ElementAttr::AttrExpression { name, value } => { + quote! { + dioxus_elements::#el_name.#name(__cx, #value) + } + } + ElementAttr::CustomAttrText { name, value } => { + quote! { + __cx.attr( #name, format_args_f!(#value), None, false ) + } + } + ElementAttr::CustomAttrExpression { name, value } => { + quote! { + __cx.attr( #name, format_args_f!(#value), None, false ) + } + } + // ElementAttr::EventClosure { name, closure } => { + // quote! { + // dioxus_elements::on::#name(__cx, #closure) + // } + // } + ElementAttr::EventTokens { name, tokens } => { + quote! { + dioxus_elements::on::#name(__cx, #tokens) + } + } + }); + } +} diff --git a/packages/core-macro/src/rsx/mod.rs b/packages/core-macro/src/rsx/mod.rs new file mode 100644 index 000000000..e20d80dec --- /dev/null +++ b/packages/core-macro/src/rsx/mod.rs @@ -0,0 +1,97 @@ +//! Parse the root tokens in the rsx!{} macro +//! ========================================= +//! +//! This parsing path emerges directly from the macro call, with `RsxRender` being the primary entrance into parsing. +//! This feature must support: +//! - [x] Optionally rendering if the `in XYZ` pattern is present +//! - [x] Fragments as top-level element (through ambiguous) +//! - [x] Components as top-level element (through ambiguous) +//! - [x] Tags as top-level elements (through ambiguous) +//! - [x] Good errors if parsing fails +//! +//! Any errors in using rsx! will likely occur when people start using it, so the first errors must be really helpful. 
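For illustration only, a rough sketch of the two call forms the `CallBody` parser below accepts; the identifiers (`cx`, `sketch`) and the element bodies are placeholders rather than code from this changeset.

use dioxus::prelude::*;

fn sketch(cx: Scope) -> Element {
    // Custom-context form: a leading `ident,` makes the macro render
    // directly into that scope via `#ident.render(...)`.
    let _direct: Element = rsx!(cx, div { "rendered straight into cx" });

    // Plain form: the roots become a LazyNodes value; more than one root
    // is wrapped into a fragment by `__cx.fragment_root`.
    cx.render(rsx! {
        h1 { "first root" }
        p { "second root" }
    })
}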
+ +mod component; +mod element; +mod node; + +pub mod pretty; + +// Re-export the namespaces into each other +pub use component::*; +pub use element::*; +pub use node::*; + +// imports +use proc_macro2::TokenStream as TokenStream2; +use quote::{quote, ToTokens, TokenStreamExt}; +use syn::{ + parse::{Parse, ParseStream}, + Ident, Result, Token, +}; + +pub struct CallBody { + pub custom_context: Option, + pub roots: Vec, +} + +impl Parse for CallBody { + fn parse(input: ParseStream) -> Result { + let custom_context = if input.peek(Ident) && input.peek2(Token![,]) { + let name = input.parse::()?; + input.parse::()?; + + Some(name) + } else { + None + }; + + let mut roots = Vec::new(); + + while !input.is_empty() { + let node = input.parse::()?; + + if input.peek(Token![,]) { + let _ = input.parse::(); + } + + roots.push(node); + } + + Ok(Self { + custom_context, + roots, + }) + } +} + +/// Serialize the same way, regardless of flavor +impl ToTokens for CallBody { + fn to_tokens(&self, out_tokens: &mut TokenStream2) { + let inner = if self.roots.len() == 1 { + let inner = &self.roots[0]; + quote! { #inner } + } else { + let childs = &self.roots; + quote! { __cx.fragment_root([ #(#childs),* ]) } + }; + + match &self.custom_context { + // The `in cx` pattern allows directly rendering + Some(ident) => out_tokens.append_all(quote! { + #ident.render(LazyNodes::new_some(move |__cx: NodeFactory| -> VNode { + use dioxus_elements::{GlobalAttributes, SvgAttributes}; + #inner + })) + }), + + // Otherwise we just build the LazyNode wrapper + None => out_tokens.append_all(quote! { + LazyNodes::new_some(move |__cx: NodeFactory| -> VNode { + use dioxus_elements::{GlobalAttributes, SvgAttributes}; + #inner + }) + }), + }; + } +} diff --git a/packages/core-macro/src/rsx/node.rs b/packages/core-macro/src/rsx/node.rs new file mode 100644 index 000000000..d5d701767 --- /dev/null +++ b/packages/core-macro/src/rsx/node.rs @@ -0,0 +1,84 @@ +use super::*; + +use proc_macro2::TokenStream as TokenStream2; +use quote::{quote, ToTokens, TokenStreamExt}; +use syn::{ + parse::{Parse, ParseStream}, + token, Expr, LitStr, Result, Token, +}; + +/* +Parse +-> div {} +-> Component {} +-> component() +-> "text {with_args}" +-> (0..10).map(|f| rsx!("asd")), // <--- notice the comma - must be a complete expr +*/ +pub enum BodyNode { + Element(Element), + Component(Component), + Text(LitStr), + RawExpr(Expr), +} + +impl Parse for BodyNode { + fn parse(stream: ParseStream) -> Result { + if stream.peek(LitStr) { + return Ok(BodyNode::Text(stream.parse()?)); + } + + // div {} -> el + // Div {} -> comp + if stream.peek(syn::Ident) && stream.peek2(token::Brace) { + if stream + .fork() + .parse::()? 
+ .to_string() + .chars() + .next() + .unwrap() + .is_ascii_uppercase() + { + return Ok(BodyNode::Component(stream.parse()?)); + } else { + return Ok(BodyNode::Element(stream.parse::()?)); + } + } + + // component() -> comp + // ::component {} -> comp + // ::component () -> comp + if (stream.peek(syn::Ident) && stream.peek2(token::Paren)) + || (stream.peek(Token![::])) + || (stream.peek(Token![:]) && stream.peek2(Token![:])) + { + return Ok(BodyNode::Component(stream.parse::()?)); + } + + // crate::component{} -> comp + // crate::component() -> comp + if let Ok(pat) = stream.fork().parse::() { + if pat.segments.len() > 1 { + return Ok(BodyNode::Component(stream.parse::()?)); + } + } + + Ok(BodyNode::RawExpr(stream.parse::()?)) + } +} + +impl ToTokens for BodyNode { + fn to_tokens(&self, tokens: &mut TokenStream2) { + match &self { + BodyNode::Element(el) => el.to_tokens(tokens), + BodyNode::Component(comp) => comp.to_tokens(tokens), + BodyNode::Text(txt) => tokens.append_all(quote! { + __cx.text(format_args_f!(#txt)) + }), + BodyNode::RawExpr(exp) => tokens.append_all(quote! { + __cx.fragment_from_iter(#exp) + }), + } + } +} diff --git a/packages/core-macro/src/rsx/pretty.rs b/packages/core-macro/src/rsx/pretty.rs new file mode 100644 index 000000000..26ad54006 --- /dev/null +++ b/packages/core-macro/src/rsx/pretty.rs @@ -0,0 +1 @@ +//! pretty printer for rsx! diff --git a/packages/core/src/diff.rs b/packages/core/src/diff.rs index d7189a710..50d5d8874 100644 --- a/packages/core/src/diff.rs +++ b/packages/core/src/diff.rs @@ -853,7 +853,7 @@ impl<'b> DiffState<'b> { nodes_created += self.create_node(new_node); } else { self.diff_node(&old[old_index], new_node); - nodes_created += self.push_all_nodes(new_node); + nodes_created += self.push_all_real_nodes(new_node); } } @@ -876,7 +876,7 @@ impl<'b> DiffState<'b> { nodes_created += self.create_node(new_node); } else { self.diff_node(&old[old_index], new_node); - nodes_created += self.push_all_nodes(new_node); + nodes_created += self.push_all_real_nodes(new_node); } } @@ -899,7 +899,7 @@ impl<'b> DiffState<'b> { nodes_created += self.create_node(new_node); } else { self.diff_node(&old[old_index], new_node); - nodes_created += self.push_all_nodes(new_node); + nodes_created += self.push_all_real_nodes(new_node); } } @@ -1100,9 +1100,9 @@ impl<'b> DiffState<'b> { } // recursively push all the nodes of a tree onto the stack and return how many are there - fn push_all_nodes(&mut self, node: &'b VNode<'b>) -> usize { + fn push_all_real_nodes(&mut self, node: &'b VNode<'b>) -> usize { match node { - VNode::Text(_) | VNode::Placeholder(_) => { + VNode::Text(_) | VNode::Placeholder(_) | VNode::Element(_) => { self.mutations.push_root(node.mounted_id()); 1 } @@ -1110,7 +1110,7 @@ impl<'b> DiffState<'b> { VNode::Fragment(frag) => { let mut added = 0; for child in frag.children { - added += self.push_all_nodes(child); + added += self.push_all_real_nodes(child); } added } @@ -1118,16 +1118,7 @@ impl<'b> DiffState<'b> { VNode::Component(c) => { let scope_id = c.scope.get().unwrap(); let root = self.scopes.root_node(scope_id); - self.push_all_nodes(root) - } - - VNode::Element(el) => { - let mut num_on_stack = 0; - for child in el.children.iter() { - num_on_stack += self.push_all_nodes(child); - } - self.mutations.push_root(el.id.get().unwrap()); - num_on_stack + 1 + self.push_all_real_nodes(root) } } } diff --git a/packages/interpreter/src/interpreter.js b/packages/interpreter/src/interpreter.js index 6a4ed0904..fd4170351 100644 --- 
a/packages/interpreter/src/interpreter.js +++ b/packages/interpreter/src/interpreter.js @@ -53,14 +53,12 @@ export class Interpreter { } } CreateTextNode(text, root) { - // todo: make it so the types are okay const node = document.createTextNode(text); this.nodes[root] = node; this.stack.push(node); } CreateElement(tag, root) { const el = document.createElement(tag); - // el.setAttribute("data-dioxus-id", `${root}`); this.nodes[root] = el; this.stack.push(el); } diff --git a/packages/rsx/Cargo.toml b/packages/rsx/Cargo.toml index 4cad9064c..0d84ba250 100644 --- a/packages/rsx/Cargo.toml +++ b/packages/rsx/Cargo.toml @@ -1,13 +1,12 @@ [package] name = "dioxus-rsx" -version = "0.0.0" +version = "0.1.0" edition = "2018" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -once_cell = "1.8" -proc-macro-error = "1.0.4" +proc-macro-error = "1" proc-macro2 = { version = "1.0.6" } quote = "1.0" syn = { version = "1.0.11", features = ["full", "extra-traits"] }
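Taken together, the files added under packages/core-macro/src/rsx reimplement the rsx! parser inside the macro crate itself, replacing the dioxus-rsx dependency that this diff comments out. As a rough, hedged sketch of the surface syntax those parsers accept, the snippet below exercises a literal attribute, a custom quoted attribute, an event handler, a key, a component field, and a formatted text child; the Title component, its text prop, and the app function are placeholder names, and the snippet is not part of this changeset and may not compile against this exact revision.

// Hedged sketch: placeholder names, illustrating the shapes the rsx parser accepts.
use dioxus::prelude::*;

#[derive(Props, PartialEq)]
struct TitleProps {
    text: String,
}

#[allow(non_snake_case)]
fn Title(cx: Scope<TitleProps>) -> Element {
    cx.render(rsx! { h2 { "{cx.props.text}" } })
}

fn app(cx: Scope) -> Element {
    let count = 3;
    cx.render(rsx! {
        div {
            class: "container",                     // ElementAttr::AttrText
            "data-example": "anything",             // ElementAttr::CustomAttrText
            onclick: move |_| println!("clicked"),  // ElementAttr::EventTokens
            key: "root",                            // parsed into the element key
            Title { text: "Hello".to_string() }     // Component with one field
            "{count} items"                         // BodyNode::Text, formatted via format_args_f
        }
    })
}

fn main() {
    dioxus::desktop::launch(app);
}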