concept: move rsx out to its own crate

This commit is contained in:
Jonathan Kelley 2022-04-24 02:35:52 -04:00
parent 608795426a
commit 794f43ffee
10 changed files with 706 additions and 7 deletions

View file

@ -27,15 +27,12 @@ dioxus-interpreter-js = { path = "./packages/interpreter", version = "^0.2.0", o
dioxus-tui = { path = "./packages/tui", version = "^0.2.0", optional = true }
dioxus-liveview = { path = "./packages/liveview", optional = true }
# dioxus-mobile = { path = "./packages/mobile", version = "^0.2.0", optional = true }
# dioxus-rsx = { path = "./packages/rsx", optional = true }
# macro = ["dioxus-core-macro", "dioxus-rsx"]
dioxus-rsx = { path = "./packages/rsx", optional = true }
[features]
default = ["macro", "hooks", "html"]
macro = ["dioxus-core-macro"]
macro = ["dioxus-core-macro", "dioxus-rsx"]
hooks = ["dioxus-hooks"]
html = ["dioxus-html"]
ssr = ["dioxus-ssr"]
@ -61,6 +58,7 @@ members = [
"packages/fermi",
"packages/tui",
"packages/liveview",
"packages/rsx",
]
[dev-dependencies]
@ -91,4 +89,4 @@ harness = false
[[bench]]
name = "tui_update"
harness = false
harness = false

View file

@ -19,6 +19,7 @@ proc-macro-error = "1"
proc-macro2 = { version = "1.0.6" }
quote = "1.0"
syn = { version = "1.0.11", features = ["full", "extra-traits"] }
dioxus-rsx = { path = "../rsx" }
# testing
[dev-dependencies]

View file

@ -5,7 +5,9 @@ use syn::parse_macro_input;
mod ifmt;
mod inlineprops;
mod props;
mod rsx;
use dioxus_rsx as rsx;
// mod rsx;
#[proc_macro]
pub fn format_args_f(input: TokenStream) -> TokenStream {

12
packages/rsx/Cargo.toml Normal file
View file

@ -0,0 +1,12 @@
[package]
name = "dioxus-rsx"
version = "0.0.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
proc-macro2 = { version = "1.0.6" }
proc-macro-error = "1"
quote = "1.0"
syn = { version = "1.0.11", features = ["full", "extra-traits"] }

View file

@ -0,0 +1,233 @@
//! Parse components into the VComponent VNode
//! ==========================================
//!
//! This parsing path emerges from [`AmbiguousElement`] which supports validation of the vcomponent format.
//! We can be reasonably sure that whatever enters this parsing path is in the right format.
//! This feature must support
//! - [x] Namespaced components
//! - [x] Fields
//! - [x] Component builder syntax
//! - [x] Optional commas
//! - [ ] Children
//! - [ ] Keys
//! - [ ] Properties spreading with `..` syntax
use super::*;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{
ext::IdentExt,
parse::{Parse, ParseBuffer, ParseStream},
token, Expr, Ident, LitStr, Result, Token,
};
/// A parsed component invocation from an rsx! body,
/// e.g. `MyComponent { prop: 1, "child" }` or `my_component()`.
pub struct Component {
    /// Possibly-namespaced path to the component function.
    pub name: syn::Path,
    /// Explicit `name: value` property fields supplied in the body.
    pub body: Vec<ComponentField>,
    /// Child nodes nested inside the component's delimiters.
    pub children: Vec<BodyNode>,
    /// Expression given via the `..props` spread syntax, if any.
    pub manual_props: Option<Expr>,
}
impl Parse for Component {
    /// Parse `Path { field: value, ..spread, children }` — or the
    /// function-call form `Path ( ... )` — into a [`Component`].
    fn parse(stream: ParseStream) -> Result<Self> {
        let name = syn::Path::parse_mod_style(stream)?;

        // The body can be wrapped either in braces (struct-like) or in
        // parentheses (call-like); both delimit the same grammar.
        let inner: ParseBuffer;
        if stream.peek(token::Brace) {
            syn::braced!(inner in stream);
        } else {
            syn::parenthesized!(inner in stream);
        }

        let mut body = Vec::new();
        let mut children = Vec::new();
        let mut manual_props = None;

        while !inner.is_empty() {
            if inner.peek(Token![..]) {
                // `..expr` merges an existing props value into this component.
                inner.parse::<Token![..]>()?;
                manual_props = Some(inner.parse::<Expr>()?);
            } else if inner.peek(Ident) && inner.peek2(Token![:]) && !inner.peek3(Token![:]) {
                // `ident:` (but not `ident::`, which starts a path) is a field.
                body.push(inner.parse::<ComponentField>()?);
            } else {
                // Anything else is a child node.
                children.push(inner.parse::<BodyNode>()?);
            }

            // Commas between entries are optional.
            if inner.peek(Token![,]) {
                let _ = inner.parse::<Token![,]>();
            }
        }

        Ok(Self {
            name,
            body,
            children,
            manual_props,
        })
    }
}
impl ToTokens for Component {
    /// Emit the props-builder expression for this component and register it on
    /// the factory via `__cx.component(...)`.
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let name = &self.name;

        // `key` is not a real prop: it is extracted here and passed to
        // `__cx.component` separately below.
        let mut has_key = None;

        let builder = match &self.manual_props {
            // `..props` spread: start from the spread expression and overwrite
            // any explicitly-listed fields on it in place.
            Some(manual_props) => {
                let mut toks = quote! {
                    let mut __manual_props = #manual_props;
                };
                for field in &self.body {
                    if field.name == "key" {
                        has_key = Some(field);
                    } else {
                        let name = &field.name;
                        let val = &field.content;
                        toks.append_all(quote! {
                            __manual_props.#name = #val;
                        });
                    }
                }
                toks.append_all(quote! {
                    __manual_props
                });
                // Wrap in a block so the let-bindings stay scoped.
                quote! {{
                    #toks
                }}
            }
            // No spread: drive the typed props builder generated for the
            // component (`fc_to_builder`), one `.field(value)` call per field.
            None => {
                let mut toks = quote! { fc_to_builder(#name) };
                for field in &self.body {
                    match field.name.to_string().as_str() {
                        "key" => {
                            has_key = Some(field);
                        }
                        _ => toks.append_all(quote! {#field}),
                    }
                }
                if !self.children.is_empty() {
                    let childs = &self.children;
                    toks.append_all(quote! {
                        .children(__cx.create_children([ #( #childs ),* ]))
                    });
                }
                toks.append_all(quote! {
                    .build()
                });
                toks
            }
        };

        // The key is emitted through format_args_f! so it may interpolate state.
        let key_token = match has_key {
            Some(field) => {
                let inners = &field.content;
                quote! { Some(format_args_f!(#inners)) }
            }
            None => quote! { None },
        };

        // Use the final path segment as the component's display name.
        let fn_name = self.name.segments.last().unwrap().ident.to_string();

        tokens.append_all(quote! {
            __cx.component(
                #name,
                #builder,
                #key_token,
                #fn_name
            )
        })
    }
}
/// A single `name: value` property inside a component body.
pub struct ComponentField {
    // Property (field) name as written by the user.
    name: Ident,
    // Parsed right-hand side; see [`ContentField`] for the flavors.
    content: ContentField,
}
/// The right-hand side of a component property.
enum ContentField {
    /// An arbitrary Rust expression, passed through verbatim.
    ManExpr(Expr),
    /// A string literal containing `{}` interpolations; lowered via `format_args_f!`.
    Formatted(LitStr),
    /// A handler expression for an `on*` property, wrapped in `__cx.event_handler`.
    OnHandlerRaw(Expr),
}
impl ToTokens for ContentField {
    /// Lower the property value into the tokens the props builder expects.
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        match self {
            // Plain expressions pass straight through.
            Self::ManExpr(expr) => expr.to_tokens(tokens),
            // Formatted strings become interned raw text on the factory.
            Self::Formatted(lit) => tokens.append_all(quote! {
                __cx.raw_text(format_args_f!(#lit)).0
            }),
            // Event handlers are wrapped so the runtime can manage them.
            Self::OnHandlerRaw(expr) => tokens.append_all(quote! {
                __cx.event_handler(#expr)
            }),
        }
    }
}
impl Parse for ComponentField {
    /// Parse one `name: value` pair, classifying the value by the field name
    /// and the shape of the literal that follows.
    fn parse(input: ParseStream) -> Result<Self> {
        // parse_any accepts keywords as field names (e.g. `type`).
        let name = Ident::parse_any(input)?;
        input.parse::<Token![:]>()?;
        // Fields starting with "on" are event handlers.
        if name.to_string().starts_with("on") {
            let content = ContentField::OnHandlerRaw(input.parse()?);
            return Ok(Self { name, content });
        }
        // `key` is always kept as a plain expression; it is pulled out later
        // by Component::to_tokens.
        if name == "key" {
            let content = ContentField::ManExpr(input.parse()?);
            return Ok(Self { name, content });
        }
        // A string literal followed by a comma is inspected on a fork (without
        // consuming) for `{}` interpolations before committing to Formatted.
        if input.peek(LitStr) && input.peek2(Token![,]) {
            let t: LitStr = input.fork().parse()?;
            if is_literal_foramtted(&t) {
                let content = ContentField::Formatted(input.parse()?);
                return Ok(Self { name, content });
            }
        }
        // Two adjacent string literals means the user forgot a comma.
        if input.peek(LitStr) && input.peek2(LitStr) {
            missing_trailing_comma!(input.span());
        }
        // Fallback: any expression.
        let content = ContentField::ManExpr(input.parse()?);
        Ok(Self { name, content })
    }
}
impl ToTokens for ComponentField {
    /// Emit a builder-method call: `.field_name(value)`.
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let field_name = &self.name;
        let field_value = &self.content;
        tokens.append_all(quote! {
            .#field_name(#field_value)
        })
    }
}
/// Returns true when the string literal contains a `{}` interpolation — a `{`
/// that is not the first half of an escaped `{{` pair.
/// (The misspelled name is preserved: the call site in this file uses it.)
fn is_literal_foramtted(lit: &LitStr) -> bool {
    let text = lit.value();
    let mut chars = text.chars();
    while let Some(ch) = chars.next() {
        // On `{`, consume the following char: `{{` is an escaped brace,
        // anything else (including end-of-string) opens a format argument.
        if ch == '{' && chars.next() != Some('{') {
            return true;
        }
    }
    false
}

253
packages/rsx/src/element.rs Normal file
View file

@ -0,0 +1,253 @@
use super::*;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{
parse::{Parse, ParseBuffer, ParseStream},
Expr, Ident, LitStr, Result, Token,
};
// =======================================
// Parse the VNode::Element type
// =======================================
/// A parsed element invocation, e.g. `div { class: "a", "text" }`.
pub struct Element {
    /// Tag name; must resolve to an item in `dioxus_elements`.
    pub name: Ident,
    /// Optional `key: "..."` literal used for keyed diffing.
    pub key: Option<LitStr>,
    /// All attributes, listeners, and custom attributes, in source order.
    pub attributes: Vec<ElementAttrNamed>,
    /// Child nodes nested inside the braces.
    pub children: Vec<BodyNode>,
    // Reserved for static-subtree optimization; currently always false.
    pub _is_static: bool,
}
impl Parse for Element {
    /// Parse `tag { attr: value..., children... }`.
    ///
    /// Attributes must appear before any children; a misplaced attribute or a
    /// missing comma is reported as a non-fatal proc-macro error so parsing
    /// can continue and surface more diagnostics in one pass.
    fn parse(stream: ParseStream) -> Result<Self> {
        let el_name = Ident::parse(stream)?;
        // parse the guts
        let content: ParseBuffer;
        syn::braced!(content in stream);
        let mut attributes: Vec<ElementAttrNamed> = vec![];
        let mut children: Vec<BodyNode> = vec![];
        let mut key = None;
        // node_ref is accepted and parsed but not used by codegen yet.
        let mut _el_ref = None;
        // parse fields with commas
        // break when we don't get this pattern anymore
        // start parsing bodynodes
        // "def": 456,
        // abc: 123,
        loop {
            // Custom attributes named by a string literal (e.g. `"data-x": 1`).
            // `!peek3(Token![:])` rules out `::` path starts.
            if content.peek(LitStr) && content.peek2(Token![:]) && !content.peek3(Token![:]) {
                let name = content.parse::<LitStr>()?;
                // cloned so the missing-comma error below points at the name's span
                let ident = name.clone();
                content.parse::<Token![:]>()?;
                if content.peek(LitStr) && content.peek2(Token![,]) {
                    // string literal value followed by a comma => text value
                    let value = content.parse::<LitStr>()?;
                    attributes.push(ElementAttrNamed {
                        el_name: el_name.clone(),
                        attr: ElementAttr::CustomAttrText { name, value },
                    });
                } else {
                    // anything else => expression value
                    let value = content.parse::<Expr>()?;
                    attributes.push(ElementAttrNamed {
                        el_name: el_name.clone(),
                        attr: ElementAttr::CustomAttrExpression { name, value },
                    });
                }
                if content.is_empty() {
                    break;
                }
                if content.parse::<Token![,]>().is_err() {
                    missing_trailing_comma!(ident);
                }
                continue;
            }
            // Known attributes, listeners, and special fields: `ident:` (not `ident::`).
            if content.peek(Ident) && content.peek2(Token![:]) && !content.peek3(Token![:]) {
                let name = content.parse::<Ident>()?;
                let ident = name.clone();
                let name_str = name.to_string();
                content.parse::<Token![:]>()?;
                if name_str.starts_with("on") {
                    // `on*` fields are event listeners
                    attributes.push(ElementAttrNamed {
                        el_name: el_name.clone(),
                        attr: ElementAttr::EventTokens {
                            name,
                            tokens: content.parse()?,
                        },
                    });
                } else {
                    match name_str.as_str() {
                        // `key` participates in diffing, not in the attribute list
                        "key" => {
                            key = Some(content.parse()?);
                        }
                        "classes" => todo!("custom class list not supported yet"),
                        // "namespace" => todo!("custom namespace not supported yet"),
                        "node_ref" => {
                            _el_ref = Some(content.parse::<Expr>()?);
                        }
                        _ => {
                            // literal value => formatted text; otherwise expression
                            if content.peek(LitStr) {
                                attributes.push(ElementAttrNamed {
                                    el_name: el_name.clone(),
                                    attr: ElementAttr::AttrText {
                                        name,
                                        value: content.parse()?,
                                    },
                                });
                            } else {
                                attributes.push(ElementAttrNamed {
                                    el_name: el_name.clone(),
                                    attr: ElementAttr::AttrExpression {
                                        name,
                                        value: content.parse()?,
                                    },
                                });
                            }
                        }
                    }
                }
                if content.is_empty() {
                    break;
                }
                // todo: add a message saying you need to include commas between fields
                if content.parse::<Token![,]>().is_err() {
                    missing_trailing_comma!(ident);
                }
                continue;
            }
            // Neither attribute pattern matched: switch to parsing children.
            break;
        }
        // Everything remaining must be children; an attribute-shaped token
        // sequence here gets a dedicated "move it above the children" error.
        while !content.is_empty() {
            if (content.peek(LitStr) && content.peek2(Token![:])) && !content.peek3(Token![:]) {
                attr_after_element!(content.span());
            }
            if (content.peek(Ident) && content.peek2(Token![:])) && !content.peek3(Token![:]) {
                attr_after_element!(content.span());
            }
            children.push(content.parse::<BodyNode>()?);
            // consume comma if it exists
            // we don't actually care if there *are* commas after elements/text
            if content.peek(Token![,]) {
                let _ = content.parse::<Token![,]>();
            }
        }
        Ok(Self {
            key,
            name: el_name,
            attributes,
            children,
            _is_static: false,
        })
    }
}
impl ToTokens for Element {
    /// Emit `__cx.element(...)` with listeners, attributes, and children
    /// allocated on the factory's bump arena.
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let name = &self.name;
        let children = &self.children;

        let key = if let Some(lit) = &self.key {
            quote! { Some(format_args_f!(#lit)) }
        } else {
            quote! { None }
        };

        // Split the attribute list: event listeners occupy their own slot in
        // the element call. `partition` preserves source order on both sides.
        let (listeners, attrs): (Vec<_>, Vec<_>) = self
            .attributes
            .iter()
            .partition(|a| matches!(a.attr, ElementAttr::EventTokens { .. }));

        tokens.append_all(quote! {
            __cx.element(
                dioxus_elements::#name,
                __cx.bump().alloc([ #(#listeners),* ]),
                __cx.bump().alloc([ #(#attrs),* ]),
                __cx.bump().alloc([ #(#children),* ]),
                #key,
            )
        });
    }
}
/// One attribute entry on an element, classified by how its name and value
/// were written in the source.
pub enum ElementAttr {
    /// attribute: "value {}"
    AttrText { name: Ident, value: LitStr },
    /// attribute: true,
    AttrExpression { name: Ident, value: Expr },
    /// "attribute": "value {}"
    CustomAttrText { name: LitStr, value: LitStr },
    /// "attribute": true,
    CustomAttrExpression { name: LitStr, value: Expr },
    // /// onclick: move |_| {}
    // EventClosure { name: Ident, closure: ExprClosure },
    /// onclick: {}
    EventTokens { name: Ident, tokens: Expr },
}
/// An [`ElementAttr`] paired with the tag it belongs to, so codegen can
/// resolve `dioxus_elements::<el_name>.<attr>(...)` method calls.
pub struct ElementAttrNamed {
    pub el_name: Ident,
    pub attr: ElementAttr,
}
impl ToTokens for ElementAttrNamed {
    /// Lower one attribute or listener into the runtime call that registers it.
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        let ElementAttrNamed { el_name, attr } = self;
        tokens.append_all(match attr {
            // Known attribute, formatted string value: resolved through the
            // element's typed attribute method.
            ElementAttr::AttrText { name, value } => {
                quote! {
                    dioxus_elements::#el_name.#name(__cx, format_args_f!(#value))
                }
            }
            // Known attribute, expression value: passed through unwrapped.
            ElementAttr::AttrExpression { name, value } => {
                quote! {
                    dioxus_elements::#el_name.#name(__cx, #value)
                }
            }
            // Custom (string-named) attributes bypass the typed methods.
            ElementAttr::CustomAttrText { name, value } => {
                quote! {
                    __cx.attr( #name, format_args_f!(#value), None, false )
                }
            }
            ElementAttr::CustomAttrExpression { name, value } => {
                // NOTE(review): this wraps an arbitrary expression in
                // format_args_f!, mirroring the Text arm, unlike AttrExpression
                // above which passes the expression through directly — confirm
                // non-literal expressions are accepted here or whether #value
                // should be emitted unwrapped.
                quote! {
                    __cx.attr( #name, format_args_f!(#value), None, false )
                }
            }
            // ElementAttr::EventClosure { name, closure } => {
            //     quote! {
            //         dioxus_elements::on::#name(__cx, #closure)
            //     }
            // }
            // Listeners route through the `on` namespace.
            ElementAttr::EventTokens { name, tokens } => {
                quote! {
                    dioxus_elements::on::#name(__cx, #tokens)
                }
            }
        });
    }
}

View file

@ -0,0 +1,15 @@
/// Emit a non-fatal "missing trailing comma" error at the given span,
/// letting parsing continue so more diagnostics can be collected.
macro_rules! missing_trailing_comma {
    ($span:expr) => {
        proc_macro_error::emit_error!($span, "missing trailing comma")
    };
}
/// Emit a non-fatal error when an attribute appears after child nodes,
/// with a hint telling the user where attributes must go.
macro_rules! attr_after_element {
    ($span:expr) => {
        proc_macro_error::emit_error!(
            $span,
            "expected element";
            help = "move the attribute above all the children and text elements"
        )
    };
}

100
packages/rsx/src/lib.rs Normal file
View file

@ -0,0 +1,100 @@
//! Parse the root tokens in the rsx!{} macro
//! =========================================
//!
//! This parsing path emerges directly from the macro call, with `RsxRender` being the primary entrance into parsing.
//! This feature must support:
//! - [x] Optionally rendering if the `in XYZ` pattern is present
//! - [x] Fragments as top-level element (through ambiguous)
//! - [x] Components as top-level element (through ambiguous)
//! - [x] Tags as top-level elements (through ambiguous)
//! - [x] Good errors if parsing fails
//!
//! Any errors in using rsx! will likely occur when people start using it, so the first errors must be really helpful.
#[macro_use]
mod errors;
mod component;
mod element;
mod node;
pub mod pretty;
// Re-export the namespaces into each other
pub use component::*;
pub use element::*;
pub use node::*;
// imports
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{
parse::{Parse, ParseStream},
Ident, Result, Token,
};
/// The entire contents of an `rsx!{}` invocation.
pub struct CallBody {
    /// Set when the body starts with `ident,` — presumably the scope/context
    /// to render into directly (see the `in cx` note in `to_tokens`).
    pub custom_context: Option<Ident>,
    /// The top-level nodes of the macro body.
    pub roots: Vec<BodyNode>,
}
impl Parse for CallBody {
    /// Parse an optional leading `ident,` context name followed by any number
    /// of root nodes with optional separating commas.
    fn parse(input: ParseStream) -> Result<Self> {
        // A bare identifier followed by a comma names the render context.
        let mut custom_context = None;
        if input.peek(Ident) && input.peek2(Token![,]) {
            custom_context = Some(input.parse::<Ident>()?);
            input.parse::<Token![,]>()?;
        }

        let mut roots = Vec::new();
        while !input.is_empty() {
            roots.push(input.parse::<BodyNode>()?);
            // Commas between roots are optional.
            if input.peek(Token![,]) {
                let _ = input.parse::<Token![,]>();
            }
        }

        Ok(Self {
            custom_context,
            roots,
        })
    }
}
/// Serialize the same way, regardless of flavor
impl ToTokens for CallBody {
    /// Emit either a direct `.render(...)` call (when a context ident was
    /// provided) or a bare `LazyNodes` closure for the caller to render later.
    fn to_tokens(&self, out_tokens: &mut TokenStream2) {
        // A single root is emitted as-is; multiple roots get a fragment wrapper.
        let inner = if self.roots.len() == 1 {
            let inner = &self.roots[0];
            quote! { #inner }
        } else {
            let childs = &self.roots;
            quote! { __cx.fragment_root([ #(#childs),* ]) }
        };
        match &self.custom_context {
            // The `in cx` pattern allows directly rendering
            Some(ident) => out_tokens.append_all(quote! {
                #ident.render(LazyNodes::new(move |__cx: NodeFactory| -> VNode {
                    use dioxus_elements::{GlobalAttributes, SvgAttributes};
                    #inner
                }))
            }),
            // Otherwise we just build the LazyNode wrapper
            None => out_tokens.append_all(quote! {
                LazyNodes::new(move |__cx: NodeFactory| -> VNode {
                    use dioxus_elements::{GlobalAttributes, SvgAttributes};
                    #inner
                })
            }),
        };
    }
}

84
packages/rsx/src/node.rs Normal file
View file

@ -0,0 +1,84 @@
use super::*;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{
parse::{Parse, ParseStream},
token, Expr, LitStr, Result, Token,
};
/*
Parse
-> div {}
-> Component {}
-> component()
-> "text {with_args}"
-> (0..10).map(|f| rsx!("asd")), // <--- notice the comma - must be a complete expr
*/
/// One node in an rsx! body: element, component, text, or raw expression.
pub enum BodyNode {
    /// `div { ... }` — lowercase ident with a brace body
    Element(Element),
    /// `Component { ... }` / `path::to::comp(...)` — component invocation
    Component(Component),
    /// `"text {with_args}"` — formatted text node
    Text(LitStr),
    /// Any other expression, spliced in as a fragment iterator
    RawExpr(Expr),
}
impl Parse for BodyNode {
    /// Disambiguate the next node: text, element, component, or raw expression.
    fn parse(stream: ParseStream) -> Result<Self> {
        // A bare string literal is a text node.
        if stream.peek(LitStr) {
            return Ok(Self::Text(stream.parse()?));
        }

        // `ident { ... }`: an uppercase first letter means component, lowercase
        // means element. The ident is inspected on a fork so nothing is consumed.
        if stream.peek(syn::Ident) && stream.peek2(token::Brace) {
            let ident = stream.fork().parse::<Ident>()?;
            let starts_upper = ident
                .to_string()
                .chars()
                .next()
                .unwrap()
                .is_ascii_uppercase();
            return if starts_upper {
                Ok(Self::Component(stream.parse()?))
            } else {
                Ok(Self::Element(stream.parse::<Element>()?))
            };
        }

        // `component(...)`, `::component {}`, `::component ()` are components.
        if (stream.peek(syn::Ident) && stream.peek2(token::Paren))
            || stream.peek(Token![::])
            || (stream.peek(Token![:]) && stream.peek2(Token![:]))
        {
            return Ok(Self::Component(stream.parse::<Component>()?));
        }

        // A multi-segment path (`crate::component {}` / `crate::component()`)
        // is also a component; probed on a fork.
        if let Ok(path) = stream.fork().parse::<syn::Path>() {
            if path.segments.len() > 1 {
                return Ok(Self::Component(stream.parse::<Component>()?));
            }
        }

        // Fallback: any expression, rendered as a fragment.
        Ok(Self::RawExpr(stream.parse::<Expr>()?))
    }
}
impl ToTokens for BodyNode {
    /// Delegate codegen to the node's concrete flavor.
    fn to_tokens(&self, tokens: &mut TokenStream2) {
        match self {
            Self::Element(el) => el.to_tokens(tokens),
            Self::Component(comp) => comp.to_tokens(tokens),
            // Text routes through format_args_f! for `{}` interpolation.
            Self::Text(text) => tokens.append_all(quote! {
                __cx.text(format_args_f!(#text))
            }),
            // Raw expressions must yield an iterator of renderable items.
            Self::RawExpr(expr) => tokens.append_all(quote! {
                __cx.fragment_from_iter(#expr)
            }),
        }
    }
}

View file

@ -0,0 +1 @@
//! pretty printer for rsx!