Feat: move the rsx macro around

This commit is contained in:
Jonathan Kelley 2021-06-03 10:42:28 -04:00
parent 1919f88f03
commit 50c8b93aad
23 changed files with 886 additions and 617 deletions

View file

@ -6,7 +6,7 @@ pub(crate) mod fc;
pub(crate) mod htm;
pub(crate) mod ifmt;
pub(crate) mod props;
pub(crate) mod rsxt;
pub(crate) mod rsx;
pub(crate) mod rsxtemplate;
pub(crate) mod util;
@ -24,7 +24,7 @@ pub fn html(s: TokenStream) -> TokenStream {
/// We aim to keep functional parity with html templates.
#[proc_macro]
pub fn rsx(s: TokenStream) -> TokenStream {
match syn::parse::<rsxt::RsxRender>(s) {
match syn::parse::<rsx::RsxRender>(s) {
Err(e) => e.to_compile_error().into(),
Ok(s) => s.to_token_stream().into(),
}

View file

@ -0,0 +1,88 @@
//! Parse anything that has a pattern of < Ident, Bracket >
//! ========================================================
//!
//! Whenever a `name {}` pattern emerges, we need to parse it into an element, a component, or a fragment.
//! This feature must support:
//! - Namespaced/pathed components
//! - Differentiating between built-in and custom elements
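//!
//! As a rough sketch of how a child node gets disambiguated (the custom names below are purely
//! illustrative), the parser maps:
//!
//! ```ignore
//! rsx! {
//!     div {
//!         p {}                // lowercase, valid html/svg tag -> Element
//!         Fragment {}         // the special `Fragment` name   -> Fragment
//!         CustomButton {}     // uppercase ident               -> Component
//!         my_crate::Button {} // pathed name                   -> Component
//!     }
//! }
//! ```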
use super::*;
use proc_macro2::TokenStream as TokenStream2;
use quote::ToTokens;
use syn::{
parse::{Parse, ParseStream},
Error, Ident, LitStr, Result, Token,
};
pub enum AmbiguousElement {
Element(Element),
Component(Component),
Fragment(Fragment),
}
impl Parse for AmbiguousElement {
fn parse(input: ParseStream) -> Result<Self> {
// Try to parse as an absolute path and immediately defer to the component parser
if input.peek(Token![::]) {
return input
.parse::<Component>()
.map(|c| AmbiguousElement::Component(c));
}
// If not an absolute path, then parse the ident and check if it's a valid tag
if let Ok(pat) = input.fork().parse::<syn::Path>() {
if pat.segments.len() > 1 {
return input
.parse::<Component>()
.map(|c| AmbiguousElement::Component(c));
}
}
if let Ok(name) = input.fork().parse::<Ident>() {
let name_str = name.to_string();
match is_valid_tag(&name_str) {
true => input
.parse::<Element>()
.map(|c| AmbiguousElement::Element(c)),
false => {
let first_char = name_str.chars().next().unwrap();
if first_char.is_ascii_uppercase() {
if name_str == "Fragment" {
input
.parse::<Fragment>()
.map(|c| AmbiguousElement::Fragment(c))
} else {
input
.parse::<Component>()
.map(|c| AmbiguousElement::Component(c))
}
} else {
let name = input.parse::<Ident>().unwrap();
Err(Error::new(
name.span(),
"Components must be uppercased, perhaps you mispelled a html tag",
))
}
}
}
} else {
if input.peek(LitStr) {
panic!("it's actually a litstr");
}
Err(Error::new(input.span(), "Not a valid Html tag"))
}
}
}
impl ToTokens for AmbiguousElement {
fn to_tokens(&self, tokens: &mut TokenStream2) {
match self {
AmbiguousElement::Element(el) => el.to_tokens(tokens),
AmbiguousElement::Component(comp) => comp.to_tokens(tokens),
AmbiguousElement::Fragment(frag) => frag.to_tokens(tokens),
}
}
}

View file

@ -0,0 +1,141 @@
//! Parse components into the VComponent VNode
//! ==========================================
//!
//! This parsing path emerges from [`AmbiguousElement`] which supports validation of the vcomponent format.
//! We can be reasonably sure that whatever enters this parsing path is in the right format.
//! This feature must support
//! - [x] Namespaced components
//! - [x] Fields
//! - [x] Component builder syntax
//! - [x] Optional commas
//! - [ ] Children
//! - [ ] Keys
//! - [ ] Properties spreading with `..` syntax
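//!
//! For illustration, a component invocation this parser should accept looks roughly like the
//! following (`Header` and its fields are hypothetical):
//!
//! ```ignore
//! rsx! {
//!     Header {
//!         key: "home-header",
//!         title: "My Site",
//!     }
//! }
//! ```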
use super::*;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{
ext::IdentExt,
parse::{Parse, ParseBuffer, ParseStream},
token, Expr, Ident, Result, Token,
};
pub struct Component {
// accept any path-like argument
name: syn::Path,
body: Vec<ComponentField>,
_children: Vec<Node>,
}
impl Parse for Component {
fn parse(s: ParseStream) -> Result<Self> {
// let name = s.parse::<syn::ExprPath>()?;
// todo: look into somehow getting the crate/super/etc
let name = syn::Path::parse_mod_style(s)?;
// parse the guts
let content: ParseBuffer;
syn::braced!(content in s);
let mut body: Vec<ComponentField> = Vec::new();
let _children: Vec<Node> = Vec::new();
'parsing: loop {
// [1] Break if empty
if content.is_empty() {
break 'parsing;
}
if content.peek(token::Brace) {
let inner: ParseBuffer;
syn::braced!(inner in content);
if inner.peek(Token![...]) {
todo!("Inline props not yet supported");
}
}
body.push(content.parse::<ComponentField>()?);
// consume comma if it exists
// we don't actually care if there *are* commas between attrs
if content.peek(Token![,]) {
let _ = content.parse::<Token![,]>();
}
}
// todo: add support for children
let children: Vec<Node> = vec![];
Ok(Self {
name,
body,
_children: children,
})
}
}
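// As an illustrative sketch (not the literal expansion), `to_tokens` below turns a hypothetical
// `Header { key: "a", title: "My Site" }` into roughly:
//   dioxus::builder::virtual_child(__ctx, Header, fc_to_builder(Header).title("My Site").build(), Some("a"))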
impl ToTokens for Component {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let name = &self.name;
let mut builder = quote! {
fc_to_builder(#name)
};
let mut has_key = None;
for field in &self.body {
if field.name.to_string() == "key" {
has_key = Some(field);
} else {
builder.append_all(quote! {#field});
}
}
builder.append_all(quote! {
.build()
});
let key_token = match has_key {
Some(field) => {
let inners = field.content.to_token_stream();
quote! {
Some(#inners)
}
}
None => quote! {None},
};
let _toks = tokens.append_all(quote! {
dioxus::builder::virtual_child(__ctx, #name, #builder, #key_token)
});
}
}
// the struct's fields info
pub struct ComponentField {
name: Ident,
content: Expr,
}
impl Parse for ComponentField {
fn parse(input: ParseStream) -> Result<Self> {
let name = Ident::parse_any(input)?;
input.parse::<Token![:]>()?;
let content = input.parse()?;
Ok(Self { name, content })
}
}
impl ToTokens for ComponentField {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let ComponentField { name, content, .. } = self;
tokens.append_all(quote! {
.#name(#content)
})
}
}

View file

@ -0,0 +1,223 @@
use super::*;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{
ext::IdentExt,
parse::{discouraged::Speculative, Parse, ParseBuffer, ParseStream},
token, Error, Expr, ExprClosure, Ident, LitStr, Result, Token,
};
// =======================================
// Parse the VNode::Element type
// =======================================
pub struct Element {
name: Ident,
attrs: Vec<ElementAttr>,
children: Vec<Node>,
}
impl Parse for Element {
fn parse(stream: ParseStream) -> Result<Self> {
//
let name = Ident::parse(stream)?;
if !crate::util::is_valid_tag(&name.to_string()) {
return Err(Error::new(name.span(), "Not a valid Html tag"));
}
// parse the guts
let content: ParseBuffer;
syn::braced!(content in stream);
let mut attrs: Vec<ElementAttr> = vec![];
let mut children: Vec<Node> = vec![];
'parsing: loop {
// [1] Break if empty
if content.is_empty() {
break 'parsing;
}
let forked = content.fork();
if forked.call(Ident::parse_any).is_ok()
&& forked.parse::<Token![:]>().is_ok()
&& forked.parse::<Token![:]>().is_err()
{
attrs.push(content.parse::<ElementAttr>()?);
} else {
children.push(content.parse::<Node>()?);
}
// consume comma if it exists
// we don't actually care if there *are* commas after elements/text
if content.peek(Token![,]) {
let _ = content.parse::<Token![,]>();
}
}
Ok(Self {
name,
attrs,
children,
})
}
}
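// As an illustrative sketch, `to_tokens` below expands something like `div { class: "menu", "hello" }`
// into a builder chain along the lines of:
//   dioxus::builder::ElementBuilder::new(__ctx, "div")
//       .attr("class", format_args_f!("menu"))
//       .iter_child(/* the "hello" text node */)
//       .finish()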
impl ToTokens for Element {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let name = &self.name.to_string();
tokens.append_all(quote! {
dioxus::builder::ElementBuilder::new(__ctx, #name)
});
for attr in self.attrs.iter() {
attr.to_tokens(tokens);
}
let mut children = self.children.iter();
while let Some(child) = children.next() {
let inner_toks = child.to_token_stream();
tokens.append_all(quote! {
.iter_child(#inner_toks)
})
}
tokens.append_all(quote! {
.finish()
});
}
}
/// =======================================
/// Parse a VElement's Attributes
/// =======================================
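///
/// As a sketch, the attribute forms this parser handles look roughly like this
/// (the names and helpers are illustrative only):
///
/// ```ignore
/// div {
///     class: "menu",                    // string literal -> AttrType::BumpText
///     width: calc_width(),              // expression     -> AttrType::FieldTokens
///     onclick: move |evt| handle(evt),  // closure        -> AttrType::Event
///     oninput: { handler_tokens },      // braced tokens  -> AttrType::EventTokens
/// }
/// ```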
struct ElementAttr {
name: Ident,
ty: AttrType,
}
enum AttrType {
BumpText(LitStr),
FieldTokens(Expr),
EventTokens(Expr),
Event(ExprClosure),
}
impl Parse for ElementAttr {
fn parse(s: ParseStream) -> Result<Self> {
let mut name = Ident::parse_any(s)?;
let name_str = name.to_string();
s.parse::<Token![:]>()?;
// Check if this is an event handler
// If so, parse into literal tokens
let ty = if name_str.starts_with("on") {
// remove the "on" bit
name = Ident::new(&name_str.trim_start_matches("on"), name.span());
if s.peek(token::Brace) {
let content;
syn::braced!(content in s);
// Try to parse directly as a closure
let fork = content.fork();
if let Ok(event) = fork.parse::<ExprClosure>() {
content.advance_to(&fork);
AttrType::Event(event)
} else {
AttrType::EventTokens(content.parse()?)
}
} else {
AttrType::Event(s.parse()?)
}
} else {
match name_str.as_str() {
"style" => {
//
todo!("inline style not yet supported")
}
"classes" => {
//
todo!("custom class lsit not supported")
}
"namespace" => {
//
todo!("custom namespace not supported")
}
"ref" => {
//
todo!("custom ref not supported")
}
_ => {
if s.peek(LitStr) {
let rawtext = s.parse::<LitStr>().unwrap();
AttrType::BumpText(rawtext)
} else {
let toks = s.parse::<Expr>()?;
AttrType::FieldTokens(toks)
}
}
}
// let lit_str = if name_str == "style" && s.peek(token::Brace) {
// // special-case to deal with literal styles.
// let outer;
// syn::braced!(outer in s);
// // double brace for inline style.
// // todo!("Style support not ready yet");
// // if outer.peek(token::Brace) {
// // let inner;
// // syn::braced!(inner in outer);
// // let styles: Styles = inner.parse()?;
// // MaybeExpr::Literal(LitStr::new(&styles.to_string(), Span::call_site()))
// // } else {
// // just parse as an expression
// outer.parse()?
// // }
// } else {
// s.parse()?
// };
};
// consume comma if it exists
// we don't actually care if there *are* commas between attrs
if s.peek(Token![,]) {
let _ = s.parse::<Token![,]>();
}
Ok(ElementAttr { name, ty })
}
}
impl ToTokens for ElementAttr {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let name = self.name.to_string();
let nameident = &self.name;
let _attr_stream = TokenStream2::new();
match &self.ty {
AttrType::BumpText(value) => {
tokens.append_all(quote! {
.attr(#name, format_args_f!(#value))
});
}
AttrType::Event(event) => {
tokens.append_all(quote! {
.add_listener(dioxus::events::on::#nameident(__ctx, #event))
});
}
AttrType::FieldTokens(exp) => {
tokens.append_all(quote! {
.attr(#name, #exp)
});
}
AttrType::EventTokens(event) => {
//
tokens.append_all(quote! {
.add_listener(dioxus::events::on::#nameident(__ctx, #event))
})
}
}
}
}

View file

@ -0,0 +1,33 @@
//! Parse `Fragments` into the Fragment VNode
//! ==========================================
//!
//! This parsing path emerges from [`AmbiguousElement`] which supports validation of the Fragment format.
//! We can be reasonably sure that whatever enters this parsing path is in the right format.
//! This feature must support:
//! - [x] Optional commas
//! - [ ] Children
//! - [ ] Keys
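//!
//! The intended surface syntax (parsing is still `todo!()` below) is sketched as:
//!
//! ```ignore
//! rsx! {
//!     Fragment {
//!         div {}
//!         div {}
//!     }
//! }
//! ```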
use {
proc_macro::TokenStream,
proc_macro2::{Span, TokenStream as TokenStream2},
quote::{quote, ToTokens, TokenStreamExt},
syn::{
ext::IdentExt,
parse::{Parse, ParseStream},
token, Error, Expr, ExprClosure, Ident, LitBool, LitStr, Path, Result, Token,
},
};
pub struct Fragment {}
impl Parse for Fragment {
fn parse(input: ParseStream) -> Result<Self> {
todo!()
}
}
impl ToTokens for Fragment {
fn to_tokens(&self, tokens: &mut TokenStream2) {
todo!()
}
}

View file

@ -0,0 +1,104 @@
//! Parse the root tokens in the rsx!{} macro
//! =========================================
//!
//! This parsing path emerges directly from the macro call, with `RsxRender` being the primary entry point into parsing.
//! This feature must support:
//! - [x] Optionally rendering if the `in XYZ` pattern is present
//! - [x] Fragments as top-level element (through ambiguous)
//! - [x] Components as top-level element (through ambiguous)
//! - [x] Tags as top-level elements (through ambiguous)
//! - [x] Good errors if parsing fails
//!
//! Most rsx! errors will occur when people are first learning the macro, so the early error messages must be really helpful.
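//!
//! For reference, the two entry forms this parser accepts look roughly like:
//!
//! ```ignore
//! // Plain form: produces a LazyNodes wrapper
//! let nodes = rsx! {
//!     div { "hello" }
//! };
//!
//! // `in ctx` form: renders directly through the provided context
//! rsx! { in ctx,
//!     div { "hello" }
//! }
//! ```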
mod ambiguous;
mod component;
mod element;
mod fragment;
mod node;
// Re-export the namespaces into each other
pub use ambiguous::*;
pub use component::*;
pub use element::*;
pub use fragment::*;
pub use node::*;
use crate::util::is_valid_tag;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens};
use syn::{
parse::{Parse, ParseStream},
Error, Ident, LitStr, Result, Token,
};
pub struct RsxRender {
custom_context: Option<Ident>,
root: AmbiguousElement,
}
impl Parse for RsxRender {
fn parse(input: ParseStream) -> Result<Self> {
if input.peek(LitStr) {
return input.parse::<LitStr>()?.parse::<RsxRender>();
}
// try to parse the first ident and comma
let custom_context =
if input.peek(Token![in]) && input.peek2(Ident) && input.peek3(Token![,]) {
let _ = input.parse::<Token![in]>()?;
let name = input.parse::<Ident>()?;
if is_valid_tag(&name.to_string()) {
return Err(Error::new(
input.span(),
"Custom context cannot be an html element name",
));
} else {
input.parse::<Token![,]>().unwrap();
Some(name)
}
} else {
None
};
let root = { input.parse::<AmbiguousElement>() }?;
if !input.is_empty() {
return Err(Error::new(
input.span(),
"Currently only one element is allowed per component. Try wrapping your list of components in a `Fragment` tag",
));
}
Ok(Self {
root,
custom_context,
})
}
}
impl ToTokens for RsxRender {
fn to_tokens(&self, out_tokens: &mut TokenStream2) {
let inner = &self.root;
let output = match &self.custom_context {
// The `in ctx` pattern allows rendering directly into the provided context
Some(ident) => {
quote! {
#ident.render(dioxus::prelude::LazyNodes::new(move |__ctx|{
let bump = &__ctx.bump();
#inner
}))
}
}
// Otherwise we just build the LazyNode wrapper
None => {
quote! {
dioxus::prelude::LazyNodes::new(move |__ctx|{
let bump = &__ctx.bump();
#inner
})
}
}
};
output.to_tokens(out_tokens)
}
}

View file

@ -0,0 +1,73 @@
use super::*;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{
parse::{Parse, ParseStream},
token, Expr, LitStr, Result,
};
// ==============================================
// Parse any child node: an element, a text node, or a raw expression
// ==============================================
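// A child node can take roughly three shapes (illustrative sketch; `some_expr` is hypothetical):
//   div {}        -> Node::Element (via AmbiguousElement)
//   "some text"   -> Node::Text
//   {some_expr()} -> Node::RawExpr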
pub enum Node {
Element(AmbiguousElement),
Text(TextNode),
RawExpr(Expr),
}
impl ToTokens for Node {
fn to_tokens(&self, tokens: &mut TokenStream2) {
match &self {
Node::Element(el) => el.to_tokens(tokens),
Node::Text(txt) => txt.to_tokens(tokens),
Node::RawExpr(exp) => exp.to_tokens(tokens),
}
}
}
impl Parse for Node {
fn parse(stream: ParseStream) -> Result<Self> {
// Supposedly this approach is discouraged due to inability to return proper errors
// TODO: Rework this to provide more informative errors
if stream.peek(token::Brace) {
let content;
syn::braced!(content in stream);
return Ok(Node::RawExpr(content.parse::<Expr>()?));
}
if stream.peek(LitStr) {
return Ok(Node::Text(stream.parse::<TextNode>()?));
}
Ok(Node::Element(stream.parse::<AmbiguousElement>()?))
}
}
// =======================================
// Parse just plain text
// =======================================
pub struct TextNode(LitStr);
impl Parse for TextNode {
fn parse(s: ParseStream) -> Result<Self> {
Ok(Self(s.parse()?))
}
}
impl ToTokens for TextNode {
fn to_tokens(&self, tokens: &mut TokenStream2) {
// todo: use heuristics to see if we can promote to &'static str
let token_stream = &self.0.to_token_stream();
tokens.append_all(quote! {
{
// use bumpalo::core_alloc::fmt::Write;
// let mut s = bumpalo::collections::String::new_in(bump);
// s.write_fmt(format_args_f!(#token_stream)).unwrap();
dioxus::builder::text3(bump, format_args_f!(#token_stream))
// dioxus::builder::text2(s)
}
});
}
}

View file

@ -1,588 +0,0 @@
use syn::parse::{discouraged::Speculative, ParseBuffer};
use crate::util::is_valid_tag;
use {
proc_macro::TokenStream,
proc_macro2::{Span, TokenStream as TokenStream2},
quote::{quote, ToTokens, TokenStreamExt},
syn::{
ext::IdentExt,
parse::{Parse, ParseStream},
token, Error, Expr, ExprClosure, Ident, LitBool, LitStr, Path, Result, Token,
},
};
// ==============================================
// Parse any stream coming from the rsx! macro
// ==============================================
pub struct RsxRender {
custom_context: Option<Ident>,
root: AmbiguousElement,
}
impl Parse for RsxRender {
fn parse(input: ParseStream) -> Result<Self> {
if input.peek(LitStr) {
return input.parse::<LitStr>()?.parse::<RsxRender>();
}
// try to parse the first ident and comma
let custom_context =
if input.peek(Token![in]) && input.peek2(Ident) && input.peek3(Token![,]) {
let _ = input.parse::<Token![in]>()?;
let name = input.parse::<Ident>()?;
if is_valid_tag(&name.to_string()) {
return Err(Error::new(
input.span(),
"Custom context cannot be an html element name",
));
} else {
input.parse::<Token![,]>().unwrap();
Some(name)
}
} else {
None
};
let root = { input.parse::<AmbiguousElement>() }?;
if !input.is_empty() {
return Err(Error::new(
input.span(),
"Currently only one element is allowed per component",
));
}
Ok(Self {
root,
custom_context,
})
}
}
impl ToTokens for RsxRender {
fn to_tokens(&self, out_tokens: &mut TokenStream2) {
// create a lazy tree that accepts a bump allocator
// Currently disabled
let inner = &self.root;
let output = match &self.custom_context {
Some(ident) => {
//
quote! {
#ident.render(dioxus::prelude::LazyNodes::new(move |__ctx|{
let bump = &__ctx.bump();
#inner
}))
}
}
None => {
quote! {
dioxus::prelude::LazyNodes::new(move |__ctx|{
let bump = &__ctx.bump();
#inner
})
}
}
};
output.to_tokens(out_tokens)
}
}
enum AmbiguousElement {
Element(Element),
Component(Component),
}
impl Parse for AmbiguousElement {
fn parse(input: ParseStream) -> Result<Self> {
// Try to parse as an absolute path and immediately defer to the component parser
if input.peek(Token![::]) {
return input
.parse::<Component>()
.map(|c| AmbiguousElement::Component(c));
}
// If not an absolute path, then parse the ident and check if it's a valid tag
if let Ok(pat) = input.fork().parse::<syn::Path>() {
if pat.segments.len() > 1 {
return input
.parse::<Component>()
.map(|c| AmbiguousElement::Component(c));
}
}
if let Ok(name) = input.fork().parse::<Ident>() {
let name_str = name.to_string();
match is_valid_tag(&name_str) {
true => input
.parse::<Element>()
.map(|c| AmbiguousElement::Element(c)),
false => {
let first_char = name_str.chars().next().unwrap();
if first_char.is_ascii_uppercase() {
input
.parse::<Component>()
.map(|c| AmbiguousElement::Component(c))
} else {
let name = input.parse::<Ident>().unwrap();
Err(Error::new(
name.span(),
"Components must be uppercased, perhaps you mispelled a html tag",
))
}
}
}
} else {
if input.peek(LitStr) {
panic!("it's actually a litstr");
}
Err(Error::new(input.span(), "Not a valid Html tag"))
}
}
}
impl ToTokens for AmbiguousElement {
fn to_tokens(&self, tokens: &mut TokenStream2) {
match self {
AmbiguousElement::Element(el) => el.to_tokens(tokens),
AmbiguousElement::Component(comp) => comp.to_tokens(tokens),
}
}
}
// ==============================================
// Parse any div {} as a VElement
// ==============================================
enum Node {
Element(AmbiguousElement),
Text(TextNode),
RawExpr(Expr),
}
impl ToTokens for &Node {
fn to_tokens(&self, tokens: &mut TokenStream2) {
match &self {
Node::Element(el) => el.to_tokens(tokens),
Node::Text(txt) => txt.to_tokens(tokens),
Node::RawExpr(exp) => exp.to_tokens(tokens),
}
}
}
impl Parse for Node {
fn parse(stream: ParseStream) -> Result<Self> {
// Supposedly this approach is discouraged due to inability to return proper errors
// TODO: Rework this to provide more informative errors
if stream.peek(token::Brace) {
let content;
syn::braced!(content in stream);
return Ok(Node::RawExpr(content.parse::<Expr>()?));
}
if stream.peek(LitStr) {
return Ok(Node::Text(stream.parse::<TextNode>()?));
}
Ok(Node::Element(stream.parse::<AmbiguousElement>()?))
}
}
struct Component {
// accept any path-like argument
name: syn::Path,
body: Vec<ComponentField>,
children: Vec<Node>,
}
impl Parse for Component {
fn parse(s: ParseStream) -> Result<Self> {
// let name = s.parse::<syn::ExprPath>()?;
// todo: look into somehow getting the crate/super/etc
let name = syn::Path::parse_mod_style(s)?;
// parse the guts
let content: ParseBuffer;
syn::braced!(content in s);
let mut body: Vec<ComponentField> = Vec::new();
let _children: Vec<Node> = Vec::new();
'parsing: loop {
// [1] Break if empty
if content.is_empty() {
break 'parsing;
}
if content.peek(token::Brace) {
let inner: ParseBuffer;
syn::braced!(inner in content);
if inner.peek(Token![...]) {
todo!("Inline props not yet supported");
}
}
body.push(content.parse::<ComponentField>()?);
// consume comma if it exists
// we don't actually care if there *are* commas between attrs
if content.peek(Token![,]) {
let _ = content.parse::<Token![,]>();
}
}
// todo: add support for children
let children: Vec<Node> = vec![];
Ok(Self {
name,
body,
children,
})
}
}
impl ToTokens for &Component {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let name = &self.name;
let mut builder = quote! {
fc_to_builder(#name)
};
let mut has_key = None;
for field in &self.body {
if field.name.to_string() == "key" {
has_key = Some(field);
} else {
builder.append_all(quote! {#field});
}
}
builder.append_all(quote! {
.build()
});
let key_token = match has_key {
Some(field) => {
let inners = field.content.to_token_stream();
quote! {
Some(#inners)
}
}
None => quote! {None},
};
let _toks = tokens.append_all(quote! {
dioxus::builder::virtual_child(__ctx, #name, #builder, #key_token)
});
}
}
// the struct's fields info
pub struct ComponentField {
name: Ident,
content: Expr,
}
impl Parse for ComponentField {
fn parse(input: ParseStream) -> Result<Self> {
let name = Ident::parse_any(input)?;
input.parse::<Token![:]>()?;
let content = input.parse()?;
Ok(Self { name, content })
}
}
impl ToTokens for &ComponentField {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let ComponentField { name, content, .. } = self;
tokens.append_all(quote! {
.#name(#content)
})
}
}
// =======================================
// Parse the VNode::Element type
// =======================================
struct Element {
name: Ident,
attrs: Vec<ElementAttr>,
children: Vec<Node>,
}
impl Parse for Element {
fn parse(stream: ParseStream) -> Result<Self> {
//
let name = Ident::parse(stream)?;
if !crate::util::is_valid_tag(&name.to_string()) {
return Err(Error::new(name.span(), "Not a valid Html tag"));
}
// parse the guts
let content: ParseBuffer;
syn::braced!(content in stream);
let mut attrs: Vec<ElementAttr> = vec![];
let mut children: Vec<Node> = vec![];
'parsing: loop {
// [1] Break if empty
if content.is_empty() {
break 'parsing;
}
let forked = content.fork();
if forked.call(Ident::parse_any).is_ok()
&& forked.parse::<Token![:]>().is_ok()
&& forked.parse::<Token![:]>().is_err()
{
attrs.push(content.parse::<ElementAttr>()?);
} else {
children.push(content.parse::<Node>()?);
}
// consume comma if it exists
// we don't actually care if there *are* commas after elements/text
if content.peek(Token![,]) {
let _ = content.parse::<Token![,]>();
}
}
Ok(Self {
name,
attrs,
children,
})
}
}
impl ToTokens for &Element {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let name = &self.name.to_string();
tokens.append_all(quote! {
dioxus::builder::ElementBuilder::new(__ctx, #name)
});
for attr in self.attrs.iter() {
attr.to_tokens(tokens);
}
let mut children = self.children.iter();
while let Some(child) = children.next() {
let inner_toks = child.to_token_stream();
tokens.append_all(quote! {
.iter_child(#inner_toks)
})
}
tokens.append_all(quote! {
.finish()
});
}
}
/// =======================================
/// Parse a VElement's Attributes
/// =======================================
struct ElementAttr {
name: Ident,
ty: AttrType,
}
enum AttrType {
BumpText(LitStr),
FieldTokens(Expr),
EventTokens(Expr),
Event(ExprClosure),
}
impl Parse for ElementAttr {
fn parse(s: ParseStream) -> Result<Self> {
let mut name = Ident::parse_any(s)?;
let name_str = name.to_string();
s.parse::<Token![:]>()?;
// Check if this is an event handler
// If so, parse into literal tokens
let ty = if name_str.starts_with("on") {
// remove the "on" bit
name = Ident::new(&name_str.trim_start_matches("on"), name.span());
if s.peek(token::Brace) {
let content;
syn::braced!(content in s);
// Try to parse directly as a closure
let fork = content.fork();
if let Ok(event) = fork.parse::<ExprClosure>() {
content.advance_to(&fork);
AttrType::Event(event)
} else {
AttrType::EventTokens(content.parse()?)
}
} else {
AttrType::Event(s.parse()?)
}
} else {
match name_str.as_str() {
"style" => {
//
todo!("inline style not yet supported")
}
"classes" => {
//
todo!("custom class lsit not supported")
}
"namespace" => {
//
todo!("custom namespace not supported")
}
"ref" => {
//
todo!("custom ref not supported")
}
_ => {
if s.peek(LitStr) {
let rawtext = s.parse::<LitStr>().unwrap();
AttrType::BumpText(rawtext)
} else {
let toks = s.parse::<Expr>()?;
AttrType::FieldTokens(toks)
}
}
}
// let lit_str = if name_str == "style" && s.peek(token::Brace) {
// // special-case to deal with literal styles.
// let outer;
// syn::braced!(outer in s);
// // double brace for inline style.
// // todo!("Style support not ready yet");
// // if outer.peek(token::Brace) {
// // let inner;
// // syn::braced!(inner in outer);
// // let styles: Styles = inner.parse()?;
// // MaybeExpr::Literal(LitStr::new(&styles.to_string(), Span::call_site()))
// // } else {
// // just parse as an expression
// outer.parse()?
// // }
// } else {
// s.parse()?
// };
};
// consume comma if it exists
// we don't actually care if there *are* commas between attrs
if s.peek(Token![,]) {
let _ = s.parse::<Token![,]>();
}
Ok(ElementAttr { name, ty })
}
}
impl ToTokens for &ElementAttr {
fn to_tokens(&self, tokens: &mut TokenStream2) {
let name = self.name.to_string();
let nameident = &self.name;
let _attr_stream = TokenStream2::new();
match &self.ty {
AttrType::BumpText(value) => {
tokens.append_all(quote! {
.attr(#name, format_args_f!(#value))
});
}
AttrType::Event(event) => {
tokens.append_all(quote! {
.add_listener(dioxus::events::on::#nameident(__ctx, #event))
});
}
AttrType::FieldTokens(exp) => {
tokens.append_all(quote! {
.attr(#name, #exp)
});
}
AttrType::EventTokens(event) => {
//
tokens.append_all(quote! {
.add_listener(dioxus::events::on::#nameident(__ctx, #event))
})
}
}
}
}
// =======================================
// Parse just plain text
// =======================================
struct TextNode(LitStr);
impl Parse for TextNode {
fn parse(s: ParseStream) -> Result<Self> {
Ok(Self(s.parse()?))
}
}
impl ToTokens for TextNode {
fn to_tokens(&self, tokens: &mut TokenStream2) {
// todo: use heuristics to see if we can promote to &'static str
let token_stream = &self.0.to_token_stream();
tokens.append_all(quote! {
{
// use bumpalo::core_alloc::fmt::Write;
// let mut s = bumpalo::collections::String::new_in(bump);
// s.write_fmt(format_args_f!(#token_stream)).unwrap();
dioxus::builder::text3(bump, format_args_f!(#token_stream))
// dioxus::builder::text2(s)
}
});
}
}
fn try_parse_bracketed(stream: &ParseBuffer) -> Result<Expr> {
let content;
syn::braced!(content in stream);
content.parse()
}
// // Used to uniquely identify elements that contain closures so that the DomUpdater can
// // look them up by their unique id.
// // When the DomUpdater sees that the element no longer exists it will drop all of it's
// // Rc'd Closures for those events.
// // It doesn't quite make sense to keep this here, perhaps just in the html crate?
// // Dioxus itself shouldn't be concerned with the attribute names
// // a ftk!
// static SELF_CLOSING_TAGS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
// [
// "area", "base", "br", "col", "hr", "img", "input", "link", "meta", "param", "command",
// "keygen", "source",
// ]
// .iter()
// .cloned()
// .collect()
// });
// /// Whether or not this tag is self closing
// ///
// /// ```ignore
// /// use dioxus_core::validation::is_self_closing;
// /// assert_eq!(is_self_closing("br"), true);
// /// assert_eq!(is_self_closing("div"), false);
// /// ```
// pub fn is_self_closing(tag: &str) -> bool {
// SELF_CLOSING_TAGS.contains(tag)
// // SELF_CLOSING_TAGS.contains(tag) || is_self_closing_svg_tag(tag)
// }

View file

@ -1,4 +1,4 @@
use crate::{rsxt::RsxRender, util::is_valid_svg_tag};
use crate::{rsx::RsxRender, util::is_valid_svg_tag};
use {
proc_macro::TokenStream,
@ -35,7 +35,7 @@ impl Parse for RsxTemplate {
let lit = LitStr::new(&value, lit.span());
// panic!("{:#?}", lit);
match lit.parse::<crate::rsxt::RsxRender>() {
match lit.parse::<crate::rsx::RsxRender>() {
Ok(r) => Ok(Self { inner: r }),
Err(e) => Err(e),
}

View file

@ -1,6 +1,13 @@
// use lazy_static::lazy_static;
use once_cell::sync::Lazy;
use std::collections::hash_set::HashSet;
use syn::{parse::ParseBuffer, Expr};
pub fn try_parse_bracketed(stream: &ParseBuffer) -> syn::Result<Expr> {
let content;
syn::braced!(content in stream);
content.parse()
}
/// rsx! and html! macros support the html namespace as well as svg namespace
static HTML_TAGS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
@ -134,6 +141,18 @@ static SVG_TAGS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
.collect()
});
// These tags are reserved by dioxus for various reasons.
// They might not all be used.
static RESERVED_TAGS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
[
// a fragment
"fragment",
]
.iter()
.cloned()
.collect()
});
/// Whether or not this tag is valid
///
/// ```
@ -144,7 +163,7 @@ static SVG_TAGS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
/// assert_eq!(is_valid_tag("random"), false);
/// ```
pub fn is_valid_tag(tag: &str) -> bool {
is_valid_html_tag(tag) || is_valid_svg_tag(tag)
is_valid_html_tag(tag) || is_valid_svg_tag(tag) || is_valid_reserved_tag(tag)
}
pub fn is_valid_html_tag(tag: &str) -> bool {
@ -154,3 +173,7 @@ pub fn is_valid_html_tag(tag: &str) -> bool {
pub fn is_valid_svg_tag(tag: &str) -> bool {
SVG_TAGS.contains(tag)
}
pub fn is_valid_reserved_tag(tag: &str) -> bool {
RESERVED_TAGS.contains(tag)
}

View file

@ -0,0 +1,17 @@
use dioxus_core::prelude::*;
fn main() {
let g = rsx! {
Fragment {
// div {}
// div {}
// div {}
// div {}
// div {}
// div {}
// div {}
// div {}
// div {}
}
};
}

View file

@ -9,7 +9,7 @@ use crate::innerlude::FC;
pub type ScopeIdx = generational_arena::Index;
pub trait Properties: PartialEq + 'static {
pub trait Properties: PartialEq {
type Builder;
fn builder() -> Self::Builder;
}

View file

@ -194,6 +194,12 @@ impl<'a> DiffMachine<'a> {
(VNode::Suspended, _) | (_, VNode::Suspended) => {
todo!("Suspended components not currently available")
}
(VNode::Fragment(_), VNode::Fragment(_)) => {
todo!("Fragments not currently supported in diffing")
}
(_, VNode::Fragment(_)) => todo!("Fragments not currently supported in diffing"),
(VNode::Fragment(_), _) => todo!("Fragments not currently supported in diffing"),
}
}
@ -276,6 +282,10 @@ impl<'a> DiffMachine<'a> {
VNode::Suspended => {
todo!("Creation of VNode::Suspended not yet supported")
}
VNode::Fragment(frag) => {
//
todo!("Cannot current create fragments")
}
}
}

View file

@ -20,12 +20,6 @@ mod use_state_def {
rc::Rc,
};
struct UseState<T: 'static> {
new_val: Rc<RefCell<Option<T>>>,
current_val: T,
caller: Rc<dyn Fn(T) + 'static>,
}
/// Store state between component renders!
/// When called, this hook retrieves a stored value and provides a setter to update that value.
/// When the setter is called, the component is re-run with the new value.
@ -48,10 +42,16 @@ mod use_state_def {
/// }
/// }
/// ```
pub fn use_state<'a, 'c, T: 'static, F: FnOnce() -> T, P: 'static>(
ctx: &'c Context<'a, P>,
pub fn use_state<'a, 'c, T: 'static, F: FnOnce() -> T, P: 'a>(
ctx: Context<'a, P>,
initial_state_fn: F,
) -> (&'a T, &'a Rc<dyn Fn(T)>) {
struct UseState<T: 'static> {
new_val: Rc<RefCell<Option<T>>>,
current_val: T,
caller: Rc<dyn Fn(T) + 'static>,
}
ctx.use_hook(
move || UseState {
new_val: Rc::new(RefCell::new(None)),
@ -170,7 +170,7 @@ mod new_use_state_def {
/// }
/// ```
pub fn use_state_new<'a, 'c, T: 'static, F: FnOnce() -> T, P: 'static>(
ctx: &'c Context<'a, P>,
ctx: Context<'a, P>,
initial_state_fn: F,
) -> &'a UseState<T> {
ctx.use_hook(
@ -271,7 +271,7 @@ mod use_reducer_def {
/// This behaves almost exactly the same way as React's "use_state".
///
pub fn use_reducer<'a, 'c, State: 'static, Action: 'static, P: 'static>(
ctx: &'c Context<'a, P>,
ctx: Context<'a, P>,
initial_state_fn: impl FnOnce() -> State,
_reducer: impl Fn(&mut State, Action),
) -> (&'a State, &'a Box<dyn Fn(Action)>) {

View file

@ -580,6 +580,7 @@ where
std::iter::once(self)
}
}
impl<'a> IntoVNode<'a> for () {
fn into_vnode(self, ctx: &NodeCtx<'a>) -> VNode<'a> {
VNode::Suspended

View file

@ -8,7 +8,13 @@ use crate::{
innerlude::{Context, Properties, Scope, ScopeIdx, FC},
};
use bumpalo::Bump;
use std::{any::Any, cell::RefCell, fmt::Debug, marker::PhantomData, rc::Rc};
use std::{
any::Any,
cell::RefCell,
fmt::{Arguments, Debug},
marker::PhantomData,
rc::Rc,
};
/// Tools for the base unit of the virtual dom - the VNode
/// VNodes are intended to be quickly-allocated, lightweight enum values.
@ -22,6 +28,8 @@ pub enum VNode<'src> {
/// A text node (node type `TEXT_NODE`).
Text(VText<'src>),
Fragment(&'src VFragment<'src>),
/// A "suspended component"
/// This is a masquerade over an underlying future that needs to complete
/// When the future is completed, the VNode will then trigger a render
@ -37,6 +45,7 @@ impl<'a> Clone for VNode<'a> {
match self {
VNode::Element(el) => VNode::Element(el),
VNode::Text(origi) => VNode::Text(VText { text: origi.text }),
VNode::Fragment(frag) => VNode::Fragment(frag),
VNode::Suspended => VNode::Suspended,
VNode::Component(c) => VNode::Component(c),
}
@ -77,6 +86,10 @@ impl<'a> VNode<'a> {
VNode::Text(VText { text })
}
pub fn text_args(b: &'a Bump, f: Arguments) -> VNode<'a> {
todo!()
}
#[inline]
pub(crate) fn key(&self) -> NodeKey {
match &self {
@ -85,9 +98,7 @@ impl<'a> VNode<'a> {
VNode::Suspended => {
todo!()
}
// Self::PhantomChild { id } => {
// todo!()
// }
VNode::Fragment(frag) => frag.key,
VNode::Component(c) => c.key,
}
}
@ -315,3 +326,8 @@ impl<'a> VComponent<'a> {
}
}
}
pub struct VFragment<'src> {
pub key: NodeKey<'src>,
pub children: &'src [VNode<'src>],
}

View file

@ -725,8 +725,10 @@ impl<'src, T> Context<'src, T> {
}
/// Create a subscription that schedules a future render for the referenced component
pub fn schedule_update(&self) -> impl Fn() -> () + 'static {
self.scope.event_queue.schedule_update(&self.scope)
pub fn schedule_update(&self) -> Rc<dyn Fn() + 'static> {
todo!()
// pub fn schedule_update(self) -> impl Fn() + 'static {
// self.scope.event_queue.schedule_update(&self.scope)
}
// /// Create a suspended component from a future.
@ -765,6 +767,7 @@ impl<'src, T> Context<'src, T> {
self,
// &'a/ self,
lazy_nodes: LazyNodes<'src, F>,
// lazy_nodes: LazyNodes<'src, F>,
) -> VNode<'src> {
let ctx = NodeCtx {
scope_ref: self.scope,
@ -961,6 +964,10 @@ impl<'src, P> Context<'src, P> {
}
}
pub trait OpaqueScope {
fn get_scope(&self) -> &Scope;
}
// ==================================================================================
// Supporting structs for the above abstractions
// ==================================================================================

View file

@ -0,0 +1,3 @@
{
"rust-analyzer.inlayHints.enable": false
}

View file

@ -8,3 +8,6 @@ edition = "2018"
[dependencies]
dioxus-ssr = { path = "../ssr" }
pulldown-cmark = "0.8.0"
recoil = { path = "../recoil" }
codeblocks = { path = "../../../ecosystem-dioxus/syntec-dioxus/" }

View file

@ -1,13 +1,97 @@
#![allow(non_upper_case_globals)]
use dioxus_ssr::{
prelude::*,
prelude::{builder::IntoVNode, dioxus::events::on::MouseEvent},
TextRenderer,
};
mod utils;
fn main() {
TextRenderer::new(App);
let renderer = TextRenderer::new(App);
}
fn App(ctx: Context<()>) -> VNode {
todo!()
}
static App: FC<()> = |ctx| {
rsx! { in ctx,
div {
Home {}
Docs {}
Tutorial {}
Blog {}
Community {}
}
}
};
const HeroContent: [(&'static str, &'static str); 3] = [
("Declarative",
"React makes it painless to create interactive UIs. Design simple views for each state in your application, and React will efficiently update and render just the right components when your data changes.\nDeclarative views make your code more predictable and easier to debug."),
("Component-Based", "Build encapsulated components that manage their own state, then compose them to make complex UIs.\nSince component logic is written in JavaScript instead of templates, you can easily pass rich data through your app and keep state out of the DOM."),
("Learn Once, Write Anywhere", "We dont make assumptions about the rest of your technology stack, so you can develop new features in React without rewriting existing code.\nReact can also render on the server using Node and power mobile apps using React Native."),
];
const SnippetHighlights: &'static str = include_str!("./snippets.md");
static Home: FC<()> = |ctx| {
let hero = HeroContent.iter().map(|(title, body)| {
rsx! {
div {
h3 { "{title}" }
div { {body.split("\n").map(|paragraph| rsx!( p{"{paragraph}"} ))} }
}
}
});
let snippets: Vec<VNode> = utils::markdown_to_snippet(ctx, SnippetHighlights);
rsx! { in ctx,
div {
header {
// Hero
section {
div { {hero} }
}
hr {}
// Highlighted Snippets
section {
{snippets}
}
}
div {}
section {}
}
}
};
static Docs: FC<()> = |ctx| {
rsx! { in ctx,
div {
}
}
};
static Tutorial: FC<()> = |ctx| {
rsx! { in ctx,
div {
}
}
};
static Blog: FC<()> = |ctx| {
rsx! { in ctx,
div {
}
}
};
static Community: FC<()> = |ctx| {
rsx! { in ctx,
div {
}
}
};

View file

@ -11,7 +11,7 @@ static HelloMessage: FC<Props> = |ctx| {
}
```
# Any syntax you like
# Two syntaxes: html! and rsx!
Choose from a close-to-html syntax or the standard rsx! syntax
@ -43,7 +43,7 @@ static HelloMessage: FC<()> = |ctx| {
};
ctx.render(rsx!{
h1 {"{title}"}
h1 { "{title}" }
button { "tick"
onclick: move |_| set_color(match color {
Green => Yellow,

View file

@ -0,0 +1,31 @@
use dioxus_ssr::prelude::{Context, VNode};
// Parse a markdown snippet into a list of VNodes
pub fn markdown_to_snippet<'a, P>(ctx: Context<'a, P>, text: &str) -> Vec<VNode<'a>> {
let snips = Vec::new();
use pulldown_cmark::{Options, Parser};
let mut options = Options::empty();
let mut parser = Parser::new_ext(text, options);
while let Some(evt) = parser.next() {
match evt {
pulldown_cmark::Event::Start(tag) => {
// take until the end
let r = parser.next();
}
// push a p{} tag with the contents
pulldown_cmark::Event::Text(text) => todo!(),
// Code delineates an end
pulldown_cmark::Event::Code(code) => todo!(),
// not supported
pulldown_cmark::Event::Html(ht) => {}
_ => {}
}
}
//
snips
}

View file

@ -428,7 +428,7 @@ mod hooks {
let update = ctx.schedule_update();
let val = api.try_get_raw(readable).unwrap();
let id = api.subscribe(readable, Rc::new(update));
let id = api.subscribe(readable, update);
ReadHook {
value: val,
consumer_id: id,