feat: RSX parser with recovery after errors, and unquoted text (#1054)

* feat: Upgrade to a new local version of syn-rsx

* chore: Make the macro more IDE-friendly

1. Add quoting to the RawText node.
2. Replace the vec! macro with [].to_vec() (see the sketch below).
Cons:
1. Temporarily remove allow(unused_braces) from expressions, to allow completion after a dot in rust-analyzer.
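
A minimal, hypothetical illustration of point 2 (not code from this PR): both forms build the same Vec, but the generated code now uses an array literal plus .to_vec() instead of the vec! macro, which the commit describes as friendlier to rust-analyzer inside macro expansions.

    // Illustration only: both forms build identical Vecs; the macro output
    // in this PR switches from the first form to the second.
    fn main() {
        let with_macro = vec!["a", "b"]; // old form: vec! macro
        let with_to_vec = ["a", "b"].to_vec(); // new form: array literal + to_vec()
        assert_eq!(with_macro, with_to_vec);
    }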

* chore: Change dependency from syn-rsx to rstml

* chore: Fix value_to_string usage, PR comments, and fmt.
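
For reference, the recoverable-parsing flow that view! switches to (visible in the lib.rs hunk further down) looks roughly like the sketch below. This is a simplified standalone version: render_view is stubbed out here, and the real macro also threads through the scope identifier, rendering mode, and global class.

    use proc_macro2::TokenStream;
    use quote::quote;
    use rstml::node::Node;

    // Stub for illustration only; the real render_view in this PR takes more context.
    fn render_view(nodes: &[Node]) -> TokenStream {
        let _ = nodes;
        quote! { () }
    }

    // Parse with recovery: keep going after syntax errors, then re-emit the
    // collected diagnostics next to the rendered output, so the expansion stays
    // mostly valid for IDEs while the compiler still reports the problems.
    fn expand(tokens: TokenStream) -> TokenStream {
        let config = rstml::ParserConfig::default().recover_block(true);
        let parser = rstml::Parser::new(config);
        let (nodes, errors) = parser.parse_recoverable(tokens).split_vec();

        let errors = errors.into_iter().map(|e| e.emit_as_expr_tokens());
        let nodes_output = render_view(&nodes);

        quote! {
            {
                #(#errors;)*
                #nodes_output
            }
        }
    }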
Vladimir Motylenko 2023-05-21 13:45:53 +03:00 committed by GitHub
parent 70eb07d7d6
commit 5a71ca797a
16 changed files with 346 additions and 291 deletions

@ -11,7 +11,7 @@ readme = "../README.md"
[dependencies] [dependencies]
anyhow = "1" anyhow = "1"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
syn = { version = "1", features = [ syn = { version = "2", features = [
"full", "full",
"parsing", "parsing",
"extra-traits", "extra-traits",
@ -19,7 +19,7 @@ syn = { version = "1", features = [
"printing", "printing",
] } ] }
quote = "1" quote = "1"
syn-rsx = "0.9" rstml = "0.10.6"
proc-macro2 = { version = "1", features = ["span-locations", "nightly"] } proc-macro2 = { version = "1", features = ["span-locations", "nightly"] }
parking_lot = "0.12" parking_lot = "0.12"
walkdir = "2" walkdir = "2"

@ -76,7 +76,7 @@ impl ViewMacros {
tokens.next(); // , tokens.next(); // ,
// TODO handle class = ... // TODO handle class = ...
let rsx = let rsx =
syn_rsx::parse2(tokens.collect::<proc_macro2::TokenStream>())?; rstml::parse2(tokens.collect::<proc_macro2::TokenStream>())?;
let template = LNode::parse_view(rsx)?; let template = LNode::parse_view(rsx)?;
views.push(MacroInvocation { id, template }) views.push(MacroInvocation { id, template })
} }

@ -1,8 +1,8 @@
use crate::parsing::{is_component_node, value_to_string}; use crate::parsing::is_component_node;
use anyhow::Result; use anyhow::Result;
use quote::quote; use quote::ToTokens;
use rstml::node::{Node, NodeAttribute};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use syn_rsx::Node;
// A lightweight virtual DOM structure we can use to hold // A lightweight virtual DOM structure we can use to hold
// the state of a Leptos view macro template. This is because // the state of a Leptos view macro template. This is because
@ -58,36 +58,30 @@ impl LNode {
} }
} }
Node::Text(text) => { Node::Text(text) => {
if let Some(value) = value_to_string(&text.value) { views.push(LNode::Text(text.value_string()));
views.push(LNode::Text(value));
} else {
let value = text.value.as_ref();
let code = quote! { #value };
let code = code.to_string();
views.push(LNode::DynChild(code));
}
} }
Node::Block(block) => { Node::Block(block) => {
let value = block.value.as_ref(); let code = block.into_token_stream();
let code = quote! { #value };
let code = code.to_string(); let code = code.to_string();
views.push(LNode::DynChild(code)); views.push(LNode::DynChild(code));
} }
Node::Element(el) => { Node::Element(el) => {
if is_component_node(&el) { if is_component_node(&el) {
let name = el.name().to_string();
let mut children = Vec::new(); let mut children = Vec::new();
for child in el.children { for child in el.children {
LNode::parse_node(child, &mut children)?; LNode::parse_node(child, &mut children)?;
} }
views.push(LNode::Component { views.push(LNode::Component {
name: el.name.to_string(), name: name,
props: el props: el
.open_tag
.attributes .attributes
.into_iter() .into_iter()
.filter_map(|attr| match attr { .filter_map(|attr| match attr {
Node::Attribute(attr) => Some(( NodeAttribute::Attribute(attr) => Some((
attr.key.to_string(), attr.key.to_string(),
format!("{:#?}", attr.value), format!("{:#?}", attr.value()),
)), )),
_ => None, _ => None,
}) })
@ -95,15 +89,13 @@ impl LNode {
children, children,
}); });
} else { } else {
let name = el.name.to_string(); let name = el.name().to_string();
let mut attrs = Vec::new(); let mut attrs = Vec::new();
for attr in el.attributes { for attr in el.open_tag.attributes {
if let Node::Attribute(attr) = attr { if let NodeAttribute::Attribute(attr) = attr {
let name = attr.key.to_string(); let name = attr.key.to_string();
if let Some(value) = if let Some(value) = attr.value_literal_string() {
attr.value.as_ref().and_then(value_to_string)
{
attrs.push(( attrs.push((
name, name,
LAttributeValue::Static(value), LAttributeValue::Static(value),

@ -1,7 +1,37 @@
use syn_rsx::{NodeElement, NodeValueExpr}; use rstml::node::NodeElement;
pub fn value_to_string(value: &NodeValueExpr) -> Option<String> { ///
match &value.as_ref() { /// Converts `syn::Block` to simple expression
///
/// For example:
/// ```no_build
/// // "string literal" in
/// {"string literal"}
/// // number literal
/// {0x12}
/// // boolean literal
/// {true}
/// // variable
/// {path::x}
/// ```
pub fn block_to_primitive_expression(block: &syn::Block) -> Option<&syn::Expr> {
// its empty block, or block with multi lines
if block.stmts.len() != 1 {
return None;
}
match &block.stmts[0] {
syn::Stmt::Expr(e, None) => return Some(&e),
_ => {}
}
None
}
/// Converts simple literals to its string representation.
///
/// This function doesn't convert literal wrapped inside block
/// like: `{"string"}`.
pub fn value_to_string(value: &syn::Expr) -> Option<String> {
match &value {
syn::Expr::Lit(lit) => match &lit.lit { syn::Expr::Lit(lit) => match &lit.lit {
syn::Lit::Str(s) => Some(s.value()), syn::Lit::Str(s) => Some(s.value()),
syn::Lit::Char(c) => Some(c.value().to_string()), syn::Lit::Char(c) => Some(c.value().to_string()),
@ -14,7 +44,7 @@ pub fn value_to_string(value: &NodeValueExpr) -> Option<String> {
} }
pub fn is_component_node(node: &NodeElement) -> bool { pub fn is_component_node(node: &NodeElement) -> bool {
node.name node.name()
.to_string() .to_string()
.starts_with(|c: char| c.is_ascii_uppercase()) .starts_with(|c: char| c.is_ascii_uppercase())
} }

@ -12,16 +12,16 @@ readme = "../README.md"
proc-macro = true proc-macro = true
[dependencies] [dependencies]
attribute-derive = { version = "0.5", features = ["syn-full"] } attribute-derive = { version = "0.6", features = ["syn-full"] }
cfg-if = "1" cfg-if = "1"
html-escape = "0.2" html-escape = "0.2"
itertools = "0.10" itertools = "0.10"
prettyplease = "0.1" prettyplease = "0.2.4"
proc-macro-error = "1" proc-macro-error = "1"
proc-macro2 = "1" proc-macro2 = "1"
quote = "1" quote = "1"
syn = { version = "1", features = ["full"] } syn = { version = "2", features = ["full"] }
syn-rsx = "0.9" rstml = "0.10.6"
leptos_hot_reload = { workspace = true } leptos_hot_reload = { workspace = true }
server_fn_macro = { workspace = true } server_fn_macro = { workspace = true }
convert_case = "0.6.0" convert_case = "0.6.0"

@ -4,15 +4,15 @@ use convert_case::{
Casing, Casing,
}; };
use itertools::Itertools; use itertools::Itertools;
use leptos_hot_reload::parsing::value_to_string;
use proc_macro2::{Ident, Span, TokenStream}; use proc_macro2::{Ident, Span, TokenStream};
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt}; use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
use syn::{ use syn::{
parse::Parse, parse_quote, spanned::Spanned, parse::Parse, parse_quote, spanned::Spanned,
AngleBracketedGenericArguments, Attribute, FnArg, GenericArgument, Item, AngleBracketedGenericArguments, Attribute, FnArg, GenericArgument, Item,
ItemFn, Lit, LitStr, Meta, MetaNameValue, Pat, PatIdent, Path, ItemFn, LitStr, Meta, Pat, PatIdent, Path, PathArguments, ReturnType, Stmt,
PathArguments, ReturnType, Stmt, Type, TypePath, Visibility, Type, TypePath, Visibility,
}; };
pub struct Model { pub struct Model {
is_transparent: bool, is_transparent: bool,
docs: Docs, docs: Docs,
@ -56,14 +56,17 @@ impl Parse for Model {
// We need to remove the `#[doc = ""]` and `#[builder(_)]` // We need to remove the `#[doc = ""]` and `#[builder(_)]`
// attrs from the function signature // attrs from the function signature
drain_filter(&mut item.attrs, |attr| { drain_filter(&mut item.attrs, |attr| match &attr.meta {
attr.path == parse_quote!(doc) || attr.path == parse_quote!(prop) Meta::NameValue(attr) => attr.path == parse_quote!(doc),
Meta::List(attr) => attr.path == parse_quote!(prop),
_ => false,
}); });
item.sig.inputs.iter_mut().for_each(|arg| { item.sig.inputs.iter_mut().for_each(|arg| {
if let FnArg::Typed(ty) = arg { if let FnArg::Typed(ty) = arg {
drain_filter(&mut ty.attrs, |attr| { drain_filter(&mut ty.attrs, |attr| match &attr.meta {
attr.path == parse_quote!(doc) Meta::NameValue(attr) => attr.path == parse_quote!(doc),
|| attr.path == parse_quote!(prop) Meta::List(attr) => attr.path == parse_quote!(prop),
_ => false,
}); });
} }
}); });
@ -400,12 +403,20 @@ impl Docs {
let mut attrs = attrs let mut attrs = attrs
.iter() .iter()
.filter_map(|attr| attr.path.is_ident("doc").then(|| { .filter_map(|attr| {
let Ok(Meta::NameValue(MetaNameValue { lit: Lit::Str(doc), .. })) = attr.parse_meta() else { let Meta::NameValue(attr ) = &attr.meta else {
abort!(attr, "expected doc comment to be string literal"); return None
}; };
(doc.value(), doc.span()) if !attr.path.is_ident("doc") {
})) return None
}
let Some(val) = value_to_string(&attr.value) else {
abort!(attr, "expected string literal in value of doc comment");
};
Some((val, attr.path.span()))
})
.flat_map(map) .flat_map(map)
.collect_vec(); .collect_vec();

@ -7,9 +7,9 @@ extern crate proc_macro_error;
use proc_macro::TokenStream; use proc_macro::TokenStream;
use proc_macro2::{Span, TokenTree}; use proc_macro2::{Span, TokenTree};
use quote::ToTokens; use quote::ToTokens;
use rstml::{node::KeyedAttribute, parse};
use server_fn_macro::{server_macro_impl, ServerContext}; use server_fn_macro::{server_macro_impl, ServerContext};
use syn::parse_macro_input; use syn::parse_macro_input;
use syn_rsx::{parse, NodeAttribute};
#[derive(Copy, Clone, Debug, PartialEq, Eq)] #[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum Mode { pub(crate) enum Mode {
@ -351,16 +351,22 @@ pub fn view(tokens: TokenStream) -> TokenStream {
.chain(tokens) .chain(tokens)
.collect() .collect()
}; };
let config = rstml::ParserConfig::default().recover_block(true);
match parse(tokens.into()) { let parser = rstml::Parser::new(config);
Ok(nodes) => render_view( let (nodes, errors) = parser.parse_recoverable(tokens).split_vec();
&proc_macro2::Ident::new(&cx.to_string(), cx.span()), let errors = errors.into_iter().map(|e| e.emit_as_expr_tokens());
&nodes, let nodes_output = render_view(
Mode::default(), &cx,
global_class.as_ref(), &nodes,
normalized_call_site(proc_macro::Span::call_site()), Mode::default(),
), global_class.as_ref(),
Err(error) => error.to_compile_error(), normalized_call_site(proc_macro::Span::call_site()),
);
quote! {
{
#(#errors;)*
#nodes_output
}
} }
.into() .into()
} }
@ -874,9 +880,9 @@ pub fn params_derive(
} }
} }
pub(crate) fn attribute_value(attr: &NodeAttribute) -> &syn::Expr { pub(crate) fn attribute_value(attr: &KeyedAttribute) -> &syn::Expr {
match &attr.value { match &attr.possible_value {
Some(value) => value.as_ref(), Some(value) => &value.value,
None => abort!(attr.key, "attribute should have value"), None => abort!(attr.key, "attribute should have value"),
} }
} }

@ -5,7 +5,8 @@ use attribute_derive::Attribute as AttributeDerive;
use proc_macro2::{Ident, TokenStream}; use proc_macro2::{Ident, TokenStream};
use quote::{ToTokens, TokenStreamExt}; use quote::{ToTokens, TokenStreamExt};
use syn::{ use syn::{
parse::Parse, parse_quote, Field, ItemStruct, LitStr, Type, Visibility, parse::Parse, parse_quote, Field, ItemStruct, LitStr, Meta, Type,
Visibility,
}; };
pub struct Model { pub struct Model {
@ -31,13 +32,16 @@ impl Parse for Model {
// We need to remove the `#[doc = ""]` and `#[builder(_)]` // We need to remove the `#[doc = ""]` and `#[builder(_)]`
// attrs from the function signature // attrs from the function signature
drain_filter(&mut item.attrs, |attr| { drain_filter(&mut item.attrs, |attr| match &attr.meta {
attr.path == parse_quote!(doc) || attr.path == parse_quote!(prop) Meta::NameValue(attr) => attr.path == parse_quote!(doc),
Meta::List(attr) => attr.path == parse_quote!(prop),
_ => false,
}); });
item.fields.iter_mut().for_each(|arg| { item.fields.iter_mut().for_each(|arg| {
drain_filter(&mut arg.attrs, |attr| { drain_filter(&mut arg.attrs, |attr| match &attr.meta {
attr.path == parse_quote!(doc) Meta::NameValue(attr) => attr.path == parse_quote!(doc),
|| attr.path == parse_quote!(prop) Meta::List(attr) => attr.path == parse_quote!(prop),
_ => false,
}); });
}); });

@ -1,9 +1,14 @@
use crate::attribute_value; use crate::attribute_value;
use leptos_hot_reload::parsing::is_component_node; use itertools::Either;
use leptos_hot_reload::parsing::{
block_to_primitive_expression, is_component_node, value_to_string,
};
use proc_macro2::{Ident, Span, TokenStream}; use proc_macro2::{Ident, Span, TokenStream};
use quote::{quote, quote_spanned}; use quote::{quote, quote_spanned, ToTokens};
use rstml::node::{
KeyedAttribute, Node, NodeAttribute, NodeBlock, NodeElement,
};
use syn::spanned::Spanned; use syn::spanned::Spanned;
use syn_rsx::{Node, NodeAttribute, NodeElement, NodeValueExpr};
use uuid::Uuid; use uuid::Uuid;
pub(crate) fn render_template(cx: &Ident, nodes: &[Node]) -> TokenStream { pub(crate) fn render_template(cx: &Ident, nodes: &[Node]) -> TokenStream {
@ -53,7 +58,7 @@ fn root_element_to_tokens(
.unwrap(); .unwrap();
}; };
let span = node.name.span(); let span = node.name().span();
let navigations = if navigations.is_empty() { let navigations = if navigations.is_empty() {
quote! {} quote! {}
@ -67,7 +72,7 @@ fn root_element_to_tokens(
quote! { #(#expressions;);* } quote! { #(#expressions;);* }
}; };
let tag_name = node.name.to_string(); let tag_name = node.name().to_string();
quote_spanned! { quote_spanned! {
span => { span => {
@ -104,9 +109,9 @@ enum PrevSibChange {
Skip, Skip,
} }
fn attributes(node: &NodeElement) -> impl Iterator<Item = &NodeAttribute> { fn attributes(node: &NodeElement) -> impl Iterator<Item = &KeyedAttribute> {
node.attributes.iter().filter_map(|node| { node.attributes().iter().filter_map(|node| {
if let Node::Attribute(attribute) = node { if let NodeAttribute::Attribute(attribute) = node {
Some(attribute) Some(attribute)
} else { } else {
None None
@ -129,11 +134,11 @@ fn element_to_tokens(
) -> Ident { ) -> Ident {
// create this element // create this element
*next_el_id += 1; *next_el_id += 1;
let this_el_ident = child_ident(*next_el_id, node.name.span()); let this_el_ident = child_ident(*next_el_id, node.name().span());
// Open tag // Open tag
let name_str = node.name.to_string(); let name_str = node.name().to_string();
let span = node.name.span(); let span = node.name().span();
// CSR/hydrate, push to template // CSR/hydrate, push to template
template.push('<'); template.push('<');
@ -145,7 +150,7 @@ fn element_to_tokens(
} }
// navigation for this el // navigation for this el
let debug_name = node.name.to_string(); let debug_name = node.name().to_string();
let this_nav = if is_root_el { let this_nav = if is_root_el {
quote_spanned! { quote_spanned! {
span => let #this_el_ident = #debug_name; span => let #this_el_ident = #debug_name;
@ -247,14 +252,17 @@ fn next_sibling_node(
if is_component_node(sibling) { if is_component_node(sibling) {
next_sibling_node(children, idx + 1, next_el_id) next_sibling_node(children, idx + 1, next_el_id)
} else { } else {
Ok(Some(child_ident(*next_el_id + 1, sibling.name.span()))) Ok(Some(child_ident(
*next_el_id + 1,
sibling.name().span(),
)))
} }
} }
Node::Block(sibling) => { Node::Block(sibling) => {
Ok(Some(child_ident(*next_el_id + 1, sibling.value.span()))) Ok(Some(child_ident(*next_el_id + 1, sibling.span())))
} }
Node::Text(sibling) => { Node::Text(sibling) => {
Ok(Some(child_ident(*next_el_id + 1, sibling.value.span()))) Ok(Some(child_ident(*next_el_id + 1, sibling.span())))
} }
_ => Err("expected either an element or a block".to_string()), _ => Err("expected either an element or a block".to_string()),
} }
@ -263,7 +271,7 @@ fn next_sibling_node(
fn attr_to_tokens( fn attr_to_tokens(
cx: &Ident, cx: &Ident,
node: &NodeAttribute, node: &KeyedAttribute,
el_id: &Ident, el_id: &Ident,
template: &mut String, template: &mut String,
expressions: &mut Vec<TokenStream>, expressions: &mut Vec<TokenStream>,
@ -272,8 +280,8 @@ fn attr_to_tokens(
let name = name.strip_prefix('_').unwrap_or(&name); let name = name.strip_prefix('_').unwrap_or(&name);
let name = name.strip_prefix("attr:").unwrap_or(name); let name = name.strip_prefix("attr:").unwrap_or(name);
let value = match &node.value { let value = match &node.value() {
Some(expr) => match expr.as_ref() { Some(expr) => match expr {
syn::Expr::Lit(expr_lit) => { syn::Expr::Lit(expr_lit) => {
if let syn::Lit::Str(s) = &expr_lit.lit { if let syn::Lit::Str(s) = &expr_lit.lit {
AttributeValue::Static(s.value()) AttributeValue::Static(s.value())
@ -367,7 +375,7 @@ fn child_to_tokens(
Node::Element(node) => { Node::Element(node) => {
if is_component_node(node) { if is_component_node(node) {
proc_macro_error::emit_error!( proc_macro_error::emit_error!(
node.name.span(), node.name().span(),
"component children not allowed in template!, use view! \ "component children not allowed in template!, use view! \
instead" instead"
); );
@ -389,7 +397,7 @@ fn child_to_tokens(
} }
Node::Text(node) => block_to_tokens( Node::Text(node) => block_to_tokens(
cx, cx,
&node.value, Either::Left(node.value_string()),
node.value.span(), node.value.span(),
parent, parent,
prev_sib, prev_sib,
@ -399,10 +407,42 @@ fn child_to_tokens(
expressions, expressions,
navigations, navigations,
), ),
Node::Block(node) => block_to_tokens( Node::RawText(node) => block_to_tokens(
cx, cx,
&node.value, Either::Left(node.to_string_best()),
node.value.span(), node.span(),
parent,
prev_sib,
next_sib,
next_el_id,
template,
expressions,
navigations,
),
Node::Block(NodeBlock::ValidBlock(b)) => {
let value = match block_to_primitive_expression(b)
.and_then(value_to_string)
{
Some(v) => Either::Left(v),
None => Either::Right(b.into_token_stream()),
};
block_to_tokens(
cx,
value,
b.span(),
parent,
prev_sib,
next_sib,
next_el_id,
template,
expressions,
navigations,
)
}
Node::Block(b @ NodeBlock::Invalid { .. }) => block_to_tokens(
cx,
Either::Right(b.into_token_stream()),
b.span(),
parent, parent,
prev_sib, prev_sib,
next_sib, next_sib,
@ -418,7 +458,7 @@ fn child_to_tokens(
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
fn block_to_tokens( fn block_to_tokens(
_cx: &Ident, _cx: &Ident,
value: &NodeValueExpr, value: Either<String, TokenStream>,
span: Span, span: Span,
parent: &Ident, parent: &Ident,
prev_sib: Option<Ident>, prev_sib: Option<Ident>,
@ -428,18 +468,6 @@ fn block_to_tokens(
expressions: &mut Vec<TokenStream>, expressions: &mut Vec<TokenStream>,
navigations: &mut Vec<TokenStream>, navigations: &mut Vec<TokenStream>,
) -> PrevSibChange { ) -> PrevSibChange {
let value = value.as_ref();
let str_value = match value {
syn::Expr::Lit(lit) => match &lit.lit {
syn::Lit::Str(s) => Some(s.value()),
syn::Lit::Char(c) => Some(c.value().to_string()),
syn::Lit::Int(i) => Some(i.base10_digits().to_string()),
syn::Lit::Float(f) => Some(f.base10_digits().to_string()),
_ => None,
},
_ => None,
};
// code to navigate to this text node // code to navigate to this text node
let (name, location) = /* if is_first_child && mode == Mode::Client { let (name, location) = /* if is_first_child && mode == Mode::Client {
@ -473,27 +501,30 @@ fn block_to_tokens(
} }
}; };
if let Some(v) = str_value { match value {
navigations.push(location); Either::Left(v) => {
template.push_str(&v); navigations.push(location);
template.push_str(&v);
if let Some(name) = name { if let Some(name) = name {
PrevSibChange::Sib(name) PrevSibChange::Sib(name)
} else { } else {
PrevSibChange::Parent PrevSibChange::Parent
}
} }
} else { Either::Right(value) => {
template.push_str("<!>"); template.push_str("<!>");
navigations.push(location); navigations.push(location);
expressions.push(quote! { expressions.push(quote! {
leptos::leptos_dom::mount_child(#mount_kind, &{#value}.into_view(cx)); leptos::leptos_dom::mount_child(#mount_kind, &{#value}.into_view(cx));
}); });
if let Some(name) = name { if let Some(name) = name {
PrevSibChange::Sib(name) PrevSibChange::Sib(name)
} else { } else {
PrevSibChange::Parent PrevSibChange::Parent
}
} }
} }
} }

@ -1,11 +1,15 @@
use crate::{attribute_value, Mode}; use crate::{attribute_value, Mode};
use convert_case::{Case::Snake, Casing}; use convert_case::{Case::Snake, Casing};
use leptos_hot_reload::parsing::{is_component_node, value_to_string}; use leptos_hot_reload::parsing::{
block_to_primitive_expression, is_component_node, value_to_string,
};
use proc_macro2::{Ident, Span, TokenStream, TokenTree}; use proc_macro2::{Ident, Span, TokenStream, TokenTree};
use quote::{format_ident, quote, quote_spanned}; use quote::{format_ident, quote, quote_spanned};
use rstml::node::{
KeyedAttribute, Node, NodeAttribute, NodeBlock, NodeElement, NodeName,
};
use std::collections::HashMap; use std::collections::HashMap;
use syn::{spanned::Spanned, Expr, ExprLit, ExprPath, Lit}; use syn::{spanned::Spanned, Expr, ExprLit, ExprPath, Lit};
use syn_rsx::{Node, NodeAttribute, NodeElement, NodeName, NodeValueExpr};
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
enum TagType { enum TagType {
@ -213,18 +217,22 @@ fn root_node_to_tokens_ssr(
global_class, global_class,
view_marker, view_marker,
), ),
Node::Comment(_) | Node::Doctype(_) | Node::Attribute(_) => quote! {}, Node::Comment(_) | Node::Doctype(_) => quote! {},
Node::Text(node) => { Node::Text(node) => {
let value = node.value.as_ref();
quote! { quote! {
leptos::leptos_dom::html::text(#value) leptos::leptos_dom::html::text(#node)
}
}
Node::RawText(r) => {
let text = r.to_string_best();
let text = syn::LitStr::new(&text, r.span());
quote! {
leptos::leptos_dom::html::text(#text)
} }
} }
Node::Block(node) => { Node::Block(node) => {
let value = node.value.as_ref();
quote! { quote! {
#[allow(unused_braces)] #node
#value
} }
} }
Node::Element(node) => { Node::Element(node) => {
@ -254,9 +262,9 @@ fn fragment_to_tokens_ssr(
}); });
quote! { quote! {
{ {
leptos::Fragment::lazy(|| vec![ leptos::Fragment::lazy(|| [
#(#nodes),* #(#nodes),*
]) ].to_vec())
#view_marker #view_marker
} }
} }
@ -329,15 +337,15 @@ fn root_element_to_tokens_ssr(
}, },
}); });
let tag_name = node.name.to_string(); let tag_name = node.name().to_string();
let is_custom_element = is_custom_element(&tag_name); let is_custom_element = is_custom_element(&tag_name);
let typed_element_name = if is_custom_element { let typed_element_name = if is_custom_element {
Ident::new("Custom", node.name.span()) Ident::new("Custom", node.name().span())
} else { } else {
let camel_cased = camel_case_tag_name( let camel_cased = camel_case_tag_name(
&tag_name.replace("svg::", "").replace("math::", ""), &tag_name.replace("svg::", "").replace("math::", ""),
); );
Ident::new(&camel_cased, node.name.span()) Ident::new(&camel_cased, node.name().span())
}; };
let typed_element_name = if is_svg_element(&tag_name) { let typed_element_name = if is_svg_element(&tag_name) {
quote! { svg::#typed_element_name } quote! { svg::#typed_element_name }
@ -409,7 +417,7 @@ fn element_to_tokens_ssr(
})); }));
} else { } else {
let tag_name = node let tag_name = node
.name .name()
.to_string() .to_string()
.replace("svg::", "") .replace("svg::", "")
.replace("math::", ""); .replace("math::", "");
@ -419,8 +427,8 @@ fn element_to_tokens_ssr(
let mut inner_html = None; let mut inner_html = None;
for attr in &node.attributes { for attr in node.attributes() {
if let Node::Attribute(attr) = attr { if let NodeAttribute::Attribute(attr) = attr {
inner_html = attribute_to_tokens_ssr( inner_html = attribute_to_tokens_ssr(
cx, cx,
attr, attr,
@ -439,9 +447,9 @@ fn element_to_tokens_ssr(
quote! { leptos::leptos_dom::HydrationCtx::id() } quote! { leptos::leptos_dom::HydrationCtx::id() }
}; };
match node match node
.attributes .attributes()
.iter() .iter()
.find(|node| matches!(node, Node::Attribute(attr) if attr.key.to_string() == "id")) .find(|node| matches!(node, NodeAttribute::Attribute(attr) if attr.key.to_string() == "id"))
{ {
Some(_) => { Some(_) => {
template.push_str(" leptos-hk=\"_{}\""); template.push_str(" leptos-hk=\"_{}\"");
@ -462,7 +470,7 @@ fn element_to_tokens_ssr(
if let Some(inner_html) = inner_html { if let Some(inner_html) = inner_html {
template.push_str("{}"); template.push_str("{}");
let value = inner_html.as_ref(); let value = inner_html;
holes.push(quote! { holes.push(quote! {
(#value).into_attribute(#cx).as_nameless_value_string().unwrap_or_default() (#value).into_attribute(#cx).as_nameless_value_string().unwrap_or_default()
@ -484,32 +492,23 @@ fn element_to_tokens_ssr(
); );
} }
Node::Text(text) => { Node::Text(text) => {
if let Some(value) = value_to_string(&text.value) { let value = text.value_string();
let value = if is_script_or_style { let value = if is_script_or_style {
value.into() value.into()
} else {
html_escape::encode_safe(&value)
};
template.push_str(
&value
.replace('{', "\\{")
.replace('}', "\\}"),
);
} else { } else {
template.push_str("{}"); html_escape::encode_safe(&value)
let value = text.value.as_ref(); };
template.push_str(
holes.push(quote! { &value.replace('{', "\\{").replace('}', "\\}"),
#value.into_view(#cx).render_to_string(#cx) );
})
}
} }
Node::Block(block) => { Node::Block(NodeBlock::ValidBlock(block)) => {
if let Some(value) = value_to_string(&block.value) { if let Some(value) =
block_to_primitive_expression(block)
.and_then(value_to_string)
{
template.push_str(&value); template.push_str(&value);
} else { } else {
let value = block.value.as_ref();
if !template.is_empty() { if !template.is_empty() {
chunks.push(SsrElementChunks::String { chunks.push(SsrElementChunks::String {
template: std::mem::take(template), template: std::mem::take(template),
@ -517,10 +516,16 @@ fn element_to_tokens_ssr(
}) })
} }
chunks.push(SsrElementChunks::View(quote! { chunks.push(SsrElementChunks::View(quote! {
{#value}.into_view(#cx) {#block}.into_view(#cx)
})); }));
} }
} }
// Keep invalid blocks for faster IDE diff (on user type)
Node::Block(block @ NodeBlock::Invalid { .. }) => {
chunks.push(SsrElementChunks::View(quote! {
{#block}.into_view(#cx)
}));
}
Node::Fragment(_) => abort!( Node::Fragment(_) => abort!(
Span::call_site(), Span::call_site(),
"You can't nest a fragment inside an element." "You can't nest a fragment inside an element."
@ -531,7 +536,7 @@ fn element_to_tokens_ssr(
} }
template.push_str("</"); template.push_str("</");
template.push_str(&node.name.to_string()); template.push_str(&node.name().to_string());
template.push('>'); template.push('>');
} }
} }
@ -540,17 +545,17 @@ fn element_to_tokens_ssr(
// returns `inner_html` // returns `inner_html`
fn attribute_to_tokens_ssr<'a>( fn attribute_to_tokens_ssr<'a>(
cx: &Ident, cx: &Ident,
node: &'a NodeAttribute, attr: &'a KeyedAttribute,
template: &mut String, template: &mut String,
holes: &mut Vec<TokenStream>, holes: &mut Vec<TokenStream>,
exprs_for_compiler: &mut Vec<TokenStream>, exprs_for_compiler: &mut Vec<TokenStream>,
global_class: Option<&TokenTree>, global_class: Option<&TokenTree>,
) -> Option<&'a NodeValueExpr> { ) -> Option<&'a syn::Expr> {
let name = node.key.to_string(); let name = attr.key.to_string();
if name == "ref" || name == "_ref" || name == "ref_" || name == "node_ref" { if name == "ref" || name == "_ref" || name == "ref_" || name == "node_ref" {
// ignore refs on SSR // ignore refs on SSR
} else if let Some(name) = name.strip_prefix("on:") { } else if let Some(name) = name.strip_prefix("on:") {
let handler = attribute_value(node); let handler = attribute_value(attr);
let (event_type, _, _) = parse_event_name(name); let (event_type, _, _) = parse_event_name(name);
exprs_for_compiler.push(quote! { exprs_for_compiler.push(quote! {
@ -563,16 +568,16 @@ fn attribute_to_tokens_ssr<'a>(
// ignore props for SSR // ignore props for SSR
// ignore classes and sdtyles: we'll handle these separately // ignore classes and sdtyles: we'll handle these separately
} else if name == "inner_html" { } else if name == "inner_html" {
return node.value.as_ref(); return attr.value();
} else { } else {
let name = name.replacen("attr:", "", 1); let name = name.replacen("attr:", "", 1);
// special case of global_class and class attribute // special case of global_class and class attribute
if name == "class" if name == "class"
&& global_class.is_some() && global_class.is_some()
&& node.value.as_ref().and_then(value_to_string).is_none() && attr.value().and_then(value_to_string).is_none()
{ {
let span = node.key.span(); let span = attr.key.span();
proc_macro_error::emit_error!(span, "Combining a global class (view! { cx, class = ... }) \ proc_macro_error::emit_error!(span, "Combining a global class (view! { cx, class = ... }) \
and a dynamic `class=` attribute on an element causes runtime inconsistencies. You can \ and a dynamic `class=` attribute on an element causes runtime inconsistencies. You can \
toggle individual classes dynamically with the `class:name=value` syntax. \n\nSee this issue \ toggle individual classes dynamically with the `class:name=value` syntax. \n\nSee this issue \
@ -582,7 +587,7 @@ fn attribute_to_tokens_ssr<'a>(
if name != "class" && name != "style" { if name != "class" && name != "style" {
template.push(' '); template.push(' ');
if let Some(value) = node.value.as_ref() { if let Some(value) = attr.value() {
if let Some(value) = value_to_string(value) { if let Some(value) = value_to_string(value) {
template.push_str(&name); template.push_str(&name);
template.push_str("=\""); template.push_str("=\"");
@ -590,7 +595,6 @@ fn attribute_to_tokens_ssr<'a>(
template.push('"'); template.push('"');
} else { } else {
template.push_str("{}"); template.push_str("{}");
let value = value.as_ref();
holes.push(quote! { holes.push(quote! {
&{#value}.into_attribute(#cx) &{#value}.into_attribute(#cx)
.as_nameless_value_string() .as_nameless_value_string()
@ -630,11 +634,13 @@ fn set_class_attribute_ssr(
Some(val) => (String::new(), Some(val)), Some(val) => (String::new(), Some(val)),
}; };
let static_class_attr = node let static_class_attr = node
.attributes .attributes()
.iter() .iter()
.filter_map(|a| match a { .filter_map(|a| match a {
Node::Attribute(attr) if attr.key.to_string() == "class" => { NodeAttribute::Attribute(attr)
attr.value.as_ref().and_then(value_to_string) if attr.key.to_string() == "class" =>
{
attr.value().and_then(value_to_string)
} }
_ => None, _ => None,
}) })
@ -644,17 +650,17 @@ fn set_class_attribute_ssr(
.join(" "); .join(" ");
let dyn_class_attr = node let dyn_class_attr = node
.attributes .attributes()
.iter() .iter()
.filter_map(|a| { .filter_map(|a| {
if let Node::Attribute(a) = a { if let NodeAttribute::Attribute(a) = a {
if a.key.to_string() == "class" { if a.key.to_string() == "class" {
if a.value.as_ref().and_then(value_to_string).is_some() if a.value().and_then(value_to_string).is_some()
|| fancy_class_name(&a.key.to_string(), cx, a).is_some() || fancy_class_name(&a.key.to_string(), cx, a).is_some()
{ {
None None
} else { } else {
Some((a.key.span(), &a.value)) Some((a.key.span(), a.value()))
} }
} else { } else {
None None
@ -666,10 +672,10 @@ fn set_class_attribute_ssr(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let class_attrs = node let class_attrs = node
.attributes .attributes()
.iter() .iter()
.filter_map(|node| { .filter_map(|node| {
if let Node::Attribute(node) = node { if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string(); let name = node.key.to_string();
if name == "class" { if name == "class" {
return if let Some((_, name, value)) = return if let Some((_, name, value)) =
@ -713,7 +719,6 @@ fn set_class_attribute_ssr(
for (_span, value) in dyn_class_attr { for (_span, value) in dyn_class_attr {
if let Some(value) = value { if let Some(value) = value {
template.push_str(" {}"); template.push_str(" {}");
let value = value.as_ref();
holes.push(quote! { holes.push(quote! {
&(#cx, #value).into_attribute(#cx).as_nameless_value_string() &(#cx, #value).into_attribute(#cx).as_nameless_value_string()
.map(|a| leptos::leptos_dom::ssr::escape_attr(&a).to_string()) .map(|a| leptos::leptos_dom::ssr::escape_attr(&a).to_string())
@ -745,11 +750,13 @@ fn set_style_attribute_ssr(
holes: &mut Vec<TokenStream>, holes: &mut Vec<TokenStream>,
) { ) {
let static_style_attr = node let static_style_attr = node
.attributes .attributes()
.iter() .iter()
.filter_map(|a| match a { .filter_map(|a| match a {
Node::Attribute(attr) if attr.key.to_string() == "style" => { NodeAttribute::Attribute(attr)
attr.value.as_ref().and_then(value_to_string) if attr.key.to_string() == "style" =>
{
attr.value().and_then(value_to_string)
} }
_ => None, _ => None,
}) })
@ -757,17 +764,17 @@ fn set_style_attribute_ssr(
.map(|style| format!("{style};")); .map(|style| format!("{style};"));
let dyn_style_attr = node let dyn_style_attr = node
.attributes .attributes()
.iter() .iter()
.filter_map(|a| { .filter_map(|a| {
if let Node::Attribute(a) = a { if let NodeAttribute::Attribute(a) = a {
if a.key.to_string() == "style" { if a.key.to_string() == "style" {
if a.value.as_ref().and_then(value_to_string).is_some() if a.value().and_then(value_to_string).is_some()
|| fancy_style_name(&a.key.to_string(), cx, a).is_some() || fancy_style_name(&a.key.to_string(), cx, a).is_some()
{ {
None None
} else { } else {
Some((a.key.span(), &a.value)) Some((a.key.span(), a.value()))
} }
} else { } else {
None None
@ -779,10 +786,10 @@ fn set_style_attribute_ssr(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let style_attrs = node let style_attrs = node
.attributes .attributes()
.iter() .iter()
.filter_map(|node| { .filter_map(|node| {
if let Node::Attribute(node) = node { if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string(); let name = node.key.to_string();
if name == "style" { if name == "style" {
return if let Some((_, name, value)) = return if let Some((_, name, value)) =
@ -825,7 +832,6 @@ fn set_style_attribute_ssr(
for (_span, value) in dyn_style_attr { for (_span, value) in dyn_style_attr {
if let Some(value) = value { if let Some(value) = value {
template.push_str(" {};"); template.push_str(" {};");
let value = value.as_ref();
holes.push(quote! { holes.push(quote! {
&(#cx, #value).into_attribute(#cx).as_nameless_value_string() &(#cx, #value).into_attribute(#cx).as_nameless_value_string()
.map(|a| leptos::leptos_dom::ssr::escape_attr(&a).to_string()) .map(|a| leptos::leptos_dom::ssr::escape_attr(&a).to_string())
@ -899,18 +905,18 @@ fn fragment_to_tokens(
let tokens = if lazy { let tokens = if lazy {
quote! { quote! {
{ {
leptos::Fragment::lazy(|| vec![ leptos::Fragment::lazy(|| [
#(#nodes),* #(#nodes),*
]) ].to_vec())
#view_marker #view_marker
} }
} }
} else { } else {
quote! { quote! {
{ {
leptos::Fragment::new(vec![ leptos::Fragment::new([
#(#nodes),* #(#nodes),*
]) ].to_vec())
#view_marker #view_marker
} }
} }
@ -948,18 +954,14 @@ fn node_to_tokens(
view_marker, view_marker,
), ),
Node::Comment(_) | Node::Doctype(_) => Some(quote! {}), Node::Comment(_) | Node::Doctype(_) => Some(quote! {}),
Node::Text(node) => { Node::Text(node) => Some(quote! {
let value = node.value.as_ref(); leptos::leptos_dom::html::text(#node)
Some(quote! { }),
leptos::leptos_dom::html::text(#value) Node::Block(node) => Some(quote! { #node }),
}) Node::RawText(r) => {
} let text = r.to_string_best();
Node::Block(node) => { let text = syn::LitStr::new(&text, r.span());
let value = node.value.as_ref(); Some(quote! { #text })
Some(quote! { #value })
}
Node::Attribute(node) => {
Some(attribute_to_tokens(cx, node, global_class))
} }
Node::Element(node) => element_to_tokens( Node::Element(node) => element_to_tokens(
cx, cx,
@ -980,6 +982,7 @@ fn element_to_tokens(
global_class: Option<&TokenTree>, global_class: Option<&TokenTree>,
view_marker: Option<String>, view_marker: Option<String>,
) -> Option<TokenStream> { ) -> Option<TokenStream> {
let name = node.name();
if is_component_node(node) { if is_component_node(node) {
if let Some(slot) = get_slot(node) { if let Some(slot) = get_slot(node) {
slot_to_tokens(cx, node, slot, parent_slots, global_class); slot_to_tokens(cx, node, slot, parent_slots, global_class);
@ -988,20 +991,17 @@ fn element_to_tokens(
Some(component_to_tokens(cx, node, global_class)) Some(component_to_tokens(cx, node, global_class))
} }
} else { } else {
let tag = node.name.to_string(); let tag = name.to_string();
let name = if is_custom_element(&tag) { let name = if is_custom_element(&tag) {
let name = node.name.to_string(); let name = node.name().to_string();
quote! { leptos::leptos_dom::html::custom(#cx, leptos::leptos_dom::html::Custom::new(#name)) } quote! { leptos::leptos_dom::html::custom(#cx, leptos::leptos_dom::html::Custom::new(#name)) }
} else if is_svg_element(&tag) { } else if is_svg_element(&tag) {
let name = &node.name;
parent_type = TagType::Svg; parent_type = TagType::Svg;
quote! { leptos::leptos_dom::svg::#name(#cx) } quote! { leptos::leptos_dom::svg::#name(#cx) }
} else if is_math_ml_element(&tag) { } else if is_math_ml_element(&tag) {
let name = &node.name;
parent_type = TagType::Math; parent_type = TagType::Math;
quote! { leptos::leptos_dom::math::#name(#cx) } quote! { leptos::leptos_dom::math::#name(#cx) }
} else if is_ambiguous_element(&tag) { } else if is_ambiguous_element(&tag) {
let name = &node.name;
match parent_type { match parent_type {
TagType::Unknown => { TagType::Unknown => {
// We decided this warning was too aggressive, but I'll leave it here in case we want it later // We decided this warning was too aggressive, but I'll leave it here in case we want it later
@ -1020,12 +1020,11 @@ fn element_to_tokens(
} }
} }
} else { } else {
let name = &node.name;
parent_type = TagType::Html; parent_type = TagType::Html;
quote! { leptos::leptos_dom::html::#name(#cx) } quote! { leptos::leptos_dom::html::#name(#cx) }
}; };
let attrs = node.attributes.iter().filter_map(|node| { let attrs = node.attributes().iter().filter_map(|node| {
if let Node::Attribute(node) = node { if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string(); let name = node.key.to_string();
let name = name.trim(); let name = name.trim();
if name.starts_with("class:") if name.starts_with("class:")
@ -1041,8 +1040,8 @@ fn element_to_tokens(
None None
} }
}); });
let class_attrs = node.attributes.iter().filter_map(|node| { let class_attrs = node.attributes().iter().filter_map(|node| {
if let Node::Attribute(node) = node { if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string(); let name = node.key.to_string();
if let Some((fancy, _, _)) = fancy_class_name(&name, cx, node) { if let Some((fancy, _, _)) = fancy_class_name(&name, cx, node) {
Some(fancy) Some(fancy)
@ -1055,8 +1054,8 @@ fn element_to_tokens(
None None
} }
}); });
let style_attrs = node.attributes.iter().filter_map(|node| { let style_attrs = node.attributes().iter().filter_map(|node| {
if let Node::Attribute(node) = node { if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string(); let name = node.key.to_string();
if let Some((fancy, _, _)) = fancy_style_name(&name, cx, node) { if let Some((fancy, _, _)) = fancy_style_name(&name, cx, node) {
Some(fancy) Some(fancy)
@ -1101,32 +1100,18 @@ fn element_to_tokens(
}), }),
false, false,
), ),
Node::Text(node) => { Node::Text(node) => (quote! { #node }, true),
if let Some(primitive) = value_to_string(&node.value) { Node::RawText(node) => {
(quote! { #primitive }, true) let text = node.to_string_best();
} else { let text = syn::LitStr::new(&text, node.span());
let value = node.value.as_ref(); (quote! { #text }, true)
(
quote! {
#[allow(unused_braces)] #value
},
false,
)
}
}
Node::Block(node) => {
if let Some(primitive) = value_to_string(&node.value) {
(quote! { #primitive }, true)
} else {
let value = node.value.as_ref();
(
quote! {
#[allow(unused_braces)] #value
},
false,
)
}
} }
Node::Block(node) => (
quote! {
#node
},
false,
),
Node::Element(node) => ( Node::Element(node) => (
element_to_tokens( element_to_tokens(
cx, cx,
@ -1139,9 +1124,7 @@ fn element_to_tokens(
.unwrap_or_default(), .unwrap_or_default(),
false, false,
), ),
Node::Comment(_) | Node::Doctype(_) | Node::Attribute(_) => { Node::Comment(_) | Node::Doctype(_) => (quote! {}, false),
(quote! {}, false)
}
}; };
if is_static { if is_static {
quote! { quote! {
@ -1172,7 +1155,7 @@ fn element_to_tokens(
fn attribute_to_tokens( fn attribute_to_tokens(
cx: &Ident, cx: &Ident,
node: &NodeAttribute, node: &KeyedAttribute,
global_class: Option<&TokenTree>, global_class: Option<&TokenTree>,
) -> TokenStream { ) -> TokenStream {
let span = node.key.span(); let span = node.key.span();
@ -1303,7 +1286,7 @@ fn attribute_to_tokens(
// special case of global_class and class attribute // special case of global_class and class attribute
if name == "class" if name == "class"
&& global_class.is_some() && global_class.is_some()
&& node.value.as_ref().and_then(value_to_string).is_none() && node.value().and_then(value_to_string).is_none()
{ {
let span = node.key.span(); let span = node.key.span();
proc_macro_error::emit_error!(span, "Combining a global class (view! { cx, class = ... }) \ proc_macro_error::emit_error!(span, "Combining a global class (view! { cx, class = ... }) \
@ -1313,10 +1296,8 @@ fn attribute_to_tokens(
}; };
// all other attributes // all other attributes
let value = match node.value.as_ref() { let value = match node.value() {
Some(value) => { Some(value) => {
let value = value.as_ref();
quote! { #value } quote! { #value }
} }
None => quote_spanned! { span => "" }, None => quote_spanned! { span => "" },
@ -1367,7 +1348,7 @@ pub(crate) fn parse_event_name(name: &str) -> (TokenStream, bool, bool) {
pub(crate) fn slot_to_tokens( pub(crate) fn slot_to_tokens(
cx: &Ident, cx: &Ident,
node: &NodeElement, node: &NodeElement,
slot: &NodeAttribute, slot: &KeyedAttribute,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>, parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>, global_class: Option<&TokenTree>,
) { ) {
@ -1376,19 +1357,19 @@ pub(crate) fn slot_to_tokens(
let name = convert_to_snake_case(if name.starts_with("slot:") { let name = convert_to_snake_case(if name.starts_with("slot:") {
name.replacen("slot:", "", 1) name.replacen("slot:", "", 1)
} else { } else {
node.name.to_string() node.name().to_string()
}); });
let component_name = ident_from_tag_name(&node.name); let component_name = ident_from_tag_name(node.name());
let span = node.name.span(); let span = node.name().span();
let Some(parent_slots) = parent_slots else { let Some(parent_slots) = parent_slots else {
proc_macro_error::emit_error!(span, "slots cannot be used inside HTML elements"); proc_macro_error::emit_error!(span, "slots cannot be used inside HTML elements");
return; return;
}; };
let attrs = node.attributes.iter().filter_map(|node| { let attrs = node.attributes().iter().filter_map(|node| {
if let Node::Attribute(node) = node { if let NodeAttribute::Attribute(node) = node {
if is_slot(node) { if is_slot(node) {
None None
} else { } else {
@ -1406,10 +1387,8 @@ pub(crate) fn slot_to_tokens(
let name = &attr.key; let name = &attr.key;
let value = attr let value = attr
.value .value()
.as_ref()
.map(|v| { .map(|v| {
let v = v.as_ref();
quote! { #v } quote! { #v }
}) })
.unwrap_or_else(|| quote! { #name }); .unwrap_or_else(|| quote! { #name });
@ -1474,9 +1453,9 @@ pub(crate) fn slot_to_tokens(
let slot = Ident::new(&slot, span); let slot = Ident::new(&slot, span);
if values.len() > 1 { if values.len() > 1 {
quote! { quote! {
.#slot(vec![ .#slot([
#(#values)* #(#values)*
]) ].to_vec())
} }
} else { } else {
let value = &values[0]; let value = &values[0];
@ -1504,12 +1483,12 @@ pub(crate) fn component_to_tokens(
node: &NodeElement, node: &NodeElement,
global_class: Option<&TokenTree>, global_class: Option<&TokenTree>,
) -> TokenStream { ) -> TokenStream {
let name = &node.name; let name = node.name();
let component_name = ident_from_tag_name(&node.name); let component_name = ident_from_tag_name(node.name());
let span = node.name.span(); let span = node.name().span();
let attrs = node.attributes.iter().filter_map(|node| { let attrs = node.attributes().iter().filter_map(|node| {
if let Node::Attribute(node) = node { if let NodeAttribute::Attribute(node) = node {
Some(node) Some(node)
} else { } else {
None None
@ -1526,10 +1505,8 @@ pub(crate) fn component_to_tokens(
let name = &attr.key; let name = &attr.key;
let value = attr let value = attr
.value .value()
.as_ref()
.map(|v| { .map(|v| {
let v = v.as_ref();
quote! { #v } quote! { #v }
}) })
.unwrap_or_else(|| quote! { #name }); .unwrap_or_else(|| quote! { #name });
@ -1637,7 +1614,7 @@ pub(crate) fn component_to_tokens(
} }
pub(crate) fn event_from_attribute_node( pub(crate) fn event_from_attribute_node(
attr: &NodeAttribute, attr: &KeyedAttribute,
force_undelegated: bool, force_undelegated: bool,
) -> (TokenStream, &Expr) { ) -> (TokenStream, &Expr) {
let event_name = attr let event_name = attr
@ -1697,7 +1674,7 @@ fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
fn expr_to_ident(expr: &syn::Expr) -> Option<&ExprPath> { fn expr_to_ident(expr: &syn::Expr) -> Option<&ExprPath> {
match expr { match expr {
syn::Expr::Block(block) => block.block.stmts.last().and_then(|stmt| { syn::Expr::Block(block) => block.block.stmts.last().and_then(|stmt| {
if let syn::Stmt::Expr(expr) = stmt { if let syn::Stmt::Expr(expr, ..) = stmt {
expr_to_ident(expr) expr_to_ident(expr)
} else { } else {
None None
@ -1708,15 +1685,15 @@ fn expr_to_ident(expr: &syn::Expr) -> Option<&ExprPath> {
} }
} }
fn is_slot(node: &NodeAttribute) -> bool { fn is_slot(node: &KeyedAttribute) -> bool {
let key = node.key.to_string(); let key = node.key.to_string();
let key = key.trim(); let key = key.trim();
key == "slot" || key.starts_with("slot:") key == "slot" || key.starts_with("slot:")
} }
fn get_slot(node: &NodeElement) -> Option<&NodeAttribute> { fn get_slot(node: &NodeElement) -> Option<&KeyedAttribute> {
node.attributes.iter().find_map(|node| { node.attributes().iter().find_map(|node| {
if let Node::Attribute(node) = node { if let NodeAttribute::Attribute(node) = node {
if is_slot(node) { if is_slot(node) {
Some(node) Some(node)
} else { } else {
@ -1744,7 +1721,7 @@ fn is_self_closing(node: &NodeElement) -> bool {
// self-closing tags // self-closing tags
// https://developer.mozilla.org/en-US/docs/Glossary/Empty_element // https://developer.mozilla.org/en-US/docs/Glossary/Empty_element
matches!( matches!(
node.name.to_string().as_str(), node.name().to_string().as_str(),
"area" "area"
| "base" | "base"
| "br" | "br"
@ -1899,13 +1876,13 @@ fn parse_event(event_name: &str) -> (&str, bool) {
fn fancy_class_name<'a>( fn fancy_class_name<'a>(
name: &str, name: &str,
cx: &Ident, cx: &Ident,
node: &'a NodeAttribute, node: &'a KeyedAttribute,
) -> Option<(TokenStream, String, &'a Expr)> { ) -> Option<(TokenStream, String, &'a Expr)> {
// special case for complex class names: // special case for complex class names:
// e.g., Tailwind `class=("mt-[calc(100vh_-_3rem)]", true)` // e.g., Tailwind `class=("mt-[calc(100vh_-_3rem)]", true)`
if name == "class" { if name == "class" {
if let Some(expr) = node.value.as_ref() { if let Some(expr) = node.value() {
if let syn::Expr::Tuple(tuple) = expr.as_ref() { if let syn::Expr::Tuple(tuple) = expr {
if tuple.elems.len() == 2 { if tuple.elems.len() == 2 {
let span = node.key.span(); let span = node.key.span();
let class = quote_spanned! { let class = quote_spanned! {
@ -1948,12 +1925,12 @@ fn fancy_class_name<'a>(
fn fancy_style_name<'a>( fn fancy_style_name<'a>(
name: &str, name: &str,
cx: &Ident, cx: &Ident,
node: &'a NodeAttribute, node: &'a KeyedAttribute,
) -> Option<(TokenStream, String, &'a Expr)> { ) -> Option<(TokenStream, String, &'a Expr)> {
// special case for complex dynamic style names: // special case for complex dynamic style names:
if name == "style" { if name == "style" {
if let Some(expr) = node.value.as_ref() { if let Some(expr) = node.value() {
if let syn::Expr::Tuple(tuple) = expr.as_ref() { if let syn::Expr::Tuple(tuple) = expr {
if tuple.elems.len() == 2 { if tuple.elems.len() == 2 {
let span = node.key.span(); let span = node.key.span();
let style = quote_spanned! { let style = quote_spanned! {

@ -44,7 +44,7 @@ error: unexpected end of input, expected assignment `=`
47 | #[prop(default)] default: bool, 47 | #[prop(default)] default: bool,
| ^ | ^
error: unexpected end of input, expected one of: `::`, `<`, `_`, literal, `const`, `ref`, `mut`, `&`, parentheses, square brackets, `..`, `const` error: unexpected end of input, expected one of: identifier, `::`, `<`, `_`, literal, `const`, `ref`, `mut`, `&`, parentheses, square brackets, `..`, `const`
= help: try `#[prop(default=5 * 10)]` = help: try `#[prop(default=5 * 10)]`
--> tests/ui/component.rs:56:22 --> tests/ui/component.rs:56:22

@ -44,7 +44,7 @@ error: unexpected end of input, expected assignment `=`
45 | #[prop(default)] default: bool, 45 | #[prop(default)] default: bool,
| ^ | ^
error: unexpected end of input, expected one of: `::`, `<`, `_`, literal, `const`, `ref`, `mut`, `&`, parentheses, square brackets, `..`, `const` error: unexpected end of input, expected one of: identifier, `::`, `<`, `_`, literal, `const`, `ref`, `mut`, `&`, parentheses, square brackets, `..`, `const`
= help: try `#[prop(default=5 * 10)]` = help: try `#[prop(default=5 * 10)]`
--> tests/ui/component_absolute.rs:54:22 --> tests/ui/component_absolute.rs:54:22

@ -90,10 +90,14 @@ where
children: Children, children: Children,
) -> HtmlElement<leptos::html::A> { ) -> HtmlElement<leptos::html::A> {
#[cfg(not(any(feature = "hydrate", feature = "csr")))] #[cfg(not(any(feature = "hydrate", feature = "csr")))]
_ = state; {
_ = state;
}
#[cfg(not(any(feature = "hydrate", feature = "csr")))] #[cfg(not(any(feature = "hydrate", feature = "csr")))]
_ = replace; {
_ = replace;
}
let location = use_location(cx); let location = use_location(cx);
let is_active = create_memo(cx, move |_| match href.get() { let is_active = create_memo(cx, move |_| match href.get() {

@ -15,7 +15,7 @@ serde_qs = "0.12"
thiserror = "1" thiserror = "1"
serde_json = "1" serde_json = "1"
quote = "1" quote = "1"
syn = { version = "1", features = ["full", "parsing", "extra-traits"] } syn = { version = "2", features = ["full", "parsing", "extra-traits"] }
proc-macro2 = "1" proc-macro2 = "1"
ciborium = "0.2" ciborium = "0.2"
xxhash-rust = { version = "0.8", features = ["const_xxh64"] } xxhash-rust = { version = "0.8", features = ["const_xxh64"] }

@ -11,7 +11,7 @@ description = "The default implementation of the server_fn macro without a conte
proc-macro = true proc-macro = true
[dependencies] [dependencies]
syn = { version = "1", features = ["full"] } syn = { version = "2", features = ["full"] }
server_fn_macro = { workspace = true } server_fn_macro = { workspace = true }
[dev-dependencies] [dev-dependencies]

@ -11,7 +11,7 @@ readme = "../README.md"
[dependencies] [dependencies]
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
quote = "1" quote = "1"
syn = { version = "1", features = ["full", "parsing", "extra-traits"] } syn = { version = "2", features = ["full", "parsing", "extra-traits"] }
proc-macro2 = "1" proc-macro2 = "1"
proc-macro-error = "1" proc-macro-error = "1"
xxhash-rust = { version = "0.8.6", features = ["const_xxh64"] } xxhash-rust = { version = "0.8.6", features = ["const_xxh64"] }