Merge pull request #2985 from leptos-rs/2982

fix: sort attributes so `class` and `style` always run before `class:` and `style:` (closes #2982)
This commit is contained in:
Greg Johnston 2024-09-16 21:58:38 -04:00 committed by GitHub
commit c581b3293e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 129 additions and 63 deletions

View file

@ -302,10 +302,10 @@ pub fn view(tokens: TokenStream) -> TokenStream {
};
let config = rstml::ParserConfig::default().recover_block(true);
let parser = rstml::Parser::new(config);
let (nodes, errors) = parser.parse_recoverable(tokens).split_vec();
let (mut nodes, errors) = parser.parse_recoverable(tokens).split_vec();
let errors = errors.into_iter().map(|e| e.emit_as_expr_tokens());
let nodes_output = view::render_view(
&nodes,
&mut nodes,
global_class.as_ref(),
normalized_call_site(proc_macro::Span::call_site()),
);

View file

@ -10,11 +10,9 @@ use std::collections::HashMap;
use syn::{spanned::Spanned, Expr, ExprPath, ExprRange, RangeLimits, Stmt};
pub(crate) fn component_to_tokens(
node: &NodeElement<impl CustomNode>,
node: &mut NodeElement<impl CustomNode>,
global_class: Option<&TokenTree>,
) -> TokenStream {
let name = node.name();
#[allow(unused)] // TODO this is used by hot-reloading
#[cfg(debug_assertions)]
let component_name = super::ident_from_tag_name(node.name());
@ -45,16 +43,21 @@ pub(crate) fn component_to_tokens(
})
.unwrap_or_else(|| node.attributes().len());
let attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
Some(node)
} else {
None
}
});
let attrs = node
.attributes()
.iter()
.filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
Some(node)
} else {
None
}
})
.cloned()
.collect::<Vec<_>>();
let props = attrs
.clone()
.iter()
.enumerate()
.filter(|(idx, attr)| {
idx < &spread_marker && {
@ -85,7 +88,7 @@ pub(crate) fn component_to_tokens(
});
let items_to_bind = attrs
.clone()
.iter()
.filter_map(|attr| {
if !is_attr_let(&attr.key) {
return None;
@ -107,7 +110,7 @@ pub(crate) fn component_to_tokens(
.collect::<Vec<_>>();
let items_to_clone = attrs
.clone()
.iter()
.filter_map(|attr| {
attr.key
.to_string()
@ -183,7 +186,7 @@ pub(crate) fn component_to_tokens(
quote! {}
} else {
let children = fragment_to_tokens(
&node.children,
&mut node.children,
TagType::Unknown,
Some(&mut slots),
global_class,
@ -261,6 +264,7 @@ pub(crate) fn component_to_tokens(
quote! {}
};
let name = node.name();
#[allow(unused_mut)] // used in debug
let mut component = quote! {
{

View file

@ -13,7 +13,10 @@ use rstml::node::{
CustomNode, KVAttributeValue, KeyedAttribute, Node, NodeAttribute,
NodeBlock, NodeElement, NodeName, NodeNameFragment,
};
use std::collections::{HashMap, HashSet};
use std::{
cmp::Ordering,
collections::{HashMap, HashSet},
};
use syn::{
spanned::Spanned, Expr, Expr::Tuple, ExprLit, ExprRange, Lit, LitStr,
RangeLimits, Stmt,
@ -28,7 +31,7 @@ pub(crate) enum TagType {
}
pub fn render_view(
nodes: &[Node],
nodes: &mut [Node],
global_class: Option<&TokenTree>,
view_marker: Option<String>,
) -> Option<TokenStream> {
@ -44,7 +47,7 @@ pub fn render_view(
}
1 => (
node_to_tokens(
&nodes[0],
&mut nodes[0],
TagType::Unknown,
None,
global_class,
@ -89,7 +92,7 @@ pub fn render_view(
}
fn element_children_to_tokens(
nodes: &[Node<impl CustomNode>],
nodes: &mut [Node<impl CustomNode>],
parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
@ -137,7 +140,7 @@ fn element_children_to_tokens(
}
fn fragment_to_tokens(
nodes: &[Node<impl CustomNode>],
nodes: &mut [Node<impl CustomNode>],
parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
@ -175,7 +178,7 @@ fn fragment_to_tokens(
}
fn children_to_tokens(
nodes: &[Node<impl CustomNode>],
nodes: &mut [Node<impl CustomNode>],
parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
@ -183,7 +186,7 @@ fn children_to_tokens(
) -> Vec<TokenStream> {
if nodes.len() == 1 {
match node_to_tokens(
&nodes[0],
&mut nodes[0],
parent_type,
parent_slots,
global_class,
@ -195,7 +198,7 @@ fn children_to_tokens(
} else {
let mut slots = HashMap::new();
let nodes = nodes
.iter()
.iter_mut()
.filter_map(|node| {
node_to_tokens(
node,
@ -219,7 +222,7 @@ fn children_to_tokens(
}
fn node_to_tokens(
node: &Node<impl CustomNode>,
node: &mut Node<impl CustomNode>,
parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
@ -232,7 +235,7 @@ fn node_to_tokens(
Some(quote! { ::leptos::tachys::html::doctype(#value) })
}
Node::Fragment(fragment) => fragment_to_tokens(
&fragment.children,
&mut fragment.children,
parent_type,
parent_slots,
global_class,
@ -270,12 +273,56 @@ fn text_to_tokens(text: &LitStr) -> TokenStream {
}
pub(crate) fn element_to_tokens(
node: &NodeElement<impl CustomNode>,
node: &mut NodeElement<impl CustomNode>,
mut parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
view_marker: Option<&str>,
) -> Option<TokenStream> {
// attribute sorting:
//
// the `class` and `style` attributes overwrite individual `class:` and `style:` attributes
// when they are set. as a result, we're going to sort the attributes so that `class` and
// `style` always come before all other attributes.
// if there's a spread marker, we don't want to move `class` or `style` before it
// so let's only sort attributes that come *before* a spread marker
let spread_position = node
.attributes()
.iter()
.position(|n| match n {
NodeAttribute::Block(node) => as_spread_attr(node).is_some(),
_ => false,
})
.unwrap_or_else(|| node.attributes().len());
// now, sort the attributes
node.attributes_mut()[0..spread_position].sort_by(|a, b| {
    // The leading path segment of an attribute key, if any:
    // e.g. `class:foo` -> "class", `style` -> "style".
    let leading_ident = |n: &NodeAttribute| match n {
        NodeAttribute::Attribute(attr) => match &attr.key {
            NodeName::Path(path) => {
                path.path.segments.first().map(|seg| seg.ident.to_string())
            }
            _ => None,
        },
        _ => None,
    };
    // `true` iff this is a plain `class` or `style` attribute, which must
    // run before the individual `class:`/`style:` attributes it would
    // otherwise overwrite.
    let is_class_or_style = |n: &NodeAttribute| {
        matches!(leading_ident(n).as_deref(), Some("class" | "style"))
    };
    // Compare on the boolean key so the comparator is a proper total order.
    // (A hand-written comparator that returns `Less` for *both* orderings of
    // two `class`/`style` attributes violates `sort_by`'s contract and can
    // panic at runtime with newer `std` sorts.) `false < true`, so reverse
    // the operands to move `class`/`style` to the front; the sort is stable,
    // so all other attributes keep their relative order.
    is_class_or_style(b).cmp(&is_class_or_style(a))
});
// check for duplicate attribute names and emit an error for all subsequent ones
let mut names = HashSet::new();
for attr in node.attributes() {
@ -299,7 +346,8 @@ pub(crate) fn element_to_tokens(
let name = node.name();
if is_component_node(node) {
if let Some(slot) = get_slot(node) {
slot_to_tokens(node, slot, parent_slots, global_class);
let slot = slot.clone();
slot_to_tokens(node, &slot, parent_slots, global_class);
None
} else {
Some(component_to_tokens(node, global_class))
@ -414,7 +462,7 @@ pub(crate) fn element_to_tokens(
let self_closing = is_self_closing(node);
let children = if !self_closing {
element_children_to_tokens(
&node.children,
&mut node.children,
parent_type,
parent_slots,
global_class,
@ -463,6 +511,25 @@ fn is_spread_marker(node: &NodeElement<impl CustomNode>) -> bool {
}
}
/// Recognizes a spread-marker block attribute of the form `{..}` or
/// `{..expr}` (a half-open range expression with no start bound).
///
/// Returns `None` when `node` is not a spread marker. Otherwise returns
/// `Some(inner)`, where `inner` is `Some(&expr)` for `{..expr}` and `None`
/// for a bare `{..}`.
fn as_spread_attr(node: &NodeBlock) -> Option<Option<&Expr>> {
    // Only a syntactically valid block can be a spread marker.
    let NodeBlock::ValidBlock(block) = node else {
        return None;
    };
    match block.stmts.first() {
        Some(Stmt::Expr(
            Expr::Range(ExprRange {
                start: None,
                limits: RangeLimits::HalfOpen(_),
                end,
                ..
            }),
            _,
        )) => Some(end.as_deref()),
        _ => None,
    }
}
fn attribute_to_tokens(
tag_type: TagType,
node: &NodeAttribute,
@ -470,29 +537,18 @@ fn attribute_to_tokens(
is_custom: bool,
) -> TokenStream {
match node {
NodeAttribute::Block(node) => {
let dotted = if let NodeBlock::ValidBlock(block) = node {
match block.stmts.first() {
Some(Stmt::Expr(
Expr::Range(ExprRange {
start: None,
limits: RangeLimits::HalfOpen(_),
end: Some(end),
..
}),
_,
)) => Some(quote! { .add_any_attr(#end) }),
_ => None,
NodeAttribute::Block(node) => as_spread_attr(node)
.flatten()
.map(|end| {
quote! {
.add_any_attr(#end)
}
} else {
None
};
dotted.unwrap_or_else(|| {
})
.unwrap_or_else(|| {
quote! {
.add_any_attr(#[allow(unused_braces)] { #node })
}
})
}
}),
NodeAttribute::Attribute(node) => {
let name = node.key.to_string();
if name == "node_ref" {

View file

@ -7,7 +7,7 @@ use std::collections::HashMap;
use syn::spanned::Spanned;
pub(crate) fn slot_to_tokens(
node: &NodeElement<impl CustomNode>,
node: &mut NodeElement<impl CustomNode>,
slot: &KeyedAttribute,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
@ -30,20 +30,25 @@ pub(crate) fn slot_to_tokens(
return;
};
let attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
if is_slot(node) {
None
let attrs = node
.attributes()
.iter()
.filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
if is_slot(node) {
None
} else {
Some(node)
}
} else {
Some(node)
None
}
} else {
None
}
});
})
.cloned()
.collect::<Vec<_>>();
let props = attrs
.clone()
.iter()
.filter(|attr| {
!attr.key.to_string().starts_with("let:")
&& !attr.key.to_string().starts_with("clone:")
@ -65,7 +70,7 @@ pub(crate) fn slot_to_tokens(
});
let items_to_bind = attrs
.clone()
.iter()
.filter_map(|attr| {
attr.key
.to_string()
@ -75,7 +80,7 @@ pub(crate) fn slot_to_tokens(
.collect::<Vec<_>>();
let items_to_clone = attrs
.clone()
.iter()
.filter_map(|attr| {
attr.key
.to_string()
@ -85,6 +90,7 @@ pub(crate) fn slot_to_tokens(
.collect::<Vec<_>>();
let dyn_attrs = attrs
.iter()
.filter(|attr| attr.key.to_string().starts_with("attr:"))
.filter_map(|attr| {
let name = &attr.key.to_string();
@ -107,7 +113,7 @@ pub(crate) fn slot_to_tokens(
quote! {}
} else {
let children = fragment_to_tokens(
&node.children,
&mut node.children,
TagType::Unknown,
Some(&mut slots),
global_class,

View file

@ -29,7 +29,7 @@ use leptos::{
/// #[component]
/// fn MyApp() -> impl IntoView {
/// provide_meta_context();
/// let (prefers_dark, set_prefers_dark) = create_signal(false);
/// let (prefers_dark, set_prefers_dark) = signal(false);
/// let body_class = move || {
/// if prefers_dark.get() {
/// "dark".to_string()