mirror of https://github.com/leptos-rs/leptos
synced 2024-11-10 06:44:17 +00:00

started working on component macro

parent 99ff73c721
commit a75cbee133
3 changed files with 96 additions and 263 deletions
@@ -16,7 +16,7 @@ itertools = "0.10"
 proc-macro-error = "1"
 proc-macro2 = "1"
 quote = "1"
-syn = { version = "1", features = ["full", "parsing", "extra-traits"] }
+syn = { version = "1", features = ["full"] }
 syn-rsx = "0.9"
 uuid = { version = "1", features = ["v4"] }
 leptos_dom = { path = "../leptos_dom", version = "0.0.18" }
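The `syn` feature trim matches the rewrite below: instead of hand-rolling a parser, the new code parses the annotated function as a whole `ItemFn`, which the "full" feature covers (string parsing itself is among `syn`'s default features). A minimal standalone sketch, not part of the commit, of the one parsing capability the new code relies on:

use syn::ItemFn;

fn main() {
    // With syn 1.x and features = ["full"] (defaults included), whole item
    // definitions such as functions parse fine; that is all the new Model
    // parser needs.
    let f: ItemFn = syn::parse_str("fn hello(name: String) -> String { name }").unwrap();
    assert_eq!(f.sig.ident.to_string(), "hello");
    assert_eq!(f.sig.inputs.len(), 1);
}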
@@ -1,277 +1,107 @@
-// Based in large part on Dioxus: https://github.com/DioxusLabs/dioxus/blob/master/packages/core-macro/src/inlineprops.rs
-
-#![allow(unstable_name_collisions)]
-
-use std::collections::HashMap;
-
-use itertools::Itertools;
-use proc_macro2::{Span, TokenStream as TokenStream2, TokenTree};
-use quote::{quote, ToTokens, TokenStreamExt};
+use proc_macro2::{Ident, TokenStream};
 use syn::{
-    parse::{Parse, ParseStream},
-    punctuated::Punctuated,
-    *,
+    parse::Parse, parse_quote, Attribute, FnArg, ItemFn, Pat, PatIdent, ReturnType, Type,
+    Visibility,
 };
 
-pub struct InlinePropsBody {
-    pub attrs: Vec<Attribute>,
-    pub vis: syn::Visibility,
-    pub fn_token: Token![fn],
-    pub ident: Ident,
-    pub cx_token: Box<Pat>,
-    pub generics: Generics,
-    pub paren_token: token::Paren,
-    pub inputs: Punctuated<FnArg, Token![,]>,
-    // pub fields: FieldsNamed,
-    pub output: ReturnType,
-    pub where_clause: Option<WhereClause>,
-    pub block: Box<Block>,
-    pub doc_comment: String,
+pub struct Model {
+    docs: Docs,
+    vis: Visibility,
+    name: Ident,
+    props: Vec<Prop>,
+    body: ItemFn,
+    ret: ReturnType,
 }
 
-/// The custom rusty variant of parsing rsx!
-impl Parse for InlinePropsBody {
-    fn parse(input: ParseStream) -> Result<Self> {
-        let attrs: Vec<Attribute> = input.call(Attribute::parse_outer)?;
-        let vis: Visibility = input.parse()?;
-
-        let fn_token = input.parse()?;
-        let ident = input.parse()?;
-        let generics: Generics = input.parse()?;
-
-        let content;
-        let paren_token = syn::parenthesized!(content in input);
-
-        let first_arg: FnArg = content.parse()?;
-        let cx_token = {
-            match first_arg {
-                FnArg::Receiver(_) => panic!("first argument must not be a receiver argument"),
-                FnArg::Typed(f) => f.pat,
-            }
-        };
-
-        let _: Result<Token![,]> = content.parse();
-
-        let inputs = syn::punctuated::Punctuated::parse_terminated(&content)?;
-
-        let output = input.parse()?;
-
-        let where_clause = input
-            .peek(syn::token::Where)
-            .then(|| input.parse())
-            .transpose()?;
-
-        let block = input.parse()?;
-
-        let doc_comment = attrs
-            .iter()
-            .filter_map(|attr| {
-                if attr.path.segments[0].ident == "doc" {
-                    Some(
-                        attr.clone()
-                            .tokens
-                            .into_iter()
-                            .filter_map(|token| {
-                                if let TokenTree::Literal(_) = token {
-                                    // remove quotes
-                                    let chars = token.to_string();
-                                    let mut chars = chars.chars();
-                                    chars.next();
-                                    chars.next_back();
-                                    Some(chars.as_str().to_string())
-                                } else {
-                                    None
-                                }
-                            })
-                            .collect::<String>(),
-                    )
-                } else {
-                    None
-                }
-            })
-            .intersperse_with(|| "\n".to_string())
-            .collect();
+impl Parse for Model {
+    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
+        let item = ItemFn::parse(input)?;
+
+        let docs = Docs::new(&item.attrs);
 
         Ok(Self {
-            vis,
-            fn_token,
-            ident,
-            generics,
-            paren_token,
-            inputs,
-            output,
-            where_clause,
-            block,
-            cx_token,
-            attrs,
-            doc_comment,
+            docs,
+            vis: item.vis.clone(),
+            name: item.sig.ident.clone(),
+            props: item.sig.inputs.clone().into_iter().map(Prop::new).collect(),
+            ret: item.sig.output.clone(),
+            body: item,
         })
     }
 }
 
-/// Serialize the same way, regardless of flavor
-impl ToTokens for InlinePropsBody {
-    fn to_tokens(&self, out_tokens: &mut TokenStream2) {
+impl Into<TokenStream> for Model {
+    fn into(self) -> TokenStream {
         let Self {
+            docs,
             vis,
-            ident,
-            generics,
-            inputs,
-            output,
-            where_clause,
-            block,
-            cx_token,
-            attrs,
-            doc_comment,
-            ..
+            name,
+            props,
+            body,
+            ret,
         } = self;
 
-        let field_docs: HashMap<String, String> = {
-            let mut map = HashMap::new();
-            let mut pieces = doc_comment.split("# Props");
-            pieces.next();
-            let rest = pieces.next().unwrap_or_default();
-            let mut current_field_name = String::new();
-            let mut current_field_value = String::new();
-            for line in rest.split('\n') {
-                if let Some(line) = line.strip_prefix(" - ") {
-                    let mut pieces = line.split("**");
-                    pieces.next();
-                    let field_name = pieces.next();
-                    let field_value = pieces.next().unwrap_or_default();
-                    let field_value = if let Some((_ty, desc)) = field_value.split_once('-') {
-                        desc
-                    } else {
-                        field_value
-                    };
-                    if let Some(field_name) = field_name {
-                        if !current_field_name.is_empty() {
-                            map.insert(current_field_name.clone(), current_field_value.clone());
-                        }
-                        current_field_name = field_name.to_string();
-                        current_field_value = String::new();
-                        current_field_value.push_str(field_value);
-                    } else {
-                        current_field_value.push_str(field_value);
-                    }
-                } else {
-                    current_field_value.push_str(line);
-                }
-            }
-            if !current_field_name.is_empty() {
-                map.insert(current_field_name, current_field_value.clone());
-            }
-
-            map
-        };
-
-        let fields = inputs.iter().map(|f| {
-            let typed_arg = match f {
-                FnArg::Receiver(_) => todo!(),
-                FnArg::Typed(t) => t,
-            };
-            let comment = if let Pat::Ident(ident) = &*typed_arg.pat {
-                field_docs.get(&ident.ident.to_string()).cloned()
-            } else {
-                None
-            }
-            .unwrap_or_default();
-            let comment_macro = quote! {
-                #[doc = #comment]
-            };
-            if let Type::Path(pat) = &*typed_arg.ty {
-                if pat.path.segments[0].ident == "Option" {
-                    quote! {
-                        #comment_macro
-                        #[builder(default, setter(strip_option, doc = #comment))]
-                        pub #f
-                    }
-                } else {
-                    quote! {
-                        #comment_macro
-                        #[builder(setter(doc = #comment))]
-                        pub #f
-                    }
-                }
-            } else {
-                quote! {
-                    #comment_macro
-                    #vis #f
-                }
-            }
-        });
-
-        let component_name_str = ident.to_string();
-        let struct_name = Ident::new(&format!("{}Props", ident), Span::call_site());
-        let prop_struct_comments = format!("Props for the [`{ident}`] component.");
-
-        let field_names = inputs.iter().filter_map(|f| match f {
-            FnArg::Receiver(_) => todo!(),
-            FnArg::Typed(t) => Some(&t.pat),
-        });
-
-        let first_lifetime = if let Some(GenericParam::Lifetime(lt)) = generics.params.first() {
-            Some(lt)
-        } else {
-            None
-        };
-
-        //let modifiers = if first_lifetime.is_some() {
-        let modifiers = quote! {
-            #[derive(leptos::TypedBuilder)]
-            #[builder(doc)]
-        };
-        /* } else {
-            quote! { #[derive(Props, PartialEq, Eq)] }
-        }; */
-
-        let (_scope_lifetime, fn_generics, struct_generics) = if let Some(lt) = first_lifetime {
-            let struct_generics: Punctuated<_, token::Comma> = generics
-                .params
-                .iter()
-                .map(|it| match it {
-                    GenericParam::Type(tp) => {
-                        let mut tp = tp.clone();
-                        tp.bounds.push(parse_quote!( 'a ));
-
-                        GenericParam::Type(tp)
-                    }
-                    _ => it.clone(),
-                })
-                .collect();
-
-            (
-                quote! { #lt, },
-                generics.clone(),
-                quote! { <#struct_generics> },
-            )
-        } else {
-            let fn_generics = generics.clone();
-
-            (quote! {}, fn_generics, quote! { #generics })
-        };
-
-        out_tokens.append_all(quote! {
-            #modifiers
-            #[doc = #prop_struct_comments]
-            #[allow(non_camel_case_types)]
-            #vis struct #struct_name #struct_generics
-            #where_clause
-            {
-                #(#fields),*
-            }
-
-            #[allow(non_snake_case)]
-            #(#attrs)*
-            #vis fn #ident #fn_generics (#cx_token: Scope, props: #struct_name #struct_generics) #output
-            #where_clause
-            {
-                let #struct_name { #(#field_names),* } = props;
-                ::leptos::Component::new(
-                    #component_name_str,
-                    move |#cx_token| #block
-                )
-            }
-        });
+        quote!(
+            #vis #name() #ret {
+                todo!();
+            }
+        );
+
+        todo!()
     }
 }
+
+struct Prop {
+    pub docs: Docs,
+    pub typed_builder_attrs: Vec<Attribute>,
+    pub name: PatIdent,
+    pub ty: Type,
+}
+
+impl Prop {
+    fn new(arg: FnArg) -> Self {
+        let typed = if let FnArg::Typed(ty) = arg {
+            ty
+        } else {
+            abort!(arg, "receiver not allowed in `fn`");
+        };
+
+        let typed_builder_attrs = typed
+            .attrs
+            .iter()
+            .filter(|attr| attr.path == parse_quote!(builder))
+            .cloned()
+            .collect();
+
+        let name = if let Pat::Ident(i) = *typed.pat {
+            i
+        } else {
+            abort!(
+                typed.pat,
+                "only `prop: bool` style types are allowed within the \
+                `#[component]` macro"
+            );
+        };
+
+        Self {
+            docs: Docs::new(&typed.attrs),
+            typed_builder_attrs,
+            name,
+            ty: *typed.ty,
+        }
+    }
+}
+
+struct Docs(pub Vec<Attribute>);
+
+impl Docs {
+    fn new(attrs: &[Attribute]) -> Self {
+        let attrs = attrs
+            .iter()
+            .filter(|attr| attr.path == parse_quote!(doc))
+            .cloned()
+            .collect();
+
+        Self(attrs)
+    }
+}
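The new `Model::parse` shifts the heavy lifting onto `syn`: one `ItemFn::parse` call replaces the hand-written attribute/generics/parenthesis parsing of `InlinePropsBody`, and each typed argument is then mapped to a `Prop`. A standalone sketch of that per-argument extraction (the `Counter` signature is a made-up example, not from the commit; assumes syn 1.x with the "full" feature and quote 1.x, as in the Cargo.toml hunk above):

use syn::{FnArg, ItemFn, Pat};

fn main() {
    // Parse a whole function item, then walk its arguments the way Prop::new
    // does: keep only typed args whose pattern is a plain identifier.
    let item: ItemFn =
        syn::parse_str("fn Counter(cx: Scope, initial_value: i32) {}").unwrap();
    for arg in item.sig.inputs {
        if let FnArg::Typed(typed) = arg {
            if let Pat::Ident(name) = *typed.pat {
                let ty = typed.ty;
                // prints "cx: Scope" and "initial_value: i32"
                println!("{}: {}", name.ident, quote::quote!(#ty));
            }
        }
    }
}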
@@ -1,9 +1,12 @@
 #![cfg_attr(not(feature = "stable"), feature(proc_macro_span))]
 
 #[macro_use]
 extern crate proc_macro_error;
 
 use proc_macro::{TokenStream, TokenTree};
 use quote::ToTokens;
 use server::server_macro_impl;
-use syn::{parse_macro_input, DeriveInput};
+use syn::{parse::Parse, parse_macro_input, DeriveInput};
 use syn_rsx::{parse, NodeElement};
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -341,12 +344,12 @@ pub fn view(tokens: TokenStream) -> TokenStream {
 /// todo!()
 /// }
 /// ```
 #[proc_macro_error::proc_macro_error]
 #[proc_macro_attribute]
 pub fn component(_args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
-    match syn::parse::<component::InlinePropsBody>(s) {
-        Err(e) => e.to_compile_error().into(),
-        Ok(s) => s.to_token_stream().into(),
-    }
+    let output: proc_macro2::TokenStream = parse_macro_input!(s as component::Model).into();
+
+    output.into()
 }
 
 #[proc_macro_attribute]
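At this commit, `Into<TokenStream> for Model` still ends in `todo!()`, so `#[component]` does not yet emit working output. For orientation, the removed `InlinePropsBody::to_tokens` shows the shape the macro is expected to produce; below is a simplified sketch of that old expansion for a hypothetical `Counter` component (names invented, doc-comment handling and generics omitted), not runnable on its own since it presumes the surrounding leptos types:

// Input (hypothetical):
//
//     #[component]
//     pub fn Counter(cx: Scope, initial_value: i32) -> Element { /* body */ }
//
// Old expansion, roughly:

#[derive(leptos::TypedBuilder)]
#[builder(doc)]
pub struct CounterProps {
    pub initial_value: i32,
}

#[allow(non_snake_case)]
pub fn Counter(cx: Scope, props: CounterProps) -> Element {
    let CounterProps { initial_value } = props;
    ::leptos::Component::new("Counter", move |cx| { /* body */ })
}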