From 8f926e590cce1fb3a0d4120f215c771ddd88a084 Mon Sep 17 00:00:00 2001 From: Luiz Carvalho Date: Tue, 12 Mar 2024 19:58:21 -0300 Subject: [PATCH] refac: rebase syn 2 changes --- Cargo.lock | 30 ++-- sqlx-macros-core/Cargo.toml | 7 +- sqlx-macros-core/src/database/mod.rs | 1 + sqlx-macros-core/src/derives/attributes.rs | 166 ++++++++------------ sqlx-macros-core/src/derives/encode.rs | 4 +- sqlx-macros-core/src/derives/mod.rs | 1 - sqlx-macros-core/src/query/args.rs | 83 +++------- sqlx-macros-core/src/test_attr.rs | 171 +++++++++++---------- sqlx-macros/Cargo.toml | 4 +- sqlx-macros/src/lib.rs | 3 +- 10 files changed, 199 insertions(+), 271 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 82ef2a24..130a38c6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -488,10 +488,12 @@ dependencies = [ [[package]] name = "bigdecimal" -version = "0.3.1" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +checksum = "9324c8014cd04590682b34f1e9448d38f0674d0f7b2dc553331016ef0e4e9ebc" dependencies = [ + "autocfg", + "libm", "num-bigint", "num-integer", "num-traits", @@ -2520,9 +2522,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.78" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" dependencies = [ "unicode-ident", ] @@ -3155,7 +3157,7 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.7.4" +version = "0.8.0-alpha.0" dependencies = [ "anyhow", "async-std", @@ -3185,7 +3187,7 @@ dependencies = [ [[package]] name = "sqlx-cli" -version = "0.7.4" +version = "0.8.0-alpha.0" dependencies = [ "anyhow", "assert_cmd", @@ -3211,7 +3213,7 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.4" +version = "0.8.0-alpha.0" dependencies = [ "ahash 0.8.11", "async-io 1.13.0", @@ -3395,18 +3397,18 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.4" +version = "0.8.0-alpha.0" dependencies = [ "proc-macro2", "quote", "sqlx-core", "sqlx-macros-core", - "syn 1.0.109", + "syn 2.0.52", ] [[package]] name = "sqlx-macros-core" -version = "0.7.4" +version = "0.8.0-alpha.0" dependencies = [ "async-std", "dotenvy", @@ -3423,7 +3425,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 1.0.109", + "syn 2.0.52", "tempfile", "tokio", "url", @@ -3431,7 +3433,7 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.4" +version = "0.8.0-alpha.0" dependencies = [ "atoi", "base64 0.21.7", @@ -3476,7 +3478,7 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.4" +version = "0.8.0-alpha.0" dependencies = [ "atoi", "base64 0.21.7", @@ -3521,7 +3523,7 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.4" +version = "0.8.0-alpha.0" dependencies = [ "atoi", "chrono", diff --git a/sqlx-macros-core/Cargo.toml b/sqlx-macros-core/Cargo.toml index 48e8d264..effbd687 100644 --- a/sqlx-macros-core/Cargo.toml +++ b/sqlx-macros-core/Cargo.toml @@ -52,12 +52,11 @@ hex = { version = "0.4.3" } heck = { version = "0.4", features = ["unicode"] } either = "1.6.1" once_cell = "1.9.0" -proc-macro2 = { version = "1.0.36", default-features = false } +proc-macro2 = { version = "1.0.79", default-features = false } serde = { version = "1.0.132", features = ["derive"] } serde_json = { version = "1.0.73" } sha2 = { version = "0.10.0" 
}
-syn = { version = "1.0.84", default-features = false, features = ["full", "derive", "parsing", "printing", "clone-impls"] }
+syn = { version = "2.0.52", default-features = false, features = ["full", "derive", "parsing", "printing", "clone-impls"] }
 tempfile = { version = "3.3.0" }
-quote = { version = "1.0.14", default-features = false }
+quote = { version = "1.0.26", default-features = false }
 url = { version = "2.2.2", default-features = false }
-
diff --git a/sqlx-macros-core/src/database/mod.rs b/sqlx-macros-core/src/database/mod.rs
index c4fe6967..ddb8762a 100644
--- a/sqlx-macros-core/src/database/mod.rs
+++ b/sqlx-macros-core/src/database/mod.rs
@@ -172,6 +172,7 @@ mod mysql;
 mod sqlite;
 
 mod fake_sqlx {
+    #[cfg(any(feature = "mysql", feature = "postgres", feature = "sqlite"))]
     pub use sqlx_core::*;
 
     #[cfg(feature = "mysql")]
diff --git a/sqlx-macros-core/src/derives/attributes.rs b/sqlx-macros-core/src/derives/attributes.rs
index 554d7a4e..9d7be0e8 100644
--- a/sqlx-macros-core/src/derives/attributes.rs
+++ b/sqlx-macros-core/src/derives/attributes.rs
@@ -1,8 +1,8 @@
 use proc_macro2::{Ident, Span, TokenStream};
 use quote::quote;
 use syn::{
-    punctuated::Punctuated, spanned::Spanned, token::Comma, Attribute, DeriveInput, Field, Lit,
-    Meta, MetaNameValue, NestedMeta, Type, Variant,
+    punctuated::Punctuated, token::Comma, Attribute, DeriveInput, Field, LitStr, Meta, Token, Type,
+    Variant,
 };
 
 macro_rules! assert_attribute {
@@ -77,82 +77,53 @@ pub fn parse_container_attributes(input: &[Attribute]) -> syn::Result<SqlxContainerAttributes> {
-                for value in list.nested.iter() {
-                    match value {
-                        NestedMeta::Meta(meta) => match meta {
-                            Meta::Path(p) if p.is_ident("transparent") => {
-                                try_set!(transparent, true, value)
-                            }
+    for attr in input {
+        if attr.path().is_ident("sqlx") {
+            attr.parse_nested_meta(|meta| {
+                if meta.path.is_ident("transparent") {
+                    try_set!(transparent, true, attr);
+                } else if meta.path.is_ident("no_pg_array") {
+                    try_set!(no_pg_array, true, attr);
+                } else if meta.path.is_ident("default") {
+                    try_set!(default, true, attr);
+                } else if meta.path.is_ident("rename_all") {
+                    meta.input.parse::<Token![=]>()?;
+                    let lit: LitStr = meta.input.parse()?;
 
-                            Meta::Path(p) if p.is_ident("no_pg_array") => {
-                                try_set!(no_pg_array, true, value);
-                            }
+                    let val = match lit.value().as_str() {
+                        "lowercase" => RenameAll::LowerCase,
+                        "snake_case" => RenameAll::SnakeCase,
+                        "UPPERCASE" => RenameAll::UpperCase,
+                        "SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase,
+                        "kebab-case" => RenameAll::KebabCase,
+                        "camelCase" => RenameAll::CamelCase,
+                        "PascalCase" => RenameAll::PascalCase,
+                        _ => fail!(lit, "unexpected value for rename_all"),
+                    };
 
-                            Meta::NameValue(MetaNameValue {
-                                path,
-                                lit: Lit::Str(val),
-                                ..
-                            }) if path.is_ident("rename_all") => {
-                                let val = match &*val.value() {
-                                    "lowercase" => RenameAll::LowerCase,
-                                    "snake_case" => RenameAll::SnakeCase,
-                                    "UPPERCASE" => RenameAll::UpperCase,
-                                    "SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase,
-                                    "kebab-case" => RenameAll::KebabCase,
-                                    "camelCase" => RenameAll::CamelCase,
-                                    "PascalCase" => RenameAll::PascalCase,
-                                    _ => fail!(meta, "unexpected value for rename_all"),
-                                };
+                    try_set!(rename_all, val, lit)
+                } else if meta.path.is_ident("type_name") {
+                    meta.input.parse::<Token![=]>()?;
+                    let lit: LitStr = meta.input.parse()?;
+                    let name = TypeName {
+                        val: lit.value(),
+                        span: lit.span(),
+                    };
 
-                                try_set!(rename_all, val, value)
-                            }
-
-                            Meta::NameValue(MetaNameValue {
-                                path,
-                                lit: Lit::Str(val),
-                                ..
-                            }) if path.is_ident("type_name") => {
-                                try_set!(
-                                    type_name,
-                                    TypeName {
-                                        val: val.value(),
-                                        span: value.span(),
-                                    },
-                                    value
-                                )
-                            }
-
-                            Meta::Path(p) if p.is_ident("default") => {
-                                try_set!(default, true, value)
-                            }
-
-                            u => fail!(u, "unexpected attribute"),
-                        },
-                        u => fail!(u, "unexpected attribute"),
-                    }
+                    try_set!(type_name, name, lit)
+                } else {
+                    fail!(meta.path, "unexpected attribute")
                 }
+
+                Ok(())
+            })?;
+        } else if attr.path().is_ident("repr") {
+            let list: Punctuated<Meta, Token![,]> =
+                attr.parse_args_with(<Punctuated<Meta, Token![,]>>::parse_terminated)?;
+
+            if let Some(path) = list.iter().find_map(|f| f.require_path_only().ok()) {
+                try_set!(repr, path.get_ident().unwrap().clone(), list);
             }
-            Meta::List(list) if list.path.is_ident("repr") => {
-                if list.nested.len() != 1 {
-                    fail!(&list.nested, "expected one value")
-                }
-                match list.nested.first().unwrap() {
-                    NestedMeta::Meta(Meta::Path(p)) if p.get_ident().is_some() => {
-                        try_set!(repr, p.get_ident().unwrap().clone(), list);
-                    }
-                    u => fail!(u, "unexpected value"),
-                }
-            }
-            _ => {}
         }
     }
@@ -174,35 +145,28 @@ pub fn parse_child_attributes(input: &[Attribute]) -> syn::Result<SqlxChildAttributes> {
-                        NestedMeta::Meta(meta) => match meta {
-                            Meta::NameValue(MetaNameValue {
-                                path,
-                                lit: Lit::Str(val),
-                                ..
-                            }) if path.is_ident("rename") => try_set!(rename, val.value(), value),
-                            Meta::NameValue(MetaNameValue {
-                                path,
-                                lit: Lit::Str(val),
-                                ..
-                            }) if path.is_ident("try_from") => try_set!(try_from, val.parse()?, value),
-                            Meta::Path(path) if path.is_ident("default") => default = true,
-                            Meta::Path(path) if path.is_ident("flatten") => flatten = true,
-                            Meta::Path(path) if path.is_ident("skip") => skip = true,
-                            Meta::Path(path) if path.is_ident("json") => json = true,
-                            u => fail!(u, "unexpected attribute"),
-                        },
-                        u => fail!(u, "unexpected attribute"),
-                    }
+    for attr in input.iter().filter(|a| a.path().is_ident("sqlx")) {
+        attr.parse_nested_meta(|meta| {
+            if meta.path.is_ident("rename") {
+                meta.input.parse::<Token![=]>()?;
+                let val: LitStr = meta.input.parse()?;
+                try_set!(rename, val.value(), val);
+            } else if meta.path.is_ident("try_from") {
+                meta.input.parse::<Token![=]>()?;
+                let val: LitStr = meta.input.parse()?;
+                try_set!(try_from, val.parse()?, val);
+            } else if meta.path.is_ident("default") {
+                default = true;
+            } else if meta.path.is_ident("flatten") {
+                flatten = true;
+            } else if meta.path.is_ident("skip") {
+                skip = true;
+            } else if meta.path.is_ident("json") {
+                json = true;
             }
-        }
+
+            return Ok(());
+        })?;
 
     if json && flatten {
         fail!(
diff --git a/sqlx-macros-core/src/derives/encode.rs b/sqlx-macros-core/src/derives/encode.rs
index 7bb56821..823af65a 100644
--- a/sqlx-macros-core/src/derives/encode.rs
+++ b/sqlx-macros-core/src/derives/encode.rs
@@ -9,7 +9,7 @@ use syn::punctuated::Punctuated;
 use syn::token::Comma;
 use syn::{
     parse_quote, Data, DataEnum, DataStruct, DeriveInput, Expr, Field, Fields, FieldsNamed,
-    FieldsUnnamed, Lifetime, LifetimeDef, Stmt, Variant,
+    FieldsUnnamed, Lifetime, LifetimeParam, Stmt, Variant,
 };
 
 pub fn expand_derive_encode(input: &DeriveInput) -> syn::Result<TokenStream> {
@@ -66,7 +66,7 @@ fn expand_derive_encode_transparent(
     let mut generics = generics.clone();
     generics
         .params
-        .insert(0, LifetimeDef::new(lifetime.clone()).into());
+        .insert(0, LifetimeParam::new(lifetime.clone()).into());
 
     generics
         .params
diff --git a/sqlx-macros-core/src/derives/mod.rs b/sqlx-macros-core/src/derives/mod.rs
index 45e8d521..5f55ab5a 100644
--- a/sqlx-macros-core/src/derives/mod.rs
+++ b/sqlx-macros-core/src/derives/mod.rs
@@ -12,7 +12,6 @@ pub use row::expand_derive_from_row;
 use self::attributes::RenameAll;
 use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
 use proc_macro2::TokenStream;
-use std::iter::FromIterator;
 use syn::DeriveInput;
 
 pub fn expand_derive_type_encode_decode(input: &DeriveInput) -> syn::Result<TokenStream> {
diff --git a/sqlx-macros-core/src/query/args.rs b/sqlx-macros-core/src/query/args.rs
index 3a07b1b3..194a444a 100644
--- a/sqlx-macros-core/src/query/args.rs
+++ b/sqlx-macros-core/src/query/args.rs
@@ -1,11 +1,11 @@
 use crate::database::DatabaseExt;
 use crate::query::QueryMacroInput;
 use either::Either;
-use proc_macro2::{Ident, TokenStream};
+use proc_macro2::TokenStream;
 use quote::{format_ident, quote, quote_spanned};
 use sqlx_core::describe::Describe;
 use syn::spanned::Spanned;
-use syn::{Expr, ExprCast, ExprGroup, ExprType, Type};
+use syn::{Expr, ExprCast, ExprGroup, Type};
 
 /// Returns a tokenstream which typechecks the arguments passed to the macro
 /// and binds them to `DB::Arguments` with the ident `query_args`.
@@ -49,31 +49,28 @@ pub fn quote_args(
         .zip(arg_names.iter().zip(&input.arg_exprs))
         .enumerate()
         .map(|(i, (param_ty, (name, expr)))| -> crate::Result<_> {
-            let param_ty = match get_type_override(expr) {
+            if get_type_override(expr).is_some() {
                 // cast will fail to compile if the type does not match
                 // and we strip casts to wildcard
-                Some((_, false)) => return Ok(quote!()),
-                // type ascription is deprecated
-                Some((ty, true)) => return Ok(create_warning(name.clone(), &ty, &expr)),
-                None => {
-                    DB::param_type_for_id(&param_ty)
-                        .ok_or_else(|| {
-                            if let Some(feature_gate) = <DB as DatabaseExt>::get_feature_gate(&param_ty) {
-                                format!(
-                                    "optional sqlx feature `{}` required for type {} of param #{}",
-                                    feature_gate,
-                                    param_ty,
-                                    i + 1,
-                                )
-                            } else {
-                                format!("unsupported type {} for param #{}", param_ty, i + 1)
-                            }
-                        })?
-                        .parse::<TokenStream>()
-                        .map_err(|_| format!("Rust type mapping for {param_ty} not parsable"))?
+                return Ok(quote!());
+            }
-                }
-            };
+            let param_ty =
+                DB::param_type_for_id(&param_ty)
+                    .ok_or_else(|| {
+                        if let Some(feature_gate) = <DB as DatabaseExt>::get_feature_gate(&param_ty) {
+                            format!(
+                                "optional sqlx feature `{}` required for type {} of param #{}",
+                                feature_gate,
+                                param_ty,
+                                i + 1,
+                            )
+                        } else {
+                            format!("unsupported type {} for param #{}", param_ty, i + 1)
+                        }
+                    })?
+                    .parse::<TokenStream>()
+                    .map_err(|_| format!("Rust type mapping for {param_ty} not parsable"))?;
 
             Ok(quote_spanned!(expr.span() =>
                 // this shouldn't actually run
@@ -116,42 +113,10 @@ pub fn quote_args(
     })
 }
-fn create_warning(name: Ident, ty: &Type, expr: &Expr) -> TokenStream {
-    let Expr::Type(ExprType { expr: stripped, .. }) = expr else {
-        return quote!();
-    };
-    let current = quote!(#stripped: #ty).to_string();
-    let fix = quote!(#stripped as #ty).to_string();
-    let name = Ident::new(&format!("warning_{name}"), expr.span());
-
-    let message = format!(
-        "
-\t\tType ascription pattern is deprecated, prefer casting
-\t\tTry changing from
-\t\t\t`{current}`
-\t\tto
-\t\t\t`{fix}`
-
-\t\tSee for more information
-"
-    );
-
-    quote_spanned!(expr.span() =>
-        // this shouldn't actually run
-        if false {
-            #[deprecated(note = #message)]
-            #[allow(non_upper_case_globals)]
-            const #name: () = ();
-            let _ = #name;
-        }
-    )
-}
-
-fn get_type_override(expr: &Expr) -> Option<(&Type, bool)> {
+fn get_type_override(expr: &Expr) -> Option<&Type> {
     match expr {
         Expr::Group(group) => get_type_override(&group.expr),
-        Expr::Cast(cast) => Some((&cast.ty, false)),
-        Expr::Type(ascription) => Some((&ascription.ty, true)),
+        Expr::Cast(cast) => Some(&cast.ty),
         _ => None,
     }
 }
@@ -167,8 +132,6 @@ fn strip_wildcard(expr: Expr) -> Expr {
             group_token,
             expr: Box::new(strip_wildcard(*expr)),
         }),
-        // type ascription syntax is experimental so we always strip it
-        Expr::Type(ExprType { expr, .. }) => *expr,
        // we want to retain casts if they semantically matter
        Expr::Cast(ExprCast {
            attrs,
diff --git a/sqlx-macros-core/src/test_attr.rs b/sqlx-macros-core/src/test_attr.rs
index a86547cc..ff9ce511 100644
--- a/sqlx-macros-core/src/test_attr.rs
+++ b/sqlx-macros-core/src/test_attr.rs
@@ -1,5 +1,6 @@
 use proc_macro2::TokenStream;
 use quote::quote;
+use syn::parse::Parser;
 
 #[cfg(feature = "migrate")]
 struct Args {
@@ -23,7 +24,12 @@ enum MigrationsOpt {
     Disabled,
 }
 
-pub fn expand(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> {
+type AttributeArgs = syn::punctuated::Punctuated<syn::Meta, syn::Token![,]>;
+
+pub fn expand(args: TokenStream, input: syn::ItemFn) -> crate::Result<TokenStream> {
+    let parser = AttributeArgs::parse_terminated;
+    let args = parser.parse2(args)?;
+
     if input.sig.inputs.is_empty() {
         if !args.is_empty() {
             if cfg!(feature = "migrate") {
@@ -70,7 +76,7 @@ fn expand_simple(input: syn::ItemFn) -> TokenStream {
 }
 
 #[cfg(feature = "migrate")]
-fn expand_advanced(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> {
+fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> {
     let ret = &input.sig.output;
     let name = &input.sig.ident;
     let inputs = &input.sig.inputs;
@@ -180,97 +186,101 @@ fn expand_advanced(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Resul
 }
 
 #[cfg(feature = "migrate")]
-fn parse_args(attr_args: syn::AttributeArgs) -> syn::Result<Args> {
+fn parse_args(attr_args: AttributeArgs) -> syn::Result<Args> {
+    use syn::{punctuated::Punctuated, Expr, Lit, LitStr, Meta, MetaNameValue, Token};
+
     let mut fixtures = Vec::new();
     let mut migrations = MigrationsOpt::InferredPath;
 
     for arg in attr_args {
+        let path = arg.path().clone();
+
         match arg {
-            syn::NestedMeta::Meta(syn::Meta::List(list)) if list.path.is_ident("fixtures") => {
+            syn::Meta::List(list) if list.path.is_ident("fixtures") => {
                 let mut fixtures_local = vec![];
                 let mut fixtures_type = FixturesType::None;
-                for nested in list.nested {
-                    match nested {
-                        syn::NestedMeta::Lit(syn::Lit::Str(litstr)) => {
-                            // fixtures("","") or fixtures("","")
-                            parse_fixtures_args(&mut fixtures_type, litstr, &mut fixtures_local)?;
-                        },
-                        syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue))
-                            if namevalue.path.is_ident("path") =>
-                        {
-                            // fixtures(path = "", scripts("","")) checking `path` argument
-                            parse_fixtures_path_args(&mut fixtures_type, namevalue)?;
-                        },
-                        syn::NestedMeta::Meta(syn::Meta::List(list)) if list.path.is_ident("scripts") => {
-                            // fixtures(path = "", scripts("","")) checking `scripts` argument
-                            parse_fixtures_scripts_args(&mut fixtures_type, list, &mut fixtures_local)?;
-                        }
-                        other => {
-                            return Err(syn::Error::new_spanned(other, "expected string literal"))
-                        }
-                    };
+                let parse_nested = list.parse_nested_meta(|meta| {
+                    if meta.path.is_ident("path") {
+                        // fixtures(path = "", scripts("","")) checking `path` argument
+                        meta.input.parse::<Token![=]>()?;
+                        let val: LitStr = meta.input.parse()?;
+                        parse_fixtures_path_args(&mut fixtures_type, val)?;
+                    } else if meta.path.is_ident("scripts") {
+                        // fixtures(path = "", scripts("","")) checking `scripts` argument
+                        let parser = <Punctuated<LitStr, Token![,]>>::parse_terminated;
+                        let list = parser.parse2(list.tokens.clone())?;
+                        parse_fixtures_scripts_args(&mut fixtures_type, list, &mut fixtures_local)?;
+                    } else {
+                        return Err(syn::Error::new_spanned(
+                            meta.path,
+                            "unexpected fixture meta",
+                        ));
+                    }
+
+                    Ok(())
+                });
+
+                if parse_nested.is_err() {
+                    // fixtures("","") or fixtures("","")
+                    let args =
+                        list.parse_args_with(<Punctuated<LitStr, Token![,]>>::parse_terminated)?;
+                    for arg in args {
+                        parse_fixtures_args(&mut fixtures_type, arg, &mut fixtures_local)?;
+                    }
                 }
+
+                fixtures.push((fixtures_type, fixtures_local));
             }
-            syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue))
-                if namevalue.path.is_ident("migrations") =>
-            {
+            syn::Meta::NameValue(value) if value.path.is_ident("migrations") => {
                 if !matches!(migrations, MigrationsOpt::InferredPath) {
                     return Err(syn::Error::new_spanned(
-                        namevalue,
+                        value,
                        "cannot have more than one `migrations` or `migrator` arg",
                    ));
                }
 
-                migrations = match namevalue.lit {
-                    syn::Lit::Bool(litbool) => {
-                        if !litbool.value {
-                            // migrations = false
-                            MigrationsOpt::Disabled
-                        } else {
-                            // migrations = true
-                            return Err(syn::Error::new_spanned(
-                                litbool,
-                                "`migrations = true` is redundant",
-                            ));
-                        }
-                    }
-                    // migrations = ""
-                    syn::Lit::Str(litstr) => MigrationsOpt::ExplicitPath(litstr),
-                    _ => {
-                        return Err(syn::Error::new_spanned(
-                            namevalue,
-                            "expected string or `false`",
-                        ))
-                    }
+                let Expr::Lit(syn::ExprLit { lit, .. }) = value.value else {
+                    return Err(syn::Error::new_spanned(path, "expected string for `false`"));
                 };
-            }
-            syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue))
-                if namevalue.path.is_ident("migrator") =>
-            {
-                if !matches!(migrations, MigrationsOpt::InferredPath) {
+
+                migrations = match lit {
+                    // migrations = false
+                    Lit::Bool(b) if !b.value => MigrationsOpt::Disabled,
+                    // migrations = true
+                    Lit::Bool(b) => {
                        return Err(syn::Error::new_spanned(
-                            namevalue,
-                            "cannot have more than one `migrations` or `migrator` arg",
+                            b,
+                            "`migrations = true` is redundant",
                        ));
                    }
-
-                migrations = match namevalue.lit {
-                    // migrator = ""
-                    syn::Lit::Str(litstr) => MigrationsOpt::ExplicitMigrator(litstr.parse()?),
-                    _ => {
-                        return Err(syn::Error::new_spanned(
-                            namevalue,
-                            "expected string",
-                        ))
-                    }
-                };
+                    // migrations = "path"
+                    Lit::Str(s) => MigrationsOpt::ExplicitPath(s),
+                    lit => return Err(syn::Error::new_spanned(lit, "expected string or `false`")),
+                };
+            }
+            // migrator = ""
+            Meta::NameValue(MetaNameValue { value, .. }) if path.is_ident("migrator") => {
+                if !matches!(migrations, MigrationsOpt::InferredPath) {
+                    return Err(syn::Error::new_spanned(
+                        path,
+                        "cannot have more than one `migrations` or `migrator` arg",
+                    ));
                }
+
+                let Expr::Lit(syn::ExprLit {
+                    lit: Lit::Str(lit), ..
+                }) = value
+                else {
+                    return Err(syn::Error::new_spanned(path, "expected string"));
+                };
+
+                migrations = MigrationsOpt::ExplicitMigrator(lit.parse()?);
+            }
-            other => {
+            arg => {
                 return Err(syn::Error::new_spanned(
-                    other,
-                    "expected `fixtures(\"\", ...)` or `migrations = \"\" | false` or `migrator = \"\"`",
+                    arg,
+                    r#"expected `fixtures("", ...)` or `migrations = "" | false` or `migrator = ""`"#,
                 ))
             }
         }
     }
@@ -338,43 +348,34 @@ fn parse_fixtures_args(
 #[cfg(feature = "migrate")]
 fn parse_fixtures_path_args(
     fixtures_type: &mut FixturesType,
-    namevalue: syn::MetaNameValue,
+    namevalue: syn::LitStr,
 ) -> syn::Result<()> {
-    // fixtures(path = "", scripts("","")) checking `path` argument
     if !matches!(fixtures_type, FixturesType::None) {
         return Err(syn::Error::new_spanned(
             namevalue,
             "`path` must be the first argument of `fixtures`",
         ));
     }
-    *fixtures_type = match namevalue.lit {
-        // path = ""
-        syn::Lit::Str(litstr) => FixturesType::CustomRelativePath(litstr),
-        _ => return Err(syn::Error::new_spanned(namevalue, "expected string")),
-    };
+    *fixtures_type = FixturesType::CustomRelativePath(namevalue);
     Ok(())
 }
 
 #[cfg(feature = "migrate")]
 fn parse_fixtures_scripts_args(
     fixtures_type: &mut FixturesType,
-    list: syn::MetaList,
+    list: syn::punctuated::Punctuated<syn::LitStr, syn::Token![,]>,
     fixtures_local: &mut Vec<syn::LitStr>,
 ) -> syn::Result<()> {
     // fixtures(path = "", scripts("","")) checking `scripts` argument
+
     if !matches!(fixtures_type, FixturesType::CustomRelativePath(_)) {
         return Err(syn::Error::new_spanned(
             list,
             "`scripts` must be the second argument of `fixtures` and used together with `path`",
         ));
     }
-    for nested in list.nested {
-        let litstr = match nested {
-            syn::NestedMeta::Lit(syn::Lit::Str(litstr)) => litstr,
-            other => return Err(syn::Error::new_spanned(other, "expected string literal")),
-        };
-        fixtures_local.push(litstr);
-    }
+
+    fixtures_local.extend(list);
     Ok(())
 }
diff --git a/sqlx-macros/Cargo.toml b/sqlx-macros/Cargo.toml
index 82fecab7..82bf4a7d 100644
--- a/sqlx-macros/Cargo.toml
+++ b/sqlx-macros/Cargo.toml
@@ -44,5 +44,5 @@ sqlx-core = { workspace = true, features = ["any"] }
 sqlx-macros-core = { workspace = true }
 
 proc-macro2 = { version = "1.0.36", default-features = false }
-syn = { version = "1.0.84", default-features = false, features = ["parsing", "proc-macro"] }
-quote = { version = "1.0.14", default-features = false }
+syn = { version = "2.0.52", default-features = false, features = ["parsing", "proc-macro"] }
+quote = { version = "1.0.26", default-features = false }
diff --git a/sqlx-macros/src/lib.rs b/sqlx-macros/src/lib.rs
index 229c1030..96647cf1 100644
--- a/sqlx-macros/src/lib.rs
+++ b/sqlx-macros/src/lib.rs
@@ -79,10 +79,9 @@ pub fn migrate(input: TokenStream) -> TokenStream {
 
 #[proc_macro_attribute]
 pub fn test(args: TokenStream, input: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as syn::AttributeArgs);
     let input = syn::parse_macro_input!(input as syn::ItemFn);
 
-    match test_attr::expand(args, input) {
+    match test_attr::expand(args.into(), input) {
         Ok(ts) => ts.into(),
         Err(e) => {
             if let Some(parse_err) = e.downcast_ref::<syn::Error>() {