refac: rebase syn 2 changes

Luiz Carvalho 2024-03-12 19:58:21 -03:00 committed by Austin Bonander
parent c5357f18e5
commit 8f926e590c
10 changed files with 199 additions and 271 deletions

Cargo.lock generated

@@ -488,10 +488,12 @@ dependencies = [
 [[package]]
 name = "bigdecimal"
-version = "0.3.1"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa"
+checksum = "9324c8014cd04590682b34f1e9448d38f0674d0f7b2dc553331016ef0e4e9ebc"
 dependencies = [
  "autocfg",
+ "libm",
  "num-bigint",
  "num-integer",
  "num-traits",

@@ -2520,9 +2522,9 @@ dependencies = [
 [[package]]
 name = "proc-macro2"
-version = "1.0.78"
+version = "1.0.79"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
+checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e"
 dependencies = [
  "unicode-ident",
 ]

@@ -3155,7 +3157,7 @@ dependencies = [
 [[package]]
 name = "sqlx"
-version = "0.7.4"
+version = "0.8.0-alpha.0"
 dependencies = [
  "anyhow",
  "async-std",

@@ -3185,7 +3187,7 @@ dependencies = [
 [[package]]
 name = "sqlx-cli"
-version = "0.7.4"
+version = "0.8.0-alpha.0"
 dependencies = [
  "anyhow",
  "assert_cmd",

@@ -3211,7 +3213,7 @@ dependencies = [
 [[package]]
 name = "sqlx-core"
-version = "0.7.4"
+version = "0.8.0-alpha.0"
 dependencies = [
  "ahash 0.8.11",
  "async-io 1.13.0",

@@ -3395,18 +3397,18 @@ dependencies = [
 [[package]]
 name = "sqlx-macros"
-version = "0.7.4"
+version = "0.8.0-alpha.0"
 dependencies = [
  "proc-macro2",
  "quote",
  "sqlx-core",
  "sqlx-macros-core",
- "syn 1.0.109",
+ "syn 2.0.52",
 ]

 [[package]]
 name = "sqlx-macros-core"
-version = "0.7.4"
+version = "0.8.0-alpha.0"
 dependencies = [
  "async-std",
  "dotenvy",

@@ -3423,7 +3425,7 @@ dependencies = [
  "sqlx-mysql",
  "sqlx-postgres",
  "sqlx-sqlite",
- "syn 1.0.109",
+ "syn 2.0.52",
  "tempfile",
  "tokio",
  "url",

@@ -3431,7 +3433,7 @@ dependencies = [
 [[package]]
 name = "sqlx-mysql"
-version = "0.7.4"
+version = "0.8.0-alpha.0"
 dependencies = [
  "atoi",
  "base64 0.21.7",

@@ -3476,7 +3478,7 @@ dependencies = [
 [[package]]
 name = "sqlx-postgres"
-version = "0.7.4"
+version = "0.8.0-alpha.0"
 dependencies = [
  "atoi",
  "base64 0.21.7",

@@ -3521,7 +3523,7 @@ dependencies = [
 [[package]]
 name = "sqlx-sqlite"
-version = "0.7.4"
+version = "0.8.0-alpha.0"
 dependencies = [
  "atoi",
  "chrono",


@@ -52,12 +52,11 @@ hex = { version = "0.4.3" }
 heck = { version = "0.4", features = ["unicode"] }
 either = "1.6.1"
 once_cell = "1.9.0"
-proc-macro2 = { version = "1.0.36", default-features = false }
+proc-macro2 = { version = "1.0.79", default-features = false }
 serde = { version = "1.0.132", features = ["derive"] }
 serde_json = { version = "1.0.73" }
 sha2 = { version = "0.10.0" }
-syn = { version = "1.0.84", default-features = false, features = ["full", "derive", "parsing", "printing", "clone-impls"] }
+syn = { version = "2.0.52", default-features = false, features = ["full", "derive", "parsing", "printing", "clone-impls"] }
 tempfile = { version = "3.3.0" }
-quote = { version = "1.0.14", default-features = false }
+quote = { version = "1.0.26", default-features = false }
 url = { version = "2.2.2", default-features = false }
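
The feature list on `syn` is load-bearing: parsing whole items such as `syn::ItemFn` (used by `#[sqlx::test]`) requires syn's `full` feature, and the attribute helpers below need `parsing`. A quick standalone check, assuming a scratch crate with this `syn = "2"` dependency (not part of this diff):

use syn::ItemFn;

// Confirms the "full" feature carried over above is enough to parse
// whole items, which the attribute macros in this commit rely on.
fn main() {
    let f: ItemFn = syn::parse_str("fn main() {}").expect("parse fn");
    assert_eq!(f.sig.ident, "main");
}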


@@ -172,6 +172,7 @@ mod mysql;
 mod sqlite;

 mod fake_sqlx {
+    #[cfg(any(feature = "mysql", feature = "postgres", feature = "sqlite"))]
     pub use sqlx_core::*;

     #[cfg(feature = "mysql")]


@@ -1,8 +1,8 @@
 use proc_macro2::{Ident, Span, TokenStream};
 use quote::quote;
 use syn::{
-    punctuated::Punctuated, spanned::Spanned, token::Comma, Attribute, DeriveInput, Field, Lit,
-    Meta, MetaNameValue, NestedMeta, Type, Variant,
+    punctuated::Punctuated, token::Comma, Attribute, DeriveInput, Field, LitStr, Meta, Token, Type,
+    Variant,
 };

 macro_rules! assert_attribute {
@@ -77,82 +77,53 @@ pub fn parse_container_attributes(input: &[Attribute]) -> syn::Result<SqlxContainerAttributes> {
     let mut no_pg_array = None;
     let mut default = None;

-    for attr in input
-        .iter()
-        .filter(|a| a.path.is_ident("sqlx") || a.path.is_ident("repr"))
-    {
-        let meta = attr
-            .parse_meta()
-            .map_err(|e| syn::Error::new_spanned(attr, e))?;
-        match meta {
-            Meta::List(list) if list.path.is_ident("sqlx") => {
-                for value in list.nested.iter() {
-                    match value {
-                        NestedMeta::Meta(meta) => match meta {
-                            Meta::Path(p) if p.is_ident("transparent") => {
-                                try_set!(transparent, true, value)
-                            }
-
-                            Meta::Path(p) if p.is_ident("no_pg_array") => {
-                                try_set!(no_pg_array, true, value);
-                            }
-
-                            Meta::NameValue(MetaNameValue {
-                                path,
-                                lit: Lit::Str(val),
-                                ..
-                            }) if path.is_ident("rename_all") => {
-                                let val = match &*val.value() {
-                                    "lowercase" => RenameAll::LowerCase,
-                                    "snake_case" => RenameAll::SnakeCase,
-                                    "UPPERCASE" => RenameAll::UpperCase,
-                                    "SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase,
-                                    "kebab-case" => RenameAll::KebabCase,
-                                    "camelCase" => RenameAll::CamelCase,
-                                    "PascalCase" => RenameAll::PascalCase,
-                                    _ => fail!(meta, "unexpected value for rename_all"),
-                                };
-
-                                try_set!(rename_all, val, value)
-                            }
-
-                            Meta::NameValue(MetaNameValue {
-                                path,
-                                lit: Lit::Str(val),
-                                ..
-                            }) if path.is_ident("type_name") => {
-                                try_set!(
-                                    type_name,
-                                    TypeName {
-                                        val: val.value(),
-                                        span: value.span(),
-                                    },
-                                    value
-                                )
-                            }
-
-                            Meta::Path(p) if p.is_ident("default") => {
-                                try_set!(default, true, value)
-                            }
-
-                            u => fail!(u, "unexpected attribute"),
-                        },
-                        u => fail!(u, "unexpected attribute"),
-                    }
-                }
-            }
-
-            Meta::List(list) if list.path.is_ident("repr") => {
-                if list.nested.len() != 1 {
-                    fail!(&list.nested, "expected one value")
-                }
-                match list.nested.first().unwrap() {
-                    NestedMeta::Meta(Meta::Path(p)) if p.get_ident().is_some() => {
-                        try_set!(repr, p.get_ident().unwrap().clone(), list);
-                    }
-                    u => fail!(u, "unexpected value"),
-                }
-            }
-            _ => {}
-        }
-    }
+    for attr in input {
+        if attr.path().is_ident("sqlx") {
+            attr.parse_nested_meta(|meta| {
+                if meta.path.is_ident("transparent") {
+                    try_set!(transparent, true, attr);
+                } else if meta.path.is_ident("no_pg_array") {
+                    try_set!(no_pg_array, true, attr);
+                } else if meta.path.is_ident("default") {
+                    try_set!(default, true, attr);
+                } else if meta.path.is_ident("rename_all") {
+                    meta.input.parse::<Token![=]>()?;
+                    let lit: LitStr = meta.input.parse()?;
+
+                    let val = match lit.value().as_str() {
+                        "lowercase" => RenameAll::LowerCase,
+                        "snake_case" => RenameAll::SnakeCase,
+                        "UPPERCASE" => RenameAll::UpperCase,
+                        "SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase,
+                        "kebab-case" => RenameAll::KebabCase,
+                        "camelCase" => RenameAll::CamelCase,
+                        "PascalCase" => RenameAll::PascalCase,
+                        _ => fail!(lit, "unexpected value for rename_all"),
+                    };
+
+                    try_set!(rename_all, val, lit)
+                } else if meta.path.is_ident("type_name") {
+                    meta.input.parse::<Token![=]>()?;
+                    let lit: LitStr = meta.input.parse()?;
+                    let name = TypeName {
+                        val: lit.value(),
+                        span: lit.span(),
+                    };
+
+                    try_set!(type_name, name, lit)
+                } else {
+                    fail!(meta.path, "unexpected attribute")
+                }
+
+                Ok(())
+            })?;
+        } else if attr.path().is_ident("repr") {
+            let list: Punctuated<Meta, Token![,]> =
+                attr.parse_args_with(<Punctuated<Meta, Token![,]>>::parse_terminated)?;
+
+            if let Some(path) = list.iter().find_map(|f| f.require_path_only().ok()) {
+                try_set!(repr, path.get_ident().unwrap().clone(), list);
+            }
+        }
+    }
@@ -174,35 +145,28 @@ pub fn parse_child_attributes(input: &[Attribute]) -> syn::Result<SqlxChildAttributes> {
     let mut skip: bool = false;
     let mut json = false;

-    for attr in input.iter().filter(|a| a.path.is_ident("sqlx")) {
-        let meta = attr
-            .parse_meta()
-            .map_err(|e| syn::Error::new_spanned(attr, e))?;
-
-        if let Meta::List(list) = meta {
-            for value in list.nested.iter() {
-                match value {
-                    NestedMeta::Meta(meta) => match meta {
-                        Meta::NameValue(MetaNameValue {
-                            path,
-                            lit: Lit::Str(val),
-                            ..
-                        }) if path.is_ident("rename") => try_set!(rename, val.value(), value),
-                        Meta::NameValue(MetaNameValue {
-                            path,
-                            lit: Lit::Str(val),
-                            ..
-                        }) if path.is_ident("try_from") => try_set!(try_from, val.parse()?, value),
-                        Meta::Path(path) if path.is_ident("default") => default = true,
-                        Meta::Path(path) if path.is_ident("flatten") => flatten = true,
-                        Meta::Path(path) if path.is_ident("skip") => skip = true,
-                        Meta::Path(path) if path.is_ident("json") => json = true,
-                        u => fail!(u, "unexpected attribute"),
-                    },
-                    u => fail!(u, "unexpected attribute"),
-                }
-            }
-        }
+    for attr in input.iter().filter(|a| a.path().is_ident("sqlx")) {
+        attr.parse_nested_meta(|meta| {
+            if meta.path.is_ident("rename") {
+                meta.input.parse::<Token![=]>()?;
+                let val: LitStr = meta.input.parse()?;
+                try_set!(rename, val.value(), val);
+            } else if meta.path.is_ident("try_from") {
+                meta.input.parse::<Token![=]>()?;
+                let val: LitStr = meta.input.parse()?;
+                try_set!(try_from, val.parse()?, val);
+            } else if meta.path.is_ident("default") {
+                default = true;
+            } else if meta.path.is_ident("flatten") {
+                flatten = true;
+            } else if meta.path.is_ident("skip") {
+                skip = true;
+            } else if meta.path.is_ident("json") {
+                json = true;
+            }
+
+            return Ok(());
+        })?;

         if json && flatten {
             fail!(
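
For context: syn 2 removed `Attribute::parse_meta` and the `NestedMeta` type, which is why both functions above are rewritten around `Attribute::parse_nested_meta`. A minimal self-contained sketch of the pattern (`demo`, `name`, and `flag` are illustrative names, not sqlx attributes):

use syn::{parse_quote, Attribute, LitStr, Token};

// Walk `#[demo(...)]` attributes with syn 2's `parse_nested_meta`,
// the replacement for the removed `parse_meta`/`NestedMeta` API.
fn parse_demo_attrs(attrs: &[Attribute]) -> syn::Result<(Option<String>, bool)> {
    let mut name = None;
    let mut flag = false;

    for attr in attrs.iter().filter(|a| a.path().is_ident("demo")) {
        attr.parse_nested_meta(|meta| {
            if meta.path.is_ident("name") {
                // `name = "value"`: consume the `=`, then the string literal
                meta.input.parse::<Token![=]>()?;
                let lit: LitStr = meta.input.parse()?;
                name = Some(lit.value());
            } else if meta.path.is_ident("flag") {
                // bare path form: `#[demo(flag)]`
                flag = true;
            } else {
                return Err(meta.error("unexpected attribute"));
            }
            Ok(())
        })?;
    }

    Ok((name, flag))
}

fn main() -> syn::Result<()> {
    let attr: Attribute = parse_quote!(#[demo(name = "users", flag)]);
    let (name, flag) = parse_demo_attrs(&[attr])?;
    assert_eq!(name.as_deref(), Some("users"));
    assert!(flag);
    Ok(())
}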


@@ -9,7 +9,7 @@ use syn::punctuated::Punctuated;
 use syn::token::Comma;
 use syn::{
     parse_quote, Data, DataEnum, DataStruct, DeriveInput, Expr, Field, Fields, FieldsNamed,
-    FieldsUnnamed, Lifetime, LifetimeDef, Stmt, Variant,
+    FieldsUnnamed, Lifetime, LifetimeParam, Stmt, Variant,
 };

 pub fn expand_derive_encode(input: &DeriveInput) -> syn::Result<TokenStream> {

@@ -66,7 +66,7 @@ fn expand_derive_encode_transparent(
     let mut generics = generics.clone();
     generics
         .params
-        .insert(0, LifetimeDef::new(lifetime.clone()).into());
+        .insert(0, LifetimeParam::new(lifetime.clone()).into());

     generics
         .params
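
`LifetimeDef` was renamed to `LifetimeParam` in syn 2 with no structural change. A small sketch of the same insert-a-lifetime move, with a hypothetical helper:

use proc_macro2::Span;
use syn::{Generics, Lifetime, LifetimeParam};

// syn 2 calls the declared-lifetime AST node `LifetimeParam`
// (syn 1: `LifetimeDef`); construction and insertion are unchanged.
fn prepend_lifetime(generics: &Generics, name: &str) -> Generics {
    let mut generics = generics.clone();
    let lifetime = Lifetime::new(name, Span::call_site());
    generics.params.insert(0, LifetimeParam::new(lifetime).into());
    generics
}

fn main() {
    let generics: Generics = syn::parse_quote!(<T: Clone>);
    let generics = prepend_lifetime(&generics, "'q");
    assert_eq!(generics.params.len(), 2);
}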


@@ -12,7 +12,6 @@ pub use row::expand_derive_from_row;
 use self::attributes::RenameAll;
 use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
 use proc_macro2::TokenStream;
-use std::iter::FromIterator;
 use syn::DeriveInput;

 pub fn expand_derive_type_encode_decode(input: &DeriveInput) -> syn::Result<TokenStream> {
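
(Dropping `use std::iter::FromIterator;` is safe on the 2021 edition, where the trait is part of the standard prelude.)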


@@ -1,11 +1,11 @@
 use crate::database::DatabaseExt;
 use crate::query::QueryMacroInput;
 use either::Either;
-use proc_macro2::{Ident, TokenStream};
+use proc_macro2::TokenStream;
 use quote::{format_ident, quote, quote_spanned};
 use sqlx_core::describe::Describe;
 use syn::spanned::Spanned;
-use syn::{Expr, ExprCast, ExprGroup, ExprType, Type};
+use syn::{Expr, ExprCast, ExprGroup, Type};

 /// Returns a tokenstream which typechecks the arguments passed to the macro
 /// and binds them to `DB::Arguments` with the ident `query_args`.
@@ -49,31 +49,28 @@ pub fn quote_args<DB: DatabaseExt>(
         .zip(arg_names.iter().zip(&input.arg_exprs))
         .enumerate()
         .map(|(i, (param_ty, (name, expr)))| -> crate::Result<_> {
-            let param_ty = match get_type_override(expr) {
+            if get_type_override(expr).is_some() {
                 // cast will fail to compile if the type does not match
                 // and we strip casts to wildcard
-                Some((_, false)) => return Ok(quote!()),
-                // type ascription is deprecated
-                Some((ty, true)) => return Ok(create_warning(name.clone(), &ty, &expr)),
-                None => {
-                    DB::param_type_for_id(&param_ty)
-                        .ok_or_else(|| {
-                            if let Some(feature_gate) = <DB as DatabaseExt>::get_feature_gate(&param_ty) {
-                                format!(
-                                    "optional sqlx feature `{}` required for type {} of param #{}",
-                                    feature_gate,
-                                    param_ty,
-                                    i + 1,
-                                )
-                            } else {
-                                format!("unsupported type {} for param #{}", param_ty, i + 1)
-                            }
-                        })?
-                        .parse::<TokenStream>()
-                        .map_err(|_| format!("Rust type mapping for {param_ty} not parsable"))?
-                }
-            };
+                return Ok(quote!());
+            }
+
+            let param_ty =
+                DB::param_type_for_id(&param_ty)
+                    .ok_or_else(|| {
+                        if let Some(feature_gate) = <DB as DatabaseExt>::get_feature_gate(&param_ty) {
+                            format!(
+                                "optional sqlx feature `{}` required for type {} of param #{}",
+                                feature_gate,
+                                param_ty,
+                                i + 1,
+                            )
+                        } else {
+                            format!("unsupported type {} for param #{}", param_ty, i + 1)
+                        }
+                    })?
+                    .parse::<TokenStream>()
+                    .map_err(|_| format!("Rust type mapping for {param_ty} not parsable"))?;

             Ok(quote_spanned!(expr.span() =>
                 // this shouldn't actually run
@@ -116,42 +113,10 @@ pub fn quote_args<DB: DatabaseExt>(
     })
 }

-fn create_warning(name: Ident, ty: &Type, expr: &Expr) -> TokenStream {
-    let Expr::Type(ExprType { expr: stripped, .. }) = expr else {
-        return quote!();
-    };
-    let current = quote!(#stripped: #ty).to_string();
-    let fix = quote!(#stripped as #ty).to_string();
-    let name = Ident::new(&format!("warning_{name}"), expr.span());
-
-    let message = format!(
-        "
-\t\tType ascription pattern is deprecated, prefer casting
-\t\tTry changing from
-\t\t\t`{current}`
-\t\tto
-\t\t\t`{fix}`
-\t\tSee <https://github.com/rust-lang/rfcs/pull/3307> for more information
-"
-    );
-
-    quote_spanned!(expr.span() =>
-        // this shouldn't actually run
-        if false {
-            #[deprecated(note = #message)]
-            #[allow(non_upper_case_globals)]
-            const #name: () = ();
-            let _ = #name;
-        }
-    )
-}
-
-fn get_type_override(expr: &Expr) -> Option<(&Type, bool)> {
+fn get_type_override(expr: &Expr) -> Option<&Type> {
     match expr {
         Expr::Group(group) => get_type_override(&group.expr),
-        Expr::Cast(cast) => Some((&cast.ty, false)),
-        Expr::Type(ascription) => Some((&ascription.ty, true)),
+        Expr::Cast(cast) => Some(&cast.ty),
         _ => None,
     }
 }
@@ -167,8 +132,6 @@ fn strip_wildcard(expr: Expr) -> Expr {
             group_token,
             expr: Box::new(strip_wildcard(*expr)),
         }),
-        // type ascription syntax is experimental so we always strip it
-        Expr::Type(ExprType { expr, .. }) => *expr,
         // we want to retain casts if they semantically matter
         Expr::Cast(ExprCast {
             attrs,
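
The `ExprType` removals above track the language itself: type ascription (`expr: Ty`) was removed from Rust, and syn 2 dropped the `ExprType` node entirely, so only casts remain as type overrides. A standalone sketch of the simplified detection (assumes syn with the `full` feature, as this workspace enables):

use syn::{parse_quote, Expr, Type};

// With ascription gone, only `expr as Ty` counts as a type override;
// parenthesized groups are unwrapped recursively, mirroring
// `get_type_override` above.
fn type_override(expr: &Expr) -> Option<&Type> {
    match expr {
        Expr::Group(group) => type_override(&group.expr),
        Expr::Cast(cast) => Some(&cast.ty),
        _ => None,
    }
}

fn main() {
    let cast: Expr = parse_quote!(count as i64);
    assert!(type_override(&cast).is_some());

    let plain: Expr = parse_quote!(count);
    assert!(type_override(&plain).is_none());
}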


@@ -1,5 +1,6 @@
 use proc_macro2::TokenStream;
 use quote::quote;
+use syn::parse::Parser;

 #[cfg(feature = "migrate")]
 struct Args {
@@ -23,7 +24,12 @@ enum MigrationsOpt {
     Disabled,
 }

-pub fn expand(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> {
+type AttributeArgs = syn::punctuated::Punctuated<syn::Meta, syn::Token![,]>;
+
+pub fn expand(args: TokenStream, input: syn::ItemFn) -> crate::Result<TokenStream> {
+    let parser = AttributeArgs::parse_terminated;
+    let args = parser.parse2(args)?;
+
     if input.sig.inputs.is_empty() {
         if !args.is_empty() {
             if cfg!(feature = "migrate") {
@@ -70,7 +76,7 @@ fn expand_simple(input: syn::ItemFn) -> TokenStream {
 }

 #[cfg(feature = "migrate")]
-fn expand_advanced(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> {
+fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> {
     let ret = &input.sig.output;
     let name = &input.sig.ident;
     let inputs = &input.sig.inputs;
@@ -180,97 +186,101 @@ fn expand_advanced(args: syn::AttributeArgs, input: syn::ItemFn) -> crate::Result<TokenStream> {
 }

 #[cfg(feature = "migrate")]
-fn parse_args(attr_args: syn::AttributeArgs) -> syn::Result<Args> {
+fn parse_args(attr_args: AttributeArgs) -> syn::Result<Args> {
+    use syn::{punctuated::Punctuated, Expr, Lit, LitStr, Meta, MetaNameValue, Token};
+
     let mut fixtures = Vec::new();
     let mut migrations = MigrationsOpt::InferredPath;

     for arg in attr_args {
+        let path = arg.path().clone();
+
         match arg {
-            syn::NestedMeta::Meta(syn::Meta::List(list)) if list.path.is_ident("fixtures") => {
+            syn::Meta::List(list) if list.path.is_ident("fixtures") => {
                 let mut fixtures_local = vec![];
                 let mut fixtures_type = FixturesType::None;

-                for nested in list.nested {
-                    match nested {
-                        syn::NestedMeta::Lit(syn::Lit::Str(litstr)) => {
-                            // fixtures("<file_1>","<file_2>") or fixtures("<path/file_1.sql>","<path/file_2.sql>")
-                            parse_fixtures_args(&mut fixtures_type, litstr, &mut fixtures_local)?;
-                        },
-                        syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue))
-                            if namevalue.path.is_ident("path") =>
-                        {
-                            // fixtures(path = "<path>", scripts("<file_1>","<file_2>")) checking `path` argument
-                            parse_fixtures_path_args(&mut fixtures_type, namevalue)?;
-                        },
-                        syn::NestedMeta::Meta(syn::Meta::List(list)) if list.path.is_ident("scripts") => {
-                            // fixtures(path = "<path>", scripts("<file_1>","<file_2>")) checking `scripts` argument
-                            parse_fixtures_scripts_args(&mut fixtures_type, list, &mut fixtures_local)?;
-                        }
-                        other => {
-                            return Err(syn::Error::new_spanned(other, "expected string literal"))
-                        }
-                    };
-                }
+                let parse_nested = list.parse_nested_meta(|meta| {
+                    if meta.path.is_ident("path") {
+                        // fixtures(path = "<path>", scripts("<file_1>","<file_2>")) checking `path` argument
+                        meta.input.parse::<Token![=]>()?;
+                        let val: LitStr = meta.input.parse()?;
+                        parse_fixtures_path_args(&mut fixtures_type, val)?;
+                    } else if meta.path.is_ident("scripts") {
+                        // fixtures(path = "<path>", scripts("<file_1>","<file_2>")) checking `scripts` argument
+                        let parser = <Punctuated<LitStr, Token![,]>>::parse_terminated;
+                        let list = parser.parse2(list.tokens.clone())?;
+                        parse_fixtures_scripts_args(&mut fixtures_type, list, &mut fixtures_local)?;
+                    } else {
+                        return Err(syn::Error::new_spanned(
+                            meta.path,
+                            "unexpected fixture meta",
+                        ));
+                    }
+
+                    Ok(())
+                });
+
+                if parse_nested.is_err() {
+                    // fixtures("<file_1>","<file_2>") or fixtures("<path/file_1.sql>","<path/file_2.sql>")
+                    let args =
+                        list.parse_args_with(<Punctuated<LitStr, Token![,]>>::parse_terminated)?;
+                    for arg in args {
+                        parse_fixtures_args(&mut fixtures_type, arg, &mut fixtures_local)?;
+                    }
+                }

                 fixtures.push((fixtures_type, fixtures_local));
             }
-            syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue))
-                if namevalue.path.is_ident("migrations") =>
-            {
+            syn::Meta::NameValue(value) if value.path.is_ident("migrations") => {
                 if !matches!(migrations, MigrationsOpt::InferredPath) {
                     return Err(syn::Error::new_spanned(
-                        namevalue,
+                        value,
                         "cannot have more than one `migrations` or `migrator` arg",
                     ));
                 }

-                migrations = match namevalue.lit {
-                    syn::Lit::Bool(litbool) => {
-                        if !litbool.value {
-                            // migrations = false
-                            MigrationsOpt::Disabled
-                        } else {
-                            // migrations = true
-                            return Err(syn::Error::new_spanned(
-                                litbool,
-                                "`migrations = true` is redundant",
-                            ));
-                        }
-                    }
-                    // migrations = "<path>"
-                    syn::Lit::Str(litstr) => MigrationsOpt::ExplicitPath(litstr),
-                    _ => {
-                        return Err(syn::Error::new_spanned(
-                            namevalue,
-                            "expected string or `false`",
-                        ))
-                    }
+                let Expr::Lit(syn::ExprLit { lit, .. }) = value.value else {
+                    return Err(syn::Error::new_spanned(path, "expected string for `false`"));
+                };
+
+                migrations = match lit {
+                    // migrations = false
+                    Lit::Bool(b) if !b.value => MigrationsOpt::Disabled,
+                    // migrations = true
+                    Lit::Bool(b) => {
+                        return Err(syn::Error::new_spanned(
+                            b,
+                            "`migrations = true` is redundant",
+                        ));
+                    }
+                    // migrations = "path"
+                    Lit::Str(s) => MigrationsOpt::ExplicitPath(s),
+                    lit => return Err(syn::Error::new_spanned(lit, "expected string or `false`")),
                 };
             }
-            syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue))
-                if namevalue.path.is_ident("migrator") =>
-            {
+            // migrator = "<path>"
+            Meta::NameValue(MetaNameValue { value, .. }) if path.is_ident("migrator") => {
                 if !matches!(migrations, MigrationsOpt::InferredPath) {
                     return Err(syn::Error::new_spanned(
-                        namevalue,
+                        path,
                         "cannot have more than one `migrations` or `migrator` arg",
                     ));
                 }

-                migrations = match namevalue.lit {
-                    // migrator = "<path>"
-                    syn::Lit::Str(litstr) => MigrationsOpt::ExplicitMigrator(litstr.parse()?),
-                    _ => {
-                        return Err(syn::Error::new_spanned(
-                            namevalue,
-                            "expected string",
-                        ))
-                    }
-                };
+                let Expr::Lit(syn::ExprLit {
+                    lit: Lit::Str(lit), ..
+                }) = value
+                else {
+                    return Err(syn::Error::new_spanned(path, "expected string"));
+                };
+
+                migrations = MigrationsOpt::ExplicitMigrator(lit.parse()?);
             }
-            other => {
+            arg => {
                 return Err(syn::Error::new_spanned(
-                    other,
-                    "expected `fixtures(\"<filename>\", ...)` or `migrations = \"<path>\" | false` or `migrator = \"<rust path>\"`",
+                    arg,
+                    r#"expected `fixtures("<filename>", ...)` or `migrations = "<path>" | false` or `migrator = "<rust path>"`"#,
                 ))
             }
         }
@@ -338,43 +348,34 @@ fn parse_fixtures_args(
 #[cfg(feature = "migrate")]
 fn parse_fixtures_path_args(
     fixtures_type: &mut FixturesType,
-    namevalue: syn::MetaNameValue,
+    namevalue: syn::LitStr,
 ) -> syn::Result<()> {
     // fixtures(path = "<path>", scripts("<file_1>","<file_2>")) checking `path` argument
     if !matches!(fixtures_type, FixturesType::None) {
         return Err(syn::Error::new_spanned(
             namevalue,
             "`path` must be the first argument of `fixtures`",
         ));
     }
-    *fixtures_type = match namevalue.lit {
-        // path = "<path>"
-        syn::Lit::Str(litstr) => FixturesType::CustomRelativePath(litstr),
-        _ => return Err(syn::Error::new_spanned(namevalue, "expected string")),
-    };
+    *fixtures_type = FixturesType::CustomRelativePath(namevalue);
     Ok(())
 }

 #[cfg(feature = "migrate")]
 fn parse_fixtures_scripts_args(
     fixtures_type: &mut FixturesType,
-    list: syn::MetaList,
+    list: syn::punctuated::Punctuated<syn::LitStr, syn::Token![,]>,
     fixtures_local: &mut Vec<syn::LitStr>,
 ) -> syn::Result<()> {
     // fixtures(path = "<path>", scripts("<file_1>","<file_2>")) checking `scripts` argument
     if !matches!(fixtures_type, FixturesType::CustomRelativePath(_)) {
         return Err(syn::Error::new_spanned(
             list,
             "`scripts` must be the second argument of `fixtures` and used together with `path`",
         ));
     }
-    for nested in list.nested {
-        let litstr = match nested {
-            syn::NestedMeta::Lit(syn::Lit::Str(litstr)) => litstr,
-            other => return Err(syn::Error::new_spanned(other, "expected string literal")),
-        };
-        fixtures_local.push(litstr);
-    }
+    fixtures_local.extend(list);
     Ok(())
 }
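
syn 2 also deleted the `syn::AttributeArgs` alias, hence the local `type AttributeArgs` above and the switch to the `Parser` trait. A self-contained sketch of that parsing step (the `quote!` input is an invented example):

use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use syn::{Meta, Token};

// Replacement for the removed `syn::AttributeArgs`: parse the raw
// attribute tokens into a comma-separated list of `Meta` via `parse2`.
fn parse_attr_args(args: TokenStream) -> syn::Result<Punctuated<Meta, Token![,]>> {
    Punctuated::<Meta, Token![,]>::parse_terminated.parse2(args)
}

fn main() -> syn::Result<()> {
    let args = parse_attr_args(quote!(migrations = false, fixtures("users")))?;
    assert_eq!(args.len(), 2);
    Ok(())
}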


@@ -44,5 +44,5 @@ sqlx-core = { workspace = true, features = ["any"] }
 sqlx-macros-core = { workspace = true }

 proc-macro2 = { version = "1.0.36", default-features = false }
-syn = { version = "1.0.84", default-features = false, features = ["parsing", "proc-macro"] }
-quote = { version = "1.0.14", default-features = false }
+syn = { version = "2.0.52", default-features = false, features = ["parsing", "proc-macro"] }
+quote = { version = "1.0.26", default-features = false }


@@ -79,10 +79,9 @@ pub fn migrate(input: TokenStream) -> TokenStream {
 #[proc_macro_attribute]
 pub fn test(args: TokenStream, input: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as syn::AttributeArgs);
     let input = syn::parse_macro_input!(input as syn::ItemFn);

-    match test_attr::expand(args, input) {
+    match test_attr::expand(args.into(), input) {
         Ok(ts) => ts.into(),
         Err(e) => {
             if let Some(parse_err) = e.downcast_ref::<syn::Error>() {
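
(`args` stays a raw `proc_macro::TokenStream` at the entry point; `.into()` converts it to the `proc_macro2::TokenStream` that `test_attr::expand` now parses itself, replacing the deleted `parse_macro_input!(args as syn::AttributeArgs)` step.)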