Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-26 13:03:31 +00:00)
Test macros doing edition dependent parsing
parent 2c32ee7cfa
commit 546eb6b530
8 changed files with 92 additions and 62 deletions
@@ -1921,3 +1921,59 @@ fn f() {
 "#]],
     );
 }
+
+#[test]
+fn test_edition_handling_out() {
+    check(
+        r#"
+//- /main.rs crate:main deps:old edition:2021
+macro_rules! r#try {
+    ($it:expr) => {
+        $it?
+    };
+}
+fn f() {
+    old::invoke_bare_try!(0);
+}
+//- /old.rs crate:old edition:2015
+#[macro_export]
+macro_rules! invoke_bare_try {
+    ($it:expr) => {
+        try!($it)
+    };
+}
+"#,
+        expect![[r#"
+macro_rules! r#try {
+    ($it:expr) => {
+        $it?
+    };
+}
+fn f() {
+    try!(0);
+}
+"#]],
+    );
+}
+
+#[test]
+fn test_edition_handling_in() {
+    check(
+        r#"
+//- /main.rs crate:main deps:old edition:2021
+fn f() {
+    old::parse_try_old!(try!{});
+}
+//- /old.rs crate:old edition:2015
+#[macro_export]
+macro_rules! parse_try_old {
+    ($it:expr) => {};
+}
+"#,
+        expect![[r#"
+fn f() {
+    ;
+}
+"#]],
+    );
+}
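Editorial note (not part of the commit): the 2021-edition fixture above has to spell the macro name as `r#try` because `try` has been a reserved keyword since edition 2018, while the 2015-edition `old` crate can still write a bare `try!(..)`. A minimal standalone sketch of that language rule, compilable on edition 2018 or later:

    // On edition 2018+ the `try` keyword is reserved, so a macro with that
    // name must be declared and invoked through a raw identifier.
    macro_rules! r#try {
        ($it:expr) => {
            $it?
        };
    }

    fn parse_number(s: &str) -> Result<i32, std::num::ParseIntError> {
        let n = r#try!(s.parse::<i32>());
        Ok(n)
    }

    fn main() {
        assert_eq!(parse_number("42"), Ok(42));
    }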
@@ -348,7 +348,7 @@ fn parse_macro_expansion(
 ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
     let _p = tracing::info_span!("parse_macro_expansion").entered();
     let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-    let edition = loc.def.edition;
+    let def_edition = loc.def.edition;
     let expand_to = loc.expand_to();
     let mbe::ValueResult { value: (tt, matched_arm), err } =
         macro_expand(db, macro_file.macro_call_id, loc);
@@ -359,7 +359,7 @@ fn parse_macro_expansion(
             CowArc::Owned(it) => it,
         },
         expand_to,
-        edition,
+        def_edition,
     );
     rev_token_map.matched_arm = matched_arm;

@@ -129,20 +129,7 @@ impl DeclarativeMacroExpander {
                 _ => None,
             }
         };
-        let toolchain = db.toolchain(def_crate);
-        let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
-            REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
-                &base_db::Version {
-                    pre: base_db::Prerelease::EMPTY,
-                    build: base_db::BuildMetadata::EMPTY,
-                    major: version.major,
-                    minor: version.minor,
-                    patch: version.patch,
-                },
-            )
-        });
-
-        let edition = |ctx: SyntaxContextId| {
+        let ctx_edition = |ctx: SyntaxContextId| {
             let crate_graph = db.crate_graph();
             if ctx.is_root() {
                 crate_graph[def_crate].edition
@@ -165,7 +152,7 @@ impl DeclarativeMacroExpander {
                     DocCommentDesugarMode::Mbe,
                 );

-                mbe::DeclarativeMacro::parse_macro_rules(&tt, edition, new_meta_vars)
+                mbe::DeclarativeMacro::parse_macro_rules(&tt, ctx_edition)
             }
             None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected(
                 "expected a token tree".into(),
@@ -193,12 +180,7 @@ impl DeclarativeMacroExpander {
                     DocCommentDesugarMode::Mbe,
                 );

-                mbe::DeclarativeMacro::parse_macro2(
-                    args.as_ref(),
-                    &body,
-                    edition,
-                    new_meta_vars,
-                )
+                mbe::DeclarativeMacro::parse_macro2(args.as_ref(), &body, ctx_edition)
             }
             None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected(
                 "expected a token tree".into(),
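Editorial note (not part of the commit): the hunk above drops the toolchain-version probe behind `new_meta_vars` and instead builds a single `ctx_edition` callback, so the macro parser can ask for the edition of each token's `SyntaxContextId` rather than taking one fixed edition plus a boolean. A rough toy model of that callback shape; `Edition`, `SyntaxContextId`, and `keyword_status` below are simplified stand-ins, not rust-analyzer's real types:

    // Toy model of an `impl Copy + Fn(SyntaxContextId) -> Edition` parameter:
    // the edition is resolved per token context instead of once per call.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    enum Edition {
        Edition2015,
        Edition2021,
    }

    #[derive(Clone, Copy)]
    struct SyntaxContextId(u32); // 0 stands for the root context in this sketch

    fn keyword_status(
        tokens: &[(&str, SyntaxContextId)],
        ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
    ) -> Vec<(String, Edition)> {
        tokens
            .iter()
            .map(|&(text, ctx)| (text.to_owned(), ctx_edition(ctx)))
            .collect()
    }

    fn main() {
        let def_crate_edition = Edition::Edition2021;
        // Mirrors the closure built in the hunk: root-context tokens take the
        // defining crate's edition; other contexts resolve to the edition of
        // the crate they came from (hard-wired to 2015 here for illustration).
        let ctx_edition = |ctx: SyntaxContextId| {
            if ctx.0 == 0 {
                def_crate_edition
            } else {
                Edition::Edition2015
            }
        };
        let report = keyword_status(
            &[("try", SyntaxContextId(1)), ("try", SyntaxContextId(0))],
            ctx_edition,
        );
        assert_eq!(report[0].1, Edition::Edition2015); // parsed under edition 2015
        assert_eq!(report[1].1, Edition::Edition2021); // parsed under edition 2021
    }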
@@ -25,9 +25,7 @@ fn benchmark_parse_macro_rules() {
         rules
             .values()
             .map(|it| {
-                DeclarativeMacro::parse_macro_rules(it, |_| span::Edition::CURRENT, true)
-                    .rules
-                    .len()
+                DeclarativeMacro::parse_macro_rules(it, |_| span::Edition::CURRENT).rules.len()
             })
             .sum()
     };
@@ -59,9 +57,7 @@ fn benchmark_expand_macro_rules() {
 fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
     macro_rules_fixtures_tt()
         .into_iter()
-        .map(|(id, tt)| {
-            (id, DeclarativeMacro::parse_macro_rules(&tt, |_| span::Edition::CURRENT, true))
-        })
+        .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, |_| span::Edition::CURRENT)))
         .collect()
 }

@@ -144,9 +144,7 @@ impl DeclarativeMacro {
     /// The old, `macro_rules! m {}` flavor.
     pub fn parse_macro_rules(
         tt: &tt::Subtree<Span>,
-        edition: impl Copy + Fn(SyntaxContextId) -> Edition,
-        // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then)
-        new_meta_vars: bool,
+        ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
     ) -> DeclarativeMacro {
         // Note: this parsing can be implemented using mbe machinery itself, by
         // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
@@ -156,7 +154,7 @@ impl DeclarativeMacro {
         let mut err = None;

         while src.len() > 0 {
-            let rule = match Rule::parse(edition, &mut src, new_meta_vars) {
+            let rule = match Rule::parse(ctx_edition, &mut src) {
                 Ok(it) => it,
                 Err(e) => {
                     err = Some(Box::new(e));
@@ -186,9 +184,7 @@ impl DeclarativeMacro {
     pub fn parse_macro2(
         args: Option<&tt::Subtree<Span>>,
         body: &tt::Subtree<Span>,
-        edition: impl Copy + Fn(SyntaxContextId) -> Edition,
-        // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then)
-        new_meta_vars: bool,
+        ctx_edition: impl Copy + Fn(SyntaxContextId) -> Edition,
     ) -> DeclarativeMacro {
         let mut rules = Vec::new();
         let mut err = None;
@@ -197,8 +193,8 @@ impl DeclarativeMacro {
             cov_mark::hit!(parse_macro_def_simple);

             let rule = (|| {
-                let lhs = MetaTemplate::parse_pattern(edition, args)?;
-                let rhs = MetaTemplate::parse_template(edition, body, new_meta_vars)?;
+                let lhs = MetaTemplate::parse_pattern(ctx_edition, args)?;
+                let rhs = MetaTemplate::parse_template(ctx_edition, body)?;

                 Ok(crate::Rule { lhs, rhs })
             })();
@@ -211,7 +207,7 @@ impl DeclarativeMacro {
             cov_mark::hit!(parse_macro_def_rules);
             let mut src = TtIter::new(body);
             while src.len() > 0 {
-                let rule = match Rule::parse(edition, &mut src, new_meta_vars) {
+                let rule = match Rule::parse(ctx_edition, &mut src) {
                     Ok(it) => it,
                     Err(e) => {
                         err = Some(Box::new(e));
@@ -264,7 +260,6 @@ impl Rule {
     fn parse(
         edition: impl Copy + Fn(SyntaxContextId) -> Edition,
         src: &mut TtIter<'_, Span>,
-        new_meta_vars: bool,
     ) -> Result<Self, ParseError> {
         let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
         src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
@@ -272,7 +267,7 @@ impl Rule {
         let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;

         let lhs = MetaTemplate::parse_pattern(edition, lhs)?;
-        let rhs = MetaTemplate::parse_template(edition, rhs, new_meta_vars)?;
+        let rhs = MetaTemplate::parse_template(edition, rhs)?;

         Ok(crate::Rule { lhs, rhs })
     }
@@ -367,7 +362,7 @@ fn expect_fragment<S: Copy + fmt::Debug>(
 ) -> ExpandResult<Option<tt::TokenTree<S>>> {
     use ::parser;
     let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
-    let parser_input = to_parser_input::to_parser_input(&buffer);
+    let parser_input = to_parser_input::to_parser_input(edition, &buffer);
     let tree_traversal = entry_point.parse(&parser_input, edition);
     let mut cursor = buffer.begin();
     let mut error = false;
@@ -31,15 +31,14 @@ impl MetaTemplate {
         edition: impl Copy + Fn(SyntaxContextId) -> Edition,
         pattern: &tt::Subtree<Span>,
     ) -> Result<Self, ParseError> {
-        MetaTemplate::parse(edition, pattern, Mode::Pattern, false)
+        MetaTemplate::parse(edition, pattern, Mode::Pattern)
     }

     pub(crate) fn parse_template(
         edition: impl Copy + Fn(SyntaxContextId) -> Edition,
         template: &tt::Subtree<Span>,
-        new_meta_vars: bool,
     ) -> Result<Self, ParseError> {
-        MetaTemplate::parse(edition, template, Mode::Template, new_meta_vars)
+        MetaTemplate::parse(edition, template, Mode::Template)
     }

     pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> {
@@ -50,13 +49,12 @@ impl MetaTemplate {
         edition: impl Copy + Fn(SyntaxContextId) -> Edition,
         tt: &tt::Subtree<Span>,
         mode: Mode,
-        new_meta_vars: bool,
     ) -> Result<Self, ParseError> {
         let mut src = TtIter::new(tt);

         let mut res = Vec::new();
         while let Some(first) = src.peek_n(0) {
-            let op = next_op(edition, first, &mut src, mode, new_meta_vars)?;
+            let op = next_op(edition, first, &mut src, mode)?;
             res.push(op);
         }

@@ -161,7 +159,6 @@ fn next_op(
     first_peeked: &tt::TokenTree<Span>,
     src: &mut TtIter<'_, Span>,
     mode: Mode,
-    new_meta_vars: bool,
 ) -> Result<Op, ParseError> {
     let res = match first_peeked {
         tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => {
@@ -181,14 +178,14 @@ fn next_op(
                 tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind {
                     tt::DelimiterKind::Parenthesis => {
                         let (separator, kind) = parse_repeat(src)?;
-                        let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?;
+                        let tokens = MetaTemplate::parse(edition, subtree, mode)?;
                         Op::Repeat { tokens, separator: separator.map(Arc::new), kind }
                     }
                     tt::DelimiterKind::Brace => match mode {
                         Mode::Template => {
-                            parse_metavar_expr(new_meta_vars, &mut TtIter::new(subtree)).map_err(
-                                |()| ParseError::unexpected("invalid metavariable expression"),
-                            )?
+                            parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| {
+                                ParseError::unexpected("invalid metavariable expression")
+                            })?
                         }
                         Mode::Pattern => {
                             return Err(ParseError::unexpected(
@@ -260,7 +257,7 @@ fn next_op(

         tt::TokenTree::Subtree(subtree) => {
             src.next().expect("first token already peeked");
-            let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?;
+            let tokens = MetaTemplate::parse(edition, subtree, mode)?;
             Op::Subtree { tokens, delimiter: subtree.delimiter }
         }
     };
@@ -343,7 +340,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, Repeat
     Err(ParseError::InvalidRepeat)
 }

-fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
+fn parse_metavar_expr(src: &mut TtIter<'_, Span>) -> Result<Op, ()> {
     let func = src.expect_ident()?;
     let args = src.expect_subtree()?;

@@ -355,18 +352,14 @@ fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result

     let op = match &func.sym {
         s if sym::ignore == *s => {
-            if new_meta_vars {
-                args.expect_dollar()?;
-            }
+            args.expect_dollar()?;
             let ident = args.expect_ident()?;
             Op::Ignore { name: ident.sym.clone(), id: ident.span }
         }
         s if sym::index == *s => Op::Index { depth: parse_depth(&mut args)? },
         s if sym::len == *s => Op::Len { depth: parse_depth(&mut args)? },
         s if sym::count == *s => {
-            if new_meta_vars {
-                args.expect_dollar()?;
-            }
+            args.expect_dollar()?;
             let ident = args.expect_ident()?;
             let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None };
             Op::Count { name: ident.sym.clone(), depth }
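Editorial note (not part of the commit): with the `new_meta_vars` compatibility flag gone, the `ignore` and `count` metavariable expressions always expect the `$` before the variable name (the spelling used by toolchains 1.76 and newer, e.g. `${count($x)}`, per the version check removed in the declarative.rs hunk). A rough sketch of that argument shape over plain string tokens; `Op` and `parse_metavar_expr` here are simplified stand-ins, not the mbe crate's API:

    // Simplified argument parsing for `${ignore($x)}` / `${count($x, depth)}`:
    // the leading `$` inside the parentheses is now mandatory.
    #[derive(Debug, PartialEq)]
    enum Op {
        Ignore { name: String },
        Count { name: String, depth: Option<usize> },
    }

    fn parse_metavar_expr(func: &str, args: &[&str]) -> Result<Op, ()> {
        let mut it = args.iter().copied();
        let expect_dollar = |tok: Option<&str>| if tok == Some("$") { Ok(()) } else { Err(()) };
        match func {
            "ignore" => {
                expect_dollar(it.next())?;
                let name = it.next().ok_or(())?.to_owned();
                Ok(Op::Ignore { name })
            }
            "count" => {
                expect_dollar(it.next())?;
                let name = it.next().ok_or(())?.to_owned();
                // optionally eat a trailing `, depth` argument
                let depth = match it.next() {
                    Some(",") => Some(it.next().ok_or(())?.parse::<usize>().map_err(|_| ())?),
                    _ => None,
                };
                Ok(Op::Count { name, depth })
            }
            _ => Err(()),
        }
    }

    fn main() {
        assert_eq!(
            parse_metavar_expr("count", &["$", "x", ",", "1"]),
            Ok(Op::Count { name: "x".into(), depth: Some(1) })
        );
        // The older spelling without the dollar is no longer accepted.
        assert!(parse_metavar_expr("ignore", &["x"]).is_err());
    }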
@@ -153,7 +153,7 @@ where
         } => TokenBuffer::from_tokens(token_trees),
         _ => TokenBuffer::from_subtree(tt),
     };
-    let parser_input = to_parser_input(&buffer);
+    let parser_input = to_parser_input(edition, &buffer);
     let parser_output = entry_point.parse(&parser_input, edition);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     for event in parser_output.iter() {
@@ -3,11 +3,15 @@

 use std::fmt;

+use span::Edition;
 use syntax::{SyntaxKind, SyntaxKind::*, T};

 use tt::buffer::TokenBuffer;

-pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
+pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(
+    edition: Edition,
+    buffer: &TokenBuffer<'_, S>,
+) -> parser::Input {
     let mut res = parser::Input::default();

     let mut current = buffer.begin();
@@ -60,6 +64,10 @@ pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>)
                         "_" => res.push(T![_]),
                         i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
                         _ if ident.is_raw.yes() => res.push(IDENT),
+                        "gen" if !edition.at_least_2024() => res.push(IDENT),
+                        "async" | "await" | "dyn" | "try" if !edition.at_least_2018() => {
+                            res.push(IDENT)
+                        }
                         text => match SyntaxKind::from_keyword(text) {
                             Some(kind) => res.push(kind),
                             None => {
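Editorial note (not part of the commit): the two new match arms make keyword recognition edition-aware: `gen` is only treated as a keyword on edition 2024+, and `async`/`await`/`dyn`/`try` only on 2018+; before those editions the token is pushed as a plain identifier. A compact sketch of that decision table with stand-in types (`Edition` and `classify` below are illustrative, not the real `span::Edition` or parser input API):

    // Edition-gated keyword classification, mirroring the two new match arms.
    #[derive(Clone, Copy, PartialEq, PartialOrd)]
    enum Edition {
        E2015,
        E2018,
        E2021,
        E2024,
    }

    #[derive(Debug, PartialEq)]
    enum TokenKind {
        Ident,
        Keyword,
    }

    fn classify(edition: Edition, text: &str) -> TokenKind {
        match text {
            "gen" if edition < Edition::E2024 => TokenKind::Ident,
            "async" | "await" | "dyn" | "try" if edition < Edition::E2018 => TokenKind::Ident,
            "gen" | "async" | "await" | "dyn" | "try" | "fn" | "match" => TokenKind::Keyword,
            _ => TokenKind::Ident,
        }
    }

    fn main() {
        assert_eq!(classify(Edition::E2015, "try"), TokenKind::Ident); // `try!(..)` still parses on 2015
        assert_eq!(classify(Edition::E2021, "try"), TokenKind::Keyword); // needs `r#try` on 2018+
        assert_eq!(classify(Edition::E2021, "gen"), TokenKind::Ident); // `gen` reserved only on 2024+
    }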