diff --git a/Cargo.lock b/Cargo.lock
index 1209cee03c..0de4ae46df 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -521,6 +521,7 @@ dependencies = [
  "limit",
  "mbe",
  "once_cell",
+ "parser",
  "profile",
  "ra-ap-rustc_abi",
  "ra-ap-rustc_parse_format",
@@ -551,6 +552,7 @@ dependencies = [
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "limit",
  "mbe",
+ "parser",
  "rustc-hash",
  "smallvec",
  "span",
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index 523ff6fc40..1076cab544 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -44,6 +44,7 @@ cfg.workspace = true
 tt.workspace = true
 limit.workspace = true
 span.workspace = true
+parser.workspace = true
 
 [dev-dependencies]
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 23b10cfd8e..67a9d541bc 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -316,8 +316,11 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
         _: Span,
         _: Span,
     ) -> Result<Subtree, ProcMacroExpansionError> {
-        let (parse, _) =
-            ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
+        let (parse, _) = ::mbe::token_tree_to_syntax_node(
+            subtree,
+            ::mbe::TopEntryPoint::MacroItems,
+            parser::Edition::Edition2021,
+        );
         if parse.errors().is_empty() {
             Ok(subtree.clone())
         } else {
diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml
index 4f30808015..a9eeb58b2b 100644
--- a/crates/hir-expand/Cargo.toml
+++ b/crates/hir-expand/Cargo.toml
@@ -32,6 +32,7 @@ tt.workspace = true
 mbe.workspace = true
 limit.workspace = true
 span.workspace = true
+parser.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs
index 528038a9cc..f9fafa9c72 100644
--- a/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/crates/hir-expand/src/builtin_derive_macro.rs
@@ -204,7 +204,11 @@ struct BasicAdtInfo {
 }
 
 fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
-    let (parsed, tm) = &mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
+    let (parsed, tm) = &mbe::token_tree_to_syntax_node(
+        tt,
+        mbe::TopEntryPoint::MacroItems,
+        parser::Edition::Edition2021,
+    );
     let macro_items = ast::MacroItems::cast(parsed.syntax_node())
         .ok_or_else(|| ExpandError::other("invalid item definition"))?;
     let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index a961ad14a6..8fe7a04209 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -676,7 +676,7 @@ fn token_tree_to_syntax_node(
         ExpandTo::Type => mbe::TopEntryPoint::Type,
         ExpandTo::Expr => mbe::TopEntryPoint::Expr,
     };
-    mbe::token_tree_to_syntax_node(tt, entry_point)
+    mbe::token_tree_to_syntax_node(tt, entry_point, parser::Edition::Edition2021)
 }
 
 fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 959595afb5..eed7d4a78e 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -417,7 +417,11 @@ mod tests {
         expect.assert_eq(&actual);
 
         // the fixed-up tree should be syntactically valid
-        let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
+        let (parse, _) = mbe::token_tree_to_syntax_node(
+            &tt,
+            ::mbe::TopEntryPoint::MacroItems,
+            parser::Edition::Edition2021,
+        );
         assert!(
             parse.errors().is_empty(),
             "parse has syntax errors. parse tree:\n{:#?}",
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 3170834d54..2faef23ed1 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -743,9 +743,11 @@ fn match_meta_var(
 ) -> ExpandResult<Option<Fragment>> {
     let fragment = match kind {
         MetaVarKind::Path => {
-            return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| {
-                it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
-            });
+            return input
+                .expect_fragment(parser::PrefixEntryPoint::Path, parser::Edition::Edition2021)
+                .map(|it| {
+                    it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
+                });
         }
         MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
         MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
@@ -770,21 +772,23 @@ fn match_meta_var(
                 }
                 _ => {}
             };
-            return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
-                tt.map(|tt| match tt {
-                    tt::TokenTree::Leaf(leaf) => tt::Subtree {
-                        delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
-                        token_trees: Box::new([leaf.into()]),
-                    },
-                    tt::TokenTree::Subtree(mut s) => {
-                        if s.delimiter.kind == tt::DelimiterKind::Invisible {
-                            s.delimiter.kind = tt::DelimiterKind::Parenthesis;
+            return input
+                .expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::Edition2021)
+                .map(|tt| {
+                    tt.map(|tt| match tt {
+                        tt::TokenTree::Leaf(leaf) => tt::Subtree {
+                            delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
+                            token_trees: Box::new([leaf.into()]),
+                        },
+                        tt::TokenTree::Subtree(mut s) => {
+                            if s.delimiter.kind == tt::DelimiterKind::Invisible {
+                                s.delimiter.kind = tt::DelimiterKind::Parenthesis;
+                            }
+                            s
                         }
-                        s
-                    }
-                })
-                .map(Fragment::Expr)
-            });
+                    })
+                    .map(Fragment::Expr)
+                });
         }
         MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
             let tt_result = match kind {
@@ -819,7 +823,7 @@ fn match_meta_var(
             return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
         }
     };
-    input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
+    input.expect_fragment(fragment, parser::Edition::Edition2021).map(|it| it.map(Fragment::Tokens))
 }
 
 fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index a22bbf833a..7d1ac242da 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -119,6 +119,7 @@ where
 pub fn token_tree_to_syntax_node<Ctx>(
     tt: &tt::Subtree<SpanData<Ctx>>,
     entry_point: parser::TopEntryPoint,
+    edition: parser::Edition,
 ) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
 where
     SpanData<Ctx>: Copy + fmt::Debug,
@@ -131,7 +132,7 @@ where
         _ => TokenBuffer::from_subtree(tt),
     };
     let parser_input = to_parser_input(&buffer);
-    let parser_output = entry_point.parse(&parser_input, parser::Edition::Edition2021);
+    let parser_output = entry_point.parse(&parser_input, edition);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     for event in parser_output.iter() {
         match event {
@@ -194,7 +195,8 @@ where
     let mut res = Vec::new();
 
     while iter.peek_n(0).is_some() {
-        let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);
+        let expanded =
+            iter.expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::Edition2021);
 
         res.push(match expanded.value {
             None => break,
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs
index 12f7deafd6..9c7d7af7b1 100644
--- a/crates/mbe/src/tt_iter.rs
+++ b/crates/mbe/src/tt_iter.rs
@@ -140,10 +140,11 @@ impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> {
     pub(crate) fn expect_fragment(
         &mut self,
         entry_point: parser::PrefixEntryPoint,
+        edition: parser::Edition,
     ) -> ExpandResult<Option<tt::TokenTree<S>>> {
         let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
         let parser_input = to_parser_input(&buffer);
-        let tree_traversal = entry_point.parse(&parser_input, parser::Edition::Edition2021);
+        let tree_traversal = entry_point.parse(&parser_input, edition);
         let mut cursor = buffer.begin();
         let mut error = false;
         for step in tree_traversal.iter() {
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index 41f560fce2..1c628a948c 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -172,7 +172,7 @@ pub use crate::ast::SourceFile;
 impl SourceFile {
     pub fn parse(text: &str) -> Parse<SourceFile> {
         let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered();
-        let (green, errors) = parsing::parse_text(text);
+        let (green, errors) = parsing::parse_text(text, parser::Edition::Edition2021);
         let root = SyntaxNode::new_root(green.clone());
 
         assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
@@ -185,7 +185,10 @@ impl SourceFile {
 }
 
 impl ast::TokenTree {
-    pub fn reparse_as_comma_separated_expr(self) -> Parse<ast::MacroEagerInput> {
+    pub fn reparse_as_comma_separated_expr(
+        self,
+        edition: parser::Edition,
+    ) -> Parse<ast::MacroEagerInput> {
         let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
 
         let mut parser_input = parser::Input::default();
@@ -219,8 +222,7 @@ impl ast::TokenTree {
             }
         }
 
-        let parser_output = parser::TopEntryPoint::MacroEagerInput
-            .parse(&parser_input, parser::Edition::Edition2021);
+        let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input, edition);
 
         let mut tokens =
             self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs
index 35683c9eed..420f4938e5 100644
--- a/crates/syntax/src/parsing.rs
+++ b/crates/syntax/src/parsing.rs
@@ -9,12 +9,11 @@ use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
 
 pub(crate) use crate::parsing::reparsing::incremental_reparse;
 
-pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::span!(tracing::Level::INFO, "parse_text").entered();
     let lexed = parser::LexedStr::new(text);
     let parser_input = lexed.to_input();
-    let parser_output =
-        parser::TopEntryPoint::SourceFile.parse(&parser_input, parser::Edition::Edition2021);
+    let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
 }
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 3f48bdddfa..5186d48c61 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -26,7 +26,9 @@ pub(crate) fn incremental_reparse(
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
 
-    if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
+    if let Some((green, new_errors, old_range)) =
+        reparse_block(node, edit, parser::Edition::Edition2021)
+    {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
     None
@@ -84,6 +86,7 @@ fn reparse_token(
 fn reparse_block(
     root: &SyntaxNode,
     edit: &Indel,
+    edition: parser::Edition,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
     let text = get_text_after_edit(node.clone().into(), edit);
@@ -94,7 +97,7 @@ fn reparse_block(
         return None;
     }
 
-    let tree_traversal = reparser.parse(&parser_input, parser::Edition::Edition2021);
+    let tree_traversal = reparser.parse(&parser_input, edition);
 
     let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
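A minimal caller-side sketch, not part of the patch itself: the helper name `expand_to_items` and the use of `span::Span` for the span type are assumptions for illustration, but the three-argument `mbe::token_tree_to_syntax_node` call matches the updated signature above.

// Hypothetical in-tree helper showing how callers adapt: the edition is now
// chosen explicitly instead of being hardcoded to Edition2021 inside mbe.
use span::Span;

fn expand_to_items(
    tt: &tt::Subtree<Span>,
    edition: parser::Edition,
) -> syntax::Parse<syntax::SyntaxNode> {
    // The extra argument is simply forwarded to the parser entry point.
    let (parse, _span_map) =
        mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems, edition);
    parse
}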