Raise edition one more level

Johann Hemmann 2024-01-30 16:57:40 +01:00 committed by Lukas Wirth
parent 454e481422
commit 2cf5d8811a
13 changed files with 61 additions and 35 deletions
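
Read together, the hunks below move the hard-coded parser::Edition::Edition2021 up by one layer: mbe::token_tree_to_syntax_node, TtIter::expect_fragment, parsing::parse_text, reparse_block and ast::TokenTree::reparse_as_comma_separated_expr now take the edition as a parameter, and their callers in hir-def, hir-expand and syntax pass Edition2021 explicitly instead. The net effect is easiest to see in the hir-expand wrapper, which changes from

    mbe::token_tree_to_syntax_node(tt, entry_point)

to

    mbe::token_tree_to_syntax_node(tt, entry_point, parser::Edition::Edition2021)

so the edition choice now sits at the call site rather than inside mbe.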

Cargo.lock (generated)

@@ -521,6 +521,7 @@ dependencies = [
  "limit",
  "mbe",
  "once_cell",
+ "parser",
  "profile",
  "ra-ap-rustc_abi",
  "ra-ap-rustc_parse_format",
@@ -551,6 +552,7 @@ dependencies = [
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "limit",
  "mbe",
+ "parser",
  "rustc-hash",
  "smallvec",
  "span",


@@ -44,6 +44,7 @@ cfg.workspace = true
 tt.workspace = true
 limit.workspace = true
 span.workspace = true
+parser.workspace = true
 
 [dev-dependencies]


@@ -316,8 +316,11 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
         _: Span,
         _: Span,
     ) -> Result<Subtree, ProcMacroExpansionError> {
-        let (parse, _) =
-            ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
+        let (parse, _) = ::mbe::token_tree_to_syntax_node(
+            subtree,
+            ::mbe::TopEntryPoint::MacroItems,
+            parser::Edition::Edition2021,
+        );
         if parse.errors().is_empty() {
             Ok(subtree.clone())
         } else {


@@ -32,6 +32,7 @@ tt.workspace = true
 mbe.workspace = true
 limit.workspace = true
 span.workspace = true
+parser.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"


@@ -204,7 +204,11 @@ struct BasicAdtInfo {
 }
 
 fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
-    let (parsed, tm) = &mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
+    let (parsed, tm) = &mbe::token_tree_to_syntax_node(
+        tt,
+        mbe::TopEntryPoint::MacroItems,
+        parser::Edition::Edition2021,
+    );
     let macro_items = ast::MacroItems::cast(parsed.syntax_node())
         .ok_or_else(|| ExpandError::other("invalid item definition"))?;
     let item = macro_items.items().next().ok_or_else(|| ExpandError::other("no item found"))?;


@@ -676,7 +676,7 @@ fn token_tree_to_syntax_node(
         ExpandTo::Type => mbe::TopEntryPoint::Type,
         ExpandTo::Expr => mbe::TopEntryPoint::Expr,
     };
-    mbe::token_tree_to_syntax_node(tt, entry_point)
+    mbe::token_tree_to_syntax_node(tt, entry_point, parser::Edition::Edition2021)
 }
 
 fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {


@@ -417,7 +417,11 @@ mod tests {
         expect.assert_eq(&actual);
 
         // the fixed-up tree should be syntactically valid
-        let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
+        let (parse, _) = mbe::token_tree_to_syntax_node(
+            &tt,
+            ::mbe::TopEntryPoint::MacroItems,
+            parser::Edition::Edition2021,
+        );
         assert!(
             parse.errors().is_empty(),
             "parse has syntax errors. parse tree:\n{:#?}",


@@ -743,9 +743,11 @@ fn match_meta_var(
 ) -> ExpandResult<Option<Fragment>> {
     let fragment = match kind {
         MetaVarKind::Path => {
-            return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| {
-                it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
-            });
+            return input
+                .expect_fragment(parser::PrefixEntryPoint::Path, parser::Edition::Edition2021)
+                .map(|it| {
+                    it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
+                });
         }
         MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
         MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop,
@@ -770,21 +772,23 @@ fn match_meta_var(
                 }
                 _ => {}
             };
-            return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
-                tt.map(|tt| match tt {
-                    tt::TokenTree::Leaf(leaf) => tt::Subtree {
-                        delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
-                        token_trees: Box::new([leaf.into()]),
-                    },
-                    tt::TokenTree::Subtree(mut s) => {
-                        if s.delimiter.kind == tt::DelimiterKind::Invisible {
-                            s.delimiter.kind = tt::DelimiterKind::Parenthesis;
-                        }
-                        s
-                    }
-                })
-                .map(Fragment::Expr)
-            });
+            return input
+                .expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::Edition2021)
+                .map(|tt| {
+                    tt.map(|tt| match tt {
+                        tt::TokenTree::Leaf(leaf) => tt::Subtree {
+                            delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
+                            token_trees: Box::new([leaf.into()]),
+                        },
+                        tt::TokenTree::Subtree(mut s) => {
+                            if s.delimiter.kind == tt::DelimiterKind::Invisible {
+                                s.delimiter.kind = tt::DelimiterKind::Parenthesis;
+                            }
+                            s
+                        }
+                    })
+                    .map(Fragment::Expr)
+                });
         }
         MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
             let tt_result = match kind {
@@ -819,7 +823,7 @@ fn match_meta_var(
             return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
         }
     };
-    input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
+    input.expect_fragment(fragment, parser::Edition::Edition2021).map(|it| it.map(Fragment::Tokens))
 }
 
 fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {


@@ -119,6 +119,7 @@ where
 pub fn token_tree_to_syntax_node<Ctx>(
     tt: &tt::Subtree<SpanData<Ctx>>,
     entry_point: parser::TopEntryPoint,
+    edition: parser::Edition,
 ) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
 where
     SpanData<Ctx>: Copy + fmt::Debug,
@@ -131,7 +132,7 @@ where
         _ => TokenBuffer::from_subtree(tt),
     };
     let parser_input = to_parser_input(&buffer);
-    let parser_output = entry_point.parse(&parser_input, parser::Edition::Edition2021);
+    let parser_output = entry_point.parse(&parser_input, edition);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     for event in parser_output.iter() {
         match event {
@@ -194,7 +195,8 @@ where
     let mut res = Vec::new();
 
     while iter.peek_n(0).is_some() {
-        let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);
+        let expanded =
+            iter.expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::Edition2021);
 
         res.push(match expanded.value {
             None => break,
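
For reference, a minimal sketch of calling the new signature, modelled on the call sites earlier in this commit; `subtree` stands in for a &tt::Subtree with SpanData spans obtained elsewhere, and the error handling is illustrative only:

    // Parse a token tree as items, with the edition now chosen by the caller.
    let (parse, _span_map) = mbe::token_tree_to_syntax_node(
        subtree,
        mbe::TopEntryPoint::MacroItems,
        parser::Edition::Edition2021,
    );
    if !parse.errors().is_empty() {
        // report the syntax errors instead of using the tree
    }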


@@ -140,10 +140,11 @@ impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> {
     pub(crate) fn expect_fragment(
         &mut self,
         entry_point: parser::PrefixEntryPoint,
+        edition: parser::Edition,
     ) -> ExpandResult<Option<tt::TokenTree<S>>> {
         let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
         let parser_input = to_parser_input(&buffer);
-        let tree_traversal = entry_point.parse(&parser_input, parser::Edition::Edition2021);
+        let tree_traversal = entry_point.parse(&parser_input, edition);
         let mut cursor = buffer.begin();
         let mut error = false;
         for step in tree_traversal.iter() {
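
A corresponding sketch for the iterator side, mirroring the matcher.rs hunks above; `input` stands in for a &mut TtIter as in match_meta_var:

    // Consume one expression fragment from the token-tree iterator, parsing it
    // with an explicit edition; the result is an ExpandResult<Option<tt::TokenTree<_>>>.
    let expanded = input
        .expect_fragment(parser::PrefixEntryPoint::Expr, parser::Edition::Edition2021);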


@@ -172,7 +172,7 @@ pub use crate::ast::SourceFile;
 impl SourceFile {
     pub fn parse(text: &str) -> Parse<SourceFile> {
         let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered();
-        let (green, errors) = parsing::parse_text(text);
+        let (green, errors) = parsing::parse_text(text, parser::Edition::Edition2021);
         let root = SyntaxNode::new_root(green.clone());
 
         assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
@@ -185,7 +185,10 @@ impl SourceFile {
 }
 
 impl ast::TokenTree {
-    pub fn reparse_as_comma_separated_expr(self) -> Parse<ast::MacroEagerInput> {
+    pub fn reparse_as_comma_separated_expr(
+        self,
+        edition: parser::Edition,
+    ) -> Parse<ast::MacroEagerInput> {
         let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
 
         let mut parser_input = parser::Input::default();
@@ -219,8 +222,7 @@ impl ast::TokenTree {
             }
         }
 
-        let parser_output = parser::TopEntryPoint::MacroEagerInput
-            .parse(&parser_input, parser::Edition::Edition2021);
+        let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input, edition);
 
         let mut tokens =
             self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
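
In terms of the syntax crate's public surface after this commit: SourceFile::parse keeps pinning the edition to 2021 internally, while reparse_as_comma_separated_expr now asks its caller for one. A hedged usage sketch, where `token_tree` stands in for an ast::TokenTree obtained from an already-parsed file:

    // Whole-file parsing: edition still fixed to 2021 inside the syntax crate.
    let parse = SourceFile::parse("fn main() {}");
    // Reparsing a token tree as comma-separated expressions: edition supplied by the caller.
    let exprs = token_tree.reparse_as_comma_separated_expr(parser::Edition::Edition2021);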


@@ -9,12 +9,11 @@ use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
 
 pub(crate) use crate::parsing::reparsing::incremental_reparse;
 
-pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::span!(tracing::Level::INFO, "parse_text").entered();
     let lexed = parser::LexedStr::new(text);
     let parser_input = lexed.to_input();
-    let parser_output =
-        parser::TopEntryPoint::SourceFile.parse(&parser_input, parser::Edition::Edition2021);
+    let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
 }


@@ -26,7 +26,9 @@ pub(crate) fn incremental_reparse(
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
 
-    if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
+    if let Some((green, new_errors, old_range)) =
+        reparse_block(node, edit, parser::Edition::Edition2021)
+    {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
     None
@@ -84,6 +86,7 @@ fn reparse_token(
 fn reparse_block(
     root: &SyntaxNode,
     edit: &Indel,
+    edition: parser::Edition,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
     let text = get_text_after_edit(node.clone().into(), edit);
@@ -94,7 +97,7 @@ fn reparse_block(
         return None;
     }
 
-    let tree_traversal = reparser.parse(&parser_input, parser::Edition::Edition2021);
+    let tree_traversal = reparser.parse(&parser_input, edition);
 
     let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
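
As with SourceFile::parse, one hard-coded edition deliberately survives at this level: incremental_reparse still passes parser::Edition::Edition2021 into reparse_block (first hunk of this file), so a follow-up can presumably thread the real crate edition through reparsing without having to change reparse_block itself again.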