mirror of
https://github.com/rust-lang/rust-analyzer
synced 2024-12-27 13:33:31 +00:00
Modify hacks::parse_expr_from_str()
to take an edition parameter as well
This will be needed because we parse unknown identifiers and want to insert them into source code.
This commit is contained in:
parent
e6d59e65ec
commit
ddbb28daa0
4 changed files with 14 additions and 10 deletions
|
@ -1,7 +1,7 @@
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use syntax::{
|
use syntax::{
|
||||||
ast::{self, make, AstNode, AstToken},
|
ast::{self, make, AstNode, AstToken},
|
||||||
match_ast, ted, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
|
match_ast, ted, Edition, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||||
|
@ -77,7 +77,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
|
||||||
let input_expressions = input_expressions
|
let input_expressions = input_expressions
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
|
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
|
||||||
.map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
|
.map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT))
|
||||||
.collect::<Option<Vec<ast::Expr>>>()?;
|
.collect::<Option<Vec<ast::Expr>>>()?;
|
||||||
|
|
||||||
let parent = macro_expr.syntax().parent()?;
|
let parent = macro_expr.syntax().parent()?;
|
||||||
|
|
|
@ -14,7 +14,7 @@ use ide_db::{
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use syntax::{
|
use syntax::{
|
||||||
ast::{self, AttrKind},
|
ast::{self, AttrKind},
|
||||||
AstNode, SyntaxKind, T,
|
AstNode, Edition, SyntaxKind, T,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
@ -373,7 +373,9 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
|
||||||
input_expressions
|
input_expressions
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
|
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
|
||||||
.filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
|
.filter_map(|mut tokens| {
|
||||||
|
syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT)
|
||||||
|
})
|
||||||
.collect::<Vec<ast::Expr>>(),
|
.collect::<Vec<ast::Expr>>(),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
@ -477,10 +477,12 @@ pub fn parse_tt_as_comma_sep_paths(
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
|
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
|
||||||
.filter_map(|mut tokens| {
|
.filter_map(|mut tokens| {
|
||||||
syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
|
syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT).and_then(
|
||||||
|
|expr| match expr {
|
||||||
ast::Expr::PathExpr(it) => it.path(),
|
ast::Expr::PathExpr(it) => it.path(),
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
},
|
||||||
|
)
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
Some(paths)
|
Some(paths)
|
||||||
|
|
|
@ -6,9 +6,9 @@ use parser::Edition;
|
||||||
|
|
||||||
use crate::{ast, AstNode};
|
use crate::{ast, AstNode};
|
||||||
|
|
||||||
pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
|
pub fn parse_expr_from_str(s: &str, edition: Edition) -> Option<ast::Expr> {
|
||||||
let s = s.trim();
|
let s = s.trim();
|
||||||
let file = ast::SourceFile::parse(&format!("const _: () = {s};"), Edition::CURRENT);
|
let file = ast::SourceFile::parse(&format!("const _: () = {s};"), edition);
|
||||||
let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
|
let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
|
||||||
if expr.syntax().text() != s {
|
if expr.syntax().text() != s {
|
||||||
return None;
|
return None;
|
||||||
|
|
Loading…
Reference in a new issue