Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-11-14 17:07:26 +00:00)

Commit bfc263f1f9 (parent aa1788dc71): introduce hacks module

6 changed files with 34 additions and 51 deletions

@@ -309,7 +309,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
         input_expressions
             .into_iter()
             .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-            .filter_map(|mut tokens| ast::Expr::parse(&tokens.join("")).ok())
+            .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
             .collect::<Vec<ast::Expr>>(),
     )
 }
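
The hunk above only swaps the parsing entry point. A minimal sketch of the call-site change it performs (the wrapper name `parse_one_expr` is illustrative, not part of the commit): the removed `ast::Expr::parse` returned `Result<ast::Expr, ()>`, while the new `syntax::hacks::parse_expr_from_str` returns `Option<ast::Expr>`, which is why the `.ok()` adapter disappears.

    use syntax::{ast, hacks};

    // Hypothetical helper showing the shape of the migration; before this commit
    // the body would have been `ast::Expr::parse(text).ok()`.
    fn parse_one_expr(text: &str) -> Option<ast::Expr> {
        hacks::parse_expr_from_str(text)
    }
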
@@ -212,15 +212,14 @@ fn validate_snippet(
 ) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
     let mut imports = Vec::with_capacity(requires.len());
     for path in requires.iter() {
-        let path = ast::Path::parse(path).ok()?;
-        let valid_use_path = path.segments().all(|seg| {
-            matches!(seg.kind(), Some(ast::PathSegmentKind::Name(_)))
-                || seg.generic_arg_list().is_none()
-        });
-        if !valid_use_path {
+        let use_path = ast::SourceFile::parse(&format!("use {};", path))
+            .syntax_node()
+            .descendants()
+            .find_map(ast::Path::cast)?;
+        if use_path.syntax().text() != path.as_str() {
             return None;
         }
-        let green = path.syntax().green().into_owned();
+        let green = use_path.syntax().green().into_owned();
         imports.push(green);
     }
     let snippet = snippet.iter().join("\n");
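
The new validation wraps the required import in a `use` item, reparses it with the regular file grammar, and accepts it only if the recovered path's text matches the input exactly. A sketch of that pattern in isolation (the helper name `is_plain_use_path` is illustrative, not part of the commit):

    use syntax::{ast, AstNode};

    // Returns true only if `path` parses as a bare path inside `use <path>;` and
    // round-trips to exactly the same text (no trailing tokens, no extra syntax).
    fn is_plain_use_path(path: &str) -> bool {
        let parse = ast::SourceFile::parse(&format!("use {};", path));
        match parse.syntax_node().descendants().find_map(ast::Path::cast) {
            Some(use_path) => use_path.syntax().text() == path,
            None => false,
        }
    }
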
@@ -67,7 +67,11 @@ pub fn get_path_at_cursor_in_tt(cursor: &ast::Ident) -> Option<ast::Path> {
         .filter_map(SyntaxElement::into_token)
         .take_while(|tok| tok != cursor);

-    ast::Path::parse(&path_tokens.chain(iter::once(cursor.clone())).join("")).ok()
+    syntax::hacks::parse_expr_from_str(&path_tokens.chain(iter::once(cursor.clone())).join(""))
+        .and_then(|expr| match expr {
+            ast::Expr::PathExpr(it) => it.path(),
+            _ => None,
+        })
 }

 /// Parses and resolves the path at the cursor position in the given attribute, if it is a derive.
@@ -323,7 +327,12 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
     let paths = input_expressions
         .into_iter()
         .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-        .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
+        .filter_map(|mut tokens| {
+            syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
+                ast::Expr::PathExpr(it) => it.path(),
+                _ => None,
+            })
+        })
         .collect();
     Some(paths)
 }
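
Both hunks above recover an `ast::Path` by parsing the text as an expression and keeping only path expressions. A sketch of that detour factored into a standalone helper (the name `parse_path_from_str` is illustrative, not part of the commit):

    use syntax::{ast, hacks};

    // Parse `s` as an expression, then keep it only if it is a bare path expression.
    fn parse_path_from_str(s: &str) -> Option<ast::Path> {
        hacks::parse_expr_from_str(s).and_then(|expr| match expr {
            ast::Expr::PathExpr(it) => it.path(),
            _ => None,
        })
    }
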
crates/syntax/src/hacks.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
+//! Things which exist to solve practical issues, but which shouldn't exist.
+//!
+//! Please avoid adding new usages of the functions in this module
+
+use crate::{ast, AstNode};
+
+pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
+    let file = ast::SourceFile::parse(&format!("const _: () = {};", s));
+    let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
+    if expr.syntax().text() != s {
+        return None;
+    }
+    Some(expr)
+}
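
A quick usage sketch of the new helper (the example inputs are illustrative, not from the commit). It piggybacks on the whole-file parser by wrapping the input in `const _: () = <input>;` and returns the first expression it finds only if that expression's text is exactly the input:

    use syntax::hacks::parse_expr_from_str;

    fn main() {
        // Ordinary expressions survive the wrap-parse-compare round trip.
        assert!(parse_expr_from_str("1 + 1").is_some());
        // A bare path is also a valid expression, which is what the call sites above rely on.
        assert!(parse_expr_from_str("foo::bar").is_some());
        // Only the leading `1` becomes the const body, so the text check rejects the input.
        assert!(parse_expr_from_str("1; 2").is_none());
    }
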
@@ -40,6 +40,7 @@ pub mod ast;
 pub mod fuzz;
 pub mod utils;
 pub mod ted;
+pub mod hacks;

 use std::{marker::PhantomData, sync::Arc};

@@ -167,26 +168,6 @@ impl SourceFile {
     }
 }

-// FIXME: `parse` functions shouldn't hang directly from AST nodes, and they
-// shouldn't return `Result`.
-//
-// We need a dedicated module for parser entry points, and they should always
-// return `Parse`.
-
-impl ast::Path {
-    /// Returns `text`, parsed as a path, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::Path)
-    }
-}
-
-impl ast::Expr {
-    /// Returns `text`, parsed as an expression, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::Expr)
-    }
-}
-
 /// Matches a `SyntaxNode` against an `ast` type.
 ///
 /// # Example:
@@ -5,7 +5,7 @@ mod reparsing;

 use rowan::TextRange;

-use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode, SyntaxTreeBuilder};
+use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};

 pub(crate) use crate::parsing::reparsing::incremental_reparse;

@@ -17,26 +17,6 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
     (node, errors)
 }

-/// Returns `text` parsed as a `T` provided there are no parse errors.
-pub(crate) fn parse_text_as<T: AstNode>(
-    text: &str,
-    entry_point: parser::ParserEntryPoint,
-) -> Result<T, ()> {
-    let lexed = parser::LexedStr::new(text);
-    if lexed.errors().next().is_some() {
-        return Err(());
-    }
-    let parser_input = lexed.to_input();
-    let parser_output = parser::parse(&parser_input, entry_point);
-    let (node, errors, eof) = build_tree(lexed, parser_output, true);
-
-    if !errors.is_empty() || !eof {
-        return Err(());
-    }
-
-    SyntaxNode::new_root(node).first_child().and_then(T::cast).ok_or(())
-}
-
 pub(crate) fn build_tree(
     lexed: parser::LexedStr<'_>,
     parser_output: parser::Output,