Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 13:03:31 +00:00
Merge #1749

1749: simplify r=matklad a=matklad

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>

Commit 4417a97bc5
9 changed files with 190 additions and 291 deletions
@@ -1,7 +1,9 @@
+//! This module takes a (parsed) definition of `macro_rules` invocation, a
+//! `tt::TokenTree` representing an argument of macro invocation, and produces a
+//! `tt::TokenTree` for the result of the expansion.
+
+use ra_parser::FragmentKind::*;
 use ra_syntax::SmolStr;
-/// This module takes a (parsed) definition of `macro_rules` invocation, a
-/// `tt::TokenTree` representing an argument of macro invocation, and produces a
-/// `tt::TokenTree` for the result of the expansion.
 use rustc_hash::FxHashMap;
 use tt::TokenId;
 
@@ -192,81 +194,11 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
         crate::TokenTree::Leaf(leaf) => match leaf {
             crate::Leaf::Var(crate::Var { text, kind }) => {
                 let kind = kind.clone().ok_or(ExpandError::UnexpectedToken)?;
-                match kind.as_str() {
-                    "ident" => {
-                        let ident =
-                            input.eat_ident().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(
-                            text.clone(),
-                            Binding::Simple(tt::Leaf::from(ident).into()),
-                        );
+                match match_meta_var(kind.as_str(), input)? {
+                    Some(tt) => {
+                        res.inner.insert(text.clone(), Binding::Simple(tt));
                     }
-                    "path" => {
-                        let path =
-                            input.eat_path().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(path));
-                    }
-                    "expr" => {
-                        let expr =
-                            input.eat_expr().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(expr));
-                    }
-                    "ty" => {
-                        let ty = input.eat_ty().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(ty));
-                    }
-                    "pat" => {
-                        let pat = input.eat_pat().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(pat));
-                    }
-                    "stmt" => {
-                        let pat = input.eat_stmt().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(pat));
-                    }
-                    "block" => {
-                        let block =
-                            input.eat_block().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(block));
-                    }
-                    "meta" => {
-                        let meta =
-                            input.eat_meta().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(meta));
-                    }
-                    "tt" => {
-                        let token = input.eat().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(token));
-                    }
-                    "item" => {
-                        let item =
-                            input.eat_item().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(item));
-                    }
-                    "lifetime" => {
-                        let lifetime =
-                            input.eat_lifetime().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        res.inner.insert(text.clone(), Binding::Simple(lifetime));
-                    }
-                    "literal" => {
-                        let literal =
-                            input.eat_literal().ok_or(ExpandError::UnexpectedToken)?.clone();
-
-                        res.inner.insert(
-                            text.clone(),
-                            Binding::Simple(tt::Leaf::from(literal).into()),
-                        );
-                    }
-                    "vis" => {
-                        // `vis` is optional
-                        if let Some(vis) = input.try_eat_vis() {
-                            let vis = vis.clone();
-                            res.inner.insert(text.clone(), Binding::Simple(vis));
-                        } else {
-                            res.push_optional(&text);
-                        }
-                    }
-
-                    _ => return Err(ExpandError::UnexpectedToken),
+                    None => res.push_optional(text),
                 }
             }
             crate::Leaf::Punct(punct) => {
 
@@ -360,6 +292,42 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
     Ok(res)
 }
 
+fn match_meta_var(kind: &str, input: &mut TtCursor) -> Result<Option<tt::TokenTree>, ExpandError> {
+    let fragment = match kind {
+        "path" => Path,
+        "expr" => Expr,
+        "ty" => Type,
+        "pat" => Pattern,
+        "stmt" => Statement,
+        "block" => Block,
+        "meta" => MetaItem,
+        "item" => Item,
+        _ => {
+            let binding = match kind {
+                "ident" => {
+                    let ident = input.eat_ident().ok_or(ExpandError::UnexpectedToken)?.clone();
+                    tt::Leaf::from(ident).into()
+                }
+                "tt" => input.eat().ok_or(ExpandError::UnexpectedToken)?.clone(),
+                "lifetime" => input.eat_lifetime().ok_or(ExpandError::UnexpectedToken)?.clone(),
+                "literal" => {
+                    let literal = input.eat_literal().ok_or(ExpandError::UnexpectedToken)?.clone();
+                    tt::Leaf::from(literal).into()
+                }
+                // `vis` is optional
+                "vis" => match input.try_eat_vis() {
+                    Some(vis) => vis,
+                    None => return Ok(None),
+                },
+                _ => return Err(ExpandError::UnexpectedToken),
+            };
+            return Ok(Some(binding));
+        }
+    };
+    let binding = input.eat_fragment(fragment).ok_or(ExpandError::UnexpectedToken)?;
+    Ok(Some(binding))
+}
+
 #[derive(Debug)]
 struct ExpandCtx<'a> {
     bindings: &'a Bindings,
 
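A standalone way to see the split that `match_meta_var` introduces: matcher kinds that need a full grammar production are mapped to a fragment and handed to the parser, while the remaining kinds (`ident`, `tt`, `lifetime`, `literal`, and the optional `vis`) stay token-level. The sketch below restates that mapping with hypothetical local names (`DemoFragment`, `demo_fragment_for`); it is not the crate's API.

    // Illustrative sketch only: mirrors the shape of `match_meta_var` above
    // with hypothetical local types, not the actual rust-analyzer code.
    #[derive(Debug, PartialEq)]
    enum DemoFragment {
        Path,
        Expr,
        Type,
        Pattern,
        Statement,
        Block,
        MetaItem,
        Item,
    }

    // Kinds that need the full parser map to a fragment; the rest ("ident",
    // "tt", "lifetime", "literal", "vis") are handled token-by-token, so the
    // mapping returns None for them.
    fn demo_fragment_for(kind: &str) -> Option<DemoFragment> {
        match kind {
            "path" => Some(DemoFragment::Path),
            "expr" => Some(DemoFragment::Expr),
            "ty" => Some(DemoFragment::Type),
            "pat" => Some(DemoFragment::Pattern),
            "stmt" => Some(DemoFragment::Statement),
            "block" => Some(DemoFragment::Block),
            "meta" => Some(DemoFragment::MetaItem),
            "item" => Some(DemoFragment::Item),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(demo_fragment_for("expr"), Some(DemoFragment::Expr));
        assert_eq!(demo_fragment_for("ident"), None); // eaten as a single token instead
    }
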
@@ -1,6 +1,6 @@
 use crate::subtree_source::SubtreeTokenSource;
 
-use ra_parser::{TokenSource, TreeSink};
+use ra_parser::{FragmentKind, TokenSource, TreeSink};
 use ra_syntax::SyntaxKind;
 use tt::buffer::{Cursor, TokenBuffer};
 
@@ -52,40 +52,10 @@ impl<'a> Parser<'a> {
         Parser { cur_pos, subtree }
     }
 
-    pub fn parse_path(self) -> Option<tt::TokenTree> {
-        self.parse(ra_parser::parse_path)
-    }
-
-    pub fn parse_expr(self) -> Option<tt::TokenTree> {
-        self.parse(ra_parser::parse_expr)
-    }
-
-    pub fn parse_ty(self) -> Option<tt::TokenTree> {
-        self.parse(ra_parser::parse_ty)
-    }
-
-    pub fn parse_pat(self) -> Option<tt::TokenTree> {
-        self.parse(ra_parser::parse_pat)
-    }
-
-    pub fn parse_stmt(self) -> Option<tt::TokenTree> {
-        self.parse(|src, sink| ra_parser::parse_stmt(src, sink, false))
-    }
-
-    pub fn parse_block(self) -> Option<tt::TokenTree> {
-        self.parse(ra_parser::parse_block)
-    }
-
-    pub fn parse_meta(self) -> Option<tt::TokenTree> {
-        self.parse(ra_parser::parse_meta)
-    }
-
-    pub fn parse_item(self) -> Option<tt::TokenTree> {
-        self.parse(ra_parser::parse_item)
-    }
-
-    pub fn parse_vis(self) -> Option<tt::TokenTree> {
-        self.parse(ra_parser::parse_vis)
+    pub fn parse_fragment(self, fragment_kind: FragmentKind) -> Option<tt::TokenTree> {
+        self.parse(|token_source, tree_skink| {
+            ra_parser::parse_fragment(token_source, tree_skink, fragment_kind)
+        })
     }
 
     fn parse<F>(self, f: F) -> Option<tt::TokenTree>
 
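One detail worth noting in `parse_fragment` above: the old wrappers could pass `ra_parser::parse_expr` and friends straight into the generic `parse` helper, but the single new entry point has to capture `fragment_kind`, so it passes a closure instead. A minimal, self-contained sketch of that pattern under hypothetical `Source`/`Sink`/`Kind` types (not the crate's):

    // Hypothetical stand-ins for TokenSource / TreeSink, just to show the shapes.
    struct Source;
    struct Sink;

    #[derive(Clone, Copy)]
    enum Kind {
        Expr,
        Path,
    }

    // A free function usable as a plain `fn` item...
    fn parse_expr(_src: &mut Source, _sink: &mut Sink) {
        println!("parsing an expression");
    }

    // ...versus one entry point that needs an extra argument.
    fn parse_kind(_src: &mut Source, _sink: &mut Sink, kind: Kind) {
        match kind {
            Kind::Expr => println!("parsing an expression"),
            Kind::Path => println!("parsing a path"),
        }
    }

    // The generic helper accepts both fn items and capturing closures.
    fn parse<F>(f: F)
    where
        F: FnOnce(&mut Source, &mut Sink),
    {
        let (mut src, mut sink) = (Source, Sink);
        f(&mut src, &mut sink);
    }

    fn main() {
        // Old style: one wrapper per kind, passing the fn item directly.
        parse(parse_expr);
        // New style: one wrapper, capturing the kind in a closure.
        let kind = Kind::Path;
        parse(move |src, sink| parse_kind(src, sink, kind));
    }
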
@@ -1,4 +1,7 @@
-use ra_parser::{ParseError, TreeSink};
+use ra_parser::{
+    FragmentKind::{self, *},
+    ParseError, TreeSink,
+};
 use ra_syntax::{
     ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 
@@ -63,33 +66,50 @@ where
     Ok(parse)
 }
 
+fn fragment_to_syntax_node(
+    tt: &tt::Subtree,
+    fragment_kind: FragmentKind,
+) -> Result<Parse<SyntaxNode>, ExpandError> {
+    let tokens = [tt.clone().into()];
+    let buffer = TokenBuffer::new(&tokens);
+    let mut token_source = SubtreeTokenSource::new(&buffer);
+    let mut tree_sink = TtTreeSink::new(buffer.begin());
+    ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
+    if tree_sink.roots.len() != 1 {
+        return Err(ExpandError::ConversionError);
+    }
+    //FIXME: would be cool to report errors
+    let parse = tree_sink.inner.finish();
+    Ok(parse)
+}
+
 /// Parses the token tree (result of macro expansion) to an expression
 pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<Parse<ast::Expr>, ExpandError> {
-    let parse = token_tree_to_syntax_node(tt, ra_parser::parse_expr)?;
+    let parse = fragment_to_syntax_node(tt, Expr)?;
     parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
 }
 
 /// Parses the token tree (result of macro expansion) to a Pattern
 pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<Parse<ast::Pat>, ExpandError> {
-    let parse = token_tree_to_syntax_node(tt, ra_parser::parse_pat)?;
+    let parse = fragment_to_syntax_node(tt, Pattern)?;
     parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
 }
 
 /// Parses the token tree (result of macro expansion) to a Type
 pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<Parse<ast::TypeRef>, ExpandError> {
-    let parse = token_tree_to_syntax_node(tt, ra_parser::parse_ty)?;
+    let parse = fragment_to_syntax_node(tt, Type)?;
     parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
 }
 
 /// Parses the token tree (result of macro expansion) as a sequence of stmts
 pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStmts>, ExpandError> {
-    let parse = token_tree_to_syntax_node(tt, ra_parser::parse_macro_stmts)?;
+    let parse = fragment_to_syntax_node(tt, Statements)?;
     parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
 }
 
 /// Parses the token tree (result of macro expansion) as a sequence of items
 pub fn token_tree_to_macro_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
-    let parse = token_tree_to_syntax_node(tt, ra_parser::parse_macro_items)?;
+    let parse = fragment_to_syntax_node(tt, Items)?;
     parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
 }
 
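The `token_tree_to_*` helpers above all follow the same two-step shape: run the untyped fragment parser, then downcast the result to the expected typed wrapper, turning a mismatch into a `ConversionError`. A generic sketch of that "parse untyped, then cast" pattern with made-up types (not rust-analyzer's `Parse`/`ExpandError`):

    use std::any::Any;

    // Hypothetical error and "untyped parse result" types, standing in for
    // ExpandError and Parse<SyntaxNode>.
    #[derive(Debug)]
    enum DemoError {
        ConversionError,
    }

    struct UntypedParse {
        node: Box<dyn Any>,
    }

    impl UntypedParse {
        // Mirrors `parse.cast()`: succeed only if the node has the requested type.
        fn cast<T: 'static>(self) -> Option<T> {
            self.node.downcast::<T>().ok().map(|b| *b)
        }
    }

    struct Expr(String);

    // "Parse" an expression fragment into an untyped result (stub).
    fn parse_fragment_untyped(text: &str) -> UntypedParse {
        UntypedParse { node: Box::new(Expr(text.to_string())) }
    }

    // The typed wrapper: parse, then cast, mapping a failed cast to an error.
    fn token_tree_to_expr_demo(text: &str) -> Result<Expr, DemoError> {
        let parse = parse_fragment_untyped(text);
        parse.cast::<Expr>().ok_or(DemoError::ConversionError)
    }

    fn main() {
        let expr = token_tree_to_expr_demo("1 + 1").unwrap();
        println!("parsed: {}", expr.0);
    }
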
@@ -1,5 +1,6 @@
-use crate::subtree_parser::Parser;
-use crate::ParseError;
+use crate::{subtree_parser::Parser, ParseError};
+
+use ra_parser::FragmentKind;
 use smallvec::{smallvec, SmallVec};
 
 #[derive(Debug, Clone)]
 
@@ -98,44 +99,9 @@ impl<'a> TtCursor<'a> {
         })
     }
 
-    pub(crate) fn eat_path(&mut self) -> Option<tt::TokenTree> {
+    pub(crate) fn eat_fragment(&mut self, fragment_kind: FragmentKind) -> Option<tt::TokenTree> {
         let parser = Parser::new(&mut self.pos, self.subtree);
-        parser.parse_path()
-    }
-
-    pub(crate) fn eat_expr(&mut self) -> Option<tt::TokenTree> {
-        let parser = Parser::new(&mut self.pos, self.subtree);
-        parser.parse_expr()
-    }
-
-    pub(crate) fn eat_ty(&mut self) -> Option<tt::TokenTree> {
-        let parser = Parser::new(&mut self.pos, self.subtree);
-        parser.parse_ty()
-    }
-
-    pub(crate) fn eat_pat(&mut self) -> Option<tt::TokenTree> {
-        let parser = Parser::new(&mut self.pos, self.subtree);
-        parser.parse_pat()
-    }
-
-    pub(crate) fn eat_stmt(&mut self) -> Option<tt::TokenTree> {
-        let parser = Parser::new(&mut self.pos, self.subtree);
-        parser.parse_stmt()
-    }
-
-    pub(crate) fn eat_block(&mut self) -> Option<tt::TokenTree> {
-        let parser = Parser::new(&mut self.pos, self.subtree);
-        parser.parse_block()
-    }
-
-    pub(crate) fn eat_meta(&mut self) -> Option<tt::TokenTree> {
-        let parser = Parser::new(&mut self.pos, self.subtree);
-        parser.parse_meta()
-    }
-
-    pub(crate) fn eat_item(&mut self) -> Option<tt::TokenTree> {
-        let parser = Parser::new(&mut self.pos, self.subtree);
-        parser.parse_item()
+        parser.parse_fragment(fragment_kind)
     }
 
     pub(crate) fn eat_lifetime(&mut self) -> Option<tt::TokenTree> {
 
@@ -154,7 +120,7 @@ impl<'a> TtCursor<'a> {
         let old_pos = self.pos;
         let parser = Parser::new(&mut self.pos, self.subtree);
 
-        let res = parser.parse_vis();
+        let res = parser.parse_fragment(FragmentKind::Visibility);
         if res.is_none() {
             self.pos = old_pos;
         }
 
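The hunk above shows the backtracking idiom used for the optional `vis` fragment in `try_eat_vis`: remember the cursor position, attempt the parse, and roll back if nothing matched. A self-contained sketch of that save/restore pattern on a plain token slice (hypothetical `DemoCursor`, not the crate's `TtCursor`):

    // Hypothetical cursor over a token list, standing in for TtCursor.
    struct DemoCursor<'a> {
        tokens: &'a [&'a str],
        pos: usize,
    }

    impl<'a> DemoCursor<'a> {
        fn try_eat_vis(&mut self) -> Option<String> {
            let old_pos = self.pos; // remember where we were
            let res = self.parse_vis();
            if res.is_none() {
                self.pos = old_pos; // roll back on failure: `vis` is optional
            }
            res
        }

        // Accepts `pub` or `pub(crate)`; anything else is not a visibility.
        fn parse_vis(&mut self) -> Option<String> {
            if self.tokens.get(self.pos) != Some(&"pub") {
                return None;
            }
            self.pos += 1;
            let mut vis = String::from("pub");
            if self.tokens.get(self.pos) == Some(&"(crate)") {
                vis.push_str("(crate)");
                self.pos += 1;
            }
            Some(vis)
        }
    }

    fn main() {
        let mut c = DemoCursor { tokens: &["fn", "foo"], pos: 0 };
        assert_eq!(c.try_eat_vis(), None);
        assert_eq!(c.pos, 0); // position restored, the matcher can try something else

        let mut c = DemoCursor { tokens: &["pub", "(crate)", "fn", "foo"], pos: 0 };
        assert_eq!(c.try_eat_vis(), Some("pub(crate)".to_string()));
        assert_eq!(c.pos, 2);
    }
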
@@ -49,98 +49,93 @@ pub(crate) fn root(p: &mut Parser) {
     m.complete(p, SOURCE_FILE);
 }
 
-pub(crate) fn macro_items(p: &mut Parser) {
-    let m = p.start();
-    items::mod_contents(p, false);
-    m.complete(p, MACRO_ITEMS);
-}
-
-pub(crate) fn macro_stmts(p: &mut Parser) {
-    let m = p.start();
-
-    while !p.at(EOF) {
-        if p.current() == T![;] {
-            p.bump();
-            continue;
-        }
-
-        expressions::stmt(p, expressions::StmtWithSemi::Optional);
-    }
-
-    m.complete(p, MACRO_STMTS);
-}
-
-pub(crate) fn path(p: &mut Parser) {
-    paths::type_path(p);
-}
-
-pub(crate) fn expr(p: &mut Parser) {
-    expressions::expr(p);
-}
-
-pub(crate) fn type_(p: &mut Parser) {
-    types::type_(p)
-}
-
-pub(crate) fn pattern(p: &mut Parser) {
-    patterns::pattern(p)
-}
-
-pub(crate) fn stmt(p: &mut Parser, with_semi: bool) {
-    let with_semi =
-        if with_semi { expressions::StmtWithSemi::Yes } else { expressions::StmtWithSemi::No };
-
-    expressions::stmt(p, with_semi)
-}
-
-pub(crate) fn block(p: &mut Parser) {
-    expressions::block(p);
-}
-
-// Parse a meta item , which excluded [], e.g : #[ MetaItem ]
-pub(crate) fn meta_item(p: &mut Parser) {
-    fn is_delimiter(p: &mut Parser) -> bool {
-        match p.current() {
-            T!['{'] | T!['('] | T!['['] => true,
-            _ => false,
-        }
-    }
-
-    if is_delimiter(p) {
-        items::token_tree(p);
-        return;
-    }
-
-    let m = p.start();
-    while !p.at(EOF) {
-        if is_delimiter(p) {
-            items::token_tree(p);
-            break;
-        } else {
-            // https://doc.rust-lang.org/reference/attributes.html
-            // https://doc.rust-lang.org/reference/paths.html#simple-paths
-            // The start of an meta must be a simple path
-            match p.current() {
-                IDENT | T![::] | T![super] | T![self] | T![crate] => p.bump(),
-                T![=] => {
-                    p.bump();
-                    match p.current() {
-                        c if c.is_literal() => p.bump(),
-                        T![true] | T![false] => p.bump(),
-                        _ => {}
-                    }
-                    break;
-                }
-                _ => break,
-            }
-        }
-    }
-
-    m.complete(p, TOKEN_TREE);
-}
-
-pub(crate) fn item(p: &mut Parser) {
-    items::item_or_macro(p, true, items::ItemFlavor::Mod)
-}
+/// Various pieces of syntax that can be parsed by macros by example
+pub(crate) mod fragments {
+    use super::*;
+
+    pub(crate) use super::{
+        expressions::block, paths::type_path as path, patterns::pattern, types::type_,
+    };
+
+    pub(crate) fn expr(p: &mut Parser) {
+        let _ = expressions::expr(p);
+    }
+
+    pub(crate) fn stmt(p: &mut Parser) {
+        expressions::stmt(p, expressions::StmtWithSemi::No)
+    }
+
+    pub(crate) fn opt_visibility(p: &mut Parser) {
+        let _ = super::opt_visibility(p);
+    }
+
+    // Parse a meta item , which excluded [], e.g : #[ MetaItem ]
+    pub(crate) fn meta_item(p: &mut Parser) {
+        fn is_delimiter(p: &mut Parser) -> bool {
+            match p.current() {
+                T!['{'] | T!['('] | T!['['] => true,
+                _ => false,
+            }
+        }
+
+        if is_delimiter(p) {
+            items::token_tree(p);
+            return;
+        }
+
+        let m = p.start();
+        while !p.at(EOF) {
+            if is_delimiter(p) {
+                items::token_tree(p);
+                break;
+            } else {
+                // https://doc.rust-lang.org/reference/attributes.html
+                // https://doc.rust-lang.org/reference/paths.html#simple-paths
+                // The start of an meta must be a simple path
+                match p.current() {
+                    IDENT | T![::] | T![super] | T![self] | T![crate] => p.bump(),
+                    T![=] => {
+                        p.bump();
+                        match p.current() {
+                            c if c.is_literal() => p.bump(),
+                            T![true] | T![false] => p.bump(),
+                            _ => {}
+                        }
+                        break;
+                    }
+                    _ => break,
+                }
+            }
+        }
+
+        m.complete(p, TOKEN_TREE);
+    }
+
+    pub(crate) fn item(p: &mut Parser) {
+        items::item_or_macro(p, true, items::ItemFlavor::Mod)
+    }
+
+    pub(crate) fn macro_items(p: &mut Parser) {
+        let m = p.start();
+        items::mod_contents(p, false);
+        m.complete(p, MACRO_ITEMS);
+    }
+
+    pub(crate) fn macro_stmts(p: &mut Parser) {
+        let m = p.start();
+
+        while !p.at(EOF) {
+            if p.current() == T![;] {
+                p.bump();
+                continue;
+            }
+
+            expressions::stmt(p, expressions::StmtWithSemi::Optional);
+        }
+
+        m.complete(p, MACRO_STMTS);
+    }
+}
 
 pub(crate) fn reparser(
 
@@ -180,7 +175,7 @@ impl BlockLike {
     }
 }
 
-pub(crate) fn opt_visibility(p: &mut Parser) -> bool {
+fn opt_visibility(p: &mut Parser) -> bool {
     match p.current() {
         T![pub] => {
             let m = p.start();
 
@@ -18,7 +18,7 @@ pub(super) fn use_path(p: &mut Parser) {
     path(p, Mode::Use)
 }
 
-pub(super) fn type_path(p: &mut Parser) {
+pub(crate) fn type_path(p: &mut Parser) {
     path(p, Mode::Type)
 }
 
@@ -4,7 +4,7 @@ pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
     .union(paths::PATH_FIRST)
     .union(token_set![BOX_KW, REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE, MINUS]);
 
-pub(super) fn pattern(p: &mut Parser) {
+pub(crate) fn pattern(p: &mut Parser) {
     pattern_r(p, PAT_RECOVERY_SET);
 }
 
@@ -7,7 +7,7 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
 
 const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA];
 
-pub(super) fn type_(p: &mut Parser) {
+pub(crate) fn type_(p: &mut Parser) {
     type_with_bounds_cond(p, true);
 }
 
@@ -83,62 +83,42 @@ pub fn parse(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
     parse_from_tokens(token_source, tree_sink, grammar::root);
 }
 
-/// Parse given tokens into the given sink as a path
-pub fn parse_path(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::path);
+pub enum FragmentKind {
+    Path,
+    Expr,
+    Statement,
+    Type,
+    Pattern,
+    Item,
+    Block,
+    Visibility,
+    MetaItem,
+
+    // These kinds are used when parsing the result of expansion
+    // FIXME: use separate fragment kinds for macro inputs and outputs?
+    Items,
+    Statements,
 }
 
-/// Parse given tokens into the given sink as a expression
-pub fn parse_expr(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::expr);
-}
-
-/// Parse given tokens into the given sink as a ty
-pub fn parse_ty(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::type_);
-}
-
-/// Parse given tokens into the given sink as a pattern
-pub fn parse_pat(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::pattern);
-}
-
-/// Parse given tokens into the given sink as a statement
-pub fn parse_stmt(
+pub fn parse_fragment(
     token_source: &mut dyn TokenSource,
     tree_sink: &mut dyn TreeSink,
-    with_semi: bool,
+    fragment_kind: FragmentKind,
 ) {
-    parse_from_tokens(token_source, tree_sink, |p| grammar::stmt(p, with_semi));
-}
-
-/// Parse given tokens into the given sink as a block
-pub fn parse_block(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::block);
-}
-
-pub fn parse_meta(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::meta_item);
-}
-
-/// Parse given tokens into the given sink as an item
-pub fn parse_item(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::item);
-}
-
-/// Parse given tokens into the given sink as an visibility qualifier
-pub fn parse_vis(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, |p| {
-        grammar::opt_visibility(p);
-    });
-}
-
-pub fn parse_macro_items(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::macro_items);
-}
-
-pub fn parse_macro_stmts(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    parse_from_tokens(token_source, tree_sink, grammar::macro_stmts);
+    let parser: fn(&'_ mut parser::Parser) = match fragment_kind {
+        FragmentKind::Path => grammar::fragments::path,
+        FragmentKind::Expr => grammar::fragments::expr,
+        FragmentKind::Type => grammar::fragments::type_,
+        FragmentKind::Pattern => grammar::fragments::pattern,
+        FragmentKind::Item => grammar::fragments::item,
+        FragmentKind::Block => grammar::fragments::block,
+        FragmentKind::Visibility => grammar::fragments::opt_visibility,
+        FragmentKind::MetaItem => grammar::fragments::meta_item,
+        FragmentKind::Statement => grammar::fragments::stmt,
+        FragmentKind::Items => grammar::fragments::macro_items,
+        FragmentKind::Statements => grammar::fragments::macro_stmts,
+    };
+    parse_from_tokens(token_source, tree_sink, parser)
 }
 
 /// A parsing function for a specific braced-block.
 
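The new `parse_fragment` is a small dispatch table: every `FragmentKind` maps to a plain `fn(&mut Parser)` entry point in `grammar::fragments`, and a single `parse_from_tokens` call does the rest. The same enum-to-function-pointer shape, reduced to a runnable toy with hypothetical `Demo*` names (not ra_parser's types):

    // Toy parser state, standing in for ra_parser's internal Parser.
    struct DemoParser {
        log: Vec<&'static str>,
    }

    mod demo_fragments {
        use super::DemoParser;

        pub fn path(p: &mut DemoParser) {
            p.log.push("path");
        }
        pub fn expr(p: &mut DemoParser) {
            p.log.push("expr");
        }
        pub fn item(p: &mut DemoParser) {
            p.log.push("item");
        }
    }

    #[derive(Clone, Copy)]
    enum DemoFragmentKind {
        Path,
        Expr,
        Item,
    }

    // Dispatch: pick the grammar entry point as a fn pointer, then run it once.
    fn parse_fragment_demo(p: &mut DemoParser, kind: DemoFragmentKind) {
        let parser: fn(&mut DemoParser) = match kind {
            DemoFragmentKind::Path => demo_fragments::path,
            DemoFragmentKind::Expr => demo_fragments::expr,
            DemoFragmentKind::Item => demo_fragments::item,
        };
        parser(p)
    }

    fn main() {
        let mut p = DemoParser { log: Vec::new() };
        parse_fragment_demo(&mut p, DemoFragmentKind::Expr);
        parse_fragment_demo(&mut p, DemoFragmentKind::Path);
        assert_eq!(p.log, vec!["expr", "path"]);
    }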