1801: WIP: switch to fully decomposed tokens internally r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2019-09-10 13:01:44 +00:00 committed by GitHub
commit 9d3c78e2ee
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 497 additions and 337 deletions

1
Cargo.lock generated
View file

@ -1051,6 +1051,7 @@ dependencies = [
"ra_tt 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
]
[[package]]

View file

@ -12,3 +12,7 @@ itertools = "0.8.0"
rustc-hash = "1.0.0"
smallvec = "0.6.9"
log = "0.4.5"
[dev-dependencies]
test_utils = { path = "../test_utils" }

View file

@ -339,21 +339,13 @@ fn expand_subtree(
template: &crate::Subtree,
ctx: &mut ExpandCtx,
) -> Result<tt::Subtree, ExpandError> {
let token_trees = template
.token_trees
.iter()
.map(|it| expand_tt(it, ctx))
.filter(|it| {
// Filter empty subtree
if let Ok(tt::TokenTree::Subtree(subtree)) = it {
subtree.delimiter != tt::Delimiter::None || !subtree.token_trees.is_empty()
} else {
true
}
})
.collect::<Result<Vec<_>, ExpandError>>()?;
let mut buf: Vec<tt::TokenTree> = Vec::new();
for tt in template.token_trees.iter() {
let tt = expand_tt(tt, ctx)?;
push_tt(&mut buf, tt);
}
Ok(tt::Subtree { token_trees, delimiter: template.delimiter })
Ok(tt::Subtree { delimiter: template.delimiter, token_trees: buf })
}
/// Reduce single token subtree to single token
@ -377,7 +369,7 @@ fn expand_tt(
let res: tt::TokenTree = match template {
crate::TokenTree::Subtree(subtree) => expand_subtree(subtree, ctx)?.into(),
crate::TokenTree::Repeat(repeat) => {
let mut token_trees: Vec<tt::TokenTree> = Vec::new();
let mut buf: Vec<tt::TokenTree> = Vec::new();
ctx.nesting.push(0);
// Dirty hack to make macro-expansion terminate.
// This should be replaced by a proper macro-by-example implementation
@ -418,23 +410,23 @@ fn expand_tt(
let idx = ctx.nesting.pop().unwrap();
ctx.nesting.push(idx + 1);
token_trees.push(reduce_single_token(t));
push_subtree(&mut buf, t);
if let Some(ref sep) = repeat.separator {
match sep {
crate::Separator::Ident(ident) => {
has_seps = 1;
token_trees.push(tt::Leaf::from(ident.clone()).into());
buf.push(tt::Leaf::from(ident.clone()).into());
}
crate::Separator::Literal(lit) => {
has_seps = 1;
token_trees.push(tt::Leaf::from(lit.clone()).into());
buf.push(tt::Leaf::from(lit.clone()).into());
}
crate::Separator::Puncts(puncts) => {
has_seps = puncts.len();
for punct in puncts {
token_trees.push(tt::Leaf::from(*punct).into());
buf.push(tt::Leaf::from(*punct).into());
}
}
}
@ -450,16 +442,16 @@ fn expand_tt(
ctx.nesting.pop().unwrap();
for _ in 0..has_seps {
token_trees.pop();
buf.pop();
}
if crate::RepeatKind::OneOrMore == repeat.kind && counter == 0 {
return Err(ExpandError::UnexpectedToken);
}
// Check if it is a singel token subtree without any delimiter
// Check if it is a single token subtree without any delimiter
// e.g {Delimiter:None> ['>'] /Delimiter:None>}
reduce_single_token(tt::Subtree { token_trees, delimiter: tt::Delimiter::None })
reduce_single_token(tt::Subtree { delimiter: tt::Delimiter::None, token_trees: buf })
}
crate::TokenTree::Leaf(leaf) => match leaf {
crate::Leaf::Ident(ident) => {
@ -586,3 +578,17 @@ mod tests {
expand_rule(&rules.rules[0], &invocation_tt)
}
}
fn push_tt(buf: &mut Vec<tt::TokenTree>, tt: tt::TokenTree) {
match tt {
tt::TokenTree::Subtree(tt) => push_subtree(buf, tt),
_ => buf.push(tt),
}
}
fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
match tt.delimiter {
tt::Delimiter::None => buf.extend(tt.token_trees),
_ => buf.push(tt.into()),
}
}

View file

@ -70,7 +70,14 @@ fn fragment_to_syntax_node(
tt: &tt::Subtree,
fragment_kind: FragmentKind,
) -> Result<Parse<SyntaxNode>, ExpandError> {
let tokens = [tt.clone().into()];
let tmp;
let tokens = match tt {
tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
_ => {
tmp = [tt.clone().into()];
&tmp[..]
}
};
let buffer = TokenBuffer::new(&tokens);
let mut token_source = SubtreeTokenSource::new(&buffer);
let mut tree_sink = TtTreeSink::new(buffer.begin());

View file

@ -1,4 +1,5 @@
use ra_syntax::{ast, AstNode, NodeOrToken};
use test_utils::assert_eq_text;
use super::*;
@ -152,7 +153,6 @@ pub(crate) fn assert_expansion(
// wrap the given text to a macro call
let expected = text_to_tokentree(&expected);
let (expanded_tree, expected_tree) = match kind {
MacroKind::Items => {
let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree();
@ -178,7 +178,7 @@ pub(crate) fn assert_expansion(
let expected_tree = expected_tree.replace("C_C__C", "$crate");
assert_eq!(
expanded_tree, expected_tree,
"left => {}\nright => {}",
"\nleft:\n{}\nright:\n{}",
expanded_tree, expected_tree,
);
@ -657,6 +657,7 @@ fn test_expr() {
}
#[test]
#[ignore]
fn test_expr_order() {
let rules = create_rules(
r#"
@ -668,8 +669,8 @@ fn test_expr_order() {
"#,
);
assert_eq!(
format!("{:#?}", expand_to_items(&rules, "foo! { 1 + 1 }").syntax()).trim(),
assert_eq_text!(
&format!("{:#?}", expand_to_items(&rules, "foo! { 1 + 1 }").syntax()),
r#"MACRO_ITEMS@[0; 15)
FN_DEF@[0; 15)
FN_KW@[0; 2) "fn"

View file

@ -210,7 +210,7 @@ fn opt_visibility(p: &mut Parser) -> bool {
//
// test crate_keyword_path
// fn foo() { crate::foo(); }
T![crate] if p.nth(1) != T![::] => {
T![crate] if !p.nth_at(1, T![::]) => {
let m = p.start();
p.bump_any();
m.complete(p, VISIBILITY);
@ -245,7 +245,7 @@ fn abi(p: &mut Parser) {
fn opt_fn_ret_type(p: &mut Parser) -> bool {
if p.at(T![->]) {
let m = p.start();
p.bump_any();
p.bump(T![->]);
types::type_(p);
m.complete(p, RET_TYPE);
true

View file

@ -212,52 +212,48 @@ struct Restrictions {
prefer_stmt: bool,
}
enum Op {
Simple,
Composite(SyntaxKind, u8),
}
/// Binding powers of operators for a Pratt parser.
///
/// See https://www.oilshell.org/blog/2016/11/03.html
#[rustfmt::skip]
fn current_op(p: &Parser) -> (u8, SyntaxKind) {
const NOT_AN_OP: (u8, SyntaxKind) = (0, T![@]);
match p.current() {
T![|] if p.at(T![||]) => (3, T![||]),
T![|] if p.at(T![|=]) => (1, T![|=]),
T![|] => (6, T![|]),
T![>] if p.at(T![>>=]) => (1, T![>>=]),
T![>] if p.at(T![>>]) => (9, T![>>]),
T![>] if p.at(T![>=]) => (5, T![>=]),
T![>] => (5, T![>]),
T![=] if p.at(T![=>]) => NOT_AN_OP,
T![=] if p.at(T![==]) => (5, T![==]),
T![=] => (1, T![=]),
T![<] if p.at(T![<=]) => (5, T![<=]),
T![<] if p.at(T![<<=]) => (1, T![<<=]),
T![<] if p.at(T![<<]) => (9, T![<<]),
T![<] => (5, T![<]),
T![+] if p.at(T![+=]) => (1, T![+=]),
T![+] => (10, T![+]),
T![^] if p.at(T![^=]) => (1, T![^=]),
T![^] => (7, T![^]),
T![%] if p.at(T![%=]) => (1, T![%=]),
T![%] => (11, T![%]),
T![&] if p.at(T![&=]) => (1, T![&=]),
T![&] if p.at(T![&&]) => (4, T![&&]),
T![&] => (8, T![&]),
T![/] if p.at(T![/=]) => (1, T![/=]),
T![/] => (11, T![/]),
T![*] if p.at(T![*=]) => (1, T![*=]),
T![*] => (11, T![*]),
T![.] if p.at(T![..=]) => (2, T![..=]),
T![.] if p.at(T![..]) => (2, T![..]),
T![!] if p.at(T![!=]) => (5, T![!=]),
T![-] if p.at(T![-=]) => (1, T![-=]),
T![-] => (10, T![-]),
fn current_op(p: &Parser) -> (u8, Op) {
if let Some(t) = p.current3() {
match t {
(T![<], T![<], T![=]) => return (1, Op::Composite(T![<<=], 3)),
(T![>], T![>], T![=]) => return (1, Op::Composite(T![>>=], 3)),
_ => (),
}
_ => NOT_AN_OP
}
if let Some(t) = p.current2() {
match t {
(T![+], T![=]) => return (1, Op::Composite(T![+=], 2)),
(T![-], T![=]) => return (1, Op::Composite(T![-=], 2)),
(T![*], T![=]) => return (1, Op::Composite(T![*=], 2)),
(T![%], T![=]) => return (1, Op::Composite(T![%=], 2)),
(T![/], T![=]) => return (1, Op::Composite(T![/=], 2)),
(T![|], T![=]) => return (1, Op::Composite(T![|=], 2)),
(T![&], T![=]) => return (1, Op::Composite(T![&=], 2)),
(T![^], T![=]) => return (1, Op::Composite(T![^=], 2)),
(T![|], T![|]) => return (3, Op::Composite(T![||], 2)),
(T![&], T![&]) => return (4, Op::Composite(T![&&], 2)),
(T![<], T![=]) => return (5, Op::Composite(T![<=], 2)),
(T![>], T![=]) => return (5, Op::Composite(T![>=], 2)),
(T![<], T![<]) => return (9, Op::Composite(T![<<], 2)),
(T![>], T![>]) => return (9, Op::Composite(T![>>], 2)),
_ => (),
}
}
let bp = match p.current() {
T![=] => 1,
T![..] | T![..=] => 2,
T![==] | T![!=] | T![<] | T![>] => 5,
T![|] => 6,
T![^] => 7,
T![&] => 8,
T![-] | T![+] => 10,
T![*] | T![/] | T![%] => 11,
_ => 0,
};
(bp, Op::Simple)
}
// Parses expression with binding power of at least bp.
@ -308,12 +304,7 @@ fn expr_bp(
break;
}
let m = lhs.precede(p);
match op {
Op::Simple => p.bump_any(),
Op::Composite(kind, n) => {
p.bump_compound(kind, n);
}
}
p.bump(op);
expr_bp(p, r, op_bp + 1, dollar_lvl);
lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
@ -321,8 +312,7 @@ fn expr_bp(
(Some(lhs), BlockLike::NotBlock)
}
const LHS_FIRST: TokenSet =
atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);
const LHS_FIRST: TokenSet = atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOT, MINUS]);
fn lhs(
p: &mut Parser,
@ -353,17 +343,20 @@ fn lhs(
p.bump_any();
PREFIX_EXPR
}
// test full_range_expr
// fn foo() { xs[..]; }
T![..] | T![..=] => {
m = p.start();
p.bump_any();
if p.at_ts(EXPR_FIRST) {
expr_bp(p, r, 2, dollar_lvl);
}
return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
}
_ => {
// test full_range_expr
// fn foo() { xs[..]; }
for &op in [T![..=], T![..]].iter() {
if p.at(op) {
m = p.start();
p.bump(op);
if p.at_ts(EXPR_FIRST) {
expr_bp(p, r, 2, dollar_lvl);
}
return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
}
}
// test expression_after_block
// fn foo() {
// let mut p = F{x: 5};
@ -399,29 +392,13 @@ fn postfix_expr(
// }
T!['('] if allow_calls => call_expr(p, lhs),
T!['['] if allow_calls => index_expr(p, lhs),
T![.] if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::]) => {
method_call_expr(p, lhs)
}
T![.] if p.nth(1) == AWAIT_KW => {
// test await_expr
// fn foo() {
// x.await;
// x.0.await;
// x.0().await?.hello();
// }
let m = lhs.precede(p);
p.bump_any();
p.bump_any();
m.complete(p, AWAIT_EXPR)
}
T![.] => field_expr(p, lhs),
// test postfix_range
// fn foo() { let x = 1..; }
T![..] | T![..=] if !EXPR_FIRST.contains(p.nth(1)) => {
let m = lhs.precede(p);
p.bump_any();
m.complete(p, RANGE_EXPR)
}
T![.] => match postfix_dot_expr(p, lhs) {
Ok(it) => it,
Err(it) => {
lhs = it;
break;
}
},
T![?] => try_expr(p, lhs),
T![as] => cast_expr(p, lhs),
_ => break,
@ -429,7 +406,46 @@ fn postfix_expr(
allow_calls = true;
block_like = BlockLike::NotBlock;
}
(lhs, block_like)
return (lhs, block_like);
fn postfix_dot_expr(
p: &mut Parser,
lhs: CompletedMarker,
) -> Result<CompletedMarker, CompletedMarker> {
assert!(p.at(T![.]));
if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) {
return Ok(method_call_expr(p, lhs));
}
// test await_expr
// fn foo() {
// x.await;
// x.0.await;
// x.0().await?.hello();
// }
if p.nth(1) == T![await] {
let m = lhs.precede(p);
p.bump(T![.]);
p.bump(T![await]);
return Ok(m.complete(p, AWAIT_EXPR));
}
// test postfix_range
// fn foo() { let x = 1..; }
for &(op, la) in [(T![..=], 3), (T![..], 2)].iter() {
if p.at(op) {
return if EXPR_FIRST.contains(p.nth(la)) {
Err(lhs)
} else {
let m = lhs.precede(p);
p.bump(op);
Ok(m.complete(p, RANGE_EXPR))
};
}
}
Ok(field_expr(p, lhs))
}
}
// test call_expr
@ -465,7 +481,7 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
// y.bar::<T>(1, 2,);
// }
fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::]));
assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])));
let m = lhs.precede(p);
p.bump_any();
name_ref(p);
@ -567,7 +583,7 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) {
record_field_list(p);
(m.complete(p, RECORD_LIT), BlockLike::NotBlock)
}
T![!] => {
T![!] if !p.at(T![!=]) => {
let block_like = items::macro_call_after_excl(p);
(m.complete(p, MACRO_CALL), block_like)
}
@ -601,8 +617,8 @@ pub(crate) fn record_field_list(p: &mut Parser) {
}
m.complete(p, RECORD_FIELD);
}
T![..] => {
p.bump_any();
T![.] if p.at(T![..]) => {
p.bump(T![..]);
expr(p);
}
T!['{'] => error_block(p, "expected a field"),

View file

@ -422,7 +422,7 @@ pub(crate) fn token_tree(p: &mut Parser) {
return;
}
T![')'] | T![']'] => p.err_and_bump("unmatched brace"),
_ => p.bump_raw(),
_ => p.bump_any(),
}
}
p.expect(closing_paren_kind);

View file

@ -13,9 +13,8 @@ pub(super) fn use_item(p: &mut Parser, m: Marker) {
/// so handles both `some::path::{inner::path}` and `inner::path` in
/// `use some::path::{inner::path};`
fn use_tree(p: &mut Parser) {
let la = p.nth(1);
let m = p.start();
match (p.current(), la) {
match p.current() {
// Finish the use_tree for cases of e.g.
// `use some::path::{self, *};` or `use *;`
// This does not handle cases such as `use some::path::*`
@ -28,15 +27,15 @@ fn use_tree(p: &mut Parser) {
// use ::*;
// use some::path::{*};
// use some::path::{::*};
(T![*], _) => p.bump_any(),
(T![::], T![*]) => {
T![*] => p.bump(T![*]),
T![:] if p.at(T![::]) && p.nth(2) == T![*] => {
// Parse `use ::*;`, which imports all from the crate root in Rust 2015
// This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`)
// but still parses and errors later: ('crate root in paths can only be used in start position')
// FIXME: Add this error (if not out of scope)
// In Rust 2018, it is always invalid (see above)
p.bump_any();
p.bump_any();
p.bump(T![::]);
p.bump(T![*]);
}
// Open a use tree list
// Handles cases such as `use {some::path};` or `{inner::path}` in
@ -47,10 +46,11 @@ fn use_tree(p: &mut Parser) {
// use {path::from::root}; // Rust 2015
// use ::{some::arbritrary::path}; // Rust 2015
// use ::{{{crate::export}}}; // Nonsensical but perfectly legal nesting
(T!['{'], _) | (T![::], T!['{']) => {
if p.at(T![::]) {
p.bump_any();
}
T!['{'] => {
use_tree_list(p);
}
T![:] if p.at(T![::]) && p.nth(2) == T!['{'] => {
p.bump(T![::]);
use_tree_list(p);
}
// Parse a 'standard' path.
@ -80,8 +80,8 @@ fn use_tree(p: &mut Parser) {
// use Trait as _;
opt_alias(p);
}
T![::] => {
p.bump_any();
T![:] if p.at(T![::]) => {
p.bump(T![::]);
match p.current() {
T![*] => {
p.bump_any();

View file

@ -80,7 +80,7 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
match flavor {
Flavor::OptionalType | Flavor::Normal => {
patterns::pattern(p);
if p.at(T![:]) || flavor.type_required() {
if p.at(T![:]) && !p.at(T![::]) || flavor.type_required() {
types::ascription(p)
}
}
@ -96,10 +96,11 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
// trait Foo {
// fn bar(_: u64, mut x: i32);
// }
if (la0 == IDENT || la0 == T![_]) && la1 == T![:]
if (la0 == IDENT || la0 == T![_]) && la1 == T![:] && !p.nth_at(1, T![::])
|| la0 == T![mut] && la1 == IDENT && la2 == T![:]
|| la0 == T![&] && la1 == IDENT && la2 == T![:]
|| la0 == T![&] && la1 == T![mut] && la2 == IDENT && la3 == T![:]
|| la0 == T![&]
&& (la1 == IDENT && la2 == T![:] && !p.nth_at(2, T![::])
|| la1 == T![mut] && la2 == IDENT && la3 == T![:] && !p.nth_at(3, T![::]))
{
patterns::pattern(p);
types::ascription(p);

View file

@ -1,7 +1,7 @@
use super::*;
pub(super) const PATH_FIRST: TokenSet =
token_set![IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, L_ANGLE];
token_set![IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLON, L_ANGLE];
pub(super) fn is_path_start(p: &Parser) -> bool {
is_use_path_start(p) || p.at(T![<])
@ -9,7 +9,8 @@ pub(super) fn is_path_start(p: &Parser) -> bool {
pub(super) fn is_use_path_start(p: &Parser) -> bool {
match p.current() {
IDENT | T![self] | T![super] | T![crate] | T![::] => true,
IDENT | T![self] | T![super] | T![crate] => true,
T![:] if p.at(T![::]) => true,
_ => false,
}
}
@ -38,13 +39,13 @@ fn path(p: &mut Parser, mode: Mode) {
path_segment(p, mode, true);
let mut qual = path.complete(p, PATH);
loop {
let use_tree = match p.nth(1) {
let use_tree = match p.nth(2) {
T![*] | T!['{'] => true,
_ => false,
};
if p.at(T![::]) && !use_tree {
let path = qual.precede(p);
p.bump_any();
p.bump(T![::]);
path_segment(p, mode, false);
let path = path.complete(p, PATH);
qual = path;

View file

@ -34,17 +34,20 @@ pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) {
// 200 .. 301=> (),
// }
// }
if p.at(T![...]) || p.at(T![..=]) || p.at(T![..]) {
let m = lhs.precede(p);
p.bump_any();
atom_pat(p, recovery_set);
m.complete(p, RANGE_PAT);
for &range_op in [T![...], T![..=], T![..]].iter() {
if p.at(range_op) {
let m = lhs.precede(p);
p.bump(range_op);
atom_pat(p, recovery_set);
m.complete(p, RANGE_PAT);
return;
}
}
// test marco_pat
// fn main() {
// let m!(x) = 0;
// }
else if lhs.kind() == PATH_PAT && p.at(T![!]) {
if lhs.kind() == PATH_PAT && p.at(T![!]) {
let m = lhs.precede(p);
items::macro_call_after_excl(p);
m.complete(p, MACRO_CALL);
@ -56,14 +59,16 @@ const PAT_RECOVERY_SET: TokenSet =
token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
// Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro
// (T![x]).
let is_path_or_macro_pat =
|la1| la1 == T![::] || la1 == T!['('] || la1 == T!['{'] || la1 == T![!];
let m = match p.nth(0) {
T![box] => box_pat(p),
T![ref] | T![mut] | IDENT if !is_path_or_macro_pat(p.nth(1)) => bind_pat(p, true),
T![ref] | T![mut] => bind_pat(p, true),
IDENT => match p.nth(1) {
// Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro
// (T![x]).
T!['('] | T!['{'] | T![!] => path_pat(p),
T![:] if p.nth_at(1, T![::]) => path_pat(p),
_ => bind_pat(p, true),
},
_ if paths::is_use_path_start(p) => path_pat(p),
_ if is_literal_pat_start(p) => literal_pat(p),
@ -158,7 +163,7 @@ fn record_field_pat_list(p: &mut Parser) {
p.bump_any();
while !p.at(EOF) && !p.at(T!['}']) {
match p.current() {
T![..] => p.bump_any(),
T![.] if p.at(T![..]) => p.bump(T![..]),
IDENT if p.nth(1) == T![:] => record_field_pat(p),
T!['{'] => error_block(p, "expected ident"),
T![box] => {
@ -237,7 +242,7 @@ fn slice_pat(p: &mut Parser) -> CompletedMarker {
fn pat_list(p: &mut Parser, ket: SyntaxKind) {
while !p.at(EOF) && !p.at(ket) {
match p.current() {
T![..] => p.bump_any(),
T![.] if p.at(T![..]) => p.bump(T![..]),
_ => {
if !p.at_ts(PATTERN_FIRST) {
p.error("expected a pattern");

View file

@ -2,19 +2,16 @@ use super::*;
pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) {
let m;
match (colon_colon_required, p.nth(0), p.nth(1)) {
(_, T![::], T![<]) => {
m = p.start();
p.bump_any();
p.bump_any();
}
(false, T![<], T![=]) => return,
(false, T![<], _) => {
m = p.start();
p.bump_any();
}
_ => return,
};
if p.at(T![::]) && p.nth(2) == T![<] {
m = p.start();
p.bump(T![::]);
p.bump(T![<]);
} else if !colon_colon_required && p.at(T![<]) && p.nth(1) != T![=] {
m = p.start();
p.bump(T![<]);
} else {
return;
}
while !p.at(EOF) && !p.at(T![>]) {
type_arg(p);
@ -37,7 +34,7 @@ fn type_arg(p: &mut Parser) {
}
// test associated_type_bounds
// fn print_all<T: Iterator<Item: Display>>(printables: T) {}
IDENT if p.nth(1) == T![:] => {
IDENT if p.nth(1) == T![:] && p.nth(2) != T![:] => {
name_ref(p);
type_params::bounds(p);
m.complete(p, ASSOC_TYPE_ARG);

View file

@ -6,7 +6,7 @@ use crate::{
event::Event,
ParseError,
SyntaxKind::{self, EOF, ERROR, TOMBSTONE},
Token, TokenSet, TokenSource, T,
TokenSet, TokenSource, T,
};
/// `Parser` struct provides the low-level API for
@ -40,38 +40,6 @@ impl<'t> Parser<'t> {
self.nth(0)
}
/// Returns the kinds of the current two tokens, if they are not separated
/// by trivia.
///
/// Useful for parsing things like `>>`.
pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> {
let c1 = self.nth(0);
let c2 = self.nth(1);
if self.token_source.current().is_jointed_to_next {
Some((c1, c2))
} else {
None
}
}
/// Returns the kinds of the current three tokens, if they are not separated
/// by trivia.
///
/// Useful for parsing things like `=>>`.
pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> {
let c1 = self.nth(0);
let c2 = self.nth(1);
let c3 = self.nth(2);
if self.token_source.current().is_jointed_to_next
&& self.token_source.lookahead_nth(1).is_jointed_to_next
{
Some((c1, c2, c3))
} else {
None
}
}
/// Lookahead operation: returns the kind of the next nth
/// token.
pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
@ -81,33 +49,93 @@ impl<'t> Parser<'t> {
assert!(steps <= 10_000_000, "the parser seems stuck");
self.steps.set(steps + 1);
// It is because the Dollar will appear between nth
// Following code skips through it
let mut non_dollars_count = 0;
let mut i = 0;
loop {
let token = self.token_source.lookahead_nth(i);
let mut kind = token.kind;
if let Some((composited, step)) = self.is_composite(token, i) {
kind = composited;
i += step;
} else {
i += 1;
}
match kind {
EOF => return EOF,
SyntaxKind::L_DOLLAR | SyntaxKind::R_DOLLAR => {}
_ if non_dollars_count == n => return kind,
_ => non_dollars_count += 1,
}
}
self.token_source.lookahead_nth(n).kind
}
/// Checks if the current token is `kind`.
pub(crate) fn at(&self, kind: SyntaxKind) -> bool {
self.current() == kind
self.nth_at(0, kind)
}
pub(crate) fn nth_at(&self, n: usize, kind: SyntaxKind) -> bool {
match kind {
T![-=] => self.at_composite2(n, T![-], T![=]),
T![->] => self.at_composite2(n, T![-], T![>]),
T![::] => self.at_composite2(n, T![:], T![:]),
T![!=] => self.at_composite2(n, T![!], T![=]),
T![..] => self.at_composite2(n, T![.], T![.]),
T![*=] => self.at_composite2(n, T![*], T![=]),
T![/=] => self.at_composite2(n, T![/], T![=]),
T![&&] => self.at_composite2(n, T![&], T![&]),
T![&=] => self.at_composite2(n, T![&], T![=]),
T![%=] => self.at_composite2(n, T![%], T![=]),
T![^=] => self.at_composite2(n, T![^], T![=]),
T![+=] => self.at_composite2(n, T![+], T![=]),
T![<<] => self.at_composite2(n, T![<], T![<]),
T![<=] => self.at_composite2(n, T![<], T![=]),
T![==] => self.at_composite2(n, T![=], T![=]),
T![=>] => self.at_composite2(n, T![=], T![>]),
T![>=] => self.at_composite2(n, T![>], T![=]),
T![>>] => self.at_composite2(n, T![>], T![>]),
T![|=] => self.at_composite2(n, T![|], T![=]),
T![||] => self.at_composite2(n, T![|], T![|]),
T![...] => self.at_composite3(n, T![.], T![.], T![.]),
T![..=] => self.at_composite3(n, T![.], T![.], T![=]),
T![<<=] => self.at_composite3(n, T![<], T![<], T![=]),
T![>>=] => self.at_composite3(n, T![>], T![>], T![=]),
_ => self.token_source.lookahead_nth(n).kind == kind,
}
}
/// Consume the next token if `kind` matches.
pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
if !self.at(kind) {
return false;
}
let n_raw_tokens = match kind {
T![-=]
| T![->]
| T![::]
| T![!=]
| T![..]
| T![*=]
| T![/=]
| T![&&]
| T![&=]
| T![%=]
| T![^=]
| T![+=]
| T![<<]
| T![<=]
| T![==]
| T![=>]
| T![>=]
| T![>>]
| T![|=]
| T![||] => 2,
T![...] | T![..=] | T![<<=] | T![>>=] => 3,
_ => 1,
};
self.do_bump(kind, n_raw_tokens);
true
}
fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool {
let t1 = self.token_source.lookahead_nth(n + 0);
let t2 = self.token_source.lookahead_nth(n + 1);
t1.kind == k1 && t1.is_jointed_to_next && t2.kind == k2
}
fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool {
let t1 = self.token_source.lookahead_nth(n + 0);
let t2 = self.token_source.lookahead_nth(n + 1);
let t3 = self.token_source.lookahead_nth(n + 2);
(t1.kind == k1 && t1.is_jointed_to_next)
&& (t2.kind == k2 && t2.is_jointed_to_next)
&& t3.kind == k3
}
/// Checks if the current token is in `kinds`.
@ -129,22 +157,9 @@ impl<'t> Parser<'t> {
Marker::new(pos)
}
/// Advances the parser by one token unconditionally
/// Mainly used in `token_tree` parsing
pub(crate) fn bump_raw(&mut self) {
let mut kind = self.token_source.current().kind;
// Skip dollars, do_bump will eat these later
let mut i = 0;
while kind == SyntaxKind::L_DOLLAR || kind == SyntaxKind::R_DOLLAR {
kind = self.token_source.lookahead_nth(i).kind;
i += 1;
}
if kind == EOF {
return;
}
self.do_bump(kind, 1);
/// Consume the next token if `kind` matches.
pub(crate) fn bump(&mut self, kind: SyntaxKind) {
assert!(self.eat(kind));
}
/// Advances the parser by one token with composite puncts handled
@ -153,27 +168,7 @@ impl<'t> Parser<'t> {
if kind == EOF {
return;
}
use SyntaxKind::*;
// Handle parser composites
match kind {
T![...] | T![..=] => {
self.bump_compound(kind, 3);
}
T![..] | T![::] | T![==] | T![=>] | T![!=] | T![->] => {
self.bump_compound(kind, 2);
}
_ => {
self.do_bump(kind, 1);
}
}
}
/// Advances the parser by one token, asserting that it is exactly the expected token
pub(crate) fn bump(&mut self, expected: SyntaxKind) {
debug_assert!(self.nth(0) == expected);
self.bump_any()
self.do_bump(kind, 1)
}
/// Advances the parser by one token, remapping its kind.
@ -190,13 +185,6 @@ impl<'t> Parser<'t> {
self.do_bump(kind, 1);
}
/// Advances the parser by `n` tokens, remapping its kind.
/// This is useful to create compound tokens from parts. For
/// example, an `<<` token is two consecutive remapped `<` tokens
pub(crate) fn bump_compound(&mut self, kind: SyntaxKind, n: u8) {
self.do_bump(kind, n);
}
/// Emit error with the `message`
/// FIXME: this should be much more fancy and support
/// structured errors with spans and notes, like rustc
@ -206,15 +194,6 @@ impl<'t> Parser<'t> {
self.push_event(Event::Error { msg })
}
/// Consume the next token if `kind` matches.
pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool {
if !self.at(kind) {
return false;
}
self.bump_any();
true
}
/// Consume the next token if it is `kind` or emit an error
/// otherwise.
pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool {
@ -243,7 +222,7 @@ impl<'t> Parser<'t> {
}
fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
self.eat_dollars();
// self.eat_dollars();
for _ in 0..n_raw_tokens {
self.token_source.bump();
@ -256,64 +235,6 @@ impl<'t> Parser<'t> {
self.events.push(event)
}
/// helper function for check if it is composite.
fn is_composite(&self, first: Token, n: usize) -> Option<(SyntaxKind, usize)> {
// We assume the dollars will not occur between
// multi-byte tokens
let jn1 = first.is_jointed_to_next;
if !jn1 && first.kind != T![-] {
return None;
}
let second = self.token_source.lookahead_nth(n + 1);
if first.kind == T![-] && second.kind == T![>] {
return Some((T![->], 2));
}
if !jn1 {
return None;
}
match (first.kind, second.kind) {
(T![:], T![:]) => return Some((T![::], 2)),
(T![=], T![=]) => return Some((T![==], 2)),
(T![=], T![>]) => return Some((T![=>], 2)),
(T![!], T![=]) => return Some((T![!=], 2)),
_ => {}
}
if first.kind != T![.] || second.kind != T![.] {
return None;
}
let third = self.token_source.lookahead_nth(n + 2);
let jn2 = second.is_jointed_to_next;
let la3 = third.kind;
if jn2 && la3 == T![.] {
return Some((T![...], 3));
}
if la3 == T![=] {
return Some((T![..=], 3));
}
return Some((T![..], 2));
}
fn eat_dollars(&mut self) {
loop {
match self.token_source.current().kind {
k @ SyntaxKind::L_DOLLAR | k @ SyntaxKind::R_DOLLAR => {
self.token_source.bump();
self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
}
_ => {
return;
}
}
}
}
pub(crate) fn eat_l_dollars(&mut self) -> usize {
let mut ate_count = 0;
loop {

View file

@ -15,6 +15,18 @@ fn lexer_tests() {
})
}
#[test]
fn parse_smoke_test() {
let code = r##"
fn main() {
println!("Hello, world!")
}
"##;
let parse = SourceFile::parse(code);
assert!(parse.ok().is_ok());
}
#[test]
fn parser_tests() {
dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
@ -75,7 +87,9 @@ fn self_hosting_parsing() {
{
count += 1;
let text = read_text(entry.path());
SourceFile::parse(&text).ok().expect("There should be no errors in the file");
if let Err(errors) = SourceFile::parse(&text).ok() {
panic!("Parsing errors:\n{:?}\n{}\n", errors, entry.path().display());
}
}
assert!(
count > 30,

View file

@ -0,0 +1,5 @@
fn a() -> Foo<bar::Baz> {}
fn b(_: impl FnMut(x::Y)) {}
fn c(_: impl FnMut(&x::Y)) {}

View file

@ -0,0 +1,126 @@
SOURCE_FILE@[0; 88)
FN_DEF@[0; 26)
FN_KW@[0; 2) "fn"
WHITESPACE@[2; 3) " "
NAME@[3; 4)
IDENT@[3; 4) "a"
PARAM_LIST@[4; 6)
L_PAREN@[4; 5) "("
R_PAREN@[5; 6) ")"
WHITESPACE@[6; 7) " "
RET_TYPE@[7; 23)
THIN_ARROW@[7; 9) "->"
WHITESPACE@[9; 10) " "
PATH_TYPE@[10; 23)
PATH@[10; 23)
PATH_SEGMENT@[10; 23)
NAME_REF@[10; 13)
IDENT@[10; 13) "Foo"
TYPE_ARG_LIST@[13; 23)
L_ANGLE@[13; 14) "<"
TYPE_ARG@[14; 22)
PATH_TYPE@[14; 22)
PATH@[14; 22)
PATH@[14; 17)
PATH_SEGMENT@[14; 17)
NAME_REF@[14; 17)
IDENT@[14; 17) "bar"
COLONCOLON@[17; 19) "::"
PATH_SEGMENT@[19; 22)
NAME_REF@[19; 22)
IDENT@[19; 22) "Baz"
R_ANGLE@[22; 23) ">"
WHITESPACE@[23; 24) " "
BLOCK_EXPR@[24; 26)
BLOCK@[24; 26)
L_CURLY@[24; 25) "{"
R_CURLY@[25; 26) "}"
WHITESPACE@[26; 28) "\n\n"
FN_DEF@[28; 56)
FN_KW@[28; 30) "fn"
WHITESPACE@[30; 31) " "
NAME@[31; 32)
IDENT@[31; 32) "b"
PARAM_LIST@[32; 53)
L_PAREN@[32; 33) "("
PARAM@[33; 52)
PLACEHOLDER_PAT@[33; 34)
UNDERSCORE@[33; 34) "_"
COLON@[34; 35) ":"
WHITESPACE@[35; 36) " "
IMPL_TRAIT_TYPE@[36; 52)
IMPL_KW@[36; 40) "impl"
WHITESPACE@[40; 41) " "
TYPE_BOUND_LIST@[41; 52)
TYPE_BOUND@[41; 52)
PATH_TYPE@[41; 52)
PATH@[41; 52)
PATH_SEGMENT@[41; 52)
NAME_REF@[41; 46)
IDENT@[41; 46) "FnMut"
PARAM_LIST@[46; 52)
L_PAREN@[46; 47) "("
PARAM@[47; 51)
PATH_TYPE@[47; 51)
PATH@[47; 51)
PATH@[47; 48)
PATH_SEGMENT@[47; 48)
NAME_REF@[47; 48)
IDENT@[47; 48) "x"
COLONCOLON@[48; 50) "::"
PATH_SEGMENT@[50; 51)
NAME_REF@[50; 51)
IDENT@[50; 51) "Y"
R_PAREN@[51; 52) ")"
R_PAREN@[52; 53) ")"
WHITESPACE@[53; 54) " "
BLOCK_EXPR@[54; 56)
BLOCK@[54; 56)
L_CURLY@[54; 55) "{"
R_CURLY@[55; 56) "}"
WHITESPACE@[56; 58) "\n\n"
FN_DEF@[58; 87)
FN_KW@[58; 60) "fn"
WHITESPACE@[60; 61) " "
NAME@[61; 62)
IDENT@[61; 62) "c"
PARAM_LIST@[62; 84)
L_PAREN@[62; 63) "("
PARAM@[63; 83)
PLACEHOLDER_PAT@[63; 64)
UNDERSCORE@[63; 64) "_"
COLON@[64; 65) ":"
WHITESPACE@[65; 66) " "
IMPL_TRAIT_TYPE@[66; 83)
IMPL_KW@[66; 70) "impl"
WHITESPACE@[70; 71) " "
TYPE_BOUND_LIST@[71; 83)
TYPE_BOUND@[71; 83)
PATH_TYPE@[71; 83)
PATH@[71; 83)
PATH_SEGMENT@[71; 83)
NAME_REF@[71; 76)
IDENT@[71; 76) "FnMut"
PARAM_LIST@[76; 83)
L_PAREN@[76; 77) "("
PARAM@[77; 82)
REFERENCE_TYPE@[77; 82)
AMP@[77; 78) "&"
PATH_TYPE@[78; 82)
PATH@[78; 82)
PATH@[78; 79)
PATH_SEGMENT@[78; 79)
NAME_REF@[78; 79)
IDENT@[78; 79) "x"
COLONCOLON@[79; 81) "::"
PATH_SEGMENT@[81; 82)
NAME_REF@[81; 82)
IDENT@[81; 82) "Y"
R_PAREN@[82; 83) ")"
R_PAREN@[83; 84) ")"
WHITESPACE@[84; 85) " "
BLOCK_EXPR@[85; 87)
BLOCK@[85; 87)
L_CURLY@[85; 86) "{"
R_CURLY@[86; 87) "}"
WHITESPACE@[87; 88) "\n"

View file

@ -0,0 +1,5 @@
type X = ();
fn main() {
let ():::X = ();
}

View file

@ -0,0 +1,50 @@
SOURCE_FILE@[0; 49)
TYPE_ALIAS_DEF@[0; 12)
TYPE_KW@[0; 4) "type"
WHITESPACE@[4; 5) " "
NAME@[5; 6)
IDENT@[5; 6) "X"
WHITESPACE@[6; 7) " "
EQ@[7; 8) "="
WHITESPACE@[8; 9) " "
TUPLE_TYPE@[9; 11)
L_PAREN@[9; 10) "("
R_PAREN@[10; 11) ")"
SEMI@[11; 12) ";"
WHITESPACE@[12; 14) "\n\n"
FN_DEF@[14; 48)
FN_KW@[14; 16) "fn"
WHITESPACE@[16; 17) " "
NAME@[17; 21)
IDENT@[17; 21) "main"
PARAM_LIST@[21; 23)
L_PAREN@[21; 22) "("
R_PAREN@[22; 23) ")"
WHITESPACE@[23; 24) " "
BLOCK_EXPR@[24; 48)
BLOCK@[24; 48)
L_CURLY@[24; 25) "{"
WHITESPACE@[25; 30) "\n "
LET_STMT@[30; 46)
LET_KW@[30; 33) "let"
WHITESPACE@[33; 34) " "
TUPLE_PAT@[34; 36)
L_PAREN@[34; 35) "("
R_PAREN@[35; 36) ")"
COLON@[36; 37) ":"
PATH_TYPE@[37; 40)
PATH@[37; 40)
PATH_SEGMENT@[37; 40)
COLONCOLON@[37; 39) "::"
NAME_REF@[39; 40)
IDENT@[39; 40) "X"
WHITESPACE@[40; 41) " "
EQ@[41; 42) "="
WHITESPACE@[42; 43) " "
TUPLE_EXPR@[43; 45)
L_PAREN@[43; 44) "("
R_PAREN@[44; 45) ")"
SEMI@[45; 46) ";"
WHITESPACE@[46; 47) "\n"
R_CURLY@[47; 48) "}"
WHITESPACE@[48; 49) "\n"