mirror of
https://github.com/rust-lang/rust-analyzer
synced 2024-12-26 13:03:31 +00:00
Moved TokenSet into its own file.
This commit is contained in:
parent
751562d2f7
commit
518cc87496
4 changed files with 41 additions and 37 deletions
|
@ -32,7 +32,8 @@ mod type_params;
|
||||||
mod types;
|
mod types;
|
||||||
|
|
||||||
use {
|
use {
|
||||||
parser_api::{Marker, CompletedMarker, Parser, TokenSet},
|
token_set::TokenSet,
|
||||||
|
parser_api::{Marker, CompletedMarker, Parser},
|
||||||
SyntaxKind::{self, *},
|
SyntaxKind::{self, *},
|
||||||
};
|
};
|
||||||
pub(crate) use self::{
|
pub(crate) use self::{
|
||||||
|
|
|
@ -31,6 +31,7 @@ pub mod algo;
|
||||||
pub mod ast;
|
pub mod ast;
|
||||||
mod lexer;
|
mod lexer;
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
|
mod token_set;
|
||||||
mod parser_api;
|
mod parser_api;
|
||||||
mod grammar;
|
mod grammar;
|
||||||
mod parser_impl;
|
mod parser_impl;
|
||||||
|
|
|
@ -1,45 +1,10 @@
|
||||||
use {
|
use {
|
||||||
|
token_set::TokenSet,
|
||||||
parser_impl::ParserImpl,
|
parser_impl::ParserImpl,
|
||||||
SyntaxKind::{self, ERROR},
|
SyntaxKind::{self, ERROR},
|
||||||
drop_bomb::DropBomb,
|
drop_bomb::DropBomb,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Clone, Copy)]
|
|
||||||
pub(crate) struct TokenSet(pub(crate) u128);
|
|
||||||
|
|
||||||
fn mask(kind: SyntaxKind) -> u128 {
|
|
||||||
1u128 << (kind as usize)
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TokenSet {
|
|
||||||
pub const EMPTY: TokenSet = TokenSet(0);
|
|
||||||
|
|
||||||
pub fn contains(&self, kind: SyntaxKind) -> bool {
|
|
||||||
self.0 & mask(kind) != 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[macro_export]
|
|
||||||
macro_rules! token_set {
|
|
||||||
($($t:ident),*) => { TokenSet($(1u128 << ($t as usize))|*) };
|
|
||||||
($($t:ident),* ,) => { token_set!($($t),*) };
|
|
||||||
}
|
|
||||||
|
|
||||||
#[macro_export]
|
|
||||||
macro_rules! token_set_union {
|
|
||||||
($($ts:expr),*) => { TokenSet($($ts.0)|*) };
|
|
||||||
($($ts:expr),* ,) => { token_set_union!($($ts),*) };
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn token_set_works_for_tokens() {
|
|
||||||
use SyntaxKind::*;
|
|
||||||
let ts = token_set! { EOF, SHEBANG };
|
|
||||||
assert!(ts.contains(EOF));
|
|
||||||
assert!(ts.contains(SHEBANG));
|
|
||||||
assert!(!ts.contains(PLUS));
|
|
||||||
}
|
|
||||||
|
|
||||||
/// `Parser` struct provides the low-level API for
|
/// `Parser` struct provides the low-level API for
|
||||||
/// navigating through the stream of tokens and
|
/// navigating through the stream of tokens and
|
||||||
/// constructing the parse tree. The actual parsing
|
/// constructing the parse tree. The actual parsing
|
||||||
|
|
37
crates/libsyntax2/src/token_set.rs
Normal file
37
crates/libsyntax2/src/token_set.rs
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
use SyntaxKind;
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub(crate) struct TokenSet(pub(crate) u128);
|
||||||
|
|
||||||
|
fn mask(kind: SyntaxKind) -> u128 {
|
||||||
|
1u128 << (kind as usize)
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TokenSet {
|
||||||
|
pub const EMPTY: TokenSet = TokenSet(0);
|
||||||
|
|
||||||
|
pub fn contains(&self, kind: SyntaxKind) -> bool {
|
||||||
|
self.0 & mask(kind) != 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! token_set {
|
||||||
|
($($t:ident),*) => { TokenSet($(1u128 << ($t as usize))|*) };
|
||||||
|
($($t:ident),* ,) => { token_set!($($t),*) };
|
||||||
|
}
|
||||||
|
|
||||||
|
#[macro_export]
|
||||||
|
macro_rules! token_set_union {
|
||||||
|
($($ts:expr),*) => { TokenSet($($ts.0)|*) };
|
||||||
|
($($ts:expr),* ,) => { token_set_union!($($ts),*) };
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn token_set_works_for_tokens() {
|
||||||
|
use SyntaxKind::*;
|
||||||
|
let ts = token_set! { EOF, SHEBANG };
|
||||||
|
assert!(ts.contains(EOF));
|
||||||
|
assert!(ts.contains(SHEBANG));
|
||||||
|
assert!(!ts.contains(PLUS));
|
||||||
|
}
|
Loading…
Reference in a new issue