//! A bit-set of `SyntaxKind`s.

use crate::SyntaxKind;

/// A bit-set of `SyntaxKind`s
#[derive(Clone, Copy)]
pub(crate) struct TokenSet([u64; 3]);
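
// Three `u64` words give 192 bits, one bit per token `SyntaxKind` discriminant;
// `new` and `contains` debug-assert that every discriminant fits in this range.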

/// `TokenSet`s should only include token `SyntaxKind`s, so the discriminant of any passed/included
/// `SyntaxKind` must *not* be greater than that of the last token `SyntaxKind`.
/// See #17037.
const LAST_TOKEN_KIND_DISCRIMINANT: usize = SyntaxKind::SHEBANG as usize;

impl TokenSet {
    pub(crate) const EMPTY: TokenSet = TokenSet([0; 3]);

    pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
        let mut res = [0; 3];
        let mut i = 0;
        while i < kinds.len() {
            let discriminant = kinds[i] as usize;
            debug_assert!(
                discriminant <= LAST_TOKEN_KIND_DISCRIMINANT,
                "Expected a token `SyntaxKind`"
            );
            let idx = discriminant / 64;
            res[idx] |= 1 << (discriminant % 64);
            i += 1;
        }
        TokenSet(res)
    }

    pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet([self.0[0] | other.0[0], self.0[1] | other.0[1], self.0[2] | other.0[2]])
    }
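
    // A minimal usage sketch (the constant name below is hypothetical, not defined
    // in this file): since `new` and `union` are `const fn`, combined sets can be
    // built as compile-time constants, e.g.
    //
    //     const HYPOTHETICAL_SET: TokenSet =
    //         TokenSet::new(&[SyntaxKind::PLUS]).union(TokenSet::new(&[SyntaxKind::SHEBANG]));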

    pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
        let discriminant = kind as usize;
        debug_assert!(
            discriminant <= LAST_TOKEN_KIND_DISCRIMINANT,
            "Expected a token `SyntaxKind`"
        );
        let idx = discriminant / 64;
        let mask = 1 << (discriminant % 64);
        self.0[idx] & mask != 0
    }
}

#[test]
fn token_set_works_for_tokens() {
    use crate::SyntaxKind::*;
    let ts = TokenSet::new(&[EOF, SHEBANG]);
    assert!(ts.contains(EOF));
    assert!(ts.contains(SHEBANG));
    assert!(!ts.contains(PLUS));
}
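
// A minimal additional sketch of `EMPTY` and `union`, reusing the token kinds from
// the test above.
#[test]
fn token_set_union_and_empty() {
    use crate::SyntaxKind::*;
    let a = TokenSet::new(&[EOF]);
    let b = TokenSet::new(&[SHEBANG]);
    let ab = a.union(b);
    assert!(!TokenSet::EMPTY.contains(EOF));
    assert!(ab.contains(EOF));
    assert!(ab.contains(SHEBANG));
    assert!(!ab.contains(PLUS));
}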