Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 13:03:31 +00:00
Replace SyntaxKind usage with T! macro where applicable

parent e1430d822e
commit e618d12903

12 changed files with 50 additions and 44 deletions
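Note for readers unfamiliar with the macro: T![..] expands to the matching SyntaxKind variant, so every rewrite in the diff below is behavior-preserving shorthand. A minimal standalone sketch of the equivalence, using only token/kind pairs that actually appear in this diff (hypothetical example, assuming the syntax crate is available as a dependency):

use syntax::{SyntaxKind, T};

fn main() {
    // Each T![..] invocation expands to a SyntaxKind variant, so the two
    // spellings compare equal; these pairs are taken from the hunks below.
    assert_eq!(T![,], SyntaxKind::COMMA);
    assert_eq!(T![as], SyntaxKind::AS_KW);
    assert_eq!(T![self], SyntaxKind::SELF_KW);
    assert_eq!(T!['('], SyntaxKind::L_PAREN);
    assert_eq!(T![$], SyntaxKind::DOLLAR);
}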
@@ -1,6 +1,6 @@
 use syntax::{
     ast::{self, AstNode},
-    match_ast, SyntaxElement, SyntaxKind, TextRange, TextSize, T,
+    match_ast, SyntaxElement, TextRange, TextSize, T,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -136,14 +136,14 @@ fn needs_parentheses_around_macro_contents(macro_contents: Vec<SyntaxElement>) -
             symbol_kind => {
                 let symbol_not_in_bracket = unpaired_brackets_in_contents.is_empty();
                 if symbol_not_in_bracket
-                    && symbol_kind != SyntaxKind::COLON // paths
-                    && (symbol_kind != SyntaxKind::DOT // field/method access
+                    && symbol_kind != T![:] // paths
+                    && (symbol_kind != T![.] // field/method access
                         || macro_contents // range expressions consist of two SyntaxKind::Dot in macro invocations
                             .peek()
-                            .map(|element| element.kind() == SyntaxKind::DOT)
+                            .map(|element| element.kind() == T![.])
                             .unwrap_or(false))
-                    && symbol_kind != SyntaxKind::QUESTION // try operator
-                    && (symbol_kind.is_punct() || symbol_kind == SyntaxKind::AS_KW)
+                    && symbol_kind != T![?] // try operator
+                    && (symbol_kind.is_punct() || symbol_kind == T![as])
                 {
                     return true;
                 }
@@ -5,7 +5,7 @@
 
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
-use syntax::{ast, AstNode, SyntaxKind};
+use syntax::{ast, AstNode, T};
 
 use crate::{
     context::CompletionContext,
@@ -205,8 +205,7 @@ fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result<FxHashSet<Strin
 fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result<FxHashSet<String>, ()> {
     match (derive_input.left_delimiter_token(), derive_input.right_delimiter_token()) {
         (Some(left_paren), Some(right_paren))
-            if left_paren.kind() == SyntaxKind::L_PAREN
-                && right_paren.kind() == SyntaxKind::R_PAREN =>
+            if left_paren.kind() == T!['('] && right_paren.kind() == T![')'] =>
         {
             let mut input_derives = FxHashSet::default();
             let mut current_derive = String::new();
@@ -218,7 +217,7 @@ fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result<FxHashSet<Strin
                 .skip(1)
                 .take_while(|token| token != &right_paren)
             {
-                if SyntaxKind::COMMA == token.kind() {
+                if T![,] == token.kind() {
                     if !current_derive.is_empty() {
                         input_derives.insert(current_derive);
                         current_derive = String::new();
@@ -93,11 +93,11 @@ fn completion_match(ctx: &CompletionContext) -> Option<(ImplCompletionKind, Synt
             // `impl .. { const $0 }`
             // ERROR 0
             // CONST_KW <- *
-            SyntaxKind::CONST_KW => 0,
+            T![const] => 0,
             // `impl .. { fn/type $0 }`
             // FN/TYPE_ALIAS 0
             // FN_KW <- *
-            SyntaxKind::FN_KW | SyntaxKind::TYPE_KW => 0,
+            T![fn] | T![type] => 0,
             // `impl .. { fn/type/const foo$0 }`
             // FN/TYPE_ALIAS/CONST 1
             // NAME 0
@@ -121,7 +121,7 @@ fn completion_match(ctx: &CompletionContext) -> Option<(ImplCompletionKind, Synt
     let impl_def = ast::Impl::cast(impl_item.parent()?.parent()?)?;
     let kind = match impl_item.kind() {
         // `impl ... { const $0 fn/type/const }`
-        _ if token.kind() == SyntaxKind::CONST_KW => ImplCompletionKind::Const,
+        _ if token.kind() == T![const] => ImplCompletionKind::Const,
         SyntaxKind::CONST | SyntaxKind::ERROR => ImplCompletionKind::Const,
         SyntaxKind::TYPE_ALIAS => ImplCompletionKind::TypeAlias,
         SyntaxKind::FN => ImplCompletionKind::Fn,
@@ -5,7 +5,7 @@ use syntax::{
     ast::{self, LoopBodyOwner},
     match_ast, AstNode, Direction, NodeOrToken, SyntaxElement,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken,
+    SyntaxNode, SyntaxToken, T,
 };
 
 #[cfg(test)]
@@ -119,7 +119,7 @@ pub(crate) fn unsafe_is_prev(element: SyntaxElement) -> bool {
     element
         .into_token()
         .and_then(|it| previous_non_trivia_token(it))
-        .filter(|it| it.kind() == UNSAFE_KW)
+        .filter(|it| it.kind() == T![unsafe])
         .is_some()
 }
 #[test]
@@ -131,7 +131,7 @@ pub(crate) fn if_is_prev(element: SyntaxElement) -> bool {
     element
         .into_token()
         .and_then(|it| previous_non_trivia_token(it))
-        .filter(|it| it.kind() == IF_KW)
+        .filter(|it| it.kind() == T![if])
        .is_some()
 }
 
@@ -139,7 +139,7 @@ pub(crate) fn fn_is_prev(element: SyntaxElement) -> bool {
     element
         .into_token()
         .and_then(|it| previous_non_trivia_token(it))
-        .filter(|it| it.kind() == FN_KW)
+        .filter(|it| it.kind() == T![fn])
         .is_some()
 }
 #[test]
@@ -154,7 +154,7 @@ pub(crate) fn for_is_prev2(element: SyntaxElement) -> bool {
         .into_token()
         .and_then(|it| previous_non_trivia_token(it))
         .and_then(|it| previous_non_trivia_token(it))
-        .filter(|it| it.kind() == FOR_KW)
+        .filter(|it| it.kind() == T![for])
         .is_some()
 }
 #[test]
@@ -21,7 +21,7 @@ use ide_db::{
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, NameOwner},
-    match_ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TokenAtOffset,
+    match_ast, AstNode, SyntaxNode, TextRange, TokenAtOffset, T,
 };
 
 use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo, SymbolKind};
@@ -203,7 +203,7 @@ fn get_struct_def_name_for_struct_literal_search(
     position: FilePosition,
 ) -> Option<ast::Name> {
     if let TokenAtOffset::Between(ref left, ref right) = syntax.token_at_offset(position.offset) {
-        if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN {
+        if right.kind() != T!['{'] && right.kind() != T!['('] {
             return None;
         }
         if let Some(name) =
@@ -230,7 +230,7 @@ fn get_enum_def_name_for_struct_literal_search(
     position: FilePosition,
 ) -> Option<ast::Name> {
     if let TokenAtOffset::Between(ref left, ref right) = syntax.token_at_offset(position.offset) {
-        if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN {
+        if right.kind() != T!['{'] && right.kind() != T!['('] {
             return None;
         }
         if let Some(name) =
@@ -255,8 +255,7 @@ fn try_find_self_references(
     syntax: &SyntaxNode,
     position: FilePosition,
 ) -> Option<RangeInfo<ReferenceSearchResult>> {
-    let self_token =
-        syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW)?;
+    let self_token = syntax.token_at_offset(position.offset).find(|t| t.kind() == T![self])?;
     let parent = self_token.parent();
     match_ast! {
         match parent {
@@ -119,7 +119,7 @@ fn is_metavariable(element: SyntaxElement) -> Option<TextRange> {
     let tok = element.as_token()?;
     match tok.kind() {
        kind if kind == SyntaxKind::IDENT || kind.is_keyword() => {
-            if let Some(_dollar) = tok.prev_token().filter(|t| t.kind() == SyntaxKind::DOLLAR) {
+            if let Some(_dollar) = tok.prev_token().filter(|t| t.kind() == T![$]) {
                 return Some(tok.text_range());
             }
         }
@@ -15,8 +15,16 @@ use super::*;
 // let _ = b"e";
 // let _ = br"f";
 // }
-pub(crate) const LITERAL_FIRST: TokenSet =
-    TokenSet::new(&[TRUE_KW, FALSE_KW, INT_NUMBER, FLOAT_NUMBER, BYTE, CHAR, STRING, BYTE_STRING]);
+pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
+    T![true],
+    T![false],
+    INT_NUMBER,
+    FLOAT_NUMBER,
+    BYTE,
+    CHAR,
+    STRING,
+    BYTE_STRING,
+]);
 
 pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
     if !p.at_ts(LITERAL_FIRST) {
@@ -27,19 +27,19 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
 }
 
 pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
-    FN_KW,
-    STRUCT_KW,
-    ENUM_KW,
-    IMPL_KW,
-    TRAIT_KW,
-    CONST_KW,
-    STATIC_KW,
-    LET_KW,
-    MOD_KW,
-    PUB_KW,
-    CRATE_KW,
-    USE_KW,
-    MACRO_KW,
+    T![fn],
+    T![struct],
+    T![enum],
+    T![impl],
+    T![trait],
+    T![const],
+    T![static],
+    T![let],
+    T![mod],
+    T![pub],
+    T![crate],
+    T![use],
+    T![macro],
     T![;],
 ]);
 
@@ -110,7 +110,7 @@ fn choose_type_params_over_qpath(p: &Parser) -> bool {
     if !p.at(T![<]) {
         return false;
     }
-    if p.nth(1) == T![#] || p.nth(1) == T![>] || p.nth(1) == CONST_KW {
+    if p.nth(1) == T![#] || p.nth(1) == T![>] || p.nth(1) == T![const] {
         return true;
     }
     (p.nth(1) == LIFETIME_IDENT || p.nth(1) == IDENT)
@@ -83,7 +83,7 @@ fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) {
 }
 
 const PAT_RECOVERY_SET: TokenSet =
-    TokenSet::new(&[LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]);
+    TokenSet::new(&[T![let], T![if], T![while], T![loop], T![match], T![')'], T![,]]);
 
 fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
     let m = match p.nth(0) {
@@ -25,7 +25,7 @@ fn generic_param_list(p: &mut Parser) {
         match p.current() {
             LIFETIME_IDENT => lifetime_param(p, m),
             IDENT => type_param(p, m),
-            CONST_KW => const_param(p, m),
+            T![const] => const_param(p, m),
             _ => {
                 m.abandon(p);
                 p.err_and_bump("expected type parameter")
@@ -66,7 +66,7 @@ fn type_param(p: &mut Parser, m: Marker) {
 // test const_param
 // struct S<const N: u32>;
 fn const_param(p: &mut Parser, m: Marker) {
-    assert!(p.at(CONST_KW));
+    assert!(p.at(T![const]));
     p.bump(T![const]);
     name(p);
     types::ascription(p);
@@ -133,7 +133,7 @@ impl ast::Attr {
             first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind);
 
         match (first_token_kind, second_token_kind) {
-            (Some(SyntaxKind::POUND), Some(T![!])) => AttrKind::Inner,
+            (Some(T![#]), Some(T![!])) => AttrKind::Inner,
             _ => AttrKind::Outer,
         }
     }