mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-28 14:03:35 +00:00
Replace SyntaxKind usage with T! macro where applicable
This commit is contained in:
parent e1430d822e
commit e618d12903
12 changed files with 50 additions and 44 deletions
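The substitution is mechanical: in the rust-analyzer `syntax` crate, the `T!` macro expands to the corresponding `SyntaxKind` variant (for example `T![const]` is `SyntaxKind::CONST_KW` and `T!['(']` is `SyntaxKind::L_PAREN`), so kind comparisons read as the token they match instead of the variant name. A minimal sketch of the equivalence, assuming the `syntax` crate as a dependency; the helper functions are hypothetical and exist only for illustration:

// A sketch of the before/after shape of this commit, assuming the
// rust-analyzer `syntax` crate; the helper names are illustrative only.
use syntax::{SyntaxKind, SyntaxToken, T};

// Before: compare against the spelled-out SyntaxKind variant.
fn is_const_kw_old(token: &SyntaxToken) -> bool {
    token.kind() == SyntaxKind::CONST_KW
}

// After: T![const] expands to SyntaxKind::CONST_KW, so this check is
// behaviourally identical but reads as the token it matches.
fn is_const_kw_new(token: &SyntaxToken) -> bool {
    token.kind() == T![const]
}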
@@ -1,6 +1,6 @@
 use syntax::{
     ast::{self, AstNode},
-    match_ast, SyntaxElement, SyntaxKind, TextRange, TextSize, T,
+    match_ast, SyntaxElement, TextRange, TextSize, T,
 };

 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -136,14 +136,14 @@ fn needs_parentheses_around_macro_contents(macro_contents: Vec<SyntaxElement>) -
         symbol_kind => {
             let symbol_not_in_bracket = unpaired_brackets_in_contents.is_empty();
             if symbol_not_in_bracket
-                && symbol_kind != SyntaxKind::COLON // paths
-                && (symbol_kind != SyntaxKind::DOT // field/method access
+                && symbol_kind != T![:] // paths
+                && (symbol_kind != T![.] // field/method access
                     || macro_contents // range expressions consist of two SyntaxKind::Dot in macro invocations
                         .peek()
-                        .map(|element| element.kind() == SyntaxKind::DOT)
+                        .map(|element| element.kind() == T![.])
                         .unwrap_or(false))
-                && symbol_kind != SyntaxKind::QUESTION // try operator
-                && (symbol_kind.is_punct() || symbol_kind == SyntaxKind::AS_KW)
+                && symbol_kind != T![?] // try operator
+                && (symbol_kind.is_punct() || symbol_kind == T![as])
             {
                 return true;
             }
@@ -5,7 +5,7 @@

 use itertools::Itertools;
 use rustc_hash::FxHashSet;
-use syntax::{ast, AstNode, SyntaxKind};
+use syntax::{ast, AstNode, T};

 use crate::{
     context::CompletionContext,
@@ -205,8 +205,7 @@ fn complete_lint(
 fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result<FxHashSet<String>, ()> {
     match (derive_input.left_delimiter_token(), derive_input.right_delimiter_token()) {
         (Some(left_paren), Some(right_paren))
-            if left_paren.kind() == SyntaxKind::L_PAREN
-                && right_paren.kind() == SyntaxKind::R_PAREN =>
+            if left_paren.kind() == T!['('] && right_paren.kind() == T![')'] =>
         {
             let mut input_derives = FxHashSet::default();
             let mut current_derive = String::new();
@@ -218,7 +217,7 @@ fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result<FxHashSet<Strin
             .skip(1)
             .take_while(|token| token != &right_paren)
         {
-            if SyntaxKind::COMMA == token.kind() {
+            if T![,] == token.kind() {
                 if !current_derive.is_empty() {
                     input_derives.insert(current_derive);
                     current_derive = String::new();
@@ -93,11 +93,11 @@ fn completion_match(ctx: &CompletionContext) -> Option<(ImplCompletionKind, Synt
         // `impl .. { const $0 }`
         // ERROR 0
         // CONST_KW <- *
-        SyntaxKind::CONST_KW => 0,
+        T![const] => 0,
         // `impl .. { fn/type $0 }`
         // FN/TYPE_ALIAS 0
         // FN_KW <- *
-        SyntaxKind::FN_KW | SyntaxKind::TYPE_KW => 0,
+        T![fn] | T![type] => 0,
         // `impl .. { fn/type/const foo$0 }`
         // FN/TYPE_ALIAS/CONST 1
         // NAME 0
@@ -121,7 +121,7 @@ fn completion_match(ctx: &CompletionContext) -> Option<(ImplCompletionKind, Synt
     let impl_def = ast::Impl::cast(impl_item.parent()?.parent()?)?;
     let kind = match impl_item.kind() {
         // `impl ... { const $0 fn/type/const }`
-        _ if token.kind() == SyntaxKind::CONST_KW => ImplCompletionKind::Const,
+        _ if token.kind() == T![const] => ImplCompletionKind::Const,
         SyntaxKind::CONST | SyntaxKind::ERROR => ImplCompletionKind::Const,
         SyntaxKind::TYPE_ALIAS => ImplCompletionKind::TypeAlias,
         SyntaxKind::FN => ImplCompletionKind::Fn,
@@ -5,7 +5,7 @@ use syntax::{
     ast::{self, LoopBodyOwner},
     match_ast, AstNode, Direction, NodeOrToken, SyntaxElement,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken,
+    SyntaxNode, SyntaxToken, T,
 };

 #[cfg(test)]
@@ -119,7 +119,7 @@ pub(crate) fn unsafe_is_prev(element: SyntaxElement) -> bool {
     element
         .into_token()
         .and_then(|it| previous_non_trivia_token(it))
-        .filter(|it| it.kind() == UNSAFE_KW)
+        .filter(|it| it.kind() == T![unsafe])
         .is_some()
 }
 #[test]
@@ -131,7 +131,7 @@ pub(crate) fn if_is_prev(element: SyntaxElement) -> bool {
     element
         .into_token()
         .and_then(|it| previous_non_trivia_token(it))
-        .filter(|it| it.kind() == IF_KW)
+        .filter(|it| it.kind() == T![if])
         .is_some()
 }

@@ -139,7 +139,7 @@ pub(crate) fn fn_is_prev(element: SyntaxElement) -> bool {
     element
         .into_token()
         .and_then(|it| previous_non_trivia_token(it))
-        .filter(|it| it.kind() == FN_KW)
+        .filter(|it| it.kind() == T![fn])
         .is_some()
 }
 #[test]
@@ -154,7 +154,7 @@ pub(crate) fn for_is_prev2(element: SyntaxElement) -> bool {
         .into_token()
         .and_then(|it| previous_non_trivia_token(it))
         .and_then(|it| previous_non_trivia_token(it))
-        .filter(|it| it.kind() == FOR_KW)
+        .filter(|it| it.kind() == T![for])
         .is_some()
 }
 #[test]
@@ -21,7 +21,7 @@ use ide_db::{
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, NameOwner},
-    match_ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TokenAtOffset,
+    match_ast, AstNode, SyntaxNode, TextRange, TokenAtOffset, T,
 };

 use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo, SymbolKind};
@@ -203,7 +203,7 @@ fn get_struct_def_name_for_struct_literal_search(
     position: FilePosition,
 ) -> Option<ast::Name> {
     if let TokenAtOffset::Between(ref left, ref right) = syntax.token_at_offset(position.offset) {
-        if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN {
+        if right.kind() != T!['{'] && right.kind() != T!['('] {
             return None;
         }
         if let Some(name) =
@@ -230,7 +230,7 @@ fn get_enum_def_name_for_struct_literal_search(
     position: FilePosition,
 ) -> Option<ast::Name> {
     if let TokenAtOffset::Between(ref left, ref right) = syntax.token_at_offset(position.offset) {
-        if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN {
+        if right.kind() != T!['{'] && right.kind() != T!['('] {
             return None;
         }
         if let Some(name) =
@@ -255,8 +255,7 @@ fn try_find_self_references(
     syntax: &SyntaxNode,
     position: FilePosition,
 ) -> Option<RangeInfo<ReferenceSearchResult>> {
-    let self_token =
-        syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW)?;
+    let self_token = syntax.token_at_offset(position.offset).find(|t| t.kind() == T![self])?;
     let parent = self_token.parent();
     match_ast! {
         match parent {
@@ -119,7 +119,7 @@ fn is_metavariable(element: SyntaxElement) -> Option<TextRange> {
     let tok = element.as_token()?;
     match tok.kind() {
         kind if kind == SyntaxKind::IDENT || kind.is_keyword() => {
-            if let Some(_dollar) = tok.prev_token().filter(|t| t.kind() == SyntaxKind::DOLLAR) {
+            if let Some(_dollar) = tok.prev_token().filter(|t| t.kind() == T![$]) {
                 return Some(tok.text_range());
             }
         }
@@ -15,8 +15,16 @@ use super::*;
 // let _ = b"e";
 // let _ = br"f";
 // }
-pub(crate) const LITERAL_FIRST: TokenSet =
-    TokenSet::new(&[TRUE_KW, FALSE_KW, INT_NUMBER, FLOAT_NUMBER, BYTE, CHAR, STRING, BYTE_STRING]);
+pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
+    T![true],
+    T![false],
+    INT_NUMBER,
+    FLOAT_NUMBER,
+    BYTE,
+    CHAR,
+    STRING,
+    BYTE_STRING,
+]);

 pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
     if !p.at_ts(LITERAL_FIRST) {
@@ -27,19 +27,19 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
 }

 pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
-    FN_KW,
-    STRUCT_KW,
-    ENUM_KW,
-    IMPL_KW,
-    TRAIT_KW,
-    CONST_KW,
-    STATIC_KW,
-    LET_KW,
-    MOD_KW,
-    PUB_KW,
-    CRATE_KW,
-    USE_KW,
-    MACRO_KW,
+    T![fn],
+    T![struct],
+    T![enum],
+    T![impl],
+    T![trait],
+    T![const],
+    T![static],
+    T![let],
+    T![mod],
+    T![pub],
+    T![crate],
+    T![use],
+    T![macro],
     T![;],
 ]);

@@ -110,7 +110,7 @@ fn choose_type_params_over_qpath(p: &Parser) -> bool {
     if !p.at(T![<]) {
         return false;
     }
-    if p.nth(1) == T![#] || p.nth(1) == T![>] || p.nth(1) == CONST_KW {
+    if p.nth(1) == T![#] || p.nth(1) == T![>] || p.nth(1) == T![const] {
         return true;
     }
     (p.nth(1) == LIFETIME_IDENT || p.nth(1) == IDENT)
@@ -83,7 +83,7 @@ fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) {
 }

 const PAT_RECOVERY_SET: TokenSet =
-    TokenSet::new(&[LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]);
+    TokenSet::new(&[T![let], T![if], T![while], T![loop], T![match], T![')'], T![,]]);

 fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
     let m = match p.nth(0) {
@@ -25,7 +25,7 @@ fn generic_param_list(p: &mut Parser) {
     match p.current() {
         LIFETIME_IDENT => lifetime_param(p, m),
         IDENT => type_param(p, m),
-        CONST_KW => const_param(p, m),
+        T![const] => const_param(p, m),
         _ => {
             m.abandon(p);
             p.err_and_bump("expected type parameter")
@@ -66,7 +66,7 @@ fn type_param(p: &mut Parser, m: Marker) {
 // test const_param
 // struct S<const N: u32>;
 fn const_param(p: &mut Parser, m: Marker) {
-    assert!(p.at(CONST_KW));
+    assert!(p.at(T![const]));
     p.bump(T![const]);
     name(p);
     types::ascription(p);
@@ -133,7 +133,7 @@ impl ast::Attr {
         first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind);

     match (first_token_kind, second_token_kind) {
-        (Some(SyntaxKind::POUND), Some(T![!])) => AttrKind::Inner,
+        (Some(T![#]), Some(T![!])) => AttrKind::Inner,
         _ => AttrKind::Outer,
     }
 }