Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-11-16 17:58:16 +00:00
Make attribute completions more ast based
parent e329b7742b
commit ce47d13101
4 changed files with 110 additions and 63 deletions
@@ -5,9 +5,10 @@
 use hir::HasAttrs;
 use ide_db::helpers::generated_lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES};
 use itertools::Itertools;
 use once_cell::sync::Lazy;
-use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{algo::non_trivia_sibling, ast, AstNode, Direction, NodeOrToken, SyntaxKind, T};
+use rustc_hash::FxHashMap;
+use syntax::{algo::non_trivia_sibling, ast, AstNode, Direction, SyntaxKind, T};
 
 use crate::{
     context::CompletionContext,
@@ -303,31 +304,38 @@ const ATTRIBUTES: &[AttrCompletion] = &[
     .prefer_inner(),
 ];
 
-fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Option<FxHashSet<String>> {
-    let (l_paren, r_paren) = derive_input.l_paren_token().zip(derive_input.r_paren_token())?;
-    let mut input_derives = FxHashSet::default();
-    let mut tokens = derive_input
+fn parse_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
+    let r_paren = input.r_paren_token()?;
+    let tokens = input
         .syntax()
         .children_with_tokens()
-        .filter_map(NodeOrToken::into_token)
-        .skip_while(|token| token != &l_paren)
         .skip(1)
-        .take_while(|token| token != &r_paren)
-        .peekable();
-    let mut input = String::new();
-    while tokens.peek().is_some() {
-        for token in tokens.by_ref().take_while(|t| t.kind() != T![,]) {
-            input.push_str(token.text());
-        }
+        .take_while(|it| it.as_token() != Some(&r_paren));
+    let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
+    Some(
+        input_expressions
+            .into_iter()
+            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+            .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
+            .collect::<Vec<ast::Path>>(),
+    )
+}
 
-        if !input.is_empty() {
-            input_derives.insert(input.trim().to_owned());
-        }
-
-        input.clear();
-    }
-
-    Some(input_derives)
+fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
+    let r_paren = input.r_paren_token()?;
+    let tokens = input
+        .syntax()
+        .children_with_tokens()
+        .skip(1)
+        .take_while(|it| it.as_token() != Some(&r_paren));
+    let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
+    Some(
+        input_expressions
+            .into_iter()
+            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+            .filter_map(|mut tokens| ast::Expr::parse(&tokens.join("")).ok())
+            .collect::<Vec<ast::Expr>>(),
+    )
 }
 
 #[test]
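
For readers unfamiliar with the pattern, here is a minimal standalone sketch of the splitting idea shared by the two helpers above: itertools' group_by (renamed chunk_by in newer itertools releases) batches the token stream into runs separated by commas, and each non-comma run is glued back into a string before being reparsed. Plain string tokens stand in for the real SyntaxElement stream, so the snippet is illustrative rather than rust-analyzer API:

use itertools::Itertools;

fn main() {
    // Token texts of a hypothetical `(Clone, serde::Serialize)` token tree,
    // with the surrounding parentheses already skipped.
    let tokens = ["Clone", ",", "serde", "::", "Serialize"];

    // Batch consecutive tokens by "is this a comma?"; the non-comma runs are
    // the path candidates, which the patch then feeds to `ast::Path::parse`.
    let groups = tokens.into_iter().group_by(|tok| *tok == ",");
    let paths: Vec<String> = groups
        .into_iter()
        .filter_map(|(is_comma, group)| (!is_comma).then(|| group.collect::<String>()))
        .collect();

    assert_eq!(paths, ["Clone", "serde::Serialize"]);
}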
@@ -2,7 +2,7 @@
 use hir::HasAttrs;
 use itertools::Itertools;
 use rustc_hash::FxHashMap;
-use syntax::ast;
+use syntax::{ast, SmolStr};
 
 use crate::{
     context::CompletionContext,
@@ -15,26 +15,31 @@ pub(super) fn complete_derive(
     ctx: &CompletionContext,
     derive_input: ast::TokenTree,
 ) {
-    if let Some(existing_derives) = super::parse_comma_sep_input(derive_input) {
+    if let Some(existing_derives) = super::parse_comma_sep_paths(derive_input) {
         for (derive, docs) in get_derive_names_in_scope(ctx) {
+            let label;
             let (label, lookup) = if let Some(derive_completion) = DEFAULT_DERIVE_COMPLETIONS
                 .iter()
                 .find(|derive_completion| derive_completion.label == derive)
             {
                 let mut components = vec![derive_completion.label];
-                components.extend(
-                    derive_completion
-                        .dependencies
+                components.extend(derive_completion.dependencies.iter().filter(|&&dependency| {
+                    !existing_derives
                         .iter()
-                        .filter(|&&dependency| !existing_derives.contains(dependency)),
-                );
+                        .filter_map(|it| it.as_single_name_ref())
+                        .any(|it| it.text() == dependency)
+                }));
                 let lookup = components.join(", ");
-                let label = components.iter().rev().join(", ");
-                (label, Some(lookup))
-            } else if existing_derives.contains(&derive) {
+                label = components.iter().rev().join(", ");
+                (&*label, Some(lookup))
+            } else if existing_derives
+                .iter()
+                .filter_map(|it| it.as_single_name_ref())
+                .any(|it| it.text().as_str() == derive)
+            {
                 continue;
             } else {
-                (derive, None)
+                (&*derive, None)
             };
             let mut item =
                 CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label);
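
As a rough illustration of the label/lookup construction above: when a suggested derive has dependencies, the ones already present in the #[derive(...)] list are dropped and the rest are joined, with the reversed order used for the displayed label. The derive names and dependency list below are made-up stand-ins for DEFAULT_DERIVE_COMPLETIONS entries, not data taken from the patch:

use itertools::Itertools;

fn main() {
    // Hypothetical completion entry: offering `Ord` also pulls in what it needs.
    let offered = "Ord";
    let dependencies = ["PartialOrd", "Eq", "PartialEq"];
    // Derives the user has already written inside `#[derive(...)]`.
    let existing = ["PartialEq"];

    let mut components = vec![offered];
    components.extend(dependencies.iter().filter(|&&dep| !existing.contains(&dep)));

    // `lookup` keeps the offered name first (what the user types to match);
    // `label` lists the dependencies first, mirroring the patch.
    let lookup = components.join(", ");
    let label = components.iter().rev().join(", ");

    assert_eq!(lookup, "Ord, PartialOrd, Eq");
    assert_eq!(label, "Eq, PartialOrd, Ord");
}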
@@ -52,12 +57,12 @@ pub(super) fn complete_derive(
 
 fn get_derive_names_in_scope(
     ctx: &CompletionContext,
-) -> FxHashMap<String, Option<hir::Documentation>> {
+) -> FxHashMap<SmolStr, Option<hir::Documentation>> {
     let mut result = FxHashMap::default();
     ctx.process_all_names(&mut |name, scope_def| {
         if let hir::ScopeDef::MacroDef(mac) = scope_def {
             if mac.kind() == hir::MacroKind::Derive {
-                result.insert(name.to_string(), mac.docs(ctx.db));
+                result.insert(name.to_smol_str(), mac.docs(ctx.db));
             }
         }
     });
@@ -14,17 +14,46 @@ pub(super) fn complete_lint(
     derive_input: ast::TokenTree,
     lints_completions: &[Lint],
 ) {
-    if let Some(existing_lints) = super::parse_comma_sep_input(derive_input) {
-        for lint_completion in
-            lints_completions.iter().filter(|completion| !existing_lints.contains(completion.label))
-        {
-            let mut item = CompletionItem::new(
-                CompletionKind::Attribute,
-                ctx.source_range(),
-                lint_completion.label,
-            );
+    if let Some(existing_lints) = super::parse_comma_sep_paths(derive_input) {
+        for &Lint { label, description } in lints_completions {
+            let (ex_q, ex_name) = {
+                // FIXME: change `Lint`'s label to not store a path in it but split the prefix off instead?
+                let mut parts = label.split("::");
+                let ns_or_label = match parts.next() {
+                    Some(it) => it,
+                    None => continue,
+                };
+                let label = parts.next();
+                match label {
+                    Some(label) => (Some(ns_or_label), label),
+                    None => (None, ns_or_label),
+                }
+            };
+            let repr_already_annotated = existing_lints
+                .iter()
+                .filter_map(|path| {
+                    let q = path.qualifier();
+                    if q.as_ref().and_then(|it| it.qualifier()).is_some() {
+                        return None;
+                    }
+                    Some((q.and_then(|it| it.as_single_name_ref()), path.segment()?.name_ref()?))
+                })
+                .any(|(q, name)| {
+                    let qualifier_matches = match (q, ex_q) {
+                        (None, None) => true,
+                        (None, Some(_)) => false,
+                        (Some(_), None) => false,
+                        (Some(q), Some(ns)) => q.text() == ns,
+                    };
+                    qualifier_matches && name.text() == ex_name
+                });
+            if repr_already_annotated {
+                continue;
+            }
+            let mut item =
+                CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), ex_name);
             item.kind(CompletionItemKind::Attribute)
-                .documentation(hir::Documentation::new(lint_completion.description.to_owned()));
+                .documentation(hir::Documentation::new(description.to_owned()));
             item.add_to(acc)
         }
     }
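
A small self-contained sketch of the new qualifier matching: the lint label is split into an optional tool prefix and a name (e.g. clippy::as_conversions), and a completion is skipped only when an existing path matches on both parts. The split_label helper and the lint names are illustrative, not part of the patch:

// Splits a label like "clippy::as_conversions" into (qualifier, name).
fn split_label(label: &str) -> (Option<&str>, &str) {
    let mut parts = label.split("::");
    let ns_or_label = parts.next().unwrap(); // `split` always yields at least one piece
    match parts.next() {
        Some(name) => (Some(ns_or_label), name),
        None => (None, ns_or_label),
    }
}

fn main() {
    assert_eq!(split_label("clippy::as_conversions"), (Some("clippy"), "as_conversions"));
    assert_eq!(split_label("unused_imports"), (None, "unused_imports"));

    // An existing `#[allow(clippy::as_conversions)]` path, reduced to (qualifier, name)
    // the way the patch reduces `ast::Path`s with a single-segment qualifier.
    let existing = (Some("clippy"), "as_conversions");
    let candidate = split_label("clippy::as_conversions");
    let already_annotated = existing.0 == candidate.0 && existing.1 == candidate.1;
    assert!(already_annotated); // so this lint would not be offered again
}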
@@ -8,29 +8,34 @@ use crate::{
     Completions,
 };
 
-pub(super) fn complete_repr(
-    acc: &mut Completions,
-    ctx: &CompletionContext,
-    derive_input: ast::TokenTree,
-) {
-    if let Some(existing_reprs) = super::parse_comma_sep_input(derive_input) {
-        for repr_completion in REPR_COMPLETIONS {
-            if existing_reprs
+pub(super) fn complete_repr(acc: &mut Completions, ctx: &CompletionContext, input: ast::TokenTree) {
+    if let Some(existing_reprs) = super::parse_comma_sep_expr(input) {
+        for &ReprCompletion { label, snippet, lookup, collides } in REPR_COMPLETIONS {
+            let repr_already_annotated = existing_reprs
                 .iter()
-                .any(|it| repr_completion.label == it || repr_completion.collides.contains(&&**it))
-            {
+                .filter_map(|expr| match expr {
+                    ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+                    ast::Expr::CallExpr(call) => match call.expr()? {
+                        ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+                        _ => return None,
+                    },
+                    _ => None,
+                })
+                .any(|it| {
+                    let text = it.text();
+                    label == text || collides.contains(&text.as_str())
+                });
+            if repr_already_annotated {
                 continue;
             }
-            let mut item = CompletionItem::new(
-                CompletionKind::Attribute,
-                ctx.source_range(),
-                repr_completion.label,
-            );
-
+            let mut item =
+                CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label);
             item.kind(CompletionItemKind::Attribute);
-            if let Some(lookup) = repr_completion.lookup {
+            if let Some(lookup) = lookup {
                 item.lookup_by(lookup);
             }
-            if let Some((snippet, cap)) = repr_completion.snippet.zip(ctx.config.snippet_cap) {
+            if let Some((snippet, cap)) = snippet.zip(ctx.config.snippet_cap) {
                 item.insert_snippet(cap, snippet);
             }
             item.add_to(acc);
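
Finally, a plain-Rust approximation of the repr collision check above: each existing #[repr(...)] argument is reduced to its head identifier (a bare path like C, or the callee of a call like align(4)), and the candidate is suppressed if that identifier equals its label or appears in its collides list. The head_name helper and the packed/align data are assumptions made for the example:

// Reduce a repr argument to its head identifier: "C" -> "C", "align(4)" -> "align",
// mirroring the PathExpr / CallExpr match on `ast::Expr` in the patch.
fn head_name(expr: &str) -> &str {
    match expr.split_once('(') {
        Some((name, _)) => name,
        None => expr,
    }
}

fn main() {
    // What the user already wrote inside `#[repr(...)]`.
    let existing = ["align(4)"];
    // Hypothetical candidate: `packed` cannot be combined with `align`.
    let (label, collides) = ("packed", ["align"]);

    let repr_already_annotated = existing
        .iter()
        .map(|it| head_name(it))
        .any(|name| label == name || collides.contains(&name));
    assert!(repr_already_annotated); // `packed` is filtered out of the completions
}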