mirror of
https://github.com/rust-lang/rust-analyzer
synced 2025-01-13 13:48:50 +00:00
Merge #3513
3513: Completion in macros r=matklad a=flodiebold

I experimented a bit with completion in macros. It's kind of working, but there are a lot of rough edges.

- I'm trying to expand the macro call with the inserted fake token. This requires some hacky additions on the HIR level to be able to do "hypothetical" expansions. There should probably be a nicer API for this, if we want to do it this way. I'm not sure whether it's worth it, because we still can't do a lot if the original macro call didn't expand in nearly the same way. E.g. if we have something like `println!("", x<|>)` the expansions will look the same and everything is fine; but in that case we could maybe have achieved the same result in a simpler way. If we have something like `m!(<|>)` where `m!()` doesn't even expand or expands to something very different, we don't really know what to do anyway.
- Relatedly, there are a lot of cases where this doesn't work because either the original call or the hypothetical call doesn't expand. E.g. if we have `m!(x.<|>)` the original token tree doesn't parse as an expression; if we have `m!(match x { <|> })` the hypothetical token tree doesn't parse. It would be nice if we could have better error recovery in these cases.

Co-authored-by: Florian Diebold <flodiebold@gmail.com>
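For concreteness, here is the kind of code this change targets (editorial illustration only, mirroring the test fixtures added below; `<|>` marks the cursor, and the snippet is deliberately incomplete because `x` stands for a partially typed field name):

    // A trivial macro that re-emits its argument expression unchanged.
    macro_rules! m {
        ($e:expr) => { $e };
    }

    struct A {
        the_field: u32,
    }

    fn foo(a: A) {
        // Completion after `a.` should offer `the_field` even though the cursor
        // sits inside a macro call. The approach in this PR re-expands `m!` with
        // a "hypothetical" argument that has a fake identifier inserted at the
        // cursor, then maps that fake token into the expansion.
        m!(a.x /* <|> */);
    }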
Commit beb4f49541

14 changed files with 529 additions and 43 deletions
Cargo.lock (generated)

@@ -960,6 +960,7 @@ name = "ra_hir"
 version = "0.1.0"
 dependencies = [
  "either",
+ "itertools",
  "log",
  "ra_db",
  "ra_hir_def",
@@ -12,6 +12,8 @@ log = "0.4.8"
 rustc-hash = "1.1.0"
 either = "1.5.3"
 
+itertools = "0.8.2"
+
 ra_syntax = { path = "../ra_syntax" }
 ra_db = { path = "../ra_db" }
 ra_prof = { path = "../ra_prof" }
@@ -6,7 +6,7 @@ use std::{cell::RefCell, fmt, iter::successors};
 
 use hir_def::{
     resolver::{self, HasResolver, Resolver},
-    TraitId,
+    AsMacroCall, TraitId,
 };
 use hir_expand::ExpansionInfo;
 use ra_db::{FileId, FileRange};
@@ -70,6 +70,20 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         Some(node)
     }
 
+    pub fn expand_hypothetical(
+        &self,
+        actual_macro_call: &ast::MacroCall,
+        hypothetical_args: &ast::TokenTree,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        let macro_call =
+            self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
+        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
+        let macro_call_id = macro_call
+            .as_call_id(self.db, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?;
+        hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map)
+    }
+
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         let parent = token.parent();
         let parent = self.find_file(parent);
@@ -104,6 +118,25 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         node.ancestors_with_macros(self.db).map(|it| it.value)
     }
 
+    pub fn ancestors_at_offset_with_macros(
+        &self,
+        node: &SyntaxNode,
+        offset: TextUnit,
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+        use itertools::Itertools;
+        node.token_at_offset(offset)
+            .map(|token| self.ancestors_with_macros(token.parent()))
+            .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+    }
+
+    pub fn find_node_at_offset_with_macros<N: AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextUnit,
+    ) -> Option<N> {
+        self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
+    }
+
     pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
         self.analyze(expr.syntax()).type_of(self.db, &expr)
     }
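Editorial note (not part of the diff): a rough sketch of how a caller might use the new `expand_hypothetical` API added above. The types and the method come from the hunks in this commit; the driver function itself is hypothetical — the real caller is `CompletionContext::new`, shown further down.

    // Illustrative only. Assumes a `Semantics` handle, the macro call under the
    // cursor in the real file, a copy of its token tree with a fake identifier
    // inserted at the cursor, and that fake token itself.
    fn expansion_for_completion(
        sema: &hir::Semantics<'_, ra_ide_db::RootDatabase>,
        actual_call: &ra_syntax::ast::MacroCall,
        fake_args: &ra_syntax::ast::TokenTree,
        fake_token: ra_syntax::SyntaxToken,
    ) -> Option<(ra_syntax::SyntaxNode, ra_syntax::SyntaxToken)> {
        // Expand the call as if `fake_args` had been written, and get back both
        // the expanded tree and the fake token mapped into that tree; completion
        // can then walk the ancestors of the mapped token.
        sema.expand_hypothetical(actual_call, fake_args, fake_token)
    }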
@@ -72,6 +72,30 @@ pub trait AstDatabase: SourceDatabase {
     fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;
 }
 
+/// This expands the given macro call, but with different arguments. This is
+/// used for completion, where we want to see what 'would happen' if we insert a
+/// token. The `token_to_map` mapped down into the expansion, with the mapped
+/// token returned.
+pub fn expand_hypothetical(
+    db: &impl AstDatabase,
+    actual_macro_call: MacroCallId,
+    hypothetical_args: &ra_syntax::ast::TokenTree,
+    token_to_map: ra_syntax::SyntaxToken,
+) -> Option<(SyntaxNode, ra_syntax::SyntaxToken)> {
+    let macro_file = MacroFile { macro_call_id: actual_macro_call };
+    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap();
+    let range =
+        token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
+    let token_id = tmap_1.token_by_range(range)?;
+    let macro_def = expander(db, actual_macro_call)?;
+    let (node, tmap_2) =
+        parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?;
+    let token_id = macro_def.0.map_id_down(token_id);
+    let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
+    let token = ra_syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?;
+    Some((node.syntax_node(), token))
+}
+
 pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
     let map =
         db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it));
@@ -130,15 +154,42 @@ pub(crate) fn macro_expand(
     db: &dyn AstDatabase,
     id: MacroCallId,
 ) -> Result<Arc<tt::Subtree>, String> {
+    macro_expand_with_arg(db, id, None)
+}
+
+fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
     let lazy_id = match id {
         MacroCallId::LazyMacro(id) => id,
-        MacroCallId::EagerMacro(id) => {
-            return Ok(db.lookup_intern_eager_expansion(id).subtree);
+        MacroCallId::EagerMacro(_id) => {
+            return None;
         }
     };
 
     let loc = db.lookup_intern_macro(lazy_id);
-    let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
+    let macro_rules = db.macro_def(loc.def)?;
+    Some(macro_rules)
+}
+
+fn macro_expand_with_arg(
+    db: &dyn AstDatabase,
+    id: MacroCallId,
+    arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
+) -> Result<Arc<tt::Subtree>, String> {
+    let lazy_id = match id {
+        MacroCallId::LazyMacro(id) => id,
+        MacroCallId::EagerMacro(id) => {
+            if arg.is_some() {
+                return Err(
+                    "hypothetical macro expansion not implemented for eager macro".to_owned()
+                );
+            } else {
+                return Ok(db.lookup_intern_eager_expansion(id).subtree);
+            }
+        }
+    };
+
+    let loc = db.lookup_intern_macro(lazy_id);
+    let macro_arg = arg.or_else(|| db.macro_arg(id)).ok_or("Fail to args in to tt::TokenTree")?;
 
     let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
     let tt = macro_rules.0.expand(db, lazy_id, &macro_arg.0).map_err(|err| format!("{:?}", err))?;
@@ -162,12 +213,24 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
 pub(crate) fn parse_macro(
     db: &dyn AstDatabase,
     macro_file: MacroFile,
 ) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
+    parse_macro_with_arg(db, macro_file, None)
+}
+
+pub fn parse_macro_with_arg(
+    db: &dyn AstDatabase,
+    macro_file: MacroFile,
+    arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
+) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
     let _p = profile("parse_macro_query");
 
     let macro_call_id = macro_file.macro_call_id;
-    let tt = db
-        .macro_expand(macro_call_id)
+    let expansion = if let Some(arg) = arg {
+        macro_expand_with_arg(db, macro_call_id, Some(arg))
+    } else {
+        db.macro_expand(macro_call_id)
+    };
+    let tt = expansion
         .map_err(|err| {
             // Note:
             // The final goal we would like to make all parse_macro success,
@@ -38,7 +38,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
 fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
     for receiver in receiver.autoderef(ctx.db) {
         for (field, ty) in receiver.fields(ctx.db) {
-            if ctx.module.map_or(false, |m| !field.is_visible_from(ctx.db, m)) {
+            if ctx.scope().module().map_or(false, |m| !field.is_visible_from(ctx.db, m)) {
                 // Skip private field. FIXME: If the definition location of the
                 // field is editable, we should show the completion
                 continue;
@@ -53,7 +53,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty
 }
 
 fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
-    if let Some(krate) = ctx.module.map(|it| it.krate()) {
+    if let Some(krate) = ctx.krate {
         let mut seen_methods = FxHashSet::default();
         let traits_in_scope = ctx.scope().traits_in_scope();
         receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| {
@@ -620,4 +620,102 @@ mod tests {
         "###
         );
     }
+
+    #[test]
+    fn works_in_simple_macro_1() {
+        assert_debug_snapshot!(
+            do_ref_completion(
+                r"
+                macro_rules! m { ($e:expr) => { $e } }
+                struct A { the_field: u32 }
+                fn foo(a: A) {
+                    m!(a.x<|>)
+                }
+                ",
+            ),
+            @r###"
+        [
+            CompletionItem {
+                label: "the_field",
+                source_range: [156; 157),
+                delete: [156; 157),
+                insert: "the_field",
+                kind: Field,
+                detail: "u32",
+            },
+        ]
+        "###
+        );
+    }
+
+    #[test]
+    fn works_in_simple_macro_recursive() {
+        assert_debug_snapshot!(
+            do_ref_completion(
+                r"
+                macro_rules! m { ($e:expr) => { $e } }
+                struct A { the_field: u32 }
+                fn foo(a: A) {
+                    m!(a.x<|>)
+                }
+                ",
+            ),
+            @r###"
+        [
+            CompletionItem {
+                label: "the_field",
+                source_range: [156; 157),
+                delete: [156; 157),
+                insert: "the_field",
+                kind: Field,
+                detail: "u32",
+            },
+        ]
+        "###
+        );
+    }
+
+    #[test]
+    fn works_in_simple_macro_2() {
+        // this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery
+        assert_debug_snapshot!(
+            do_ref_completion(
+                r"
+                macro_rules! m { ($e:expr) => { $e } }
+                struct A { the_field: u32 }
+                fn foo(a: A) {
+                    m!(a.<|>)
+                }
+                ",
+            ),
+            @r###"[]"###
+        );
+    }
+
+    #[test]
+    fn works_in_simple_macro_recursive_1() {
+        assert_debug_snapshot!(
+            do_ref_completion(
+                r"
+                macro_rules! m { ($e:expr) => { $e } }
+                struct A { the_field: u32 }
+                fn foo(a: A) {
+                    m!(m!(m!(a.x<|>)))
+                }
+                ",
+            ),
+            @r###"
+        [
+            CompletionItem {
+                label: "the_field",
+                source_range: [162; 163),
+                delete: [162; 163),
+                insert: "the_field",
+                kind: Field,
+                detail: "u32",
+            },
+        ]
+        "###
+        );
+    }
 }
@@ -79,6 +79,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
 }
 
 fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
+    // FIXME move this to CompletionContext and make it handle macros
     for node in leaf.parent().ancestors() {
         if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
             break;
@@ -1,4 +1,4 @@
-//! Completion of paths, including when writing a single name.
+//! Completion of paths, i.e. `some::prefix::<|>`.
 
 use hir::{Adt, HasVisibility, PathResolution, ScopeDef};
 use ra_syntax::AstNode;
@@ -48,7 +48,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
             };
             // Iterate assoc types separately
            // FIXME: complete T::AssocType
-            let krate = ctx.module.map(|m| m.krate());
+            let krate = ctx.krate;
             if let Some(krate) = krate {
                 let traits_in_scope = ctx.scope().traits_in_scope();
                 ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
@@ -934,4 +934,37 @@ mod tests {
         "###
         );
     }
+
+    #[test]
+    fn completes_in_simple_macro_call() {
+        let completions = do_reference_completion(
+            r#"
+                macro_rules! m { ($e:expr) => { $e } }
+                fn main() { m!(self::f<|>); }
+                fn foo() {}
+            "#,
+        );
+        assert_debug_snapshot!(completions, @r###"
+        [
+            CompletionItem {
+                label: "foo()",
+                source_range: [93; 94),
+                delete: [93; 94),
+                insert: "foo()$0",
+                kind: Function,
+                lookup: "foo",
+                detail: "fn foo()",
+            },
+            CompletionItem {
+                label: "main()",
+                source_range: [93; 94),
+                delete: [93; 94),
+                insert: "main()$0",
+                kind: Function,
+                lookup: "main",
+                detail: "fn main()",
+            },
+        ]
+        "###);
+    }
 }
@@ -86,4 +86,22 @@ mod tests {
         ]
         "###);
     }
+
+    #[test]
+    fn completes_in_simple_macro_call() {
+        // FIXME: doesn't work yet because of missing error recovery in macro expansion
+        let completions = complete(
+            r"
+            macro_rules! m { ($e:expr) => { $e } }
+            enum E { X }
+
+            fn foo() {
+                m!(match E::X {
+                    <|>
+                })
+            }
+            ",
+        );
+        assert_debug_snapshot!(completions, @r###"[]"###);
+    }
 }
@@ -67,8 +67,8 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
 
 fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder {
     let edit = {
-        let receiver_range =
-            ctx.dot_receiver.as_ref().expect("no receiver available").syntax().text_range();
+        let receiver_syntax = ctx.dot_receiver.as_ref().expect("no receiver available").syntax();
+        let receiver_range = ctx.sema.original_range(receiver_syntax).range;
         let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end());
         TextEdit::replace(delete_range, snippet.to_string())
     };
@@ -279,4 +279,65 @@ mod tests {
         "###
         );
     }
+
+    #[test]
+    fn works_in_simple_macro() {
+        assert_debug_snapshot!(
+            do_postfix_completion(
+                r#"
+                macro_rules! m { ($e:expr) => { $e } }
+                fn main() {
+                    let bar: u8 = 12;
+                    m!(bar.b<|>)
+                }
+                "#,
+            ),
+            @r###"
+        [
+            CompletionItem {
+                label: "box",
+                source_range: [149; 150),
+                delete: [145; 150),
+                insert: "Box::new(bar)",
+                detail: "Box::new(expr)",
+            },
+            CompletionItem {
+                label: "dbg",
+                source_range: [149; 150),
+                delete: [145; 150),
+                insert: "dbg!(bar)",
+                detail: "dbg!(expr)",
+            },
+            CompletionItem {
+                label: "match",
+                source_range: [149; 150),
+                delete: [145; 150),
+                insert: "match bar {\n    ${1:_} => {$0\\},\n}",
+                detail: "match expr {}",
+            },
+            CompletionItem {
+                label: "not",
+                source_range: [149; 150),
+                delete: [145; 150),
+                insert: "!bar",
+                detail: "!expr",
+            },
+            CompletionItem {
+                label: "ref",
+                source_range: [149; 150),
+                delete: [145; 150),
+                insert: "&bar",
+                detail: "&expr",
+            },
+            CompletionItem {
+                label: "refm",
+                source_range: [149; 150),
+                delete: [145; 150),
+                insert: "&mut bar",
+                detail: "&mut expr",
+            },
+        ]
+        "###
+        );
+    }
 }
@@ -153,4 +153,29 @@ mod tests {
         ]
         "###);
     }
+
+    #[test]
+    fn test_record_literal_field_in_simple_macro() {
+        let completions = complete(
+            r"
+            macro_rules! m { ($e:expr) => { $e } }
+            struct A { the_field: u32 }
+            fn foo() {
+                m!(A { the<|> })
+            }
+            ",
+        );
+        assert_debug_snapshot!(completions, @r###"
+        [
+            CompletionItem {
+                label: "the_field",
+                source_range: [137; 140),
+                delete: [137; 140),
+                insert: "the_field",
+                kind: Field,
+                detail: "u32",
+            },
+        ]
+        "###);
+    }
 }
@@ -87,4 +87,32 @@ mod tests {
         ]
         "###);
     }
+
+    #[test]
+    fn test_record_pattern_field_in_simple_macro() {
+        let completions = complete(
+            r"
+            macro_rules! m { ($e:expr) => { $e } }
+            struct S { foo: u32 }
+
+            fn process(f: S) {
+                m!(match f {
+                    S { f<|>: 92 } => (),
+                })
+            }
+            ",
+        );
+        assert_debug_snapshot!(completions, @r###"
+        [
+            CompletionItem {
+                label: "foo",
+                source_range: [171; 172),
+                delete: [171; 172),
+                insert: "foo",
+                kind: Field,
+                detail: "u32",
+            },
+        ]
+        "###);
+    }
 }
@@ -1,4 +1,4 @@
-//! FIXME: write short doc here
+//! Completion of names from the current scope, e.g. locals and imported items.
 
 use crate::completion::{CompletionContext, Completions};
 
@@ -797,4 +797,72 @@ mod tests {
         "###
         )
     }
+
+    #[test]
+    fn completes_in_simple_macro_1() {
+        assert_debug_snapshot!(
+            do_reference_completion(
+                r"
+                macro_rules! m { ($e:expr) => { $e } }
+                fn quux(x: i32) {
+                    let y = 92;
+                    m!(<|>);
+                }
+                "
+            ),
+            @"[]"
+        );
+    }
+
+    #[test]
+    fn completes_in_simple_macro_2() {
+        assert_debug_snapshot!(
+            do_reference_completion(
+                r"
+                macro_rules! m { ($e:expr) => { $e } }
+                fn quux(x: i32) {
+                    let y = 92;
+                    m!(x<|>);
+                }
+                "
+            ),
+            @r###"
+        [
+            CompletionItem {
+                label: "m!",
+                source_range: [145; 146),
+                delete: [145; 146),
+                insert: "m!($0)",
+                kind: Macro,
+                detail: "macro_rules! m",
+            },
+            CompletionItem {
+                label: "quux(…)",
+                source_range: [145; 146),
+                delete: [145; 146),
+                insert: "quux(${1:x})$0",
+                kind: Function,
+                lookup: "quux",
+                detail: "fn quux(x: i32)",
+            },
+            CompletionItem {
+                label: "x",
+                source_range: [145; 146),
+                delete: [145; 146),
+                insert: "x",
+                kind: Binding,
+                detail: "i32",
+            },
+            CompletionItem {
+                label: "y",
+                source_range: [145; 146),
+                delete: [145; 146),
+                insert: "y",
+                kind: Binding,
+                detail: "i32",
+            },
+        ]
+        "###
+        );
+    }
 }
@@ -5,7 +5,7 @@ use ra_db::SourceDatabase;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::{find_covering_element, find_node_at_offset},
-    ast, AstNode, SourceFile,
+    ast, AstNode,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,
 };
@@ -20,8 +20,11 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) sema: Semantics<'a, RootDatabase>,
     pub(super) db: &'a RootDatabase,
     pub(super) offset: TextUnit,
+    /// The token before the cursor, in the original file.
+    pub(super) original_token: SyntaxToken,
+    /// The token before the cursor, in the macro-expanded file.
     pub(super) token: SyntaxToken,
-    pub(super) module: Option<hir::Module>,
+    pub(super) krate: Option<hir::Crate>,
     pub(super) name_ref_syntax: Option<ast::NameRef>,
     pub(super) function_syntax: Option<ast::FnDef>,
     pub(super) use_item_syntax: Option<ast::UseItem>,
@@ -67,15 +70,20 @@ impl<'a> CompletionContext<'a> {
             let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
             parse.reparse(&edit).tree()
         };
+        let fake_ident_token =
+            file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
 
-        let module = sema.to_module_def(position.file_id);
-        let token = original_file.syntax().token_at_offset(position.offset).left_biased()?;
+        let krate = sema.to_module_def(position.file_id).map(|m| m.krate());
+        let original_token =
+            original_file.syntax().token_at_offset(position.offset).left_biased()?;
+        let token = sema.descend_into_macros(original_token.clone());
         let mut ctx = CompletionContext {
             sema,
             db,
+            original_token,
             token,
             offset: position.offset,
-            module,
+            krate,
             name_ref_syntax: None,
             function_syntax: None,
             use_item_syntax: None,
@@ -95,15 +103,57 @@ impl<'a> CompletionContext<'a> {
             has_type_args: false,
             dot_receiver_is_ambiguous_float_literal: false,
         };
-        ctx.fill(&original_file, file_with_fake_ident, position.offset);
+
+        let mut original_file = original_file.syntax().clone();
+        let mut hypothetical_file = file_with_fake_ident.syntax().clone();
+        let mut offset = position.offset;
+        let mut fake_ident_token = fake_ident_token;
+
+        // Are we inside a macro call?
+        while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
+            find_node_at_offset::<ast::MacroCall>(&original_file, offset),
+            find_node_at_offset::<ast::MacroCall>(&hypothetical_file, offset),
+        ) {
+            if actual_macro_call.path().as_ref().map(|s| s.syntax().text())
+                != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text())
+            {
+                break;
+            }
+            let hypothetical_args = match macro_call_with_fake_ident.token_tree() {
+                Some(tt) => tt,
+                None => break,
+            };
+            if let (Some(actual_expansion), Some(hypothetical_expansion)) = (
+                ctx.sema.expand(&actual_macro_call),
+                ctx.sema.expand_hypothetical(
+                    &actual_macro_call,
+                    &hypothetical_args,
+                    fake_ident_token,
+                ),
+            ) {
+                let new_offset = hypothetical_expansion.1.text_range().start();
+                if new_offset >= actual_expansion.text_range().end() {
+                    break;
+                }
+                original_file = actual_expansion;
+                hypothetical_file = hypothetical_expansion.0;
+                fake_ident_token = hypothetical_expansion.1;
+                offset = new_offset;
+            } else {
+                break;
+            }
+        }
+
+        ctx.fill(&original_file, hypothetical_file, offset);
         Some(ctx)
     }
 
     // The range of the identifier that is being completed.
     pub(crate) fn source_range(&self) -> TextRange {
+        // check kind of macro-expanded token, but use range of original token
         match self.token.kind() {
             // workaroud when completion is triggered by trigger characters.
-            IDENT => self.token.text_range(),
+            IDENT => self.original_token.text_range(),
             _ => TextRange::offset_len(self.offset, 0.into()),
         }
     }
@@ -114,27 +164,24 @@ impl<'a> CompletionContext<'a> {
 
     fn fill(
         &mut self,
-        original_file: &ast::SourceFile,
-        file_with_fake_ident: ast::SourceFile,
+        original_file: &SyntaxNode,
+        file_with_fake_ident: SyntaxNode,
         offset: TextUnit,
     ) {
         // First, let's try to complete a reference to some declaration.
-        if let Some(name_ref) =
-            find_node_at_offset::<ast::NameRef>(file_with_fake_ident.syntax(), offset)
-        {
+        if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&file_with_fake_ident, offset) {
             // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
             // See RFC#1685.
             if is_node::<ast::Param>(name_ref.syntax()) {
                 self.is_param = true;
                 return;
             }
-            self.classify_name_ref(original_file, name_ref);
+            self.classify_name_ref(original_file, name_ref, offset);
         }
 
         // Otherwise, see if this is a declaration. We can use heuristics to
         // suggest declaration names, see `CompletionKind::Magic`.
-        if let Some(name) = find_node_at_offset::<ast::Name>(file_with_fake_ident.syntax(), offset)
-        {
+        if let Some(name) = find_node_at_offset::<ast::Name>(&file_with_fake_ident, offset) {
             if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
                 let parent = bind_pat.syntax().parent();
                 if parent.clone().and_then(ast::MatchArm::cast).is_some()
@@ -148,23 +195,29 @@ impl<'a> CompletionContext<'a> {
                 return;
             }
             if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
-                self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset);
+                self.record_lit_pat =
+                    self.sema.find_node_at_offset_with_macros(&original_file, offset);
             }
         }
     }
 
-    fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) {
+    fn classify_name_ref(
+        &mut self,
+        original_file: &SyntaxNode,
+        name_ref: ast::NameRef,
+        offset: TextUnit,
+    ) {
         self.name_ref_syntax =
-            find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start());
+            find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
         let name_range = name_ref.syntax().text_range();
         if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() {
-            self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);
+            self.record_lit_syntax =
+                self.sema.find_node_at_offset_with_macros(&original_file, offset);
         }
 
         self.impl_def = self
-            .token
-            .parent()
-            .ancestors()
+            .sema
+            .ancestors_with_macros(self.token.parent())
             .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
             .find_map(ast::ImplDef::cast);
 
@@ -183,12 +236,12 @@ impl<'a> CompletionContext<'a> {
             _ => (),
         }
 
-        self.use_item_syntax = self.token.parent().ancestors().find_map(ast::UseItem::cast);
+        self.use_item_syntax =
+            self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::UseItem::cast);
 
         self.function_syntax = self
-            .token
-            .parent()
-            .ancestors()
+            .sema
+            .ancestors_with_macros(self.token.parent())
             .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
             .find_map(ast::FnDef::cast);
 
@@ -242,7 +295,7 @@ impl<'a> CompletionContext<'a> {
 
         if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) {
             if let Some(if_expr) =
-                find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off)
+                self.sema.find_node_at_offset_with_macros::<ast::IfExpr>(original_file, off)
             {
                 if if_expr.syntax().text_range().end()
                     < name_ref.syntax().text_range().start()
@@ -259,7 +312,7 @@ impl<'a> CompletionContext<'a> {
             self.dot_receiver = field_expr
                 .expr()
                 .map(|e| e.syntax().text_range())
-                .and_then(|r| find_node_with_range(original_file.syntax(), r));
+                .and_then(|r| find_node_with_range(original_file, r));
             self.dot_receiver_is_ambiguous_float_literal =
                 if let Some(ast::Expr::Literal(l)) = &self.dot_receiver {
                     match l.kind() {
@@ -275,7 +328,7 @@ impl<'a> CompletionContext<'a> {
             self.dot_receiver = method_call_expr
                 .expr()
                 .map(|e| e.syntax().text_range())
-                .and_then(|r| find_node_with_range(original_file.syntax(), r));
+                .and_then(|r| find_node_with_range(original_file, r));
             self.is_call = true;
         }
     }
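Editorial note (not part of the diff): the `while let` loop in `CompletionContext::new` above repeats because macro calls can be nested; each iteration descends one expansion level and moves the cursor offset into it. The recursive case it handles is the one exercised by the `works_in_simple_macro_recursive_1` test earlier in this commit (as there, the snippet is an intentionally incomplete completion fixture and `x` is a partially typed field name):

    macro_rules! m { ($e:expr) => { $e } }

    struct A { the_field: u32 }

    fn foo(a: A) {
        // Three nested calls: completion after `a.` still offers `the_field`,
        // because the loop keeps re-expanding until it reaches the innermost
        // expansion that contains the (fake) cursor token.
        m!(m!(m!(a.x /* <|> */)))
    }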
@@ -247,6 +247,7 @@ impl<'a> TtIter<'a> {
         ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind);
 
         if !sink.cursor.is_root() || sink.error {
+            // FIXME better recovery in this case would help completion inside macros immensely
             return Err(());
         }
 
@@ -375,7 +376,8 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> Result<Option<Fragment>, Ex
             return Ok(Some(Fragment::Tokens(tt)));
         }
     };
-    let tt = input.expect_fragment(fragment).map_err(|()| err!())?;
+    let tt =
+        input.expect_fragment(fragment).map_err(|()| err!("fragment did not parse as {}", kind))?;
     let fragment = if kind == "expr" { Fragment::Ast(tt) } else { Fragment::Tokens(tt) };
     Ok(Some(fragment))
 }