diff --git a/Cargo.lock b/Cargo.lock index 80e778bcf7..2e052d267b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -960,6 +960,7 @@ name = "ra_hir" version = "0.1.0" dependencies = [ "either", + "itertools", "log", "ra_db", "ra_hir_def", diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml index 0555a0de7a..266c4cff38 100644 --- a/crates/ra_hir/Cargo.toml +++ b/crates/ra_hir/Cargo.toml @@ -12,6 +12,8 @@ log = "0.4.8" rustc-hash = "1.1.0" either = "1.5.3" +itertools = "0.8.2" + ra_syntax = { path = "../ra_syntax" } ra_db = { path = "../ra_db" } ra_prof = { path = "../ra_prof" } diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs index 965d185a47..3782a9984b 100644 --- a/crates/ra_hir/src/semantics.rs +++ b/crates/ra_hir/src/semantics.rs @@ -6,7 +6,7 @@ use std::{cell::RefCell, fmt, iter::successors}; use hir_def::{ resolver::{self, HasResolver, Resolver}, - TraitId, + AsMacroCall, TraitId, }; use hir_expand::ExpansionInfo; use ra_db::{FileId, FileRange}; @@ -70,6 +70,20 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { Some(node) } + pub fn expand_hypothetical( + &self, + actual_macro_call: &ast::MacroCall, + hypothetical_args: &ast::TokenTree, + token_to_map: SyntaxToken, + ) -> Option<(SyntaxNode, SyntaxToken)> { + let macro_call = + self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); + let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); + let macro_call_id = macro_call + .as_call_id(self.db, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?; + hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map) + } + pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { let parent = token.parent(); let parent = self.find_file(parent); @@ -104,6 +118,25 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { node.ancestors_with_macros(self.db).map(|it| it.value) } + pub fn ancestors_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextUnit, + ) -> impl Iterator + '_ { + use itertools::Itertools; + node.token_at_offset(offset) + .map(|token| self.ancestors_with_macros(token.parent())) + .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) + } + + pub fn find_node_at_offset_with_macros( + &self, + node: &SyntaxNode, + offset: TextUnit, + ) -> Option { + self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) + } + pub fn type_of_expr(&self, expr: &ast::Expr) -> Option { self.analyze(expr.syntax()).type_of(self.db, &expr) } diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs index f3a84cacc3..29dde3d807 100644 --- a/crates/ra_hir_expand/src/db.rs +++ b/crates/ra_hir_expand/src/db.rs @@ -72,6 +72,30 @@ pub trait AstDatabase: SourceDatabase { fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; } +/// This expands the given macro call, but with different arguments. This is +/// used for completion, where we want to see what 'would happen' if we insert a +/// token. The `token_to_map` mapped down into the expansion, with the mapped +/// token returned. 
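The doc comment above describes the new `expand_hypothetical` entry point: completion wants to know what the expansion would look like if an extra token were spliced into the macro arguments, and where that token ends up afterwards. Below is a minimal, standalone sketch of that idea in plain Rust; the string-based "macro" and all names are illustrative only, while the real implementation that follows works on `tt::Subtree`s and token maps.

    // Conceptual toy (not part of the patch): copy the macro arguments, splice a
    // marker token in at the cursor, "expand", and locate the marker in the output.
    fn toy_expand(args: &str) -> String {
        // stands in for `macro_rules! m { ($e:expr) => { ($e) } }`
        format!("({})", args)
    }

    fn main() {
        let actual_args = "a.x";           // user typed `m!(a.x<|>)`
        let marker = "intellijRulezz";     // fake ident inserted at the cursor
        let hypothetical_args = format!("{}{}", actual_args, marker);

        let expansion = toy_expand(&hypothetical_args);
        // The marker's position in the expansion is where completion should run.
        let offset = expansion.find(marker).expect("marker survived expansion");
        assert_eq!(&expansion[offset..], "intellijRulezz)");
        println!("cursor maps to offset {} in `{}`", offset, expansion);
    }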
+pub fn expand_hypothetical( + db: &impl AstDatabase, + actual_macro_call: MacroCallId, + hypothetical_args: &ra_syntax::ast::TokenTree, + token_to_map: ra_syntax::SyntaxToken, +) -> Option<(SyntaxNode, ra_syntax::SyntaxToken)> { + let macro_file = MacroFile { macro_call_id: actual_macro_call }; + let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap(); + let range = + token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; + let token_id = tmap_1.token_by_range(range)?; + let macro_def = expander(db, actual_macro_call)?; + let (node, tmap_2) = + parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?; + let token_id = macro_def.0.map_id_down(token_id); + let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; + let token = ra_syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?; + Some((node.syntax_node(), token)) +} + pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { let map = db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); @@ -130,15 +154,42 @@ pub(crate) fn macro_expand( db: &dyn AstDatabase, id: MacroCallId, ) -> Result, String> { + macro_expand_with_arg(db, id, None) +} + +fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option> { let lazy_id = match id { MacroCallId::LazyMacro(id) => id, - MacroCallId::EagerMacro(id) => { - return Ok(db.lookup_intern_eager_expansion(id).subtree); + MacroCallId::EagerMacro(_id) => { + return None; } }; let loc = db.lookup_intern_macro(lazy_id); - let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?; + let macro_rules = db.macro_def(loc.def)?; + Some(macro_rules) +} + +fn macro_expand_with_arg( + db: &dyn AstDatabase, + id: MacroCallId, + arg: Option>, +) -> Result, String> { + let lazy_id = match id { + MacroCallId::LazyMacro(id) => id, + MacroCallId::EagerMacro(id) => { + if arg.is_some() { + return Err( + "hypothetical macro expansion not implemented for eager macro".to_owned() + ); + } else { + return Ok(db.lookup_intern_eager_expansion(id).subtree); + } + } + }; + + let loc = db.lookup_intern_macro(lazy_id); + let macro_arg = arg.or_else(|| db.macro_arg(id)).ok_or("Fail to args in to tt::TokenTree")?; let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?; let tt = macro_rules.0.expand(db, lazy_id, ¯o_arg.0).map_err(|err| format!("{:?}", err))?; @@ -162,12 +213,24 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio pub(crate) fn parse_macro( db: &dyn AstDatabase, macro_file: MacroFile, +) -> Option<(Parse, Arc)> { + parse_macro_with_arg(db, macro_file, None) +} + +pub fn parse_macro_with_arg( + db: &dyn AstDatabase, + macro_file: MacroFile, + arg: Option>, ) -> Option<(Parse, Arc)> { let _p = profile("parse_macro_query"); let macro_call_id = macro_file.macro_call_id; - let tt = db - .macro_expand(macro_call_id) + let expansion = if let Some(arg) = arg { + macro_expand_with_arg(db, macro_call_id, Some(arg)) + } else { + db.macro_expand(macro_call_id) + }; + let tt = expansion .map_err(|err| { // Note: // The final goal we would like to make all parse_macro success, diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs index acada48ae1..f275305e20 100644 --- a/crates/ra_ide/src/completion/complete_dot.rs +++ b/crates/ra_ide/src/completion/complete_dot.rs @@ -38,7 +38,7 @@ pub(super) fn complete_dot(acc: &mut 
Completions, ctx: &CompletionContext) { fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { for receiver in receiver.autoderef(ctx.db) { for (field, ty) in receiver.fields(ctx.db) { - if ctx.module.map_or(false, |m| !field.is_visible_from(ctx.db, m)) { + if ctx.scope().module().map_or(false, |m| !field.is_visible_from(ctx.db, m)) { // Skip private field. FIXME: If the definition location of the // field is editable, we should show the completion continue; @@ -53,7 +53,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty } fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { - if let Some(krate) = ctx.module.map(|it| it.krate()) { + if let Some(krate) = ctx.krate { let mut seen_methods = FxHashSet::default(); let traits_in_scope = ctx.scope().traits_in_scope(); receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| { @@ -620,4 +620,102 @@ mod tests { "### ); } + + #[test] + fn works_in_simple_macro_1() { + assert_debug_snapshot!( + do_ref_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo(a: A) { + m!(a.x<|>) + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_field", + source_range: [156; 157), + delete: [156; 157), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "### + ); + } + + #[test] + fn works_in_simple_macro_recursive() { + assert_debug_snapshot!( + do_ref_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo(a: A) { + m!(a.x<|>) + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_field", + source_range: [156; 157), + delete: [156; 157), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "### + ); + } + + #[test] + fn works_in_simple_macro_2() { + // this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery + assert_debug_snapshot!( + do_ref_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo(a: A) { + m!(a.<|>) + } + ", + ), + @r###"[]"### + ); + } + + #[test] + fn works_in_simple_macro_recursive_1() { + assert_debug_snapshot!( + do_ref_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo(a: A) { + m!(m!(m!(a.x<|>))) + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_field", + source_range: [162; 163), + delete: [162; 163), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "### + ); + } } diff --git a/crates/ra_ide/src/completion/complete_keyword.rs b/crates/ra_ide/src/completion/complete_keyword.rs index eb7cd9ac20..e1c0ffb1fb 100644 --- a/crates/ra_ide/src/completion/complete_keyword.rs +++ b/crates/ra_ide/src/completion/complete_keyword.rs @@ -79,6 +79,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte } fn is_in_loop_body(leaf: &SyntaxToken) -> bool { + // FIXME move this to CompletionContext and make it handle macros for node in leaf.parent().ancestors() { if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { break; diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs index d2c7585712..3c4a705612 100644 --- a/crates/ra_ide/src/completion/complete_path.rs +++ b/crates/ra_ide/src/completion/complete_path.rs @@ -1,4 +1,4 @@ -//! Completion of paths, including when writing a single name. +//! Completion of paths, i.e. `some::prefix::<|>`. 
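A note on the plumbing added to `ra_hir_expand/src/db.rs` above: `macro_expand_with_arg` and `parse_macro_with_arg` accept an optional replacement argument and fall back to the regular salsa query when none is supplied, so the cached expansion path is left untouched. A hedged, self-contained sketch of that fallback pattern (all names here are hypothetical stand-ins, not the real API):

    use std::sync::Arc;

    // Stand-in for `db.macro_arg(id)`: the argument the macro call actually has.
    fn cached_macro_arg() -> Option<Arc<String>> {
        Some(Arc::new("actual args".to_string()))
    }

    // Stand-in for `macro_expand_with_arg`: prefer the hypothetical argument if
    // the caller supplied one, otherwise use the cached one.
    fn expand(arg_override: Option<Arc<String>>) -> Result<String, String> {
        let arg = arg_override
            .or_else(cached_macro_arg)
            .ok_or_else(|| "failed to lower macro arguments".to_string())?;
        Ok(format!("expanded({})", arg))
    }

    fn main() {
        assert_eq!(expand(None).unwrap(), "expanded(actual args)");
        let hypothetical = Arc::new("hypothetical args".to_string());
        assert_eq!(expand(Some(hypothetical)).unwrap(), "expanded(hypothetical args)");
    }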
use hir::{Adt, HasVisibility, PathResolution, ScopeDef}; use ra_syntax::AstNode; @@ -48,7 +48,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { }; // Iterate assoc types separately // FIXME: complete T::AssocType - let krate = ctx.module.map(|m| m.krate()); + let krate = ctx.krate; if let Some(krate) = krate { let traits_in_scope = ctx.scope().traits_in_scope(); ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { @@ -934,4 +934,37 @@ mod tests { "### ); } + + #[test] + fn completes_in_simple_macro_call() { + let completions = do_reference_completion( + r#" + macro_rules! m { ($e:expr) => { $e } } + fn main() { m!(self::f<|>); } + fn foo() {} + "#, + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "foo()", + source_range: [93; 94), + delete: [93; 94), + insert: "foo()$0", + kind: Function, + lookup: "foo", + detail: "fn foo()", + }, + CompletionItem { + label: "main()", + source_range: [93; 94), + delete: [93; 94), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + ] + "###); + } } diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ra_ide/src/completion/complete_pattern.rs index c2c6ca002c..fa8aecedad 100644 --- a/crates/ra_ide/src/completion/complete_pattern.rs +++ b/crates/ra_ide/src/completion/complete_pattern.rs @@ -86,4 +86,22 @@ mod tests { ] "###); } + + #[test] + fn completes_in_simple_macro_call() { + // FIXME: doesn't work yet because of missing error recovery in macro expansion + let completions = complete( + r" + macro_rules! m { ($e:expr) => { $e } } + enum E { X } + + fn foo() { + m!(match E::X { + <|> + }) + } + ", + ); + assert_debug_snapshot!(completions, @r###"[]"###); + } } diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs index 8a74f993ab..65ecea1250 100644 --- a/crates/ra_ide/src/completion/complete_postfix.rs +++ b/crates/ra_ide/src/completion/complete_postfix.rs @@ -67,8 +67,8 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { let edit = { - let receiver_range = - ctx.dot_receiver.as_ref().expect("no receiver available").syntax().text_range(); + let receiver_syntax = ctx.dot_receiver.as_ref().expect("no receiver available").syntax(); + let receiver_range = ctx.sema.original_range(receiver_syntax).range; let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); TextEdit::replace(delete_range, snippet.to_string()) }; @@ -279,4 +279,65 @@ mod tests { "### ); } + + #[test] + fn works_in_simple_macro() { + assert_debug_snapshot!( + do_postfix_completion( + r#" + macro_rules! 
m { ($e:expr) => { $e } } + fn main() { + let bar: u8 = 12; + m!(bar.b<|>) + } + "#, + ), + @r###" + [ + CompletionItem { + label: "box", + source_range: [149; 150), + delete: [145; 150), + insert: "Box::new(bar)", + detail: "Box::new(expr)", + }, + CompletionItem { + label: "dbg", + source_range: [149; 150), + delete: [145; 150), + insert: "dbg!(bar)", + detail: "dbg!(expr)", + }, + CompletionItem { + label: "match", + source_range: [149; 150), + delete: [145; 150), + insert: "match bar {\n ${1:_} => {$0\\},\n}", + detail: "match expr {}", + }, + CompletionItem { + label: "not", + source_range: [149; 150), + delete: [145; 150), + insert: "!bar", + detail: "!expr", + }, + CompletionItem { + label: "ref", + source_range: [149; 150), + delete: [145; 150), + insert: "&bar", + detail: "&expr", + }, + CompletionItem { + label: "refm", + source_range: [149; 150), + delete: [145; 150), + insert: "&mut bar", + detail: "&mut expr", + }, + ] + "### + ); + } } diff --git a/crates/ra_ide/src/completion/complete_record_literal.rs b/crates/ra_ide/src/completion/complete_record_literal.rs index f98353d769..be6e4194fc 100644 --- a/crates/ra_ide/src/completion/complete_record_literal.rs +++ b/crates/ra_ide/src/completion/complete_record_literal.rs @@ -153,4 +153,29 @@ mod tests { ] "###); } + + #[test] + fn test_record_literal_field_in_simple_macro() { + let completions = complete( + r" + macro_rules! m { ($e:expr) => { $e } } + struct A { the_field: u32 } + fn foo() { + m!(A { the<|> }) + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "the_field", + source_range: [137; 140), + delete: [137; 140), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "###); + } } diff --git a/crates/ra_ide/src/completion/complete_record_pattern.rs b/crates/ra_ide/src/completion/complete_record_pattern.rs index 9bdeae49f9..687c57d3ed 100644 --- a/crates/ra_ide/src/completion/complete_record_pattern.rs +++ b/crates/ra_ide/src/completion/complete_record_pattern.rs @@ -87,4 +87,32 @@ mod tests { ] "###); } + + #[test] + fn test_record_pattern_field_in_simple_macro() { + let completions = complete( + r" + macro_rules! m { ($e:expr) => { $e } } + struct S { foo: u32 } + + fn process(f: S) { + m!(match f { + S { f<|>: 92 } => (), + }) + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "foo", + source_range: [171; 172), + delete: [171; 172), + insert: "foo", + kind: Field, + detail: "u32", + }, + ] + "###); + } } diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs index 2b9a0e5560..eb3c8cf1b6 100644 --- a/crates/ra_ide/src/completion/complete_scope.rs +++ b/crates/ra_ide/src/completion/complete_scope.rs @@ -1,4 +1,4 @@ -//! FIXME: write short doc here +//! Completion of names from the current scope, e.g. locals and imported items. use crate::completion::{CompletionContext, Completions}; @@ -797,4 +797,72 @@ mod tests { "### ) } + + #[test] + fn completes_in_simple_macro_1() { + assert_debug_snapshot!( + do_reference_completion( + r" + macro_rules! m { ($e:expr) => { $e } } + fn quux(x: i32) { + let y = 92; + m!(<|>); + } + " + ), + @"[]" + ); + } + + #[test] + fn completes_in_simple_macro_2() { + assert_debug_snapshot!( + do_reference_completion( + r" + macro_rules! 
m { ($e:expr) => { $e } } + fn quux(x: i32) { + let y = 92; + m!(x<|>); + } + " + ), + @r###" + [ + CompletionItem { + label: "m!", + source_range: [145; 146), + delete: [145; 146), + insert: "m!($0)", + kind: Macro, + detail: "macro_rules! m", + }, + CompletionItem { + label: "quux(…)", + source_range: [145; 146), + delete: [145; 146), + insert: "quux(${1:x})$0", + kind: Function, + lookup: "quux", + detail: "fn quux(x: i32)", + }, + CompletionItem { + label: "x", + source_range: [145; 146), + delete: [145; 146), + insert: "x", + kind: Binding, + detail: "i32", + }, + CompletionItem { + label: "y", + source_range: [145; 146), + delete: [145; 146), + insert: "y", + kind: Binding, + detail: "i32", + }, + ] + "### + ); + } } diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs index 9aa5a705d5..40535c09e1 100644 --- a/crates/ra_ide/src/completion/completion_context.rs +++ b/crates/ra_ide/src/completion/completion_context.rs @@ -5,7 +5,7 @@ use ra_db::SourceDatabase; use ra_ide_db::RootDatabase; use ra_syntax::{ algo::{find_covering_element, find_node_at_offset}, - ast, AstNode, SourceFile, + ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, TextRange, TextUnit, }; @@ -20,8 +20,11 @@ pub(crate) struct CompletionContext<'a> { pub(super) sema: Semantics<'a, RootDatabase>, pub(super) db: &'a RootDatabase, pub(super) offset: TextUnit, + /// The token before the cursor, in the original file. + pub(super) original_token: SyntaxToken, + /// The token before the cursor, in the macro-expanded file. pub(super) token: SyntaxToken, - pub(super) module: Option, + pub(super) krate: Option, pub(super) name_ref_syntax: Option, pub(super) function_syntax: Option, pub(super) use_item_syntax: Option, @@ -67,15 +70,20 @@ impl<'a> CompletionContext<'a> { let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string()); parse.reparse(&edit).tree() }; + let fake_ident_token = + file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap(); - let module = sema.to_module_def(position.file_id); - let token = original_file.syntax().token_at_offset(position.offset).left_biased()?; + let krate = sema.to_module_def(position.file_id).map(|m| m.krate()); + let original_token = + original_file.syntax().token_at_offset(position.offset).left_biased()?; + let token = sema.descend_into_macros(original_token.clone()); let mut ctx = CompletionContext { sema, db, + original_token, token, offset: position.offset, - module, + krate, name_ref_syntax: None, function_syntax: None, use_item_syntax: None, @@ -95,15 +103,57 @@ impl<'a> CompletionContext<'a> { has_type_args: false, dot_receiver_is_ambiguous_float_literal: false, }; - ctx.fill(&original_file, file_with_fake_ident, position.offset); + + let mut original_file = original_file.syntax().clone(); + let mut hypothetical_file = file_with_fake_ident.syntax().clone(); + let mut offset = position.offset; + let mut fake_ident_token = fake_ident_token; + + // Are we inside a macro call? 
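Before the macro-call loop that follows, a quick standalone illustration of the "fake ident" trick set up above: the context inserts a well-formed placeholder identifier at the cursor so the reparsed file contains a complete name reference to classify. The helper below is purely illustrative, working on plain strings instead of a real reparse:

    // Splice a placeholder identifier at the cursor offset, mirroring the
    // `AtomTextEdit::insert(position.offset, "intellijRulezz")` reparse above.
    fn with_fake_ident(source: &str, offset: usize) -> String {
        let mut patched = String::with_capacity(source.len() + "intellijRulezz".len());
        patched.push_str(&source[..offset]);
        patched.push_str("intellijRulezz");
        patched.push_str(&source[offset..]);
        patched
    }

    fn main() {
        let source = "fn main() { m!(a.) }";
        let cursor = source.find('.').unwrap() + 1; // completion right after the dot
        assert_eq!(with_fake_ident(source, cursor), "fn main() { m!(a.intellijRulezz) }");
    }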
+ while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( + find_node_at_offset::(&original_file, offset), + find_node_at_offset::(&hypothetical_file, offset), + ) { + if actual_macro_call.path().as_ref().map(|s| s.syntax().text()) + != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()) + { + break; + } + let hypothetical_args = match macro_call_with_fake_ident.token_tree() { + Some(tt) => tt, + None => break, + }; + if let (Some(actual_expansion), Some(hypothetical_expansion)) = ( + ctx.sema.expand(&actual_macro_call), + ctx.sema.expand_hypothetical( + &actual_macro_call, + &hypothetical_args, + fake_ident_token, + ), + ) { + let new_offset = hypothetical_expansion.1.text_range().start(); + if new_offset >= actual_expansion.text_range().end() { + break; + } + original_file = actual_expansion; + hypothetical_file = hypothetical_expansion.0; + fake_ident_token = hypothetical_expansion.1; + offset = new_offset; + } else { + break; + } + } + + ctx.fill(&original_file, hypothetical_file, offset); Some(ctx) } // The range of the identifier that is being completed. pub(crate) fn source_range(&self) -> TextRange { + // check kind of macro-expanded token, but use range of original token match self.token.kind() { // workaroud when completion is triggered by trigger characters. - IDENT => self.token.text_range(), + IDENT => self.original_token.text_range(), _ => TextRange::offset_len(self.offset, 0.into()), } } @@ -114,27 +164,24 @@ impl<'a> CompletionContext<'a> { fn fill( &mut self, - original_file: &ast::SourceFile, - file_with_fake_ident: ast::SourceFile, + original_file: &SyntaxNode, + file_with_fake_ident: SyntaxNode, offset: TextUnit, ) { // First, let's try to complete a reference to some declaration. - if let Some(name_ref) = - find_node_at_offset::(file_with_fake_ident.syntax(), offset) - { + if let Some(name_ref) = find_node_at_offset::(&file_with_fake_ident, offset) { // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. // See RFC#1685. if is_node::(name_ref.syntax()) { self.is_param = true; return; } - self.classify_name_ref(original_file, name_ref); + self.classify_name_ref(original_file, name_ref, offset); } // Otherwise, see if this is a declaration. We can use heuristics to // suggest declaration names, see `CompletionKind::Magic`. 
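The `ancestors_with_macros` and `find_node_at_offset_with_macros` helpers used above, and again in the hunks below, come from the `Semantics` additions earlier in this patch: they interleave the ancestor chains of every token at the offset by ascending node size via itertools' `kmerge_by`. A reduced sketch of that ordering, assuming itertools 0.8 and using range lengths as stand-ins for syntax nodes:

    use itertools::Itertools;

    fn main() {
        // Each inner vec models one token's ancestor chain, innermost (smallest
        // text range) first, as `ancestors_with_macros` yields them.
        let chains = vec![vec![3u32, 10, 50], vec![5, 12, 50]];

        // `kmerge_by` interleaves the already-sorted chains so the most specific
        // (smallest) ancestor comes out first, which is what
        // `find_node_at_offset_with_macros` relies on when it takes the first
        // successful cast.
        let merged: Vec<u32> = chains.into_iter().kmerge_by(|a, b| a < b).collect();
        assert_eq!(merged, vec![3, 5, 10, 12, 50, 50]);
    }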
- if let Some(name) = find_node_at_offset::(file_with_fake_ident.syntax(), offset) - { + if let Some(name) = find_node_at_offset::(&file_with_fake_ident, offset) { if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { let parent = bind_pat.syntax().parent(); if parent.clone().and_then(ast::MatchArm::cast).is_some() @@ -148,23 +195,29 @@ impl<'a> CompletionContext<'a> { return; } if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { - self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset); + self.record_lit_pat = + self.sema.find_node_at_offset_with_macros(&original_file, offset); } } } - fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) { + fn classify_name_ref( + &mut self, + original_file: &SyntaxNode, + name_ref: ast::NameRef, + offset: TextUnit, + ) { self.name_ref_syntax = - find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start()); + find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); let name_range = name_ref.syntax().text_range(); if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() { - self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); + self.record_lit_syntax = + self.sema.find_node_at_offset_with_macros(&original_file, offset); } self.impl_def = self - .token - .parent() - .ancestors() + .sema + .ancestors_with_macros(self.token.parent()) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .find_map(ast::ImplDef::cast); @@ -183,12 +236,12 @@ impl<'a> CompletionContext<'a> { _ => (), } - self.use_item_syntax = self.token.parent().ancestors().find_map(ast::UseItem::cast); + self.use_item_syntax = + self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::UseItem::cast); self.function_syntax = self - .token - .parent() - .ancestors() + .sema + .ancestors_with_macros(self.token.parent()) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .find_map(ast::FnDef::cast); @@ -242,7 +295,7 @@ impl<'a> CompletionContext<'a> { if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { if let Some(if_expr) = - find_node_at_offset::(original_file.syntax(), off) + self.sema.find_node_at_offset_with_macros::(original_file, off) { if if_expr.syntax().text_range().end() < name_ref.syntax().text_range().start() @@ -259,7 +312,7 @@ impl<'a> CompletionContext<'a> { self.dot_receiver = field_expr .expr() .map(|e| e.syntax().text_range()) - .and_then(|r| find_node_with_range(original_file.syntax(), r)); + .and_then(|r| find_node_with_range(original_file, r)); self.dot_receiver_is_ambiguous_float_literal = if let Some(ast::Expr::Literal(l)) = &self.dot_receiver { match l.kind() { @@ -275,7 +328,7 @@ impl<'a> CompletionContext<'a> { self.dot_receiver = method_call_expr .expr() .map(|e| e.syntax().text_range()) - .and_then(|r| find_node_with_range(original_file.syntax(), r)); + .and_then(|r| find_node_with_range(original_file, r)); self.is_call = true; } } diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs index ffba03898a..49c53183a1 100644 --- a/crates/ra_mbe/src/mbe_expander/matcher.rs +++ b/crates/ra_mbe/src/mbe_expander/matcher.rs @@ -247,6 +247,7 @@ impl<'a> TtIter<'a> { ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind); if !sink.cursor.is_root() || sink.error { + // FIXME better recovery in this case would help completion inside macros immensely return Err(()); } @@ 
-375,7 +376,8 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> Result<Option<Fragment>, Ex return Ok(Some(Fragment::Tokens(tt))); } }; - let tt = input.expect_fragment(fragment).map_err(|()| err!())?; + let tt = + input.expect_fragment(fragment).map_err(|()| err!("fragment did not parse as {}", kind))?; let fragment = if kind == "expr" { Fragment::Ast(tt) } else { Fragment::Tokens(tt) }; Ok(Some(fragment)) }
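Finally, the matcher change above replaces the information-free `err!()` with a message naming the fragment kind that failed to parse. A tiny sketch of why that helps when debugging macro expansion failures (toy functions, not the real mbe API):

    // Toy stand-in for `TtIter::expect_fragment`: parsing either succeeds or
    // fails with no information at all.
    fn expect_fragment(_kind: &str, input: &str) -> Result<String, ()> {
        if input.trim().is_empty() { Err(()) } else { Ok(input.to_string()) }
    }

    // Mirrors the `map_err(|()| err!("fragment did not parse as {}", kind))`
    // change: the unit error is upgraded to a message naming the failed kind.
    fn match_meta_var(kind: &str, input: &str) -> Result<String, String> {
        expect_fragment(kind, input).map_err(|()| format!("fragment did not parse as {}", kind))
    }

    fn main() {
        assert_eq!(match_meta_var("expr", "1 + 1").unwrap(), "1 + 1");
        assert_eq!(match_meta_var("expr", "").unwrap_err(), "fragment did not parse as expr");
    }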