Try to complete within macros

This commit is contained in:
Florian Diebold 2020-03-07 15:27:03 +01:00
parent aff82cf7ac
commit 24e98121d8
9 changed files with 339 additions and 38 deletions

1
Cargo.lock generated
View file

@ -960,6 +960,7 @@ name = "ra_hir"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"either", "either",
"itertools",
"log", "log",
"ra_db", "ra_db",
"ra_hir_def", "ra_hir_def",

View file

@ -12,6 +12,8 @@ log = "0.4.8"
rustc-hash = "1.1.0" rustc-hash = "1.1.0"
either = "1.5.3" either = "1.5.3"
itertools = "0.8.2"
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }
ra_prof = { path = "../ra_prof" } ra_prof = { path = "../ra_prof" }

View file

@ -6,13 +6,14 @@ use std::{cell::RefCell, fmt, iter::successors};
use hir_def::{ use hir_def::{
resolver::{self, HasResolver, Resolver}, resolver::{self, HasResolver, Resolver},
TraitId, AsMacroCall, TraitId,
}; };
use hir_expand::ExpansionInfo; use hir_expand::ExpansionInfo;
use ra_db::{FileId, FileRange}; use ra_db::{FileId, FileRange};
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::{ use ra_syntax::{
algo::skip_trivia_token, ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit, algo::{self, skip_trivia_token},
ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit,
}; };
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
@ -70,6 +71,37 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
Some(node) Some(node)
} }
/// Expands `actual_macro_call` as if its argument token tree were that of
/// `hypothetical_call`, and maps `token_to_map` (a token inside the
/// hypothetical call) into the resulting expansion.
///
/// Used by completion: the hypothetical call contains an inserted fake
/// identifier, and we need to know where that token lands after expansion.
///
/// Returns the expanded syntax tree together with the mapped token, or
/// `None` if resolution, expansion, or token mapping fails at any step.
pub fn expand_hypothetical(
    &self,
    actual_macro_call: &ast::MacroCall,
    hypothetical_call: &ast::MacroCall,
    token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
    let macro_call =
        self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
    let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
    let macro_call_id = macro_call
        .as_call_id(self.db, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?;
    // Previously `.unwrap()` — propagate `None` instead of panicking if the
    // call id unexpectedly maps to a non-macro file.
    let macro_file = macro_call_id.as_file().macro_file()?;
    // Compute the hypothetical token tree once (was fetched twice), and
    // propagate `None` instead of panicking on a call without a token tree.
    let hypothetical_tt = hypothetical_call.token_tree()?;
    let (tt, tmap_1) = hir_expand::syntax_node_to_token_tree(hypothetical_tt.syntax())?;
    // Make the token's range relative to the start of the hypothetical
    // token tree, so it can be looked up in the token map.
    let range =
        token_to_map.text_range().checked_sub(hypothetical_tt.syntax().text_range().start())?;
    let token_id = tmap_1.token_by_range(range)?;
    let macro_def = hir_expand::db::expander(self.db, macro_call_id)?;
    let (node, tmap_2) = hir_expand::db::parse_macro_with_arg(
        self.db,
        macro_file,
        Some(std::sync::Arc::new((tt, tmap_1))),
    )?;
    // Map the token id down through the macro definition, then back to a
    // concrete range (and token) inside the expansion.
    let token_id = macro_def.0.map_id_down(token_id);
    let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
    let token = algo::find_covering_element(&node.syntax_node(), range).into_token()?;
    Some((node.syntax_node(), token))
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
let parent = token.parent(); let parent = token.parent();
let parent = self.find_file(parent); let parent = self.find_file(parent);
@ -104,6 +136,25 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
node.ancestors_with_macros(self.db).map(|it| it.value) node.ancestors_with_macros(self.db).map(|it| it.value)
} }
/// Returns the ancestors (walking up through macro expansions) of every
/// token at `offset`, merged into a single stream ordered so that smaller
/// nodes come before larger ones.
pub fn ancestors_at_offset_with_macros(
    &self,
    node: &SyntaxNode,
    offset: TextUnit,
) -> impl Iterator<Item = SyntaxNode> + '_ {
    use itertools::Itertools;
    // One ancestor stream per token touching the offset (there can be up
    // to two tokens at a boundary offset).
    let ancestor_streams =
        node.token_at_offset(offset).map(|tok| self.ancestors_with_macros(tok.parent()));
    // k-way merge by text-range length: innermost (smallest) nodes first.
    ancestor_streams.kmerge_by(|lhs, rhs| lhs.text_range().len() < rhs.text_range().len())
}
/// Finds the innermost node of AST type `N` covering `offset`, looking
/// through macro expansions via `ancestors_at_offset_with_macros`.
pub fn find_node_at_offset_with_macros<N: AstNode>(
    &self,
    node: &SyntaxNode,
    offset: TextUnit,
) -> Option<N> {
    // `filter_map(..).next()` is equivalent to `find_map(..)`: take the
    // first ancestor that casts to `N`.
    self.ancestors_at_offset_with_macros(node, offset).filter_map(N::cast).next()
}
pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
self.analyze(expr.syntax()).type_of(self.db, &expr) self.analyze(expr.syntax()).type_of(self.db, &expr)
} }

View file

@ -129,16 +129,43 @@ pub(crate) fn macro_arg(
pub(crate) fn macro_expand( pub(crate) fn macro_expand(
db: &dyn AstDatabase, db: &dyn AstDatabase,
id: MacroCallId, id: MacroCallId,
) -> Result<Arc<tt::Subtree>, String> {
macro_expand_with_arg(db, id, None)
}
// FIXME: this is a hack to expose a macro call's token expander to
// `Semantics::expand_hypothetical`; remove once a proper API for
// hypothetical expansions exists.
/// Returns the `TokenExpander` (with its definition-side token map) for the
/// macro invoked by `id`, or `None` if the definition cannot be found.
///
/// Eager macros have no lazy expander and yield `None` (previously this
/// path hit `unimplemented!()`, panicking a public, `Option`-returning API).
pub fn expander(
    db: &dyn AstDatabase,
    id: MacroCallId,
) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
    let lazy_id = match id {
        MacroCallId::LazyMacro(id) => id,
        // Recoverable: let callers (which already handle `None`) bail out
        // instead of crashing on eager macros.
        MacroCallId::EagerMacro(_id) => return None,
    };
    let loc = db.lookup_intern_macro(lazy_id);
    // `db.macro_def` already returns the `Option` we want.
    db.macro_def(loc.def)
}
pub(crate) fn macro_expand_with_arg(
db: &dyn AstDatabase,
id: MacroCallId,
arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
) -> Result<Arc<tt::Subtree>, String> { ) -> Result<Arc<tt::Subtree>, String> {
let lazy_id = match id { let lazy_id = match id {
MacroCallId::LazyMacro(id) => id, MacroCallId::LazyMacro(id) => id,
MacroCallId::EagerMacro(id) => { MacroCallId::EagerMacro(id) => {
// TODO
return Ok(db.lookup_intern_eager_expansion(id).subtree); return Ok(db.lookup_intern_eager_expansion(id).subtree);
} }
}; };
let loc = db.lookup_intern_macro(lazy_id); let loc = db.lookup_intern_macro(lazy_id);
let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?; let macro_arg = arg.or_else(|| db.macro_arg(id)).ok_or("Fail to args in to tt::TokenTree")?;
let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?; let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
let tt = macro_rules.0.expand(db, lazy_id, &macro_arg.0).map_err(|err| format!("{:?}", err))?; let tt = macro_rules.0.expand(db, lazy_id, &macro_arg.0).map_err(|err| format!("{:?}", err))?;
@ -162,12 +189,24 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
pub(crate) fn parse_macro( pub(crate) fn parse_macro(
db: &dyn AstDatabase, db: &dyn AstDatabase,
macro_file: MacroFile, macro_file: MacroFile,
) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
parse_macro_with_arg(db, macro_file, None)
}
pub fn parse_macro_with_arg(
db: &dyn AstDatabase,
macro_file: MacroFile,
arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> { ) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
let _p = profile("parse_macro_query"); let _p = profile("parse_macro_query");
let macro_call_id = macro_file.macro_call_id; let macro_call_id = macro_file.macro_call_id;
let tt = db let expansion = if let Some(arg) = arg {
.macro_expand(macro_call_id) macro_expand_with_arg(db, macro_call_id, Some(arg))
} else {
db.macro_expand(macro_call_id)
};
let tt = expansion
.map_err(|err| { .map_err(|err| {
// Note: // Note:
// The final goal we would like to make all parse_macro success, // The final goal we would like to make all parse_macro success,
@ -185,15 +224,13 @@ pub(crate) fn parse_macro(
.collect::<Vec<_>>() .collect::<Vec<_>>()
.join("\n"); .join("\n");
log::warn!( eprintln!(
"fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}", "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}",
err, err, node.value, parents
node.value,
parents
); );
} }
_ => { _ => {
log::warn!("fail on macro_parse: (reason: {})", err); eprintln!("fail on macro_parse: (reason: {})", err);
} }
} }
}) })

View file

@ -157,6 +157,13 @@ impl HirFileId {
} }
} }
} }
/// If this `HirFileId` refers to a macro expansion, returns the underlying
/// `MacroFile`; for ordinary source files this is `None`.
pub fn macro_file(self) -> Option<MacroFile> {
    if let HirFileIdRepr::MacroFile(m) = self.0 {
        Some(m)
    } else {
        None
    }
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -296,7 +303,7 @@ pub struct ExpansionInfo {
exp_map: Arc<mbe::TokenMap>, exp_map: Arc<mbe::TokenMap>,
} }
pub use mbe::Origin; pub use mbe::{syntax_node_to_token_tree, Origin};
use ra_parser::FragmentKind; use ra_parser::FragmentKind;
impl ExpansionInfo { impl ExpansionInfo {

View file

@ -584,4 +584,102 @@ mod tests {
"### "###
); );
} }
#[test]
// Field completion should work when the receiver expression sits inside a
// simple `$e:expr` macro call: `m!(a.x<|>)` still completes `the_field`.
fn works_in_simple_macro_1() {
assert_debug_snapshot!(
do_ref_completion(
r"
macro_rules! m { ($e:expr) => { $e } }
struct A { the_field: u32 }
fn foo(a: A) {
m!(a.x<|>)
}
",
),
@r###"
[
CompletionItem {
label: "the_field",
source_range: [156; 157),
delete: [156; 157),
insert: "the_field",
kind: Field,
detail: "u32",
},
]
"###
);
}
#[test]
// NOTE(review): despite the name, this fixture is byte-identical to
// `works_in_simple_macro_1` — the call `m!(a.x<|>)` is not nested/recursive.
// Compare `works_in_simple_macro_recursive_1`, which actually nests
// `m!(m!(m!(..)))`. Either rename this test or make the call recursive.
fn works_in_simple_macro_recursive() {
assert_debug_snapshot!(
do_ref_completion(
r"
macro_rules! m { ($e:expr) => { $e } }
struct A { the_field: u32 }
fn foo(a: A) {
m!(a.x<|>)
}
",
),
@r###"
[
CompletionItem {
label: "the_field",
source_range: [156; 157),
delete: [156; 157),
insert: "the_field",
kind: Field,
detail: "u32",
},
]
"###
);
}
#[test]
// Documents a known limitation: completing right after the dot (`a.<|>`)
// inside the macro yields no results, because `m!(a.)` alone does not
// parse as a valid `$e:expr` argument.
fn works_in_simple_macro_2() {
// this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery
assert_debug_snapshot!(
do_ref_completion(
r"
macro_rules! m { ($e:expr) => { $e } }
struct A { the_field: u32 }
fn foo(a: A) {
m!(a.<|>)
}
",
),
@r###"[]"###
);
}
#[test]
// Field completion should survive NESTED macro calls: the hypothetical
// expansion is applied repeatedly (`m!(m!(m!(a.x<|>)))`), and completion
// still finds `the_field` on the innermost receiver.
fn works_in_simple_macro_recursive_1() {
assert_debug_snapshot!(
do_ref_completion(
r"
macro_rules! m { ($e:expr) => { $e } }
struct A { the_field: u32 }
fn foo(a: A) {
m!(m!(m!(a.x<|>)))
}
",
),
@r###"
[
CompletionItem {
label: "the_field",
source_range: [162; 163),
delete: [162; 163),
insert: "the_field",
kind: Field,
detail: "u32",
},
]
"###
);
}
} }

View file

@ -1,4 +1,4 @@
//! Completion of paths, including when writing a single name. //! Completion of paths, i.e. `some::prefix::<|>`.
use hir::{Adt, PathResolution, ScopeDef}; use hir::{Adt, PathResolution, ScopeDef};
use ra_syntax::AstNode; use ra_syntax::AstNode;

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here //! Completion of names from the current scope, e.g. locals and imported items.
use crate::completion::{CompletionContext, Completions}; use crate::completion::{CompletionContext, Completions};
@ -797,4 +797,72 @@ mod tests {
"### "###
) )
} }
#[test]
// Scope completion on a bare cursor inside a macro call: `m!(<|>)` yields
// no items — presumably because the macro argument consists only of the
// inserted fake token and fails to expand (compare `works_in_simple_macro_2`);
// TODO confirm that is the actual reason for the empty snapshot.
fn completes_in_simple_macro_1() {
assert_debug_snapshot!(
do_reference_completion(
r"
macro_rules! m { ($e:expr) => { $e } }
fn quux(x: i32) {
let y = 92;
m!(<|>);
}
"
),
@"[]"
);
}
#[test]
// With a real token before the cursor (`m!(x<|>)`), scope completion works
// inside the macro call: the snapshot lists the macro itself, the enclosing
// function, and both locals (`x`, `y`) visible at the call site.
fn completes_in_simple_macro_2() {
assert_debug_snapshot!(
do_reference_completion(
r"
macro_rules! m { ($e:expr) => { $e } }
fn quux(x: i32) {
let y = 92;
m!(x<|>);
}
"
),
@r###"
[
CompletionItem {
label: "m!",
source_range: [145; 146),
delete: [145; 146),
insert: "m!($0)",
kind: Macro,
detail: "macro_rules! m",
},
CompletionItem {
label: "quux(…)",
source_range: [145; 146),
delete: [145; 146),
insert: "quux(${1:x})$0",
kind: Function,
lookup: "quux",
detail: "fn quux(x: i32)",
},
CompletionItem {
label: "x",
source_range: [145; 146),
delete: [145; 146),
insert: "x",
kind: Binding,
detail: "i32",
},
CompletionItem {
label: "y",
source_range: [145; 146),
delete: [145; 146),
insert: "y",
kind: Binding,
detail: "i32",
},
]
"###
);
}
} }

View file

@ -5,7 +5,7 @@ use ra_db::SourceDatabase;
use ra_ide_db::RootDatabase; use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
algo::{find_covering_element, find_node_at_offset}, algo::{find_covering_element, find_node_at_offset},
ast, AstNode, SourceFile, ast, AstNode,
SyntaxKind::*, SyntaxKind::*,
SyntaxNode, SyntaxToken, TextRange, TextUnit, SyntaxNode, SyntaxToken, TextRange, TextUnit,
}; };
@ -20,6 +20,9 @@ pub(crate) struct CompletionContext<'a> {
pub(super) sema: Semantics<'a, RootDatabase>, pub(super) sema: Semantics<'a, RootDatabase>,
pub(super) db: &'a RootDatabase, pub(super) db: &'a RootDatabase,
pub(super) offset: TextUnit, pub(super) offset: TextUnit,
/// The token before the cursor, in the original file.
pub(super) original_token: SyntaxToken,
/// The token before the cursor, in the macro-expanded file.
pub(super) token: SyntaxToken, pub(super) token: SyntaxToken,
pub(super) module: Option<hir::Module>, pub(super) module: Option<hir::Module>,
pub(super) name_ref_syntax: Option<ast::NameRef>, pub(super) name_ref_syntax: Option<ast::NameRef>,
@ -67,12 +70,18 @@ impl<'a> CompletionContext<'a> {
let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string()); let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
parse.reparse(&edit).tree() parse.reparse(&edit).tree()
}; };
let fake_ident_token =
file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
// TODO: shouldn't this take the position into account? (in case we're inside a mod {})
let module = sema.to_module_def(position.file_id); let module = sema.to_module_def(position.file_id);
let token = original_file.syntax().token_at_offset(position.offset).left_biased()?; let original_token =
original_file.syntax().token_at_offset(position.offset).left_biased()?;
let token = sema.descend_into_macros(original_token.clone());
let mut ctx = CompletionContext { let mut ctx = CompletionContext {
sema, sema,
db, db,
original_token,
token, token,
offset: position.offset, offset: position.offset,
module, module,
@ -95,15 +104,45 @@ impl<'a> CompletionContext<'a> {
has_type_args: false, has_type_args: false,
dot_receiver_is_ambiguous_float_literal: false, dot_receiver_is_ambiguous_float_literal: false,
}; };
ctx.fill(&original_file, file_with_fake_ident, position.offset);
let mut original_file = original_file.syntax().clone();
let mut hypothetical_file = file_with_fake_ident.syntax().clone();
let mut offset = position.offset;
let mut fake_ident_token = fake_ident_token;
// Are we inside a macro call?
while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
find_node_at_offset::<ast::MacroCall>(&original_file, offset),
find_node_at_offset::<ast::MacroCall>(&hypothetical_file, offset),
) {
if let (Some(actual_expansion), Some(hypothetical_expansion)) = (
ctx.sema.expand(&actual_macro_call),
ctx.sema.expand_hypothetical(
&actual_macro_call,
&macro_call_with_fake_ident,
fake_ident_token,
),
) {
// TODO check that the expansions 'look the same' up to the inserted token?
original_file = actual_expansion;
hypothetical_file = hypothetical_expansion.0;
fake_ident_token = hypothetical_expansion.1;
offset = fake_ident_token.text_range().start();
} else {
break;
}
}
ctx.fill(&original_file, hypothetical_file, offset);
Some(ctx) Some(ctx)
} }
// The range of the identifier that is being completed. // The range of the identifier that is being completed.
pub(crate) fn source_range(&self) -> TextRange { pub(crate) fn source_range(&self) -> TextRange {
// check kind of macro-expanded token, but use range of original token
match self.token.kind() { match self.token.kind() {
// workaroud when completion is triggered by trigger characters. // workaroud when completion is triggered by trigger characters.
IDENT => self.token.text_range(), IDENT => self.original_token.text_range(),
_ => TextRange::offset_len(self.offset, 0.into()), _ => TextRange::offset_len(self.offset, 0.into()),
} }
} }
@ -114,14 +153,12 @@ impl<'a> CompletionContext<'a> {
fn fill( fn fill(
&mut self, &mut self,
original_file: &ast::SourceFile, original_file: &SyntaxNode,
file_with_fake_ident: ast::SourceFile, file_with_fake_ident: SyntaxNode,
offset: TextUnit, offset: TextUnit,
) { ) {
// First, let's try to complete a reference to some declaration. // First, let's try to complete a reference to some declaration.
if let Some(name_ref) = if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&file_with_fake_ident, offset) {
find_node_at_offset::<ast::NameRef>(file_with_fake_ident.syntax(), offset)
{
// Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
// See RFC#1685. // See RFC#1685.
if is_node::<ast::Param>(name_ref.syntax()) { if is_node::<ast::Param>(name_ref.syntax()) {
@ -133,8 +170,7 @@ impl<'a> CompletionContext<'a> {
// Otherwise, see if this is a declaration. We can use heuristics to // Otherwise, see if this is a declaration. We can use heuristics to
// suggest declaration names, see `CompletionKind::Magic`. // suggest declaration names, see `CompletionKind::Magic`.
if let Some(name) = find_node_at_offset::<ast::Name>(file_with_fake_ident.syntax(), offset) if let Some(name) = find_node_at_offset::<ast::Name>(&file_with_fake_ident, offset) {
{
if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
let parent = bind_pat.syntax().parent(); let parent = bind_pat.syntax().parent();
if parent.clone().and_then(ast::MatchArm::cast).is_some() if parent.clone().and_then(ast::MatchArm::cast).is_some()
@ -148,23 +184,24 @@ impl<'a> CompletionContext<'a> {
return; return;
} }
if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset); self.record_lit_pat =
self.sema.find_node_at_offset_with_macros(&original_file, self.offset);
} }
} }
} }
fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) { fn classify_name_ref(&mut self, original_file: &SyntaxNode, name_ref: ast::NameRef) {
self.name_ref_syntax = self.name_ref_syntax =
find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start()); find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
let name_range = name_ref.syntax().text_range(); let name_range = name_ref.syntax().text_range();
if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() { if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() {
self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); self.record_lit_syntax =
self.sema.find_node_at_offset_with_macros(&original_file, self.offset);
} }
self.impl_def = self self.impl_def = self
.token .sema
.parent() .ancestors_with_macros(self.token.parent())
.ancestors()
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::ImplDef::cast); .find_map(ast::ImplDef::cast);
@ -183,12 +220,12 @@ impl<'a> CompletionContext<'a> {
_ => (), _ => (),
} }
self.use_item_syntax = self.token.parent().ancestors().find_map(ast::UseItem::cast); self.use_item_syntax =
self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::UseItem::cast);
self.function_syntax = self self.function_syntax = self
.token .sema
.parent() .ancestors_with_macros(self.token.parent())
.ancestors()
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::FnDef::cast); .find_map(ast::FnDef::cast);
@ -242,7 +279,7 @@ impl<'a> CompletionContext<'a> {
if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) {
if let Some(if_expr) = if let Some(if_expr) =
find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off) self.sema.find_node_at_offset_with_macros::<ast::IfExpr>(original_file, off)
{ {
if if_expr.syntax().text_range().end() if if_expr.syntax().text_range().end()
< name_ref.syntax().text_range().start() < name_ref.syntax().text_range().start()
@ -259,7 +296,7 @@ impl<'a> CompletionContext<'a> {
self.dot_receiver = field_expr self.dot_receiver = field_expr
.expr() .expr()
.map(|e| e.syntax().text_range()) .map(|e| e.syntax().text_range())
.and_then(|r| find_node_with_range(original_file.syntax(), r)); .and_then(|r| find_node_with_range(original_file, r));
self.dot_receiver_is_ambiguous_float_literal = self.dot_receiver_is_ambiguous_float_literal =
if let Some(ast::Expr::Literal(l)) = &self.dot_receiver { if let Some(ast::Expr::Literal(l)) = &self.dot_receiver {
match l.kind() { match l.kind() {
@ -275,7 +312,7 @@ impl<'a> CompletionContext<'a> {
self.dot_receiver = method_call_expr self.dot_receiver = method_call_expr
.expr() .expr()
.map(|e| e.syntax().text_range()) .map(|e| e.syntax().text_range())
.and_then(|r| find_node_with_range(original_file.syntax(), r)); .and_then(|r| find_node_with_range(original_file, r));
self.is_call = true; self.is_call = true;
} }
} }