Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-29 06:23:25 +00:00)
fix: Fix completions for locals not working properly inside macro calls
parent b74e96f509
commit bdbffdd463
3 changed files with 45 additions and 19 deletions
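For context, the scenario being fixed mirrors the test added at the end of this diff: a macro binds a local named by an ident argument and then expands a caller-supplied block, and completion requested inside that block should offer the macro-introduced local. A minimal standalone illustration (plain Rust, names taken from the new test; not part of the change itself):

// Illustration only: mirrors the new `completes_locals_from_macros` test below.
// The macro introduces a local named by its first argument, then expands the
// caller-supplied expression, so code written inside the `x!` call can see
// (and should be able to complete) `foobar`.
macro_rules! x {
    ($x:ident, $expr:expr) => {
        let $x = 0;
        $expr
    };
}

fn main() {
    x! {
        foobar, {
            // typing `f` here should now complete to the local `foobar`
            let _ = foobar;
        }
    };
}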
@@ -67,10 +67,7 @@ impl SourceAnalyzer {
         let scopes = db.expr_scopes(def);
         let scope = match offset {
             None => scope_for(&scopes, &source_map, node),
-            Some(offset) => {
-                let file_id = node.file_id.original_file(db.upcast());
-                scope_for_offset(db, &scopes, &source_map, InFile::new(file_id.into(), offset))
-            }
+            Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
         };
         let resolver = resolver_for_scope(db.upcast(), def, scope);
         SourceAnalyzer {
@@ -91,10 +88,7 @@ impl SourceAnalyzer {
         let scopes = db.expr_scopes(def);
         let scope = match offset {
             None => scope_for(&scopes, &source_map, node),
-            Some(offset) => {
-                let file_id = node.file_id.original_file(db.upcast());
-                scope_for_offset(db, &scopes, &source_map, InFile::new(file_id.into(), offset))
-            }
+            Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
         };
         let resolver = resolver_for_scope(db.upcast(), def, scope);
         SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id }
@@ -585,14 +579,15 @@ fn scope_for_offset(
     db: &dyn HirDatabase,
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
-    offset: InFile<TextSize>,
+    from_file: HirFileId,
+    offset: TextSize,
 ) -> Option<ScopeId> {
     scopes
         .scope_by_expr()
         .iter()
         .filter_map(|(id, scope)| {
             let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
-            if offset.file_id == file_id {
+            if from_file == file_id {
                 let root = db.parse_or_expand(file_id)?;
                 let node = value.to_node(&root);
                 return Some((node.syntax().text_range(), scope));
@@ -602,17 +597,15 @@ fn scope_for_offset(
             let source = iter::successors(file_id.call_node(db.upcast()), |it| {
                 it.file_id.call_node(db.upcast())
             })
-            .find(|it| it.file_id == offset.file_id)
+            .find(|it| it.file_id == from_file)
             .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
             Some((source.value.text_range(), scope))
         })
-        .filter(|(expr_range, _scope)| {
-            expr_range.start() <= offset.value && offset.value <= expr_range.end()
-        })
+        .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
         // find containing scope
         .min_by_key(|(expr_range, _scope)| expr_range.len())
         .map(|(expr_range, scope)| {
-            adjust(db, scopes, source_map, expr_range, offset).unwrap_or(*scope)
+            adjust(db, scopes, source_map, expr_range, from_file, offset).unwrap_or(*scope)
         })
 }
@@ -623,7 +616,8 @@ fn adjust(
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
     expr_range: TextRange,
-    offset: InFile<TextSize>,
+    from_file: HirFileId,
+    offset: TextSize,
 ) -> Option<ScopeId> {
     let child_scopes = scopes
         .scope_by_expr()
@@ -631,7 +625,7 @@ fn adjust(
         .filter_map(|(id, scope)| {
             let source = source_map.expr_syntax(*id).ok()?;
             // FIXME: correctly handle macro expansion
-            if source.file_id != offset.file_id {
+            if source.file_id != from_file {
                 return None;
             }
             let root = source.file_syntax(db.upcast());
@@ -639,7 +633,7 @@ fn adjust(
             Some((node.syntax().text_range(), scope))
         })
         .filter(|&(range, _)| {
-            range.start() <= offset.value && expr_range.contains_range(range) && range != expr_range
+            range.start() <= offset && expr_range.contains_range(range) && range != expr_range
         });

     child_scopes
@@ -506,7 +506,11 @@ impl<'a> CompletionContext<'a> {

         let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
         let token = sema.descend_into_macros_single(original_token.clone());
-        let scope = sema.scope_at_offset(&token.parent()?, offset)?;
+
+        // adjust for macro input, this still fails if there is no token written yet
+        let scope_offset =
+            if original_token == token { offset } else { token.text_range().end() };
+        let scope = sema.scope_at_offset(&token.parent()?, scope_offset)?;

         let krate = scope.krate();
         let module = scope.module();
@@ -782,3 +782,31 @@ fn main() {
         "#]],
     )
 }
+
+#[test]
+fn completes_locals_from_macros() {
+    check(
+        r#"
+
+macro_rules! x {
+    ($x:ident, $expr:expr) => {
+        let $x = 0;
+        $expr
+    };
+}
+fn main() {
+    x! {
+        foobar, {
+            f$0
+        }
+    };
+}
+"#,
+        expect![[r#"
+            fn main() fn()
+            lc foobar i32
+            ma x!(…) macro_rules! x
+            bt u32
+        "#]],
+    )
+}