Auto merge of #17542 - roife:fix-issue-17517, r=Veykril

feat: go-to-def and find-references on control-flow keywords

Fixes #17517.

This PR implements **go-to-definition** and **find-references** for control-flow keywords, mirroring the behavior of the `highlight-related` module. It also fixes some incorrect behaviors in `highlight-related`.

## Changes

1. **Support for go-to-definition on control flow keywords**:
   This PR introduces functionality that lets users navigate to the definition of control-flow keywords (`return`, `break`, `continue`).
   Commit: 2a3244ee147f898dd828c06352645ae1713c260f..7391e7a608634709db002a4cb09229de4d12c056.

2. **Bug fixes and refactoring in highlight-related**:
   - **Handling `return`/`break`/`continue` within `try` blocks**:
     This PR adjusts the behavior of these keywords when they occur within `try` blocks. When one of these keywords is encountered, control should exit the outer function or loop containing the `try` block, not the `try` block itself; `?`, by contrast, exits only the `try` block (see the snippet at the end of this item).
     Commit: 59d697e807f0197f59814b37dca1563959da4aa1.
   - **Support highlighting keywords in macro expansion for highlight-related**:
     Commit: 88df24f01727c23a667a763ee3ee0cec22d5ad52.
   - Detailed description of the bug fixes:
     + The previous implementation of `preorder_expr` incorrectly treated `try` blocks as new contexts, so r-a would not traverse into them to visit inner `return` and `break`/`continue` expressions. To resolve this, a new function `preorder_expr_with_ctx_checker` has been added, which lets callers specify which expressions open a new context and should be skipped.
       * For example, when searching for `?` operators, r-a should skip `try` blocks, because a `?` inside a `try` block targets the block itself. But when searching for the `return` keyword, r-a should collect the `return`s both inside and outside the `try` blocks (see the sketch after this list).
     + On top of this, the PR adds `WalkExpandedExprCtx` (a builder). It offers customizable context skipping, keeps track of loop depth (for `break`/`continue`), and handles macro expansion during traversal.
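   A minimal sketch of these `try` block semantics (nightly Rust with `#![feature(try_blocks)]`; `f`, `g`, and `flag` are illustrative names, not code from this PR):

   ```rust
   #![feature(try_blocks)]

   fn f(flag: bool) -> i32 {
       let res: Result<i32, ()> = try {
           if flag {
               return 0; // exits `f` itself, not just the `try` block
           }
           let e: Result<i32, ()> = Err(());
           e?; // `?` exits only the `try` block: `res` becomes `Err(())`
           1
       };
       res.unwrap_or(-1)
   }

   fn g() {
       for i in 0..10 {
           let _: Result<(), ()> = try {
               if i == 5 {
                   break; // exits the `for` loop, not the `try` block
               }
           };
       }
   }

   fn main() {
       assert_eq!(f(true), 0);
       assert_eq!(f(false), -1);
       g();
   }
   ```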

3. **Support for find-references on control flow keywords**:
   This PR enables users to find all references to a control-flow keyword.
   Commit: 9202a33f81218fb9c2edb5d42e6b4de85b0323a8.
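
The sketch below shows how a caller chooses a context checker with the new hook, using the paths imported elsewhere in this diff (`ide_db::syntax_helpers::node_ext` within the rust-analyzer workspace). `collect_returns` and `collect_try_ops` are hypothetical helpers for illustration, not functions added by the PR:

```rust
use ide_db::syntax_helpers::node_ext::{
    is_closure_or_blk_with_modif, preorder_expr_with_ctx_checker,
};
use syntax::{ast, WalkEvent};

// When collecting `return`s, only closures and async/const blocks open a
// new context, so `return`s inside `try { ... }` are still visited.
fn collect_returns(body: &ast::Expr) -> Vec<ast::ReturnExpr> {
    let ctx = |expr: &ast::Expr| match expr {
        ast::Expr::BlockExpr(b) => matches!(
            b.modifier(),
            Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Const(_))
        ),
        ast::Expr::ClosureExpr(_) => true,
        _ => false,
    };
    let mut returns = Vec::new();
    preorder_expr_with_ctx_checker(body, &ctx, &mut |ev| {
        if let WalkEvent::Enter(ast::Expr::ReturnExpr(ret)) = ev {
            returns.push(ret);
        }
        false // don't skip anything beyond what the context checker skips
    });
    returns
}

// When collecting `?` operators, `try` blocks DO open a new context (the
// default checker), because a `?` inside `try` targets the block itself.
fn collect_try_ops(body: &ast::Expr) -> Vec<ast::TryExpr> {
    let mut ops = Vec::new();
    preorder_expr_with_ctx_checker(body, &is_closure_or_blk_with_modif, &mut |ev| {
        if let WalkEvent::Enter(ast::Expr::TryExpr(t)) = ev {
            ops.push(t);
        }
        false
    });
    ops
}
```

The first checker mirrors what `highlight_exit_points` now does for `return` via `WalkExpandedExprCtx::is_async_const_block_or_closure`, while the second is the default used by `preorder_expr`.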
Merged by bors on 2024-07-22 09:22:13 +00:00 as commit d092f7d78f.
5 changed files with 1209 additions and 194 deletions


@@ -36,10 +36,35 @@ pub fn walk_expr(expr: &ast::Expr, cb: &mut dyn FnMut(ast::Expr)) {
})
}
pub fn is_closure_or_blk_with_modif(expr: &ast::Expr) -> bool {
match expr {
ast::Expr::BlockExpr(block_expr) => {
matches!(
block_expr.modifier(),
Some(
ast::BlockModifier::Async(_)
| ast::BlockModifier::Try(_)
| ast::BlockModifier::Const(_)
)
)
}
ast::Expr::ClosureExpr(_) => true,
_ => false,
}
}
/// Preorder walk all the expression's child expressions preserving events.
/// If the callback returns true on a [`WalkEvent::Enter`], the subtree of the expression will be skipped.
/// Note that the subtree may already be skipped due to the context analysis this function does.
pub fn preorder_expr(start: &ast::Expr, cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool) {
preorder_expr_with_ctx_checker(start, &is_closure_or_blk_with_modif, cb);
}
pub fn preorder_expr_with_ctx_checker(
start: &ast::Expr,
check_ctx: &dyn Fn(&ast::Expr) -> bool,
cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool,
) {
let mut preorder = start.syntax().preorder();
while let Some(event) = preorder.next() {
let node = match event {
@@ -71,20 +96,7 @@ pub fn preorder_expr(start: &ast::Expr, cb: &mut dyn FnMut(WalkEvent<ast::Expr>)
if ast::GenericArg::can_cast(node.kind()) {
preorder.skip_subtree();
} else if let Some(expr) = ast::Expr::cast(node) {
let is_different_context = match &expr {
ast::Expr::BlockExpr(block_expr) => {
matches!(
block_expr.modifier(),
Some(
ast::BlockModifier::Async(_)
| ast::BlockModifier::Try(_)
| ast::BlockModifier::Const(_)
)
)
}
ast::Expr::ClosureExpr(_) => true,
_ => false,
} && expr.syntax() != start.syntax();
let is_different_context = check_ctx(&expr) && expr.syntax() != start.syntax();
let skip = cb(WalkEvent::Enter(expr));
if skip || is_different_context {
preorder.skip_subtree();
@@ -394,7 +406,7 @@ fn for_each_break_expr(
}
}
fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool {
pub fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool {
lt1.as_ref().zip(lt2.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text())
}


@@ -1,18 +1,29 @@
use std::{iter, mem::discriminant};
use crate::{
doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
RangeInfo, TryToNav,
doc_links::token_as_doc_comment,
navigation_target::{self, ToNav},
FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult,
};
use hir::{
AsAssocItem, AssocItem, DescendPreference, FileRange, InFile, MacroFileIdExt, ModuleDef,
Semantics,
};
use hir::{AsAssocItem, AssocItem, DescendPreference, MacroFileIdExt, ModuleDef, Semantics};
use ide_db::{
base_db::{AnchoredPath, FileLoader},
defs::{Definition, IdentClass},
helpers::pick_best_token,
FileId, RootDatabase,
RootDatabase, SymbolKind,
};
use itertools::Itertools;
use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T};
use span::FileId;
use syntax::{
ast::{self, HasLoopBody},
match_ast, AstNode, AstToken,
SyntaxKind::*,
SyntaxNode, SyntaxToken, TextRange, T,
};
// Feature: Go to Definition
//
@@ -68,6 +79,10 @@ pub(crate) fn goto_definition(
));
}
if let Some(navs) = handle_control_flow_keywords(sema, &original_token) {
return Some(RangeInfo::new(original_token.text_range(), navs));
}
let navs = sema
.descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter()
@@ -190,10 +205,221 @@ fn try_filter_trait_item_definition(
}
}
fn handle_control_flow_keywords(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
) -> Option<Vec<NavigationTarget>> {
match token.kind() {
// For `fn` / `loop` / `while` / `for` / `async`, return the keyword itself,
// so that VSCode will find the references when using `ctrl + click`
T![fn] | T![async] | T![try] | T![return] => nav_for_exit_points(sema, token),
T![loop] | T![while] | T![break] | T![continue] => nav_for_break_points(sema, token),
T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
nav_for_break_points(sema, token)
}
_ => None,
}
}
pub(crate) fn find_fn_or_blocks(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
) -> Vec<SyntaxNode> {
let find_ancestors = |token: SyntaxToken| {
let token_kind = token.kind();
for anc in sema.token_ancestors_with_macros(token) {
let node = match_ast! {
match anc {
ast::Fn(fn_) => fn_.syntax().clone(),
ast::ClosureExpr(c) => c.syntax().clone(),
ast::BlockExpr(blk) => {
match blk.modifier() {
Some(ast::BlockModifier::Async(_)) => blk.syntax().clone(),
Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => blk.syntax().clone(),
_ => continue,
}
},
_ => continue,
}
};
return Some(node);
}
None
};
sema.descend_into_macros(DescendPreference::None, token.clone())
.into_iter()
.filter_map(find_ancestors)
.collect_vec()
}
fn nav_for_exit_points(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
) -> Option<Vec<NavigationTarget>> {
let db = sema.db;
let token_kind = token.kind();
let navs = find_fn_or_blocks(sema, token)
.into_iter()
.filter_map(|node| {
let file_id = sema.hir_file_for(&node);
match_ast! {
match node {
ast::Fn(fn_) => {
let mut nav = sema.to_def(&fn_)?.try_to_nav(db)?;
// For async token, we navigate to itself, which triggers
// VSCode to find the references
let focus_token = if matches!(token_kind, T![async]) {
fn_.async_token()?
} else {
fn_.fn_token()?
};
let focus_frange = InFile::new(file_id, focus_token.text_range())
.original_node_file_range_opt(db)
.map(|(frange, _)| frange);
if let Some(FileRange { file_id, range }) = focus_frange {
let contains_frange = |nav: &NavigationTarget| {
nav.file_id == file_id && nav.full_range.contains_range(range)
};
if let Some(def_site) = nav.def_site.as_mut() {
if contains_frange(def_site) {
def_site.focus_range = Some(range);
}
} else if contains_frange(&nav.call_site) {
nav.call_site.focus_range = Some(range);
}
}
Some(nav)
},
ast::ClosureExpr(c) => {
let pipe_tok = c.param_list().and_then(|it| it.pipe_token())?.text_range();
let closure_in_file = InFile::new(file_id, c.into());
Some(expr_to_nav(db, closure_in_file, Some(pipe_tok)))
},
ast::BlockExpr(blk) => {
match blk.modifier() {
Some(ast::BlockModifier::Async(_)) => {
let async_tok = blk.async_token()?.text_range();
let blk_in_file = InFile::new(file_id, blk.into());
Some(expr_to_nav(db, blk_in_file, Some(async_tok)))
},
Some(ast::BlockModifier::Try(_)) if token_kind != T![return] => {
let try_tok = blk.try_token()?.text_range();
let blk_in_file = InFile::new(file_id, blk.into());
Some(expr_to_nav(db, blk_in_file, Some(try_tok)))
},
_ => None,
}
},
_ => None,
}
}
})
.flatten()
.collect_vec();
Some(navs)
}
pub(crate) fn find_loops(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
) -> Option<Vec<ast::Expr>> {
let parent = token.parent()?;
let lbl = match_ast! {
match parent {
ast::BreakExpr(break_) => break_.lifetime(),
ast::ContinueExpr(continue_) => continue_.lifetime(),
_ => None,
}
};
let label_matches =
|it: Option<ast::Label>| match (lbl.as_ref(), it.and_then(|it| it.lifetime())) {
(Some(lbl), Some(it)) => lbl.text() == it.text(),
(None, _) => true,
(Some(_), None) => false,
};
let find_ancestors = |token: SyntaxToken| {
for anc in sema.token_ancestors_with_macros(token).filter_map(ast::Expr::cast) {
let node = match &anc {
ast::Expr::LoopExpr(loop_) if label_matches(loop_.label()) => anc,
ast::Expr::WhileExpr(while_) if label_matches(while_.label()) => anc,
ast::Expr::ForExpr(for_) if label_matches(for_.label()) => anc,
ast::Expr::BlockExpr(blk)
if blk.label().is_some() && label_matches(blk.label()) =>
{
anc
}
_ => continue,
};
return Some(node);
}
None
};
sema.descend_into_macros(DescendPreference::None, token.clone())
.into_iter()
.filter_map(find_ancestors)
.collect_vec()
.into()
}
fn nav_for_break_points(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
) -> Option<Vec<NavigationTarget>> {
let db = sema.db;
let navs = find_loops(sema, token)?
.into_iter()
.filter_map(|expr| {
let file_id = sema.hir_file_for(expr.syntax());
let expr_in_file = InFile::new(file_id, expr.clone());
let focus_range = match expr {
ast::Expr::LoopExpr(loop_) => loop_.loop_token()?.text_range(),
ast::Expr::WhileExpr(while_) => while_.while_token()?.text_range(),
ast::Expr::ForExpr(for_) => for_.for_token()?.text_range(),
// We guarantee that the label exists
ast::Expr::BlockExpr(blk) => blk.label().unwrap().syntax().text_range(),
_ => return None,
};
let nav = expr_to_nav(db, expr_in_file, Some(focus_range));
Some(nav)
})
.flatten()
.collect_vec();
Some(navs)
}
fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> {
def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default()
}
fn expr_to_nav(
db: &RootDatabase,
InFile { file_id, value }: InFile<ast::Expr>,
focus_range: Option<TextRange>,
) -> UpmappingResult<NavigationTarget> {
let kind = SymbolKind::Label;
let value_range = value.syntax().text_range();
let navs = navigation_target::orig_range_with_focus_r(db, file_id, value_range, focus_range);
navs.map(|(hir::FileRangeWrapper { file_id, range }, focus_range)| {
NavigationTarget::from_syntax(file_id, "<expr>".into(), focus_range, range, kind)
})
}
#[cfg(test)]
mod tests {
use ide_db::FileRange;
@@ -2313,4 +2539,200 @@ pub mod prelude {
"#,
);
}
#[test]
fn goto_def_on_return_kw() {
check(
r#"
macro_rules! N {
($i:ident, $x:expr, $blk:expr) => {
for $i in 0..$x {
$blk
}
};
}
fn main() {
fn f() {
// ^^
N!(i, 5, {
println!("{}", i);
return$0;
});
for i in 1..5 {
return;
}
(|| {
return;
})();
}
}
"#,
)
}
#[test]
fn goto_def_on_return_kw_in_closure() {
check(
r#"
macro_rules! N {
($i:ident, $x:expr, $blk:expr) => {
for $i in 0..$x {
$blk
}
};
}
fn main() {
fn f() {
N!(i, 5, {
println!("{}", i);
return;
});
for i in 1..5 {
return;
}
(|| {
// ^
return$0;
})();
}
}
"#,
)
}
#[test]
fn goto_def_on_break_kw() {
check(
r#"
fn main() {
for i in 1..5 {
// ^^^
break$0;
}
}
"#,
)
}
#[test]
fn goto_def_on_continue_kw() {
check(
r#"
fn main() {
for i in 1..5 {
// ^^^
continue$0;
}
}
"#,
)
}
#[test]
fn goto_def_on_break_kw_for_block() {
check(
r#"
fn main() {
'a:{
// ^^^
break$0 'a;
}
}
"#,
)
}
#[test]
fn goto_def_on_break_with_label() {
check(
r#"
fn foo() {
'outer: loop {
// ^^^^
'inner: loop {
'innermost: loop {
}
break$0 'outer;
}
}
}
"#,
);
}
#[test]
fn goto_def_on_return_in_try() {
check(
r#"
fn main() {
fn f() {
// ^^
try {
return$0;
}
return;
}
}
"#,
)
}
#[test]
fn goto_def_on_break_in_try() {
check(
r#"
fn main() {
for i in 1..100 {
// ^^^
let x: Result<(), ()> = try {
break$0;
};
}
}
"#,
)
}
#[test]
fn goto_def_on_return_in_async_block() {
check(
r#"
fn main() {
async {
// ^^^^^
return$0;
}
}
"#,
)
}
#[test]
fn goto_def_on_for_kw() {
check(
r#"
fn main() {
for$0 i in 1..5 {}
// ^^^
}
"#,
)
}
#[test]
fn goto_def_on_fn_kw() {
check(
r#"
fn main() {
fn$0 foo() {}
// ^^
}
"#,
)
}
}


@@ -1,24 +1,25 @@
use std::iter;
use hir::{DescendPreference, FilePosition, FileRange, Semantics};
use hir::{db, DescendPreference, FilePosition, FileRange, HirFileId, InFile, Semantics};
use ide_db::{
defs::{Definition, IdentClass},
helpers::pick_best_token,
search::{FileReference, ReferenceCategory, SearchScope},
syntax_helpers::node_ext::{
for_each_break_and_continue_expr, for_each_tail_expr, full_path_of_name_ref, walk_expr,
eq_label_lt, for_each_tail_expr, full_path_of_name_ref, is_closure_or_blk_with_modif,
preorder_expr_with_ctx_checker,
},
FxHashSet, RootDatabase,
FxHashMap, FxHashSet, RootDatabase,
};
use span::EditionedFileId;
use syntax::{
ast::{self, HasLoopBody},
match_ast, AstNode,
SyntaxKind::{self, IDENT, INT_NUMBER},
SyntaxToken, TextRange, T,
SyntaxToken, TextRange, WalkEvent, T,
};
use crate::{navigation_target::ToNav, NavigationTarget, TryToNav};
use crate::{goto_definition, navigation_target::ToNav, NavigationTarget, TryToNav};
#[derive(PartialEq, Eq, Hash)]
pub struct HighlightedRange {
@@ -72,15 +73,19 @@ pub(crate) fn highlight_related(
// most if not all of these should be re-implemented with information seeded from hir
match token.kind() {
T![?] if config.exit_points && token.parent().and_then(ast::TryExpr::cast).is_some() => {
highlight_exit_points(sema, token)
highlight_exit_points(sema, token).remove(&file_id)
}
T![fn] | T![return] | T![->] if config.exit_points => {
highlight_exit_points(sema, token).remove(&file_id)
}
T![await] | T![async] if config.yield_points => {
highlight_yield_points(sema, token).remove(&file_id)
}
T![fn] | T![return] | T![->] if config.exit_points => highlight_exit_points(sema, token),
T![await] | T![async] if config.yield_points => highlight_yield_points(token),
T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => {
highlight_break_points(token)
highlight_break_points(sema, token).remove(&file_id)
}
T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
highlight_break_points(token)
highlight_break_points(sema, token).remove(&file_id)
}
T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
@@ -276,50 +281,66 @@ fn highlight_references(
}
}
fn highlight_exit_points(
// If `file_id` is None,
pub(crate) fn highlight_exit_points(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
) -> Option<Vec<HighlightedRange>> {
) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
fn hl(
sema: &Semantics<'_, RootDatabase>,
def_ranges: [Option<TextRange>; 2],
body: Option<ast::Expr>,
) -> Option<Vec<HighlightedRange>> {
let mut highlights = Vec::new();
highlights.extend(
def_ranges
.into_iter()
.flatten()
.map(|range| HighlightedRange { category: ReferenceCategory::empty(), range }),
);
let body = body?;
walk_expr(&body, &mut |expr| match expr {
ast::Expr::ReturnExpr(expr) => {
if let Some(token) = expr.return_token() {
highlights.push(HighlightedRange {
category: ReferenceCategory::empty(),
range: token.text_range(),
});
}
def_token: Option<SyntaxToken>,
body: ast::Expr,
) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
highlights.entry(file_id).or_default().insert(hrange);
}
ast::Expr::TryExpr(try_) => {
if let Some(token) = try_.question_mark_token() {
highlights.push(HighlightedRange {
category: ReferenceCategory::empty(),
range: token.text_range(),
});
};
if let Some(tok) = def_token {
let file_id = sema.hir_file_for(&tok.parent()?);
let range = Some(tok.text_range());
push_to_highlights(file_id, range);
}
WalkExpandedExprCtx::new(sema).walk(&body, &mut |_, expr| {
let file_id = sema.hir_file_for(expr.syntax());
let range = match &expr {
ast::Expr::TryExpr(try_) => {
try_.question_mark_token().map(|token| token.text_range())
}
}
ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) | ast::Expr::MacroExpr(_) => {
if sema.type_of_expr(&expr).map_or(false, |ty| ty.original.is_never()) {
highlights.push(HighlightedRange {
category: ReferenceCategory::empty(),
range: expr.syntax().text_range(),
});
ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) | ast::Expr::MacroExpr(_)
if sema.type_of_expr(&expr).map_or(false, |ty| ty.original.is_never()) =>
{
Some(expr.syntax().text_range())
}
}
_ => (),
_ => None,
};
push_to_highlights(file_id, range);
});
// We should handle `return` separately, because when it is used in a `try` block,
// it will exit the enclosing function instead of the block itself.
WalkExpandedExprCtx::new(sema)
.with_check_ctx(&WalkExpandedExprCtx::is_async_const_block_or_closure)
.walk(&body, &mut |_, expr| {
let file_id = sema.hir_file_for(expr.syntax());
let range = match &expr {
ast::Expr::ReturnExpr(expr) => {
expr.return_token().map(|token| token.text_range())
}
_ => None,
};
push_to_highlights(file_id, range);
});
let tail = match body {
ast::Expr::BlockExpr(b) => b.tail_expr(),
e => Some(e),
@@ -327,171 +348,188 @@ fn highlight_exit_points(
if let Some(tail) = tail {
for_each_tail_expr(&tail, &mut |tail| {
let file_id = sema.hir_file_for(tail.syntax());
let range = match tail {
ast::Expr::BreakExpr(b) => b
.break_token()
.map_or_else(|| tail.syntax().text_range(), |tok| tok.text_range()),
_ => tail.syntax().text_range(),
};
highlights.push(HighlightedRange { category: ReferenceCategory::empty(), range })
push_to_highlights(file_id, Some(range));
});
}
Some(highlights)
}
for anc in token.parent_ancestors() {
return match_ast! {
match anc {
ast::Fn(fn_) => hl(sema, [fn_.fn_token().map(|it| it.text_range()), None], fn_.body().map(ast::Expr::BlockExpr)),
ast::ClosureExpr(closure) => hl(
sema,
closure.param_list().map_or([None; 2], |p| [p.l_paren_token().map(|it| it.text_range()), p.r_paren_token().map(|it| it.text_range())]),
closure.body()
),
ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try(_)| ast::BlockModifier::Const(_))) {
hl(
sema,
[block_expr.modifier().and_then(|modifier| match modifier {
ast::BlockModifier::Async(t) | ast::BlockModifier::Try(t) | ast::BlockModifier::Const(t) => Some(t.text_range()),
_ => None,
}), None],
Some(block_expr.into())
)
} else {
continue;
let mut res = FxHashMap::default();
for def in goto_definition::find_fn_or_blocks(sema, &token) {
let new_map = match_ast! {
match def {
ast::Fn(fn_) => fn_.body().and_then(|body| hl(sema, fn_.fn_token(), body.into())),
ast::ClosureExpr(closure) => {
let pipe_tok = closure.param_list().and_then(|p| p.pipe_token());
closure.body().and_then(|body| hl(sema, pipe_tok, body))
},
ast::BlockExpr(blk) => match blk.modifier() {
Some(ast::BlockModifier::Async(t)) => hl(sema, Some(t), blk.into()),
Some(ast::BlockModifier::Try(t)) if token.kind() != T![return] => {
hl(sema, Some(t), blk.into())
},
_ => continue,
},
_ => continue,
}
};
merge_map(&mut res, new_map);
}
None
res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect()
}
fn highlight_break_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
fn hl(
pub(crate) fn highlight_break_points(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
pub(crate) fn hl(
sema: &Semantics<'_, RootDatabase>,
cursor_token_kind: SyntaxKind,
token: Option<SyntaxToken>,
loop_token: Option<SyntaxToken>,
label: Option<ast::Label>,
body: Option<ast::StmtList>,
) -> Option<Vec<HighlightedRange>> {
let mut highlights = Vec::new();
let range = cover_range(
token.map(|tok| tok.text_range()),
expr: ast::Expr,
) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
highlights.entry(file_id).or_default().insert(hrange);
}
};
let label_lt = label.as_ref().and_then(|it| it.lifetime());
if let Some(range) = cover_range(
loop_token.as_ref().map(|tok| tok.text_range()),
label.as_ref().map(|it| it.syntax().text_range()),
);
highlights.extend(
range.map(|range| HighlightedRange { category: ReferenceCategory::empty(), range }),
);
for_each_break_and_continue_expr(label, body, &mut |expr| {
let range: Option<TextRange> = match (cursor_token_kind, expr) {
(T![for] | T![while] | T![loop] | T![break], ast::Expr::BreakExpr(break_)) => {
cover_range(
break_.break_token().map(|it| it.text_range()),
break_.lifetime().map(|it| it.syntax().text_range()),
)
) {
let file_id = loop_token
.and_then(|tok| Some(sema.hir_file_for(&tok.parent()?)))
.unwrap_or_else(|| sema.hir_file_for(label.unwrap().syntax()));
push_to_highlights(file_id, Some(range));
}
WalkExpandedExprCtx::new(sema)
.with_check_ctx(&WalkExpandedExprCtx::is_async_const_block_or_closure)
.walk(&expr, &mut |depth, expr| {
let file_id = sema.hir_file_for(expr.syntax());
// Only highlight the `break`s for `break` and `continue`s for `continue`
let (token, token_lt) = match expr {
ast::Expr::BreakExpr(b) if cursor_token_kind != T![continue] => {
(b.break_token(), b.lifetime())
}
ast::Expr::ContinueExpr(c) if cursor_token_kind != T![break] => {
(c.continue_token(), c.lifetime())
}
_ => return,
};
if !(depth == 1 && token_lt.is_none() || eq_label_lt(&label_lt, &token_lt)) {
return;
}
(
T![for] | T![while] | T![loop] | T![continue],
ast::Expr::ContinueExpr(continue_),
) => cover_range(
continue_.continue_token().map(|it| it.text_range()),
continue_.lifetime().map(|it| it.syntax().text_range()),
),
_ => None,
};
highlights.extend(
range.map(|range| HighlightedRange { category: ReferenceCategory::empty(), range }),
);
});
let text_range = cover_range(
token.map(|it| it.text_range()),
token_lt.map(|it| it.syntax().text_range()),
);
push_to_highlights(file_id, text_range);
});
Some(highlights)
}
let parent = token.parent()?;
let lbl = match_ast! {
match parent {
ast::BreakExpr(b) => b.lifetime(),
ast::ContinueExpr(c) => c.lifetime(),
ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()),
ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()),
ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()),
ast::BlockExpr(b) => Some(b.label().and_then(|it| it.lifetime())?),
_ => return None,
}
};
let lbl = lbl.as_ref();
let label_matches = |def_lbl: Option<ast::Label>| match lbl {
Some(lbl) => {
Some(lbl.text()) == def_lbl.and_then(|it| it.lifetime()).as_ref().map(|it| it.text())
}
None => true,
let Some(loops) = goto_definition::find_loops(sema, &token) else {
return FxHashMap::default();
};
let mut res = FxHashMap::default();
let token_kind = token.kind();
for anc in token.parent_ancestors().flat_map(ast::Expr::cast) {
return match anc {
ast::Expr::LoopExpr(l) if label_matches(l.label()) => hl(
token_kind,
l.loop_token(),
l.label(),
l.loop_body().and_then(|it| it.stmt_list()),
),
ast::Expr::ForExpr(f) if label_matches(f.label()) => hl(
token_kind,
f.for_token(),
f.label(),
f.loop_body().and_then(|it| it.stmt_list()),
),
ast::Expr::WhileExpr(w) if label_matches(w.label()) => hl(
token_kind,
w.while_token(),
w.label(),
w.loop_body().and_then(|it| it.stmt_list()),
),
ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => {
hl(token_kind, None, e.label(), e.stmt_list())
}
for expr in loops {
let new_map = match &expr {
ast::Expr::LoopExpr(l) => hl(sema, token_kind, l.loop_token(), l.label(), expr),
ast::Expr::ForExpr(f) => hl(sema, token_kind, f.for_token(), f.label(), expr),
ast::Expr::WhileExpr(w) => hl(sema, token_kind, w.while_token(), w.label(), expr),
ast::Expr::BlockExpr(e) => hl(sema, token_kind, None, e.label(), expr),
_ => continue,
};
merge_map(&mut res, new_map);
}
None
res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect()
}
fn highlight_yield_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
pub(crate) fn highlight_yield_points(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
fn hl(
sema: &Semantics<'_, RootDatabase>,
async_token: Option<SyntaxToken>,
body: Option<ast::Expr>,
) -> Option<Vec<HighlightedRange>> {
let mut highlights = vec![HighlightedRange {
category: ReferenceCategory::empty(),
range: async_token?.text_range(),
}];
if let Some(body) = body {
walk_expr(&body, &mut |expr| {
if let ast::Expr::AwaitExpr(expr) = expr {
if let Some(token) = expr.await_token() {
highlights.push(HighlightedRange {
category: ReferenceCategory::empty(),
range: token.text_range(),
});
}
}
});
}
) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
let mut push_to_highlights = |file_id, range| {
if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
highlights.entry(file_id).or_default().insert(hrange);
}
};
let async_token = async_token?;
let async_tok_file_id = sema.hir_file_for(&async_token.parent()?);
push_to_highlights(async_tok_file_id, Some(async_token.text_range()));
let Some(body) = body else {
return Some(highlights);
};
WalkExpandedExprCtx::new(sema).walk(&body, &mut |_, expr| {
let file_id = sema.hir_file_for(expr.syntax());
let text_range = match expr {
ast::Expr::AwaitExpr(expr) => expr.await_token(),
ast::Expr::ReturnExpr(expr) => expr.return_token(),
_ => None,
}
.map(|it| it.text_range());
push_to_highlights(file_id, text_range);
});
Some(highlights)
}
for anc in token.parent_ancestors() {
return match_ast! {
let mut res = FxHashMap::default();
for anc in goto_definition::find_fn_or_blocks(sema, &token) {
let new_map = match_ast! {
match anc {
ast::Fn(fn_) => hl(fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)),
ast::Fn(fn_) => hl(sema, fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)),
ast::BlockExpr(block_expr) => {
if block_expr.async_token().is_none() {
continue;
}
hl(block_expr.async_token(), Some(block_expr.into()))
hl(sema, block_expr.async_token(), Some(block_expr.into()))
},
ast::ClosureExpr(closure) => hl(closure.async_token(), closure.body()),
ast::ClosureExpr(closure) => hl(sema, closure.async_token(), closure.body()),
_ => continue,
}
};
merge_map(&mut res, new_map);
}
None
res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect()
}
fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> {
@@ -511,6 +549,115 @@ fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSe
.collect()
}
fn original_frange(
db: &dyn db::ExpandDatabase,
file_id: HirFileId,
text_range: Option<TextRange>,
) -> Option<FileRange> {
InFile::new(file_id, text_range?).original_node_file_range_opt(db).map(|(frange, _)| frange)
}
fn merge_map(
res: &mut FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>,
new: Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>>,
) {
let Some(new) = new else {
return;
};
new.into_iter().for_each(|(file_id, ranges)| {
res.entry(file_id).or_default().extend(ranges);
});
}
/// Preorder walk all the expression's child expressions.
/// For macro calls, the callback will be called on the expanded expressions after
/// visiting the macro call itself.
struct WalkExpandedExprCtx<'a> {
sema: &'a Semantics<'a, RootDatabase>,
depth: usize,
check_ctx: &'static dyn Fn(&ast::Expr) -> bool,
}
impl<'a> WalkExpandedExprCtx<'a> {
fn new(sema: &'a Semantics<'a, RootDatabase>) -> Self {
Self { sema, depth: 0, check_ctx: &is_closure_or_blk_with_modif }
}
fn with_check_ctx(&self, check_ctx: &'static dyn Fn(&ast::Expr) -> bool) -> Self {
Self { check_ctx, ..*self }
}
fn walk(&mut self, expr: &ast::Expr, cb: &mut dyn FnMut(usize, ast::Expr)) {
preorder_expr_with_ctx_checker(expr, self.check_ctx, &mut |ev: WalkEvent<ast::Expr>| {
match ev {
syntax::WalkEvent::Enter(expr) => {
cb(self.depth, expr.clone());
if Self::should_change_depth(&expr) {
self.depth += 1;
}
if let ast::Expr::MacroExpr(expr) = expr {
if let Some(expanded) =
expr.macro_call().and_then(|call| self.sema.expand(&call))
{
match_ast! {
match expanded {
ast::MacroStmts(it) => {
self.handle_expanded(it, cb);
},
ast::Expr(it) => {
self.walk(&it, cb);
},
_ => {}
}
}
}
}
}
syntax::WalkEvent::Leave(expr) if Self::should_change_depth(&expr) => {
self.depth -= 1;
}
_ => {}
}
false
})
}
fn handle_expanded(&mut self, expanded: ast::MacroStmts, cb: &mut dyn FnMut(usize, ast::Expr)) {
if let Some(expr) = expanded.expr() {
self.walk(&expr, cb);
}
for stmt in expanded.statements() {
if let ast::Stmt::ExprStmt(stmt) = stmt {
if let Some(expr) = stmt.expr() {
self.walk(&expr, cb);
}
}
}
}
fn should_change_depth(expr: &ast::Expr) -> bool {
match expr {
ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => true,
ast::Expr::BlockExpr(blk) if blk.label().is_some() => true,
_ => false,
}
}
fn is_async_const_block_or_closure(expr: &ast::Expr) -> bool {
match expr {
ast::Expr::BlockExpr(b) => matches!(
b.modifier(),
Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Const(_))
),
ast::Expr::ClosureExpr(_) => true,
_ => false,
}
}
}
#[cfg(test)]
mod tests {
use itertools::Itertools;
@@ -897,6 +1044,7 @@ impl Never {
}
macro_rules! never {
() => { never() }
// ^^^^^^^
}
fn never() -> ! { loop {} }
fn foo() ->$0 u32 {
@@ -1723,4 +1871,140 @@ fn test() {
"#,
);
}
#[test]
fn return_in_macros() {
check(
r#"
macro_rules! N {
($i:ident, $x:expr, $blk:expr) => {
for $i in 0..$x {
$blk
}
};
}
fn main() {
fn f() {
// ^^
N!(i, 5, {
println!("{}", i);
return$0;
// ^^^^^^
});
for i in 1..5 {
return;
// ^^^^^^
}
(|| {
return;
})();
}
}
"#,
)
}
#[test]
fn return_in_closure() {
check(
r#"
macro_rules! N {
($i:ident, $x:expr, $blk:expr) => {
for $i in 0..$x {
$blk
}
};
}
fn main() {
fn f() {
N!(i, 5, {
println!("{}", i);
return;
});
for i in 1..5 {
return;
}
(|| {
// ^
return$0;
// ^^^^^^
})();
}
}
"#,
)
}
#[test]
fn return_in_try() {
check(
r#"
fn main() {
fn f() {
// ^^
try {
return$0;
// ^^^^^^
}
return;
// ^^^^^^
}
}
"#,
)
}
#[test]
fn break_in_try() {
check(
r#"
fn main() {
for i in 1..100 {
// ^^^
let x: Result<(), ()> = try {
break$0;
// ^^^^^
};
}
}
"#,
)
}
#[test]
fn no_highlight_on_return_in_macro_call() {
check(
r#"
//- minicore:include
//- /lib.rs
macro_rules! M {
($blk:expr) => {
$blk
};
}
fn main() {
fn f() {
// ^^
M!({ return$0; });
// ^^^^^^
// ^^^^^^^^^^^^^^^
include!("a.rs")
// ^^^^^^^^^^^^^^^^
}
}
//- /a.rs
{
return;
}
"#,
)
}
}


@@ -152,7 +152,7 @@ impl NavigationTarget {
)
}
fn from_syntax(
pub(crate) fn from_syntax(
file_id: FileId,
name: SmolStr,
focus_range: Option<TextRange>,
@@ -710,7 +710,7 @@ impl<T> IntoIterator for UpmappingResult<T> {
}
impl<T> UpmappingResult<T> {
fn map<U>(self, f: impl Fn(T) -> U) -> UpmappingResult<U> {
pub(crate) fn map<U>(self, f: impl Fn(T) -> U) -> UpmappingResult<U> {
UpmappingResult { call_site: f(self.call_site), def_site: self.def_site.map(f) }
}
}
@@ -732,13 +732,13 @@ fn orig_range_with_focus(
)
}
fn orig_range_with_focus_r(
pub(crate) fn orig_range_with_focus_r(
db: &RootDatabase,
hir_file: HirFileId,
value: TextRange,
name: Option<TextRange>,
focus_range: Option<TextRange>,
) -> UpmappingResult<(FileRange, Option<TextRange>)> {
let Some(name) = name else { return orig_range_r(db, hir_file, value) };
let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) };
let call_kind =
|| db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind;


@@ -24,7 +24,7 @@ use syntax::{
SyntaxNode, TextRange, TextSize, T,
};
use crate::{FilePosition, NavigationTarget, TryToNav};
use crate::{highlight_related, FilePosition, HighlightedRange, NavigationTarget, TryToNav};
#[derive(Debug, Clone)]
pub struct ReferenceSearchResult {
@@ -103,6 +103,11 @@ pub(crate) fn find_all_refs(
}
};
// Find references for control-flow keywords.
if let Some(res) = handle_control_flow_keywords(sema, position) {
return Some(vec![res]);
}
match name_for_constructor_search(&syntax, position) {
Some(name) => {
let def = match NameClass::classify(sema, &name)? {
@@ -296,6 +301,37 @@ fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool {
}).unwrap_or(false)
}
fn handle_control_flow_keywords(
sema: &Semantics<'_, RootDatabase>,
FilePosition { file_id, offset }: FilePosition,
) -> Option<ReferenceSearchResult> {
let file = sema.parse_guess_edition(file_id);
let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword())?;
let references = match token.kind() {
T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token),
T![async] => highlight_related::highlight_yield_points(sema, token),
T![loop] | T![while] | T![break] | T![continue] => {
highlight_related::highlight_break_points(sema, token)
}
T![for] if token.parent().and_then(ast::ForExpr::cast).is_some() => {
highlight_related::highlight_break_points(sema, token)
}
_ => return None,
}
.into_iter()
.map(|(file_id, ranges)| {
let ranges = ranges
.into_iter()
.map(|HighlightedRange { range, category }| (range, category))
.collect();
(file_id.into(), ranges)
})
.collect();
Some(ReferenceSearchResult { declaration: None, references })
}
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
@@ -1200,7 +1236,7 @@ impl Foo {
let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
let mut actual = String::new();
for refs in refs {
for mut refs in refs {
actual += "\n\n";
if let Some(decl) = refs.declaration {
@@ -1211,7 +1247,8 @@ impl Foo {
actual += "\n\n";
}
for (file_id, references) in &refs.references {
for (file_id, references) in &mut refs.references {
references.sort_by_key(|(range, _)| range.start());
for (range, category) in references {
format_to!(actual, "{:?} {:?}", file_id, range);
for (name, _flag) in category.iter_names() {
@@ -2187,4 +2224,264 @@ fn test() {
"#]],
);
}
#[test]
fn goto_ref_fn_kw() {
check(
r#"
macro_rules! N {
($i:ident, $x:expr, $blk:expr) => {
for $i in 0..$x {
$blk
}
};
}
fn main() {
$0fn f() {
N!(i, 5, {
println!("{}", i);
return;
});
for i in 1..5 {
return;
}
(|| {
return;
})();
}
}
"#,
expect![[r#"
FileId(0) 136..138
FileId(0) 207..213
FileId(0) 264..270
"#]],
)
}
#[test]
fn goto_ref_exit_points() {
check(
r#"
fn$0 foo() -> u32 {
if true {
return 0;
}
0?;
0xDEAD_BEEF
}
"#,
expect![[r#"
FileId(0) 0..2
FileId(0) 40..46
FileId(0) 62..63
FileId(0) 69..80
"#]],
);
}
#[test]
fn test_ref_yield_points() {
check(
r#"
pub async$0 fn foo() {
let x = foo()
.await
.await;
|| { 0.await };
(async { 0.await }).await
}
"#,
expect![[r#"
FileId(0) 4..9
FileId(0) 48..53
FileId(0) 63..68
FileId(0) 114..119
"#]],
);
}
#[test]
fn goto_ref_for_kw() {
check(
r#"
fn main() {
$0for i in 1..5 {
break;
continue;
}
}
"#,
expect![[r#"
FileId(0) 16..19
FileId(0) 40..45
FileId(0) 55..63
"#]],
)
}
#[test]
fn goto_ref_on_break_kw() {
check(
r#"
fn main() {
for i in 1..5 {
$0break;
continue;
}
}
"#,
expect![[r#"
FileId(0) 16..19
FileId(0) 40..45
"#]],
)
}
#[test]
fn goto_ref_on_break_kw_for_block() {
check(
r#"
fn main() {
'a:{
$0break 'a;
}
}
"#,
expect![[r#"
FileId(0) 16..19
FileId(0) 29..37
"#]],
)
}
#[test]
fn goto_ref_on_break_with_label() {
check(
r#"
fn foo() {
'outer: loop {
break;
'inner: loop {
'innermost: loop {
}
$0break 'outer;
break;
}
break;
}
}
"#,
expect![[r#"
FileId(0) 15..27
FileId(0) 39..44
FileId(0) 127..139
FileId(0) 178..183
"#]],
);
}
#[test]
fn goto_ref_on_return_in_try() {
check(
r#"
fn main() {
fn f() {
try {
$0return;
}
return;
}
return;
}
"#,
expect![[r#"
FileId(0) 16..18
FileId(0) 51..57
FileId(0) 78..84
"#]],
)
}
#[test]
fn goto_ref_on_break_in_try() {
check(
r#"
fn main() {
for i in 1..100 {
let x: Result<(), ()> = try {
$0break;
};
}
}
"#,
expect![[r#"
FileId(0) 16..19
FileId(0) 84..89
"#]],
)
}
#[test]
fn goto_ref_on_return_in_async_block() {
check(
r#"
fn main() {
$0async {
return;
}
}
"#,
expect![[r#"
FileId(0) 16..21
FileId(0) 32..38
"#]],
)
}
#[test]
fn goto_ref_on_return_in_macro_call() {
check(
r#"
//- minicore:include
//- /lib.rs
macro_rules! M {
($blk:expr) => {
fn f() {
$blk
}
$blk
};
}
fn main() {
M!({
return$0;
});
f();
include!("a.rs")
}
//- /a.rs
{
return;
}
"#,
expect![[r#"
FileId(0) 46..48
FileId(0) 106..108
FileId(0) 122..149
FileId(0) 135..141
FileId(0) 165..181
FileId(1) 6..12
"#]],
)
}
}