fix: sort references in tests

roife 2024-07-11 03:39:49 +08:00
parent 55cd8ab904
commit 1b59cf2d52
3 changed files with 97 additions and 22 deletions
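
The gist of the change: the highlight_* helpers now collect ranges into an FxHashSet, deduplicating ranges that macro expansion can produce more than once, and the find-all-references test harness sorts each file's results by range start so the expect![[...]] output stays deterministic. Below is a minimal sketch of both ideas, using std's HashMap/HashSet in place of FxHashMap/FxHashSet and (start, end) tuples in place of HighlightedRange; the names and values are illustrative only, not the crate's actual types.

    use std::collections::{HashMap, HashSet};

    type FileId = u32;

    fn main() {
        // Collect highlight ranges per file into a set: duplicate ranges
        // (e.g. the same token reached twice through a macro call) collapse.
        let mut highlights: HashMap<FileId, HashSet<(u32, u32)>> = HashMap::new();
        for (file_id, range) in [(0, (62, 63)), (0, (40, 46)), (0, (62, 63))] {
            highlights.entry(file_id).or_default().insert(range);
        }

        // Set iteration order is unspecified, so the test side sorts each
        // file's ranges by start offset before rendering them.
        for (file_id, ranges) in highlights {
            let mut ranges: Vec<_> = ranges.into_iter().collect();
            ranges.sort_by_key(|&(start, _)| start);
            for (start, end) in ranges {
                println!("FileId({file_id}) {start}..{end}");
            }
        }
    }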

View file

@@ -386,7 +386,7 @@ fn nav_for_break_points(
         ast::Expr::LoopExpr(loop_) => loop_.loop_token()?.text_range(),
         ast::Expr::WhileExpr(while_) => while_.while_token()?.text_range(),
         ast::Expr::ForExpr(for_) => for_.for_token()?.text_range(),
-        // We garentee that the label exists
+        // We guarantee that the label exists
         ast::Expr::BlockExpr(blk) => blk.label().unwrap().syntax().text_range(),
         _ => return None,
     };

View file

@@ -286,13 +286,13 @@ pub(crate) fn highlight_exit_points(
         sema: &Semantics<'_, RootDatabase>,
         def_token: Option<SyntaxToken>,
         body: ast::Expr,
-    ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> {
-        let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default();
+    ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+        let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
 
         let mut push_to_highlights = |file_id, range| {
             if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
                 let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
-                highlights.entry(file_id).or_default().push(hrange);
+                highlights.entry(file_id).or_default().insert(hrange);
             }
         };
 
@@ -379,7 +379,7 @@ pub(crate) fn highlight_exit_points(
         merge_map(&mut res, new_map);
     }
 
-    res
+    res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect()
 }
 
 pub(crate) fn highlight_break_points(
@@ -392,13 +392,13 @@ pub(crate) fn highlight_break_points(
         loop_token: Option<SyntaxToken>,
         label: Option<ast::Label>,
         expr: ast::Expr,
-    ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> {
-        let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default();
+    ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+        let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
 
         let mut push_to_highlights = |file_id, range| {
             if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
                 let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
-                highlights.entry(file_id).or_default().push(hrange);
+                highlights.entry(file_id).or_default().insert(hrange);
             }
         };
 
@@ -445,11 +445,12 @@ pub(crate) fn highlight_break_points(
         Some(highlights)
     }
 
-    let Some(loops) = goto_definition::find_loops(sema, &token) else {
-        return FxHashMap::default();
-    };
     let mut res = FxHashMap::default();
     let token_kind = token.kind();
+    let Some(loops) = goto_definition::find_loops(sema, &token) else {
+        return res;
+    };
+
     for expr in loops {
         let new_map = match &expr {
             ast::Expr::LoopExpr(l) => hl(sema, token_kind, l.loop_token(), l.label(), expr),
@@ -461,7 +462,7 @@ pub(crate) fn highlight_break_points(
         merge_map(&mut res, new_map);
     }
 
-    res
+    res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect()
 }
 
 pub(crate) fn highlight_yield_points(
@@ -472,13 +473,13 @@ pub(crate) fn highlight_yield_points(
         sema: &Semantics<'_, RootDatabase>,
         async_token: Option<SyntaxToken>,
         body: Option<ast::Expr>,
-    ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> {
-        let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default();
+    ) -> Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>> {
+        let mut highlights: FxHashMap<EditionedFileId, FxHashSet<_>> = FxHashMap::default();
 
         let mut push_to_highlights = |file_id, range| {
             if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
                 let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
-                highlights.entry(file_id).or_default().push(hrange);
+                highlights.entry(file_id).or_default().insert(hrange);
             }
         };
 
@@ -524,7 +525,7 @@ pub(crate) fn highlight_yield_points(
         merge_map(&mut res, new_map);
     }
 
-    res
+    res.into_iter().map(|(file_id, ranges)| (file_id, ranges.into_iter().collect())).collect()
 }
 
 fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> {
@@ -553,8 +554,8 @@ fn original_frange(
 }
 
 fn merge_map(
-    res: &mut FxHashMap<EditionedFileId, Vec<HighlightedRange>>,
-    new: Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>>,
+    res: &mut FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>,
+    new: Option<FxHashMap<EditionedFileId, FxHashSet<HighlightedRange>>>,
 ) {
     let Some(new) = new else {
         return;
@@ -1967,6 +1968,38 @@ fn main() {
     };
 }
 }
+"#,
+        )
+    }
+
+    #[test]
+    fn no_highlight_on_return_in_macro_call() {
+        check(
+            r#"
+//- minicore:include
+//- /lib.rs
+macro_rules! M {
+    ($blk:expr) => {
+        $blk
+    };
+}
+
+fn main() {
+    fn f() {
+ // ^^
+        M!({ return$0; });
+ //          ^^^^^^
+ // ^^^^^^^^^^^^^^^
+
+        include!("a.rs")
+ // ^^^^^^^^^^^^^^^^
+    }
+}
+
+//- /a.rs
+{
+    return;
+}
 "#,
         )
     }

View file

@@ -1236,7 +1236,7 @@ impl Foo {
         let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
 
         let mut actual = String::new();
-        for refs in refs {
+        for mut refs in refs {
             actual += "\n\n";
 
             if let Some(decl) = refs.declaration {
@@ -1247,7 +1247,8 @@ impl Foo {
                 actual += "\n\n";
             }
 
-            for (file_id, references) in &refs.references {
+            for (file_id, references) in &mut refs.references {
+                references.sort_by_key(|(range, _)| range.start());
                 for (range, category) in references {
                     format_to!(actual, "{:?} {:?}", file_id, range);
                     for (name, _flag) in category.iter_names() {
@@ -2276,8 +2277,8 @@ fn$0 foo() -> u32 {
 "#,
             expect![[r#"
                 FileId(0) 0..2
-                FileId(0) 62..63
                 FileId(0) 40..46
+                FileId(0) 62..63
                 FileId(0) 69..80
             "#]],
         );
@@ -2297,8 +2298,8 @@ pub async$0 fn foo() {
 "#,
             expect![[r#"
                 FileId(0) 4..9
-                FileId(0) 63..68
                 FileId(0) 48..53
+                FileId(0) 63..68
                 FileId(0) 114..119
             "#]],
         );
@@ -2442,4 +2443,45 @@ fn main() {
             "#]],
         )
     }
+
+    #[test]
+    fn goto_ref_on_return_in_macro_call() {
+        check(
+            r#"
+//- minicore:include
+//- /lib.rs
+macro_rules! M {
+    ($blk:expr) => {
+        fn f() {
+            $blk
+        }
+
+        $blk
+    };
+}
+
+fn main() {
+    M!({
+        return$0;
+    });
+
+    f();
+    include!("a.rs")
+}
+
+//- /a.rs
+{
+    return;
+}
+"#,
+            expect![[r#"
+                FileId(0) 46..48
+                FileId(0) 106..108
+                FileId(0) 122..149
+                FileId(0) 135..141
+                FileId(0) 165..181
+                FileId(1) 6..12
+            "#]],
+        )
+    }
 }