Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-29 06:23:25 +00:00)
Map first and last node in original_range

parent 5202b0ecba
commit 47f2b5d0d9

1 changed file with 34 additions and 15 deletions
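In short, `original_range` used to map a single descendant token up through the macro expansion and return only that token's range; with this commit it also handles nodes, by mapping their first and last tokens up and returning the union of the two mapped ranges. The sketch below illustrates just that union step; it is a minimal model that assumes plain `std::ops::Range<u32>` offsets in place of ra_syntax's `TextRange`, not rust-analyzer's actual API.

// Minimal sketch of the union step (assumption: Range<u32> stands in for TextRange).
use std::ops::Range;

// Smallest range covering both inputs, mirroring the union_range helper added in the diff.
fn union_range(a: Range<u32>, b: Range<u32>) -> Range<u32> {
    a.start.min(b.start)..a.end.max(b.end)
}

fn main() {
    // If the node's first token maps back to 10..14 in the original file and its
    // last token maps back to 30..35, the node's mapped range is the union.
    assert_eq!(union_range(10..14, 30..35), 10..35);
}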
@@ -3,7 +3,7 @@ use std::iter::successors;
 
 use hir::InFile;
 use ra_db::FileId;
-use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken};
+use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange};
 
 use crate::{db::RootDatabase, FileRange};
 
@@ -17,26 +17,45 @@ pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange {
         }
         Some(it) => it,
     };
-    // FIXME: the following completely wrong.
-    //
-    // *First*, we should try to map first and last tokens of node, and, if that
-    // fails, return the range of the overall macro expansions.
-    //
-    // *Second*, we should handle recurside macro expansions
+    // FIXME: We should handle recurside macro expansions
 
-    let token = node
-        .value
-        .descendants_with_tokens()
-        .filter_map(|it| it.into_token())
-        .find_map(|it| expansion.map_token_up(node.with_value(&it)));
-
-    match token {
-        Some(it) => {
-            FileRange { file_id: it.file_id.original_file(db), range: it.value.text_range() }
+    let range = node.value.descendants_with_tokens().find_map(|it| {
+        match it.as_token() {
+            // FIXME: Remove this branch after all `tt::TokenTree`s have a proper `TokenId`,
+            // and return the range of the overall macro expansions if mapping first and last tokens fails.
+            Some(token) => {
+                let token = expansion.map_token_up(node.with_value(&token))?;
+                Some(token.with_value(token.value.text_range()))
+            }
+            None => {
+                // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens
+                let n = it.into_node()?;
+                let first = expansion.map_token_up(node.with_value(&n.first_token()?))?;
+                let last = expansion.map_token_up(node.with_value(&n.last_token()?))?;
+
+                // FIXME: Is is possible ?
+                if first.file_id != last.file_id {
+                    return None;
+                }
+
+                // FIXME: Add union method in TextRange
+                let range = union_range(first.value.text_range(), last.value.text_range());
+                Some(first.with_value(range))
+            }
         }
+    });
+
+    return match range {
+        Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.value },
         None => {
             FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
         }
+    };
+
+    fn union_range(a: TextRange, b: TextRange) -> TextRange {
+        let start = a.start().min(b.start());
+        let end = a.end().max(b.end());
+        TextRange::from_to(start, end)
     }
 }
 
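To make the control flow of the new `find_map` closure concrete, here is a self-contained model of the lookup strategy. It is only a sketch under simplified, invented types: `Element`, `Mapped`, and this `map_token_up` are stand-ins for this example, not rust-analyzer's types. Tokens are mapped up directly; nodes are mapped via their first and last tokens; any `None` produced by `?` makes `find_map` fall through to the next descendant.

use std::ops::Range;

// A syntax element of the macro-expanded file, reduced to token ids (stand-in type).
enum Element {
    Token(u32),     // a single token id
    Node(u32, u32), // first and last token ids of a node
}

// Result of mapping something back to an original file (stand-in type).
#[derive(Debug, PartialEq)]
struct Mapped {
    file_id: u32,
    range: Range<u32>,
}

// Stand-in for the diff's expansion.map_token_up: pretend only even token ids map back.
fn map_token_up(token: u32) -> Option<Mapped> {
    if token % 2 != 0 {
        return None;
    }
    Some(Mapped { file_id: 7, range: (token * 10)..(token * 10 + 4) })
}

// Walk the descendants; the first element that maps successfully wins.
fn original_range(descendants: &[Element]) -> Option<Mapped> {
    descendants.iter().find_map(|el| match el {
        // Tokens are mapped up directly.
        Element::Token(t) => map_token_up(*t),
        // Nodes are mapped via their first and last tokens; a failed `?`
        // returns None from the closure, so find_map tries the next descendant.
        Element::Node(first, last) => {
            let first = map_token_up(*first)?;
            let last = map_token_up(*last)?;
            if first.file_id != last.file_id {
                return None; // tokens mapped into different files: give up on this node
            }
            Some(Mapped {
                file_id: first.file_id,
                // Union of the two mapped ranges, like the diff's union_range helper.
                range: first.range.start.min(last.range.start)
                    ..first.range.end.max(last.range.end),
            })
        }
    })
}

fn main() {
    // The node's first and last tokens (2 and 6) both map back, so its union
    // range 20..64 is returned; the later token is never consulted.
    let descendants = [Element::Node(2, 6), Element::Token(4)];
    assert_eq!(original_range(&descendants), Some(Mapped { file_id: 7, range: 20..64 }));
}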