Mirror of https://github.com/rust-lang/rust-analyzer (synced 2025-01-12 21:28:51 +00:00)
Make original_range a method on InFile<&SyntaxNode>
commit 4943ef085d (parent b01981e636)
5 changed files with 88 additions and 89 deletions
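At call sites the change is mechanical: the free function previously re-exported as hir::original_range becomes an inherent method on InFile<&SyntaxNode>, defined next to InFile itself. A minimal sketch of the migration (hypothetical caller; a database reference and a node: InFile<&SyntaxNode> are assumed to be in scope, and hir-level callers reach the AstDatabase via db.upcast(), as the hunks below show):

    // Before this commit: free function, re-exported as hir::original_range.
    // let frange: FileRange = original_range(db, node);

    // After this commit: inherent method on InFile<&SyntaxNode>.
    let frange: FileRange = node.original_file_range(db);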
@@ -39,7 +39,7 @@ pub use crate::{
         Struct, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
     },
     has_source::HasSource,
-    semantics::{original_range, PathResolution, Semantics, SemanticsScope},
+    semantics::{PathResolution, Semantics, SemanticsScope},
 };
 
 pub use hir_def::{
@@ -13,10 +13,7 @@ use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo};
 use hir_ty::associated_type_shorthand_candidates;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{
-    algo::{find_node_at_offset, skip_trivia_token},
-    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
-};
+use syntax::{algo::find_node_at_offset, ast, AstNode, SyntaxNode, SyntaxToken, TextSize};
 
 use crate::{
     code_model::Access,
@@ -25,7 +22,7 @@ use crate::{
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, SourceAnalyzer},
     AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef,
-    Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
+    Module, ModuleDef, Name, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -372,7 +369,7 @@ impl<'db> SemanticsImpl<'db> {
 
     fn original_range(&self, node: &SyntaxNode) -> FileRange {
         let node = self.find_file(node.clone());
-        original_range(self.db, node.as_ref())
+        node.as_ref().original_file_range(self.db.upcast())
     }
 
     fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
@@ -380,7 +377,7 @@ impl<'db> SemanticsImpl<'db> {
         let root = self.db.parse_or_expand(src.file_id).unwrap();
         let node = src.value.to_node(&root);
         self.cache(root, src.file_id);
-        original_range(self.db, src.with_value(&node))
+        src.with_value(&node).original_file_range(self.db.upcast())
     }
 
     fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
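The public Semantics entry points above keep their signatures; only the bodies change, delegating to the new method and upcasting the HirDatabase to the AstDatabase that hir_expand expects. A minimal ide-side sketch of how this is typically reached (hypothetical helper; it assumes the Semantics::new/parse/expand API of this era and an ide_db::RootDatabase):

    use syntax::{ast, AstNode};

    fn expansion_ranges(
        db: &ide_db::RootDatabase,
        file_id: ide_db::base_db::FileId,
    ) -> Vec<ide_db::base_db::FileRange> {
        let sema = hir::Semantics::new(db);
        let source_file = sema.parse(file_id);
        let mut ranges = Vec::new();
        if let Some(macro_call) = source_file.syntax().descendants().find_map(ast::MacroCall::cast) {
            if let Some(expanded) = sema.expand(&macro_call) {
                for node in expanded.descendants() {
                    // Maps a node produced by macro expansion back to a range in an
                    // on-disk file; internally this now goes through
                    // InFile::original_file_range rather than the removed free function.
                    ranges.push(sema.original_range(&node));
                }
            }
        }
        ranges
    }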
@@ -771,68 +768,3 @@ impl<'a> SemanticsScope<'a> {
         resolve_hir_path(self.db, &self.resolver, &path)
     }
 }
-
-// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
-pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
-    if let Some(range) = original_range_opt(db, node) {
-        let original_file = range.file_id.original_file(db.upcast());
-        if range.file_id == original_file.into() {
-            return FileRange { file_id: original_file, range: range.value };
-        }
-
-        log::error!("Fail to mapping up more for {:?}", range);
-        return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value };
-    }
-
-    // Fall back to whole macro call
-    if let Some(expansion) = node.file_id.expansion_info(db.upcast()) {
-        if let Some(call_node) = expansion.call_node() {
-            return FileRange {
-                file_id: call_node.file_id.original_file(db.upcast()),
-                range: call_node.value.text_range(),
-            };
-        }
-    }
-
-    FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() }
-}
-
-fn original_range_opt(
-    db: &dyn HirDatabase,
-    node: InFile<&SyntaxNode>,
-) -> Option<InFile<TextRange>> {
-    let expansion = node.file_id.expansion_info(db.upcast())?;
-
-    // the input node has only one token ?
-    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
-        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
-
-    Some(node.value.descendants().find_map(|it| {
-        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
-        let first = ascend_call_token(db, &expansion, node.with_value(first))?;
-
-        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
-        let last = ascend_call_token(db, &expansion, node.with_value(last))?;
-
-        if (!single && first == last) || (first.file_id != last.file_id) {
-            return None;
-        }
-
-        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
-    })?)
-}
-
-fn ascend_call_token(
-    db: &dyn HirDatabase,
-    expansion: &ExpansionInfo,
-    token: InFile<SyntaxToken>,
-) -> Option<InFile<SyntaxToken>> {
-    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
-    if origin != Origin::Call {
-        return None;
-    }
-    if let Some(info) = mapped.file_id.expansion_info(db.upcast()) {
-        return ascend_call_token(db, &info, mapped);
-    }
-    Some(mapped)
-}
@@ -20,11 +20,11 @@ pub use mbe::{ExpandError, ExpandResult};
 use std::hash::Hash;
 use std::sync::Arc;
 
-use base_db::{impl_intern_key, salsa, CrateId, FileId};
+use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange};
 use syntax::{
-    algo,
+    algo::{self, skip_trivia_token},
     ast::{self, AstNode},
-    SyntaxNode, SyntaxToken, TextSize,
+    Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -445,6 +445,72 @@ impl InFile<SyntaxNode> {
     }
 }
 
+impl<'a> InFile<&'a SyntaxNode> {
+    pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+        if let Some(range) = original_range_opt(db, self) {
+            let original_file = range.file_id.original_file(db);
+            if range.file_id == original_file.into() {
+                return FileRange { file_id: original_file, range: range.value };
+            }
+
+            log::error!("Fail to mapping up more for {:?}", range);
+            return FileRange { file_id: range.file_id.original_file(db), range: range.value };
+        }
+
+        // Fall back to whole macro call
+        if let Some(expansion) = self.file_id.expansion_info(db) {
+            if let Some(call_node) = expansion.call_node() {
+                return FileRange {
+                    file_id: call_node.file_id.original_file(db),
+                    range: call_node.value.text_range(),
+                };
+            }
+        }
+
+        FileRange { file_id: self.file_id.original_file(db), range: self.value.text_range() }
+    }
+}
+
+fn original_range_opt(
+    db: &dyn db::AstDatabase,
+    node: InFile<&SyntaxNode>,
+) -> Option<InFile<TextRange>> {
+    let expansion = node.file_id.expansion_info(db)?;
+
+    // the input node has only one token ?
+    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
+        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
+
+    Some(node.value.descendants().find_map(|it| {
+        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
+        let first = ascend_call_token(db, &expansion, node.with_value(first))?;
+
+        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
+        let last = ascend_call_token(db, &expansion, node.with_value(last))?;
+
+        if (!single && first == last) || (first.file_id != last.file_id) {
+            return None;
+        }
+
+        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
+    })?)
+}
+
+fn ascend_call_token(
+    db: &dyn db::AstDatabase,
+    expansion: &ExpansionInfo,
+    token: InFile<SyntaxToken>,
+) -> Option<InFile<SyntaxToken>> {
+    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
+    if origin != Origin::Call {
+        return None;
+    }
+    if let Some(info) = mapped.file_id.expansion_info(db) {
+        return ascend_call_token(db, &info, mapped);
+    }
+    Some(mapped)
+}
+
 impl InFile<SyntaxToken> {
     pub fn ancestors_with_macros(
         self,
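The block above is the code removed from the hir crate, re-homed next to InFile and switched from &dyn HirDatabase to &dyn db::AstDatabase, which is why the db.upcast() calls disappear. The strategy is unchanged: map the node's first and last non-trivia tokens up through macro call sites (ascend_call_token only follows Origin::Call), return the covering range when both land in the same file, and otherwise fall back to the whole macro call or to the node's own range. A minimal usage sketch (hypothetical helper; the function name and the idea of starting from an InFile<SyntaxNode> are assumptions):

    use base_db::FileRange;
    use hir_expand::{db::AstDatabase, InFile};
    use syntax::SyntaxNode;

    fn to_original_range(db: &dyn AstDatabase, node: InFile<SyntaxNode>) -> FileRange {
        // For a node from a real file this is just its own range; for a node from a
        // macro expansion it ascends to the call site, falling back to the whole
        // macro call when precise token mapping is not possible.
        node.as_ref().original_file_range(db)
    }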
@@ -1,7 +1,7 @@
 //! FIXME: write short doc here
 
 use either::Either;
-use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource};
+use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource};
 use ide_db::base_db::{FileId, SourceDatabase};
 use ide_db::{defs::Definition, RootDatabase};
 use syntax::{
@@ -62,7 +62,8 @@ impl NavigationTarget {
     pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
         let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
         if let Some(src) = module.declaration_source(db) {
-            let frange = original_range(db, src.as_ref().map(|it| it.syntax()));
+            let node = src.as_ref().map(|it| it.syntax());
+            let frange = node.original_file_range(db);
             let mut res = NavigationTarget::from_syntax(
                 frange.file_id,
                 name,
@@ -104,8 +105,8 @@ impl NavigationTarget {
         let name =
             node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
         let focus_range =
-            node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range);
-        let frange = original_range(db, node.map(|it| it.syntax()));
+            node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range);
+        let frange = node.map(|it| it.syntax()).original_file_range(db);
 
         NavigationTarget::from_syntax(
             frange.file_id,
@@ -124,7 +125,7 @@ impl NavigationTarget {
     ) -> NavigationTarget {
         let name =
             named.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
-        let frange = original_range(db, node.map(|it| it.syntax()));
+        let frange = node.map(|it| it.syntax()).original_file_range(db);
 
         NavigationTarget::from_syntax(
             frange.file_id,
@@ -236,7 +237,7 @@ impl ToNav for hir::Module {
                 (node.syntax(), node.name().map(|it| it.syntax().text_range()))
             }
         };
-        let frange = original_range(db, src.with_value(syntax));
+        let frange = src.with_value(syntax).original_file_range(db);
         NavigationTarget::from_syntax(frange.file_id, name, focus, frange.range, syntax.kind())
     }
 }
@@ -246,14 +247,14 @@ impl ToNav for hir::ImplDef {
         let src = self.source(db);
         let derive_attr = self.is_builtin_derive(db);
         let frange = if let Some(item) = &derive_attr {
-            original_range(db, item.syntax())
+            item.syntax().original_file_range(db)
         } else {
-            original_range(db, src.as_ref().map(|it| it.syntax()))
+            src.as_ref().map(|it| it.syntax()).original_file_range(db)
        };
         let focus_range = if derive_attr.is_some() {
             None
         } else {
-            src.value.self_ty().map(|ty| original_range(db, src.with_value(ty.syntax())).range)
+            src.value.self_ty().map(|ty| src.with_value(ty.syntax()).original_file_range(db).range)
         };
 
         NavigationTarget::from_syntax(
@@ -278,7 +279,7 @@ impl ToNav for hir::Field {
                 res
             }
             FieldSource::Pos(it) => {
-                let frange = original_range(db, src.with_value(it.syntax()));
+                let frange = src.with_value(it.syntax()).original_file_range(db);
                 NavigationTarget::from_syntax(
                     frange.file_id,
                     "".into(),
@@ -331,7 +332,7 @@ impl ToNav for hir::Local {
             }
             Either::Right(it) => it.syntax().clone(),
         };
-        let full_range = original_range(db, src.with_value(&node));
+        let full_range = src.with_value(&node).original_file_range(db);
         let name = match self.name(db) {
             Some(it) => it.to_string().into(),
             None => "".into(),
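All of the navigation call sites above follow one pattern: the FileRange of the whole node becomes the full range, and the .range of a child node (a name, a self type) supplies the optional focus range. A sketch of that pattern under the same assumptions as the hunks above (hypothetical helper; src: hir::InFile<ast::Struct> and db: &RootDatabase are stand-ins for whatever the caller has):

    use syntax::{ast, ast::NameOwner, AstNode};

    fn ranges_for(
        db: &ide_db::RootDatabase,
        src: hir::InFile<ast::Struct>,
    ) -> (ide_db::base_db::FileRange, Option<syntax::TextRange>) {
        // Full range of the item, mapped out of any macro expansion.
        let full = src.as_ref().map(|it| it.syntax()).original_file_range(db);
        // Focus range: just the item's name, when it has one.
        let focus = src
            .value
            .name()
            .map(|name| src.with_value(name.syntax()).original_file_range(db).range);
        (full, focus)
    }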
@@ -8,7 +8,7 @@ use std::{
 
 use hir::{
     db::{AstDatabase, DefDatabase, HirDatabase},
-    original_range, AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
+    AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
 };
 use hir_def::FunctionId;
 use hir_ty::{Ty, TypeWalk};
@@ -232,7 +232,7 @@ impl AnalysisStatsCmd {
                        // But also, we should just turn the type mismatches into diagnostics and provide these
                        let root = db.parse_or_expand(src.file_id).unwrap();
                        let node = src.map(|e| e.to_node(&root).syntax().clone());
-                        let original_range = original_range(db, node.as_ref());
+                        let original_range = node.as_ref().original_file_range(db);
                        let path = vfs.file_path(original_range.file_id);
                        let line_index =
                            host.analysis().file_line_index(original_range.file_id).unwrap();