915: Bring BodySyntaxMapping in line with other source-map instances r=flodiebold a=matklad

* rename BodySyntaxMapping to BodySourceMap (and ScopesWithSyntaxMapping to ScopesWithSourceMap), matching the other source-map types
* don't store the actual body inline; the query now returns a (Body, BodySourceMap) pair instead (see the sketch below)

r? @flodiebold 

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
Committed by bors[bot] on 2019-03-02 14:09:46 +00:00, commit fd7240837b
16 changed files with 108 additions and 133 deletions
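
Before the per-file diffs, a minimal, self-contained sketch of the shape this PR moves toward may help: one function builds the body and its source map together and returns them as a pair, while thin wrappers project out either half for callers that need only one. Everything below uses simplified stand-in types and plain functions for illustration; it is not the real rust-analyzer or salsa code.

```rust
use std::collections::HashMap;
use std::sync::Arc;

// Stand-in for Body: the position-agnostic, lowered form.
#[derive(Debug, Default)]
struct Body {
    exprs: Vec<String>,
}

// Stand-in for BodySourceMap: offsets <-> expression ids, kept outside the Body.
#[derive(Debug, Default)]
struct BodySourceMap {
    expr_map: HashMap<usize, usize>,      // source offset -> ExprId
    expr_map_back: HashMap<usize, usize>, // ExprId -> source offset
}

// Stand-in for the body_with_source_map query: build both halves in one pass
// and hand them back as a pair instead of storing the body inside the map.
fn body_with_source_map(src: &[(usize, &str)]) -> (Arc<Body>, Arc<BodySourceMap>) {
    let mut body = Body::default();
    let mut source_map = BodySourceMap::default();
    for &(offset, text) in src {
        let id = body.exprs.len();
        body.exprs.push(text.to_string());
        source_map.expr_map.insert(offset, id);
        source_map.expr_map_back.insert(id, offset);
    }
    (Arc::new(body), Arc::new(source_map))
}

// Stand-ins for the thin projection queries: take just the half you need.
fn body_hir(src: &[(usize, &str)]) -> Arc<Body> {
    body_with_source_map(src).0
}

fn body_source_map(src: &[(usize, &str)]) -> Arc<BodySourceMap> {
    body_with_source_map(src).1
}

fn main() {
    let src = [(4usize, "1 + 1"), (23, "foo()")];
    let body = body_hir(&src);
    let source_map = body_source_map(&src);
    let id = source_map.expr_map[&23usize];
    assert_eq!(source_map.expr_map_back[&id], 23);
    println!("expr {} at offset 23: {}", id, body.exprs[id]);
}
```

In the diff itself, the single salsa query body_with_source_map plays the role of the pair-returning function, and Function::body / Function::body_source_map (backed by body_hir_query and the .0/.1 projections) correspond to the thin wrappers.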

View file

@ -23,8 +23,8 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
let function =
source_binder::function_from_child_node(ctx.db, ctx.frange.file_id, expr.syntax())?;
let infer_result = function.infer(ctx.db);
let syntax_mapping = function.body_syntax_mapping(ctx.db);
let node_expr = syntax_mapping.node_expr(expr)?;
let source_map = function.body_source_map(ctx.db);
let node_expr = source_map.node_expr(expr)?;
let match_expr_ty = infer_result[node_expr].clone();
let enum_def = match match_expr_ty {
Ty::Adt { def_id: AdtDef::Enum(e), .. } => e,

View file

@ -5,11 +5,11 @@ use ra_db::{CrateId, SourceRootId, Edition};
use ra_syntax::{ast::self, TreeArc, SyntaxNode};
use crate::{
Name, ScopesWithSyntaxMapping, Ty, HirFileId,
Name, ScopesWithSourceMap, Ty, HirFileId,
HirDatabase, PersistentHirDatabase,
type_ref::TypeRef,
nameres::{ModuleScope, Namespace, lower::ImportId},
expr::{Body, BodySyntaxMapping},
expr::{Body, BodySourceMap},
ty::InferenceResult,
adt::{EnumVariantId, StructFieldId, VariantDef},
generics::GenericParams,
@ -191,7 +191,7 @@ impl Module {
}
pub fn declarations(self, db: &impl HirDatabase) -> Vec<ModuleDef> {
let (lowered_module, _) = db.lower_module(self);
let lowered_module = db.lower_module(self);
lowered_module
.declarations
.values()
@ -483,8 +483,8 @@ impl Function {
self.signature(db).name.clone()
}
pub fn body_syntax_mapping(&self, db: &impl HirDatabase) -> Arc<BodySyntaxMapping> {
db.body_syntax_mapping(*self)
pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
db.body_with_source_map(*self).1
}
pub fn body(&self, db: &impl HirDatabase) -> Arc<Body> {
@ -495,10 +495,10 @@ impl Function {
db.type_for_def((*self).into(), Namespace::Values)
}
pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSyntaxMapping {
pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
let scopes = db.expr_scopes(*self);
let syntax_mapping = db.body_syntax_mapping(*self);
ScopesWithSyntaxMapping { scopes, syntax_mapping }
let source_map = db.body_with_source_map(*self).1;
ScopesWithSourceMap { scopes, source_map }
}
pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> {

View file

@ -47,7 +47,7 @@ impl Module {
db: &impl HirDatabase,
import: ImportId,
) -> TreeArc<ast::PathSegment> {
let source_map = db.lower_module_source_map(*self);
let (_, source_map) = db.lower_module_with_source_map(*self);
let (_, source) = self.definition_source(db);
source_map.get(&source, import)
}

View file

@ -48,14 +48,14 @@ pub trait PersistentHirDatabase: SourceDatabase + AsRef<HirInterner> {
delc_id: Option<SourceFileItemId>,
) -> Arc<Vec<crate::module_tree::Submodule>>;
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_with_source_map_query)]
fn lower_module_with_source_map(
&self,
module: Module,
) -> (Arc<LoweredModule>, Arc<ImportSourceMap>);
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_query)]
fn lower_module(&self, module: Module) -> (Arc<LoweredModule>, Arc<ImportSourceMap>);
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_module_query)]
fn lower_module_module(&self, module: Module) -> Arc<LoweredModule>;
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_source_map_query)]
fn lower_module_source_map(&self, module: Module) -> Arc<ImportSourceMap>;
fn lower_module(&self, module: Module) -> Arc<LoweredModule>;
#[salsa::invoke(crate::nameres::ItemMap::item_map_query)]
fn item_map(&self, krate: Crate) -> Arc<ItemMap>;
@ -105,11 +105,14 @@ pub trait HirDatabase: PersistentHirDatabase {
#[salsa::invoke(crate::ty::type_for_field)]
fn type_for_field(&self, field: StructField) -> Ty;
#[salsa::invoke(crate::expr::body_hir)]
fn body_hir(&self, func: Function) -> Arc<crate::expr::Body>;
#[salsa::invoke(crate::expr::body_with_source_map_query)]
fn body_with_source_map(
&self,
func: Function,
) -> (Arc<crate::expr::Body>, Arc<crate::expr::BodySourceMap>);
#[salsa::invoke(crate::expr::body_syntax_mapping)]
fn body_syntax_mapping(&self, func: Function) -> Arc<crate::expr::BodySyntaxMapping>;
#[salsa::invoke(crate::expr::body_hir_query)]
fn body_hir(&self, func: Function) -> Arc<crate::expr::Body>;
#[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>;

View file

@ -16,7 +16,7 @@ use crate::{
};
use crate::{ path::GenericArgs, ty::primitive::{UintTy, UncertainIntTy, UncertainFloatTy}};
pub use self::scope::{ExprScopes, ScopesWithSyntaxMapping, ScopeEntryWithSyntax};
pub use self::scope::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax};
pub(crate) mod scope;
@ -48,13 +48,12 @@ pub struct Body {
/// expression containing it; but for type inference etc., we want to operate on
/// a structure that is agnostic to the actual positions of expressions in the
/// file, so that we don't recompute types whenever some whitespace is typed.
#[derive(Debug, Eq, PartialEq)]
pub struct BodySyntaxMapping {
body: Arc<Body>,
expr_syntax_mapping: FxHashMap<SyntaxNodePtr, ExprId>,
expr_syntax_mapping_back: ArenaMap<ExprId, SyntaxNodePtr>,
pat_syntax_mapping: FxHashMap<SyntaxNodePtr, PatId>,
pat_syntax_mapping_back: ArenaMap<PatId, SyntaxNodePtr>,
#[derive(Default, Debug, Eq, PartialEq)]
pub struct BodySourceMap {
expr_map: FxHashMap<SyntaxNodePtr, ExprId>,
expr_map_back: ArenaMap<ExprId, SyntaxNodePtr>,
pat_map: FxHashMap<SyntaxNodePtr, PatId>,
pat_map_back: ArenaMap<PatId, SyntaxNodePtr>,
}
impl Body {
@ -77,10 +76,6 @@ impl Body {
pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
self.pats.iter()
}
pub fn syntax_mapping(&self, db: &impl HirDatabase) -> Arc<BodySyntaxMapping> {
db.body_syntax_mapping(self.owner)
}
}
// needs arbitrary_self_types to be a method... or maybe move to the def?
@ -119,33 +114,29 @@ impl Index<PatId> for Body {
}
}
impl BodySyntaxMapping {
impl BodySourceMap {
pub fn expr_syntax(&self, expr: ExprId) -> Option<SyntaxNodePtr> {
self.expr_syntax_mapping_back.get(expr).cloned()
self.expr_map_back.get(expr).cloned()
}
pub fn syntax_expr(&self, ptr: SyntaxNodePtr) -> Option<ExprId> {
self.expr_syntax_mapping.get(&ptr).cloned()
self.expr_map.get(&ptr).cloned()
}
pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
self.expr_syntax_mapping.get(&SyntaxNodePtr::new(node.syntax())).cloned()
self.expr_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
}
pub fn pat_syntax(&self, pat: PatId) -> Option<SyntaxNodePtr> {
self.pat_syntax_mapping_back.get(pat).cloned()
self.pat_map_back.get(pat).cloned()
}
pub fn syntax_pat(&self, ptr: SyntaxNodePtr) -> Option<PatId> {
self.pat_syntax_mapping.get(&ptr).cloned()
self.pat_map.get(&ptr).cloned()
}
pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
self.pat_syntax_mapping.get(&SyntaxNodePtr::new(node.syntax())).cloned()
}
pub fn body(&self) -> &Arc<Body> {
&self.body
self.pat_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
}
}
@ -467,18 +458,11 @@ impl Pat {
// Queries
pub(crate) fn body_hir(db: &impl HirDatabase, func: Function) -> Arc<Body> {
Arc::clone(&body_syntax_mapping(db, func).body)
}
struct ExprCollector {
owner: Function,
exprs: Arena<ExprId, Expr>,
pats: Arena<PatId, Pat>,
expr_syntax_mapping: FxHashMap<SyntaxNodePtr, ExprId>,
expr_syntax_mapping_back: ArenaMap<ExprId, SyntaxNodePtr>,
pat_syntax_mapping: FxHashMap<SyntaxNodePtr, PatId>,
pat_syntax_mapping_back: ArenaMap<PatId, SyntaxNodePtr>,
source_map: BodySourceMap,
params: Vec<PatId>,
body_expr: Option<ExprId>,
}
@ -489,10 +473,7 @@ impl ExprCollector {
owner,
exprs: Arena::default(),
pats: Arena::default(),
expr_syntax_mapping: FxHashMap::default(),
expr_syntax_mapping_back: ArenaMap::default(),
pat_syntax_mapping: FxHashMap::default(),
pat_syntax_mapping_back: ArenaMap::default(),
source_map: BodySourceMap::default(),
params: Vec::new(),
body_expr: None,
}
@ -500,15 +481,15 @@ impl ExprCollector {
fn alloc_expr(&mut self, expr: Expr, syntax_ptr: SyntaxNodePtr) -> ExprId {
let id = self.exprs.alloc(expr);
self.expr_syntax_mapping.insert(syntax_ptr, id);
self.expr_syntax_mapping_back.insert(id, syntax_ptr);
self.source_map.expr_map.insert(syntax_ptr, id);
self.source_map.expr_map_back.insert(id, syntax_ptr);
id
}
fn alloc_pat(&mut self, pat: Pat, syntax_ptr: SyntaxNodePtr) -> PatId {
let id = self.pats.alloc(pat);
self.pat_syntax_mapping.insert(syntax_ptr, id);
self.pat_syntax_mapping_back.insert(id, syntax_ptr);
self.source_map.pat_map.insert(syntax_ptr, id);
self.source_map.pat_map_back.insert(id, syntax_ptr);
id
}
@ -639,7 +620,7 @@ impl ExprCollector {
ast::ExprKind::ParenExpr(e) => {
let inner = self.collect_expr_opt(e.expr());
// make the paren expr point to the inner expression as well
self.expr_syntax_mapping.insert(syntax_ptr, inner);
self.source_map.expr_map.insert(syntax_ptr, inner);
inner
}
ast::ExprKind::ReturnExpr(e) => {
@ -660,9 +641,11 @@ impl ExprCollector {
} else if let Some(nr) = field.name_ref() {
// field shorthand
let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(nr)));
self.expr_syntax_mapping
self.source_map
.expr_map
.insert(SyntaxNodePtr::new(nr.syntax()), id);
self.expr_syntax_mapping_back
self.source_map
.expr_map_back
.insert(id, SyntaxNodePtr::new(nr.syntax()));
id
} else {
@ -910,7 +893,7 @@ impl ExprCollector {
self.body_expr = Some(body);
}
fn into_body_syntax_mapping(self) -> BodySyntaxMapping {
fn finish(self) -> (Body, BodySourceMap) {
let body = Body {
owner: self.owner,
exprs: self.exprs,
@ -918,28 +901,30 @@ impl ExprCollector {
params: self.params,
body_expr: self.body_expr.expect("A body should have been collected"),
};
BodySyntaxMapping {
body: Arc::new(body),
expr_syntax_mapping: self.expr_syntax_mapping,
expr_syntax_mapping_back: self.expr_syntax_mapping_back,
pat_syntax_mapping: self.pat_syntax_mapping,
pat_syntax_mapping_back: self.pat_syntax_mapping_back,
}
(body, self.source_map)
}
}
pub(crate) fn body_syntax_mapping(db: &impl HirDatabase, func: Function) -> Arc<BodySyntaxMapping> {
pub(crate) fn body_with_source_map_query(
db: &impl HirDatabase,
func: Function,
) -> (Arc<Body>, Arc<BodySourceMap>) {
let mut collector = ExprCollector::new(func);
// TODO: consts, etc.
collector.collect_fn_body(&func.source(db).1);
Arc::new(collector.into_body_syntax_mapping())
let (body, source_map) = collector.finish();
(Arc::new(body), Arc::new(source_map))
}
pub(crate) fn body_hir_query(db: &impl HirDatabase, func: Function) -> Arc<Body> {
db.body_with_source_map(func).0
}
#[cfg(test)]
pub(crate) fn collect_fn_body_syntax(function: Function, node: &ast::FnDef) -> BodySyntaxMapping {
fn collect_fn_body_syntax(function: Function, node: &ast::FnDef) -> (Body, BodySourceMap) {
let mut collector = ExprCollector::new(function);
collector.collect_fn_body(node);
collector.into_body_syntax_mapping()
collector.finish()
}
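
The doc comment retained above BodySourceMap ("agnostic to the actual positions of expressions in the file, so that we don't recompute types whenever some whitespace is typed") describes the property the new split preserves: an edit that only shifts positions changes the source map but leaves the Body equal, so results derived from the Body alone can be reused. A small self-contained sketch of that property, again with simplified stand-in types rather than the real ones:

```rust
use std::collections::HashMap;

// Simplified stand-ins, not the real definitions.
#[derive(Debug, Default, PartialEq)]
struct Body {
    exprs: Vec<String>,
}

#[derive(Debug, Default, PartialEq)]
struct BodySourceMap {
    expr_map_back: HashMap<usize, usize>, // ExprId -> source offset
}

// "Lower" (offset, expression text) pairs into the two halves.
fn lower(src: &[(usize, &str)]) -> (Body, BodySourceMap) {
    let mut body = Body::default();
    let mut map = BodySourceMap::default();
    for &(offset, text) in src {
        let id = body.exprs.len();
        body.exprs.push(text.to_string());
        map.expr_map_back.insert(id, offset);
    }
    (body, map)
}

fn main() {
    let before = [(4usize, "1 + 1"), (23, "foo()")];
    // Simulate typing whitespace near the start of the file: every offset shifts.
    let after: Vec<(usize, &str)> = before.iter().map(|&(o, t)| (o + 1, t)).collect();

    let (body_a, map_a) = lower(&before);
    let (body_b, map_b) = lower(&after);

    // The position-agnostic half is identical, so results computed from it alone
    // (e.g. inferred types) stay valid; only the source map needs re-computing.
    assert_eq!(body_a, body_b);
    assert_ne!(map_a, map_b);
    println!("body with {} exprs unchanged; source map differs", body_a.exprs.len());
}
```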

View file

@ -11,7 +11,7 @@ use ra_arena::{Arena, RawId, impl_arena_id};
use crate::{
Name, AsName, Function,
expr::{PatId, ExprId, Pat, Expr, Body, Statement, BodySyntaxMapping},
expr::{PatId, ExprId, Pat, Expr, Body, Statement, BodySourceMap},
HirDatabase,
};
@ -108,8 +108,8 @@ impl ExprScopes {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ScopesWithSyntaxMapping {
pub syntax_mapping: Arc<BodySyntaxMapping>,
pub struct ScopesWithSourceMap {
pub source_map: Arc<BodySourceMap>,
pub scopes: Arc<ExprScopes>,
}
@ -129,7 +129,7 @@ impl ScopeEntryWithSyntax {
}
}
impl ScopesWithSyntaxMapping {
impl ScopesWithSourceMap {
fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator<Item = ScopeId> + 'a {
generate(self.scope_for(node), move |&scope| self.scopes.scopes[scope].parent)
}
@ -138,7 +138,7 @@ impl ScopesWithSyntaxMapping {
self.scopes
.scope_for
.iter()
.filter_map(|(id, scope)| Some((self.syntax_mapping.expr_syntax(*id)?, scope)))
.filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
// find containing scope
.min_by_key(|(ptr, _scope)| {
(!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
@ -155,7 +155,7 @@ impl ScopesWithSyntaxMapping {
.scopes
.scope_for
.iter()
.filter_map(|(id, scope)| Some((self.syntax_mapping.expr_syntax(*id)?, scope)))
.filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
.map(|(ptr, scope)| (ptr.range(), scope))
.filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
@ -185,7 +185,7 @@ impl ScopesWithSyntaxMapping {
ret.and_then(|entry| {
Some(ScopeEntryWithSyntax {
name: entry.name().clone(),
ptr: self.syntax_mapping.pat_syntax(entry.pat())?,
ptr: self.source_map.pat_syntax(entry.pat())?,
})
})
}
@ -211,7 +211,7 @@ impl ScopesWithSyntaxMapping {
pub fn scope_for(&self, node: &SyntaxNode) -> Option<ScopeId> {
node.ancestors()
.map(SyntaxNodePtr::new)
.filter_map(|ptr| self.syntax_mapping.syntax_expr(ptr))
.filter_map(|ptr| self.source_map.syntax_expr(ptr))
.find_map(|it| self.scopes.scope_for(it))
}
}
@ -316,12 +316,10 @@ mod tests {
let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let irrelevant_function = Function { id: crate::ids::FunctionId::from_raw(0.into()) };
let body_hir = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::clone(body_hir.body()));
let scopes = ScopesWithSyntaxMapping {
scopes: Arc::new(scopes),
syntax_mapping: Arc::new(body_hir),
};
let (body, source_map) = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::new(body));
let scopes =
ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
let actual = scopes
.scope_chain(marker.syntax())
.flat_map(|scope| scopes.scopes.entries(scope))
@ -417,12 +415,10 @@ mod tests {
let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let irrelevant_function = Function { id: crate::ids::FunctionId::from_raw(0.into()) };
let body_hir = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::clone(body_hir.body()));
let scopes = ScopesWithSyntaxMapping {
scopes: Arc::new(scopes),
syntax_mapping: Arc::new(body_hir),
};
let (body, source_map) = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::new(body));
let scopes =
ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
let local_name = local_name_entry.ptr();
assert_eq!(local_name.range(), expected_name.syntax().range());

View file

@ -60,7 +60,7 @@ pub use self::{
impl_block::{ImplBlock, ImplItem},
docs::{Docs, Documentation},
adt::AdtDef,
expr::{ExprScopes, ScopesWithSyntaxMapping, ScopeEntryWithSyntax},
expr::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax},
resolve::{Resolver, Resolution},
};

View file

@ -481,7 +481,7 @@ impl ItemMap {
let module_tree = db.module_tree(krate);
let input = module_tree
.modules()
.map(|module_id| (module_id, db.lower_module_module(Module { krate, module_id })))
.map(|module_id| (module_id, db.lower_module(Module { krate, module_id })))
.collect::<FxHashMap<_, _>>();
let resolver = Resolver::new(db, &input, krate);

View file

@ -60,21 +60,14 @@ impl ImportSourceMap {
}
impl LoweredModule {
pub(crate) fn lower_module_module_query(
pub(crate) fn lower_module_query(
db: &impl PersistentHirDatabase,
module: Module,
) -> Arc<LoweredModule> {
db.lower_module(module).0
db.lower_module_with_source_map(module).0
}
pub(crate) fn lower_module_source_map_query(
db: &impl PersistentHirDatabase,
module: Module,
) -> Arc<ImportSourceMap> {
db.lower_module(module).1
}
pub(crate) fn lower_module_query(
pub(crate) fn lower_module_with_source_map_query(
db: &impl PersistentHirDatabase,
module: Module,
) -> (Arc<LoweredModule>, Arc<ImportSourceMap>) {

View file

@ -157,7 +157,7 @@ pub fn macro_symbols(db: &impl HirDatabase, file_id: FileId) -> Vec<(SmolStr, Te
Some(it) => it,
None => return Vec::new(),
};
let items = db.lower_module_module(module);
let items = db.lower_module(module);
let mut res = Vec::new();
for macro_call_id in items

View file

@ -1045,11 +1045,11 @@ fn test() {
fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
let func = source_binder::function_from_position(db, pos).unwrap();
let body_syntax_mapping = func.body_syntax_mapping(db);
let body_source_map = func.body_source_map(db);
let inference_result = func.infer(db);
let (_, syntax) = func.source(db);
let node = algo::find_node_at_offset::<ast::Expr>(syntax.syntax(), pos.offset).unwrap();
let expr = body_syntax_mapping.node_expr(node).unwrap();
let expr = body_source_map.node_expr(node).unwrap();
let ty = &inference_result[expr];
ty.to_string()
}
@ -1061,17 +1061,17 @@ fn infer(content: &str) -> String {
for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap();
let inference_result = func.infer(&db);
let body_syntax_mapping = func.body_syntax_mapping(&db);
let body_source_map = func.body_source_map(&db);
let mut types = Vec::new();
for (pat, ty) in inference_result.type_of_pat.iter() {
let syntax_ptr = match body_syntax_mapping.pat_syntax(pat) {
let syntax_ptr = match body_source_map.pat_syntax(pat) {
Some(sp) => sp,
None => continue,
};
types.push((syntax_ptr, ty));
}
for (expr, ty) in inference_result.type_of_expr.iter() {
let syntax_ptr = match body_syntax_mapping.expr_syntax(expr) {
let syntax_ptr = match body_source_map.expr_syntax(expr) {
Some(sp) => sp,
None => continue,
};

View file

@ -223,8 +223,7 @@ impl RootDatabase {
self.query(hir::db::FileItemsQuery).sweep(sweep);
self.query(hir::db::FileItemQuery).sweep(sweep);
self.query(hir::db::LowerModuleQuery).sweep(sweep);
self.query(hir::db::LowerModuleSourceMapQuery).sweep(sweep);
self.query(hir::db::BodySyntaxMappingQuery).sweep(sweep);
self.query(hir::db::LowerModuleWithSourceMapQuery).sweep(sweep);
self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
}
}

View file

@ -9,8 +9,8 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
_ => return,
};
let infer_result = function.infer(ctx.db);
let syntax_mapping = function.body_syntax_mapping(ctx.db);
let expr = match syntax_mapping.node_expr(receiver) {
let source_map = function.body_source_map(ctx.db);
let expr = match source_map.node_expr(receiver) {
Some(expr) => expr,
None => return,
};

View file

@ -9,8 +9,8 @@ pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionCon
_ => return,
};
let infer_result = function.infer(ctx.db);
let syntax_mapping = function.body_syntax_mapping(ctx.db);
let expr = match syntax_mapping.node_expr(struct_lit.into()) {
let source_map = function.body_source_map(ctx.db);
let expr = match source_map.node_expr(struct_lit.into()) {
Some(expr) => expr,
None => return,
};

View file

@ -54,10 +54,10 @@ pub(crate) fn reference_definition(
if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
tested_by!(goto_definition_works_for_methods);
let infer_result = function.infer(db);
let syntax_mapping = function.body_syntax_mapping(db);
let source_map = function.body_source_map(db);
let expr = ast::Expr::cast(method_call.syntax()).unwrap();
if let Some(func) =
syntax_mapping.node_expr(expr).and_then(|it| infer_result.method_resolution(it))
source_map.node_expr(expr).and_then(|it| infer_result.method_resolution(it))
{
return Exact(NavigationTarget::from_function(db, func));
};
@ -66,10 +66,10 @@ pub(crate) fn reference_definition(
if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
tested_by!(goto_definition_works_for_fields);
let infer_result = function.infer(db);
let syntax_mapping = function.body_syntax_mapping(db);
let source_map = function.body_source_map(db);
let expr = ast::Expr::cast(field_expr.syntax()).unwrap();
if let Some(field) =
syntax_mapping.node_expr(expr).and_then(|it| infer_result.field_resolution(it))
source_map.node_expr(expr).and_then(|it| infer_result.field_resolution(it))
{
return Exact(NavigationTarget::from_field(db, field));
};
@ -80,11 +80,11 @@ pub(crate) fn reference_definition(
tested_by!(goto_definition_works_for_named_fields);
let infer_result = function.infer(db);
let syntax_mapping = function.body_syntax_mapping(db);
let source_map = function.body_source_map(db);
let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);
if let Some(expr) = struct_lit.and_then(|lit| syntax_mapping.node_expr(lit.into())) {
if let Some(expr) = struct_lit.and_then(|lit| source_map.node_expr(lit.into())) {
let ty = infer_result[expr].clone();
if let hir::Ty::Adt { def_id, .. } = ty {
if let hir::AdtDef::Struct(s) = def_id {
@ -109,9 +109,8 @@ pub(crate) fn reference_definition(
Some(Resolution::Def(def)) => return Exact(NavigationTarget::from_def(db, def)),
Some(Resolution::LocalBinding(pat)) => {
let body = resolver.body().expect("no body for local binding");
let syntax_mapping = body.syntax_mapping(db);
let ptr =
syntax_mapping.pat_syntax(pat).expect("pattern not found in syntax mapping");
let source_map = body.owner().body_source_map(db);
let ptr = source_map.pat_syntax(pat).expect("pattern not found in syntax mapping");
let name =
path.as_ident().cloned().expect("local binding from a multi-segment path");
let nav = NavigationTarget::from_scope_entry(file_id, name, ptr);

View file

@ -132,10 +132,10 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
let parent_fn = node.ancestors().find_map(ast::FnDef::cast)?;
let function = hir::source_binder::function_from_source(db, frange.file_id, parent_fn)?;
let infer = function.infer(db);
let syntax_mapping = function.body_syntax_mapping(db);
if let Some(expr) = ast::Expr::cast(node).and_then(|e| syntax_mapping.node_expr(e)) {
let source_map = function.body_source_map(db);
if let Some(expr) = ast::Expr::cast(node).and_then(|e| source_map.node_expr(e)) {
Some(infer[expr].to_string())
} else if let Some(pat) = ast::Pat::cast(node).and_then(|p| syntax_mapping.node_pat(p)) {
} else if let Some(pat) = ast::Pat::cast(node).and_then(|p| source_map.node_pat(p)) {
Some(infer[pat].to_string())
} else {
None