Improve code structure

Make sure that there's only one entry point, analyze, and remove the awkward
analyze2 name
Aleksey Kladov 2020-12-11 16:50:47 +03:00
parent 15a644d606
commit 4015ff0e0b
2 changed files with 23 additions and 25 deletions
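The shape of the refactoring, as a standalone sketch (Analyzer, Scope, the &str node and u32 offset are illustrative stand-ins, not rust-analyzer types): one private analyze_impl takes an Option<offset>, and the public entry points analyze and analyze_with_offset are thin wrappers around it, so callers never need a second "analyze2" spelling.

    // Standalone sketch of the structure this commit moves to; all types here
    // are simplified stand-ins for the real Semantics/SourceAnalyzer machinery.
    struct Analyzer;

    #[derive(Debug)]
    struct Scope {
        offset: Option<u32>,
    }

    impl Analyzer {
        // The one entry point most callers use.
        fn analyze(&self, node: &str) -> Scope {
            self.analyze_impl(node, None)
        }

        // Offset-aware variant; still funnels into the same implementation.
        fn analyze_with_offset(&self, node: &str, offset: u32) -> Scope {
            self.analyze_impl(node, Some(offset))
        }

        // Shared implementation; callers never name this directly.
        fn analyze_impl(&self, _node: &str, offset: Option<u32>) -> Scope {
            Scope { offset }
        }
    }

    fn main() {
        let a = Analyzer;
        println!("{:?}", a.analyze("fn f() {}"));
        println!("{:?}", a.analyze_with_offset("fn f() {}", 4));
    }

In the real change below, analyze_impl also takes over the find_file call, so callers no longer wrap the node into an InFile themselves.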

@@ -294,9 +294,8 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
-        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
-        let file_id = sa.expand(self.db, macro_call)?;
+        let sa = self.analyze(macro_call.syntax());
+        let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
         let node = self.db.parse_or_expand(file_id)?;
         self.cache(node.clone(), file_id);
         Some(node)
@@ -308,9 +307,8 @@ impl<'db> SemanticsImpl<'db> {
         hypothetical_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        let macro_call =
-            self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
+        let sa = self.analyze(actual_macro_call.syntax());
+        let macro_call = InFile::new(sa.file_id, actual_macro_call);
         let krate = sa.resolver.krate()?;
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
             sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
@@ -326,10 +324,9 @@ impl<'db> SemanticsImpl<'db> {
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         let _p = profile::span("descend_into_macros");
         let parent = token.parent();
-        let parent = self.find_file(parent);
-        let sa = self.analyze2(parent.as_ref(), None);
-        let token = successors(Some(parent.with_value(token)), |token| {
+        let sa = self.analyze(&parent);
+        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
             self.db.check_canceled();
             let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
             let tt = macro_call.token_tree()?;
@@ -486,15 +483,13 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), None).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze(node);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -504,21 +499,24 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        let src = self.find_file(node.clone());
-        self.analyze2(src.as_ref(), None)
+        self.analyze_impl(node, None)
     }
 
-    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
-        let _p = profile::span("Semantics::analyze2");
-        let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
+    fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
+        self.analyze_impl(node, Some(offset))
+    }
+
+    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+        let _p = profile::span("Semantics::analyze_impl");
+        let node = self.find_file(node.clone());
+        let node = node.as_ref();
+        let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
             Some(it) => it,
-            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), node),
         };
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
@@ -528,7 +526,7 @@ impl<'db> SemanticsImpl<'db> {
             ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
             ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
         };
-        SourceAnalyzer::new_for_resolver(resolver, src)
+        SourceAnalyzer::new_for_resolver(resolver, node)
     }
 
     fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {

@@ -37,7 +37,7 @@ use base_db::CrateId;
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
 pub(crate) struct SourceAnalyzer {
-    file_id: HirFileId,
+    pub(crate) file_id: HirFileId,
     pub(crate) resolver: Resolver,
     body: Option<Arc<Body>>,
     body_source_map: Option<Arc<BodySourceMap>>,