mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-28 05:53:45 +00:00
Remove unreachable logic for include token mapping
commit bd4785a6f0 (parent 154a9a15db)
2 changed files with 18 additions and 88 deletions
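
For readers skimming the diff: "include token mapping" here means relating a token that textually lives in an include!-d file back to the include! invocation that pulls it in. A minimal sketch of that setup, with illustrative file names (the test added at the bottom of this diff uses the same shape):

//- /main.rs
include!("foo.rs");

//- /foo.rs
fn main() {
    // This token textually belongs to foo.rs, but semantically it is part of the
    // expansion of the include!("foo.rs") call in main.rs.
    let _x = 42;
}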
@@ -28,7 +28,7 @@ use hir_expand::{
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
-use span::{EditionedFileId, FileId, Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
+use span::{EditionedFileId, FileId};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
@@ -757,84 +757,9 @@ impl<'db> SemanticsImpl<'db> {
         res
     }

-    // return:
-    // SourceAnalyzer(file_id that original call include!)
-    // macro file id
-    // token in include! macro mapped from token in params
-    // span for the mapped token
-    fn is_from_include_file(
-        &self,
-        token: SyntaxToken,
-    ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> {
-        let parent = token.parent()?;
-        let file_id = self.find_file(&parent).file_id.file_id()?;
-
-        // iterate related crates and find all include! invocations that include_file_id matches
-        for iter in self
-            .db
-            .relevant_crates(file_id.file_id())
-            .iter()
-            .map(|krate| self.db.include_macro_invoc(*krate))
-        {
-            for (invoc, _) in
-                iter.iter().filter(|&&(_, include_file_id)| include_file_id == file_id)
-            {
-                let macro_file = invoc.as_macro_file();
-                let expansion_info = {
-                    self.with_ctx(|ctx| {
-                        ctx.cache
-                            .expansion_info_cache
-                            .entry(macro_file)
-                            .or_insert_with(|| {
-                                let exp_info = macro_file.expansion_info(self.db.upcast());
-
-                                let InMacroFile { file_id, value } = exp_info.expanded();
-                                if let InFile { file_id, value: Some(value) } = exp_info.arg() {
-                                    self.cache(value.ancestors().last().unwrap(), file_id);
-                                }
-                                self.cache(value, file_id.into());
-
-                                exp_info
-                            })
-                            .clone()
-                    })
-                };
-
-                // FIXME: uncached parse
-                // Create the source analyzer for the macro call scope
-                let Some(sa) = expansion_info
-                    .arg()
-                    .value
-                    .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
-                else {
-                    continue;
-                };
-
-                // get mapped token in the include! macro file
-                let span = span::Span {
-                    range: token.text_range(),
-                    anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
-                    ctx: SyntaxContextId::ROOT,
-                };
-                let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
-                    expansion_info.map_range_down_exact(span)
-                else {
-                    continue;
-                };
-
-                // if we find one, then return
-                if let Some(t) = mapped_tokens.next() {
-                    return Some((sa, file_id.into(), t, span));
-                }
-            }
-        }
-
-        None
-    }
-
     fn descend_into_macros_impl(
         &self,
-        mut token: SyntaxToken,
+        token: SyntaxToken,
         f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
     ) {
         let _p = tracing::info_span!("descend_into_macros_impl").entered();
@@ -851,17 +776,7 @@ impl<'db> SemanticsImpl<'db> {
                     return;
                 }
             },
-            None => {
-                // if we cannot find a source analyzer for this token, then we try to find out
-                // whether this file is an included file and treat that as the include input
-                let Some((it, macro_file_id, mapped_token, s)) =
-                    self.is_from_include_file(token)
-                else {
-                    return;
-                };
-                token = mapped_token;
-                (it, s, macro_file_id)
-            }
+            None => return,
         };

         let mut m_cache = self.macro_call_cache.borrow_mut();
@@ -1144,6 +1144,21 @@ async fn main() {
       //^^ impl Future<Output = i32>
         return 8_i32;
     };
+}"#,
+    );
+}
+
+#[test]
+fn works_in_included_file() {
+    check_types(
+        r#"
+//- minicore: include
+//- /main.rs
+include!("foo.rs");
+//- /foo.rs
+fn main() {
+    let _x = 42;
+      //^^ i32
 }"#,
     );
 }