Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-13 21:54:42 +00:00
Auto merge of #17398 - Veykril:bogus-file, r=Veykril
internal: Remove FileId::BOGUS
Commit b5e0d7c349
8 changed files with 41 additions and 71 deletions
@@ -90,7 +90,7 @@ use hir_expand::{
 use item_tree::ExternBlock;
 use la_arena::Idx;
 use nameres::DefMap;
-use span::{AstIdNode, Edition, FileAstId, FileId, SyntaxContextId};
+use span::{AstIdNode, Edition, FileAstId, SyntaxContextId};
 use stdx::impl_from;
 use syntax::{ast, AstNode};
 
@@ -958,15 +958,14 @@ impl GenericDefId {
         match self {
             GenericDefId::FunctionId(it) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::TypeAliasId(it) => file_id_and_params_of_item_loc(db, it),
-            GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None),
             GenericDefId::AdtId(AdtId::StructId(it)) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::AdtId(AdtId::UnionId(it)) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::AdtId(AdtId::EnumId(it)) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::TraitId(it) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it),
-            // We won't be using this ID anyway
-            GenericDefId::EnumVariantId(_) => (FileId::BOGUS.into(), None),
+            GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None),
+            GenericDefId::EnumVariantId(it) => (it.lookup(db).id.file_id(), None),
         }
     }
 
@@ -21,7 +21,7 @@ use itertools::{izip, Itertools};
 use la_arena::Idx;
 use limit::Limit;
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContextId};
+use span::{Edition, ErasedFileAstId, FileAstId, SyntaxContextId};
 use syntax::ast;
 use triomphe::Arc;
 
@@ -75,36 +75,23 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
 
     let proc_macros = if krate.is_proc_macro {
         match db.proc_macros().get(&def_map.krate) {
-            Some(Ok(proc_macros)) => {
-                Ok(proc_macros
-                    .iter()
-                    .enumerate()
-                    .map(|(idx, it)| {
-                        // FIXME: a hacky way to create a Name from string.
-                        let name = tt::Ident {
-                            text: it.name.clone(),
-                            span: Span {
-                                range: syntax::TextRange::empty(syntax::TextSize::new(0)),
-                                anchor: span::SpanAnchor {
-                                    file_id: FileId::BOGUS,
-                                    ast_id: span::ROOT_ERASED_FILE_AST_ID,
-                                },
-                                ctx: SyntaxContextId::ROOT,
-                            },
-                        };
-                        (
-                            name.as_name(),
-                            if it.disabled {
-                                CustomProcMacroExpander::disabled()
-                            } else {
-                                CustomProcMacroExpander::new(
-                                    hir_expand::proc_macro::ProcMacroId::new(idx as u32),
-                                )
-                            },
-                        )
-                    })
-                    .collect())
-            }
+            Some(Ok(proc_macros)) => Ok(proc_macros
+                .iter()
+                .enumerate()
+                .map(|(idx, it)| {
+                    let name = Name::new_text_dont_use(it.name.clone());
+                    (
+                        name,
+                        if it.disabled {
+                            CustomProcMacroExpander::disabled()
+                        } else {
+                            CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new(
+                                idx as u32,
+                            ))
+                        },
+                    )
+                })
+                .collect()),
             Some(Err(e)) => Err(e.clone().into_boxed_str()),
             None => Err("No proc-macros present for crate".to_owned().into_boxed_str()),
         }
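For readability, the registration body after this change boils down to the shape below. This is a condensed sketch using the internal rust-analyzer APIs visible in the hunk above (`Name::new_text_dont_use`, `CustomProcMacroExpander`, `ProcMacroId`); the real code collects the pairs inside the surrounding `Ok(...)`, and the `Vec` here is only for illustration.

    // Sketch: turn each loaded proc macro into a (Name, expander) pair.
    // The Name is built straight from the declared macro name, so the old
    // hand-rolled tt::Ident with a bogus Span is no longer needed.
    let expanders: Vec<(Name, CustomProcMacroExpander)> = proc_macros
        .iter()
        .enumerate()
        .map(|(idx, it)| {
            let name = Name::new_text_dont_use(it.name.clone());
            let expander = if it.disabled {
                CustomProcMacroExpander::disabled()
            } else {
                CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new(idx as u32))
            };
            (name, expander)
        })
        .collect();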
@@ -2154,19 +2141,7 @@ impl ModCollector<'_, '_> {
                 let name;
                 let name = match attrs.by_key("rustc_builtin_macro").string_value() {
                     Some(it) => {
-                        // FIXME: a hacky way to create a Name from string.
-                        name = tt::Ident {
-                            text: it.into(),
-                            span: Span {
-                                range: syntax::TextRange::empty(syntax::TextSize::new(0)),
-                                anchor: span::SpanAnchor {
-                                    file_id: FileId::BOGUS,
-                                    ast_id: span::ROOT_ERASED_FILE_AST_ID,
-                                },
-                                ctx: SyntaxContextId::ROOT,
-                            },
-                        }
-                        .as_name();
+                        name = Name::new_text_dont_use(it.into());
                         &name
                     }
                     None => {
@@ -146,13 +146,11 @@ pub fn expand_speculative(
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
-
-    // FIXME: This BOGUS here is dangerous once the proc-macro server can call back into the database!
-    let span_map = RealSpanMap::absolute(FileId::BOGUS);
-    let span_map = SpanMapRef::RealSpanMap(&span_map);
-
     let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
 
+    let span_map = RealSpanMap::absolute(span.anchor.file_id);
+    let span_map = SpanMapRef::RealSpanMap(&span_map);
+
     // Build the subtree and token mapping for the speculative args
     let (mut tt, undo_info) = match loc.kind {
         MacroCallKind::FnLike { .. } => (
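The reordering above is what resolves the old FIXME: the span map can only be anchored to a real file once the macro argument's span is known, so the argument lookup now happens first and the bogus placeholder file disappears. A condensed sketch of the new sequence, using the same internal APIs as the hunk:

    // Sketch: anchor the speculative expansion's span map to the file that
    // actually contains the macro call, taken from the argument's span anchor.
    let loc = db.lookup_intern_macro_call(actual_macro_call);
    let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
    let span_map = RealSpanMap::absolute(span.anchor.file_id);
    let span_map = SpanMapRef::RealSpanMap(&span_map);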
@@ -4,7 +4,10 @@
 use mbe::DocCommentDesugarMode;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
-use span::{ErasedFileAstId, Span, SpanAnchor, FIXUP_ERASED_FILE_AST_ID_MARKER};
+use span::{
+    ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, FIXUP_ERASED_FILE_AST_ID_MARKER,
+    ROOT_ERASED_FILE_AST_ID,
+};
 use stdx::never;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
@@ -307,8 +310,13 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo)
         tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
             || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
     ) {
-        tt.delimiter.close = Span::DUMMY;
-        tt.delimiter.open = Span::DUMMY;
+        let span = |file_id| Span {
+            range: TextRange::empty(TextSize::new(0)),
+            anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+            ctx: SyntaxContextId::ROOT,
+        };
+        tt.delimiter.open = span(tt.delimiter.open.anchor.file_id);
+        tt.delimiter.close = span(tt.delimiter.close.anchor.file_id);
     }
     reverse_fixups_(tt, undo_info);
 }
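Put differently, instead of stamping both delimiters with the now-removed `Span::DUMMY`, the fixup reversal keeps whichever file each delimiter was already anchored to and only resets the range, AST id, and syntax context. A minimal sketch of that closure, as it appears in the hunk above:

    // Sketch: rebuild a delimiter span while preserving its original anchor file.
    let span = |file_id| Span {
        range: TextRange::empty(TextSize::new(0)),
        anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
        ctx: SyntaxContextId::ROOT,
    };
    tt.delimiter.open = span(tt.delimiter.open.anchor.file_id);
    tt.delimiter.close = span(tt.delimiter.close.anchor.file_id);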
@@ -231,7 +231,7 @@ mod tests {
 
     const DUMMY: tt::Span = tt::Span {
         range: TextRange::empty(TextSize::new(0)),
-        anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
+        anchor: SpanAnchor { file_id: FileId::from_raw(0xe4e4e), ast_id: ROOT_ERASED_FILE_AST_ID },
         ctx: SyntaxContextId::ROOT,
     };
 
@@ -47,7 +47,7 @@ pub(crate) mod dummy_test_span_utils {
     pub const DUMMY: Span = Span {
         range: TextRange::empty(TextSize::new(0)),
         anchor: span::SpanAnchor {
-            file_id: span::FileId::BOGUS,
+            file_id: span::FileId::from_raw(0xe4e4e),
             ast_id: span::ROOT_ERASED_FILE_AST_ID,
         },
         ctx: SyntaxContextId::ROOT,
@@ -60,7 +60,7 @@ pub(crate) mod dummy_test_span_utils {
         Span {
             range,
             anchor: span::SpanAnchor {
-                file_id: span::FileId::BOGUS,
+                file_id: span::FileId::from_raw(0xe4e4e),
                 ast_id: span::ROOT_ERASED_FILE_AST_ID,
             },
             ctx: SyntaxContextId::ROOT,
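With `FileId::BOGUS` gone (and `Span::DUMMY` removed in the next hunk), test-only helpers now spell the placeholder out themselves via `FileId::from_raw(0xe4e4e)`. If that literal starts repeating, a small helper along these lines would keep it in one place; this is hypothetical and not part of the commit, assuming the `span` and `syntax` re-exports already used in these hunks:

    use span::{FileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
    use syntax::{TextRange, TextSize};

    /// Hypothetical test-only helper: a span anchored to an obviously fake file.
    /// 0xe4e4e is the same sentinel value the removed FileId::BOGUS carried.
    fn dummy_test_span() -> Span {
        Span {
            range: TextRange::empty(TextSize::new(0)),
            anchor: SpanAnchor { file_id: FileId::from_raw(0xe4e4e), ast_id: ROOT_ERASED_FILE_AST_ID },
            ctx: SyntaxContextId::ROOT,
        }
    }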
@@ -86,15 +86,6 @@ impl<Ctx: Copy> SpanData<Ctx> {
     }
 }
 
-impl Span {
-    #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"]
-    pub const DUMMY: Self = Self {
-        range: TextRange::empty(TextSize::new(0)),
-        anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
-        ctx: SyntaxContextId::ROOT,
-    };
-}
-
 impl fmt::Display for Span {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Debug::fmt(&self.anchor.file_id.index(), f)?;
@@ -178,6 +169,8 @@ impl salsa::InternKey for MacroCallId {
 }
 
 impl MacroCallId {
+    pub const MAX_ID: u32 = 0x7fff_ffff;
+
     pub fn as_file(self) -> HirFileId {
         MacroFileId { macro_call_id: self }.into()
     }
@@ -69,9 +69,6 @@ pub struct FileId(u32);
 // pub struct FileId(NonMaxU32);
 
 impl FileId {
-    /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics!
-    // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
-    pub const BOGUS: FileId = FileId(0xe4e4e);
     pub const MAX_FILE_ID: u32 = 0x7fff_ffff;
 
     #[inline]
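With the constant removed, non-test code is expected to carry a real `FileId` through from a span anchor or an interned item location (as in the hunks above), while the few test sites that genuinely need a fake file construct it explicitly. The mechanical replacement at those sites is simply (sketch):

    // Before (removed in this commit):
    // let file_id = FileId::BOGUS;

    // After: an explicit, test-local sentinel built through the public constructor.
    let file_id = FileId::from_raw(0xe4e4e);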