Auto merge of #16130 - Veykril:syntax-fixup, r=Veykril

fix: Syntax fixup now removes subtrees with fake spans

Fixes https://github.com/rust-lang/rust-analyzer/issues/16118
commit 34d572e236 by bors, 2023-12-15 12:58:43 +00:00
5 changed files with 54 additions and 14 deletions
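The underlying issue (#16118) is that syntax fixup marks invented tokens with a dummy span (a reserved `FileId` plus a sentinel text range), and `reverse_fixups` previously assumed only leaves could carry such spans. A proc-macro server may echo those dummy spans back on subtrees it produces, and those subtrees can no longer be mapped back to real syntax. The sketch below illustrates the removal behaviour this change adds; it uses simplified stand-in types rather than rust-analyzer's real `Span`/`Subtree`, and it omits the part of the real code that restores censored originals from the undo info.

```rust
// Minimal sketch of the idea behind the fix, with simplified stand-in types.

#[derive(Clone, Copy, PartialEq, Debug)]
struct FileId(u32);

// Marker used by syntax fixup for spans of invented ("fake") tokens
// (stand-in for FIXUP_DUMMY_FILE in the diff below).
const FIXUP_DUMMY_FILE: FileId = FileId(0x7fff_ffff);

#[derive(Clone, Copy, Debug)]
struct Span {
    file_id: FileId,
}

#[derive(Clone, Debug)]
enum TokenTree {
    Leaf(Span),
    Subtree { delimiter: Span, token_trees: Vec<TokenTree> },
}

// Undoing fixups: fixup itself never produces *subtrees* whose delimiter
// carries the dummy span, but a proc-macro server may copy such spans onto
// subtrees it returns. Those subtrees cannot be mapped back to real syntax,
// so they are dropped instead of being recursed into.
fn reverse_fixups(trees: &mut Vec<TokenTree>) {
    trees.retain_mut(|tt| match tt {
        TokenTree::Leaf(span) => span.file_id != FIXUP_DUMMY_FILE,
        TokenTree::Subtree { delimiter, token_trees } => {
            if delimiter.file_id == FIXUP_DUMMY_FILE {
                return false; // fake subtree: remove it entirely
            }
            reverse_fixups(token_trees);
            true
        }
    });
}

fn main() {
    let real = Span { file_id: FileId(0) };
    let fake = Span { file_id: FIXUP_DUMMY_FILE };
    let mut tts = vec![
        TokenTree::Leaf(real),
        TokenTree::Subtree { delimiter: fake, token_trees: vec![TokenTree::Leaf(real)] },
    ];
    reverse_fixups(&mut tts);
    assert_eq!(tts.len(), 1); // the fake-span subtree was removed
    println!("remaining trees: {tts:?}");
}
```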


@@ -151,21 +151,26 @@ impl fmt::Debug for HirFileIdRepr {
 impl From<FileId> for HirFileId {
     fn from(id: FileId) -> Self {
-        assert!(id.index() < Self::MAX_FILE_ID, "FileId index {} is too large", id.index());
+        _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
+        assert!(id.index() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.index());
         HirFileId(id.index())
     }
 }
 impl From<MacroFileId> for HirFileId {
     fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
+        _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
         let id = id.as_u32();
-        assert!(id < Self::MAX_FILE_ID, "MacroCallId index {} is too large", id);
+        assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id);
         HirFileId(id | Self::MACRO_FILE_TAG_MASK)
     }
 }
 impl HirFileId {
-    const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
+    const ASSERT_MAX_FILE_ID_IS_SAME: () =
+        [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize];
+    const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
     const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
     #[inline]
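The new `ASSERT_MAX_FILE_ID_IS_SAME` constant uses a common const-assertion idiom: indexing the one-element array `[()]` with a `bool` cast to `usize` only compiles when the condition is false. A standalone sketch of the pattern (the names here are illustrative, not the ones from the hunk above):

```rust
// Indexing `[()]` (length 1) with `cond as usize` is only a valid constant
// when `cond` is false (index 0); if the condition is true, the index is
// out of bounds and the build fails.
const LIMIT_A: u32 = 0x7fff_ffff;
const LIMIT_B: u32 = u32::MAX ^ (1 << 31);

const ASSERT_LIMITS_AGREE: () = [()][(LIMIT_A != LIMIT_B) as usize];

fn main() {
    // The diff references its const with `_ = Self::ASSERT_MAX_FILE_ID_IS_SAME;`
    // at the use sites to make sure the check is actually evaluated there;
    // the same pattern is mirrored here.
    _ = ASSERT_LIMITS_AGREE;
    println!("limits agree: {:#x} == {:#x}", LIMIT_A, LIMIT_B);
}
```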


@@ -20,7 +20,7 @@ use crate::{
     attrs::{collect_attrs, RawAttrs},
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
-    fixup::{self, SyntaxFixupUndoInfo},
+    fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
     hygiene::{apply_mark, SyntaxContextData, Transparency},
     span::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
@@ -421,6 +421,15 @@ fn macro_arg(
             syntax::NodeOrToken::Token(_) => true,
         });
         fixups.remove.extend(censor);
+        {
+            let mut tt = mbe::syntax_node_to_token_tree_modified(
+                &syntax,
+                map.as_ref(),
+                fixups.append.clone(),
+                fixups.remove.clone(),
+            );
+            reverse_fixups(&mut tt, &fixups.undo_info);
+        }
         (
             mbe::syntax_node_to_token_tree_modified(
                 &syntax,


@@ -8,12 +8,13 @@ use base_db::{
 use la_arena::RawIdx;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
+use stdx::never;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
 use triomphe::Arc;
-use tt::Spacing;
+use tt::{Spacing, Span};
 use crate::{
     span::SpanMapRef,
@@ -45,19 +46,20 @@ impl SyntaxFixupUndoInfo {
 // replacement -> censor + append
 // append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
 // to remove later
+const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
+const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
+const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
+const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);
 pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
     let mut remove = FxHashSet::<SyntaxNode>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
-    let dummy_range = TextRange::empty(TextSize::new(0));
+    let dummy_range = FIXUP_DUMMY_RANGE;
     // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
     // the index into the replacement vec but only if the end points to !0
-    let dummy_anchor = SpanAnchor {
-        file_id: FileId::from_raw(!0),
-        ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)),
-    };
+    let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
     let fake_span = |range| SpanData {
         range: dummy_range,
         anchor: dummy_anchor,
@@ -76,7 +78,7 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
                 let replacement = Leaf::Ident(Ident {
                     text: "__ra_fixup".into(),
                     span: SpanData {
-                        range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
+                        range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
                         anchor: dummy_anchor,
                         ctx: span_map.span_for_range(node_range).ctx,
                     },
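As the comment in `fixup_syntax` describes, a fake `__ra_fixup` identifier carries a span anchored at the dummy file whose range encodes `index..!0`, where `index` points into the undo vector of original subtrees. A rough sketch of that encode/decode round trip, using simplified stand-in types rather than the real `SpanData`/`TextRange`/`tt::Subtree`:

```rust
// Stand-ins for FIXUP_DUMMY_FILE and FIXUP_DUMMY_RANGE_END from the diff.
const DUMMY_FILE: u32 = 0x7fff_ffff;
const DUMMY_RANGE_END: u32 = !0;

#[derive(Clone, Copy, Debug)]
struct FakeSpan {
    file_id: u32,
    range_start: u32,
    range_end: u32,
}

// Encoding: a replacement token remembers which original subtree it stands
// for by storing the undo-vector index in the range start, with the range
// end pinned to !0 so it can be recognised later.
fn encode(undo_index: u32) -> FakeSpan {
    FakeSpan { file_id: DUMMY_FILE, range_start: undo_index, range_end: DUMMY_RANGE_END }
}

// Decoding (what reverse_fixups does): only spans in the dummy file with the
// sentinel range end refer back to an original node.
fn decode(span: FakeSpan) -> Option<usize> {
    (span.file_id == DUMMY_FILE && span.range_end == DUMMY_RANGE_END)
        .then(|| span.range_start as usize)
}

fn main() {
    let undo_info = vec!["original node 0", "original node 1"];
    let span = encode(1);
    let idx = decode(span).expect("span should round-trip to an undo index");
    assert_eq!(undo_info[idx], "original node 1");
    println!("restored: {}", undo_info[idx]);
}
```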
@@ -299,6 +301,13 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
 pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
     let Some(undo_info) = undo_info.original.as_deref() else { return };
     let undo_info = &**undo_info;
+    if never!(
+        tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+            || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+    ) {
+        tt.delimiter.close = SpanData::DUMMY;
+        tt.delimiter.open = SpanData::DUMMY;
+    }
     reverse_fixups_(tt, undo_info);
 }
@@ -310,17 +319,28 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
         .filter(|tt| match tt {
             tt::TokenTree::Leaf(leaf) => {
                 let span = leaf.span();
-                span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0)
+                let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
+                let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
+                is_real_leaf || is_replaced_node
             }
             tt::TokenTree::Subtree(_) => true,
         })
         .flat_map(|tt| match tt {
             tt::TokenTree::Subtree(mut tt) => {
+                if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+                    || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+                {
+                    // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
+                    // might copy them if the proc-macro asks for it, so we need to filter those out
+                    // here as well.
+                    return SmallVec::new_const();
+                }
                 reverse_fixups_(&mut tt, undo_info);
                 SmallVec::from_const([tt.into()])
             }
             tt::TokenTree::Leaf(leaf) => {
-                if leaf.span().anchor.file_id == FileId::from_raw(!0) {
+                if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
+                    // we have a fake node here, we need to replace it again with the original
                     let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
                     if original.delimiter.kind == tt::DelimiterKind::Invisible {
                         original.token_trees.into()
@@ -328,6 +348,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
                         SmallVec::from_const([original.into()])
                     }
                 } else {
+                    // just a normal leaf
                     SmallVec::from_const([leaf.into()])
                 }
             }


@@ -1354,6 +1354,7 @@ impl Config {
         }
     }
+    // FIXME: This should be an AbsolutePathBuf
     fn target_dir_from_config(&self) -> Option<PathBuf> {
         self.data.rust_analyzerTargetDir.as_ref().and_then(|target_dir| match target_dir {
             TargetDirectory::UseSubdirectory(yes) if *yes => {


@@ -61,13 +61,17 @@ pub use paths::{AbsPath, AbsPathBuf};
 /// Most functions in rust-analyzer use this when they need to refer to a file.
 #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
 pub struct FileId(u32);
+// pub struct FileId(NonMaxU32);
 impl FileId {
     /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics!
+    // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
     pub const BOGUS: FileId = FileId(0xe4e4e);
+    pub const MAX_FILE_ID: u32 = 0x7fff_ffff;
     #[inline]
-    pub fn from_raw(raw: u32) -> FileId {
+    pub const fn from_raw(raw: u32) -> FileId {
+        assert!(raw <= Self::MAX_FILE_ID);
         FileId(raw)
     }
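Making `from_raw` a `const fn` with an `assert!` means the bound is also enforced at compile time whenever the constructor is used in a constant, which is what lets the fixup code define `FIXUP_DUMMY_FILE` as `FileId::from_raw(FileId::MAX_FILE_ID)`. A small standalone illustration with a simplified `FileId`, not the real one from rust-analyzer's vfs crate:

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct FileId(u32);

impl FileId {
    pub const MAX_FILE_ID: u32 = 0x7fff_ffff;

    #[inline]
    pub const fn from_raw(raw: u32) -> FileId {
        // In a const context a failed assert! becomes a compile error;
        // at runtime it panics as usual.
        assert!(raw <= Self::MAX_FILE_ID);
        FileId(raw)
    }
}

// Evaluated at compile time; using MAX_FILE_ID + 1 here would fail the build.
const DUMMY: FileId = FileId::from_raw(FileId::MAX_FILE_ID);

fn main() {
    // Runtime use still works and still checks the bound.
    let f = FileId::from_raw(42);
    println!("{:?}, dummy = {:?}", f, DUMMY);
}
```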