Fix incorrect encoding of literals in the proc-macro-api on version 4

This commit is contained in:
Lukas Wirth 2024-07-15 14:41:35 +02:00
parent f913901399
commit 05ce57efd5
12 changed files with 183 additions and 134 deletions

3
Cargo.lock generated
View file

@ -1046,7 +1046,6 @@ dependencies = [
"arrayvec", "arrayvec",
"cov-mark", "cov-mark",
"parser", "parser",
"ra-ap-rustc_lexer",
"rustc-hash", "rustc-hash",
"smallvec", "smallvec",
"span", "span",
@ -1326,6 +1325,7 @@ dependencies = [
"base-db", "base-db",
"indexmap", "indexmap",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"mbe",
"paths", "paths",
"rustc-hash", "rustc-hash",
"serde", "serde",
@ -2218,6 +2218,7 @@ name = "tt"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"arrayvec", "arrayvec",
"ra-ap-rustc_lexer",
"smol_str", "smol_str",
"stdx", "stdx",
"text-size", "text-size",

View file

@ -5,9 +5,10 @@ use base_db::CrateId;
use cfg::CfgExpr; use cfg::CfgExpr;
use either::Either; use either::Either;
use intern::{sym, Interned}; use intern::{sym, Interned};
use mbe::{ use mbe::{
desugar_doc_comment_text, syntax_node_to_token_tree, token_to_literal, DelimiterKind, desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode,
DocCommentDesugarMode, Punct, Punct,
}; };
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use span::{Span, SyntaxContextId}; use span::{Span, SyntaxContextId};
@ -20,7 +21,7 @@ use crate::{
db::ExpandDatabase, db::ExpandDatabase,
mod_path::ModPath, mod_path::ModPath,
span_map::SpanMapRef, span_map::SpanMapRef,
tt::{self, Subtree}, tt::{self, token_to_literal, Subtree},
InFile, InFile,
}; };

View file

@ -59,7 +59,7 @@ pub use span::{HirFileId, MacroCallId, MacroFileId};
pub mod tt { pub mod tt {
pub use span::Span; pub use span::Span;
pub use tt::{DelimiterKind, IdentIsRaw, LitKind, Spacing}; pub use tt::{token_to_literal, DelimiterKind, IdentIsRaw, LitKind, Spacing};
pub type Delimiter = ::tt::Delimiter<Span>; pub type Delimiter = ::tt::Delimiter<Span>;
pub type DelimSpan = ::tt::DelimSpan<Span>; pub type DelimSpan = ::tt::DelimSpan<Span>;

View file

@ -17,7 +17,6 @@ rustc-hash.workspace = true
smallvec.workspace = true smallvec.workspace = true
tracing.workspace = true tracing.workspace = true
arrayvec.workspace = true arrayvec.workspace = true
ra-ap-rustc_lexer.workspace = true
# local deps # local deps
syntax.workspace = true syntax.workspace = true
@ -30,7 +29,7 @@ span.workspace = true
test-utils.workspace = true test-utils.workspace = true
[features] [features]
in-rust-tree = ["parser/in-rust-tree", "syntax/in-rust-tree"] in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"]
[lints] [lints]
workspace = true workspace = true

View file

@ -6,13 +6,6 @@
//! The tests for this functionality live in another crate: //! The tests for this functionality live in another crate:
//! `hir_def::macro_expansion_tests::mbe`. //! `hir_def::macro_expansion_tests::mbe`.
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_lexer as rustc_lexer;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer;
mod expander; mod expander;
mod parser; mod parser;
mod syntax_bridge; mod syntax_bridge;
@ -36,7 +29,7 @@ pub use tt::{Delimiter, DelimiterKind, Punct};
pub use crate::syntax_bridge::{ pub use crate::syntax_bridge::{
desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree, desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree,
parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified, parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified,
token_to_literal, token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper, token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
}; };
pub use crate::syntax_bridge::dummy_test_span_utils::*; pub use crate::syntax_bridge::dummy_test_span_utils::*;

View file

@ -4,7 +4,7 @@ use std::fmt;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, SpanAnchor, SpanData, SpanMap}; use span::{Edition, SpanAnchor, SpanData, SpanMap};
use stdx::{format_to, itertools::Itertools, never, non_empty_vec::NonEmptyVec}; use stdx::{format_to, never, non_empty_vec::NonEmptyVec};
use syntax::{ use syntax::{
ast::{self, make::tokens::doc_comment}, ast::{self, make::tokens::doc_comment},
format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
@ -14,6 +14,7 @@ use syntax::{
use tt::{ use tt::{
buffer::{Cursor, TokenBuffer}, buffer::{Cursor, TokenBuffer},
iter::TtIter, iter::TtIter,
token_to_literal,
}; };
use crate::to_parser_input::to_parser_input; use crate::to_parser_input::to_parser_input;
@ -400,56 +401,6 @@ where
} }
} }
/// Re-lexes `text` as a single Rust literal token and splits it into its
/// unquoted body, literal kind, and optional suffix (e.g. `u32` in `1u32`).
///
/// If `text` does not lex as exactly one literal token, the whole text is
/// returned unchanged with `LitKind::Err`.
pub fn token_to_literal<S>(text: SmolStr, span: S) -> tt::Literal<S>
where
    S: Copy,
{
    use rustc_lexer::LiteralKind;
    // `next_tuple::<(_,)>()` yields `Some` only when there is at least one token;
    // anything other than a literal token falls through to the error case below.
    let token = rustc_lexer::tokenize(&text).next_tuple();
    let Some((rustc_lexer::Token {
        kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
        ..
    },)) = token
    else {
        return tt::Literal { span, text, kind: tt::LitKind::Err(()), suffix: None };
    };
    // Per literal kind: the number of leading delimiter bytes (prefix + opening
    // quote, e.g. 2 for `b'` / `b"` / `c"`) and trailing delimiter bytes to strip.
    // Unterminated literals (terminated == false) have no closing quote to strip.
    let (kind, start_offset, end_offset) = match kind {
        LiteralKind::Int { .. } => (tt::LitKind::Integer, 0, 0),
        LiteralKind::Float { .. } => (tt::LitKind::Float, 0, 0),
        LiteralKind::Char { terminated } => (tt::LitKind::Char, 1, terminated as usize),
        LiteralKind::Byte { terminated } => (tt::LitKind::Byte, 2, terminated as usize),
        LiteralKind::Str { terminated } => (tt::LitKind::Str, 1, terminated as usize),
        LiteralKind::ByteStr { terminated } => (tt::LitKind::ByteStr, 2, terminated as usize),
        LiteralKind::CStr { terminated } => (tt::LitKind::CStr, 2, terminated as usize),
        // Raw strings: `r##"..."##` — delimiters are `r` + hashes + quote on the
        // left and quote + hashes on the right. `n_hashes` is `None` for invalid
        // raw strings; default to 0 hashes in that case.
        LiteralKind::RawStr { n_hashes } => (
            tt::LitKind::StrRaw(n_hashes.unwrap_or_default()),
            2 + n_hashes.unwrap_or_default() as usize,
            1 + n_hashes.unwrap_or_default() as usize,
        ),
        LiteralKind::RawByteStr { n_hashes } => (
            tt::LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
            3 + n_hashes.unwrap_or_default() as usize,
            1 + n_hashes.unwrap_or_default() as usize,
        ),
        LiteralKind::RawCStr { n_hashes } => (
            tt::LitKind::CStrRaw(n_hashes.unwrap_or_default()),
            3 + n_hashes.unwrap_or_default() as usize,
            1 + n_hashes.unwrap_or_default() as usize,
        ),
    };
    // `suffix_start` is the byte offset where the suffix begins (lexer-provided).
    let (lit, suffix) = text.split_at(suffix_start as usize);
    let lit = &lit[start_offset..lit.len() - end_offset];
    // A lone `_` is not a real suffix; treat it like no suffix at all.
    let suffix = match suffix {
        "" | "_" => None,
        suffix => Some(Box::new(suffix.into())),
    };
    tt::Literal { span, text: lit.into(), kind, suffix }
}
fn is_single_token_op(kind: SyntaxKind) -> bool { fn is_single_token_op(kind: SyntaxKind) -> bool {
matches!( matches!(
kind, kind,

View file

@ -28,6 +28,8 @@ span.workspace = true
# InternIds for the syntax context # InternIds for the syntax context
base-db.workspace = true base-db.workspace = true
la-arena.workspace = true la-arena.workspace = true
# only here to parse via token_to_literal
mbe.workspace = true
[lints] [lints]
workspace = true workspace = true

View file

@ -197,7 +197,7 @@ mod tests {
.into(), .into(),
), ),
TokenTree::Leaf(Leaf::Literal(Literal { TokenTree::Leaf(Leaf::Literal(Literal {
text: "\"Foo\"".into(), text: "Foo".into(),
span: Span { span: Span {
range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")), range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
anchor, anchor,
@ -263,10 +263,11 @@ mod tests {
#[test] #[test]
fn test_proc_macro_rpc_works() { fn test_proc_macro_rpc_works() {
let tt = fixture_token_tree(); let tt = fixture_token_tree();
for v in RUST_ANALYZER_SPAN_SUPPORT..=CURRENT_API_VERSION {
let mut span_data_table = Default::default(); let mut span_data_table = Default::default();
let task = ExpandMacro { let task = ExpandMacro {
data: ExpandMacroData { data: ExpandMacroData {
macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), macro_body: FlatTree::new(&tt, v, &mut span_data_table),
macro_name: Default::default(), macro_name: Default::default(),
attributes: None, attributes: None,
has_global_spans: ExpnGlobals { has_global_spans: ExpnGlobals {
@ -288,7 +289,9 @@ mod tests {
assert_eq!( assert_eq!(
tt, tt,
back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table) back.data.macro_body.to_subtree_resolved(v, &span_data_table),
"version: {v}"
); );
} }
}
} }

View file

@ -141,6 +141,7 @@ impl FlatTree {
ident: Vec::new(), ident: Vec::new(),
token_tree: Vec::new(), token_tree: Vec::new(),
text: Vec::new(), text: Vec::new(),
version,
}; };
w.write(subtree); w.write(subtree);
@ -178,6 +179,7 @@ impl FlatTree {
ident: Vec::new(), ident: Vec::new(),
token_tree: Vec::new(), token_tree: Vec::new(),
text: Vec::new(), text: Vec::new(),
version,
}; };
w.write(subtree); w.write(subtree);
@ -228,6 +230,7 @@ impl FlatTree {
token_tree: self.token_tree, token_tree: self.token_tree,
text: self.text, text: self.text,
span_data_table, span_data_table,
version,
} }
.read() .read()
} }
@ -253,6 +256,7 @@ impl FlatTree {
token_tree: self.token_tree, token_tree: self.token_tree,
text: self.text, text: self.text,
span_data_table: &(), span_data_table: &(),
version,
} }
.read() .read()
} }
@ -386,8 +390,9 @@ impl InternableSpan for Span {
struct Writer<'a, 'span, S: InternableSpan> { struct Writer<'a, 'span, S: InternableSpan> {
work: VecDeque<(usize, &'a tt::Subtree<S>)>, work: VecDeque<(usize, &'a tt::Subtree<S>)>,
string_table: FxHashMap<&'a str, u32>, string_table: FxHashMap<std::borrow::Cow<'a, str>, u32>,
span_data_table: &'span mut S::Table, span_data_table: &'span mut S::Table,
version: u32,
subtree: Vec<SubtreeRepr>, subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>, literal: Vec<LiteralRepr>,
@ -425,9 +430,15 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> {
tt::TokenTree::Leaf(leaf) => match leaf { tt::TokenTree::Leaf(leaf) => match leaf {
tt::Leaf::Literal(lit) => { tt::Leaf::Literal(lit) => {
let idx = self.literal.len() as u32; let idx = self.literal.len() as u32;
let text = self.intern(&lit.text);
let id = self.token_id_of(lit.span); let id = self.token_id_of(lit.span);
let suffix = lit.suffix.as_ref().map(|s| self.intern(s)).unwrap_or(!0); let (text, suffix) = if self.version >= EXTENDED_LEAF_DATA {
(
self.intern(&lit.text),
lit.suffix.as_ref().map(|s| self.intern(s)).unwrap_or(!0),
)
} else {
(self.intern_owned(format!("{lit}")), !0)
};
self.literal.push(LiteralRepr { self.literal.push(LiteralRepr {
id, id,
text, text,
@ -456,13 +467,15 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> {
} }
tt::Leaf::Ident(ident) => { tt::Leaf::Ident(ident) => {
let idx = self.ident.len() as u32; let idx = self.ident.len() as u32;
let text = self.intern(&ident.text);
let id = self.token_id_of(ident.span); let id = self.token_id_of(ident.span);
self.ident.push(IdentRepr { let text = if self.version >= EXTENDED_LEAF_DATA {
id, self.intern(&ident.text)
text, } else if ident.is_raw.yes() {
is_raw: ident.is_raw == tt::IdentIsRaw::Yes, self.intern_owned(format!("r#{}", ident.text,))
}); } else {
self.intern(&ident.text)
};
self.ident.push(IdentRepr { id, text, is_raw: ident.is_raw.yes() });
idx << 2 | 0b11 idx << 2 | 0b11
} }
}, },
@ -484,15 +497,25 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> {
pub(crate) fn intern(&mut self, text: &'a str) -> u32 { pub(crate) fn intern(&mut self, text: &'a str) -> u32 {
let table = &mut self.text; let table = &mut self.text;
*self.string_table.entry(text).or_insert_with(|| { *self.string_table.entry(text.into()).or_insert_with(|| {
let idx = table.len(); let idx = table.len();
table.push(text.to_owned()); table.push(text.to_owned());
idx as u32 idx as u32
}) })
} }
pub(crate) fn intern_owned(&mut self, text: String) -> u32 {
let table = &mut self.text;
*self.string_table.entry(text.clone().into()).or_insert_with(|| {
let idx = table.len();
table.push(text);
idx as u32
})
}
} }
struct Reader<'span, S: InternableSpan> { struct Reader<'span, S: InternableSpan> {
version: u32,
subtree: Vec<SubtreeRepr>, subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>, literal: Vec<LiteralRepr>,
punct: Vec<PunctRepr>, punct: Vec<PunctRepr>,
@ -528,9 +551,12 @@ impl<'span, S: InternableSpan> Reader<'span, S> {
0b01 => { 0b01 => {
use tt::LitKind::*; use tt::LitKind::*;
let repr = &self.literal[idx]; let repr = &self.literal[idx];
tt::Leaf::Literal(tt::Literal { let text = self.text[repr.text as usize].as_str();
text: self.text[repr.text as usize].as_str().into(), let span = read_span(repr.id);
span: read_span(repr.id), tt::Leaf::Literal(if self.version >= EXTENDED_LEAF_DATA {
tt::Literal {
text: text.into(),
span,
kind: match u16::to_le_bytes(repr.kind) { kind: match u16::to_le_bytes(repr.kind) {
[0, _] => Err(()), [0, _] => Err(()),
[1, _] => Byte, [1, _] => Byte,
@ -552,6 +578,9 @@ impl<'span, S: InternableSpan> Reader<'span, S> {
} else { } else {
None None
}, },
}
} else {
tt::token_to_literal(text.into(), span)
}) })
.into() .into()
} }
@ -566,14 +595,23 @@ impl<'span, S: InternableSpan> Reader<'span, S> {
} }
0b11 => { 0b11 => {
let repr = &self.ident[idx]; let repr = &self.ident[idx];
tt::Leaf::Ident(tt::Ident { let text = self.text[repr.text as usize].as_str();
text: self.text[repr.text as usize].as_str().into(), let (is_raw, text) = if self.version >= EXTENDED_LEAF_DATA {
span: read_span(repr.id), (
is_raw: if repr.is_raw { if repr.is_raw {
tt::IdentIsRaw::Yes tt::IdentIsRaw::Yes
} else { } else {
tt::IdentIsRaw::No tt::IdentIsRaw::No
}, },
text,
)
} else {
tt::IdentIsRaw::split_from_symbol(text)
};
tt::Leaf::Ident(tt::Ident {
text: text.into(),
span: read_span(repr.id),
is_raw,
}) })
.into() .into()
} }

View file

@ -34,7 +34,7 @@ proc-macro-test.path = "./proc-macro-test"
[features] [features]
sysroot-abi = [] sysroot-abi = []
in-rust-tree = ["mbe/in-rust-tree", "sysroot-abi"] in-rust-tree = ["mbe/in-rust-tree", "tt/in-rust-tree","sysroot-abi"]
[lints] [lints]
workspace = true workspace = true

View file

@ -17,6 +17,10 @@ smol_str.workspace = true
text-size.workspace = true text-size.workspace = true
stdx.workspace = true stdx.workspace = true
ra-ap-rustc_lexer.workspace = true
[features]
in-rust-tree = []
[lints] [lints]
workspace = true workspace = true

View file

@ -2,14 +2,21 @@
//! input and output) of macros. It closely mirrors `proc_macro` crate's //! input and output) of macros. It closely mirrors `proc_macro` crate's
//! `TokenTree`. //! `TokenTree`.
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_lexer as rustc_lexer;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer;
pub mod buffer; pub mod buffer;
pub mod iter; pub mod iter;
use std::fmt; use std::fmt;
use stdx::impl_from; use stdx::{impl_from, itertools::Itertools as _};
pub use smol_str::SmolStr; pub use smol_str::{format_smolstr, SmolStr};
pub use text_size::{TextRange, TextSize}; pub use text_size::{TextRange, TextSize};
#[derive(Clone, PartialEq, Debug)] #[derive(Clone, PartialEq, Debug)]
@ -196,6 +203,56 @@ pub struct Literal<S> {
pub suffix: Option<Box<SmolStr>>, pub suffix: Option<Box<SmolStr>>,
} }
/// Re-lexes `text` as a single Rust literal token and decomposes it into its
/// unquoted body, literal kind, and optional suffix (e.g. `u32` in `1u32`).
///
/// Text that does not lex as exactly one literal token is returned verbatim
/// with `LitKind::Err`.
pub fn token_to_literal<S>(text: SmolStr, span: S) -> Literal<S>
where
    S: Copy,
{
    use rustc_lexer::LiteralKind;

    // `next_tuple::<(_,)>()` is `Some` only if the lexer produced a first token;
    // require that token to be a literal, otherwise bail out with an error kind.
    let Some((rustc_lexer::Token {
        kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
        ..
    },)) = rustc_lexer::tokenize(&text).next_tuple()
    else {
        return Literal { span, text, kind: LitKind::Err(()), suffix: None };
    };

    // For each kind, determine how many delimiter bytes to strip on each side:
    // the opening prefix + quote (e.g. 2 for `b'`/`b"`/`c"`) and the closing
    // quote. An unterminated literal has no closing delimiter to strip.
    let (kind, prefix_len, trailing_len) = match kind {
        LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
        LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
        LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
        LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
        LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
        LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
        LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
        // Raw variants carry their hash count; `n_hashes` is `None` for invalid
        // raw strings, in which case we fall back to zero hashes.
        LiteralKind::RawStr { n_hashes } => {
            let n = n_hashes.unwrap_or_default();
            (LitKind::StrRaw(n), 2 + n as usize, 1 + n as usize)
        }
        LiteralKind::RawByteStr { n_hashes } => {
            let n = n_hashes.unwrap_or_default();
            (LitKind::ByteStrRaw(n), 3 + n as usize, 1 + n as usize)
        }
        LiteralKind::RawCStr { n_hashes } => {
            let n = n_hashes.unwrap_or_default();
            (LitKind::CStrRaw(n), 3 + n as usize, 1 + n as usize)
        }
    };

    // Split off the suffix at the lexer-reported offset, then drop delimiters.
    let (body, suffix) = text.split_at(suffix_start as usize);
    let body = &body[prefix_len..body.len() - trailing_len];
    // A bare `_` is not a meaningful suffix; normalize it to `None`.
    let suffix = match suffix {
        "" | "_" => None,
        s => Some(Box::new(s.into())),
    };

    Literal { span, text: body.into(), kind, suffix }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Punct<S> { pub struct Punct<S> {
pub char: char, pub char: char,