Switch token trees to use Symbols

Lukas Wirth 2024-07-16 09:59:39 +02:00
parent 0c95aaa08e
commit 93024ad411
51 changed files with 593 additions and 399 deletions
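
The theme of the diff below: `tt::Ident` and `tt::Literal` stop carrying a `SmolStr` text field (`ident.text`, `literal.text`) and instead carry an interned `Symbol` (`ident.sym`, `literal.symbol`), so call sites compare against predefined constants from `intern::sym` (for example `ident.sym == sym::hidden`) rather than against string literals. As a rough illustration of what interning buys, here is a minimal, self-contained sketch of a string interner; it is not rust-analyzer's `intern` crate, just the idea behind it, with names chosen to mirror the calls used in the diff (`Symbol::intern`, `as_str`).

```rust
use std::collections::HashSet;
use std::sync::{Mutex, OnceLock};

/// A deduplicated, cheap-to-copy handle to a string (toy stand-in for `intern::Symbol`).
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Symbol(&'static str);

impl Symbol {
    /// Returns the unique `Symbol` for `s`, allocating its backing string on first use.
    fn intern(s: &str) -> Symbol {
        static TABLE: OnceLock<Mutex<HashSet<&'static str>>> = OnceLock::new();
        let mut table = TABLE.get_or_init(|| Mutex::new(HashSet::new())).lock().unwrap();
        match table.get(s) {
            Some(&interned) => Symbol(interned),
            None => {
                // Leak one copy per distinct string; every later intern reuses it.
                let leaked: &'static str = Box::leak(s.to_owned().into_boxed_str());
                table.insert(leaked);
                Symbol(leaked)
            }
        }
    }

    fn as_str(&self) -> &str {
        self.0
    }
}

fn main() {
    let a = Symbol::intern("no_std");
    let b = Symbol::intern("no_std");
    // Both handles share one allocation; the real crate exploits this to make
    // symbol comparison cheap instead of comparing text every time.
    assert!(std::ptr::eq(a.0, b.0));
    assert_eq!(a, b);
    assert_eq!(a.as_str(), "no_std");
}
```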

Cargo.lock (generated)

@@ -149,6 +149,7 @@ dependencies = [
 "mbe",
 "oorandom",
 "rustc-hash",
+"smol_str",
 "syntax",
 "tt",
 ]
@@ -1045,6 +1046,7 @@ version = "0.0.0"
 dependencies = [
 "arrayvec",
 "cov-mark",
+"intern",
 "parser",
 "rustc-hash",
 "smallvec",
@@ -1324,8 +1326,8 @@ version = "0.0.0"
 dependencies = [
 "base-db",
 "indexmap",
+"intern",
 "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"mbe",
 "paths",
 "rustc-hash",
 "serde",
@@ -1343,6 +1345,7 @@ version = "0.0.0"
 dependencies = [
 "base-db",
 "expect-test",
+"intern",
 "libloading",
 "mbe",
 "memmap2",
@@ -1966,6 +1969,7 @@ dependencies = [
 "base-db",
 "cfg",
 "hir-expand",
+"intern",
 "rustc-hash",
 "span",
 "stdx",
@@ -2218,8 +2222,8 @@ name = "tt"
 version = "0.0.0"
 dependencies = [
 "arrayvec",
+"intern",
 "ra-ap-rustc_lexer",
-"smol_str",
 "stdx",
 "text-size",
 ]

@@ -16,6 +16,7 @@ rustc-hash.workspace = true
 # locals deps
 tt.workspace = true
+smol_str.workspace = true
 [dev-dependencies]
 expect-test = "1.4.1"

@@ -4,7 +4,7 @@
 use std::{fmt, slice::Iter as SliceIter};
-use tt::SmolStr;
+use smol_str::SmolStr;
 /// A simple configuration value passed in from the outside.
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]
@@ -66,7 +66,7 @@ impl CfgExpr {
 fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> {
 let name = match it.next() {
 None => return None,
-Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(),
+Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
 Some(_) => return Some(CfgExpr::Invalid),
 };
@@ -77,10 +77,9 @@ fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr>
 Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => {
 it.next();
 it.next();
-// FIXME: escape? raw string?
-let value =
-SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"'));
-CfgAtom::KeyValue { key: name, value }.into()
+// FIXME: escape?
+let value = literal.symbol.as_str().into();
+CfgAtom::KeyValue { key: name.as_str().into(), value }.into()
 }
 _ => return Some(CfgExpr::Invalid),
 }
@@ -96,7 +95,7 @@ fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr>
 _ => CfgExpr::Invalid,
 }
 }
-_ => CfgAtom::Flag(name).into(),
+_ => CfgAtom::Flag(name.as_str().into()).into(),
 };
 // Eat comma separator

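Worth noting for the hunk above: with `tt::Literal` now carrying a `symbol` plus a `kind`, a string literal's content arrives without its surrounding quotes, which is why the old `trim_start_matches('"')`/`trim_end_matches('"')` dance disappears. A minimal sketch of that pattern with local stand-in types (not the real `tt` crate):

```rust
// Stand-ins for `tt::Literal` / `tt::LitKind`; the shapes mirror the diff above.
#[derive(Debug, Clone, Copy, PartialEq)]
enum LitKind {
    Str,
    Integer,
}

struct Literal {
    symbol: String, // already "cooked": a Str literal's symbol has no surrounding quotes
    kind: LitKind,
}

/// Extract the value of a `key = "value"` style cfg entry.
fn cfg_value(lit: &Literal) -> Option<&str> {
    match lit.kind {
        // No quote trimming needed: the symbol is the payload itself.
        LitKind::Str => Some(lit.symbol.as_str()),
        _ => None,
    }
}

fn main() {
    let lit = Literal { symbol: "linux".to_owned(), kind: LitKind::Str };
    assert_eq!(cfg_value(&lit), Some("linux"));
}
```
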
@@ -8,10 +8,10 @@ mod tests;
 use std::fmt;
 use rustc_hash::FxHashSet;
-use tt::SmolStr;
 pub use cfg_expr::{CfgAtom, CfgExpr};
 pub use dnf::DnfExpr;
+use smol_str::SmolStr;
 /// Configuration options used for conditional compilation on items with `cfg` attributes.
 /// We have two kind of options in different namespaces: atomic options like `unix`, and

@@ -159,14 +159,14 @@ impl Attrs {
 pub fn has_doc_hidden(&self) -> bool {
 self.by_key("doc").tt_values().any(|tt| {
 tt.delimiter.kind == DelimiterKind::Parenthesis &&
-matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden")
+matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
 })
 }
 pub fn has_doc_notable_trait(&self) -> bool {
 self.by_key("doc").tt_values().any(|tt| {
 tt.delimiter.kind == DelimiterKind::Parenthesis &&
-matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "notable_trait")
+matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
 })
 }
@@ -267,7 +267,7 @@ impl DocExpr {
 fn next_doc_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<DocExpr> {
 let name = match it.next() {
 None => return None,
-Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(),
+Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
 Some(_) => return Some(DocExpr::Invalid),
 };
@@ -275,13 +275,16 @@ fn next_doc_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<DocExpr>
 let ret = match it.as_slice().first() {
 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
 match it.as_slice().get(1) {
-Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => {
+Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+symbol: text,
+kind: tt::LitKind::Str,
+..
+}))) => {
 it.next();
 it.next();
 // FIXME: escape? raw string?
-let value =
-SmolStr::new(literal.text.trim_start_matches('"').trim_end_matches('"'));
-DocAtom::KeyValue { key: name, value }.into()
+let value = SmolStr::new(text.as_str());
+DocAtom::KeyValue { key: name.as_str().into(), value }.into()
 }
 _ => return Some(DocExpr::Invalid),
 }
@@ -294,7 +297,7 @@ fn next_doc_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<DocExpr>
 _ => DocExpr::Invalid,
 }
 }
-_ => DocAtom::Flag(name).into(),
+_ => DocAtom::Flag(name.as_str().into()).into(),
 };
 // Eat comma separator
@@ -311,10 +314,11 @@ fn parse_comma_sep<S>(subtree: &tt::Subtree<S>) -> Vec<SmolStr> {
 .token_trees
 .iter()
 .filter_map(|tt| match tt {
-tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
-// FIXME: escape? raw string?
-Some(SmolStr::new(lit.text.trim_start_matches('"').trim_end_matches('"')))
-}
+tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+kind: tt::LitKind::Str,
+symbol: text,
+..
+})) => Some(SmolStr::new(text.as_str())),
 _ => None,
 })
 .collect()
@@ -598,14 +602,14 @@ impl<'attr> AttrQuery<'attr> {
 /// #[doc(html_root_url = "url")]
 /// ^^^^^^^^^^^^^ key
 /// ```
-pub fn find_string_value_in_tt(self, key: &'attr str) -> Option<&SmolStr> {
+pub fn find_string_value_in_tt(self, key: &'attr str) -> Option<&str> {
 self.tt_values().find_map(|tt| {
 let name = tt.token_trees.iter()
-.skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text, ..} )) if text == key))
+.skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if sym.as_str() == key))
 .nth(2);
 match name {
-Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text),
+Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal{ symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()),
 _ => None
 }
 })

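The `has_doc_hidden`/`has_doc_notable_trait` hunks above swap string comparisons (`ident.text == "hidden"`) for comparisons against pre-interned constants (`ident.sym == sym::hidden`). A hedged, self-contained sketch of that idiom follows; the `sym` module and `Ident` here are toy stand-ins (the real `intern::sym` module pre-defines `Symbol` constants), but the slice-pattern `matches!` mirrors the diff.

```rust
// Toy stand-in for `intern::sym`: shared constants instead of ad-hoc string literals.
mod sym {
    pub const HIDDEN: &str = "hidden";
    pub const NOTABLE_TRAIT: &str = "notable_trait";
}

struct Ident {
    sym: &'static str, // stand-in for `intern::Symbol`
}

/// True when the attribute argument list is exactly `(hidden)`.
fn is_doc_hidden(args: &[Ident]) -> bool {
    // Compare against a shared constant instead of spelling the string at every call site.
    matches!(args, [ident] if ident.sym == sym::HIDDEN)
}

fn main() {
    assert!(is_doc_hidden(&[Ident { sym: sym::HIDDEN }]));
    assert!(!is_doc_hidden(&[Ident { sym: sym::NOTABLE_TRAIT }]));
}
```
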
@@ -6,7 +6,7 @@
 use std::fmt;
 use hir_expand::name::{AsName, Name};
-use intern::sym;
+use intern::{sym, Symbol};
 /// Different signed int types.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub enum BuiltinInt {
@@ -143,6 +143,18 @@ impl BuiltinInt {
 };
 Some(res)
 }
+pub fn from_suffix_sym(suffix: &Symbol) -> Option<BuiltinInt> {
+let res = match suffix {
+s if *s == sym::isize => Self::Isize,
+s if *s == sym::i8 => Self::I8,
+s if *s == sym::i16 => Self::I16,
+s if *s == sym::i32 => Self::I32,
+s if *s == sym::i64 => Self::I64,
+s if *s == sym::i128 => Self::I128,
+_ => return None,
+};
+Some(res)
+}
 }
 #[rustfmt::skip]
@@ -160,6 +172,19 @@ impl BuiltinUint {
 };
 Some(res)
 }
+pub fn from_suffix_sym(suffix: &Symbol) -> Option<BuiltinUint> {
+let res = match suffix {
+s if *s == sym::usize => Self::Usize,
+s if *s == sym::u8 => Self::U8,
+s if *s == sym::u16 => Self::U16,
+s if *s == sym::u32 => Self::U32,
+s if *s == sym::u64 => Self::U64,
+s if *s == sym::u128 => Self::U128,
+_ => return None,
+};
+Some(res)
+}
 }
 #[rustfmt::skip]

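The new `from_suffix_sym` helpers above mirror the existing `from_suffix(&str)` constructors, but take an already-interned `Symbol` such as the ones `parse_repr_tt` (further down in this commit) now sees when it resolves `repr(u8)`, `repr(i32)` and similar. A rough usage sketch with stand-in types; the real functions compare `&Symbol` against `sym::u8` etc., strings stand in here.

```rust
// Stand-in for `BuiltinUint`; the match shape follows the diff above.
#[derive(Debug, PartialEq)]
enum BuiltinUint {
    U8,
    U16,
    U32,
}

fn from_suffix_sym(suffix: &str) -> Option<BuiltinUint> {
    // Unknown suffixes fall through to `None`, exactly like the real helper.
    let res = match suffix {
        "u8" => BuiltinUint::U8,
        "u16" => BuiltinUint::U16,
        "u32" => BuiltinUint::U32,
        _ => return None,
    };
    Some(res)
}

fn main() {
    assert_eq!(from_suffix_sym("u16"), Some(BuiltinUint::U16));
    assert_eq!(from_suffix_sym("f32"), None);
}
```
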
@@ -150,7 +150,7 @@ fn parse_rustc_legacy_const_generics(tt: &crate::tt::Subtree) -> Box<[u32]> {
 let mut indices = Vec::new();
 for args in tt.token_trees.chunks(2) {
 match &args[0] {
-tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.text.parse() {
+tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
 Ok(index) => indices.push(index),
 Err(_) => break,
 },

@@ -9,7 +9,7 @@ use hir_expand::{
 name::{AsName, Name},
 HirFileId, InFile,
 };
-use intern::Interned;
+use intern::{sym, Interned};
 use la_arena::Arena;
 use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
 use syntax::ast::{self, HasName, HasVisibility};
@@ -112,12 +112,12 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
 let mut tts = tt.token_trees.iter().peekable();
 while let Some(tt) = tts.next() {
 if let TokenTree::Leaf(Leaf::Ident(ident)) = tt {
-flags.insert(match &*ident.text {
-"packed" => {
+flags.insert(match &ident.sym {
+s if *s == sym::packed => {
 let pack = if let Some(TokenTree::Subtree(tt)) = tts.peek() {
 tts.next();
 if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() {
-lit.text.parse().unwrap_or_default()
+lit.symbol.as_str().parse().unwrap_or_default()
 } else {
 0
 }
@@ -129,11 +129,11 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
 Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack });
 ReprFlags::empty()
 }
-"align" => {
+s if *s == sym::align => {
 if let Some(TokenTree::Subtree(tt)) = tts.peek() {
 tts.next();
 if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() {
-if let Ok(align) = lit.text.parse() {
+if let Ok(align) = lit.symbol.as_str().parse() {
 let align = Align::from_bytes(align).ok();
 max_align = max_align.max(align);
 }
@@ -141,13 +141,13 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
 }
 ReprFlags::empty()
 }
-"C" => ReprFlags::IS_C,
-"transparent" => ReprFlags::IS_TRANSPARENT,
-"simd" => ReprFlags::IS_SIMD,
+s if *s == sym::C => ReprFlags::IS_C,
+s if *s == sym::transparent => ReprFlags::IS_TRANSPARENT,
+s if *s == sym::simd => ReprFlags::IS_SIMD,
 repr => {
-if let Some(builtin) = BuiltinInt::from_suffix(repr)
+if let Some(builtin) = BuiltinInt::from_suffix_sym(repr)
 .map(Either::Left)
-.or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right))
+.or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right))
 {
 int = Some(match builtin {
 Either::Left(bi) => match bi {

@@ -278,7 +278,7 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
 tt.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
 for output in segments.skip(1) {
 match output {
-[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "no_std" => {
+[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => {
 return true
 }
 _ => {}

@@ -63,6 +63,7 @@ use base_db::{CrateId, FileId};
 use hir_expand::{
 name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId,
 };
+use intern::Symbol;
 use itertools::Itertools;
 use la_arena::Arena;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -148,11 +149,11 @@ struct DefMapCrateData {
 proc_macro_loading_error: Option<Box<str>>,
 /// Custom attributes registered with `#![register_attr]`.
-registered_attrs: Vec<SmolStr>,
+registered_attrs: Vec<Symbol>,
 /// Custom tool modules registered with `#![register_tool]`.
-registered_tools: Vec<SmolStr>,
+registered_tools: Vec<Symbol>,
 /// Unstable features of Rust enabled with `#![feature(A, B)]`.
-unstable_features: FxHashSet<SmolStr>,
+unstable_features: FxHashSet<Symbol>,
 /// #[rustc_coherence_is_core]
 rustc_coherence_is_core: bool,
 no_core: bool,
@@ -170,7 +171,7 @@ impl DefMapCrateData {
 fn_proc_macro_mapping: FxHashMap::default(),
 proc_macro_loading_error: None,
 registered_attrs: Vec::new(),
-registered_tools: PREDEFINED_TOOLS.into(),
+registered_tools: PREDEFINED_TOOLS.iter().map(|it| Symbol::intern(it)).collect(),
 unstable_features: FxHashSet::default(),
 rustc_coherence_is_core: false,
 no_core: false,
@@ -447,15 +448,15 @@ impl DefMap {
 self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref)
 }
-pub fn registered_tools(&self) -> &[SmolStr] {
+pub fn registered_tools(&self) -> &[Symbol] {
 &self.data.registered_tools
 }
-pub fn registered_attrs(&self) -> &[SmolStr] {
+pub fn registered_attrs(&self) -> &[Symbol] {
 &self.data.registered_attrs
 }
-pub fn is_unstable_feature_enabled(&self, feature: &str) -> bool {
+pub fn is_unstable_feature_enabled(&self, feature: &Symbol) -> bool {
 self.data.unstable_features.contains(feature)
 }

@@ -7,7 +7,7 @@ use hir_expand::{
 MacroCallId, MacroCallKind, MacroDefId,
 };
 use span::SyntaxContextId;
-use syntax::{ast, SmolStr};
+use syntax::ast;
 use triomphe::Arc;
 use crate::{
@@ -79,20 +79,20 @@ impl DefMap {
 let segments = path.segments();
 if let Some(name) = segments.first() {
-let name = name.to_smol_str();
-let pred = |n: &_| *n == name;
-let is_tool = self.data.registered_tools.iter().map(SmolStr::as_str).any(pred);
+let name = name.symbol();
+let pred = |n: &_| *n == *name;
+let is_tool = self.data.registered_tools.iter().any(pred);
 // FIXME: tool modules can be shadowed by actual modules
 if is_tool {
 return true;
 }
 if segments.len() == 1 {
-if find_builtin_attr_idx(&name).is_some() {
+if find_builtin_attr_idx(name).is_some() {
 return true;
 }
-if self.data.registered_attrs.iter().map(SmolStr::as_str).any(pred) {
+if self.data.registered_attrs.iter().any(pred) {
 return true;
 }
 }

@@ -317,20 +317,20 @@ impl DefCollector<'_> {
 .into_iter()
 .flatten()
 .filter_map(|(feat, _)| match feat.segments() {
-[name] => Some(name.to_smol_str()),
+[name] => Some(name.symbol().clone()),
 _ => None,
 });
 crate_data.unstable_features.extend(features);
 }
 () if *attr_name == sym::register_attr.clone() => {
 if let Some(ident) = attr.single_ident_value() {
-crate_data.registered_attrs.push(ident.text.clone());
+crate_data.registered_attrs.push(ident.sym.clone());
 cov_mark::hit!(register_attr);
 }
 }
 () if *attr_name == sym::register_tool.clone() => {
 if let Some(ident) = attr.single_ident_value() {
-crate_data.registered_tools.push(ident.text.clone());
+crate_data.registered_tools.push(ident.sym.clone());
 cov_mark::hit!(register_tool);
 }
 }
@@ -2129,9 +2129,7 @@ impl ModCollector<'_, '_> {
 let is_export = export_attr.exists();
 let local_inner = if is_export {
 export_attr.tt_values().flat_map(|it| it.token_trees.iter()).any(|it| match it {
-tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
-ident.text.contains("local_inner_macros")
-}
+tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => ident.sym == sym::local_inner_macros,
 _ => false,
 })
 } else {

@@ -1,6 +1,7 @@
 //! Nameres-specific procedural macro data and helpers.
 use hir_expand::name::{AsName, Name};
+use intern::sym;
 use crate::attr::Attrs;
 use crate::tt::{Leaf, TokenTree};
@@ -67,7 +68,7 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &[TokenTree]) -> Option<(Nam
 TokenTree::Leaf(Leaf::Punct(comma)),
 TokenTree::Leaf(Leaf::Ident(attributes)),
 TokenTree::Subtree(helpers)
-] if comma.char == ',' && attributes.text == "attributes" =>
+] if comma.char == ',' && attributes.sym == sym::attributes =>
 {
 let helpers = helpers
 .token_trees

@@ -62,7 +62,7 @@ impl RawAttrs {
 Attr {
 id,
 input: Some(Box::new(AttrInput::Literal(tt::Literal {
-text,
+symbol: text,
 span,
 kind,
 suffix: None,
@@ -243,7 +243,7 @@ impl Attr {
 let span = span_map.span_for_range(range);
 let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
 let token = lit.token();
-Some(Box::new(AttrInput::Literal(token_to_literal(token.text().into(), span))))
+Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span))))
 } else if let Some(tt) = ast.token_tree() {
 let tree = syntax_node_to_token_tree(
 tt.syntax(),
@@ -260,8 +260,8 @@ impl Attr {
 fn from_tt(db: &dyn ExpandDatabase, mut tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
 if matches!(tt,
-[tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text, .. })), ..]
-if text == "unsafe"
+[tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..]
+if *sym == sym::unsafe_
 ) {
 match tt.get(1) {
 Some(tt::TokenTree::Subtree(subtree)) => tt = &subtree.token_trees,
@@ -313,10 +313,10 @@ impl Attr {
 pub fn string_value(&self) -> Option<&str> {
 match self.input.as_deref()? {
 AttrInput::Literal(tt::Literal {
-text,
+symbol: text,
 kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
 ..
-}) => Some(text),
+}) => Some(text.as_str()),
 _ => None,
 }
 }
@@ -324,23 +324,24 @@ impl Attr {
 /// #[path = "string"]
 pub fn string_value_with_span(&self) -> Option<(&str, span::Span)> {
 match self.input.as_deref()? {
-AttrInput::Literal(it) => match it.text.strip_prefix('r') {
-Some(it) => it.trim_matches('#'),
-None => it.text.as_str(),
-}
-.strip_prefix('"')?
-.strip_suffix('"')
-.zip(Some(it.span)),
+AttrInput::Literal(tt::Literal {
+symbol: text,
+kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
+span,
+suffix: _,
+}) => Some((text.as_str(), *span)),
 _ => None,
 }
 }
 pub fn string_value_unescape(&self) -> Option<Cow<'_, str>> {
 match self.input.as_deref()? {
-AttrInput::Literal(tt::Literal { text, kind: tt::LitKind::StrRaw(_), .. }) => {
-Some(Cow::Borrowed(text))
+AttrInput::Literal(tt::Literal {
+symbol: text, kind: tt::LitKind::StrRaw(_), ..
+}) => Some(Cow::Borrowed(text.as_str())),
+AttrInput::Literal(tt::Literal { symbol: text, kind: tt::LitKind::Str, .. }) => {
+unescape(text.as_str())
 }
-AttrInput::Literal(tt::Literal { text, kind: tt::LitKind::Str, .. }) => unescape(text),
 _ => None,
 }
 }

@@ -82,7 +82,7 @@ enum VariantShape {
 }
 fn tuple_field_iterator(span: Span, n: usize) -> impl Iterator<Item = tt::Ident> {
-(0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
+(0..n).map(move |it| tt::Ident::new(&format!("f{it}"), span))
 }
 impl VariantShape {
@@ -693,14 +693,14 @@ fn partial_eq_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree>
 }
 [first, rest @ ..] => {
 let rest = rest.iter().map(|it| {
-let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
-let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
+let t1 = tt::Ident::new(&format!("{}_self", it.sym), it.span);
+let t2 = tt::Ident::new(&format!("{}_other", it.sym), it.span);
 let and_and = and_and(span);
 quote!(span =>#and_and #t1 .eq( #t2 ))
 });
 let first = {
-let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
-let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
+let t1 = tt::Ident::new(&format!("{}_self", first.sym), first.span);
+let t2 = tt::Ident::new(&format!("{}_other", first.sym), first.span);
 quote!(span =>#t1 .eq( #t2 ))
 };
 quote!(span =>#first ##rest)
@@ -730,7 +730,7 @@ fn self_and_other_patterns(
 let self_patterns = adt.shape.as_pattern_map(
 name,
 |it| {
-let t = tt::Ident::new(format!("{}_self", it.text), it.span);
+let t = tt::Ident::new(&format!("{}_self", it.sym), it.span);
 quote!(span =>#t)
 },
 span,
@@ -738,7 +738,7 @@ fn self_and_other_patterns(
 let other_patterns = adt.shape.as_pattern_map(
 name,
 |it| {
-let t = tt::Ident::new(format!("{}_other", it.text), it.span);
+let t = tt::Ident::new(&format!("{}_other", it.sym), it.span);
 quote!(span =>#t)
 },
 span,
@@ -776,8 +776,8 @@ fn ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
 |(pat1, pat2, fields)| {
 let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
 for f in fields.into_iter().rev() {
-let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
-let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+let t1 = tt::Ident::new(&format!("{}_self", f.sym), f.span);
+let t2 = tt::Ident::new(&format!("{}_other", f.sym), f.span);
 body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
 }
 let fat_arrow = fat_arrow(span);
@@ -838,8 +838,8 @@ fn partial_ord_expand(span: Span, tt: &tt::Subtree) -> ExpandResult<tt::Subtree>
 let mut body =
 quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
 for f in fields.into_iter().rev() {
-let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
-let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+let t1 = tt::Ident::new(&format!("{}_self", f.sym), f.span);
+let t2 = tt::Ident::new(&format!("{}_other", f.sym), f.span);
 body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
 }
 let fat_arrow = fat_arrow(span);

@@ -1,10 +1,9 @@
 //! Builtin macro
-use ::tt::SmolStr;
 use base_db::{AnchoredPath, FileId};
 use cfg::CfgExpr;
 use either::Either;
-use intern::sym;
+use intern::{sym, Symbol};
 use mbe::{parse_exprs_with_sep, parse_to_token_tree};
 use span::{Edition, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
 use stdx::format_to;
@@ -181,10 +180,10 @@ fn line_expand(
 ExpandResult::ok(tt::Subtree {
 delimiter: tt::Delimiter::invisible_spanned(span),
 token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-text: "0".into(),
+symbol: sym::INTEGER_0.clone(),
 span,
 kind: tt::LitKind::Integer,
-suffix: Some(Box::new("u32".into())),
+suffix: Some(sym::u32.clone()),
 }))]),
 })
 }
@@ -301,12 +300,12 @@ fn format_args_nl_expand(
 let mut tt = tt.clone();
 tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
 if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-text,
+symbol: text,
 kind: tt::LitKind::Str,
 ..
 }))) = tt.token_trees.first_mut()
 {
-*text = format_smolstr!("{text}\\n");
+*text = Symbol::intern(&format_smolstr!("{}\\n", text.as_str()));
 }
 ExpandResult::ok(quote! {span =>
 builtin #pound format_args #tt
@@ -460,14 +459,14 @@ fn compile_error_expand(
 ) -> ExpandResult<tt::Subtree> {
 let err = match &*tt.token_trees {
 [tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-text,
+symbol: text,
 span: _,
 kind: tt::LitKind::Str | tt::LitKind::StrRaw(_),
 suffix: _,
 }))] =>
 // FIXME: Use the span here!
 {
-ExpandError::other(Box::from(&*unescape_str(text)))
+ExpandError::other(Box::from(unescape_str(text).as_str()))
 }
 _ => ExpandError::other("`compile_error!` argument must be a string"),
 };
@@ -507,18 +506,20 @@ fn concat_expand(
 // as-is.
 match it.kind {
 tt::LitKind::Char => {
-if let Ok(c) = unescape_char(&it.text) {
+if let Ok(c) = unescape_char(it.symbol.as_str()) {
 text.extend(c.escape_default());
 }
 record_span(it.span);
 }
-tt::LitKind::Integer | tt::LitKind::Float => format_to!(text, "{}", it.text),
+tt::LitKind::Integer | tt::LitKind::Float => {
+format_to!(text, "{}", it.symbol.as_str())
+}
 tt::LitKind::Str => {
-text.push_str(&it.text);
+text.push_str(it.symbol.as_str());
 record_span(it.span);
 }
 tt::LitKind::StrRaw(_) => {
-format_to!(text, "{}", it.text.escape_debug());
+format_to!(text, "{}", it.symbol.as_str().escape_debug());
 record_span(it.span);
 }
 tt::LitKind::Byte
@@ -531,9 +532,9 @@ fn concat_expand(
 }
 // handle boolean literals
 tt::TokenTree::Leaf(tt::Leaf::Ident(id))
-if i % 2 == 0 && (id.text == "true" || id.text == "false") =>
+if i % 2 == 0 && (id.sym == sym::true_ || id.sym == sym::false_) =>
 {
-text.push_str(id.text.as_str());
+text.push_str(id.sym.as_str());
 record_span(id.span);
 }
 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
@@ -562,21 +563,21 @@ fn concat_bytes_expand(
 };
 for (i, t) in tt.token_trees.iter().enumerate() {
 match t {
-tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text, span, kind, suffix: _ })) => {
+tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { symbol: text, span, kind, suffix: _ })) => {
 record_span(*span);
 match kind {
 tt::LitKind::Byte => {
-if let Ok(b) = unescape_byte(text) {
+if let Ok(b) = unescape_byte(text.as_str()) {
 bytes.extend(
 b.escape_ascii().filter_map(|it| char::from_u32(it as u32)),
 );
 }
 }
 tt::LitKind::ByteStr => {
-bytes.push_str(text);
+bytes.push_str(text.as_str());
 }
 tt::LitKind::ByteStrRaw(_) => {
-bytes.extend(text.escape_debug());
+bytes.extend(text.as_str().escape_debug());
 }
 _ => {
 err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
@@ -602,7 +603,7 @@ fn concat_bytes_expand(
 value: tt::Subtree {
 delimiter: tt::Delimiter::invisible_spanned(span),
 token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-text: bytes.into(),
+symbol: Symbol::intern(&bytes),
 span,
 kind: tt::LitKind::ByteStr,
 suffix: None,
@@ -621,24 +622,24 @@ fn concat_bytes_expand_subtree(
 for (ti, tt) in tree.token_trees.iter().enumerate() {
 match tt {
 tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-text,
+symbol: text,
 span,
 kind: tt::LitKind::Byte,
 suffix: _,
 })) => {
-if let Ok(b) = unescape_byte(text) {
+if let Ok(b) = unescape_byte(text.as_str()) {
 bytes.extend(b.escape_ascii().filter_map(|it| char::from_u32(it as u32)));
 }
 record_span(*span);
 }
 tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-text,
+symbol: text,
 span,
 kind: tt::LitKind::Integer,
 suffix: _,
 })) => {
 record_span(*span);
-if let Ok(b) = text.parse::<u8>() {
+if let Ok(b) = text.as_str().parse::<u8>() {
 bytes.extend(b.escape_ascii().filter_map(|it| char::from_u32(it as u32)));
 }
 }
@@ -662,7 +663,7 @@ fn concat_idents_expand(
 for (i, t) in tt.token_trees.iter().enumerate() {
 match t {
 tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => {
-ident.push_str(id.text.as_str());
+ident.push_str(id.sym.as_str());
 }
 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
 _ => {
@@ -671,7 +672,7 @@ fn concat_idents_expand(
 }
 }
 // FIXME merge spans
-let ident = tt::Ident { text: ident.into(), span, is_raw: tt::IdentIsRaw::No };
+let ident = tt::Ident { sym: Symbol::intern(&ident), span, is_raw: tt::IdentIsRaw::No };
 ExpandResult { value: quote!(span =>#ident), err }
 }
@@ -694,12 +695,12 @@ fn relative_file(
 }
 }
-fn parse_string(tt: &tt::Subtree) -> Result<(SmolStr, Span), ExpandError> {
+fn parse_string(tt: &tt::Subtree) -> Result<(Symbol, Span), ExpandError> {
 tt.token_trees
 .first()
 .and_then(|tt| match tt {
 tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-text,
+symbol: text,
 span,
 kind: tt::LitKind::Str,
 suffix: _,
@@ -739,7 +740,7 @@ pub fn include_input_to_file_id(
 arg_id: MacroCallId,
 arg: &tt::Subtree,
 ) -> Result<FileId, ExpandError> {
-relative_file(db, arg_id, &parse_string(arg)?.0, false)
+relative_file(db, arg_id, parse_string(arg)?.0.as_str(), false)
 }
 fn include_bytes_expand(
@@ -752,7 +753,7 @@ fn include_bytes_expand(
 let res = tt::Subtree {
 delimiter: tt::Delimiter::invisible_spanned(span),
 token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
-text: r#"b"""#.into(),
+symbol: Symbol::empty(),
 span,
 kind: tt::LitKind::ByteStrRaw(1),
 suffix: None,
@@ -778,7 +779,7 @@ fn include_str_expand(
 // it's unusual to `include_str!` a Rust file), but we can return an empty string.
 // Ideally, we'd be able to offer a precise expansion if the user asks for macro
 // expansion.
-let file_id = match relative_file(db, arg_id, &path, true) {
+let file_id = match relative_file(db, arg_id, path.as_str(), true) {
 Ok(file_id) => file_id,
 Err(_) => {
 return ExpandResult::ok(quote!(span =>""));
@@ -791,9 +792,9 @@ fn include_str_expand(
 ExpandResult::ok(quote!(span =>#text))
 }
-fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
+fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &Symbol) -> Option<String> {
 let krate = db.lookup_intern_macro_call(arg_id).krate;
-db.crate_graph()[krate].env.get(key).map(|it| it.escape_debug().to_string())
+db.crate_graph()[krate].env.get(key.as_str()).map(|it| it.escape_debug().to_string())
 }
 fn env_expand(
@@ -813,7 +814,7 @@ fn env_expand(
 let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| {
 // The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
 // unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
-if key == "OUT_DIR" {
+if key.as_str() == "OUT_DIR" {
 err = Some(ExpandError::other(r#"`OUT_DIR` not set, enable "build scripts" to fix"#));
 }
@@ -867,15 +868,16 @@ fn quote_expand(
 )
 }
-fn unescape_str(s: &SmolStr) -> SmolStr {
-if s.contains('\\') {
+fn unescape_str(s: &Symbol) -> Symbol {
+if s.as_str().contains('\\') {
+let s = s.as_str();
 let mut buf = String::with_capacity(s.len());
 unescape_unicode(s, Mode::Str, &mut |_, c| {
 if let Ok(c) = c {
 buf.push(c)
 }
 });
-buf.into()
+Symbol::intern(&buf)
 } else {
 s.clone()
 }

@@ -9,7 +9,6 @@ use syntax::{
 AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
 };
 use tracing::{debug, warn};
-use tt::SmolStr;
 use crate::{db::ExpandDatabase, proc_macro::ProcMacroKind, MacroCallLoc, MacroDefKind};
@@ -263,7 +262,7 @@ where
 let name = match iter.next() {
 None => return None,
 Some(NodeOrToken::Token(element)) => match element.kind() {
-syntax::T![ident] => SmolStr::new(element.text()),
+syntax::T![ident] => element.text().to_owned(),
 _ => return Some(CfgExpr::Invalid),
 },
 Some(_) => return Some(CfgExpr::Invalid),
@@ -302,13 +301,13 @@ where
 if (value_token.kind() == syntax::SyntaxKind::STRING) =>
 {
 let value = value_token.text();
-let value = SmolStr::new(value.trim_matches('"'));
-Some(CfgExpr::Atom(CfgAtom::KeyValue { key: name, value }))
+let value = value.trim_matches('"').into();
+Some(CfgExpr::Atom(CfgAtom::KeyValue { key: name.into(), value }))
 }
 _ => None,
 }
 }
-_ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
+_ => Some(CfgExpr::Atom(CfgAtom::Flag(name.into()))),
 },
 };
 if let Some(NodeOrToken::Token(element)) = iter.peek() {

@@ -120,10 +120,10 @@ impl DeclarativeMacroExpander {
 .token_tree_value()?
 .token_trees
 {
-[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
-"transparent" => Some(Transparency::Transparent),
-"semitransparent" => Some(Transparency::SemiTransparent),
-"opaque" => Some(Transparency::Opaque),
+[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym {
+s if *s == sym::transparent => Some(Transparency::Transparent),
+s if *s == sym::semitransparent => Some(Transparency::SemiTransparent),
+s if *s == sym::opaque => Some(Transparency::Opaque),
 _ => None,
 },
 _ => None,

@@ -1,6 +1,7 @@
 //! To make attribute macros work reliably when typing, we need to take care to
 //! fix up syntax errors in the code we're passing to them.
+use intern::sym;
 use mbe::DocCommentDesugarMode;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
@@ -80,7 +81,7 @@ pub(crate) fn fixup_syntax(
 original.push(original_tree);
 let span = span_map.span_for_range(node_range);
 let replacement = Leaf::Ident(Ident {
-text: "__ra_fixup".into(),
+sym: sym::__ra_fixup.clone(),
 span: Span {
 range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
 anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
@@ -100,7 +101,7 @@ pub(crate) fn fixup_syntax(
 // incomplete field access: some_expr.|
 append.insert(node.clone().into(), vec![
 Leaf::Ident(Ident {
-text: "__ra_fixup".into(),
+sym: sym::__ra_fixup.clone(),
 span: fake_span(node_range),
 is_raw: tt::IdentIsRaw::No
 }),
@@ -138,7 +139,7 @@ pub(crate) fn fixup_syntax(
 };
 append.insert(if_token.into(), vec![
 Leaf::Ident(Ident {
-text: "__ra_fixup".into(),
+sym: sym::__ra_fixup.clone(),
 span: fake_span(node_range),
 is_raw: tt::IdentIsRaw::No
 }),
@@ -169,7 +170,7 @@ pub(crate) fn fixup_syntax(
 };
 append.insert(while_token.into(), vec![
 Leaf::Ident(Ident {
-text: "__ra_fixup".into(),
+sym: sym::__ra_fixup.clone(),
 span: fake_span(node_range),
 is_raw: tt::IdentIsRaw::No
 }),
@@ -217,7 +218,7 @@ pub(crate) fn fixup_syntax(
 };
 append.insert(match_token.into(), vec![
 Leaf::Ident(Ident {
-text: "__ra_fixup".into(),
+sym: sym::__ra_fixup.clone(),
 span: fake_span(node_range),
 is_raw: tt::IdentIsRaw::No
 }),
@@ -247,12 +248,12 @@ pub(crate) fn fixup_syntax(
 };
 let [pat, in_token, iter] = [
-"_",
-"in",
-"__ra_fixup"
-].map(|text|
+sym::underscore.clone(),
+sym::in_.clone(),
+sym::__ra_fixup.clone(),
+].map(|sym|
 Leaf::Ident(Ident {
-text: text.into(),
+sym,
 span: fake_span(node_range),
 is_raw: tt::IdentIsRaw::No
 }),
@@ -286,7 +287,7 @@ pub(crate) fn fixup_syntax(
 if it.name_ref().is_some() && it.expr().is_none() {
 append.insert(colon.into(), vec![
 Leaf::Ident(Ident {
-text: "__ra_fixup".into(),
+sym: sym::__ra_fixup.clone(),
 span: fake_span(node_range),
 is_raw: tt::IdentIsRaw::No
 })
@@ -299,7 +300,7 @@ pub(crate) fn fixup_syntax(
 if it.segment().is_none() {
 append.insert(colon.into(), vec![
 Leaf::Ident(Ident {
-text: "__ra_fixup".into(),
+sym: sym::__ra_fixup.clone(),
 span: fake_span(node_range),
 is_raw: tt::IdentIsRaw::No
 })
@@ -333,7 +334,7 @@ pub(crate) fn fixup_syntax(
 if it.body().is_none() {
 append.insert(node.into(), vec![
 Leaf::Ident(Ident {
-text: "__ra_fixup".into(),
+sym: sym::__ra_fixup.clone(),
 span: fake_span(node_range),
 is_raw: tt::IdentIsRaw::No
 })
@@ -448,9 +449,9 @@ mod tests {
 // `TokenTree`s, see the last assertion in `check()`.
 fn check_leaf_eq(a: &tt::Leaf, b: &tt::Leaf) -> bool {
 match (a, b) {
-(tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.text == b.text,
+(tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.symbol == b.symbol,
 (tt::Leaf::Punct(a), tt::Leaf::Punct(b)) => a.char == b.char,
-(tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.text == b.text,
+(tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.sym == b.sym,
 _ => false,
 }
 }

@@ -10,6 +10,7 @@
 use std::sync::OnceLock;
+use intern::Symbol;
 use rustc_hash::FxHashMap;
 pub struct BuiltinAttribute {
@@ -26,11 +27,16 @@ pub struct AttributeTemplate {
 pub name_value_str: Option<&'static str>,
 }
-pub fn find_builtin_attr_idx(name: &str) -> Option<usize> {
-static BUILTIN_LOOKUP_TABLE: OnceLock<FxHashMap<&'static str, usize>> = OnceLock::new();
+pub fn find_builtin_attr_idx(name: &Symbol) -> Option<usize> {
+static BUILTIN_LOOKUP_TABLE: OnceLock<FxHashMap<Symbol, usize>> = OnceLock::new();
 BUILTIN_LOOKUP_TABLE
 .get_or_init(|| {
-INERT_ATTRIBUTES.iter().map(|attr| attr.name).enumerate().map(|(a, b)| (b, a)).collect()
+INERT_ATTRIBUTES
+.iter()
+.map(|attr| attr.name)
+.enumerate()
+.map(|(a, b)| (Symbol::intern(b), a))
+.collect()
 })
 .get(name)
 .copied()

@@ -316,30 +316,36 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
 tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs,
 _ => return None,
 },
-tt::Leaf::Ident(tt::Ident { text, span, .. }) if text == "$crate" => {
+tt::Leaf::Ident(tt::Ident { sym: text, span, .. }) if *text == sym::dollar_crate => {
 resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate)
 }
-tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::SELF,
-tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => {
+tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::self_ => PathKind::SELF,
+tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::super_ => {
 let mut deg = 1;
-while let Some(tt::Leaf::Ident(tt::Ident { text, span, is_raw })) = leaves.next() {
-if text != "super" {
-segments.push(Name::new(text, *is_raw, span.ctx));
+while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw })) = leaves.next() {
+if *text != sym::super_ {
+segments.push(Name::new_symbol_maybe_raw(text.clone(), *is_raw, span.ctx));
 break;
 }
 deg += 1;
 }
 PathKind::Super(deg)
 }
-tt::Leaf::Ident(tt::Ident { text, .. }) if text == "crate" => PathKind::Crate,
+tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::crate_ => PathKind::Crate,
 tt::Leaf::Ident(ident) => {
-segments.push(Name::new(&ident.text, ident.is_raw, ident.span.ctx));
+segments.push(Name::new_symbol_maybe_raw(
+ident.sym.clone(),
+ident.is_raw,
+ident.span.ctx,
+));
 PathKind::Plain
 }
 _ => return None,
 };
 segments.extend(leaves.filter_map(|leaf| match leaf {
-::tt::Leaf::Ident(ident) => Some(Name::new(&ident.text, ident.is_raw, ident.span.ctx)),
+::tt::Leaf::Ident(ident) => {
+Some(Name::new_symbol_maybe_raw(ident.sym.clone(), ident.is_raw, ident.span.ctx))
+}
 _ => None,
 }));
 Some(ModPath { kind, segments })

@@ -17,6 +17,8 @@ use syntax::{ast, format_smolstr, utils::is_raw_identifier, SmolStr};
 pub struct Name {
 symbol: Symbol,
 ctx: (),
+// FIXME: We should probably encode rawness as a property here instead, once we have hygiene
+// in here we've got 4 bytes of padding to fill anyways
 }
 impl fmt::Debug for Name {
@@ -187,14 +189,22 @@ impl Name {
 &self.symbol
 }
-pub const fn new_symbol(doc: Symbol, ctx: SyntaxContextId) -> Self {
+pub const fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self {
 _ = ctx;
-Self { symbol: doc, ctx: () }
+Self { symbol, ctx: () }
+}
+pub fn new_symbol_maybe_raw(sym: Symbol, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Self {
+if raw.no() {
+Self { symbol: sym, ctx: () }
+} else {
+Name::new(sym.as_str(), raw, ctx)
+}
 }
 // FIXME: This needs to go once we have hygiene
-pub const fn new_symbol_root(doc: Symbol) -> Self {
-Self { symbol: doc, ctx: () }
+pub const fn new_symbol_root(sym: Symbol) -> Self {
+Self { symbol: sym, ctx: () }
 }
 }
@@ -250,7 +260,7 @@ impl AsName for ast::NameOrNameRef {
 impl<Span> AsName for tt::Ident<Span> {
 fn as_name(&self) -> Name {
-Name::resolve(&self.text)
+Name::resolve(self.sym.as_str())
 }
 }

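`Name::new_symbol_maybe_raw` above only falls back to the string-based `Name::new` path when the identifier is raw (`r#type` and friends); a plain identifier can wrap the interned symbol unchanged. A minimal sketch of that branching, with a toy escaping step standing in for whatever `Name::new` actually does with raw identifiers:

```rust
#[derive(Debug, PartialEq)]
enum IdentIsRaw {
    No,
    Yes,
}

/// Toy stand-in for building a `Name`: plain identifiers reuse the interned text
/// as-is, raw ones go through an extra normalisation step (here: keep the `r#`).
fn name_maybe_raw(sym: &str, raw: IdentIsRaw) -> String {
    match raw {
        IdentIsRaw::No => sym.to_owned(),
        IdentIsRaw::Yes => format!("r#{}", sym),
    }
}

fn main() {
    assert_eq!(name_maybe_raw("foo", IdentIsRaw::No), "foo");
    assert_eq!(name_maybe_raw("type", IdentIsRaw::Yes), "r#type");
}
```
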
@@ -1,14 +1,14 @@
 //! A simplified version of quote-crate like quasi quote macro
 #![allow(clippy::crate_in_macro_def)]
-use intern::Symbol;
+use intern::{sym, Symbol};
 use span::Span;
 use tt::IdentIsRaw;
 use crate::name::Name;
-pub(crate) const fn dollar_crate(span: Span) -> tt::Ident<Span> {
-tt::Ident { text: syntax::SmolStr::new_static("$crate"), span, is_raw: tt::IdentIsRaw::No }
+pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> {
+tt::Ident { sym: sym::dollar_crate.clone(), span, is_raw: tt::IdentIsRaw::No }
 }
 // A helper macro quote macro
@@ -99,7 +99,7 @@ macro_rules! __quote {
 ($span:ident $tt:ident ) => {
 vec![ {
 crate::tt::Leaf::Ident(crate::tt::Ident {
-text: stringify!($tt).into(),
+sym: intern::Symbol::intern(stringify!($tt)),
 span: $span,
 is_raw: tt::IdentIsRaw::No,
 }).into()
@@ -177,12 +177,6 @@ impl ToTokenTree for crate::tt::TokenTree {
 }
 }
-impl ToTokenTree for &crate::tt::TokenTree {
-fn to_token(self, _: Span) -> crate::tt::TokenTree {
-self.clone()
-}
-}
 impl ToTokenTree for crate::tt::Subtree {
 fn to_token(self, _: Span) -> crate::tt::TokenTree {
 self.into()
@@ -198,35 +192,34 @@ macro_rules! impl_to_to_tokentrees {
 leaf.into()
 }
 }
-impl ToTokenTree for &$ty {
-fn to_token($this, $span: Span) -> crate::tt::TokenTree {
-let leaf: crate::tt::Leaf = $im.clone().into();
-leaf.into()
-}
-}
 )*
 }
 }
+impl<T: ToTokenTree + Clone> ToTokenTree for &T {
+fn to_token(self, span: Span) -> crate::tt::TokenTree {
+self.clone().to_token(span)
+}
+}
 impl_to_to_tokentrees! {
-span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span, kind: tt::LitKind::Integer, suffix: None } };
-span: usize => self { crate::tt::Literal{text: self.to_string().into(), span, kind: tt::LitKind::Integer, suffix: None } };
-span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span, kind: tt::LitKind::Integer, suffix: None } };
-span: bool => self { crate::tt::Ident{text: self.to_string().into(), span, is_raw: tt::IdentIsRaw::No } };
+span: u32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
+span: usize => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
+span: i32 => self { crate::tt::Literal{symbol: Symbol::integer(self as _), span, kind: tt::LitKind::Integer, suffix: None } };
+span: bool => self { crate::tt::Ident{sym: if self { sym::true_.clone() } else { sym::false_.clone() }, span, is_raw: tt::IdentIsRaw::No } };
 _span: crate::tt::Leaf => self { self };
 _span: crate::tt::Literal => self { self };
 _span: crate::tt::Ident => self { self };
 _span: crate::tt::Punct => self { self };
-span: &str => self { crate::tt::Literal{text: (*self).into(), span, kind: tt::LitKind::Str, suffix: None }};
-span: String => self { crate::tt::Literal{text: self.into(), span, kind: tt::LitKind::Str, suffix: None }};
+span: &str => self { crate::tt::Literal{symbol: Symbol::intern(self), span, kind: tt::LitKind::Str, suffix: None }};
+span: String => self { crate::tt::Literal{symbol: Symbol::intern(&self), span, kind: tt::LitKind::Str, suffix: None }};
 span: Name => self {
 let (is_raw, s) = IdentIsRaw::split_from_symbol(self.as_str());
-crate::tt::Ident{text: s.into(), span, is_raw }
+crate::tt::Ident{sym: Symbol::intern(s), span, is_raw }
 };
 span: Symbol => self {
 let (is_raw, s) = IdentIsRaw::split_from_symbol(self.as_str());
-crate::tt::Ident{text: s.into(), span, is_raw }
+crate::tt::Ident{sym: Symbol::intern(s), span, is_raw }
 };
 }
@@ -236,6 +229,7 @@ mod tests {
 use ::tt::IdentIsRaw;
 use base_db::FileId;
 use expect_test::expect;
+use intern::Symbol;
 use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
 use syntax::{TextRange, TextSize};
@@ -268,7 +262,7 @@ mod tests {
 fn mk_ident(name: &str) -> crate::tt::Ident {
 let (is_raw, s) = IdentIsRaw::split_from_symbol(name);
-crate::tt::Ident { text: s.into(), span: DUMMY, is_raw }
+crate::tt::Ident { sym: Symbol::intern(s), span: DUMMY, is_raw }
 }
 #[test]

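The quote changes above also replace the per-type `impl ToTokenTree for &$ty` expansions inside `impl_to_to_tokentrees!` with a single blanket impl over references. The pattern is general enough to be worth a small self-contained sketch; the trait below is a toy, not the real `ToTokenTree`.

```rust
// Toy trait mirroring the shape of `ToTokenTree::to_token(self, span)`.
trait ToToken {
    fn to_token(self, span: u32) -> String;
}

// One blanket impl covers `&T` for every cloneable owned implementor, instead of
// generating a separate reference impl per type inside the macro.
impl<T: ToToken + Clone> ToToken for &T {
    fn to_token(self, span: u32) -> String {
        // `T: Clone` is in scope, so this clones the pointee and delegates to its impl.
        self.clone().to_token(span)
    }
}

impl ToToken for u32 {
    fn to_token(self, span: u32) -> String {
        format!("lit {} @ {}", self, span)
    }
}

fn main() {
    let n = 7u32;
    // Works both by value and by reference thanks to the blanket impl.
    assert_eq!(n.to_token(0), "lit 7 @ 0");
    assert_eq!((&n).to_token(0), "lit 7 @ 0");
}
```
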
@@ -3,6 +3,7 @@
 use std::fmt;
 use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
+use intern::sym;
 use once_cell::unsync::Lazy;
 use rustc_index::IndexVec;
 use rustc_pattern_analysis::{
@@ -75,9 +76,9 @@ pub(crate) struct MatchCheckCtx<'db> {
 impl<'db> MatchCheckCtx<'db> {
 pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self {
 let def_map = db.crate_def_map(module.krate());
-let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
+let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns);
 let min_exhaustive_patterns =
-def_map.is_unstable_feature_enabled("min_exhaustive_patterns");
+def_map.is_unstable_feature_enabled(&sym::min_exhaustive_patterns);
 Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns }
 }

View file

@ -3380,23 +3380,27 @@ impl BuiltinAttr {
if let builtin @ Some(_) = Self::builtin(name) { if let builtin @ Some(_) = Self::builtin(name) {
return builtin; return builtin;
} }
let idx = let idx = db
db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)? as u32; .crate_def_map(krate.id)
.registered_attrs()
.iter()
.position(|it| it.as_str() == name)? as u32;
Some(BuiltinAttr { krate: Some(krate.id), idx }) Some(BuiltinAttr { krate: Some(krate.id), idx })
} }
fn builtin(name: &str) -> Option<Self> { fn builtin(name: &str) -> Option<Self> {
hir_expand::inert_attr_macro::find_builtin_attr_idx(name) hir_expand::inert_attr_macro::find_builtin_attr_idx(&Symbol::intern(name))
.map(|idx| BuiltinAttr { krate: None, idx: idx as u32 }) .map(|idx| BuiltinAttr { krate: None, idx: idx as u32 })
} }
pub fn name(&self, db: &dyn HirDatabase) -> SmolStr { pub fn name(&self, db: &dyn HirDatabase) -> Name {
// FIXME: Return a `Name` here
match self.krate { match self.krate {
Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(), Some(krate) => Name::new_symbol_root(
None => { db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(),
SmolStr::new(hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name) ),
} None => Name::new_symbol_root(Symbol::intern(
hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name,
)),
} }
} }
@ -3420,13 +3424,15 @@ impl ToolModule {
pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> { pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
let krate = krate.id; let krate = krate.id;
let idx = let idx =
db.crate_def_map(krate).registered_tools().iter().position(|it| it == name)? as u32; db.crate_def_map(krate).registered_tools().iter().position(|it| it.as_str() == name)?
as u32;
Some(ToolModule { krate, idx }) Some(ToolModule { krate, idx })
} }
pub fn name(&self, db: &dyn HirDatabase) -> SmolStr { pub fn name(&self, db: &dyn HirDatabase) -> Name {
// FIXME: Return a `Name` here Name::new_symbol_root(
db.crate_def_map(self.krate).registered_tools()[self.idx as usize].clone() db.crate_def_map(self.krate).registered_tools()[self.idx as usize].clone(),
)
} }
} }

View file

@ -192,13 +192,13 @@ impl Definition {
let AttributeTemplate { word, list, name_value_str } = it.template(db)?; let AttributeTemplate { word, list, name_value_str } = it.template(db)?;
let mut docs = "Valid forms are:".to_owned(); let mut docs = "Valid forms are:".to_owned();
if word { if word {
format_to!(docs, "\n - #\\[{}]", name); format_to!(docs, "\n - #\\[{}]", name.display(db));
} }
if let Some(list) = list { if let Some(list) = list {
format_to!(docs, "\n - #\\[{}({})]", name, list); format_to!(docs, "\n - #\\[{}({})]", name.display(db), list);
} }
if let Some(name_value_str) = name_value_str { if let Some(name_value_str) = name_value_str {
format_to!(docs, "\n - #\\[{} = {}]", name, name_value_str); format_to!(docs, "\n - #\\[{} = {}]", name.display(db), name_value_str);
} }
Some(Documentation::new(docs.replace('*', "\\*"))) Some(Documentation::new(docs.replace('*', "\\*")))
} }
@ -256,8 +256,8 @@ impl Definition {
Definition::GenericParam(it) => it.display(db).to_string(), Definition::GenericParam(it) => it.display(db).to_string(),
Definition::Label(it) => it.name(db).display(db).to_string(), Definition::Label(it) => it.name(db).display(db).to_string(),
Definition::ExternCrateDecl(it) => it.display(db).to_string(), Definition::ExternCrateDecl(it) => it.display(db).to_string(),
Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db)), Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db).display(db)),
Definition::ToolModule(it) => it.name(db).to_string(), Definition::ToolModule(it) => it.name(db).display(db).to_string(),
Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)), Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)),
} }
} }

View file

@ -174,6 +174,32 @@ impl Symbol {
} }
} }
pub fn integer(i: usize) -> Self {
match i {
0 => symbols::INTEGER_0.clone(),
1 => symbols::INTEGER_1.clone(),
2 => symbols::INTEGER_2.clone(),
3 => symbols::INTEGER_3.clone(),
4 => symbols::INTEGER_4.clone(),
5 => symbols::INTEGER_5.clone(),
6 => symbols::INTEGER_6.clone(),
7 => symbols::INTEGER_7.clone(),
8 => symbols::INTEGER_8.clone(),
9 => symbols::INTEGER_9.clone(),
10 => symbols::INTEGER_10.clone(),
11 => symbols::INTEGER_11.clone(),
12 => symbols::INTEGER_12.clone(),
13 => symbols::INTEGER_13.clone(),
14 => symbols::INTEGER_14.clone(),
15 => symbols::INTEGER_15.clone(),
i => Symbol::intern(&format!("{i}")),
}
}
pub fn empty() -> Self {
symbols::__empty.clone()
}
pub fn as_str(&self) -> &str { pub fn as_str(&self) -> &str {
self.repr.as_str() self.repr.as_str()
} }
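
The `Symbol::integer` constructor added above keeps the sixteen smallest indices pre-interned and only pays for interning when the value is larger. A minimal standalone sketch of that fast-path idea follows; the `Cow` return type and the `SMALL` table are stand-ins for the pre-interned `INTEGER_*` symbols, not the crate's real API.

```rust
use std::borrow::Cow;

// Pre-made values for the hot path, mirroring `symbols::INTEGER_0..=INTEGER_15`.
const SMALL: [&str; 16] = [
    "0", "1", "2", "3", "4", "5", "6", "7",
    "8", "9", "10", "11", "12", "13", "14", "15",
];

fn integer_sym(i: usize) -> Cow<'static, str> {
    match SMALL.get(i).copied() {
        // Cheap "clone" of a pre-made value, like cloning a pre-interned Symbol.
        Some(s) => Cow::Borrowed(s),
        // Anything larger is formatted on demand; the real crate interns the result.
        None => Cow::Owned(i.to_string()),
    }
}

fn main() {
    assert_eq!(&*integer_sym(3), "3");
    assert_eq!(&*integer_sym(42), "42");
}
```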

View file

@ -56,6 +56,10 @@ macro_rules! define_symbols {
define_symbols! { define_symbols! {
@WITH_NAME: @WITH_NAME:
__empty = "",
unsafe_ = "unsafe",
in_ = "in",
super_ = "super",
self_ = "self", self_ = "self",
Self_ = "Self", Self_ = "Self",
tick_static = "'static", tick_static = "'static",
@ -78,10 +82,18 @@ define_symbols! {
INTEGER_14 = "14", INTEGER_14 = "14",
INTEGER_15 = "15", INTEGER_15 = "15",
fn_ = "fn", fn_ = "fn",
crate_ = "crate",
underscore = "_",
true_ = "true",
false_ = "false",
let_ = "let",
const_ = "const",
@PLAIN: @PLAIN:
__ra_fixup,
add_assign, add_assign,
add, add,
attributes,
align_offset, align_offset,
alloc_layout, alloc_layout,
alloc, alloc,
@ -92,6 +104,9 @@ define_symbols! {
bench, bench,
bitand_assign, bitand_assign,
bitand, bitand,
notable_trait,
hidden,
local_inner_macros,
bitor_assign, bitor_assign,
bitor, bitor,
bitxor_assign, bitxor_assign,
@ -225,9 +240,12 @@ define_symbols! {
log_syntax, log_syntax,
lt, lt,
macro_rules, macro_rules,
ignore,
count,
manually_drop, manually_drop,
maybe_uninit, maybe_uninit,
metadata_type, metadata_type,
missing,
module_path, module_path,
mul_assign, mul_assign,
mul, mul,
@ -349,6 +367,10 @@ define_symbols! {
u8, u8,
Unknown, Unknown,
unpin, unpin,
simd,
C,
align,
packed,
unreachable_2015, unreachable_2015,
unreachable_2021, unreachable_2021,
unreachable, unreachable,
@ -356,5 +378,7 @@ define_symbols! {
unsize, unsize,
usize, usize,
v1, v1,
exhaustive_patterns,
min_exhaustive_patterns,
va_list va_list
} }

View file

@ -24,6 +24,7 @@ parser.workspace = true
tt.workspace = true tt.workspace = true
stdx.workspace = true stdx.workspace = true
span.workspace = true span.workspace = true
intern.workspace = true
[dev-dependencies] [dev-dependencies]
test-utils.workspace = true test-utils.workspace = true

View file

@ -1,10 +1,11 @@
//! This module adds real-world mbe examples for benchmark tests //! This module adds real-world mbe examples for benchmark tests
use intern::Symbol;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use span::{Edition, Span}; use span::{Edition, Span};
use syntax::{ use syntax::{
ast::{self, HasName}, ast::{self, HasName},
AstNode, SmolStr, AstNode,
}; };
use test_utils::{bench, bench_fixture, skip_slow_tests}; use test_utils::{bench, bench_fixture, skip_slow_tests};
@ -228,7 +229,7 @@ fn invocation_fixtures(
fn make_ident(ident: &str) -> tt::TokenTree<Span> { fn make_ident(ident: &str) -> tt::TokenTree<Span> {
tt::Leaf::Ident(tt::Ident { tt::Leaf::Ident(tt::Ident {
span: DUMMY, span: DUMMY,
text: SmolStr::new(ident), sym: Symbol::intern(ident),
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
}) })
.into() .into()
@ -239,7 +240,7 @@ fn invocation_fixtures(
fn make_literal(lit: &str) -> tt::TokenTree<Span> { fn make_literal(lit: &str) -> tt::TokenTree<Span> {
tt::Leaf::Literal(tt::Literal { tt::Leaf::Literal(tt::Literal {
span: DUMMY, span: DUMMY,
text: SmolStr::new(lit), symbol: Symbol::intern(lit),
kind: tt::LitKind::Str, kind: tt::LitKind::Str,
suffix: None, suffix: None,
}) })

View file

@ -5,9 +5,9 @@
mod matcher; mod matcher;
mod transcriber; mod transcriber;
use intern::Symbol;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use span::{Edition, Span}; use span::{Edition, Span};
use syntax::SmolStr;
use crate::{parser::MetaVarKind, ExpandError, ExpandResult, MatchedArmIndex}; use crate::{parser::MetaVarKind, ExpandError, ExpandResult, MatchedArmIndex};
@ -110,12 +110,12 @@ pub(crate) fn expand_rules(
/// the `Bindings` we should take. We push to the stack when we enter a /// the `Bindings` we should take. We push to the stack when we enter a
/// repetition. /// repetition.
/// ///
/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to /// In other words, `Bindings` is a *multi* mapping from `Symbol` to
/// `tt::TokenTree`, where the index to select a particular `TokenTree` among /// `tt::TokenTree`, where the index to select a particular `TokenTree` among
/// many is not a plain `usize`, but a `&[usize]`. /// many is not a plain `usize`, but a `&[usize]`.
#[derive(Debug, Default, Clone, PartialEq, Eq)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
struct Bindings { struct Bindings {
inner: FxHashMap<SmolStr, Binding>, inner: FxHashMap<Symbol, Binding>,
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]

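The doc comment above calls `Bindings` a multi mapping: a metavariable captured under nested `$( ... )*` repetitions needs one index per repetition level to name a concrete capture. A standalone sketch of that shape, with `String` standing in for both the interned `Symbol` keys and the `tt::TokenTree` values (hypothetical types, not the real matcher machinery):

```rust
use std::collections::HashMap;

#[derive(Debug)]
enum Binding {
    Leaf(String),          // a single captured fragment
    Nested(Vec<Binding>),  // one level of `$( ... )*` repetition
}

#[derive(Debug, Default)]
struct Bindings {
    inner: HashMap<String, Binding>,
}

impl Bindings {
    /// Walk one index per enclosing repetition to reach the capture for `name`.
    fn get(&self, name: &str, path: &[usize]) -> Option<&str> {
        let mut binding = self.inner.get(name)?;
        for &idx in path {
            match binding {
                Binding::Nested(inner) => binding = inner.get(idx)?,
                Binding::Leaf(_) => return None,
            }
        }
        match binding {
            Binding::Leaf(s) => Some(s.as_str()),
            Binding::Nested(_) => None,
        }
    }
}

fn main() {
    // `$x` captured twice by one repetition: inputs `a` and `b`.
    let mut bindings = Bindings::default();
    bindings.inner.insert(
        "x".to_owned(),
        Binding::Nested(vec![Binding::Leaf("a".into()), Binding::Leaf("b".into())]),
    );
    assert_eq!(bindings.get("x", &[1]), Some("b"));
}
```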
View file

@ -61,9 +61,9 @@
use std::{rc::Rc, sync::Arc}; use std::{rc::Rc, sync::Arc};
use intern::{sym, Symbol};
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use span::{Edition, Span}; use span::{Edition, Span};
use syntax::SmolStr;
use tt::{iter::TtIter, DelimSpan}; use tt::{iter::TtIter, DelimSpan};
use crate::{ use crate::{
@ -74,12 +74,12 @@ use crate::{
}; };
impl Bindings { impl Bindings {
fn push_optional(&mut self, name: &SmolStr) { fn push_optional(&mut self, name: Symbol) {
self.inner.insert(name.clone(), Binding::Fragment(Fragment::Empty)); self.inner.insert(name, Binding::Fragment(Fragment::Empty));
} }
fn push_empty(&mut self, name: &SmolStr) { fn push_empty(&mut self, name: Symbol) {
self.inner.insert(name.clone(), Binding::Empty); self.inner.insert(name, Binding::Empty);
} }
fn bindings(&self) -> impl Iterator<Item = &Binding> { fn bindings(&self) -> impl Iterator<Item = &Binding> {
@ -127,10 +127,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree<Span>, edition:
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
enum BindingKind { enum BindingKind {
Empty(SmolStr), Empty(Symbol),
Optional(SmolStr), Optional(Symbol),
Fragment(SmolStr, Fragment), Fragment(Symbol, Fragment),
Missing(SmolStr, MetaVarKind), Missing(Symbol, MetaVarKind),
Nested(usize, usize), Nested(usize, usize),
} }
@ -178,20 +178,20 @@ impl BindingsBuilder {
} }
} }
fn push_empty(&mut self, idx: &mut BindingsIdx, var: &SmolStr) { fn push_empty(&mut self, idx: &mut BindingsIdx, var: &Symbol) {
self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Empty(var.clone())))); self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Empty(var.clone()))));
} }
fn push_optional(&mut self, idx: &mut BindingsIdx, var: &SmolStr) { fn push_optional(&mut self, idx: &mut BindingsIdx, var: &Symbol) {
self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone())))); self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
} }
fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) { fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &Symbol, fragment: Fragment) {
self.nodes[idx.0] self.nodes[idx.0]
.push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment)))); .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
} }
fn push_missing(&mut self, idx: &mut BindingsIdx, var: &SmolStr, kind: MetaVarKind) { fn push_missing(&mut self, idx: &mut BindingsIdx, var: &Symbol, kind: MetaVarKind) {
self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Missing(var.clone(), kind)))); self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Missing(var.clone(), kind))));
} }
@ -219,10 +219,10 @@ impl BindingsBuilder {
for cmd in nodes { for cmd in nodes {
match cmd { match cmd {
BindingKind::Empty(name) => { BindingKind::Empty(name) => {
bindings.push_empty(name); bindings.push_empty(name.clone());
} }
BindingKind::Optional(name) => { BindingKind::Optional(name) => {
bindings.push_optional(name); bindings.push_optional(name.clone());
} }
BindingKind::Fragment(name, fragment) => { BindingKind::Fragment(name, fragment) => {
bindings.inner.insert(name.clone(), Binding::Fragment(fragment.clone())); bindings.inner.insert(name.clone(), Binding::Fragment(fragment.clone()));
@ -507,7 +507,7 @@ fn match_loop_inner<'t>(
} }
OpDelimited::Op(Op::Literal(lhs)) => { OpDelimited::Op(Op::Literal(lhs)) => {
if let Ok(rhs) = src.clone().expect_leaf() { if let Ok(rhs) = src.clone().expect_leaf() {
if matches!(rhs, tt::Leaf::Literal(it) if it.text == lhs.text) { if matches!(rhs, tt::Leaf::Literal(it) if it.symbol == lhs.symbol) {
item.dot.next(); item.dot.next();
} else { } else {
res.add_err(ExpandError::UnexpectedToken); res.add_err(ExpandError::UnexpectedToken);
@ -521,7 +521,7 @@ fn match_loop_inner<'t>(
} }
OpDelimited::Op(Op::Ident(lhs)) => { OpDelimited::Op(Op::Ident(lhs)) => {
if let Ok(rhs) = src.clone().expect_leaf() { if let Ok(rhs) = src.clone().expect_leaf() {
if matches!(rhs, tt::Leaf::Ident(it) if it.text == lhs.text) { if matches!(rhs, tt::Leaf::Ident(it) if it.sym == lhs.sym) {
item.dot.next(); item.dot.next();
} else { } else {
res.add_err(ExpandError::UnexpectedToken); res.add_err(ExpandError::UnexpectedToken);
@ -554,7 +554,7 @@ fn match_loop_inner<'t>(
// ident, not a punct. // ident, not a punct.
ExpandError::UnexpectedToken ExpandError::UnexpectedToken
} else { } else {
let lhs: SmolStr = lhs.collect(); let lhs = lhs.collect::<String>();
ExpandError::binding_error(format!("expected punct: `{lhs}`")) ExpandError::binding_error(format!("expected punct: `{lhs}`"))
} }
} else { } else {
@ -759,7 +759,9 @@ fn match_meta_var(
// [1]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576 // [1]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576
match input.peek_n(0) { match input.peek_n(0) {
Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it)))
if it.text == "_" || it.text == "let" || it.text == "const" => if it.sym == sym::underscore
|| it.sym == sym::let_
|| it.sym == sym::const_ =>
{ {
return ExpandResult::only_err(ExpandError::NoMatchingRule) return ExpandResult::only_err(ExpandError::NoMatchingRule)
} }
@ -824,7 +826,7 @@ fn match_meta_var(
expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens)) expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens))
} }
fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) {
for op in pattern.iter() { for op in pattern.iter() {
match op { match op {
Op::Var { name, .. } => collector_fun(name.clone()), Op::Var { name, .. } => collector_fun(name.clone()),
@ -908,13 +910,13 @@ fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) ->
let mut fork = iter.clone(); let mut fork = iter.clone();
let ok = match separator { let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
Ok(rhs) => rhs.text == lhs.text, Ok(rhs) => rhs.sym == lhs.sym,
Err(_) => false, Err(_) => false,
}, },
Separator::Literal(lhs) => match fork.expect_literal() { Separator::Literal(lhs) => match fork.expect_literal() {
Ok(rhs) => match rhs { Ok(rhs) => match rhs {
tt::Leaf::Literal(rhs) => rhs.text == lhs.text, tt::Leaf::Literal(rhs) => rhs.symbol == lhs.symbol,
tt::Leaf::Ident(rhs) => rhs.text == lhs.text, tt::Leaf::Ident(rhs) => rhs.sym == lhs.symbol,
tt::Leaf::Punct(_) => false, tt::Leaf::Punct(_) => false,
}, },
Err(_) => false, Err(_) => false,

View file

@ -1,8 +1,8 @@
//! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like //! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like
//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` //! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
use intern::{sym, Symbol};
use span::Span; use span::Span;
use syntax::{format_smolstr, SmolStr};
use tt::Delimiter; use tt::Delimiter;
use crate::{ use crate::{
@ -12,16 +12,16 @@ use crate::{
}; };
impl Bindings { impl Bindings {
fn get(&self, name: &str) -> Result<&Binding, ExpandError> { fn get(&self, name: &Symbol) -> Result<&Binding, ExpandError> {
match self.inner.get(name) { match self.inner.get(name) {
Some(binding) => Ok(binding), Some(binding) => Ok(binding),
None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))), None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name.as_str())))),
} }
} }
fn get_fragment( fn get_fragment(
&self, &self,
name: &str, name: &Symbol,
mut span: Span, mut span: Span,
nesting: &mut [NestingState], nesting: &mut [NestingState],
marker: impl Fn(&mut Span), marker: impl Fn(&mut Span),
@ -97,7 +97,7 @@ impl Bindings {
| MetaVarKind::Expr | MetaVarKind::Expr
| MetaVarKind::Ident => { | MetaVarKind::Ident => {
Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: SmolStr::new_static("missing"), sym: sym::missing.clone(),
span, span,
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
}))) })))
@ -112,7 +112,7 @@ impl Bindings {
spacing: tt::Spacing::Joint, spacing: tt::Spacing::Joint,
})), })),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: SmolStr::new_static("missing"), sym: sym::missing.clone(),
span, span,
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
})), })),
@ -121,7 +121,7 @@ impl Bindings {
} }
MetaVarKind::Literal => { MetaVarKind::Literal => {
Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: SmolStr::new_static("\"missing\""), sym: sym::missing.clone(),
span, span,
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
}))) })))
@ -239,7 +239,7 @@ fn expand_subtree(
ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |nest| nest.idx); ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |nest| nest.idx);
arena.push( arena.push(
tt::Leaf::Literal(tt::Literal { tt::Leaf::Literal(tt::Literal {
text: format_smolstr!("{index}"), symbol: Symbol::integer(index),
span: ctx.call_site, span: ctx.call_site,
kind: tt::LitKind::Integer, kind: tt::LitKind::Integer,
suffix: None, suffix: None,
@ -254,7 +254,7 @@ fn expand_subtree(
}); });
arena.push( arena.push(
tt::Leaf::Literal(tt::Literal { tt::Leaf::Literal(tt::Literal {
text: format_smolstr!("{length}"), symbol: Symbol::integer(length),
span: ctx.call_site, span: ctx.call_site,
kind: tt::LitKind::Integer, kind: tt::LitKind::Integer,
suffix: None, suffix: None,
@ -263,7 +263,7 @@ fn expand_subtree(
); );
} }
Op::Count { name, depth } => { Op::Count { name, depth } => {
let mut binding = match ctx.bindings.get(name.as_str()) { let mut binding = match ctx.bindings.get(name) {
Ok(b) => b, Ok(b) => b,
Err(e) => { Err(e) => {
if err.is_none() { if err.is_none() {
@ -321,7 +321,7 @@ fn expand_subtree(
}; };
arena.push( arena.push(
tt::Leaf::Literal(tt::Literal { tt::Leaf::Literal(tt::Literal {
text: format_smolstr!("{c}"), symbol: Symbol::integer(c),
span: ctx.call_site, span: ctx.call_site,
suffix: None, suffix: None,
kind: tt::LitKind::Integer, kind: tt::LitKind::Integer,
@ -344,12 +344,12 @@ fn expand_subtree(
fn expand_var( fn expand_var(
ctx: &mut ExpandCtx<'_>, ctx: &mut ExpandCtx<'_>,
v: &SmolStr, v: &Symbol,
id: Span, id: Span,
marker: impl Fn(&mut Span), marker: impl Fn(&mut Span),
) -> ExpandResult<Fragment> { ) -> ExpandResult<Fragment> {
// We already handle $crate case in mbe parser // We already handle $crate case in mbe parser
debug_assert!(v != "crate"); debug_assert!(*v != sym::crate_);
match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) { match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
Ok(it) => ExpandResult::ok(it), Ok(it) => ExpandResult::ok(it),
@ -373,7 +373,7 @@ fn expand_var(
tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
.into(), .into(),
tt::Leaf::from(tt::Ident { tt::Leaf::from(tt::Ident {
text: v.clone(), sym: v.clone(),
span: id, span: id,
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
}) })

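The module doc comment at the top of this file describes the transcriber's whole job: take a template, substitute the matched bindings, and emit the resulting token tree. An ordinary `macro_rules!` macro runs through the same template, binding, and interpolation cycle and makes a handy reference point:

```rust
// Template: `fn $name() -> u32 { $value }`.
// Bindings: `$name => answer`, `$value => 42`.
// Transcription result: `fn answer() -> u32 { 42 }`.
macro_rules! make_fn {
    ($name:ident, $value:expr) => {
        fn $name() -> u32 {
            $value
        }
    };
}

make_fn!(answer, 42);

fn main() {
    assert_eq!(answer(), 42);
}
```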
View file

@ -4,8 +4,8 @@
use std::sync::Arc; use std::sync::Arc;
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use intern::{sym, Symbol};
use span::{Edition, Span, SyntaxContextId}; use span::{Edition, Span, SyntaxContextId};
use syntax::SmolStr;
use tt::iter::TtIter; use tt::iter::TtIter;
use crate::ParseError; use crate::ParseError;
@ -67,12 +67,12 @@ impl MetaTemplate {
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Op { pub(crate) enum Op {
Var { Var {
name: SmolStr, name: Symbol,
kind: Option<MetaVarKind>, kind: Option<MetaVarKind>,
id: Span, id: Span,
}, },
Ignore { Ignore {
name: SmolStr, name: Symbol,
id: Span, id: Span,
}, },
Index { Index {
@ -82,7 +82,7 @@ pub(crate) enum Op {
depth: usize, depth: usize,
}, },
Count { Count {
name: SmolStr, name: Symbol,
// FIXME: `usize` once we drop support for 1.76 // FIXME: `usize` once we drop support for 1.76
depth: Option<usize>, depth: Option<usize>,
}, },
@ -138,8 +138,8 @@ impl PartialEq for Separator {
use Separator::*; use Separator::*;
match (self, other) { match (self, other) {
(Ident(a), Ident(b)) => a.text == b.text, (Ident(a), Ident(b)) => a.sym == b.sym,
(Literal(a), Literal(b)) => a.text == b.text, (Literal(a), Literal(b)) => a.symbol == b.symbol,
(Puncts(a), Puncts(b)) if a.len() == b.len() => { (Puncts(a), Puncts(b)) if a.len() == b.len() => {
let a_iter = a.iter().map(|a| a.char); let a_iter = a.iter().map(|a| a.char);
let b_iter = b.iter().map(|b| b.char); let b_iter = b.iter().map(|b| b.char);
@ -203,23 +203,23 @@ fn next_op(
} }
}, },
tt::TokenTree::Leaf(leaf) => match leaf { tt::TokenTree::Leaf(leaf) => match leaf {
tt::Leaf::Ident(ident) if ident.text == "crate" => { tt::Leaf::Ident(ident) if ident.sym == sym::crate_ => {
// We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
Op::Ident(tt::Ident { Op::Ident(tt::Ident {
text: "$crate".into(), sym: sym::dollar_crate.clone(),
span: ident.span, span: ident.span,
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
}) })
} }
tt::Leaf::Ident(ident) => { tt::Leaf::Ident(ident) => {
let kind = eat_fragment_kind(edition, src, mode)?; let kind = eat_fragment_kind(edition, src, mode)?;
let name = ident.text.clone(); let name = ident.sym.clone();
let id = ident.span; let id = ident.span;
Op::Var { name, kind, id } Op::Var { name, kind, id }
} }
tt::Leaf::Literal(lit) if is_boolean_literal(lit) => { tt::Leaf::Literal(lit) if is_boolean_literal(lit) => {
let kind = eat_fragment_kind(edition, src, mode)?; let kind = eat_fragment_kind(edition, src, mode)?;
let name = lit.text.clone(); let name = lit.symbol.clone();
let id = lit.span; let id = lit.span;
Op::Var { name, kind, id } Op::Var { name, kind, id }
} }
@ -277,7 +277,7 @@ fn eat_fragment_kind(
let ident = src let ident = src
.expect_ident() .expect_ident()
.map_err(|()| ParseError::unexpected("missing fragment specifier"))?; .map_err(|()| ParseError::unexpected("missing fragment specifier"))?;
let kind = match ident.text.as_str() { let kind = match ident.sym.as_str() {
"path" => MetaVarKind::Path, "path" => MetaVarKind::Path,
"ty" => MetaVarKind::Ty, "ty" => MetaVarKind::Ty,
"pat" => match edition(ident.span.ctx) { "pat" => match edition(ident.span.ctx) {
@ -303,7 +303,7 @@ fn eat_fragment_kind(
} }
fn is_boolean_literal(lit: &tt::Literal<Span>) -> bool { fn is_boolean_literal(lit: &tt::Literal<Span>) -> bool {
matches!(lit.text.as_str(), "true" | "false") matches!(lit.symbol.as_str(), "true" | "false")
} }
fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, RepeatKind), ParseError> { fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
@ -353,23 +353,23 @@ fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result
let mut args = TtIter::new(args); let mut args = TtIter::new(args);
let op = match &*func.text { let op = match &func.sym {
"ignore" => { s if sym::ignore == *s => {
if new_meta_vars { if new_meta_vars {
args.expect_dollar()?; args.expect_dollar()?;
} }
let ident = args.expect_ident()?; let ident = args.expect_ident()?;
Op::Ignore { name: ident.text.clone(), id: ident.span } Op::Ignore { name: ident.sym.clone(), id: ident.span }
} }
"index" => Op::Index { depth: parse_depth(&mut args)? }, s if sym::index == *s => Op::Index { depth: parse_depth(&mut args)? },
"len" => Op::Len { depth: parse_depth(&mut args)? }, s if sym::len == *s => Op::Len { depth: parse_depth(&mut args)? },
"count" => { s if sym::count == *s => {
if new_meta_vars { if new_meta_vars {
args.expect_dollar()?; args.expect_dollar()?;
} }
let ident = args.expect_ident()?; let ident = args.expect_ident()?;
let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None }; let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None };
Op::Count { name: ident.text.clone(), depth } Op::Count { name: ident.sym.clone(), depth }
} }
_ => return Err(()), _ => return Err(()),
}; };
@ -384,11 +384,11 @@ fn parse_metavar_expr(new_meta_vars: bool, src: &mut TtIter<'_, Span>) -> Result
fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> { fn parse_depth(src: &mut TtIter<'_, Span>) -> Result<usize, ()> {
if src.len() == 0 { if src.len() == 0 {
Ok(0) Ok(0)
} else if let tt::Leaf::Literal(tt::Literal { text, suffix: None, .. }) = } else if let tt::Leaf::Literal(tt::Literal { symbol: text, suffix: None, .. }) =
src.expect_literal()? src.expect_literal()?
{ {
// Suffixes are not allowed. // Suffixes are not allowed.
text.parse().map_err(|_| ()) text.as_str().parse().map_err(|_| ())
} else { } else {
Err(()) Err(())
} }
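
`parse_metavar_expr` above now dispatches on interned names (`ignore`, `index`, `len`, `count`) instead of string literals, and `parse_depth` defaults to 0 when no argument is given. A standalone sketch of that dispatch with a hypothetical `MetaVarExpr` enum and plain strings in place of the real `Op` and `TtIter` types:

```rust
#[derive(Debug, PartialEq)]
enum MetaVarExpr {
    Ignore { name: String },
    Index { depth: usize },
    Len { depth: usize },
    Count { name: String, depth: Option<usize> },
}

// The depth argument defaults to 0 when it is absent, mirroring `parse_depth`.
fn parse_depth(arg: Option<&str>) -> Result<usize, ()> {
    match arg {
        None => Ok(0),
        Some(d) => d.parse().map_err(|_| ()),
    }
}

fn parse(func: &str, args: &[&str]) -> Result<MetaVarExpr, ()> {
    Ok(match func {
        "ignore" => MetaVarExpr::Ignore { name: args.first().ok_or(())?.to_string() },
        "index" => MetaVarExpr::Index { depth: parse_depth(args.first().copied())? },
        "len" => MetaVarExpr::Len { depth: parse_depth(args.first().copied())? },
        "count" => {
            let name = args.first().ok_or(())?.to_string();
            // An optional second argument picks which repetition depth to count.
            let depth = match args.get(1) {
                Some(d) => Some(d.parse::<usize>().map_err(|_| ())?),
                None => None,
            };
            MetaVarExpr::Count { name, depth }
        }
        // Unknown metavariable expressions are rejected.
        _ => return Err(()),
    })
}

fn main() {
    assert_eq!(parse("index", &[]), Ok(MetaVarExpr::Index { depth: 0 }));
    assert_eq!(
        parse("count", &["x", "1"]),
        Ok(MetaVarExpr::Count { name: "x".into(), depth: Some(1) })
    );
    assert!(parse("frob", &[]).is_err());
}
```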

View file

@ -2,6 +2,7 @@
use std::fmt; use std::fmt;
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, SpanAnchor, SpanData, SpanMap}; use span::{Edition, SpanAnchor, SpanData, SpanMap};
use stdx::{format_to, never, non_empty_vec::NonEmptyVec}; use stdx::{format_to, never, non_empty_vec::NonEmptyVec};
@ -322,7 +323,7 @@ where
() => { () => {
tt::Ident { tt::Ident {
span: conv.span_for(abs_range), span: conv.span_for(abs_range),
text: token.to_text(conv), sym: Symbol::intern(&token.to_text(conv)),
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
} }
.into() .into()
@ -332,14 +333,14 @@ where
T![true] | T![false] => make_ident!(), T![true] | T![false] => make_ident!(),
IDENT => { IDENT => {
let text = token.to_text(conv); let text = token.to_text(conv);
tt::Ident::new(text, conv.span_for(abs_range)).into() tt::Ident::new(&text, conv.span_for(abs_range)).into()
} }
UNDERSCORE => make_ident!(), UNDERSCORE => make_ident!(),
k if k.is_keyword() => make_ident!(), k if k.is_keyword() => make_ident!(),
k if k.is_literal() => { k if k.is_literal() => {
let text = token.to_text(conv); let text = token.to_text(conv);
let span = conv.span_for(abs_range); let span = conv.span_for(abs_range);
token_to_literal(text, span).into() token_to_literal(&text, span).into()
} }
LIFETIME_IDENT => { LIFETIME_IDENT => {
let apostrophe = tt::Leaf::from(tt::Punct { let apostrophe = tt::Leaf::from(tt::Punct {
@ -351,7 +352,7 @@ where
token_trees.push(apostrophe.into()); token_trees.push(apostrophe.into());
let ident = tt::Leaf::from(tt::Ident { let ident = tt::Leaf::from(tt::Ident {
text: SmolStr::new(&token.to_text(conv)[1..]), sym: Symbol::intern(&token.to_text(conv)[1..]),
span: conv.span_for(TextRange::new( span: conv.span_for(TextRange::new(
abs_range.start() + TextSize::of('\''), abs_range.start() + TextSize::of('\''),
abs_range.end(), abs_range.end(),
@ -436,7 +437,7 @@ fn is_single_token_op(kind: SyntaxKind) -> bool {
/// And then quote the string, which is needed to convert to `tt::Literal` /// And then quote the string, which is needed to convert to `tt::Literal`
/// ///
/// Note that proc-macros desugar with string literals whereas macro_rules macros desugar with raw string literals. /// Note that proc-macros desugar with string literals whereas macro_rules macros desugar with raw string literals.
pub fn desugar_doc_comment_text(text: &str, mode: DocCommentDesugarMode) -> (SmolStr, tt::LitKind) { pub fn desugar_doc_comment_text(text: &str, mode: DocCommentDesugarMode) -> (Symbol, tt::LitKind) {
match mode { match mode {
DocCommentDesugarMode::Mbe => { DocCommentDesugarMode::Mbe => {
let mut num_of_hashes = 0; let mut num_of_hashes = 0;
@ -451,11 +452,11 @@ pub fn desugar_doc_comment_text(text: &str, mode: DocCommentDesugarMode) -> (Smo
} }
// Quote raw string with delimiters // Quote raw string with delimiters
(text.into(), tt::LitKind::StrRaw(num_of_hashes)) (Symbol::intern(text), tt::LitKind::StrRaw(num_of_hashes))
} }
// Quote string with delimiters // Quote string with delimiters
DocCommentDesugarMode::ProcMacro => { DocCommentDesugarMode::ProcMacro => {
(format_smolstr!("{}", text.escape_debug()), tt::LitKind::Str) (Symbol::intern(&format_smolstr!("{}", text.escape_debug())), tt::LitKind::Str)
} }
} }
} }
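
`desugar_doc_comment_text` turns the body of a `///` comment into the literal of a `#[doc = ...]` attribute: the mbe path wraps it in a raw string with enough `#`s to avoid clashing with the text, while the proc-macro path escapes it into an ordinary string literal. A hypothetical standalone helper with the same two modes; its hash counting is deliberately more conservative than the real function's.

```rust
fn desugar(text: &str, raw: bool) -> String {
    if raw {
        // Use one more `#` than the longest run of `#` in the text, so the
        // chosen raw-string delimiter cannot appear inside the body.
        let mut run = 0usize;
        let mut max = 0usize;
        for c in text.chars() {
            run = if c == '#' { run + 1 } else { 0 };
            max = max.max(run);
        }
        let hashes = "#".repeat(max + 1);
        format!("r{hashes}\"{text}\"{hashes}")
    } else {
        // Proc-macro mode: escape into a plain string literal instead.
        format!("\"{}\"", text.escape_debug())
    }
}

fn main() {
    assert_eq!(desugar("plain docs", true), r###"r#"plain docs"#"###);
    assert_eq!(desugar(r##"has a "# inside"##, true), r####"r##"has a "# inside"##"####);
    assert_eq!(desugar("escaped \"quote\"", false), r#""escaped \"quote\"""#);
}
```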
@ -471,7 +472,7 @@ fn convert_doc_comment<S: Copy>(
let mk_ident = |s: &str| { let mk_ident = |s: &str| {
tt::TokenTree::from(tt::Leaf::from(tt::Ident { tt::TokenTree::from(tt::Leaf::from(tt::Ident {
text: s.into(), sym: Symbol::intern(s),
span, span,
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
})) }))
@ -494,7 +495,7 @@ fn convert_doc_comment<S: Copy>(
text = &text[0..text.len() - 2]; text = &text[0..text.len() - 2];
} }
let (text, kind) = desugar_doc_comment_text(text, mode); let (text, kind) = desugar_doc_comment_text(text, mode);
let lit = tt::Literal { text, span, kind, suffix: None }; let lit = tt::Literal { symbol: text, span, kind, suffix: None };
tt::TokenTree::from(tt::Leaf::from(lit)) tt::TokenTree::from(tt::Leaf::from(lit))
}; };
@ -928,7 +929,12 @@ where
fn float_split(&mut self, has_pseudo_dot: bool) { fn float_split(&mut self, has_pseudo_dot: bool) {
let (text, span) = match self.cursor.token_tree() { let (text, span) = match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf( Some(tt::buffer::TokenTreeRef::Leaf(
tt::Leaf::Literal(tt::Literal { text, span, kind: tt::LitKind::Float, suffix: _ }), tt::Leaf::Literal(tt::Literal {
symbol: text,
span,
kind: tt::LitKind::Float,
suffix: _,
}),
_, _,
)) => (text.as_str(), *span), )) => (text.as_str(), *span),
tt => unreachable!("{tt:?}"), tt => unreachable!("{tt:?}"),
@ -988,7 +994,7 @@ where
self.buf.push_str("r#"); self.buf.push_str("r#");
self.text_pos += TextSize::of("r#"); self.text_pos += TextSize::of("r#");
} }
let r = (ident.text.as_str(), ident.span); let r = (ident.sym.as_str(), ident.span);
self.cursor = self.cursor.bump(); self.cursor = self.cursor.bump();
r r
} }

View file

@ -49,23 +49,22 @@ pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>)
}; };
res.push(kind); res.push(kind);
if kind == FLOAT_NUMBER && !lit.text.ends_with('.') { if kind == FLOAT_NUMBER && !lit.symbol.as_str().ends_with('.') {
// Tag the token as joint if it is float with a fractional part // Tag the token as joint if it is float with a fractional part
// we use this jointness to inform the parser about what token split // we use this jointness to inform the parser about what token split
// event to emit when we encounter a float literal in a field access // event to emit when we encounter a float literal in a field access
res.was_joint(); res.was_joint();
} }
} }
tt::Leaf::Ident(ident) => match ident.text.as_ref() { tt::Leaf::Ident(ident) => match ident.sym.as_str() {
"_" => res.push(T![_]), "_" => res.push(T![_]),
i if i.starts_with('\'') => res.push(LIFETIME_IDENT), i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
_ if ident.is_raw.yes() => res.push(IDENT), _ if ident.is_raw.yes() => res.push(IDENT),
_ => match SyntaxKind::from_keyword(&ident.text) { text => match SyntaxKind::from_keyword(text) {
Some(kind) => res.push(kind), Some(kind) => res.push(kind),
None => { None => {
let contextual_keyword = let contextual_keyword = SyntaxKind::from_contextual_keyword(text)
SyntaxKind::from_contextual_keyword(&ident.text) .unwrap_or(SyntaxKind::IDENT);
.unwrap_or(SyntaxKind::IDENT);
res.push_ident(contextual_keyword); res.push_ident(contextual_keyword);
} }
}, },

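In `to_parser_input` above, an identifier's token kind is decided from its text: `_` and lifetimes get dedicated kinds, raw identifiers always stay plain identifiers, and everything else is looked up in the keyword and contextual-keyword tables. A standalone sketch of that decision order; the `Kind` enum and the two tables are stand-ins for `SyntaxKind` and its lookup functions.

```rust
#[derive(Debug, PartialEq)]
enum Kind {
    Underscore,
    Lifetime,
    Keyword,
    ContextualKeyword,
    Ident,
}

// Stand-in tables; the real lookups are `SyntaxKind::from_keyword` and
// `SyntaxKind::from_contextual_keyword`.
const KEYWORDS: &[&str] = &["fn", "struct", "let", "true", "false"];
const CONTEXTUAL_KEYWORDS: &[&str] = &["union", "auto", "macro_rules"];

fn classify(text: &str, is_raw: bool) -> Kind {
    match text {
        "_" => Kind::Underscore,
        t if t.starts_with('\'') => Kind::Lifetime,
        // A raw identifier such as `r#fn` is always an ordinary identifier.
        _ if is_raw => Kind::Ident,
        t if KEYWORDS.contains(&t) => Kind::Keyword,
        t if CONTEXTUAL_KEYWORDS.contains(&t) => Kind::ContextualKeyword,
        _ => Kind::Ident,
    }
}

fn main() {
    assert_eq!(classify("fn", false), Kind::Keyword);
    assert_eq!(classify("fn", true), Kind::Ident);
    assert_eq!(classify("union", false), Kind::ContextualKeyword);
    assert_eq!(classify("'static", false), Kind::Lifetime);
}
```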
View file

@ -28,8 +28,7 @@ span.workspace = true
# InternIds for the syntax context # InternIds for the syntax context
base-db.workspace = true base-db.workspace = true
la-arena.workspace = true la-arena.workspace = true
# only here to parse via token_to_literal intern.workspace = true
mbe.workspace = true
[lints] [lints]
workspace = true workspace = true

View file

@ -13,7 +13,6 @@ use base_db::Env;
use paths::{AbsPath, AbsPathBuf}; use paths::{AbsPath, AbsPathBuf};
use span::Span; use span::Span;
use std::{fmt, io, sync::Arc}; use std::{fmt, io, sync::Arc};
use tt::SmolStr;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -66,7 +65,7 @@ impl MacroDylib {
pub struct ProcMacro { pub struct ProcMacro {
process: Arc<ProcMacroProcessSrv>, process: Arc<ProcMacroProcessSrv>,
dylib_path: Arc<AbsPathBuf>, dylib_path: Arc<AbsPathBuf>,
name: SmolStr, name: Box<str>,
kind: ProcMacroKind, kind: ProcMacroKind,
} }

View file

@ -158,6 +158,7 @@ type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str)
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use base_db::FileId; use base_db::FileId;
use intern::{sym, Symbol};
use la_arena::RawIdx; use la_arena::RawIdx;
use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId};
use text_size::{TextRange, TextSize}; use text_size::{TextRange, TextSize};
@ -174,7 +175,7 @@ mod tests {
let token_trees = Box::new([ let token_trees = Box::new([
TokenTree::Leaf( TokenTree::Leaf(
Ident { Ident {
text: "struct".into(), sym: Symbol::intern("struct"),
span: Span { span: Span {
range: TextRange::at(TextSize::new(0), TextSize::of("struct")), range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
anchor, anchor,
@ -186,7 +187,7 @@ mod tests {
), ),
TokenTree::Leaf( TokenTree::Leaf(
Ident { Ident {
text: "Foo".into(), sym: Symbol::intern("Foo"),
span: Span { span: Span {
range: TextRange::at(TextSize::new(5), TextSize::of("r#Foo")), range: TextRange::at(TextSize::new(5), TextSize::of("r#Foo")),
anchor, anchor,
@ -197,7 +198,7 @@ mod tests {
.into(), .into(),
), ),
TokenTree::Leaf(Leaf::Literal(Literal { TokenTree::Leaf(Leaf::Literal(Literal {
text: "Foo".into(), symbol: Symbol::intern("Foo"),
span: Span { span: Span {
range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")), range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
anchor, anchor,
@ -230,14 +231,14 @@ mod tests {
kind: DelimiterKind::Brace, kind: DelimiterKind::Brace,
}, },
token_trees: Box::new([TokenTree::Leaf(Leaf::Literal(Literal { token_trees: Box::new([TokenTree::Leaf(Leaf::Literal(Literal {
text: "0".into(), symbol: sym::INTEGER_0.clone(),
span: Span { span: Span {
range: TextRange::at(TextSize::new(15), TextSize::of("0u32")), range: TextRange::at(TextSize::new(15), TextSize::of("0u32")),
anchor, anchor,
ctx: SyntaxContextId::ROOT, ctx: SyntaxContextId::ROOT,
}, },
kind: tt::LitKind::Integer, kind: tt::LitKind::Integer,
suffix: Some(Box::new("u32".into())), suffix: Some(sym::u32.clone()),
}))]), }))]),
}), }),
]); ]);

View file

@ -37,6 +37,7 @@
use std::collections::VecDeque; use std::collections::VecDeque;
use intern::Symbol;
use la_arena::RawIdx; use la_arena::RawIdx;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -433,8 +434,8 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> {
let id = self.token_id_of(lit.span); let id = self.token_id_of(lit.span);
let (text, suffix) = if self.version >= EXTENDED_LEAF_DATA { let (text, suffix) = if self.version >= EXTENDED_LEAF_DATA {
( (
self.intern(&lit.text), self.intern(lit.symbol.as_str()),
lit.suffix.as_ref().map(|s| self.intern(s)).unwrap_or(!0), lit.suffix.as_ref().map(|s| self.intern(s.as_str())).unwrap_or(!0),
) )
} else { } else {
(self.intern_owned(format!("{lit}")), !0) (self.intern_owned(format!("{lit}")), !0)
@ -469,11 +470,11 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> {
let idx = self.ident.len() as u32; let idx = self.ident.len() as u32;
let id = self.token_id_of(ident.span); let id = self.token_id_of(ident.span);
let text = if self.version >= EXTENDED_LEAF_DATA { let text = if self.version >= EXTENDED_LEAF_DATA {
self.intern(&ident.text) self.intern(ident.sym.as_str())
} else if ident.is_raw.yes() { } else if ident.is_raw.yes() {
self.intern_owned(format!("r#{}", ident.text,)) self.intern_owned(format!("r#{}", ident.sym.as_str(),))
} else { } else {
self.intern(&ident.text) self.intern(ident.sym.as_str())
}; };
self.ident.push(IdentRepr { id, text, is_raw: ident.is_raw.yes() }); self.ident.push(IdentRepr { id, text, is_raw: ident.is_raw.yes() });
idx << 2 | 0b11 idx << 2 | 0b11
@ -555,7 +556,7 @@ impl<'span, S: InternableSpan> Reader<'span, S> {
let span = read_span(repr.id); let span = read_span(repr.id);
tt::Leaf::Literal(if self.version >= EXTENDED_LEAF_DATA { tt::Leaf::Literal(if self.version >= EXTENDED_LEAF_DATA {
tt::Literal { tt::Literal {
text: text.into(), symbol: Symbol::intern(text),
span, span,
kind: match u16::to_le_bytes(repr.kind) { kind: match u16::to_le_bytes(repr.kind) {
[0, _] => Err(()), [0, _] => Err(()),
@ -572,15 +573,15 @@ impl<'span, S: InternableSpan> Reader<'span, S> {
_ => unreachable!(), _ => unreachable!(),
}, },
suffix: if repr.suffix != !0 { suffix: if repr.suffix != !0 {
Some(Box::new( Some(Symbol::intern(
self.text[repr.suffix as usize].as_str().into(), self.text[repr.suffix as usize].as_str(),
)) ))
} else { } else {
None None
}, },
} }
} else { } else {
tt::token_to_literal(text.into(), span) tt::token_to_literal(text, span)
}) })
.into() .into()
} }
@ -609,7 +610,7 @@ impl<'span, S: InternableSpan> Reader<'span, S> {
tt::IdentIsRaw::split_from_symbol(text) tt::IdentIsRaw::split_from_symbol(text)
}; };
tt::Leaf::Ident(tt::Ident { tt::Leaf::Ident(tt::Ident {
text: text.into(), sym: Symbol::intern(text),
span: read_span(repr.id), span: read_span(repr.id),
is_raw, is_raw,
}) })

View file

@ -25,6 +25,7 @@ base-db.workspace = true
span.workspace = true span.workspace = true
proc-macro-api.workspace = true proc-macro-api.workspace = true
ra-ap-rustc_lexer.workspace = true ra-ap-rustc_lexer.workspace = true
intern.workspace = true
[dev-dependencies] [dev-dependencies]
expect-test = "1.4.0" expect-test = "1.4.0"

View file

@ -130,14 +130,13 @@ impl ProcMacroSrvSpan for TokenId {
type Server = server_impl::token_id::TokenIdServer; type Server = server_impl::token_id::TokenIdServer;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
Self::Server { interner: &server_impl::SYMBOL_INTERNER, call_site, def_site, mixed_site } Self::Server { call_site, def_site, mixed_site }
} }
} }
impl ProcMacroSrvSpan for Span { impl ProcMacroSrvSpan for Span {
type Server = server_impl::rust_analyzer_span::RaSpanServer; type Server = server_impl::rust_analyzer_span::RaSpanServer;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server {
Self::Server { Self::Server {
interner: &server_impl::SYMBOL_INTERNER,
call_site, call_site,
def_site, def_site,
mixed_site, mixed_site,

View file

@ -14,9 +14,9 @@ mod token_stream;
pub use token_stream::TokenStream; pub use token_stream::TokenStream;
pub mod rust_analyzer_span; pub mod rust_analyzer_span;
mod symbol; // mod symbol;
pub mod token_id; pub mod token_id;
pub use symbol::*; // pub use symbol::*;
use tt::Spacing; use tt::Spacing;
fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> { fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> {

View file

@ -10,13 +10,14 @@ use std::{
ops::{Bound, Range}, ops::{Bound, Range},
}; };
use intern::Symbol;
use proc_macro::bridge::{self, server}; use proc_macro::bridge::{self, server};
use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER};
use tt::{TextRange, TextSize}; use tt::{TextRange, TextSize};
use crate::server_impl::{ use crate::server_impl::{
delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal, delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal,
token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER, token_stream::TokenStreamBuilder,
}; };
mod tt { mod tt {
pub use tt::*; pub use tt::*;
@ -36,7 +37,6 @@ pub struct SourceFile;
pub struct FreeFunctions; pub struct FreeFunctions;
pub struct RaSpanServer { pub struct RaSpanServer {
pub(crate) interner: SymbolInternerRef,
// FIXME: Report this back to the caller to track as dependencies // FIXME: Report this back to the caller to track as dependencies
pub tracked_env_vars: HashMap<Box<str>, Option<Box<str>>>, pub tracked_env_vars: HashMap<Box<str>, Option<Box<str>>>,
// FIXME: Report this back to the caller to track as dependencies // FIXME: Report this back to the caller to track as dependencies
@ -126,15 +126,10 @@ impl server::FreeFunctions for RaSpanServer {
let lit = &lit[start_offset..lit.len() - end_offset]; let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix { let suffix = match suffix {
"" | "_" => None, "" | "_" => None,
suffix => Some(Symbol::intern(self.interner, suffix)), suffix => Some(Symbol::intern(suffix)),
}; };
Ok(bridge::Literal { Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span: self.call_site })
kind,
symbol: Symbol::intern(self.interner, lit),
suffix,
span: self.call_site,
})
} }
fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) { fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
@ -170,9 +165,9 @@ impl server::TokenStream for RaSpanServer {
} }
bridge::TokenTree::Ident(ident) => { bridge::TokenTree::Ident(ident) => {
let text = ident.sym.text(self.interner); let text = ident.sym;
let ident: tt::Ident = tt::Ident { let ident: tt::Ident = tt::Ident {
text, sym: text,
span: ident.span, span: ident.span,
is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No }, is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
}; };
@ -183,8 +178,8 @@ impl server::TokenStream for RaSpanServer {
bridge::TokenTree::Literal(literal) => { bridge::TokenTree::Literal(literal) => {
let literal = tt::Literal { let literal = tt::Literal {
text: literal.symbol.text(self.interner), symbol: literal.symbol,
suffix: literal.suffix.map(|it| Box::new(it.text(self.interner))), suffix: literal.suffix,
span: literal.span, span: literal.span,
kind: literal_kind_to_internal(literal.kind), kind: literal_kind_to_internal(literal.kind),
}; };
@ -255,7 +250,7 @@ impl server::TokenStream for RaSpanServer {
.map(|tree| match tree { .map(|tree| match tree {
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
bridge::TokenTree::Ident(bridge::Ident { bridge::TokenTree::Ident(bridge::Ident {
sym: Symbol::intern(self.interner, &ident.text), sym: ident.sym,
is_raw: ident.is_raw.yes(), is_raw: ident.is_raw.yes(),
span: ident.span, span: ident.span,
}) })
@ -264,8 +259,8 @@ impl server::TokenStream for RaSpanServer {
bridge::TokenTree::Literal(bridge::Literal { bridge::TokenTree::Literal(bridge::Literal {
span: lit.span, span: lit.span,
kind: literal_kind_to_external(lit.kind), kind: literal_kind_to_external(lit.kind),
symbol: Symbol::intern(self.interner, &lit.text), symbol: lit.symbol,
suffix: lit.suffix.map(|it| Symbol::intern(self.interner, &it)), suffix: lit.suffix,
}) })
} }
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
@ -464,12 +459,95 @@ impl server::Server for RaSpanServer {
} }
fn intern_symbol(ident: &str) -> Self::Symbol { fn intern_symbol(ident: &str) -> Self::Symbol {
// FIXME: should be `self.interner` once the proc-macro api allows it. Symbol::intern(ident)
Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident))
} }
fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
// FIXME: should be `self.interner` once the proc-macro api allows it. f(symbol.as_str())
f(symbol.text(&SYMBOL_INTERNER).as_str()) }
}
#[cfg(test)]
mod tests {
use span::SyntaxContextId;
use super::*;
#[test]
fn test_ra_server_to_string() {
let span = Span {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: span::FileId::from_raw(0),
ast_id: span::ErasedFileAstId::from_raw(0.into()),
},
ctx: SyntaxContextId::ROOT,
};
let s = TokenStream {
token_trees: vec![
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("struct"),
span,
is_raw: tt::IdentIsRaw::No,
})),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("T"),
span: span,
is_raw: tt::IdentIsRaw::No,
})),
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: span,
close: span,
kind: tt::DelimiterKind::Brace,
},
token_trees: Box::new([]),
}),
],
};
assert_eq!(s.to_string(), "struct T {}");
}
#[test]
fn test_ra_server_from_str() {
let span = Span {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: span::FileId::from_raw(0),
ast_id: span::ErasedFileAstId::from_raw(0.into()),
},
ctx: SyntaxContextId::ROOT,
};
let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: span,
close: span,
kind: tt::DelimiterKind::Parenthesis,
},
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
is_raw: tt::IdentIsRaw::No,
sym: Symbol::intern("a"),
span,
}))]),
});
let t1 = TokenStream::from_str("(a)", span).unwrap();
assert_eq!(t1.token_trees.len(), 1);
assert_eq!(t1.token_trees[0], subtree_paren_a);
let t2 = TokenStream::from_str("(a);", span).unwrap();
assert_eq!(t2.token_trees.len(), 2);
assert_eq!(t2.token_trees[0], subtree_paren_a);
let underscore = TokenStream::from_str("_", span).unwrap();
assert_eq!(
underscore.token_trees[0],
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("_"),
span,
is_raw: tt::IdentIsRaw::No,
}))
);
} }
} }

View file

@ -1,7 +1,6 @@
//! Symbol interner for proc-macro-srv //! Symbol interner for proc-macro-srv
use std::{cell::RefCell, collections::HashMap, thread::LocalKey}; use std::{cell::RefCell, collections::HashMap, thread::LocalKey};
use tt::SmolStr;
thread_local! { thread_local! {
pub(crate) static SYMBOL_INTERNER: RefCell<SymbolInterner> = Default::default(); pub(crate) static SYMBOL_INTERNER: RefCell<SymbolInterner> = Default::default();

View file

@ -5,11 +5,12 @@ use std::{
ops::{Bound, Range}, ops::{Bound, Range},
}; };
use intern::Symbol;
use proc_macro::bridge::{self, server}; use proc_macro::bridge::{self, server};
use crate::server_impl::{ use crate::server_impl::{
delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal, delim_to_external, delim_to_internal, literal_kind_to_external, literal_kind_to_internal,
token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER, token_stream::TokenStreamBuilder,
}; };
mod tt { mod tt {
pub use proc_macro_api::msg::TokenId; pub use proc_macro_api::msg::TokenId;
@ -36,7 +37,6 @@ pub struct SourceFile;
pub struct FreeFunctions; pub struct FreeFunctions;
pub struct TokenIdServer { pub struct TokenIdServer {
pub(crate) interner: SymbolInternerRef,
pub call_site: Span, pub call_site: Span,
pub def_site: Span, pub def_site: Span,
pub mixed_site: Span, pub mixed_site: Span,
@ -117,15 +117,10 @@ impl server::FreeFunctions for TokenIdServer {
let lit = &lit[start_offset..lit.len() - end_offset]; let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix { let suffix = match suffix {
"" | "_" => None, "" | "_" => None,
suffix => Some(Symbol::intern(self.interner, suffix)), suffix => Some(Symbol::intern(suffix)),
}; };
Ok(bridge::Literal { Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span: self.call_site })
kind,
symbol: Symbol::intern(self.interner, lit),
suffix,
span: self.call_site,
})
} }
fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {} fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {}
@ -159,9 +154,8 @@ impl server::TokenStream for TokenIdServer {
} }
bridge::TokenTree::Ident(ident) => { bridge::TokenTree::Ident(ident) => {
let text = ident.sym.text(self.interner);
let ident: tt::Ident = tt::Ident { let ident: tt::Ident = tt::Ident {
text, sym: ident.sym,
span: ident.span, span: ident.span,
is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No }, is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
}; };
@ -172,8 +166,8 @@ impl server::TokenStream for TokenIdServer {
bridge::TokenTree::Literal(literal) => { bridge::TokenTree::Literal(literal) => {
let literal = Literal { let literal = Literal {
text: literal.symbol.text(self.interner), symbol: literal.symbol,
suffix: literal.suffix.map(|it| Box::new(it.text(self.interner))), suffix: literal.suffix,
span: literal.span, span: literal.span,
kind: literal_kind_to_internal(literal.kind), kind: literal_kind_to_internal(literal.kind),
}; };
@ -239,7 +233,7 @@ impl server::TokenStream for TokenIdServer {
.map(|tree| match tree { .map(|tree| match tree {
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
bridge::TokenTree::Ident(bridge::Ident { bridge::TokenTree::Ident(bridge::Ident {
sym: Symbol::intern(self.interner, &ident.text), sym: ident.sym,
is_raw: ident.is_raw.yes(), is_raw: ident.is_raw.yes(),
span: ident.span, span: ident.span,
}) })
@ -248,8 +242,8 @@ impl server::TokenStream for TokenIdServer {
bridge::TokenTree::Literal(bridge::Literal { bridge::TokenTree::Literal(bridge::Literal {
span: lit.span, span: lit.span,
kind: literal_kind_to_external(lit.kind), kind: literal_kind_to_external(lit.kind),
symbol: Symbol::intern(self.interner, &lit.text), symbol: lit.symbol,
suffix: lit.suffix.map(|it| Symbol::intern(self.interner, &it)), suffix: lit.suffix,
}) })
} }
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
@ -366,11 +360,11 @@ impl server::Server for TokenIdServer {
} }
fn intern_symbol(ident: &str) -> Self::Symbol { fn intern_symbol(ident: &str) -> Self::Symbol {
Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) Symbol::intern(ident)
} }
fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
f(symbol.text(&SYMBOL_INTERNER).as_str()) f(symbol.as_str())
} }
} }
@ -383,12 +377,12 @@ mod tests {
let s = TokenStream { let s = TokenStream {
token_trees: vec![ token_trees: vec![
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "struct".into(), sym: Symbol::intern("struct"),
span: tt::TokenId(0), span: tt::TokenId(0),
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
})), })),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "T".into(), sym: Symbol::intern("T"),
span: tt::TokenId(0), span: tt::TokenId(0),
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
})), })),
@ -416,7 +410,7 @@ mod tests {
}, },
token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
text: "a".into(), sym: Symbol::intern("a"),
span: tt::TokenId(0), span: tt::TokenId(0),
}))]), }))]),
}); });
@ -433,7 +427,7 @@ mod tests {
assert_eq!( assert_eq!(
underscore.token_trees[0], underscore.token_trees[0],
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "_".into(), sym: Symbol::intern("_"),
span: tt::TokenId(0), span: tt::TokenId(0),
is_raw: tt::IdentIsRaw::No, is_raw: tt::IdentIsRaw::No,
})) }))

View file

@ -16,6 +16,7 @@ base-db.workspace = true
rustc-hash.workspace = true rustc-hash.workspace = true
span.workspace = true span.workspace = true
stdx.workspace = true stdx.workspace = true
intern.workspace = true
[lints] [lints]
workspace = true workspace = true

View file

@ -13,6 +13,7 @@ use hir_expand::{
ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacros, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacros,
}, },
}; };
use intern::Symbol;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use span::{Edition, FileId, FilePosition, FileRange, Span}; use span::{Edition, FileId, FilePosition, FileRange, Span};
use test_utils::{ use test_utils::{
@ -640,11 +641,11 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
Leaf::Literal(it) => { Leaf::Literal(it) => {
// XXX Currently replaces any literals with an empty string, but supporting // XXX Currently replaces any literals with an empty string, but supporting
// "shortening" other literals would be nice. // "shortening" other literals would be nice.
it.text = "\"\"".into(); it.symbol = Symbol::empty();
} }
Leaf::Punct(_) => {} Leaf::Punct(_) => {}
Leaf::Ident(it) => { Leaf::Ident(it) => {
it.text = it.text.chars().take(1).collect(); it.sym = Symbol::intern(&it.sym.as_str().chars().take(1).collect::<String>());
} }
} }
leaf leaf

View file

@ -13,10 +13,10 @@ doctest = false
[dependencies] [dependencies]
arrayvec.workspace = true arrayvec.workspace = true
smol_str.workspace = true
text-size.workspace = true text-size.workspace = true
stdx.workspace = true stdx.workspace = true
intern.workspace = true
ra-ap-rustc_lexer.workspace = true ra-ap-rustc_lexer.workspace = true
[features] [features]

View file

@ -2,6 +2,7 @@
//! macro definition into a list of patterns and templates. //! macro definition into a list of patterns and templates.
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use intern::sym;
use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree}; use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree};
@ -58,7 +59,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
pub fn expect_ident(&mut self) -> Result<&'a Ident<S>, ()> { pub fn expect_ident(&mut self) -> Result<&'a Ident<S>, ()> {
match self.expect_leaf()? { match self.expect_leaf()? {
Leaf::Ident(it) if it.text != "_" => Ok(it), Leaf::Ident(it) if it.sym != sym::underscore => Ok(it),
_ => Err(()), _ => Err(()),
} }
} }
@ -74,7 +75,7 @@ impl<'a, S: Copy> TtIter<'a, S> {
let it = self.expect_leaf()?; let it = self.expect_leaf()?;
match it { match it {
Leaf::Literal(_) => Ok(it), Leaf::Literal(_) => Ok(it),
Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it), Leaf::Ident(ident) if ident.sym == sym::true_ || ident.sym == sym::false_ => Ok(it),
_ => Err(()), _ => Err(()),
} }
} }

View file

@ -14,16 +14,16 @@ pub mod iter;
use std::fmt; use std::fmt;
use intern::Symbol;
use stdx::{impl_from, itertools::Itertools as _}; use stdx::{impl_from, itertools::Itertools as _};
pub use smol_str::{format_smolstr, SmolStr};
pub use text_size::{TextRange, TextSize}; pub use text_size::{TextRange, TextSize};
#[derive(Clone, PartialEq, Debug)] #[derive(Clone, PartialEq, Debug)]
pub struct Lit { pub struct Lit {
pub kind: LitKind, pub kind: LitKind,
pub symbol: SmolStr, pub symbol: Symbol,
pub suffix: Option<SmolStr>, pub suffix: Option<Symbol>,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@ -35,6 +35,9 @@ impl IdentIsRaw {
pub fn yes(self) -> bool { pub fn yes(self) -> bool {
matches!(self, IdentIsRaw::Yes) matches!(self, IdentIsRaw::Yes)
} }
pub fn no(&self) -> bool {
matches!(self, IdentIsRaw::No)
}
pub fn as_str(self) -> &'static str { pub fn as_str(self) -> &'static str {
match self { match self {
IdentIsRaw::No => "", IdentIsRaw::No => "",
@ -197,25 +200,30 @@ pub enum DelimiterKind {
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Literal<S> { pub struct Literal<S> {
// escaped // escaped
pub text: SmolStr, pub symbol: Symbol,
pub span: S, pub span: S,
pub kind: LitKind, pub kind: LitKind,
pub suffix: Option<Box<SmolStr>>, pub suffix: Option<Symbol>,
} }
pub fn token_to_literal<S>(text: SmolStr, span: S) -> Literal<S> pub fn token_to_literal<S>(text: &str, span: S) -> Literal<S>
where where
S: Copy, S: Copy,
{ {
use rustc_lexer::LiteralKind; use rustc_lexer::LiteralKind;
let token = rustc_lexer::tokenize(&text).next_tuple(); let token = rustc_lexer::tokenize(text).next_tuple();
let Some((rustc_lexer::Token { let Some((rustc_lexer::Token {
kind: rustc_lexer::TokenKind::Literal { kind, suffix_start }, kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
.. ..
},)) = token },)) = token
else { else {
return Literal { span, text, kind: LitKind::Err(()), suffix: None }; return Literal {
span,
symbol: Symbol::intern(text),
kind: LitKind::Err(()),
suffix: None,
};
}; };
let (kind, start_offset, end_offset) = match kind { let (kind, start_offset, end_offset) = match kind {
@ -247,10 +255,10 @@ where
let lit = &lit[start_offset..lit.len() - end_offset]; let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix { let suffix = match suffix {
"" | "_" => None, "" | "_" => None,
suffix => Some(Box::new(suffix.into())), suffix => Some(Symbol::intern(suffix)),
}; };
Literal { span, text: lit.into(), kind, suffix } Literal { span, symbol: Symbol::intern(lit), kind, suffix }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -323,22 +331,16 @@ pub enum Spacing {
/// Identifier or keyword. /// Identifier or keyword.
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Ident<S> { pub struct Ident<S> {
pub text: SmolStr, pub sym: Symbol,
pub span: S, pub span: S,
pub is_raw: IdentIsRaw, pub is_raw: IdentIsRaw,
} }
impl<S> Ident<S> { impl<S> Ident<S> {
pub fn new(text: impl Into<SmolStr> + AsRef<str>, span: S) -> Self { pub fn new(text: &str, span: S) -> Self {
let t = text.as_ref();
// let raw_stripped = IdentIsRaw::split_from_symbol(text.as_ref()); // let raw_stripped = IdentIsRaw::split_from_symbol(text.as_ref());
let raw_stripped = t.strip_prefix("r#"); let (is_raw, text) = IdentIsRaw::split_from_symbol(text);
let is_raw = if raw_stripped.is_none() { IdentIsRaw::No } else { IdentIsRaw::Yes }; Ident { sym: Symbol::intern(text), span, is_raw }
let text = match raw_stripped {
Some(derawed) => derawed.into(),
None => text.into(),
};
Ident { text, span, is_raw }
} }
} }
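
`Ident::new` now leaves raw-identifier handling to `IdentIsRaw::split_from_symbol`: the `r#` marker becomes a flag and only the bare name is interned, with `Display` re-attaching the prefix. A sketch of the intended behaviour (not from this commit), using `()` as a placeholder span:

    let plain = Ident::new("foo", ());
    assert_eq!(plain.sym, Symbol::intern("foo"));
    assert!(plain.is_raw.no());

    let raw = Ident::new("r#fn", ());
    assert_eq!(raw.sym, Symbol::intern("fn"));
    assert!(raw.is_raw.yes());
    assert_eq!(raw.to_string(), "r#fn");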
@ -389,8 +391,8 @@ fn print_debug_token<S: fmt::Debug>(
"{}LITERAL {:?} {}{} {:#?}", "{}LITERAL {:?} {}{} {:#?}",
align, align,
lit.kind, lit.kind,
lit.text, lit.symbol,
lit.suffix.as_ref().map(|it| &***it).unwrap_or(""), lit.suffix.as_ref().map(|it| it.as_str()).unwrap_or(""),
lit.span lit.span
)?; )?;
} }
@ -410,7 +412,7 @@ fn print_debug_token<S: fmt::Debug>(
"{}IDENT {}{} {:#?}", "{}IDENT {}{} {:#?}",
align, align,
ident.is_raw.as_str(), ident.is_raw.as_str(),
ident.text, ident.sym,
ident.span ident.span
)?; )?;
} }
@ -479,26 +481,26 @@ impl<S> fmt::Display for Leaf<S> {
impl<S> fmt::Display for Ident<S> { impl<S> fmt::Display for Ident<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.is_raw.as_str(), f)?; fmt::Display::fmt(&self.is_raw.as_str(), f)?;
fmt::Display::fmt(&self.text, f) fmt::Display::fmt(&self.sym, f)
} }
} }
impl<S> fmt::Display for Literal<S> { impl<S> fmt::Display for Literal<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind { match self.kind {
LitKind::Byte => write!(f, "b'{}'", self.text), LitKind::Byte => write!(f, "b'{}'", self.symbol),
LitKind::Char => write!(f, "'{}'", self.text), LitKind::Char => write!(f, "'{}'", self.symbol),
LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{}", self.text), LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{}", self.symbol),
LitKind::Str => write!(f, "\"{}\"", self.text), LitKind::Str => write!(f, "\"{}\"", self.symbol),
LitKind::ByteStr => write!(f, "b\"{}\"", self.text), LitKind::ByteStr => write!(f, "b\"{}\"", self.symbol),
LitKind::CStr => write!(f, "c\"{}\"", self.text), LitKind::CStr => write!(f, "c\"{}\"", self.symbol),
LitKind::StrRaw(num_of_hashes) => { LitKind::StrRaw(num_of_hashes) => {
let num_of_hashes = num_of_hashes as usize; let num_of_hashes = num_of_hashes as usize;
write!( write!(
f, f,
r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
"", "",
text = self.text text = self.symbol
) )
} }
LitKind::ByteStrRaw(num_of_hashes) => { LitKind::ByteStrRaw(num_of_hashes) => {
@ -507,7 +509,7 @@ impl<S> fmt::Display for Literal<S> {
f, f,
r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
"", "",
text = self.text text = self.symbol
) )
} }
LitKind::CStrRaw(num_of_hashes) => { LitKind::CStrRaw(num_of_hashes) => {
@ -516,7 +518,7 @@ impl<S> fmt::Display for Literal<S> {
f, f,
r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#, r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
"", "",
text = self.text text = self.symbol
) )
} }
}?; }?;
@ -566,9 +568,9 @@ impl<S> Subtree<S> {
let s = match child { let s = match child {
TokenTree::Leaf(it) => { TokenTree::Leaf(it) => {
let s = match it { let s = match it {
Leaf::Literal(it) => it.text.to_string(), Leaf::Literal(it) => it.symbol.to_string(),
Leaf::Punct(it) => it.char.to_string(), Leaf::Punct(it) => it.char.to_string(),
Leaf::Ident(it) => format!("{}{}", it.is_raw.as_str(), it.text), Leaf::Ident(it) => format!("{}{}", it.is_raw.as_str(), it.sym),
}; };
match (it, last) { match (it, last) {
(Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => { (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => {
@ -599,9 +601,9 @@ pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String {
fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String { fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String {
match tkn { match tkn {
TokenTree::Leaf(Leaf::Ident(ident)) => { TokenTree::Leaf(Leaf::Ident(ident)) => {
format!("{}{}", ident.is_raw.as_str(), ident.text) format!("{}{}", ident.is_raw.as_str(), ident.sym)
} }
TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(), TokenTree::Leaf(Leaf::Literal(literal)) => literal.symbol.as_str().to_owned(),
TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char), TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char),
TokenTree::Subtree(subtree) => { TokenTree::Subtree(subtree) => {
let content = pretty(&subtree.token_trees); let content = pretty(&subtree.token_trees);
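
The `Display` impls above rebuild the surrounding quotes, prefixes and raw-string hashes around the stored symbol, so turning a literal back into text is just formatting. A sketch with a raw string literal (not from this commit), using `()` as a placeholder span:

    let lit = Literal {
        symbol: Symbol::intern("hello"),
        span: (),
        kind: LitKind::StrRaw(2),
        suffix: None,
    };
    // Two hashes and the quotes are re-added around the interned contents.
    assert_eq!(lit.to_string(), r###"r##"hello"##"###);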