Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-14 06:03:58 +00:00
Add literal/ident conversion, tests pass
parent 191db9fed4
commit d25b61030e
5 changed files with 71 additions and 36 deletions
Cargo.lock (generated, 5 changes)
@@ -1022,9 +1022,9 @@ dependencies = [
 [[package]]
 name = "once_cell"
-version = "1.12.0"
+version = "1.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225"
+checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"

 [[package]]
 name = "oorandom"
@@ -1167,6 +1167,7 @@ dependencies = [
 "mbe",
 "memmap2",
 "object 0.29.0",
+"once_cell",
 "paths",
 "proc-macro-api",
 "proc-macro-test",
@@ -24,6 +24,7 @@ tt = { path = "../tt", version = "0.0.0" }
 mbe = { path = "../mbe", version = "0.0.0" }
 paths = { path = "../paths", version = "0.0.0" }
 proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+once_cell = "1.13.0"

 [dev-dependencies]
 expect-test = "1.4.0"
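The new once_cell dependency backs a process-wide symbol interner (see the
symbol module diff below). A minimal standalone sketch of the Lazy-plus-Mutex
pattern it enables; the names here are hypothetical, not from the commit:

use std::{collections::HashMap, sync::Mutex};

use once_cell::sync::Lazy;

// A process-wide table, built on first access: `Lazy` handles the one-time
// construction, `Mutex` makes mutation safe across threads.
static TABLE: Lazy<Mutex<HashMap<String, u32>>> = Lazy::new(Default::default);

fn lookup_or_insert(key: &str) -> u32 {
    let mut table = TABLE.lock().unwrap();
    let next = table.len() as u32;
    *table.entry(key.to_owned()).or_insert(next)
}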
@@ -115,20 +115,29 @@ impl server::TokenStream for RustAnalyzer {
                 Self::TokenStream::from_iter(vec![tree])
             }

-            bridge::TokenTree::Ident(_symbol) => {
-                todo!("convert Ident bridge=>TokenStream");
-                // let IdentData(ident) = self.ident_interner.get(index).clone();
-                // let ident: tt::Ident = ident;
-                // let leaf = tt::Leaf::from(ident);
-                // let tree = TokenTree::from(leaf);
-                // Self::TokenStream::from_iter(vec![tree])
+            bridge::TokenTree::Ident(ident) => {
+                // FIXME: handle raw idents
+                let text = SYMBOL_INTERNER.lock().unwrap().get(&ident.sym).clone();
+                let ident: tt::Ident = tt::Ident { text, id: ident.span };
+                let leaf = tt::Leaf::from(ident);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
             }

-            bridge::TokenTree::Literal(_literal) => {
-                todo!("convert Literal bridge=>TokenStream");
-                // let leaf = tt::Leaf::from(literal);
-                // let tree = TokenTree::from(leaf);
-                // Self::TokenStream::from_iter(vec![tree])
+            bridge::TokenTree::Literal(literal) => {
+                let symbol = SYMBOL_INTERNER.lock().unwrap().get(&literal.symbol).clone();
+
+                let text: tt::SmolStr = if let Some(suffix) = literal.suffix {
+                    let suffix = SYMBOL_INTERNER.lock().unwrap().get(&suffix).clone();
+                    format!("{symbol}{suffix}").into()
+                } else {
+                    symbol
+                };
+
+                let literal = tt::Literal { text, id: literal.span };
+                let leaf = tt::Leaf::from(literal);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
             }

             bridge::TokenTree::Punct(p) => {
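rustc's bridge represents a literal as an interned symbol plus an optional
suffix, while rust-analyzer's tt::Literal stores the full spelling in a single
text field, so the conversion above concatenates the two halves. A standalone
sketch of that flattening, with plain strings standing in for interner handles:

/// Reassemble a literal's spelling from the bridge's (symbol, suffix) pair.
/// For example, `1u32` crosses the bridge as symbol "1" plus suffix "u32".
fn flatten_literal(symbol: &str, suffix: Option<&str>) -> String {
    match suffix {
        Some(suffix) => format!("{symbol}{suffix}"),
        None => symbol.to_owned(),
    }
}

fn main() {
    assert_eq!(flatten_literal("1", Some("u32")), "1u32");
    assert_eq!(flatten_literal("\"hello\"", None), "\"hello\"");
}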
@@ -185,13 +194,23 @@ impl server::TokenStream for RustAnalyzer {
         stream
             .into_iter()
             .map(|tree| match tree {
-                tt::TokenTree::Leaf(tt::Leaf::Ident(_ident)) => {
-                    todo!("convert Ident tt=>bridge");
-                    // bridge::TokenTree::Ident(Symbol(self.ident_interner.intern(&IdentData(ident))))
+                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+                    bridge::TokenTree::Ident(bridge::Ident {
+                        sym: SYMBOL_INTERNER.lock().unwrap().intern(&ident.text),
+                        // FIXME: handle raw idents
+                        is_raw: false,
+                        span: ident.id,
+                    })
                 }
-                tt::TokenTree::Leaf(tt::Leaf::Literal(_lit)) => {
-                    todo!("convert Literal tt=>bridge");
-                    // bridge::TokenTree::Literal(lit)
+                tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+                    bridge::TokenTree::Literal(bridge::Literal {
+                        // FIXME: handle literal kinds
+                        kind: bridge::LitKind::Err,
+                        symbol: SYMBOL_INTERNER.lock().unwrap().intern(&lit.text),
+                        // FIXME: handle suffixes
+                        suffix: None,
+                        span: lit.id,
+                    })
                 }
                 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
                     bridge::TokenTree::Punct(bridge::Punct {
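The reverse direction above is lossy: tt::Literal keeps only the full
spelling, so the (kind, symbol, suffix) triple the bridge expects has to be
re-derived, which is what the kind and suffix FIXMEs point at. A crude
illustration of what resolving the suffix FIXME would take; this hypothetical
helper handles plain decimal integer suffixes only, where a real fix would run
a literal lexer:

// Split "1u32" back into ("1", Some("u32")). Decimal integers only: hex,
// floats, and string literals all need real lexing.
fn split_int_suffix(text: &str) -> (&str, Option<&str>) {
    match text.find(|c: char| c.is_ascii_alphabetic()) {
        Some(pos) if pos > 0 && text[..pos].bytes().all(|b| b.is_ascii_digit()) => {
            (&text[..pos], Some(&text[pos..]))
        }
        _ => (text, None),
    }
}

fn main() {
    assert_eq!(split_int_suffix("1u32"), ("1", Some("u32")));
    assert_eq!(split_int_suffix("42"), ("42", None));
}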
@@ -379,12 +398,12 @@ impl server::Server for RustAnalyzer {
         }
     }

-    fn intern_symbol(_ident: &str) -> Self::Symbol {
-        todo!("intern_symbol")
+    fn intern_symbol(ident: &str) -> Self::Symbol {
+        SYMBOL_INTERNER.lock().unwrap().intern(&tt::SmolStr::from(ident))
     }

-    fn with_symbol_string(_symbol: &Self::Symbol, _f: impl FnOnce(&str)) {
-        todo!("with_symbol_string")
+    fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+        f(SYMBOL_INTERNER.lock().unwrap().get(symbol).as_str())
     }
 }
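with_symbol_string lends the string to a closure instead of returning &str
because the text lives behind the interner's Mutex: a returned reference would
have to outlive the lock guard. A minimal model of the pattern, with a plain
Vec<String> standing in for the interner:

use std::sync::Mutex;

use once_cell::sync::Lazy;

static INTERNER: Lazy<Mutex<Vec<String>>> =
    Lazy::new(|| Mutex::new(vec!["hello".to_owned()]));

// Returning `&str` from here would not borrow-check, since the reference
// can't outlive the MutexGuard. Passing it to a closure scopes the borrow
// to the lifetime of the guard.
fn with_symbol_string(index: usize, f: impl FnOnce(&str)) {
    let guard = INTERNER.lock().unwrap();
    f(&guard[index]);
}

fn main() {
    with_symbol_string(0, |s| assert_eq!(s, "hello"));
}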
@@ -1,18 +1,21 @@
-use std::collections::HashMap;
+use once_cell::sync::Lazy;
+use std::{collections::HashMap, sync::Mutex};
 use tt::SmolStr;

-// Identifier for an interned symbol.
+pub(super) static SYMBOL_INTERNER: Lazy<Mutex<SymbolInterner>> = Lazy::new(|| Default::default());
+
+// ID for an interned symbol.
 #[derive(Hash, Eq, PartialEq, Copy, Clone)]
 pub struct Symbol(u32);

 #[derive(Default)]
-struct IdentInterner {
+pub(super) struct SymbolInterner {
     idents: HashMap<SmolStr, u32>,
     ident_data: Vec<SmolStr>,
 }

-impl IdentInterner {
-    fn intern(&mut self, data: &str) -> Symbol {
+impl SymbolInterner {
+    pub(super) fn intern(&mut self, data: &str) -> Symbol {
         if let Some(index) = self.idents.get(data) {
             return Symbol(*index);
         }
@@ -24,12 +27,7 @@ impl IdentInterner {
         Symbol(index)
     }

-    fn get(&self, index: u32) -> &SmolStr {
-        &self.ident_data[index as usize]
-    }
-
-    #[allow(unused)]
-    fn get_mut(&mut self, index: u32) -> &mut SmolStr {
-        self.ident_data.get_mut(index as usize).expect("Should be consistent")
+    pub(super) fn get(&self, index: &Symbol) -> &SmolStr {
+        &self.ident_data[index.0 as usize]
     }
 }
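The interner maps each distinct string to a dense u32, so Symbol stays Copy
and cheap to pass through the bridge, while the Vec gives O(1) reverse lookup
in get. A self-contained round trip, with String standing in for SmolStr and
the allocation path that the hunk elides filled in as an assumption:

use std::collections::HashMap;

#[derive(Hash, Eq, PartialEq, Copy, Clone, Debug)]
struct Symbol(u32);

#[derive(Default)]
struct SymbolInterner {
    idents: HashMap<String, u32>,
    ident_data: Vec<String>,
}

impl SymbolInterner {
    fn intern(&mut self, data: &str) -> Symbol {
        if let Some(index) = self.idents.get(data) {
            return Symbol(*index);
        }
        // Assumed allocation path: hand out the next dense index.
        let index = self.ident_data.len() as u32;
        self.ident_data.push(data.to_owned());
        self.idents.insert(data.to_owned(), index);
        Symbol(index)
    }

    fn get(&self, index: &Symbol) -> &str {
        &self.ident_data[index.0 as usize]
    }
}

fn main() {
    let mut interner = SymbolInterner::default();
    let a = interner.intern("foo");
    let b = interner.intern("foo");
    assert_eq!(a.0, b.0); // interning the same string yields the same ID
    assert_eq!(interner.get(&a), "foo");
}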
@@ -37,6 +37,8 @@ use super::dylib::LoadProcMacroDylibError;
 pub(crate) use abi_1_58::Abi as Abi_1_58;
 pub(crate) use abi_1_63::Abi as Abi_1_63;
 pub(crate) use abi_1_64::Abi as Abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
 use libloading::Library;
 use proc_macro_api::{ProcMacroKind, RustCInfo};
@@ -54,6 +56,8 @@ pub(crate) enum Abi {
     Abi1_58(Abi_1_58),
     Abi1_63(Abi_1_63),
     Abi1_64(Abi_1_64),
+    #[cfg(feature = "sysroot-abi")]
+    AbiSysroot(Abi_Sysroot),
 }

 impl Abi {
@@ -71,6 +75,14 @@ impl Abi {
         symbol_name: String,
         info: RustCInfo,
     ) -> Result<Abi, LoadProcMacroDylibError> {
+        // Gated behind an env var for now to avoid a change in behavior for
+        // rustup-installed rust-analyzer
+        #[cfg(feature = "sysroot-abi")]
+        if std::env::var("PROC_MACRO_SRV_SYSROOT_ABI").is_ok() {
+            let inner = unsafe { Abi_Sysroot::from_lib(lib, symbol_name) }?;
+            return Ok(Abi::AbiSysroot(inner));
+        }
+
         // FIXME: this should use exclusive ranges when they're stable
         // https://github.com/rust-lang/rust/issues/37854
         match (info.version.0, info.version.1) {
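The FIXME above refers to the version match that follows the gated block:
exclusive range patterns (e.g. 58..63) are still unstable, so the dispatch has
to be written with inclusive ranges or guards. A sketch of that shape; the
cut-off minor versions are illustrative, not the crate's actual table:

// Pick an ABI from the (major, minor) rustc version that built the dylib.
fn select_abi(major: u32, minor: u32) -> &'static str {
    match (major, minor) {
        (1, 58..=62) => "Abi1_58",
        (1, 63) => "Abi1_63",
        (1, m) if m >= 64 => "Abi1_64",
        _ => "unsupported",
    }
}

The sysroot ABI bypasses this table entirely, but only when the
PROC_MACRO_SRV_SYSROOT_ABI environment variable is set.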
@@ -100,6 +112,8 @@ impl Abi {
             Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes),
             Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes),
             Self::Abi1_64(abi) => abi.expand(macro_name, macro_body, attributes),
+            #[cfg(feature = "sysroot-abi")]
+            Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes),
         }
     }
@@ -108,6 +122,8 @@ impl Abi {
             Self::Abi1_58(abi) => abi.list_macros(),
             Self::Abi1_63(abi) => abi.list_macros(),
             Self::Abi1_64(abi) => abi.list_macros(),
+            #[cfg(feature = "sysroot-abi")]
+            Self::AbiSysroot(abi) => abi.list_macros(),
         }
     }
 }