mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-28 04:45:05 +00:00

commit b98597f06d ("Re-enable proc-macros"), parent 98cfdde8ba
24 changed files with 787 additions and 493 deletions
Cargo.lock (generated), 3 changes:
@@ -1255,6 +1255,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
 name = "proc-macro-api"
 version = "0.0.0"
 dependencies = [
+ "base-db",
+ "indexmap",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "memmap2",
  "object 0.32.0",
  "paths",
@@ -543,6 +543,9 @@ impl ProcMacroExpander for IdentityProcMacroExpander {
         subtree: &Subtree<SpanData>,
         _: Option<&Subtree<SpanData>>,
         _: &Env,
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
     ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         Ok(subtree.clone())
     }
@@ -557,6 +560,9 @@ impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
         _: &Subtree<SpanData>,
         attrs: Option<&Subtree<SpanData>>,
         _: &Env,
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
     ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         attrs
             .cloned()
@@ -572,6 +578,9 @@ impl ProcMacroExpander for MirrorProcMacroExpander {
         input: &Subtree<SpanData>,
         _: Option<&Subtree<SpanData>>,
         _: &Env,
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
     ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
             let mut token_trees = vec![];
@@ -599,6 +608,9 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
         input: &Subtree<SpanData>,
         _: Option<&Subtree<SpanData>>,
         _: &Env,
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
     ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         return Ok(traverse(input));
@@ -262,6 +262,9 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
         subtree: &tt::Subtree<SpanData>,
         attrs: Option<&tt::Subtree<SpanData>>,
         env: &Env,
+        def_site: SpanData,
+        call_site: SpanData,
+        mixed_site: SpanData,
     ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
 }
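Note: this trait change is the core of the commit. Every expander now receives the three hygiene spans that the proc_macro API hands out via Span::def_site(), Span::call_site() and Span::mixed_site(). As a hedged sketch (it reuses the base-db types from this diff, so it only compiles inside that crate), an implementor that ignores hygiene looks like this:

    #[derive(Debug)]
    struct PassthroughExpander;

    impl ProcMacroExpander for PassthroughExpander {
        fn expand(
            &self,
            subtree: &tt::Subtree<SpanData>,
            _attrs: Option<&tt::Subtree<SpanData>>,
            _env: &Env,
            _def_site: SpanData,
            _call_site: SpanData,
            _mixed_site: SpanData,
        ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
            // A hygiene-oblivious expander threads its input straight through.
            Ok(subtree.clone())
        }
    }

The dummy expanders in the hunks above follow exactly this shape, which is why each of them only gains three ignored `_: SpanData` parameters.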
@@ -38,6 +38,8 @@ impl SyntaxContextId {
     // currently (which kind of makes sense but we need it here!)
     pub const SELF_REF: Self =
         SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
+    /// Used syntax fixups
+    pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) });

     pub fn is_root(self) -> bool {
         self == Self::ROOT
@@ -16,7 +16,7 @@ mod proc_macros;

 use std::{iter, ops::Range, sync};

-use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
+use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
 use expect_test::Expect;
 use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId};
 use stdx::format_to;
@@ -307,6 +307,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
         subtree: &Subtree,
         _: Option<&Subtree>,
         _: &base_db::Env,
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
     ) -> Result<Subtree, base_db::ProcMacroExpansionError> {
         let (parse, _) =
             ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
@@ -233,7 +233,17 @@ pub fn expand_speculative(
     let speculative_expansion = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => {
             tt.delimiter = tt::Delimiter::UNSPECIFIED;
-            expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
+            let call_site = loc.span(db);
+            expander.expand(
+                db,
+                loc.def.krate,
+                loc.krate,
+                &tt,
+                attr_arg.as_ref(),
+                call_site,
+                call_site,
+                call_site,
+            )
         }
         MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
             pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
@@ -398,17 +408,23 @@ fn macro_arg(
         MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
     };
     let censor = censor_for_macro_input(&loc, &syntax);
-    // let mut fixups = fixup::fixup_syntax(&node);
-    // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
-    // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
-    //     &node,
-    //     fixups.token_map,
-    //     fixups.next_id,
-    //     fixups.replace,
-    //     fixups.append,
-    // );
-
-    let mut tt = mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor);
+    let mut tt = match loc.kind {
+        MacroCallKind::FnLike { .. } => {
+            mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor)
+        }
+        MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+            // let mut fixups = crate::fixup::fixup_syntax(&syntax);
+            // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+            // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+            //     &node,
+            //     fixups.token_map,
+            //     fixups.next_id,
+            //     fixups.replace,
+            //     fixups.append,
+            // );
+            mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor)
+        }
+    };

     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
@@ -658,8 +674,19 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
         _ => None,
     };

-    let ExpandResult { value: tt, err } =
-        expander.expand(db, loc.def.krate, loc.krate, &macro_arg, attr_arg);
+    let call_site = loc.span(db);
+    let ExpandResult { value: tt, err } = expander.expand(
+        db,
+        loc.def.krate,
+        loc.krate,
+        &macro_arg,
+        attr_arg,
+        // FIXME
+        call_site,
+        call_site,
+        // FIXME
+        call_site,
+    );

     // Set a hard limit for the expanded tt
     if let Err(value) = check_tt_count(&tt) {
@@ -2,25 +2,30 @@
 //! fix up syntax errors in the code we're passing to them.
 use std::mem;

-use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
+use base_db::{
+    span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+    FileId,
+};
+use la_arena::RawIdx;
+use mbe::TokenMap;
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
-    match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+    match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
-use tt::token_id::Subtree;
+use tt::Spacing;
+
+use crate::tt::{Ident, Leaf, Punct, Subtree};

 /// The result of calculating fixes for a syntax node -- a bunch of changes
 /// (appending to and replacing nodes), the information that is needed to
 /// reverse those changes afterwards, and a token map.
 #[derive(Debug, Default)]
 pub(crate) struct SyntaxFixups {
-    pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-    pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
+    pub(crate) replace: FxHashMap<SyntaxElement, Vec<()>>,
     pub(crate) undo_info: SyntaxFixupUndoInfo,
-    pub(crate) token_map: TokenMap,
-    pub(crate) next_id: u32,
 }

 /// This is the information needed to reverse the fixups.
@@ -29,21 +34,25 @@ pub struct SyntaxFixupUndoInfo {
     original: Box<[Subtree]>,
 }

-const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+// censoring -> just don't convert the node
+// replacement -> censor + append
+// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
+// to remove later

 pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
     let mut replace = FxHashMap::<SyntaxElement, _>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
-    let mut token_map = TokenMap::default();
-    let mut next_id = 0;
+    let dummy_range = TextRange::empty(TextSize::new(0));
+    let dummy_anchor =
+        SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) };
+    let fake_span =
+        SpanData { range: dummy_range, anchor: dummy_anchor, ctx: SyntaxContextId::FAKE };
     while let Some(event) = preorder.next() {
-        let node = match event {
-            syntax::WalkEvent::Enter(node) => node,
-            syntax::WalkEvent::Leave(_) => continue,
-        };
+        let syntax::WalkEvent::Enter(node) = event else { continue };

+        /* TODO
         if can_handle_error(&node) && has_error_to_handle(&node) {
             // the node contains an error node, we have to completely replace it by something valid
             let (original_tree, new_tmap, new_next_id) =
@@ -68,6 +77,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
             preorder.skip_subtree();
             continue;
         }
+        */
         // In some other situations, we can fix things by just appending some tokens.
         let end_range = TextRange::empty(node.text_range().end());
         match_ast! {
@@ -76,36 +86,32 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 if it.name_ref().is_none() {
                     // incomplete field access: some_expr.|
                     append.insert(node.clone().into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::IDENT,
+                        Leaf::Ident(Ident {
                             text: "__ra_fixup".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                            span: fake_span
+                        }),
                     ]);
                 }
             },
             ast::ExprStmt(it) => {
                 if it.semicolon_token().is_none() {
                     append.insert(node.clone().into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::SEMICOLON,
-                            text: ";".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                        Leaf::Punct(Punct {
+                            char: ';',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
                     ]);
                 }
             },
             ast::LetStmt(it) => {
                 if it.semicolon_token().is_none() {
                     append.insert(node.clone().into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::SEMICOLON,
-                            text: ";".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                        Leaf::Punct(Punct {
+                            char: ';',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
                     ]);
                 }
             },
@@ -117,28 +123,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         None => continue,
                     };
                     append.insert(if_token.into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::IDENT,
+                        Leaf::Ident(Ident {
                             text: "__ra_fixup".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                            span: fake_span
+                        }),
                     ]);
                 }
                 if it.then_branch().is_none() {
                     append.insert(node.clone().into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::L_CURLY,
-                            text: "{".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
-                        SyntheticToken {
-                            kind: SyntaxKind::R_CURLY,
-                            text: "}".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                        // FIXME: THis should be a subtree no?
+                        Leaf::Punct(Punct {
+                            char: '{',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
+                        Leaf::Punct(Punct {
+                            char: '}',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
                     ]);
                 }
             },
@@ -150,46 +153,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         None => continue,
                     };
                     append.insert(while_token.into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::IDENT,
+                        Leaf::Ident(Ident {
                             text: "__ra_fixup".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                            span: fake_span
+                        }),
                     ]);
                 }
                 if it.loop_body().is_none() {
                     append.insert(node.clone().into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::L_CURLY,
-                            text: "{".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
-                        SyntheticToken {
-                            kind: SyntaxKind::R_CURLY,
-                            text: "}".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                        // FIXME: THis should be a subtree no?
+                        Leaf::Punct(Punct {
+                            char: '{',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
+                        Leaf::Punct(Punct {
+                            char: '}',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
                     ]);
                 }
             },
             ast::LoopExpr(it) => {
                 if it.loop_body().is_none() {
                     append.insert(node.clone().into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::L_CURLY,
-                            text: "{".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
-                        SyntheticToken {
-                            kind: SyntaxKind::R_CURLY,
-                            text: "}".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                        // FIXME: THis should be a subtree no?
+                        Leaf::Punct(Punct {
+                            char: '{',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
+                        Leaf::Punct(Punct {
+                            char: '}',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
                     ]);
                 }
             },
@@ -201,29 +200,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         None => continue
                     };
                     append.insert(match_token.into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::IDENT,
+                        Leaf::Ident(Ident {
                             text: "__ra_fixup".into(),
-                            range: end_range,
-                            id: EMPTY_ID
-                        },
+                            span: fake_span
+                        }),
                     ]);
                 }
                 if it.match_arm_list().is_none() {
                     // No match arms
                     append.insert(node.clone().into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::L_CURLY,
-                            text: "{".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
-                        SyntheticToken {
-                            kind: SyntaxKind::R_CURLY,
-                            text: "}".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                        // FIXME: THis should be a subtree no?
+                        Leaf::Punct(Punct {
+                            char: '{',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
+                        Leaf::Punct(Punct {
+                            char: '}',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
                     ]);
                 }
             },
@@ -234,10 +230,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 };

                 let [pat, in_token, iter] = [
-                    (SyntaxKind::UNDERSCORE, "_"),
-                    (SyntaxKind::IN_KW, "in"),
-                    (SyntaxKind::IDENT, "__ra_fixup")
-                ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
+                    "_",
+                    "in",
+                    "__ra_fixup"
+                ].map(|text|
+                    Leaf::Ident(Ident {
+                        text: text.into(),
+                        span: fake_span
+                    }),
+                );

                 if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
                     append.insert(for_token.into(), vec![pat, in_token, iter]);
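Note: the for-loop fixup above stamps out its three placeholder tokens with `<[T; 3]>::map` over plain strings, now that kind tags are no longer needed. A self-contained model of that trick (toy Ident type, not the real tt one):

    #[derive(Debug, Clone, PartialEq)]
    struct Ident {
        text: String,
    }

    fn main() {
        // One closure builds all three synthetic tokens; destructuring the
        // resulting array keeps each token individually addressable.
        let [pat, in_token, iter] =
            ["_", "in", "__ra_fixup"].map(|text| Ident { text: text.into() });
        assert_eq!([pat.text, in_token.text, iter.text], ["_", "in", "__ra_fixup"]);
    }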
@@ -248,18 +249,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {

                 if it.loop_body().is_none() {
                     append.insert(node.clone().into(), vec![
-                        SyntheticToken {
-                            kind: SyntaxKind::L_CURLY,
-                            text: "{".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
-                        SyntheticToken {
-                            kind: SyntaxKind::R_CURLY,
-                            text: "}".into(),
-                            range: end_range,
-                            id: EMPTY_ID,
-                        },
+                        // FIXME: THis should be a subtree no?
+                        Leaf::Punct(Punct {
+                            char: '{',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
+                        Leaf::Punct(Punct {
+                            char: '}',
+                            spacing: Spacing::Alone,
+                            span: fake_span
+                        }),
                     ]);
                 }
             },
@@ -270,8 +270,6 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
     SyntaxFixups {
         append,
         replace,
-        token_map,
-        next_id,
         undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
     }
 }
@@ -288,40 +286,33 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
     has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
 }

-pub(crate) fn reverse_fixups(
-    tt: &mut Subtree,
-    token_map: &TokenMap,
-    undo_info: &SyntaxFixupUndoInfo,
-) {
+pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
     let tts = std::mem::take(&mut tt.token_trees);
     tt.token_trees = tts
         .into_iter()
+        // delete all fake nodes
        .filter(|tt| match tt {
-            tt::TokenTree::Leaf(leaf) => {
-                token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
-            }
-            tt::TokenTree::Subtree(st) => {
-                token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
-            }
-        })
-        .flat_map(|tt| match tt {
-            tt::TokenTree::Subtree(mut tt) => {
-                reverse_fixups(&mut tt, token_map, undo_info);
-                SmallVec::from_const([tt.into()])
-            }
-            tt::TokenTree::Leaf(leaf) => {
-                if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
-                    let original = undo_info.original[id.0 as usize].clone();
-                    if original.delimiter.kind == tt::DelimiterKind::Invisible {
-                        original.token_trees.into()
-                    } else {
-                        SmallVec::from_const([original.into()])
-                    }
-                } else {
-                    SmallVec::from_const([leaf.into()])
-                }
-            }
+            tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE,
+            tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE,
         })
+        // .flat_map(|tt| match tt { TODO
+        //     tt::TokenTree::Subtree(mut tt) => {
+        //         reverse_fixups(&mut tt, undo_info);
+        //         SmallVec::from_const([tt.into()])
+        //     }
+        //     tt::TokenTree::Leaf(leaf) => {
+        //         if let Some(id) = leaf.span().anchor {
+        //             let original = undo_info.original[id.0 as usize].clone();
+        //             if original.delimiter.kind == tt::DelimiterKind::Invisible {
+        //                 original.token_trees.into()
+        //             } else {
+        //                 SmallVec::from_const([original.into()])
+        //             }
+        //         } else {
+        //             SmallVec::from_const([leaf.into()])
+        //         }
+        //     }
+        // })
         .collect();
 }
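Note: with token_map and next_id gone, undoing fixups no longer consults a side table. fixup_syntax stamps every synthesized token with SyntaxContextId::FAKE (reserved near the top of the intern space, InternId::MAX - 2), so reverse_fixups becomes a plain filter. A minimal self-contained model of the scheme, with toy types standing in for the real spans:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Ctx(u32);
    const FAKE: Ctx = Ctx(u32::MAX - 2);

    #[derive(Debug)]
    struct Token {
        text: &'static str,
        ctx: Ctx,
    }

    fn reverse_fixups(tokens: Vec<Token>) -> Vec<Token> {
        // Dropping everything FAKE restores the pre-fixup token stream.
        tokens.into_iter().filter(|t| t.ctx != FAKE).collect()
    }

    fn main() {
        let tokens = vec![
            Token { text: "expr", ctx: Ctx(0) },
            Token { text: "__ra_fixup", ctx: FAKE }, // synthesized placeholder
        ];
        let restored = reverse_fixups(tokens);
        assert_eq!(restored.len(), 1);
        assert_eq!(restored[0].text, "expr");
    }

The commented-out flat_map above marks the part not restored yet: swapping censored error nodes back for their original subtrees, which previously keyed off the synthetic token id.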
@@ -1,6 +1,6 @@
 //! Proc Macro Expander stub

-use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
+use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
 use stdx::never;

 use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@@ -33,6 +33,9 @@ impl ProcMacroExpander {
         calling_crate: CrateId,
         tt: &tt::Subtree,
         attr_arg: Option<&tt::Subtree>,
+        def_site: SpanData,
+        call_site: SpanData,
+        mixed_site: SpanData,
     ) -> ExpandResult<tt::Subtree> {
         match self.proc_macro_id {
             ProcMacroId(DUMMY_ID) => {
@@ -68,7 +71,8 @@ impl ProcMacroExpander {
                 let krate_graph = db.crate_graph();
                 // Proc macros have access to the environment variables of the invoking crate.
                 let env = &krate_graph[calling_crate].env;
-                match proc_macro.expander.expand(tt, attr_arg, env) {
+                match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
+                {
                     Ok(t) => ExpandResult::ok(t),
                     Err(err) => match err {
                         // Don't discard the item in case something unexpected happened while expanding attributes
@@ -376,16 +376,16 @@ impl ProcMacroExpander for Expander {
         subtree: &tt::Subtree<SpanData>,
         attrs: Option<&tt::Subtree<SpanData>>,
         env: &Env,
+        def_site: SpanData,
+        call_site: SpanData,
+        mixed_site: SpanData,
     ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
-        let _ = (subtree, attrs, env);
-
-        // let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
-        // match self.0.expand(subtree, attrs, env) {
-        //     Ok(Ok(subtree)) => Ok(subtree),
-        //     Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
-        //     Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
-        // }
-        todo!()
+        let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+        match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) {
+            Ok(Ok(subtree)) => Ok(subtree),
+            Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+            Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+        }
     }
 }
@@ -399,6 +399,9 @@ impl ProcMacroExpander for IdentityExpander {
         subtree: &tt::Subtree<SpanData>,
         _: Option<&tt::Subtree<SpanData>>,
         _: &Env,
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
     ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
         Ok(subtree.clone())
     }
@@ -414,6 +417,9 @@ impl ProcMacroExpander for EmptyExpander {
         _: &tt::Subtree<SpanData>,
         _: Option<&tt::Subtree<SpanData>>,
         _: &Env,
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
     ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
         Ok(tt::Subtree::empty())
     }
@@ -51,7 +51,7 @@ impl<S: Span> Bindings<S> {
                 marker(&mut span);
                 let subtree = tt::Subtree {
                     delimiter: tt::Delimiter {
-                        // TODO split span
+                        // FIXME split span
                         open: span,
                         close: span,
                         kind: delimiter.kind,
@@ -34,7 +34,8 @@ pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext};

 pub use crate::{
     syntax_bridge::{
-        map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+        map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree,
+        parse_to_token_tree_static_span, syntax_node_to_token_tree,
         syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SpanMapper,
     },
     token_map::TokenMap,
@@ -63,7 +63,7 @@ pub(crate) mod dummy_test_span_utils {

 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
-/// TODO: Flesh out the doc comment more thoroughly
+/// FIXME: Flesh out the doc comment more thoroughly
 pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
@@ -179,6 +179,19 @@ where
     Some(convert_tokens(&mut conv))
 }

+/// Convert a string to a `TokenTree`
+pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
+where
+    S: Span,
+{
+    let lexed = parser::LexedStr::new(text);
+    if lexed.errors().next().is_some() {
+        return None;
+    }
+    let mut conv = StaticRawConverter { lexed, pos: 0, span };
+    Some(convert_tokens(&mut conv))
+}
+
 /// Split token tree with separate expr: $($e:expr)SEP*
 pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
     if tt.token_trees.is_empty() {
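Note: a usage sketch for the new entry point (hypothetical wrapper, assuming the mbe and tt crates from this commit): every token in the returned subtree carries the same caller-supplied span, which is what callers without a real file anchor need.

    fn lex_with_uniform_span<S: tt::Span>(span: S, text: &str) -> tt::Subtree<S> {
        // parse_to_token_tree_static_span returns None only on lexer errors,
        // so fixed, known-good input can be unwrapped.
        mbe::parse_to_token_tree_static_span(span, text).expect("static input lexes cleanly")
    }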
@@ -213,12 +226,10 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
     res
 }

-fn convert_tokens<Anchor, Ctx, C>(conv: &mut C) -> tt::Subtree<SpanData<Anchor, Ctx>>
+fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
 where
-    C: TokenConverter<Anchor, Ctx>,
-    Ctx: SyntaxContext,
-    SpanData<Anchor, Ctx>: Span,
-    Anchor: Copy,
+    C: TokenConverter<S>,
+    S: Span,
 {
     let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] };
     let mut stack = NonEmptyVec::new(entry);
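Note: the rewritten bounds collapse the old (Anchor, Ctx, SpanData) juggling into one opaque span parameter. A self-contained model of why that helps (toy traits mirroring Span, TokenConverter and StaticRawConverter; not the real definitions):

    trait Span: Copy {}

    trait TokenConverter<S: Span> {
        fn span_for(&self, pos: usize) -> S;
    }

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct StaticSpan(u32);
    impl Span for StaticSpan {}

    // Mirrors StaticRawConverter: every position maps to one fixed span.
    struct Uniform(StaticSpan);
    impl TokenConverter<StaticSpan> for Uniform {
        fn span_for(&self, _pos: usize) -> StaticSpan {
            self.0
        }
    }

    // A single `S: Span` bound is all a generic caller needs now.
    fn spans_of<S: Span, C: TokenConverter<S>>(conv: &C, n: usize) -> Vec<S> {
        (0..n).map(|i| conv.span_for(i)).collect()
    }

    fn main() {
        assert_eq!(spans_of(&Uniform(StaticSpan(7)), 3), vec![StaticSpan(7); 3]);
    }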
@@ -452,6 +463,12 @@ struct RawConverter<'a, Anchor> {
     pos: usize,
     anchor: Anchor,
 }
+/// A raw token (straight from lexer) converter that gives every token the same span.
+struct StaticRawConverter<'a, S> {
+    lexed: parser::LexedStr<'a>,
+    pos: usize,
+    span: S,
+}

 trait SrcToken<Ctx>: std::fmt::Debug {
     fn kind(&self, ctx: &Ctx) -> SyntaxKind;
@@ -461,20 +478,16 @@ trait SrcToken<Ctx>: std::fmt::Debug {
     fn to_text(&self, ctx: &Ctx) -> SmolStr;
 }

-trait TokenConverter<Anchor, Ctx>: Sized {
+trait TokenConverter<S>: Sized {
     type Token: SrcToken<Self>;

-    fn convert_doc_comment(
-        &self,
-        token: &Self::Token,
-        span: SpanData<Anchor, Ctx>,
-    ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>>;
+    fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>;

     fn bump(&mut self) -> Option<(Self::Token, TextRange)>;

     fn peek(&self) -> Option<Self::Token>;

-    fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx>;
+    fn span_for(&self, range: TextRange) -> S;
 }

 impl<Anchor> SrcToken<RawConverter<'_, Anchor>> for usize {
@@ -491,7 +504,22 @@ impl<Anchor> SrcToken<RawConverter<'_, Anchor>> for usize {
     }
 }

-impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<Anchor, Ctx> for RawConverter<'_, Anchor>
+impl<S: Span> SrcToken<StaticRawConverter<'_, S>> for usize {
+    fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
+        ctx.lexed.kind(*self)
+    }
+
+    fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option<char> {
+        ctx.lexed.text(*self).chars().next()
+    }
+
+    fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr {
+        ctx.lexed.text(*self).into()
+    }
+}
+
+impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<SpanData<Anchor, Ctx>>
+    for RawConverter<'_, Anchor>
 where
     SpanData<Anchor, Ctx>: Span,
 {
@@ -530,6 +558,41 @@ where
     }
 }

+impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
+where
+    S: Span,
+{
+    type Token = usize;
+
+    fn convert_doc_comment(&self, &token: &usize, span: S) -> Option<Vec<tt::TokenTree<S>>> {
+        let text = self.lexed.text(token);
+        convert_doc_comment(&doc_comment(text), span)
+    }
+
+    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+        if self.pos == self.lexed.len() {
+            return None;
+        }
+        let token = self.pos;
+        self.pos += 1;
+        let range = self.lexed.text_range(token);
+        let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?);
+
+        Some((token, range))
+    }
+
+    fn peek(&self) -> Option<Self::Token> {
+        if self.pos == self.lexed.len() {
+            return None;
+        }
+        Some(self.pos)
+    }
+
+    fn span_for(&self, _: TextRange) -> S {
+        self.span
+    }
+}
+
 struct Converter<SpanMap> {
     current: Option<SyntaxToken>,
     preorder: PreorderWithTokens,
@@ -596,17 +659,13 @@ impl<SpanMap> SrcToken<Converter<SpanMap>> for SynToken {
     }
 }

-impl<Anchor: Copy, Ctx, SpanMap> TokenConverter<Anchor, Ctx> for Converter<SpanMap>
+impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap>
 where
-    SpanData<Anchor, Ctx>: Span,
-    SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+    S: Span,
+    SpanMap: SpanMapper<S>,
 {
     type Token = SynToken;
-    fn convert_doc_comment(
-        &self,
-        token: &Self::Token,
-        span: SpanData<Anchor, Ctx>,
-    ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
+    fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
         convert_doc_comment(token.token(), span)
     }
@@ -661,7 +720,7 @@ where
         Some(token)
     }

-    fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> {
+    fn span_for(&self, range: TextRange) -> S {
         self.map.span_for(range)
     }
 }
@@ -25,6 +25,7 @@ tracing.workspace = true
 triomphe.workspace = true
 memmap2 = "0.5.4"
 snap = "1.1.0"
+indexmap = "2.1.0"

 # local deps
 paths.workspace = true
@@ -32,5 +33,7 @@ tt.workspace = true
 stdx.workspace = true
 profile.workspace = true
 text-size.workspace = true
-# Intentionally *not* depend on anything salsa-related
-# base-db.workspace = true
+# Ideally this crate would not depend on salsa things, but we need span information here which wraps
+# InternIds for the syntax context
+base-db.workspace = true
+la-arena.workspace = true
@@ -11,6 +11,8 @@ pub mod msg;
 mod process;
 mod version;

+use base_db::span::SpanData;
+use indexmap::IndexSet;
 use paths::AbsPathBuf;
 use std::{fmt, io, sync::Mutex};
 use triomphe::Arc;
@@ -18,7 +20,7 @@ use triomphe::Arc;
 use serde::{Deserialize, Serialize};

 use crate::{
-    msg::{flat::SerializableSpan, ExpandMacro, FlatTree, PanicMessage},
+    msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS},
     process::ProcMacroProcessSrv,
 };
@@ -132,32 +134,49 @@ impl ProcMacro {
         self.kind
     }

-    pub fn expand<const L: usize, S: SerializableSpan<L>>(
+    pub fn expand(
         &self,
-        subtree: &tt::Subtree<S>,
-        attr: Option<&tt::Subtree<S>>,
+        subtree: &tt::Subtree<SpanData>,
+        attr: Option<&tt::Subtree<SpanData>>,
         env: Vec<(String, String)>,
-    ) -> Result<Result<tt::Subtree<S>, PanicMessage>, ServerError> {
+        def_site: SpanData,
+        call_site: SpanData,
+        mixed_site: SpanData,
+    ) -> Result<Result<tt::Subtree<SpanData>, PanicMessage>, ServerError> {
         let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
         let current_dir = env
             .iter()
             .find(|(name, _)| name == "CARGO_MANIFEST_DIR")
             .map(|(_, value)| value.clone());

+        let mut span_data_table = IndexSet::default();
+        let def_site = span_data_table.insert_full(def_site).0;
+        let call_site = span_data_table.insert_full(call_site).0;
+        let mixed_site = span_data_table.insert_full(mixed_site).0;
         let task = ExpandMacro {
-            macro_body: FlatTree::new(subtree, version),
+            macro_body: FlatTree::new(subtree, version, &mut span_data_table),
             macro_name: self.name.to_string(),
-            attributes: attr.map(|subtree| FlatTree::new(subtree, version)),
+            attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
             lib: self.dylib_path.to_path_buf().into(),
             env,
             current_dir,
+            has_global_spans: ExpnGlobals {
+                serialize: version >= HAS_GLOBAL_SPANS,
+                def_site,
+                call_site,
+                mixed_site,
+            },
         };

-        let request = msg::Request::ExpandMacro(task);
-        let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
+        let response = self
+            .process
+            .lock()
+            .unwrap_or_else(|e| e.into_inner())
+            .send_task(msg::Request::ExpandMacro(task))?;

         match response {
             msg::Response::ExpandMacro(it) => {
-                Ok(it.map(|tree| FlatTree::to_subtree(tree, version)))
+                Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
             }
             msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
                 Err(ServerError { message: "unexpected response".to_string(), io: None })
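Note: the span_data_table above is the whole client side of the new span protocol. Spans are interned into an indexmap::IndexSet and cross the process boundary as plain indices. A self-contained demonstration of the insert_full pattern, with strings standing in for SpanData:

    use indexmap::IndexSet;

    fn main() {
        let mut table: IndexSet<&str> = IndexSet::default();

        // insert_full returns (index, newly_inserted); duplicate spans dedupe
        // to the same index, keeping the wire format compact.
        let (def_site, _) = table.insert_full("def-site-span");
        let (call_site, _) = table.insert_full("call-site-span");
        assert_eq!(table.insert_full("def-site-span").0, def_site);

        // When the response comes back, the same table resolves indices.
        assert_eq!(table.get_index(call_site), Some(&"call-site-span"));
    }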
@@ -10,21 +10,15 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};

 use crate::ProcMacroKind;

-pub use crate::msg::flat::FlatTree;
+pub use crate::msg::flat::{FlatTree, TokenId};

 // The versions of the server protocol
 pub const NO_VERSION_CHECK_VERSION: u32 = 0;
 pub const VERSION_CHECK_VERSION: u32 = 1;
 pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
-/// This version changes how spans are encoded, kind of. Prior to this version,
-/// spans were represented as a single u32 which effectively forced spans to be
-/// token ids. Starting with this version, the span fields are still u32,
-/// but if the size of the span is greater than 1 then the span data is encoded in
-/// an additional vector where the span represents the offset into that vector.
-/// This allows encoding bigger spans while supporting the previous versions.
-pub const VARIABLE_SIZED_SPANS: u32 = 2;
+pub const HAS_GLOBAL_SPANS: u32 = 3;

-pub const CURRENT_API_VERSION: u32 = VARIABLE_SIZED_SPANS;
+pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS;

 #[derive(Debug, Serialize, Deserialize)]
 pub enum Request {
@@ -66,6 +60,24 @@ pub struct ExpandMacro {
     pub env: Vec<(String, String)>,

     pub current_dir: Option<String>,
+    /// marker for serde skip stuff
+    #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
+    pub has_global_spans: ExpnGlobals,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct ExpnGlobals {
+    #[serde(skip_serializing)]
+    pub serialize: bool,
+    pub def_site: usize,
+    pub call_site: usize,
+    pub mixed_site: usize,
+}
+
+impl ExpnGlobals {
+    fn skip_serializing_if(&self) -> bool {
+        !self.serialize
+    }
 }

 pub trait Message: Serialize + DeserializeOwned {
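Note: the skip_serializing_if marker is what keeps the wire format backward compatible. For servers older than HAS_GLOBAL_SPANS the whole field is omitted from the payload. A self-contained sketch of the mechanism (serde plus serde_json assumed; the extra #[serde(default)] attributes make the field optional on the reading side as well, which the diff above does not show explicitly):

    use serde::{Deserialize, Serialize};

    #[derive(Debug, Default, Serialize, Deserialize)]
    struct ExpnGlobals {
        #[serde(skip_serializing, default)]
        serialize: bool,
        def_site: usize,
    }

    impl ExpnGlobals {
        fn skip_serializing_if(&self) -> bool {
            !self.serialize
        }
    }

    #[derive(Debug, Serialize, Deserialize)]
    struct ExpandMacro {
        macro_name: String,
        #[serde(default, skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
        has_global_spans: ExpnGlobals,
    }

    fn main() {
        let msg = ExpandMacro {
            macro_name: "derive".into(),
            has_global_spans: ExpnGlobals { serialize: false, def_site: 1 },
        };
        // serialize == false drops the field from the JSON entirely...
        assert_eq!(serde_json::to_string(&msg).unwrap(), r#"{"macro_name":"derive"}"#);

        // ...and a pre-HAS_GLOBAL_SPANS message still deserializes.
        let old: ExpandMacro = serde_json::from_str(r#"{"macro_name":"derive"}"#).unwrap();
        assert_eq!(old.has_global_spans.def_site, 0);
    }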
@@ -120,38 +132,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
     Ok(())
 }

-/*TODO
-
 #[cfg(test)]
 mod tests {
-    use tt::{
-        Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, SpanAnchor, Subtree,
-        TokenId, TokenTree,
+    use base_db::{
+        span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+        FileId,
     };
+    use la_arena::RawIdx;
+    use text_size::{TextRange, TextSize};
+    use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};

     use super::*;

-    fn fixture_token_tree() -> Subtree<TokenId> {
-        let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() };
-        subtree
-            .token_trees
-            .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into()));
-        subtree
-            .token_trees
-            .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into()));
+    fn fixture_token_tree() -> Subtree<SpanData> {
+        let anchor =
+            SpanAnchor { file_id: FileId(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) };
+        let mut subtree = Subtree {
+            delimiter: Delimiter {
+                open: SpanData {
+                    range: TextRange::empty(TextSize::new(0)),
+                    anchor,
+                    ctx: SyntaxContextId::ROOT,
+                },
+                close: SpanData {
+                    range: TextRange::empty(TextSize::new(13)),
+                    anchor,
+                    ctx: SyntaxContextId::ROOT,
+                },
+                kind: DelimiterKind::Invisible,
+            },
+            token_trees: Vec::new(),
+        };
+        subtree.token_trees.push(TokenTree::Leaf(
+            Ident {
+                text: "struct".into(),
+                span: SpanData {
+                    range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
+                    anchor,
+                    ctx: SyntaxContextId::ROOT,
+                },
+            }
+            .into(),
+        ));
+        subtree.token_trees.push(TokenTree::Leaf(
+            Ident {
+                text: "Foo".into(),
+                span: SpanData {
+                    range: TextRange::at(TextSize::new(5), TextSize::of("Foo")),
+                    anchor,
+                    ctx: SyntaxContextId::ROOT,
+                },
+            }
+            .into(),
+        ));
         subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
             text: "Foo".into(),
-            span: TokenId::DUMMY,
+            span: SpanData {
+                range: TextRange::at(TextSize::new(8), TextSize::of("Foo")),
+                anchor,
+                ctx: SyntaxContextId::ROOT,
+            },
         })));
         subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
             char: '@',
-            span: TokenId::DUMMY,
+            span: SpanData {
+                range: TextRange::at(TextSize::new(11), TextSize::of('@')),
+                anchor,
+                ctx: SyntaxContextId::ROOT,
+            },
             spacing: Spacing::Joint,
         })));
         subtree.token_trees.push(TokenTree::Subtree(Subtree {
             delimiter: Delimiter {
-                open: TokenId(2),
-                close: TokenId::DUMMY,
+                open: SpanData {
+                    range: TextRange::at(TextSize::new(12), TextSize::of('{')),
+                    anchor,
+                    ctx: SyntaxContextId::ROOT,
+                },
+                close: SpanData {
+                    range: TextRange::at(TextSize::new(13), TextSize::of('}')),
+                    anchor,
+                    ctx: SyntaxContextId::ROOT,
+                },
                 kind: DelimiterKind::Brace,
             },
             token_trees: vec![],
@@ -162,20 +225,26 @@ mod tests {
     #[test]
     fn test_proc_macro_rpc_works() {
         let tt = fixture_token_tree();
+        let mut span_data_table = Default::default();
         let task = ExpandMacro {
-            macro_body: FlatTree::new(&tt, CURRENT_API_VERSION),
+            macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
             macro_name: Default::default(),
             attributes: None,
             lib: std::env::current_dir().unwrap(),
             env: Default::default(),
             current_dir: Default::default(),
+            has_global_spans: ExpnGlobals {
+                serialize: true,
+                def_site: 0,
+                call_site: 0,
+                mixed_site: 0,
+            },
         };

         let json = serde_json::to_string(&task).unwrap();
         // println!("{}", json);
         let back: ExpandMacro = serde_json::from_str(&json).unwrap();

-        assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION));
+        assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
     }
 }
-*/
@@ -37,40 +37,40 @@

 use std::collections::{HashMap, VecDeque};

+use base_db::span::SpanData;
+use indexmap::IndexSet;
 use serde::{Deserialize, Serialize};
-use text_size::TextRange;
-use tt::{Span, SyntaxContext};

-use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS};
+use crate::msg::ENCODE_CLOSE_SPAN_VERSION;

-pub trait SerializableSpan<const L: usize>: Span {
-    fn into_u32(self) -> [u32; L];
-    fn from_u32(input: [u32; L]) -> Self;
-}
-// impl SerializableSpan<1> for tt::TokenId {
-//     fn into_u32(self) -> [u32; 1] {
-//         [self.0]
-//     }
-//     fn from_u32([input]: [u32; 1]) -> Self {
-//         tt::TokenId(input)
-//     }
-// }
+pub type SpanDataIndexMap = IndexSet<SpanData>;

-impl<Anchor, Ctx> SerializableSpan<3> for tt::SpanData<Anchor, Ctx>
-where
-    Anchor: From<u32> + Into<u32>,
-    Self: Span,
-    Ctx: SyntaxContext,
-{
-    fn into_u32(self) -> [u32; 3] {
-        [self.anchor.into(), self.range.start().into(), self.range.end().into()]
-    }
-    fn from_u32([file_id, start, end]: [u32; 3]) -> Self {
-        tt::SpanData {
-            anchor: file_id.into(),
-            range: TextRange::new(start.into(), end.into()),
-            ctx: Ctx::DUMMY,
-        }
-    }
-}
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(pub u32);
+
+impl std::fmt::Debug for TokenId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+impl TokenId {
+    pub const DEF_SITE: Self = TokenId(0);
+    pub const CALL_SITE: Self = TokenId(0);
+    pub const MIXED_SITE: Self = TokenId(0);
+}
+
+impl tt::Span for TokenId {
+    const DUMMY: Self = TokenId(!0);
+
+    type Anchor = ();
+
+    fn anchor(self) -> Self::Anchor {
+        ()
+    }
+
+    fn mk(_: Self::Anchor, _: text_size::TextRange) -> Self {
+        Self::DUMMY
+    }
+}
@@ -82,82 +82,41 @@ pub struct FlatTree {
     ident: Vec<u32>,
     token_tree: Vec<u32>,
     text: Vec<String>,
-    #[serde(skip_serializing_if = "SpanMap::do_serialize")]
-    #[serde(default)]
-    span_map: SpanMap,
 }

-#[derive(Serialize, Deserialize, Debug)]
-struct SpanMap {
-    #[serde(skip_serializing)]
-    serialize: bool,
-    span_size: u32,
-    spans: Vec<u32>,
-}
-
-impl Default for SpanMap {
-    fn default() -> Self {
-        Self { serialize: false, span_size: 1, spans: Default::default() }
-    }
-}
-
-impl SpanMap {
-    fn serialize_span<const L: usize, S: SerializableSpan<L>>(&mut self, span: S) -> u32 {
-        let u32s = span.into_u32();
-        if L == 1 {
-            u32s[0]
-        } else {
-            let offset = self.spans.len() as u32;
-            self.spans.extend(u32s);
-            offset
-        }
-    }
-    fn deserialize_span<const L: usize, S: SerializableSpan<L>>(&self, offset: u32) -> S {
-        S::from_u32(if L == 1 {
-            [offset].as_ref().try_into().unwrap()
-        } else {
-            self.spans[offset as usize..][..L].try_into().unwrap()
-        })
-    }
-}
-
-impl SpanMap {
-    fn do_serialize(&self) -> bool {
-        self.serialize
-    }
-}
-
-struct SubtreeRepr<const L: usize, S> {
-    open: S,
-    close: S,
+struct SubtreeRepr {
+    open: TokenId,
+    close: TokenId,
     kind: tt::DelimiterKind,
     tt: [u32; 2],
 }

-struct LiteralRepr<const L: usize, S> {
-    id: S,
+struct LiteralRepr {
+    id: TokenId,
     text: u32,
 }

-struct PunctRepr<const L: usize, S> {
-    id: S,
+struct PunctRepr {
+    id: TokenId,
     char: char,
     spacing: tt::Spacing,
 }

-struct IdentRepr<const L: usize, S> {
-    id: S,
+struct IdentRepr {
+    id: TokenId,
     text: u32,
 }

 impl FlatTree {
-    pub fn new<const L: usize, S: SerializableSpan<L>>(
-        subtree: &tt::Subtree<S>,
+    pub fn new(
+        subtree: &tt::Subtree<SpanData>,
         version: u32,
+        span_data_table: &mut SpanDataIndexMap,
     ) -> FlatTree {
         let mut w = Writer {
             string_table: HashMap::new(),
             work: VecDeque::new(),
+            span_data_table,

             subtree: Vec::new(),
             literal: Vec::new(),
@@ -167,78 +126,111 @@ impl FlatTree {
             text: Vec::new(),
         };
         w.write(subtree);
-        assert!(L == 1 || version >= VARIABLE_SIZED_SPANS);
-        let mut span_map = SpanMap {
-            serialize: version >= VARIABLE_SIZED_SPANS && L != 1,
-            span_size: L as u32,
-            spans: Vec::new(),
-        };
-        return FlatTree {
+
+        FlatTree {
             subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
-                write_vec(&mut span_map, w.subtree, SubtreeRepr::write_with_close_span)
+                write_vec(w.subtree, SubtreeRepr::write_with_close_span)
             } else {
-                write_vec(&mut span_map, w.subtree, SubtreeRepr::write)
+                write_vec(w.subtree, SubtreeRepr::write)
             },
-            literal: write_vec(&mut span_map, w.literal, LiteralRepr::write),
-            punct: write_vec(&mut span_map, w.punct, PunctRepr::write),
-            ident: write_vec(&mut span_map, w.ident, IdentRepr::write),
+            literal: write_vec(w.literal, LiteralRepr::write),
+            punct: write_vec(w.punct, PunctRepr::write),
+            ident: write_vec(w.ident, IdentRepr::write),
             token_tree: w.token_tree,
             text: w.text,
-            span_map,
-        };
-
-        fn write_vec<T, F: Fn(T, &mut SpanMap) -> [u32; N], const N: usize>(
-            map: &mut SpanMap,
-            xs: Vec<T>,
-            f: F,
-        ) -> Vec<u32> {
-            xs.into_iter().flat_map(|it| f(it, map)).collect()
-        }
+        }
     }

-    pub fn to_subtree<const L: usize, S: SerializableSpan<L>>(
+    pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
+        let mut w = Writer {
+            string_table: HashMap::new(),
+            work: VecDeque::new(),
+            span_data_table: &mut (),
+
+            subtree: Vec::new(),
+            literal: Vec::new(),
+            punct: Vec::new(),
+            ident: Vec::new(),
+            token_tree: Vec::new(),
+            text: Vec::new(),
+        };
+        w.write(subtree);
+
+        FlatTree {
+            subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+                write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+            } else {
+                write_vec(w.subtree, SubtreeRepr::write)
+            },
+            literal: write_vec(w.literal, LiteralRepr::write),
+            punct: write_vec(w.punct, PunctRepr::write),
+            ident: write_vec(w.ident, IdentRepr::write),
+            token_tree: w.token_tree,
+            text: w.text,
+        }
+    }
+
+    pub fn to_subtree_resolved(
         self,
         version: u32,
-    ) -> tt::Subtree<S> {
-        assert!((version >= VARIABLE_SIZED_SPANS || L == 1) && L as u32 == self.span_map.span_size);
-        return Reader {
+        span_data_table: &SpanDataIndexMap,
+    ) -> tt::Subtree<SpanData> {
+        Reader {
             subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
-                read_vec(&self.span_map, self.subtree, SubtreeRepr::read_with_close_span)
+                read_vec(self.subtree, SubtreeRepr::read_with_close_span)
             } else {
-                read_vec(&self.span_map, self.subtree, SubtreeRepr::read)
+                read_vec(self.subtree, SubtreeRepr::read)
             },
-            literal: read_vec(&self.span_map, self.literal, LiteralRepr::read),
-            punct: read_vec(&self.span_map, self.punct, PunctRepr::read),
-            ident: read_vec(&self.span_map, self.ident, IdentRepr::read),
+            literal: read_vec(self.literal, LiteralRepr::read),
+            punct: read_vec(self.punct, PunctRepr::read),
+            ident: read_vec(self.ident, IdentRepr::read),
             token_tree: self.token_tree,
             text: self.text,
+            span_data_table,
         }
-        .read();
-
-        fn read_vec<T, F: Fn([u32; N], &SpanMap) -> T, const N: usize>(
+        .read()
+    }
+
+    pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
|
||||||
map: &SpanMap,
|
Reader {
|
||||||
xs: Vec<u32>,
|
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
|
||||||
f: F,
|
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
|
||||||
) -> Vec<T> {
|
} else {
|
||||||
let mut chunks = xs.chunks_exact(N);
|
read_vec(self.subtree, SubtreeRepr::read)
|
||||||
let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap(), map)).collect();
|
},
|
||||||
assert!(chunks.remainder().is_empty());
|
literal: read_vec(self.literal, LiteralRepr::read),
|
||||||
res
|
punct: read_vec(self.punct, PunctRepr::read),
|
||||||
|
ident: read_vec(self.ident, IdentRepr::read),
|
||||||
|
token_tree: self.token_tree,
|
||||||
|
text: self.text,
|
||||||
|
span_data_table: &(),
|
||||||
}
|
}
|
||||||
|
.read()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
|
fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
|
||||||
fn write(self, map: &mut SpanMap) -> [u32; 4] {
|
let mut chunks = xs.chunks_exact(N);
|
||||||
|
let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
|
||||||
|
assert!(chunks.remainder().is_empty());
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
|
||||||
|
xs.into_iter().flat_map(f).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SubtreeRepr {
|
||||||
|
fn write(self) -> [u32; 4] {
|
||||||
let kind = match self.kind {
|
let kind = match self.kind {
|
||||||
tt::DelimiterKind::Invisible => 0,
|
tt::DelimiterKind::Invisible => 0,
|
||||||
tt::DelimiterKind::Parenthesis => 1,
|
tt::DelimiterKind::Parenthesis => 1,
|
||||||
tt::DelimiterKind::Brace => 2,
|
tt::DelimiterKind::Brace => 2,
|
||||||
tt::DelimiterKind::Bracket => 3,
|
tt::DelimiterKind::Bracket => 3,
|
||||||
};
|
};
|
||||||
[map.serialize_span(self.open), kind, self.tt[0], self.tt[1]]
|
[self.open.0, kind, self.tt[0], self.tt[1]]
|
||||||
}
|
}
|
||||||
fn read([open, kind, lo, len]: [u32; 4], map: &SpanMap) -> Self {
|
fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
|
||||||
let kind = match kind {
|
let kind = match kind {
|
||||||
0 => tt::DelimiterKind::Invisible,
|
0 => tt::DelimiterKind::Invisible,
|
||||||
1 => tt::DelimiterKind::Parenthesis,
|
1 => tt::DelimiterKind::Parenthesis,
|
||||||
|
@ -246,24 +238,18 @@ impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
|
||||||
3 => tt::DelimiterKind::Bracket,
|
3 => tt::DelimiterKind::Bracket,
|
||||||
other => panic!("bad kind {other}"),
|
other => panic!("bad kind {other}"),
|
||||||
};
|
};
|
||||||
SubtreeRepr { open: map.deserialize_span(open), close: S::DUMMY, kind, tt: [lo, len] }
|
SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
|
||||||
}
|
}
|
||||||
fn write_with_close_span(self, map: &mut SpanMap) -> [u32; 5] {
|
fn write_with_close_span(self) -> [u32; 5] {
|
||||||
let kind = match self.kind {
|
let kind = match self.kind {
|
||||||
tt::DelimiterKind::Invisible => 0,
|
tt::DelimiterKind::Invisible => 0,
|
||||||
tt::DelimiterKind::Parenthesis => 1,
|
tt::DelimiterKind::Parenthesis => 1,
|
||||||
tt::DelimiterKind::Brace => 2,
|
tt::DelimiterKind::Brace => 2,
|
||||||
tt::DelimiterKind::Bracket => 3,
|
tt::DelimiterKind::Bracket => 3,
|
||||||
};
|
};
|
||||||
[
|
[self.open.0, self.close.0, kind, self.tt[0], self.tt[1]]
|
||||||
map.serialize_span(self.open),
|
|
||||||
map.serialize_span(self.close),
|
|
||||||
kind,
|
|
||||||
self.tt[0],
|
|
||||||
self.tt[1],
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
fn read_with_close_span([open, close, kind, lo, len]: [u32; 5], map: &SpanMap) -> Self {
|
fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr {
|
||||||
let kind = match kind {
|
let kind = match kind {
|
||||||
0 => tt::DelimiterKind::Invisible,
|
0 => tt::DelimiterKind::Invisible,
|
||||||
1 => tt::DelimiterKind::Parenthesis,
|
1 => tt::DelimiterKind::Parenthesis,
|
||||||
|
@ -271,64 +257,86 @@ impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<L, S> {
|
||||||
3 => tt::DelimiterKind::Bracket,
|
3 => tt::DelimiterKind::Bracket,
|
||||||
other => panic!("bad kind {other}"),
|
other => panic!("bad kind {other}"),
|
||||||
};
|
};
|
||||||
SubtreeRepr {
|
SubtreeRepr { open: TokenId(open), close: TokenId(close), kind, tt: [lo, len] }
|
||||||
open: map.deserialize_span(open),
|
|
||||||
close: map.deserialize_span(close),
|
|
||||||
kind,
|
|
||||||
tt: [lo, len],
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<const L: usize, S: SerializableSpan<L>> LiteralRepr<L, S> {
|
impl LiteralRepr {
|
||||||
fn write(self, map: &mut SpanMap) -> [u32; 2] {
|
fn write(self) -> [u32; 2] {
|
||||||
[map.serialize_span(self.id), self.text]
|
[self.id.0, self.text]
|
||||||
}
|
}
|
||||||
fn read([id, text]: [u32; 2], map: &SpanMap) -> Self {
|
fn read([id, text]: [u32; 2]) -> LiteralRepr {
|
||||||
LiteralRepr { id: map.deserialize_span(id), text }
|
LiteralRepr { id: TokenId(id), text }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<const L: usize, S: SerializableSpan<L>> PunctRepr<L, S> {
|
impl PunctRepr {
|
||||||
fn write(self, map: &mut SpanMap) -> [u32; 3] {
|
fn write(self) -> [u32; 3] {
|
||||||
let spacing = match self.spacing {
|
let spacing = match self.spacing {
|
||||||
tt::Spacing::Alone => 0,
|
tt::Spacing::Alone => 0,
|
||||||
tt::Spacing::Joint => 1,
|
tt::Spacing::Joint => 1,
|
||||||
};
|
};
|
||||||
[map.serialize_span(self.id), self.char as u32, spacing]
|
[self.id.0, self.char as u32, spacing]
|
||||||
}
|
}
|
||||||
fn read([id, char, spacing]: [u32; 3], map: &SpanMap) -> Self {
|
fn read([id, char, spacing]: [u32; 3]) -> PunctRepr {
|
||||||
let spacing = match spacing {
|
let spacing = match spacing {
|
||||||
0 => tt::Spacing::Alone,
|
0 => tt::Spacing::Alone,
|
||||||
1 => tt::Spacing::Joint,
|
1 => tt::Spacing::Joint,
|
||||||
other => panic!("bad spacing {other}"),
|
other => panic!("bad spacing {other}"),
|
||||||
};
|
};
|
||||||
PunctRepr { id: map.deserialize_span(id), char: char.try_into().unwrap(), spacing }
|
PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<const L: usize, S: SerializableSpan<L>> IdentRepr<L, S> {
|
impl IdentRepr {
|
||||||
fn write(self, map: &mut SpanMap) -> [u32; 2] {
|
fn write(self) -> [u32; 2] {
|
||||||
[map.serialize_span(self.id), self.text]
|
[self.id.0, self.text]
|
||||||
}
|
}
|
||||||
fn read(data: [u32; 2], map: &SpanMap) -> Self {
|
fn read(data: [u32; 2]) -> IdentRepr {
|
||||||
IdentRepr { id: map.deserialize_span(data[0]), text: data[1] }
|
IdentRepr { id: TokenId(data[0]), text: data[1] }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Writer<'a, const L: usize, S> {
|
trait Span: Copy {
|
||||||
|
type Table;
|
||||||
|
fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId;
|
||||||
|
fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Span for TokenId {
|
||||||
|
type Table = ();
|
||||||
|
fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId {
|
||||||
|
token_id
|
||||||
|
}
|
||||||
|
|
||||||
|
fn span_for_token_id((): &Self::Table, id: TokenId) -> Self {
|
||||||
|
id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Span for SpanData {
|
||||||
|
type Table = IndexSet<SpanData>;
|
||||||
|
fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
|
||||||
|
TokenId(table.insert_full(span).0 as u32)
|
||||||
|
}
|
||||||
|
fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self {
|
||||||
|
*table.get_index(id.0 as usize).unwrap_or_else(|| &table[0])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Writer<'a, 'span, S: Span> {
|
||||||
work: VecDeque<(usize, &'a tt::Subtree<S>)>,
|
work: VecDeque<(usize, &'a tt::Subtree<S>)>,
|
||||||
string_table: HashMap<&'a str, u32>,
|
string_table: HashMap<&'a str, u32>,
|
||||||
|
span_data_table: &'span mut S::Table,
|
||||||
|
|
||||||
subtree: Vec<SubtreeRepr<L, S>>,
|
subtree: Vec<SubtreeRepr>,
|
||||||
literal: Vec<LiteralRepr<L, S>>,
|
literal: Vec<LiteralRepr>,
|
||||||
punct: Vec<PunctRepr<L, S>>,
|
punct: Vec<PunctRepr>,
|
||||||
ident: Vec<IdentRepr<L, S>>,
|
ident: Vec<IdentRepr>,
|
||||||
token_tree: Vec<u32>,
|
token_tree: Vec<u32>,
|
||||||
text: Vec<String>,
|
text: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
|
impl<'a, 'span, S: Span> Writer<'a, 'span, S> {
|
||||||
fn write(&mut self, root: &'a tt::Subtree<S>) {
|
fn write(&mut self, root: &'a tt::Subtree<S>) {
|
||||||
self.enqueue(root);
|
self.enqueue(root);
|
||||||
while let Some((idx, subtree)) = self.work.pop_front() {
|
while let Some((idx, subtree)) = self.work.pop_front() {
|
||||||
|
@ -336,6 +344,10 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn token_id_of(&mut self, span: S) -> TokenId {
|
||||||
|
S::token_id_of(self.span_data_table, span)
|
||||||
|
}
|
||||||
|
|
||||||
fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) {
|
fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) {
|
||||||
let mut first_tt = self.token_tree.len();
|
let mut first_tt = self.token_tree.len();
|
||||||
let n_tt = subtree.token_trees.len();
|
let n_tt = subtree.token_trees.len();
|
||||||
|
@ -353,22 +365,21 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
|
||||||
tt::Leaf::Literal(lit) => {
|
tt::Leaf::Literal(lit) => {
|
||||||
let idx = self.literal.len() as u32;
|
let idx = self.literal.len() as u32;
|
||||||
let text = self.intern(&lit.text);
|
let text = self.intern(&lit.text);
|
||||||
self.literal.push(LiteralRepr { id: lit.span, text });
|
let id = self.token_id_of(lit.span);
|
||||||
|
self.literal.push(LiteralRepr { id, text });
|
||||||
idx << 2 | 0b01
|
idx << 2 | 0b01
|
||||||
}
|
}
|
||||||
tt::Leaf::Punct(punct) => {
|
tt::Leaf::Punct(punct) => {
|
||||||
let idx = self.punct.len() as u32;
|
let idx = self.punct.len() as u32;
|
||||||
self.punct.push(PunctRepr {
|
let id = self.token_id_of(punct.span);
|
||||||
char: punct.char,
|
self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id });
|
||||||
spacing: punct.spacing,
|
|
||||||
id: punct.span,
|
|
||||||
});
|
|
||||||
idx << 2 | 0b10
|
idx << 2 | 0b10
|
||||||
}
|
}
|
||||||
tt::Leaf::Ident(ident) => {
|
tt::Leaf::Ident(ident) => {
|
||||||
let idx = self.ident.len() as u32;
|
let idx = self.ident.len() as u32;
|
||||||
let text = self.intern(&ident.text);
|
let text = self.intern(&ident.text);
|
||||||
self.ident.push(IdentRepr { id: ident.span, text });
|
let id = self.token_id_of(ident.span);
|
||||||
|
self.ident.push(IdentRepr { id, text });
|
||||||
idx << 2 | 0b11
|
idx << 2 | 0b11
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -380,8 +391,8 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
|
||||||
|
|
||||||
fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 {
|
fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 {
|
||||||
let idx = self.subtree.len();
|
let idx = self.subtree.len();
|
||||||
let open = subtree.delimiter.open;
|
let open = self.token_id_of(subtree.delimiter.open);
|
||||||
let close = subtree.delimiter.close;
|
let close = self.token_id_of(subtree.delimiter.close);
|
||||||
let delimiter_kind = subtree.delimiter.kind;
|
let delimiter_kind = subtree.delimiter.kind;
|
||||||
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
|
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
|
||||||
self.work.push_back((idx, subtree));
|
self.work.push_back((idx, subtree));
|
||||||
|
@ -398,23 +409,29 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Reader<const L: usize, S> {
|
struct Reader<'span, S: Span> {
|
||||||
subtree: Vec<SubtreeRepr<L, S>>,
|
subtree: Vec<SubtreeRepr>,
|
||||||
literal: Vec<LiteralRepr<L, S>>,
|
literal: Vec<LiteralRepr>,
|
||||||
punct: Vec<PunctRepr<L, S>>,
|
punct: Vec<PunctRepr>,
|
||||||
ident: Vec<IdentRepr<L, S>>,
|
ident: Vec<IdentRepr>,
|
||||||
token_tree: Vec<u32>,
|
token_tree: Vec<u32>,
|
||||||
text: Vec<String>,
|
text: Vec<String>,
|
||||||
|
span_data_table: &'span S::Table,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
|
impl<'span, S: Span> Reader<'span, S> {
|
||||||
pub(crate) fn read(self) -> tt::Subtree<S> {
|
pub(crate) fn read(self) -> tt::Subtree<S> {
|
||||||
let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()];
|
let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()];
|
||||||
|
let read_span = |id| S::span_for_token_id(self.span_data_table, id);
|
||||||
for i in (0..self.subtree.len()).rev() {
|
for i in (0..self.subtree.len()).rev() {
|
||||||
let repr = &self.subtree[i];
|
let repr = &self.subtree[i];
|
||||||
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
|
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
|
||||||
let s = tt::Subtree {
|
let s = tt::Subtree {
|
||||||
delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind },
|
delimiter: tt::Delimiter {
|
||||||
|
open: read_span(repr.open),
|
||||||
|
close: read_span(repr.close),
|
||||||
|
kind: repr.kind,
|
||||||
|
},
|
||||||
token_trees: token_trees
|
token_trees: token_trees
|
||||||
.iter()
|
.iter()
|
||||||
.copied()
|
.copied()
|
||||||
|
@ -429,7 +446,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
|
||||||
let repr = &self.literal[idx];
|
let repr = &self.literal[idx];
|
||||||
tt::Leaf::Literal(tt::Literal {
|
tt::Leaf::Literal(tt::Literal {
|
||||||
text: self.text[repr.text as usize].as_str().into(),
|
text: self.text[repr.text as usize].as_str().into(),
|
||||||
span: repr.id,
|
span: read_span(repr.id),
|
||||||
})
|
})
|
||||||
.into()
|
.into()
|
||||||
}
|
}
|
||||||
|
@ -438,7 +455,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
|
||||||
tt::Leaf::Punct(tt::Punct {
|
tt::Leaf::Punct(tt::Punct {
|
||||||
char: repr.char,
|
char: repr.char,
|
||||||
spacing: repr.spacing,
|
spacing: repr.spacing,
|
||||||
span: repr.id,
|
span: read_span(repr.id),
|
||||||
})
|
})
|
||||||
.into()
|
.into()
|
||||||
}
|
}
|
||||||
|
@ -446,7 +463,7 @@ impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> {
|
||||||
let repr = &self.ident[idx];
|
let repr = &self.ident[idx];
|
||||||
tt::Leaf::Ident(tt::Ident {
|
tt::Leaf::Ident(tt::Ident {
|
||||||
text: self.text[repr.text as usize].as_str().into(),
|
text: self.text[repr.text as usize].as_str().into(),
|
||||||
span: repr.id,
|
span: read_span(repr.id),
|
||||||
})
|
})
|
||||||
.into()
|
.into()
|
||||||
}
|
}
|
||||||
|
|
|
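The new `Span` trait above is what lets `FlatTree` serve both sides of the wire: the proc-macro server serializes raw `TokenId`s against a `()` table (`new_raw`/`to_subtree_unresolved`), while the client interns full `SpanData` through an `IndexSet` whose insertion index becomes the wire id (`new`/`to_subtree_resolved`). A minimal, self-contained sketch of that interning pattern follows — `FakeSpan` and the helper names are illustrative stand-ins rather than the crate's real definitions, and only the `indexmap` crate is assumed.

// Minimal sketch of the span-interning pattern; assumes only `indexmap`.
use indexmap::IndexSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TokenId(u32);

// Hypothetical stand-in for rust-analyzer's `SpanData`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FakeSpan {
    file: u32,
    range: (u32, u32),
}

trait Span: Copy {
    type Table;
    fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId;
    fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self;
}

// Raw mode: the spans already are token ids, so no table is needed.
impl Span for TokenId {
    type Table = ();
    fn token_id_of((): &mut (), span: Self) -> TokenId {
        span
    }
    fn span_for_token_id((): &(), id: TokenId) -> Self {
        id
    }
}

// Resolved mode: spans are interned; their set index becomes the wire id.
impl Span for FakeSpan {
    type Table = IndexSet<FakeSpan>;
    fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
        TokenId(table.insert_full(span).0 as u32)
    }
    fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self {
        *table.get_index(id.0 as usize).expect("id was produced by token_id_of")
    }
}

fn main() {
    let mut table = IndexSet::new();
    let span = FakeSpan { file: 1, range: (0, 5) };
    let id = FakeSpan::token_id_of(&mut table, span);
    assert_eq!(FakeSpan::span_for_token_id(&table, id), span);
    // Interning is idempotent: the same span always maps to the same id.
    assert_eq!(FakeSpan::token_id_of(&mut table, span), id);
}

The round trip is lossless as long as reader and writer share the same table, which is why `to_subtree_resolved` takes the same `SpanDataIndexMap` that `new` filled.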
@@ -18,14 +18,12 @@ fn main() -> std::io::Result<()> {
     run()
 }
 
-#[cfg(not(FALSE))]
-#[cfg(not(feature = "sysroot-abi"))]
+#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
 fn run() -> io::Result<()> {
     panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
 }
 
-#[cfg(FALSE)]
-#[cfg(feature = "sysroot-abi")]
+#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
 fn run() -> io::Result<()> {
     use proc_macro_api::msg::{self, Message};
 
@@ -11,7 +11,7 @@ use libloading::Library;
 use memmap2::Mmap;
 use object::Object;
 use paths::AbsPath;
-use proc_macro_api::{read_dylib_info, ProcMacroKind};
+use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind};
 
 const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
 
@@ -152,8 +152,14 @@ impl Expander {
         macro_name: &str,
         macro_body: &crate::tt::Subtree,
         attributes: Option<&crate::tt::Subtree>,
+        def_site: TokenId,
+        call_site: TokenId,
+        mixed_site: TokenId,
     ) -> Result<crate::tt::Subtree, String> {
-        let result = self.inner.proc_macros.expand(macro_name, macro_body, attributes);
+        let result = self
+            .inner
+            .proc_macros
+            .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site);
         result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
     }
 
@@ -10,7 +10,6 @@
 //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
 //!   rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
 
-#![cfg(FALSE)] // TODO
 #![cfg(any(feature = "sysroot-abi", rust_analyzer))]
 #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
 #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
@@ -32,12 +31,25 @@ use std::{
     time::SystemTime,
 };
 
+use ::tt::Span;
 use proc_macro_api::{
-    msg::{self, CURRENT_API_VERSION},
+    msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION, HAS_GLOBAL_SPANS},
     ProcMacroKind,
 };
 
-use ::tt::token_id as tt;
+mod tt {
+    pub use proc_macro_api::msg::TokenId;
+
+    pub use ::tt::*;
+
+    pub type Subtree = ::tt::Subtree<TokenId>;
+    pub type TokenTree = ::tt::TokenTree<TokenId>;
+    pub type Delimiter = ::tt::Delimiter<TokenId>;
+    pub type Leaf = ::tt::Leaf<TokenId>;
+    pub type Literal = ::tt::Literal<TokenId>;
+    pub type Punct = ::tt::Punct<TokenId>;
+    pub type Ident = ::tt::Ident<TokenId>;
+}
 
 // see `build.rs`
 include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
@@ -71,16 +83,28 @@ impl ProcMacroSrv {
             None => None,
         };
 
-        let macro_body = task.macro_body.to_subtree(CURRENT_API_VERSION);
-        let attributes = task.attributes.map(|it| it.to_subtree(CURRENT_API_VERSION));
+        let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans;
+        let def_site = TokenId(def_site as u32);
+        let call_site = TokenId(call_site as u32);
+        let mixed_site = TokenId(mixed_site as u32);
+
+        let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
+        let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
         let result = thread::scope(|s| {
             let thread = thread::Builder::new()
                 .stack_size(EXPANDER_STACK_SIZE)
                 .name(task.macro_name.clone())
                 .spawn_scoped(s, || {
                     expander
-                        .expand(&task.macro_name, &macro_body, attributes.as_ref())
-                        .map(|it| msg::FlatTree::new(&it, CURRENT_API_VERSION))
+                        .expand(
+                            &task.macro_name,
+                            &macro_body,
+                            attributes.as_ref(),
+                            def_site,
+                            call_site,
+                            mixed_site,
+                        )
+                        .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
                 });
             let res = match thread {
                 Ok(handle) => handle.join(),
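One small but load-bearing detail above: `use ::tt::token_id as tt;` is replaced by a local `mod tt` that re-exports the generic `tt` items and pins them to `TokenId`, so the rest of the crate can keep writing `tt::Subtree` with no type parameter. A compressed, hypothetical sketch of that shadowing trick — `generic_tt` stands in for the real `tt` crate:

// Hypothetical stand-in for the generic `tt` crate.
mod generic_tt {
    #[derive(Debug, Clone, Copy)]
    pub struct Subtree<S> {
        pub open: S,
        pub close: S,
    }
}

#[derive(Debug, Clone, Copy)]
pub struct TokenId(pub u32);

mod tt {
    // The glob re-export keeps everything else reachable; the explicit
    // alias below shadows the glob-imported generic `Subtree`.
    pub use super::generic_tt::*;

    pub type Subtree = super::generic_tt::Subtree<super::TokenId>;
}

fn main() {
    // Downstream code writes `tt::Subtree` with the span type fixed once.
    let subtree: tt::Subtree = tt::Subtree { open: TokenId(0), close: TokenId(0) };
    println!("{subtree:?}");
}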
@@ -1,7 +1,7 @@
 //! Proc macro ABI
 
 use libloading::Library;
-use proc_macro_api::{ProcMacroKind, RustCInfo};
+use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo};
 
 use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt};
 
@@ -45,6 +45,9 @@ impl ProcMacros {
         macro_name: &str,
         macro_body: &tt::Subtree,
         attributes: Option<&tt::Subtree>,
+        def_site: TokenId,
+        call_site: TokenId,
+        mixed_site: TokenId,
     ) -> Result<tt::Subtree, crate::PanicMessage> {
         let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone());
 
@@ -59,34 +62,56 @@ impl ProcMacros {
                 } if *trait_name == macro_name => {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+                        crate::server::RustAnalyzer {
+                            interner: &SYMBOL_INTERNER,
+                            call_site,
+                            def_site,
+                            mixed_site,
+                        },
                         parsed_body,
                         true,
                     );
-                    return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+                    return res
+                        .map(|it| it.into_subtree(call_site))
+                        .map_err(crate::PanicMessage::from);
                 }
                 proc_macro::bridge::client::ProcMacro::Bang { name, client }
                     if *name == macro_name =>
                 {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+                        crate::server::RustAnalyzer {
+                            interner: &SYMBOL_INTERNER,
+                            call_site,
+                            def_site,
+                            mixed_site,
+                        },
                         parsed_body,
                         true,
                     );
-                    return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+                    return res
+                        .map(|it| it.into_subtree(call_site))
+                        .map_err(crate::PanicMessage::from);
                 }
                 proc_macro::bridge::client::ProcMacro::Attr { name, client }
                     if *name == macro_name =>
                 {
                     let res = client.run(
                         &proc_macro::bridge::server::SameThread,
-                        crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+                        crate::server::RustAnalyzer {
+                            interner: &SYMBOL_INTERNER,
+                            call_site,
+                            def_site,
+                            mixed_site,
+                        },
                         parsed_attributes,
                         parsed_body,
                         true,
                     );
-                    return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+                    return res
+                        .map(|it| it.into_subtree(call_site))
+                        .map_err(crate::PanicMessage::from);
                 }
                 _ => continue,
             }
@@ -11,6 +11,7 @@
 use proc_macro::bridge::{self, server};
 
 mod token_stream;
+use proc_macro_api::msg::TokenId;
 pub use token_stream::TokenStream;
 use token_stream::TokenStreamBuilder;
 
@@ -43,6 +44,9 @@ pub struct FreeFunctions;
 pub struct RustAnalyzer {
     // FIXME: store span information here.
     pub(crate) interner: SymbolInternerRef,
+    pub call_site: TokenId,
+    pub def_site: TokenId,
+    pub mixed_site: TokenId,
 }
 
 impl server::Types for RustAnalyzer {
@@ -69,7 +73,7 @@ impl server::FreeFunctions for RustAnalyzer {
             kind: bridge::LitKind::Err,
             symbol: Symbol::intern(self.interner, s),
             suffix: None,
-            span: tt::TokenId::unspecified(),
+            span: self.call_site,
         })
     }
 
@@ -83,7 +87,7 @@ impl server::TokenStream for RustAnalyzer {
         stream.is_empty()
     }
     fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        src.parse().expect("cannot parse string")
+        Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string")
     }
     fn to_string(&mut self, stream: &Self::TokenStream) -> String {
         stream.to_string()
@@ -280,7 +284,7 @@ impl server::Span for RustAnalyzer {
     }
     fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
         // FIXME stub
-        tt::TokenId::unspecified()
+        self.call_site
     }
     /// Recent feature, not yet in the proc_macro
     ///
@@ -317,15 +321,15 @@ impl server::Span for RustAnalyzer {
     }
     fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
         // FIXME handle span
-        tt::TokenId::unspecified()
+        self.call_site
     }
 
     fn end(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
+        self.call_site
     }
 
     fn start(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
+        self.call_site
     }
 
     fn line(&mut self, _span: Self::Span) -> usize {
@@ -349,9 +353,9 @@ impl server::Symbol for RustAnalyzer {
 impl server::Server for RustAnalyzer {
     fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
         bridge::ExpnGlobals {
-            def_site: Span::unspecified(),
-            call_site: Span::unspecified(),
-            mixed_site: Span::unspecified(),
+            def_site: self.def_site,
+            call_site: self.call_site,
+            mixed_site: self.mixed_site,
         }
     }
 
@@ -422,6 +426,8 @@ impl LiteralFormatter {
 
 #[cfg(test)]
 mod tests {
+    use ::tt::Span;
+
     use super::*;
 
     #[test]
@@ -430,16 +436,16 @@ mod tests {
             token_trees: vec![
                 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                     text: "struct".into(),
-                    span: tt::TokenId::unspecified(),
+                    span: tt::TokenId::DUMMY,
                 })),
                 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                     text: "T".into(),
-                    span: tt::TokenId::unspecified(),
+                    span: tt::TokenId::DUMMY,
                 })),
                 tt::TokenTree::Subtree(tt::Subtree {
                     delimiter: tt::Delimiter {
-                        open: tt::TokenId::unspecified(),
-                        close: tt::TokenId::unspecified(),
+                        open: tt::TokenId::DUMMY,
+                        close: tt::TokenId::DUMMY,
                         kind: tt::DelimiterKind::Brace,
                     },
                     token_trees: vec![],
@@ -452,33 +458,32 @@ mod tests {
 
     #[test]
     fn test_ra_server_from_str() {
-        use std::str::FromStr;
         let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
             delimiter: tt::Delimiter {
-                open: tt::TokenId::unspecified(),
-                close: tt::TokenId::unspecified(),
+                open: tt::TokenId::DUMMY,
+                close: tt::TokenId::DUMMY,
                 kind: tt::DelimiterKind::Parenthesis,
             },
             token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                 text: "a".into(),
-                span: tt::TokenId::unspecified(),
+                span: tt::TokenId::DUMMY,
             }))],
         });
 
-        let t1 = TokenStream::from_str("(a)").unwrap();
+        let t1 = TokenStream::from_str("(a)", tt::TokenId::DUMMY).unwrap();
         assert_eq!(t1.token_trees.len(), 1);
         assert_eq!(t1.token_trees[0], subtree_paren_a);
 
-        let t2 = TokenStream::from_str("(a);").unwrap();
+        let t2 = TokenStream::from_str("(a);", tt::TokenId::DUMMY).unwrap();
         assert_eq!(t2.token_trees.len(), 2);
         assert_eq!(t2.token_trees[0], subtree_paren_a);
 
-        let underscore = TokenStream::from_str("_").unwrap();
+        let underscore = TokenStream::from_str("_", tt::TokenId::DUMMY).unwrap();
         assert_eq!(
             underscore.token_trees[0],
             tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                 text: "_".into(),
-                span: tt::TokenId::unspecified(),
+                span: tt::TokenId::DUMMY,
             }))
         );
     }
 
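With the three expansion-site ids stored on the server object, every span query that previously answered `TokenId::unspecified()` now degrades to the call site, and `globals()` hands the real ids to the bridge. A toy model of that fallback behaviour, using stand-in types rather than the actual `proc_macro::bridge` traits:

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct TokenId(u32);

// Hypothetical stand-in for `server::RustAnalyzer`: it now owns the three
// expansion-site ids for the duration of one macro expansion.
struct SpanServer {
    def_site: TokenId,
    call_site: TokenId,
    mixed_site: TokenId,
}

impl SpanServer {
    // Handed to the bridge once per expansion, replacing the old
    // `Span::unspecified()` placeholders.
    fn globals(&self) -> (TokenId, TokenId, TokenId) {
        (self.def_site, self.call_site, self.mixed_site)
    }

    // Queries that cannot be answered precisely yet all degrade to the
    // call site instead of an unspecified dummy id.
    fn start(&self, _span: TokenId) -> TokenId {
        self.call_site
    }
    fn end(&self, _span: TokenId) -> TokenId {
        self.call_site
    }
    fn resolved_at(&self, _span: TokenId, _at: TokenId) -> TokenId {
        self.call_site
    }
}

fn main() {
    let srv = SpanServer { def_site: TokenId(0), call_site: TokenId(1), mixed_site: TokenId(2) };
    assert_eq!(srv.globals(), (TokenId(0), TokenId(1), TokenId(2)));
    assert_eq!(srv.start(TokenId(42)), TokenId(1));
}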
@@ -1,5 +1,7 @@
 //! TokenStream implementation used by sysroot ABI
 
+use proc_macro_api::msg::TokenId;
+
 use crate::tt::{self, TokenTree};
 
 #[derive(Debug, Default, Clone)]
@@ -20,8 +22,15 @@ impl TokenStream {
         }
     }
 
-    pub(crate) fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
+    pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree {
+        tt::Subtree {
+            delimiter: tt::Delimiter {
+                open: call_site,
+                close: call_site,
+                kind: tt::DelimiterKind::Invisible,
+            },
+            token_trees: self.token_trees,
+        }
     }
 
     pub(super) fn is_empty(&self) -> bool {
@@ -84,7 +93,7 @@ pub(super) struct TokenStreamBuilder {
 
 /// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
 pub(super) mod token_stream {
-    use std::str::FromStr;
+    use proc_macro_api::msg::TokenId;
 
     use super::{tt, TokenStream, TokenTree};
 
@@ -109,14 +118,15 @@ pub(super) mod token_stream {
     ///
     /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
     /// change these errors into `LexError`s later.
-    impl FromStr for TokenStream {
-        type Err = LexError;
+    #[rustfmt::skip]
+    impl /*FromStr for*/ TokenStream {
+        // type Err = LexError;
 
-        fn from_str(src: &str) -> Result<TokenStream, LexError> {
-            let (subtree, _token_map) =
-                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+        pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result<TokenStream, LexError> {
+            let subtree =
+                mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
 
-            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+            let subtree = subtree_replace_token_ids_with_call_site(subtree, call_site);
             Ok(TokenStream::with_subtree(subtree))
         }
     }
@@ -127,43 +137,39 @@ pub(super) mod token_stream {
         }
     }
 
-    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+    fn subtree_replace_token_ids_with_call_site(
+        subtree: tt::Subtree,
+        call_site: TokenId,
+    ) -> tt::Subtree {
         tt::Subtree {
-            delimiter: tt::Delimiter {
-                open: tt::TokenId::UNSPECIFIED,
-                close: tt::TokenId::UNSPECIFIED,
-                ..subtree.delimiter
-            },
+            delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter },
             token_trees: subtree
                 .token_trees
                 .into_iter()
-                .map(token_tree_replace_token_ids_with_unspecified)
+                .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site))
                 .collect(),
         }
     }
 
-    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+    fn token_tree_replace_token_ids_with_call_site(
+        tt: tt::TokenTree,
+        call_site: TokenId,
+    ) -> tt::TokenTree {
         match tt {
             tt::TokenTree::Leaf(leaf) => {
-                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+                tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site))
             }
             tt::TokenTree::Subtree(subtree) => {
-                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+                tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site))
             }
         }
     }
 
-    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+    fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf {
         match leaf {
-            tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
-            }
-            tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
-            }
-            tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
-            }
+            tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }),
+            tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }),
+            tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }),
        }
     }
 }
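The renamed helper trio at the bottom is a plain structural recursion: every span produced by parsing is overwritten with the call-site id, so freshly parsed tokens resolve at the macro call site. A self-contained sketch of the same rewrite over deliberately simplified token trees (the real `tt` types also carry delimiter kinds, spacing, and text):

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct TokenId(u32);

// Simplified stand-in for the `tt` token-tree types.
#[derive(Debug)]
enum TokenTree {
    Leaf { span: TokenId },
    Subtree { open: TokenId, close: TokenId, children: Vec<TokenTree> },
}

// Overwrite every span in the tree with the call-site id, recursing into
// subtrees — the same shape as the `*_replace_token_ids_with_call_site`
// helpers above.
fn replace_spans(tt: TokenTree, call_site: TokenId) -> TokenTree {
    match tt {
        TokenTree::Leaf { .. } => TokenTree::Leaf { span: call_site },
        TokenTree::Subtree { children, .. } => TokenTree::Subtree {
            open: call_site,
            close: call_site,
            children: children.into_iter().map(|c| replace_spans(c, call_site)).collect(),
        },
    }
}

fn main() {
    let tree = TokenTree::Subtree {
        open: TokenId(7),
        close: TokenId(8),
        children: vec![TokenTree::Leaf { span: TokenId(9) }],
    };
    match replace_spans(tree, TokenId(1)) {
        TokenTree::Subtree { open, close, children } => {
            assert_eq!((open, close), (TokenId(1), TokenId(1)));
            assert!(matches!(children[0], TokenTree::Leaf { span: TokenId(1) }));
        }
        TokenTree::Leaf { .. } => unreachable!(),
    }
}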
@@ -1,18 +1,19 @@
 //! utils used in proc-macro tests
 
 use expect_test::Expect;
-use std::str::FromStr;
+use proc_macro_api::msg::TokenId;
+use tt::Span;
 
 use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
 
-fn parse_string(code: &str) -> Option<crate::server::TokenStream> {
+fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> {
     // This is a bit strange. We need to parse a string into a token stream into
     // order to create a tt::SubTree from it in fixtures. `into_subtree` is
     // implemented by all the ABIs we have so we arbitrarily choose one ABI to
     // write a `parse_string` function for and use that. The tests don't really
     // care which ABI we're using as the `into_subtree` function isn't part of
     // the ABI and shouldn't change between ABI versions.
-    crate::server::TokenStream::from_str(code).ok()
+    crate::server::TokenStream::from_str(code, call_site).ok()
 }
 
 pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
@@ -24,12 +25,22 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, expect: Expect) {
 }
 
 fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+    let call_site = TokenId::DUMMY;
     let path = proc_macro_test_dylib_path();
     let expander = dylib::Expander::new(&path).unwrap();
-    let fixture = parse_string(input).unwrap();
-    let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
+    let fixture = parse_string(input, call_site).unwrap();
+    let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
 
-    let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
+    let res = expander
+        .expand(
+            macro_name,
+            &fixture.into_subtree(call_site),
+            attr.as_ref(),
+            TokenId::DUMMY,
+            TokenId::DUMMY,
+            TokenId::DUMMY,
+        )
+        .unwrap();
     expect.assert_eq(&format!("{res:?}"));
 }
 
@@ -56,7 +56,7 @@ fn integrated_highlighting_benchmark() {
         analysis.highlight_as_html(file_id, false).unwrap();
     }
 
-    profile::init_from("*>1");
+    profile::init_from("*>100");
 
     {
        let _it = stdx::timeit("change");
 