Proper span representation with syntax context

Lukas Wirth 2023-10-06 14:47:11 +02:00
parent 890eb17b4e
commit e36b3f7b8c
16 changed files with 414 additions and 470 deletions
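
In short: spans grow a third component. Where `tt::SpanData<Anchor>` previously carried only a text range and an anchor, it now also records a syntax context for hygiene, and `SyntaxContext` becomes a trait implemented both by the real interned `SyntaxContextId` and by dummy types in tests. A minimal sketch of the resulting shape, with field names taken from the `tt` crate hunk at the end of this diff:

// Sketch of the new span layout (the real definition is in the tt crate hunk below).
use text_size::TextRange;

pub struct SpanData<Anchor, Ctx> {
    /// Text range relative to `anchor`; keeping ranges relative avoids
    /// recomputing every span whenever a file changes.
    pub range: TextRange,
    /// Which file/AST node the range is anchored to.
    pub anchor: Anchor,
    /// The syntax context (hygiene information) of the span - the new piece.
    pub ctx: Ctx,
}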

@@ -1,6 +1,7 @@
use std::fmt;
use salsa::InternId;
use tt::SyntaxContext;
use vfs::FileId;
pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
@@ -9,10 +10,17 @@ pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct SyntaxContext;
pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
pub type SpanData = tt::SpanData<SpanAnchor>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SyntaxContextId(InternId);
crate::impl_intern_key!(SyntaxContextId);
impl SyntaxContext for SyntaxContextId {
// FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
// currently (which kind of makes sense but we need it here!)
const DUMMY: Self = SyntaxContextId(unsafe { core::mem::transmute(1) });
}
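
A note on the FIXME above: the transmute leans on salsa's `InternId` being layout-compatible with a `NonZeroU32` (an assumption about salsa's internals, not its documented API), which is also why the forged value is 1 rather than 0. Roughly:

use core::num::NonZeroU32;

fn main() {
    // Assumption: salsa::InternId is a newtype over NonZeroU32, so a
    // transmuted 1u32 forges a valid id, while a transmuted 0u32 would
    // violate the non-zero niche and be immediate UB on its own.
    let forged: NonZeroU32 = unsafe { core::mem::transmute(1u32) };
    assert_eq!(forged.get(), 1);
}
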
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpanAnchor {
@@ -26,7 +34,7 @@ impl fmt::Debug for SpanAnchor {
}
}
impl tt::Span for SpanAnchor {
impl tt::SpanAnchor for SpanAnchor {
const DUMMY: Self = SpanAnchor { file_id: HirFileId(0), ast_id: ROOT_ERASED_FILE_AST_ID };
}

@@ -2,20 +2,30 @@ use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
use mbe::syntax_node_to_token_tree;
use syntax::{ast, AstNode};
use tt::Span;
use tt::{SpanAnchor, SyntaxContext};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct DummyFile;
impl Span for DummyFile {
impl SpanAnchor for DummyFile {
const DUMMY: Self = DummyFile;
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct DummyCtx;
impl SyntaxContext for DummyCtx {
const DUMMY: Self = DummyCtx;
}
fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
let tt = syntax_node_to_token_tree::<_, DummyCtx>(
tt.syntax(),
DummyFile,
0.into(),
&Default::default(),
);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}
@@ -23,7 +33,12 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
let tt = syntax_node_to_token_tree::<_, DummyCtx>(
tt.syntax(),
DummyFile,
0.into(),
&Default::default(),
);
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
@@ -32,7 +47,12 @@ fn check_dnf(input: &str, expect: Expect) {
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
let tt = syntax_node_to_token_tree::<_, DummyCtx>(
tt.syntax(),
DummyFile,
0.into(),
&Default::default(),
);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -43,7 +63,12 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
let tt = syntax_node_to_token_tree::<_, DummyCtx>(
tt.syntax(),
DummyFile,
0.into(),
&Default::default(),
);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();

@@ -4,15 +4,25 @@
use base_db::span::SpanAnchor;
use mbe::syntax_node_to_token_tree;
use syntax::{ast, AstNode};
use tt::Span;
use tt::{SpanAnchor as _, SyntaxContext};
use crate::attr::{DocAtom, DocExpr};
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct DummyCtx;
impl SyntaxContext for DummyCtx {
const DUMMY: Self = DummyCtx;
}
fn assert_parse_result(input: &str, expected: DocExpr) {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt =
syntax_node_to_token_tree(tt.syntax(), SpanAnchor::DUMMY, 0.into(), &Default::default());
let tt = syntax_node_to_token_tree::<_, DummyCtx>(
tt.syntax(),
SpanAnchor::DUMMY,
0.into(),
&Default::default(),
);
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);
}

@@ -1,7 +1,7 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{fmt, ops};
use ::tt::Span;
use ::tt::SpanAnchor as _;
use base_db::{span::SpanAnchor, CrateId};
use cfg::CfgExpr;
use either::Either;

@@ -1,5 +1,6 @@
//! Builtin macro
use ::tt::Span;
use base_db::{
span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
AnchoredPath, Edition, FileId,
@@ -15,7 +16,7 @@ use syntax::{
use crate::{
db::ExpandDatabase,
name, quote,
tt::{self, Span},
tt::{self},
EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
};

@@ -1,8 +1,9 @@
//! Defines database & queries for macro expansion.
use ::tt::SyntaxContext;
use base_db::{
salsa,
span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
CrateId, Edition, SourceDatabase,
};
use either::Either;
@@ -15,11 +16,13 @@ use syntax::{
use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, SyntaxContext, SyntaxContextId,
ast_id_map::AstIdMap,
builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander,
hygiene::{self, HygieneFrame, SyntaxContextData},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, SpanMap,
};
/// Total limit on the number of tokens produced by any macro invocation.
@@ -89,7 +92,15 @@ pub trait ExpandDatabase: SourceDatabase {
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
#[salsa::interned]
fn intern_syntax_context(&self, ctx: SyntaxContext) -> SyntaxContextId;
fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
#[salsa::transparent]
#[salsa::invoke(hygiene::apply_mark)]
fn apply_mark(
&self,
ctxt: SyntaxContextData,
file_id: HirFileId,
transparency: hygiene::Transparency,
) -> SyntaxContextId;
/// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
@@ -225,6 +236,7 @@ pub fn expand_speculative(
.ranges_with_span(tt::SpanData {
range: token_to_map.text_range(),
anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::DUMMY,
})
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {

@@ -2,71 +2,92 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
use base_db::CrateId;
use db::TokenExpander;
use base_db::{span::SyntaxContextId, CrateId};
use either::Either;
use syntax::{
ast::{self, HasDocComments},
AstNode, SyntaxNode, TextRange, TextSize,
ast::{self},
TextRange,
};
use triomphe::Arc;
use crate::{
db::{self, ExpandDatabase},
db::ExpandDatabase,
name::{AsName, Name},
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, SpanMap,
HirFileId, InFile,
};
#[derive(Clone, Debug)]
pub struct Hygiene {
frames: Option<HygieneFrames>,
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContextData {
// FIXME: This might only need to be Option<MacroCallId>?
outer_expn: HirFileId,
outer_transparency: Transparency,
parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
opaque: SyntaxContextId,
/// This context, but with all transparent expansions filtered away.
opaque_and_semitransparent: SyntaxContextId,
/// Name of the crate to which `$crate` with this context would resolve.
dollar_crate_name: Name,
}
/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
pub enum Transparency {
/// Identifier produced by a transparent expansion is always resolved at call-site.
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
Transparent,
/// Identifier produced by a semi-transparent expansion may be resolved
/// either at call-site or at definition-site.
/// If it's a local variable, label or `$crate` then it's resolved at def-site.
/// Otherwise it's resolved at call-site.
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
/// but that's an implementation detail.
SemiTransparent,
/// Identifier produced by an opaque expansion is always resolved at definition-site.
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
}
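
The doc comments above map directly onto observable `macro_rules!` behavior; a small self-contained illustration of the semi-transparent case (plain Rust, not code from this diff):

macro_rules! declare {
    () => {
        // SemiTransparent: this `hidden` belongs to the macro's def-site.
        let hidden = 1;
        let _ = hidden;
    };
}

fn main() {
    declare!();
    // `hidden` is not in scope here; uncommenting the next line fails to
    // compile, because locals from the expansion resolve at the def-site.
    // let _ = hidden;
}
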
pub(super) fn apply_mark(
_db: &dyn ExpandDatabase,
_ctxt: SyntaxContextData,
_file_id: HirFileId,
_transparency: Transparency,
) -> SyntaxContextId {
_db.intern_syntax_context(_ctxt)
}
// pub(super) fn with_ctxt_from_mark(db: &ExpandDatabase, file_id: HirFileId) {
// self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
// }
// pub(super) fn with_call_site_ctxt(db: &ExpandDatabase, file_id: HirFileId) {
// self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
// }
#[derive(Clone, Debug)]
pub struct Hygiene {}
impl Hygiene {
pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
pub fn new(_: &dyn ExpandDatabase, _: HirFileId) -> Hygiene {
Hygiene {}
}
pub fn new_unhygienic() -> Hygiene {
Hygiene { frames: None }
Hygiene {}
}
// FIXME: this should just return name
pub fn name_ref_to_name(
&self,
db: &dyn ExpandDatabase,
_: &dyn ExpandDatabase,
name_ref: ast::NameRef,
) -> Either<Name, CrateId> {
if let Some(frames) = &self.frames {
if name_ref.text() == "$crate" {
if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
return Either::Right(krate);
}
}
}
Either::Left(name_ref.as_name())
}
pub fn local_inner_macros(&self, _db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
let mut _token = path.syntax().first_token()?.text_range();
let frames = self.frames.as_ref()?;
let mut _current = &frames.0;
// FIXME: Hygiene ...
return None;
// loop {
// let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
// if origin == Origin::Def {
// return if current.local_inner {
// frames.root_crate(db, path.syntax())
// } else {
// None
// };
// }
// current = current.call_site.as_ref()?;
// token = mapped.value;
// }
pub fn local_inner_macros(&self, _: &dyn ExpandDatabase, _: ast::Path) -> Option<CrateId> {
None
}
}
@@ -74,150 +95,19 @@ impl Hygiene {
struct HygieneFrames(Arc<HygieneFrame>);
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct HygieneFrame {
expansion: Option<HygieneInfo>,
// Indicate this is a local inner macro
local_inner: bool,
krate: Option<CrateId>,
call_site: Option<Arc<HygieneFrame>>,
def_site: Option<Arc<HygieneFrame>>,
}
impl HygieneFrames {
fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}
fn root_crate(&self, _db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut _token = node.first_token()?.text_range();
let mut _result = self.0.krate;
let mut _current = self.0.clone();
return None;
// while let Some((mapped, origin)) =
// current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
// {
// result = current.krate;
// let site = match origin {
// Origin::Def => &current.def_site,
// Origin::Call => &current.call_site,
// };
// let site = match site {
// None => break,
// Some(it) => it,
// };
// current = site.clone();
// token = mapped.value;
// }
// result
}
}
pub struct HygieneFrame {}
#[derive(Debug, Clone, PartialEq, Eq)]
struct HygieneInfo {
file: MacroFile,
/// The start offset of the `macro_rules!` arguments or attribute input.
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
macro_def: TokenExpander,
macro_arg: Arc<crate::tt::Subtree>,
exp_map: Arc<SpanMap>,
}
struct HygieneInfo {}
impl HygieneInfo {
fn _map_ident_up(
&self,
_db: &dyn ExpandDatabase,
_token: TextRange,
) -> Option<InFile<TextRange>> {
// self.exp_map.token_by_range(token).map(|span| InFile::new(span.anchor, span.range))
fn _map_ident_up(&self, _: &dyn ExpandDatabase, _: TextRange) -> Option<InFile<TextRange>> {
None
}
}
fn make_hygiene_info(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
loc: &MacroCallLoc,
) -> HygieneInfo {
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let tt = ast_id
.to_node(db)
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
});
HygieneInfo {
file: macro_file,
attr_input_or_mac_def_start: attr_input_or_mac_def
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
macro_arg,
macro_def,
exp_map,
}
}
impl HygieneFrame {
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
let (info, krate, local_inner) = match file_id.macro_file() {
None => (None, None, false),
Some(macro_file) => {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
match loc.def.kind {
MacroDefKind::Declarative(_) => {
(info, Some(loc.def.krate), loc.def.local_inner)
}
MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
MacroDefKind::BuiltInAttr(..) => (info, None, false),
MacroDefKind::BuiltInDerive(..) => (info, None, false),
MacroDefKind::BuiltInEager(..) => (info, None, false),
MacroDefKind::ProcMacro(..) => (info, None, false),
}
}
};
let Some((info, calling_file)) = info else {
return HygieneFrame {
expansion: None,
local_inner,
krate,
call_site: None,
def_site: None,
};
};
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
let call_site = Some(db.hygiene_frame(calling_file));
HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
pub(crate) fn new(_: &dyn ExpandDatabase, _: HirFileId) -> HygieneFrame {
HygieneFrame {}
}
}

@@ -51,7 +51,7 @@ pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;
pub mod tt {
pub use base_db::span::SpanData;
pub use tt::{DelimiterKind, Spacing, Span};
pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor};
pub type Delimiter = ::tt::Delimiter<SpanData>;
pub type Subtree = ::tt::Subtree<SpanData>;
@@ -97,44 +97,6 @@ impl fmt::Display for ExpandError {
}
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SyntaxContextId(base_db::salsa::InternId);
base_db::impl_intern_key!(SyntaxContextId);
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContext {
outer_expn: HirFileId,
outer_transparency: Transparency,
parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
opaque: SyntaxContextId,
/// This context, but with all transparent expansions filtered away.
opaque_and_semitransparent: SyntaxContextId,
/// Name of the crate to which `$crate` with this context would resolve.
dollar_crate_name: name::Name,
}
/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
pub enum Transparency {
/// Identifier produced by a transparent expansion is always resolved at call-site.
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
Transparent,
/// Identifier produced by a semi-transparent expansion may be resolved
/// either at call-site or at definition-site.
/// If it's a local variable, label or `$crate` then it's resolved at def-site.
/// Otherwise it's resolved at call-site.
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
/// but that's an implementation detail.
SemiTransparent,
/// Identifier produced by an opaque expansion is always resolved at definition-site.
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub def: MacroDefId,

@@ -247,8 +247,8 @@ mod tests {
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:?}");
expect![[r#"
SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } } SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } }
IDENT hello SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } }"#]].assert_eq(&t);
SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) }
IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
}
#[test]

@@ -6,19 +6,27 @@ use syntax::{
AstNode, SmolStr,
};
use test_utils::{bench, bench_fixture, skip_slow_tests};
use tt::{Span, SpanData};
use tt::{Span, SpanAnchor, SyntaxContext};
use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator},
syntax_node_to_token_tree, DeclarativeMacro,
};
type SpanData = tt::SpanData<DummyFile, DummyCtx>;
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
struct DummyFile;
impl Span for DummyFile {
impl SpanAnchor for DummyFile {
const DUMMY: Self = DummyFile;
}
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
struct DummyCtx;
impl SyntaxContext for DummyCtx {
const DUMMY: Self = DummyCtx;
}
#[test]
fn benchmark_parse_macro_rules() {
if skip_slow_tests() {
@@ -54,14 +62,14 @@ fn benchmark_expand_macro_rules() {
assert_eq!(hash, 69413);
}
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData<DummyFile>>> {
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData>> {
macro_rules_fixtures_tt()
.into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect()
}
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData<DummyFile>>> {
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@@ -84,8 +92,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData<DummyFile
/// Generate random invocation fixtures from rules
fn invocation_fixtures(
rules: &FxHashMap<String, DeclarativeMacro<SpanData<DummyFile>>>,
) -> Vec<(String, tt::Subtree<SpanData<DummyFile>>)> {
rules: &FxHashMap<String, DeclarativeMacro<SpanData>>,
) -> Vec<(String, tt::Subtree<SpanData>)> {
let mut seed = 123456789;
let mut res = Vec::new();
@@ -130,11 +138,7 @@ fn invocation_fixtures(
}
return res;
fn collect_from_op(
op: &Op<SpanData<DummyFile>>,
parent: &mut tt::Subtree<SpanData<DummyFile>>,
seed: &mut usize,
) {
fn collect_from_op(op: &Op<SpanData>, parent: &mut tt::Subtree<SpanData>, seed: &mut usize) {
return match op {
Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -220,20 +224,20 @@ fn invocation_fixtures(
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
fn make_ident(ident: &str) -> tt::TokenTree<SpanData<DummyFile>> {
fn make_ident(ident: &str) -> tt::TokenTree<SpanData> {
tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into()
}
fn make_punct(char: char) -> tt::TokenTree<SpanData<DummyFile>> {
fn make_punct(char: char) -> tt::TokenTree<SpanData> {
tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone })
.into()
}
fn make_literal(lit: &str) -> tt::TokenTree<SpanData<DummyFile>> {
fn make_literal(lit: &str) -> tt::TokenTree<SpanData> {
tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into()
}
fn make_subtree(
kind: tt::DelimiterKind,
token_trees: Option<Vec<tt::TokenTree<SpanData<DummyFile>>>>,
) -> tt::TokenTree<SpanData<DummyFile>> {
token_trees: Option<Vec<tt::TokenTree<SpanData>>>,
) -> tt::TokenTree<SpanData> {
tt::Subtree {
delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind },
token_trees: token_trees.unwrap_or_default(),

@@ -9,7 +9,7 @@ use syntax::{
};
use tt::{
buffer::{Cursor, TokenBuffer},
Span, SpanData,
Span, SpanData, SyntaxContext,
};
use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
@@ -23,33 +23,37 @@ mod tests;
/// to relative spans, relative to the passed anchor.
/// `map` is used to resolve the converted spans accordingly.
/// TODO: Flesh out the doc comment more thoroughly
pub fn syntax_node_to_token_tree<SpanAnchor: Copy>(
pub fn syntax_node_to_token_tree<Anchor, Ctx>(
node: &SyntaxNode,
anchor: SpanAnchor,
anchor: Anchor,
anchor_offset: TextSize,
map: &TokenMap<SpanData<SpanAnchor>>,
) -> tt::Subtree<SpanData<SpanAnchor>>
map: &TokenMap<SpanData<Anchor, Ctx>>,
) -> tt::Subtree<SpanData<Anchor, Ctx>>
where
SpanData<SpanAnchor>: Span,
SpanData<Anchor, Ctx>: Span,
Anchor: Copy,
Ctx: SyntaxContext,
{
assert!(anchor_offset <= node.text_range().start());
let mut c = Converter::new(node, anchor_offset, anchor, vec![], map);
convert_tokens(&mut c)
let mut c = Converter::new(node, anchor_offset, vec![], map);
convert_tokens(&mut c, anchor)
}
pub fn syntax_node_to_token_tree_censored<SpanAnchor: Copy>(
pub fn syntax_node_to_token_tree_censored<Anchor, Ctx>(
node: &SyntaxNode,
anchor: SpanAnchor,
anchor: Anchor,
anchor_offset: TextSize,
map: &TokenMap<SpanData<SpanAnchor>>,
map: &TokenMap<SpanData<Anchor, Ctx>>,
censored: Vec<SyntaxNode>,
) -> tt::Subtree<SpanData<SpanAnchor>>
) -> tt::Subtree<SpanData<Anchor, Ctx>>
where
SpanData<SpanAnchor>: Span,
SpanData<Anchor, Ctx>: Span,
Anchor: Copy,
Ctx: SyntaxContext,
{
assert!(anchor_offset <= node.text_range().start());
let mut c = Converter::new(node, anchor_offset, anchor, censored, map);
convert_tokens(&mut c)
let mut c = Converter::new(node, anchor_offset, censored, map);
convert_tokens(&mut c, anchor)
}
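
Because `Ctx` no longer appears anywhere in the argument list, callers now have to name it explicitly, which is why the test updates earlier in this diff all grow a turbofish. A sketch of a call, reusing the `DummyFile`/`DummyCtx` test impls from the cfg test hunk:

// Hypothetical call site; `node`, `DummyFile` and `DummyCtx` as in the tests above.
let tree = syntax_node_to_token_tree::<_, DummyCtx>(
    node,                // &SyntaxNode
    DummyFile,           // anchor
    0.into(),            // anchor offset
    &Default::default(), // empty TokenMap
);
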
// The following items are what `rustc` macro can be parsed into :
@@ -64,12 +68,14 @@ where
// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
pub fn token_tree_to_syntax_node<SpanAnchor: Copy>(
tt: &tt::Subtree<SpanData<SpanAnchor>>,
pub fn token_tree_to_syntax_node<Anchor, Ctx>(
tt: &tt::Subtree<SpanData<Anchor, Ctx>>,
entry_point: parser::TopEntryPoint,
) -> (Parse<SyntaxNode>, TokenMap<SpanData<SpanAnchor>>)
) -> (Parse<SyntaxNode>, TokenMap<SpanData<Anchor, Ctx>>)
where
SpanData<SpanAnchor>: Span,
SpanData<Anchor, Ctx>: Span,
Anchor: Copy,
Ctx: SyntaxContext,
{
let buffer = match tt {
tt::Subtree {
@@ -97,36 +103,42 @@ where
tree_sink.finish()
}
pub fn map_from_syntax_node<SpanAnchor>(
pub fn map_from_syntax_node<Anchor, Ctx>(
node: &SyntaxNode,
anchor: SpanAnchor,
anchor: Anchor,
anchor_offset: TextSize,
) -> TokenMap<SpanData<SpanAnchor>>
) -> TokenMap<SpanData<Anchor, Ctx>>
where
SpanAnchor: Copy,
SpanData<SpanAnchor>: Span,
Anchor: Copy,
SpanData<Anchor, Ctx>: Span,
Ctx: SyntaxContext,
{
let mut map = TokenMap::default();
node.descendants_with_tokens().filter_map(NodeOrToken::into_token).for_each(|t| {
map.insert(t.text_range(), SpanData { range: t.text_range() - anchor_offset, anchor });
map.insert(
t.text_range(),
SpanData { range: t.text_range() - anchor_offset, anchor, ctx: Ctx::DUMMY },
);
});
map
}
/// Convert a string to a `TokenTree`
pub fn parse_to_token_tree<SpanAnchor: Copy>(
pub fn parse_to_token_tree<Anchor, Ctx>(
text: &str,
file_id: SpanAnchor,
) -> Option<tt::Subtree<SpanData<SpanAnchor>>>
anchor: Anchor,
) -> Option<tt::Subtree<SpanData<Anchor, Ctx>>>
where
SpanData<SpanAnchor>: Span,
SpanData<Anchor, Ctx>: Span,
Anchor: Copy,
Ctx: SyntaxContext,
{
let lexed = parser::LexedStr::new(text);
if lexed.errors().next().is_some() {
return None;
}
let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default(), file_id };
Some(convert_tokens(&mut conv))
let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default() };
Some(convert_tokens(&mut conv, anchor))
}
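
Same pattern for the reworked `parse_to_token_tree`: the anchor is now threaded through `convert_tokens` instead of being stored on `RawConverter`, and `Ctx` again comes from the call site. A sketch with the dummy test types:

// Hypothetical call; DummyFile/DummyCtx are the test-only impls from the hunks above.
let tree: tt::Subtree<SpanData<DummyFile, DummyCtx>> =
    parse_to_token_tree("struct S;", DummyFile).expect("text lexed cleanly");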
/// Split token tree with separate expr: $($e:expr)SEP*
@@ -166,51 +178,52 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
res
}
fn convert_tokens<SpanAnchor, C: TokenConverter<SpanAnchor>>(
fn convert_tokens<Anchor, Ctx, C>(
conv: &mut C,
) -> tt::Subtree<SpanData<SpanAnchor>>
anchor: Anchor,
) -> tt::Subtree<SpanData<Anchor, Ctx>>
where
SpanData<SpanAnchor>: Span,
SpanAnchor: Copy,
C: TokenConverter<Anchor, Ctx>,
Ctx: SyntaxContext,
SpanData<Anchor, Ctx>: Span,
Anchor: Copy,
{
let entry = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] };
let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] };
let mut stack = NonEmptyVec::new(entry);
let anchor = conv.anchor();
loop {
let subtree = stack.last_mut();
let result = &mut subtree.token_trees;
let Some((token, rel_range, abs_range)) = conv.bump() else { break };
while let Some((token, rel_range, abs_range)) = conv.bump() {
let tt::Subtree { delimiter, token_trees: result } = stack.last_mut();
let mk_dummy_span = || SpanData { range: rel_range, anchor, ctx: Ctx::DUMMY };
let kind = token.kind(conv);
if kind == COMMENT {
if let Some(tokens) = conv.convert_doc_comment(
&token,
conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }),
) {
let tt = match kind {
// Desugar doc comments into doc attributes
COMMENT => {
let span = conv.span_for(abs_range).unwrap_or_else(mk_dummy_span);
if let Some(tokens) = conv.convert_doc_comment(&token, span) {
result.extend(tokens);
}
continue;
}
let tt = if kind.is_punct() && kind != UNDERSCORE {
let expected = match subtree.delimiter.kind {
_ if kind.is_punct() && kind != UNDERSCORE => {
let expected = match delimiter.kind {
tt::DelimiterKind::Parenthesis => Some(T![')']),
tt::DelimiterKind::Brace => Some(T!['}']),
tt::DelimiterKind::Bracket => Some(T![']']),
tt::DelimiterKind::Invisible => None,
};
if let Some(expected) = expected {
if kind == expected {
// Current token is a closing delimiter that we expect, fix up the closing span
// and end the subtree here
if matches!(expected, Some(expected) if expected == kind) {
if let Some(mut subtree) = stack.pop() {
subtree.delimiter.close = conv
.span_for(abs_range)
.unwrap_or(SpanData { range: rel_range, anchor });
subtree.delimiter.close =
conv.span_for(abs_range).unwrap_or_else(mk_dummy_span);
stack.last_mut().token_trees.push(subtree.into());
}
continue;
}
}
let delim = match kind {
T!['('] => Some(tt::DelimiterKind::Parenthesis),
@@ -219,19 +232,17 @@ where
_ => None,
};
// Start a new subtree
if let Some(kind) = delim {
let subtree = tt::Subtree {
stack.push(tt::Subtree {
delimiter: tt::Delimiter {
// FIXME: Open and close spans
open: conv
.span_for(abs_range)
.unwrap_or(SpanData { range: rel_range, anchor }),
close: Span::DUMMY,
open: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span),
// will be overwritten on subtree close above
close: mk_dummy_span(),
kind,
},
token_trees: vec![],
};
stack.push(subtree);
});
continue;
}
@@ -239,25 +250,21 @@ where
Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
_ => tt::Spacing::Alone,
};
let char = match token.to_char(conv) {
Some(c) => c,
None => {
panic!("Token from lexer must be single char: token = {token:#?}");
}
let Some(char) = token.to_char(conv) else {
panic!("Token from lexer must be single char: token = {token:#?}")
};
tt::Leaf::from(tt::Punct {
char,
spacing,
span: conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }),
span: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span),
})
.into()
} else {
}
_ => {
macro_rules! make_leaf {
($i:ident) => {
tt::$i {
span: conv
.span_for(abs_range)
.unwrap_or(SpanData { range: rel_range, anchor }),
span: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span),
text: token.to_text(conv),
}
.into()
@@ -276,15 +283,25 @@ where
let apostrophe = tt::Leaf::from(tt::Punct {
char: '\'',
spacing: tt::Spacing::Joint,
span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }),
span: conv.span_for(abs_range).unwrap_or(SpanData {
range: r,
anchor,
ctx: Ctx::DUMMY,
}),
});
result.push(apostrophe.into());
let r =
TextRange::at(rel_range.start() + char_unit, rel_range.len() - char_unit);
let r = TextRange::at(
rel_range.start() + char_unit,
rel_range.len() - char_unit,
);
let ident = tt::Leaf::from(tt::Ident {
text: SmolStr::new(&token.to_text(conv)[1..]),
span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }),
span: conv.span_for(abs_range).unwrap_or(SpanData {
range: r,
anchor,
ctx: Ctx::DUMMY,
}),
});
result.push(ident.into());
continue;
@@ -293,7 +310,9 @@ where
};
leaf.into()
}
};
result.push(tt);
}
@@ -417,11 +436,10 @@ fn convert_doc_comment<S: Copy>(
}
/// A raw token (straight from lexer) converter
struct RawConverter<'a, SpanAnchor> {
struct RawConverter<'a> {
lexed: parser::LexedStr<'a>,
pos: usize,
_offset: TextSize,
file_id: SpanAnchor,
}
trait SrcToken<Ctx>: std::fmt::Debug {
@@ -432,48 +450,47 @@ trait SrcToken<Ctx>: std::fmt::Debug {
fn to_text(&self, ctx: &Ctx) -> SmolStr;
}
trait TokenConverter<SpanAnchor>: Sized {
trait TokenConverter<Anchor, Ctx>: Sized {
type Token: SrcToken<Self>;
fn convert_doc_comment(
&self,
token: &Self::Token,
span: SpanData<SpanAnchor>,
) -> Option<Vec<tt::TokenTree<SpanData<SpanAnchor>>>>;
span: SpanData<Anchor, Ctx>,
) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>>;
fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)>;
fn peek(&self) -> Option<Self::Token>;
fn anchor(&self) -> SpanAnchor;
fn span_for(&self, range: TextRange) -> Option<SpanData<SpanAnchor>>;
fn span_for(&self, range: TextRange) -> Option<SpanData<Anchor, Ctx>>;
}
impl<SpanAnchor> SrcToken<RawConverter<'_, SpanAnchor>> for usize {
fn kind(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SyntaxKind {
impl SrcToken<RawConverter<'_>> for usize {
fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
fn to_char(&self, ctx: &RawConverter<'_, SpanAnchor>) -> Option<char> {
fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
fn to_text(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SmolStr {
fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
ctx.lexed.text(*self).into()
}
}
impl<SpanAnchor: Copy> TokenConverter<SpanAnchor> for RawConverter<'_, SpanAnchor>
impl<Anchor: Copy, Ctx> TokenConverter<Anchor, Ctx> for RawConverter<'_>
where
SpanData<SpanAnchor>: Span,
SpanData<Anchor, Ctx>: Span,
{
type Token = usize;
fn convert_doc_comment(
&self,
&token: &usize,
span: SpanData<SpanAnchor>,
) -> Option<Vec<tt::TokenTree<SpanData<SpanAnchor>>>> {
span: SpanData<Anchor, Ctx>,
) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span)
}
@@ -497,34 +514,29 @@ where
Some(self.pos)
}
fn anchor(&self) -> SpanAnchor {
self.file_id
}
fn span_for(&self, _: TextRange) -> Option<SpanData<SpanAnchor>> {
fn span_for(&self, _: TextRange) -> Option<SpanData<Anchor, Ctx>> {
None
}
}
struct Converter<'a, SpanAnchor> {
struct Converter<'a, Anchor, Ctx> {
current: Option<SyntaxToken>,
preorder: PreorderWithTokens,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
/// Used to make the emitted text ranges in the spans relative to the span anchor.
offset: TextSize,
file_id: SpanAnchor,
map: &'a TokenMap<SpanData<SpanAnchor>>,
map: &'a TokenMap<SpanData<Anchor, Ctx>>,
censored: Vec<SyntaxNode>,
}
impl<'a, SpanAnchor> Converter<'a, SpanAnchor> {
impl<'a, Anchor, Ctx> Converter<'a, Anchor, Ctx> {
fn new(
node: &SyntaxNode,
anchor_offset: TextSize,
file_id: SpanAnchor,
censored: Vec<SyntaxNode>,
map: &'a TokenMap<SpanData<SpanAnchor>>,
) -> Converter<'a, SpanAnchor> {
map: &'a TokenMap<SpanData<Anchor, Ctx>>,
) -> Self {
let range = node.text_range();
let mut preorder = node.preorder_with_tokens();
let first = Self::next_token(&mut preorder, &censored);
@@ -534,7 +546,6 @@ impl<'a, SpanAnchor> Converter<'a, SpanAnchor> {
range,
punct_offset: None,
offset: anchor_offset,
file_id,
censored,
map,
}
@@ -569,36 +580,36 @@ impl SynToken {
}
}
impl<SpanAnchor> SrcToken<Converter<'_, SpanAnchor>> for SynToken {
fn kind(&self, ctx: &Converter<'_, SpanAnchor>) -> SyntaxKind {
impl<Anchor, Ctx> SrcToken<Converter<'_, Anchor, Ctx>> for SynToken {
fn kind(&self, ctx: &Converter<'_, Anchor, Ctx>) -> SyntaxKind {
match self {
SynToken::Ordinary(token) => token.kind(),
SynToken::Punct(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
}
}
fn to_char(&self, _ctx: &Converter<'_, SpanAnchor>) -> Option<char> {
fn to_char(&self, _ctx: &Converter<'_, Anchor, Ctx>) -> Option<char> {
match self {
SynToken::Ordinary(_) => None,
SynToken::Punct(it, i) => it.text().chars().nth(*i),
}
}
fn to_text(&self, _ctx: &Converter<'_, SpanAnchor>) -> SmolStr {
fn to_text(&self, _ctx: &Converter<'_, Anchor, Ctx>) -> SmolStr {
match self {
SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(),
}
}
}
impl<SpanAnchor: Copy> TokenConverter<SpanAnchor> for Converter<'_, SpanAnchor>
impl<Anchor: Copy, Ctx> TokenConverter<Anchor, Ctx> for Converter<'_, Anchor, Ctx>
where
SpanData<SpanAnchor>: Span,
SpanData<Anchor, Ctx>: Span,
{
type Token = SynToken;
fn convert_doc_comment(
&self,
token: &Self::Token,
span: SpanData<SpanAnchor>,
) -> Option<Vec<tt::TokenTree<SpanData<SpanAnchor>>>> {
span: SpanData<Anchor, Ctx>,
) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
convert_doc_comment(token.token(), span)
}
@@ -657,27 +668,24 @@ where
Some(token)
}
fn anchor(&self) -> SpanAnchor {
self.file_id
}
fn span_for(&self, range: TextRange) -> Option<SpanData<SpanAnchor>> {
fn span_for(&self, range: TextRange) -> Option<SpanData<Anchor, Ctx>> {
self.map.span_for_range(range)
}
}
struct TtTreeSink<'a, SpanAnchor> {
struct TtTreeSink<'a, Anchor, Ctx> {
buf: String,
cursor: Cursor<'a, SpanData<SpanAnchor>>,
cursor: Cursor<'a, SpanData<Anchor, Ctx>>,
text_pos: TextSize,
inner: SyntaxTreeBuilder,
token_map: TokenMap<SpanData<SpanAnchor>>,
token_map: TokenMap<SpanData<Anchor, Ctx>>,
}
impl<'a, SpanAnchor> TtTreeSink<'a, SpanAnchor>
impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx>
where
SpanData<SpanAnchor>: Span,
SpanData<Anchor, Ctx>: Span,
{
fn new(cursor: Cursor<'a, SpanData<SpanAnchor>>) -> Self {
fn new(cursor: Cursor<'a, SpanData<Anchor, Ctx>>) -> Self {
TtTreeSink {
buf: String::new(),
cursor,
@@ -687,7 +695,7 @@ where
}
}
fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap<SpanData<SpanAnchor>>) {
fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap<SpanData<Anchor, Ctx>>) {
self.token_map.shrink_to_fit();
(self.inner.finish(), self.token_map)
}
@@ -705,9 +713,9 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
impl<SpanAnchor> TtTreeSink<'_, SpanAnchor>
impl<Anchor, Ctx> TtTreeSink<'_, Anchor, Ctx>
where
SpanData<SpanAnchor>: Span,
SpanData<Anchor, Ctx>: Span,
{
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.

@@ -4,20 +4,26 @@ use syntax::{ast, AstNode};
use test_utils::extract_annotations;
use tt::{
buffer::{TokenBuffer, TokenTreeRef},
Leaf, Punct, Spacing, Span,
Leaf, Punct, Spacing, SpanAnchor, SyntaxContext,
};
use crate::syntax_bridge::SpanData;
use super::syntax_node_to_token_tree;
fn check_punct_spacing(fixture: &str) {
type SpanData = tt::SpanData<DummyFile, DummyCtx>;
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
struct DummyFile;
impl Span for DummyFile {
impl SpanAnchor for DummyFile {
const DUMMY: Self = DummyFile;
}
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
struct DummyCtx;
impl SyntaxContext for DummyCtx {
const DUMMY: Self = DummyCtx;
}
let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
let subtree =
syntax_node_to_token_tree(source_file.syntax(), DummyFile, 0.into(), &Default::default());

@@ -120,11 +120,13 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
Ok(())
}
/*
#[cfg(test)]
mod tests {
use tt::{
Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Span, Subtree, TokenId,
TokenTree,
Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, SpanAnchor, Subtree,
TokenId, TokenTree,
};
use super::*;
@@ -176,3 +178,4 @@ mod tests {
assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION));
}
}
*/

@@ -39,7 +39,7 @@ use std::collections::{HashMap, VecDeque};
use serde::{Deserialize, Serialize};
use text_size::TextRange;
use tt::Span;
use tt::{Span, SyntaxContext};
use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS};
@@ -47,25 +47,30 @@ pub trait SerializableSpan<const L: usize>: Span {
fn into_u32(self) -> [u32; L];
fn from_u32(input: [u32; L]) -> Self;
}
impl SerializableSpan<1> for tt::TokenId {
fn into_u32(self) -> [u32; 1] {
[self.0]
}
fn from_u32([input]: [u32; 1]) -> Self {
tt::TokenId(input)
}
}
// impl SerializableSpan<1> for tt::TokenId {
// fn into_u32(self) -> [u32; 1] {
// [self.0]
// }
// fn from_u32([input]: [u32; 1]) -> Self {
// tt::TokenId(input)
// }
// }
impl<FileId> SerializableSpan<3> for tt::SpanData<FileId>
impl<Anchor, Ctx> SerializableSpan<3> for tt::SpanData<Anchor, Ctx>
where
FileId: From<u32> + Into<u32>,
Anchor: From<u32> + Into<u32>,
Self: Span,
Ctx: SyntaxContext,
{
fn into_u32(self) -> [u32; 3] {
[self.anchor.into(), self.range.start().into(), self.range.end().into()]
}
fn from_u32([file_id, start, end]: [u32; 3]) -> Self {
tt::SpanData { anchor: file_id.into(), range: TextRange::new(start.into(), end.into()) }
tt::SpanData {
anchor: file_id.into(),
range: TextRange::new(start.into(), end.into()),
ctx: Ctx::DUMMY,
}
}
}
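
Worth noting: the three-word encoding covers only the anchor and the range; the syntax context is not serialized, and `from_u32` restores it as `Ctx::DUMMY`. A round-trip sketch under that reading:

// Hypothetical helper; spans survive the trip except for their context.
fn roundtrip<Anchor, Ctx>(span: tt::SpanData<Anchor, Ctx>) -> tt::SpanData<Anchor, Ctx>
where
    tt::SpanData<Anchor, Ctx>: SerializableSpan<3>,
{
    let [anchor, start, end] = span.into_u32();
    // from_u32 rebuilds anchor and range but fixes ctx to Ctx::DUMMY.
    <tt::SpanData<Anchor, Ctx> as SerializableSpan<3>>::from_u32([anchor, start, end])
}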

@@ -210,7 +210,7 @@ mod tests {
use cfg::CfgExpr;
use hir::HirFileId;
use ide_db::base_db::span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID};
use ide_db::base_db::span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use mbe::syntax_node_to_token_tree;
use syntax::{
ast::{self, AstNode},
@@ -221,7 +221,7 @@
let cfg_expr = {
let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
let tt = syntax_node_to_token_tree::<_, SyntaxContextId>(
tt.syntax(),
SpanAnchor { file_id: HirFileId::from(0), ast_id: ROOT_ERASED_FILE_AST_ID },
TextSize::new(0),

@@ -32,27 +32,37 @@ impl TokenId {
Self::UNSPECIFIED
}
}
impl Span for TokenId {
const DUMMY: Self = TokenId(!0);
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct SpanData<Anchor> {
pub struct SpanData<Anchor, Ctx> {
/// The text range of this span, relative to the anchor.
/// We need the anchor for incrementality, as storing absolute ranges will require
/// recomputation on every change in a file at all times.
pub range: TextRange,
pub anchor: Anchor,
/// The syntax context of the span.
pub ctx: Ctx,
}
impl<Anchor: Span> Span for SpanData<Anchor> {
const DUMMY: Self =
SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY };
impl<Anchor: SpanAnchor, Ctx: SyntaxContext> Span for SpanData<Anchor, Ctx> {
const DUMMY: Self = SpanData {
range: TextRange::empty(TextSize::new(0)),
anchor: Anchor::DUMMY,
ctx: Ctx::DUMMY,
};
}
pub trait SpanAnchor: std::fmt::Debug + Copy + Sized + Eq {
const DUMMY: Self;
}
pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
const DUMMY: Self;
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SyntaxContext(pub u32);
pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq {
const DUMMY: Self;
}
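
These three traits are exactly what the `DummyFile`/`DummyCtx` pairs in the test hunks plug into; a minimal conforming pair (hypothetical names) looks like this:

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct MyAnchor;
impl SpanAnchor for MyAnchor {
    const DUMMY: Self = MyAnchor;
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct MyCtx;
impl SyntaxContext for MyCtx {
    const DUMMY: Self = MyCtx;
}

// With both in place, SpanData<MyAnchor, MyCtx> gets Span via the impl above.
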
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TokenTree<S> {