mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-26 21:13:37 +00:00

Proper span representation with syntax context

parent 890eb17b4e
commit e36b3f7b8c
16 changed files with 414 additions and 470 deletions
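The heart of the change is visible in the first hunks below: a span is no longer a bare text range, it carries an anchor (a file plus an AST node inside it) and a syntax context for hygiene. As a rough, self-contained sketch of that shape, using simplified stand-ins (the real types are generic and live in the `tt` and `base-db` crates):

    // Simplified model of the new span representation; illustrative only,
    // not the commit's actual definitions.
    struct FileId(u32);          // a file known to the VFS
    struct ErasedFileAstId(u32); // an AST node within that file

    // Spans are anchored to an AST node rather than to raw file offsets,
    // so unrelated edits in the same file do not invalidate them.
    struct SpanAnchor {
        file_id: FileId,
        ast_id: ErasedFileAstId,
    }

    struct SyntaxContextId(u32); // interned hygiene context (a salsa InternId in the diff)

    struct SpanData {
        range: (u32, u32), // text range relative to the anchor
        anchor: SpanAnchor,
        ctx: SyntaxContextId,
    }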
@@ -1,6 +1,7 @@
 use std::fmt;

 use salsa::InternId;
+use tt::SyntaxContext;
 use vfs::FileId;

 pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
@@ -9,10 +10,17 @@ pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
 pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
     la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));

-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
-pub struct SyntaxContext;
-
-pub type SpanData = tt::SpanData<SpanAnchor>;
+pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct SyntaxContextId(InternId);
+crate::impl_intern_key!(SyntaxContextId);
+
+impl SyntaxContext for SyntaxContextId {
+    // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
+    // currently (which kind of makes sense but we need it here!)
+    const DUMMY: Self = SyntaxContextId(unsafe { core::mem::transmute(1) });
+}

 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
 pub struct SpanAnchor {
@@ -26,7 +34,7 @@ impl fmt::Debug for SpanAnchor {
 }
 }

-impl tt::Span for SpanAnchor {
+impl tt::SpanAnchor for SpanAnchor {
     const DUMMY: Self = SpanAnchor { file_id: HirFileId(0), ast_id: ROOT_ERASED_FILE_AST_ID };
 }

@@ -2,20 +2,30 @@ use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
 use mbe::syntax_node_to_token_tree;
 use syntax::{ast, AstNode};
-use tt::Span;
+use tt::{SpanAnchor, SyntaxContext};

 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};

 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 struct DummyFile;
-impl Span for DummyFile {
+impl SpanAnchor for DummyFile {
     const DUMMY: Self = DummyFile;
 }
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+struct DummyCtx;
+impl SyntaxContext for DummyCtx {
+    const DUMMY: Self = DummyCtx;
+}

 fn assert_parse_result(input: &str, expected: CfgExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
+    let tt = syntax_node_to_token_tree::<_, DummyCtx>(
+        tt.syntax(),
+        DummyFile,
+        0.into(),
+        &Default::default(),
+    );
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }
@@ -23,7 +33,12 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
 fn check_dnf(input: &str, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
+    let tt = syntax_node_to_token_tree::<_, DummyCtx>(
+        tt.syntax(),
+        DummyFile,
+        0.into(),
+        &Default::default(),
+    );
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
     expect.assert_eq(&actual);
@@ -32,7 +47,12 @@ fn check_dnf(input: &str, expect: Expect) {
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
+    let tt = syntax_node_to_token_tree::<_, DummyCtx>(
+        tt.syntax(),
+        DummyFile,
+        0.into(),
+        &Default::default(),
+    );
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -43,7 +63,12 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default());
+    let tt = syntax_node_to_token_tree::<_, DummyCtx>(
+        tt.syntax(),
+        DummyFile,
+        0.into(),
+        &Default::default(),
+    );
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();

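One pattern recurs through the commit's test changes: tests that do not care about hygiene still have to fill the new context slot, so each test module defines a unit type and implements the corresponding `tt` trait with a `DUMMY` constant, then names that type at the call site with a turbofish (the context type appears in no argument position, so it cannot be inferred). Condensed from the fixtures above:

    use tt::{SpanAnchor, SyntaxContext};

    // Unit types suffice: the tests need a well-formed span, not a meaningful one.
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    struct DummyFile;
    impl SpanAnchor for DummyFile {
        const DUMMY: Self = DummyFile;
    }

    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    struct DummyCtx;
    impl SyntaxContext for DummyCtx {
        const DUMMY: Self = DummyCtx;
    }

    // Call sites then read:
    // syntax_node_to_token_tree::<_, DummyCtx>(node, DummyFile, 0.into(), &map)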
@@ -4,15 +4,25 @@
 use base_db::span::SpanAnchor;
 use mbe::syntax_node_to_token_tree;
 use syntax::{ast, AstNode};
-use tt::Span;
+use tt::{SpanAnchor as _, SyntaxContext};

 use crate::attr::{DocAtom, DocExpr};

+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+struct DummyCtx;
+impl SyntaxContext for DummyCtx {
+    const DUMMY: Self = DummyCtx;
+}
+
 fn assert_parse_result(input: &str, expected: DocExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt =
-        syntax_node_to_token_tree(tt.syntax(), SpanAnchor::DUMMY, 0.into(), &Default::default());
+    let tt = syntax_node_to_token_tree::<_, DummyCtx>(
+        tt.syntax(),
+        SpanAnchor::DUMMY,
+        0.into(),
+        &Default::default(),
+    );
     let cfg = DocExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }

@@ -1,7 +1,7 @@
 //! A higher level attributes based on TokenTree, with also some shortcuts.
 use std::{fmt, ops};

-use ::tt::Span;
+use ::tt::SpanAnchor as _;
 use base_db::{span::SpanAnchor, CrateId};
 use cfg::CfgExpr;
 use either::Either;

@@ -1,5 +1,6 @@
 //! Builtin macro

+use ::tt::Span;
 use base_db::{
     span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
     AnchoredPath, Edition, FileId,
@@ -15,7 +16,7 @@ use syntax::{
 use crate::{
     db::ExpandDatabase,
     name, quote,
-    tt::{self, Span},
+    tt::{self},
     EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
 };

@@ -1,8 +1,9 @@
 //! Defines database & queries for macro expansion.

+use ::tt::SyntaxContext;
 use base_db::{
     salsa,
-    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
     CrateId, Edition, SourceDatabase,
 };
 use either::Either;
@@ -15,11 +16,13 @@ use syntax::{
 use triomphe::Arc;

 use crate::{
-    ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
-    builtin_fn_macro::EagerExpander, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
-    BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
-    ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
-    MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, SyntaxContext, SyntaxContextId,
+    ast_id_map::AstIdMap,
+    builtin_attr_macro::pseudo_derive_attr_expansion,
+    builtin_fn_macro::EagerExpander,
+    hygiene::{self, HygieneFrame, SyntaxContextData},
+    tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
+    ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
+    MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, SpanMap,
 };

 /// Total limit on the number of tokens produced by any macro invocation.
@@ -89,7 +92,15 @@ pub trait ExpandDatabase: SourceDatabase {
     #[salsa::interned]
     fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
     #[salsa::interned]
-    fn intern_syntax_context(&self, ctx: SyntaxContext) -> SyntaxContextId;
+    fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
+    #[salsa::transparent]
+    #[salsa::invoke(hygiene::apply_mark)]
+    fn apply_mark(
+        &self,
+        ctxt: SyntaxContextData,
+        file_id: HirFileId,
+        transparency: hygiene::Transparency,
+    ) -> SyntaxContextId;

     /// Lowers syntactic macro call to a token tree representation. That's a firewall
     /// query, only typing in the macro call itself changes the returned
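Both new ids lean on salsa interning: handing the same `SyntaxContextData` to `intern_syntax_context` twice yields the same `SyntaxContextId`, and an id can be resolved back to its data. A hedged usage sketch, assuming salsa's generated `lookup_` companion (the same convention `lookup_intern_macro_call` follows elsewhere in this commit):

    // Sketch only; `data` stands for some fully populated SyntaxContextData.
    let id: SyntaxContextId = db.intern_syntax_context(data.clone());
    let id2 = db.intern_syntax_context(data.clone());
    assert_eq!(id, id2); // interning deduplicates
    let back: SyntaxContextData = db.lookup_intern_syntax_context(id);
    assert_eq!(back, data); // and is reversible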
@@ -225,6 +236,7 @@ pub fn expand_speculative(
         .ranges_with_span(tt::SpanData {
             range: token_to_map.text_range(),
             anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+            ctx: SyntaxContextId::DUMMY,
         })
         .filter_map(|range| syntax_node.covering_element(range).into_token())
        .min_by_key(|t| {
@@ -2,71 +2,92 @@
 //!
 //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
 //! this moment, this is horribly incomplete and handles only `$crate`.
-use base_db::CrateId;
-use db::TokenExpander;
+use base_db::{span::SyntaxContextId, CrateId};
 use either::Either;
 use syntax::{
-    ast::{self, HasDocComments},
-    AstNode, SyntaxNode, TextRange, TextSize,
+    ast::{self},
+    TextRange,
 };
 use triomphe::Arc;

 use crate::{
-    db::{self, ExpandDatabase},
+    db::ExpandDatabase,
     name::{AsName, Name},
-    HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, SpanMap,
+    HirFileId, InFile,
 };

-#[derive(Clone, Debug)]
-pub struct Hygiene {
-    frames: Option<HygieneFrames>,
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct SyntaxContextData {
+    // FIXME: This might only need to be Option<MacroCallId>?
+    outer_expn: HirFileId,
+    outer_transparency: Transparency,
+    parent: SyntaxContextId,
+    /// This context, but with all transparent and semi-transparent expansions filtered away.
+    opaque: SyntaxContextId,
+    /// This context, but with all transparent expansions filtered away.
+    opaque_and_semitransparent: SyntaxContextId,
+    /// Name of the crate to which `$crate` with this context would resolve.
+    dollar_crate_name: Name,
 }

+/// A property of a macro expansion that determines how identifiers
+/// produced by that expansion are resolved.
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
+pub enum Transparency {
+    /// Identifier produced by a transparent expansion is always resolved at call-site.
+    /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
+    Transparent,
+    /// Identifier produced by a semi-transparent expansion may be resolved
+    /// either at call-site or at definition-site.
+    /// If it's a local variable, label or `$crate` then it's resolved at def-site.
+    /// Otherwise it's resolved at call-site.
+    /// `macro_rules` macros behave like this, built-in macros currently behave like this too,
+    /// but that's an implementation detail.
+    SemiTransparent,
+    /// Identifier produced by an opaque expansion is always resolved at definition-site.
+    /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
+    Opaque,
+}
+
+pub(super) fn apply_mark(
+    _db: &dyn ExpandDatabase,
+    _ctxt: SyntaxContextData,
+    _file_id: HirFileId,
+    _transparency: Transparency,
+) -> SyntaxContextId {
+    _db.intern_syntax_context(_ctxt)
+}
+
+// pub(super) fn with_ctxt_from_mark(db: &ExpandDatabase, file_id: HirFileId) {
+//     self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
+// }
+// pub(super) fn with_call_site_ctxt(db: &ExpandDatabase, file_id: HirFileId) {
+//     self.with_ctxt_from_mark(expn_id, Transparency::Transparent)
+// }
+
+#[derive(Clone, Debug)]
+pub struct Hygiene {}
+
 impl Hygiene {
-    pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
-        Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
+    pub fn new(_: &dyn ExpandDatabase, _: HirFileId) -> Hygiene {
+        Hygiene {}
     }

     pub fn new_unhygienic() -> Hygiene {
-        Hygiene { frames: None }
+        Hygiene {}
     }

     // FIXME: this should just return name
     pub fn name_ref_to_name(
         &self,
-        db: &dyn ExpandDatabase,
+        _: &dyn ExpandDatabase,
         name_ref: ast::NameRef,
     ) -> Either<Name, CrateId> {
-        if let Some(frames) = &self.frames {
-            if name_ref.text() == "$crate" {
-                if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
-                    return Either::Right(krate);
-                }
-            }
-        }
-
         Either::Left(name_ref.as_name())
     }

-    pub fn local_inner_macros(&self, _db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
-        let mut _token = path.syntax().first_token()?.text_range();
-        let frames = self.frames.as_ref()?;
-        let mut _current = &frames.0;
-
-        // FIXME: Hygiene ...
-        return None;
-        // loop {
-        //     let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
-        //     if origin == Origin::Def {
-        //         return if current.local_inner {
-        //             frames.root_crate(db, path.syntax())
-        //         } else {
-        //             None
-        //         };
-        //     }
-        //     current = current.call_site.as_ref()?;
-        //     token = mapped.value;
-        // }
+    pub fn local_inner_macros(&self, _: &dyn ExpandDatabase, _: ast::Path) -> Option<CrateId> {
+        None
     }
 }

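The `Transparency` doc comments in the hunk above compress rustc's hygiene rules into three cases. As a standalone illustration of the decision they describe (explanatory code, not part of the commit):

    #[derive(Clone, Copy)]
    enum Transparency { Transparent, SemiTransparent, Opaque }

    /// Does an identifier produced under `t` resolve at the macro's definition site?
    fn resolves_at_def_site(t: Transparency, is_local_var_label_or_dollar_crate: bool) -> bool {
        match t {
            Transparency::Transparent => false, // always call-site
            Transparency::SemiTransparent => is_local_var_label_or_dollar_crate, // `macro_rules` behavior
            Transparency::Opaque => true, // always def-site
        }
    }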
@@ -74,150 +95,19 @@ impl Hygiene {
 struct HygieneFrames(Arc<HygieneFrame>);

 #[derive(Clone, Debug, Eq, PartialEq)]
-pub struct HygieneFrame {
-    expansion: Option<HygieneInfo>,
-
-    // Indicate this is a local inner macro
-    local_inner: bool,
-    krate: Option<CrateId>,
-
-    call_site: Option<Arc<HygieneFrame>>,
-    def_site: Option<Arc<HygieneFrame>>,
-}
-
-impl HygieneFrames {
-    fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
-        // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
-        // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
-        HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
-    }
-
-    fn root_crate(&self, _db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
-        let mut _token = node.first_token()?.text_range();
-        let mut _result = self.0.krate;
-        let mut _current = self.0.clone();
-
-        return None;
-
-        // while let Some((mapped, origin)) =
-        //     current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
-        // {
-        //     result = current.krate;
-
-        //     let site = match origin {
-        //         Origin::Def => &current.def_site,
-        //         Origin::Call => &current.call_site,
-        //     };
-
-        //     let site = match site {
-        //         None => break,
-        //         Some(it) => it,
-        //     };
-
-        //     current = site.clone();
-        //     token = mapped.value;
-        // }
-
-        // result
-    }
-}
+pub struct HygieneFrame {}

 #[derive(Debug, Clone, PartialEq, Eq)]
-struct HygieneInfo {
-    file: MacroFile,
-    /// The start offset of the `macro_rules!` arguments or attribute input.
-    attr_input_or_mac_def_start: Option<InFile<TextSize>>,
-
-    macro_def: TokenExpander,
-    macro_arg: Arc<crate::tt::Subtree>,
-    exp_map: Arc<SpanMap>,
-}
+struct HygieneInfo {}

 impl HygieneInfo {
-    fn _map_ident_up(
-        &self,
-        _db: &dyn ExpandDatabase,
-        _token: TextRange,
-    ) -> Option<InFile<TextRange>> {
-        // self.exp_map.token_by_range(token).map(|span| InFile::new(span.anchor, span.range))
+    fn _map_ident_up(&self, _: &dyn ExpandDatabase, _: TextRange) -> Option<InFile<TextRange>> {
         None
     }
 }

-fn make_hygiene_info(
-    db: &dyn ExpandDatabase,
-    macro_file: MacroFile,
-    loc: &MacroCallLoc,
-) -> HygieneInfo {
-    let def = loc.def.ast_id().left().and_then(|id| {
-        let def_tt = match id.to_node(db) {
-            ast::Macro::MacroRules(mac) => mac.token_tree()?,
-            ast::Macro::MacroDef(mac) => mac.body()?,
-        };
-        Some(InFile::new(id.file_id, def_tt))
-    });
-    let attr_input_or_mac_def = def.or_else(|| match loc.kind {
-        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
-            let tt = ast_id
-                .to_node(db)
-                .doc_comments_and_attrs()
-                .nth(invoc_attr_index.ast_index())
-                .and_then(Either::left)?
-                .token_tree()?;
-            Some(InFile::new(ast_id.file_id, tt))
-        }
-        _ => None,
-    });
-
-    let macro_def = db.macro_expander(loc.def);
-    let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
-    let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
-        Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
-    });
-
-    HygieneInfo {
-        file: macro_file,
-        attr_input_or_mac_def_start: attr_input_or_mac_def
-            .map(|it| it.map(|tt| tt.syntax().text_range().start())),
-        macro_arg,
-        macro_def,
-        exp_map,
-    }
-}
-
 impl HygieneFrame {
-    pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
-        let (info, krate, local_inner) = match file_id.macro_file() {
-            None => (None, None, false),
-            Some(macro_file) => {
-                let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
-                match loc.def.kind {
-                    MacroDefKind::Declarative(_) => {
-                        (info, Some(loc.def.krate), loc.def.local_inner)
-                    }
-                    MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
-                    MacroDefKind::BuiltInAttr(..) => (info, None, false),
-                    MacroDefKind::BuiltInDerive(..) => (info, None, false),
-                    MacroDefKind::BuiltInEager(..) => (info, None, false),
-                    MacroDefKind::ProcMacro(..) => (info, None, false),
-                }
-            }
-        };
-
-        let Some((info, calling_file)) = info else {
-            return HygieneFrame {
-                expansion: None,
-                local_inner,
-                krate,
-                call_site: None,
-                def_site: None,
-            };
-        };
-
-        let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
-        let call_site = Some(db.hygiene_frame(calling_file));
-
-        HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
+    pub(crate) fn new(_: &dyn ExpandDatabase, _: HirFileId) -> HygieneFrame {
+        HygieneFrame {}
     }
 }

@@ -51,7 +51,7 @@ pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;

 pub mod tt {
     pub use base_db::span::SpanData;
-    pub use tt::{DelimiterKind, Spacing, Span};
+    pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor};

     pub type Delimiter = ::tt::Delimiter<SpanData>;
     pub type Subtree = ::tt::Subtree<SpanData>;
@@ -97,44 +97,6 @@ impl fmt::Display for ExpandError {
     }
 }

-/// `MacroCallId` identifies a particular macro invocation, like
-/// `println!("Hello, {}", world)`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct SyntaxContextId(base_db::salsa::InternId);
-base_db::impl_intern_key!(SyntaxContextId);
-
-#[derive(Debug, Clone, Hash, PartialEq, Eq)]
-pub struct SyntaxContext {
-    outer_expn: HirFileId,
-    outer_transparency: Transparency,
-    parent: SyntaxContextId,
-    /// This context, but with all transparent and semi-transparent expansions filtered away.
-    opaque: SyntaxContextId,
-    /// This context, but with all transparent expansions filtered away.
-    opaque_and_semitransparent: SyntaxContextId,
-    /// Name of the crate to which `$crate` with this context would resolve.
-    dollar_crate_name: name::Name,
-}
-
-/// A property of a macro expansion that determines how identifiers
-/// produced by that expansion are resolved.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
-pub enum Transparency {
-    /// Identifier produced by a transparent expansion is always resolved at call-site.
-    /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
-    Transparent,
-    /// Identifier produced by a semi-transparent expansion may be resolved
-    /// either at call-site or at definition-site.
-    /// If it's a local variable, label or `$crate` then it's resolved at def-site.
-    /// Otherwise it's resolved at call-site.
-    /// `macro_rules` macros behave like this, built-in macros currently behave like this too,
-    /// but that's an implementation detail.
-    SemiTransparent,
-    /// Identifier produced by an opaque expansion is always resolved at definition-site.
-    /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
-    Opaque,
-}
-
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroCallLoc {
     pub def: MacroDefId,

@@ -247,8 +247,8 @@ mod tests {
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{quoted:?}");
         expect![[r#"
-SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } } SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } }
-  IDENT hello SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::<RustLanguage>>(0) } }"#]].assert_eq(&t);
+SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) }
+  IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
     }

     #[test]

@@ -6,19 +6,27 @@ use syntax::{
     AstNode, SmolStr,
 };
 use test_utils::{bench, bench_fixture, skip_slow_tests};
-use tt::{Span, SpanData};
+use tt::{Span, SpanAnchor, SyntaxContext};

 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
     syntax_node_to_token_tree, DeclarativeMacro,
 };

+type SpanData = tt::SpanData<DummyFile, DummyCtx>;
+
 #[derive(PartialEq, Eq, Clone, Copy, Debug)]
 struct DummyFile;
-impl Span for DummyFile {
+impl SpanAnchor for DummyFile {
     const DUMMY: Self = DummyFile;
 }

+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+struct DummyCtx;
+impl SyntaxContext for DummyCtx {
+    const DUMMY: Self = DummyCtx;
+}
+
 #[test]
 fn benchmark_parse_macro_rules() {
     if skip_slow_tests() {
@@ -54,14 +62,14 @@ fn benchmark_expand_macro_rules() {
     assert_eq!(hash, 69413);
 }

-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData<DummyFile>>> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData>> {
     macro_rules_fixtures_tt()
         .into_iter()
         .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
         .collect()
 }

-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData<DummyFile>>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
     let fixture = bench_fixture::numerous_macro_rules();
     let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();

@@ -84,8 +92,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData<DummyFile

 /// Generate random invocation fixtures from rules
 fn invocation_fixtures(
-    rules: &FxHashMap<String, DeclarativeMacro<SpanData<DummyFile>>>,
-) -> Vec<(String, tt::Subtree<SpanData<DummyFile>>)> {
+    rules: &FxHashMap<String, DeclarativeMacro<SpanData>>,
+) -> Vec<(String, tt::Subtree<SpanData>)> {
     let mut seed = 123456789;
     let mut res = Vec::new();

@@ -130,11 +138,7 @@ fn invocation_fixtures(
     }
     return res;

-    fn collect_from_op(
-        op: &Op<SpanData<DummyFile>>,
-        parent: &mut tt::Subtree<SpanData<DummyFile>>,
-        seed: &mut usize,
-    ) {
+    fn collect_from_op(op: &Op<SpanData>, parent: &mut tt::Subtree<SpanData>, seed: &mut usize) {
         return match op {
             Op::Var { kind, .. } => match kind.as_ref() {
                 Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -220,20 +224,20 @@ fn invocation_fixtures(
         *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
         *seed
     }
-    fn make_ident(ident: &str) -> tt::TokenTree<SpanData<DummyFile>> {
+    fn make_ident(ident: &str) -> tt::TokenTree<SpanData> {
         tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into()
     }
-    fn make_punct(char: char) -> tt::TokenTree<SpanData<DummyFile>> {
+    fn make_punct(char: char) -> tt::TokenTree<SpanData> {
         tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone })
             .into()
     }
-    fn make_literal(lit: &str) -> tt::TokenTree<SpanData<DummyFile>> {
+    fn make_literal(lit: &str) -> tt::TokenTree<SpanData> {
         tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into()
     }
     fn make_subtree(
         kind: tt::DelimiterKind,
-        token_trees: Option<Vec<tt::TokenTree<SpanData<DummyFile>>>>,
-    ) -> tt::TokenTree<SpanData<DummyFile>> {
+        token_trees: Option<Vec<tt::TokenTree<SpanData>>>,
+    ) -> tt::TokenTree<SpanData> {
         tt::Subtree {
             delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind },
             token_trees: token_trees.unwrap_or_default(),

@@ -9,7 +9,7 @@ use syntax::{
 };
 use tt::{
     buffer::{Cursor, TokenBuffer},
-    Span, SpanData,
+    Span, SpanData, SyntaxContext,
 };

 use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
@@ -23,33 +23,37 @@ mod tests;
 /// to relative spans, relative to the passed anchor.
 /// `map` is used to resolve the converted spans accordingly.
 /// TODO: Flesh out the doc comment more thoroughly
-pub fn syntax_node_to_token_tree<SpanAnchor: Copy>(
+pub fn syntax_node_to_token_tree<Anchor, Ctx>(
     node: &SyntaxNode,
-    anchor: SpanAnchor,
+    anchor: Anchor,
     anchor_offset: TextSize,
-    map: &TokenMap<SpanData<SpanAnchor>>,
-) -> tt::Subtree<SpanData<SpanAnchor>>
+    map: &TokenMap<SpanData<Anchor, Ctx>>,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
 where
-    SpanData<SpanAnchor>: Span,
+    SpanData<Anchor, Ctx>: Span,
+    Anchor: Copy,
+    Ctx: SyntaxContext,
 {
     assert!(anchor_offset <= node.text_range().start());
-    let mut c = Converter::new(node, anchor_offset, anchor, vec![], map);
-    convert_tokens(&mut c)
+    let mut c = Converter::new(node, anchor_offset, vec![], map);
+    convert_tokens(&mut c, anchor)
 }

-pub fn syntax_node_to_token_tree_censored<SpanAnchor: Copy>(
+pub fn syntax_node_to_token_tree_censored<Anchor, Ctx>(
     node: &SyntaxNode,
-    anchor: SpanAnchor,
+    anchor: Anchor,
     anchor_offset: TextSize,
-    map: &TokenMap<SpanData<SpanAnchor>>,
+    map: &TokenMap<SpanData<Anchor, Ctx>>,
     censored: Vec<SyntaxNode>,
-) -> tt::Subtree<SpanData<SpanAnchor>>
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
 where
-    SpanData<SpanAnchor>: Span,
+    SpanData<Anchor, Ctx>: Span,
+    Anchor: Copy,
+    Ctx: SyntaxContext,
 {
     assert!(anchor_offset <= node.text_range().start());
-    let mut c = Converter::new(node, anchor_offset, anchor, censored, map);
-    convert_tokens(&mut c)
+    let mut c = Converter::new(node, anchor_offset, censored, map);
+    convert_tokens(&mut c, anchor)
 }

 // The following items are what `rustc` macro can be parsed into :
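Two details of the hunk above repeat throughout this file: the converter no longer owns the anchor (it is threaded into `convert_tokens` as a plain argument instead), and the trait bounds constrain the composed type, `SpanData<Anchor, Ctx>: Span`, rather than the two parameters separately. A minimal, compilable model of that bound style, with every name local to the sketch:

    trait Span { const DUMMY: Self; }
    trait SyntaxContext { const DUMMY: Self; }

    #[derive(Clone, Copy)]
    struct SpanData<Anchor, Ctx> { anchor: Anchor, ctx: Ctx }

    fn dummy_span<Anchor, Ctx>(anchor: Anchor) -> SpanData<Anchor, Ctx>
    where
        SpanData<Anchor, Ctx>: Span, // the composed span type is what must act as a Span
        Anchor: Copy,                // anchors are small ids, freely copied
        Ctx: SyntaxContext,          // supplies the DUMMY context for unmapped tokens
    {
        SpanData { anchor, ctx: Ctx::DUMMY }
    }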
@@ -64,12 +68,14 @@ where
 // * AssocItems(SmallVec<[ast::AssocItem; 1]>)
 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>

-pub fn token_tree_to_syntax_node<SpanAnchor: Copy>(
-    tt: &tt::Subtree<SpanData<SpanAnchor>>,
+pub fn token_tree_to_syntax_node<Anchor, Ctx>(
+    tt: &tt::Subtree<SpanData<Anchor, Ctx>>,
     entry_point: parser::TopEntryPoint,
-) -> (Parse<SyntaxNode>, TokenMap<SpanData<SpanAnchor>>)
+) -> (Parse<SyntaxNode>, TokenMap<SpanData<Anchor, Ctx>>)
 where
-    SpanData<SpanAnchor>: Span,
+    SpanData<Anchor, Ctx>: Span,
+    Anchor: Copy,
+    Ctx: SyntaxContext,
 {
     let buffer = match tt {
         tt::Subtree {
@@ -97,36 +103,42 @@ where
     tree_sink.finish()
 }

-pub fn map_from_syntax_node<SpanAnchor>(
+pub fn map_from_syntax_node<Anchor, Ctx>(
     node: &SyntaxNode,
-    anchor: SpanAnchor,
+    anchor: Anchor,
     anchor_offset: TextSize,
-) -> TokenMap<SpanData<SpanAnchor>>
+) -> TokenMap<SpanData<Anchor, Ctx>>
 where
-    SpanAnchor: Copy,
-    SpanData<SpanAnchor>: Span,
+    Anchor: Copy,
+    SpanData<Anchor, Ctx>: Span,
+    Ctx: SyntaxContext,
 {
     let mut map = TokenMap::default();
     node.descendants_with_tokens().filter_map(NodeOrToken::into_token).for_each(|t| {
-        map.insert(t.text_range(), SpanData { range: t.text_range() - anchor_offset, anchor });
+        map.insert(
+            t.text_range(),
+            SpanData { range: t.text_range() - anchor_offset, anchor, ctx: Ctx::DUMMY },
+        );
     });
     map
 }

 /// Convert a string to a `TokenTree`
-pub fn parse_to_token_tree<SpanAnchor: Copy>(
+pub fn parse_to_token_tree<Anchor, Ctx>(
     text: &str,
-    file_id: SpanAnchor,
-) -> Option<tt::Subtree<SpanData<SpanAnchor>>>
+    anchor: Anchor,
+) -> Option<tt::Subtree<SpanData<Anchor, Ctx>>>
 where
-    SpanData<SpanAnchor>: Span,
+    SpanData<Anchor, Ctx>: Span,
+    Anchor: Copy,
+    Ctx: SyntaxContext,
 {
     let lexed = parser::LexedStr::new(text);
     if lexed.errors().next().is_some() {
         return None;
     }
-    let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default(), file_id };
-    Some(convert_tokens(&mut conv))
+    let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default() };
+    Some(convert_tokens(&mut conv, anchor))
 }

 /// Split token tree with separate expr: $($e:expr)SEP*
@@ -166,134 +178,141 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
     res
 }

-fn convert_tokens<SpanAnchor, C: TokenConverter<SpanAnchor>>(
+fn convert_tokens<Anchor, Ctx, C>(
     conv: &mut C,
-) -> tt::Subtree<SpanData<SpanAnchor>>
+    anchor: Anchor,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
 where
-    SpanData<SpanAnchor>: Span,
-    SpanAnchor: Copy,
+    C: TokenConverter<Anchor, Ctx>,
+    Ctx: SyntaxContext,
+    SpanData<Anchor, Ctx>: Span,
+    Anchor: Copy,
 {
-    let entry = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] };
+    let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] };
     let mut stack = NonEmptyVec::new(entry);
-    let anchor = conv.anchor();

-    loop {
-        let subtree = stack.last_mut();
-        let result = &mut subtree.token_trees;
-        let Some((token, rel_range, abs_range)) = conv.bump() else { break };
+    while let Some((token, rel_range, abs_range)) = conv.bump() {
+        let tt::Subtree { delimiter, token_trees: result } = stack.last_mut();
+        let mk_dummy_span = || SpanData { range: rel_range, anchor, ctx: Ctx::DUMMY };

         let kind = token.kind(conv);
-        if kind == COMMENT {
-            if let Some(tokens) = conv.convert_doc_comment(
-                &token,
-                conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }),
-            ) {
-                result.extend(tokens);
-            }
-            continue;
-        }
-        let tt = if kind.is_punct() && kind != UNDERSCORE {
-            let expected = match subtree.delimiter.kind {
-                tt::DelimiterKind::Parenthesis => Some(T![')']),
-                tt::DelimiterKind::Brace => Some(T!['}']),
-                tt::DelimiterKind::Bracket => Some(T![']']),
-                tt::DelimiterKind::Invisible => None,
-            };

-            if let Some(expected) = expected {
-                if kind == expected {
-                    if let Some(mut subtree) = stack.pop() {
-                        subtree.delimiter.close = conv
-                            .span_for(abs_range)
-                            .unwrap_or(SpanData { range: rel_range, anchor });
-                        stack.last_mut().token_trees.push(subtree.into());
-                    }
-                    continue;
-                }
-            }
-
-            let delim = match kind {
-                T!['('] => Some(tt::DelimiterKind::Parenthesis),
-                T!['{'] => Some(tt::DelimiterKind::Brace),
-                T!['['] => Some(tt::DelimiterKind::Bracket),
-                _ => None,
-            };
-
-            if let Some(kind) = delim {
-                let subtree = tt::Subtree {
-                    delimiter: tt::Delimiter {
-                        // FIXME: Open and close spans
-                        open: conv
-                            .span_for(abs_range)
-                            .unwrap_or(SpanData { range: rel_range, anchor }),
-                        close: Span::DUMMY,
-                        kind,
-                    },
-                    token_trees: vec![],
-                };
-                stack.push(subtree);
-                continue;
-            }
-
-            let spacing = match conv.peek().map(|next| next.kind(conv)) {
-                Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
-                _ => tt::Spacing::Alone,
-            };
-            let char = match token.to_char(conv) {
-                Some(c) => c,
-                None => {
-                    panic!("Token from lexer must be single char: token = {token:#?}");
-                }
-            };
-            tt::Leaf::from(tt::Punct {
-                char,
-                spacing,
-                span: conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }),
-            })
-            .into()
-        } else {
-            macro_rules! make_leaf {
-                ($i:ident) => {
-                    tt::$i {
-                        span: conv
-                            .span_for(abs_range)
-                            .unwrap_or(SpanData { range: rel_range, anchor }),
-                        text: token.to_text(conv),
-                    }
-                    .into()
-                };
-            }
-            let leaf: tt::Leaf<_> = match kind {
-                T![true] | T![false] => make_leaf!(Ident),
-                IDENT => make_leaf!(Ident),
-                UNDERSCORE => make_leaf!(Ident),
-                k if k.is_keyword() => make_leaf!(Ident),
-                k if k.is_literal() => make_leaf!(Literal),
-                // FIXME: Check whether span splitting works as intended
-                LIFETIME_IDENT => {
-                    let char_unit = TextSize::of('\'');
-                    let r = TextRange::at(rel_range.start(), char_unit);
-                    let apostrophe = tt::Leaf::from(tt::Punct {
-                        char: '\'',
-                        spacing: tt::Spacing::Joint,
-                        span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }),
-                    });
-                    result.push(apostrophe.into());
-
-                    let r =
-                        TextRange::at(rel_range.start() + char_unit, rel_range.len() - char_unit);
-                    let ident = tt::Leaf::from(tt::Ident {
-                        text: SmolStr::new(&token.to_text(conv)[1..]),
-                        span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }),
-                    });
-                    result.push(ident.into());
-                    continue;
-                }
-                _ => continue,
-            };
-
-            leaf.into()
-        };
+        let tt = match kind {
+            // Desugar doc comments into doc attributes
+            COMMENT => {
+                let span = conv.span_for(abs_range).unwrap_or_else(mk_dummy_span);
+                if let Some(tokens) = conv.convert_doc_comment(&token, span) {
+                    result.extend(tokens);
+                }
+                continue;
+            }
+            _ if kind.is_punct() && kind != UNDERSCORE => {
+                let expected = match delimiter.kind {
+                    tt::DelimiterKind::Parenthesis => Some(T![')']),
+                    tt::DelimiterKind::Brace => Some(T!['}']),
+                    tt::DelimiterKind::Bracket => Some(T![']']),
+                    tt::DelimiterKind::Invisible => None,
+                };
+
+                // Current token is a closing delimiter that we expect, fix up the closing span
+                // and end the subtree here
+                if matches!(expected, Some(expected) if expected == kind) {
+                    if let Some(mut subtree) = stack.pop() {
+                        subtree.delimiter.close =
+                            conv.span_for(abs_range).unwrap_or_else(mk_dummy_span);
+                        stack.last_mut().token_trees.push(subtree.into());
+                    }
+                    continue;
+                }
+
+                let delim = match kind {
+                    T!['('] => Some(tt::DelimiterKind::Parenthesis),
+                    T!['{'] => Some(tt::DelimiterKind::Brace),
+                    T!['['] => Some(tt::DelimiterKind::Bracket),
+                    _ => None,
+                };
+
+                // Start a new subtree
+                if let Some(kind) = delim {
+                    stack.push(tt::Subtree {
+                        delimiter: tt::Delimiter {
+                            open: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span),
+                            // will be overwritten on subtree close above
+                            close: mk_dummy_span(),
+                            kind,
+                        },
+                        token_trees: vec![],
+                    });
+                    continue;
+                }
+
+                let spacing = match conv.peek().map(|next| next.kind(conv)) {
+                    Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
+                    _ => tt::Spacing::Alone,
+                };
+                let Some(char) = token.to_char(conv) else {
+                    panic!("Token from lexer must be single char: token = {token:#?}")
+                };
+                tt::Leaf::from(tt::Punct {
+                    char,
+                    spacing,
+                    span: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span),
+                })
+                .into()
+            }
+            _ => {
+                macro_rules! make_leaf {
+                    ($i:ident) => {
+                        tt::$i {
+                            span: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span),
+                            text: token.to_text(conv),
+                        }
+                        .into()
+                    };
+                }
+                let leaf: tt::Leaf<_> = match kind {
+                    T![true] | T![false] => make_leaf!(Ident),
+                    IDENT => make_leaf!(Ident),
+                    UNDERSCORE => make_leaf!(Ident),
+                    k if k.is_keyword() => make_leaf!(Ident),
+                    k if k.is_literal() => make_leaf!(Literal),
+                    // FIXME: Check whether span splitting works as intended
+                    LIFETIME_IDENT => {
+                        let char_unit = TextSize::of('\'');
+                        let r = TextRange::at(rel_range.start(), char_unit);
+                        let apostrophe = tt::Leaf::from(tt::Punct {
+                            char: '\'',
+                            spacing: tt::Spacing::Joint,
+                            span: conv.span_for(abs_range).unwrap_or(SpanData {
+                                range: r,
+                                anchor,
+                                ctx: Ctx::DUMMY,
+                            }),
+                        });
+                        result.push(apostrophe.into());
+
+                        let r = TextRange::at(
+                            rel_range.start() + char_unit,
+                            rel_range.len() - char_unit,
+                        );
+                        let ident = tt::Leaf::from(tt::Ident {
+                            text: SmolStr::new(&token.to_text(conv)[1..]),
+                            span: conv.span_for(abs_range).unwrap_or(SpanData {
+                                range: r,
+                                anchor,
+                                ctx: Ctx::DUMMY,
+                            }),
+                        });
+                        result.push(ident.into());
+                        continue;
+                    }
+                    _ => continue,
+                };
+
+                leaf.into()
+            }
+        };

         result.push(tt);
     }

@@ -417,11 +436,10 @@ fn convert_doc_comment<S: Copy>(
 }

 /// A raw token (straight from lexer) converter
-struct RawConverter<'a, SpanAnchor> {
+struct RawConverter<'a> {
     lexed: parser::LexedStr<'a>,
     pos: usize,
     _offset: TextSize,
-    file_id: SpanAnchor,
 }

 trait SrcToken<Ctx>: std::fmt::Debug {
@@ -432,48 +450,47 @@ trait SrcToken<Ctx>: std::fmt::Debug {
     fn to_text(&self, ctx: &Ctx) -> SmolStr;
 }

-trait TokenConverter<SpanAnchor>: Sized {
+trait TokenConverter<Anchor, Ctx>: Sized {
     type Token: SrcToken<Self>;

     fn convert_doc_comment(
         &self,
         token: &Self::Token,
-        span: SpanData<SpanAnchor>,
-    ) -> Option<Vec<tt::TokenTree<SpanData<SpanAnchor>>>>;
+        span: SpanData<Anchor, Ctx>,
+    ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>>;

     fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)>;

     fn peek(&self) -> Option<Self::Token>;

-    fn anchor(&self) -> SpanAnchor;
-    fn span_for(&self, range: TextRange) -> Option<SpanData<SpanAnchor>>;
+    fn span_for(&self, range: TextRange) -> Option<SpanData<Anchor, Ctx>>;
 }

-impl<SpanAnchor> SrcToken<RawConverter<'_, SpanAnchor>> for usize {
-    fn kind(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SyntaxKind {
+impl SrcToken<RawConverter<'_>> for usize {
+    fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
         ctx.lexed.kind(*self)
     }

-    fn to_char(&self, ctx: &RawConverter<'_, SpanAnchor>) -> Option<char> {
+    fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
         ctx.lexed.text(*self).chars().next()
     }

-    fn to_text(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SmolStr {
+    fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
         ctx.lexed.text(*self).into()
     }
 }

-impl<SpanAnchor: Copy> TokenConverter<SpanAnchor> for RawConverter<'_, SpanAnchor>
+impl<Anchor: Copy, Ctx> TokenConverter<Anchor, Ctx> for RawConverter<'_>
 where
-    SpanData<SpanAnchor>: Span,
+    SpanData<Anchor, Ctx>: Span,
 {
     type Token = usize;

     fn convert_doc_comment(
         &self,
         &token: &usize,
-        span: SpanData<SpanAnchor>,
-    ) -> Option<Vec<tt::TokenTree<SpanData<SpanAnchor>>>> {
+        span: SpanData<Anchor, Ctx>,
+    ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
         let text = self.lexed.text(token);
         convert_doc_comment(&doc_comment(text), span)
     }
@@ -497,34 +514,29 @@ where
         Some(self.pos)
     }

-    fn anchor(&self) -> SpanAnchor {
-        self.file_id
-    }
-    fn span_for(&self, _: TextRange) -> Option<SpanData<SpanAnchor>> {
+    fn span_for(&self, _: TextRange) -> Option<SpanData<Anchor, Ctx>> {
         None
     }
 }

-struct Converter<'a, SpanAnchor> {
+struct Converter<'a, Anchor, Ctx> {
     current: Option<SyntaxToken>,
     preorder: PreorderWithTokens,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
     /// Used to make the emitted text ranges in the spans relative to the span anchor.
     offset: TextSize,
-    file_id: SpanAnchor,
-    map: &'a TokenMap<SpanData<SpanAnchor>>,
+    map: &'a TokenMap<SpanData<Anchor, Ctx>>,
     censored: Vec<SyntaxNode>,
 }

-impl<'a, SpanAnchor> Converter<'a, SpanAnchor> {
+impl<'a, Anchor, Ctx> Converter<'a, Anchor, Ctx> {
     fn new(
         node: &SyntaxNode,
         anchor_offset: TextSize,
-        file_id: SpanAnchor,
         censored: Vec<SyntaxNode>,
-        map: &'a TokenMap<SpanData<SpanAnchor>>,
-    ) -> Converter<'a, SpanAnchor> {
+        map: &'a TokenMap<SpanData<Anchor, Ctx>>,
+    ) -> Self {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
         let first = Self::next_token(&mut preorder, &censored);
@@ -534,7 +546,6 @@ impl<'a, Anchor, Ctx> Converter<'a, Anchor, Ctx> {
             range,
             punct_offset: None,
             offset: anchor_offset,
-            file_id,
             censored,
             map,
         }
@@ -569,36 +580,36 @@ impl SynToken {
     }
 }

-impl<SpanAnchor> SrcToken<Converter<'_, SpanAnchor>> for SynToken {
-    fn kind(&self, ctx: &Converter<'_, SpanAnchor>) -> SyntaxKind {
+impl<Anchor, Ctx> SrcToken<Converter<'_, Anchor, Ctx>> for SynToken {
+    fn kind(&self, ctx: &Converter<'_, Anchor, Ctx>) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
             SynToken::Punct(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
         }
     }
-    fn to_char(&self, _ctx: &Converter<'_, SpanAnchor>) -> Option<char> {
+    fn to_char(&self, _ctx: &Converter<'_, Anchor, Ctx>) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punct(it, i) => it.text().chars().nth(*i),
         }
     }
-    fn to_text(&self, _ctx: &Converter<'_, SpanAnchor>) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter<'_, Anchor, Ctx>) -> SmolStr {
         match self {
             SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(),
         }
     }
 }

-impl<SpanAnchor: Copy> TokenConverter<SpanAnchor> for Converter<'_, SpanAnchor>
+impl<Anchor: Copy, Ctx> TokenConverter<Anchor, Ctx> for Converter<'_, Anchor, Ctx>
 where
-    SpanData<SpanAnchor>: Span,
+    SpanData<Anchor, Ctx>: Span,
 {
     type Token = SynToken;
     fn convert_doc_comment(
         &self,
         token: &Self::Token,
-        span: SpanData<SpanAnchor>,
-    ) -> Option<Vec<tt::TokenTree<SpanData<SpanAnchor>>>> {
+        span: SpanData<Anchor, Ctx>,
+    ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
         convert_doc_comment(token.token(), span)
     }

@@ -657,27 +668,24 @@ where
         Some(token)
     }

-    fn anchor(&self) -> SpanAnchor {
-        self.file_id
-    }
-    fn span_for(&self, range: TextRange) -> Option<SpanData<SpanAnchor>> {
+    fn span_for(&self, range: TextRange) -> Option<SpanData<Anchor, Ctx>> {
         self.map.span_for_range(range)
     }
 }

-struct TtTreeSink<'a, SpanAnchor> {
+struct TtTreeSink<'a, Anchor, Ctx> {
     buf: String,
-    cursor: Cursor<'a, SpanData<SpanAnchor>>,
+    cursor: Cursor<'a, SpanData<Anchor, Ctx>>,
     text_pos: TextSize,
     inner: SyntaxTreeBuilder,
-    token_map: TokenMap<SpanData<SpanAnchor>>,
+    token_map: TokenMap<SpanData<Anchor, Ctx>>,
 }

-impl<'a, SpanAnchor> TtTreeSink<'a, SpanAnchor>
+impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx>
 where
-    SpanData<SpanAnchor>: Span,
+    SpanData<Anchor, Ctx>: Span,
 {
-    fn new(cursor: Cursor<'a, SpanData<SpanAnchor>>) -> Self {
+    fn new(cursor: Cursor<'a, SpanData<Anchor, Ctx>>) -> Self {
         TtTreeSink {
             buf: String::new(),
             cursor,
@ -687,7 +695,7 @@ where
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap<SpanData<SpanAnchor>>) {
|
fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap<SpanData<Anchor, Ctx>>) {
|
||||||
self.token_map.shrink_to_fit();
|
self.token_map.shrink_to_fit();
|
||||||
(self.inner.finish(), self.token_map)
|
(self.inner.finish(), self.token_map)
|
||||||
}
|
}
|
||||||
|
@ -705,9 +713,9 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
|
||||||
Some(&texts[idx..texts.len() - (1 - idx)])
|
Some(&texts[idx..texts.len() - (1 - idx)])
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<SpanAnchor> TtTreeSink<'_, SpanAnchor>
|
impl<Anchor, Ctx> TtTreeSink<'_, Anchor, Ctx>
|
||||||
where
|
where
|
||||||
SpanData<SpanAnchor>: Span,
|
SpanData<Anchor, Ctx>: Span,
|
||||||
{
|
{
|
||||||
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
|
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
|
||||||
/// This occurs when a float literal is used as a field access.
|
/// This occurs when a float literal is used as a field access.
|
||||||
|
|
|
@@ -4,20 +4,26 @@ use syntax::{ast, AstNode};
 use test_utils::extract_annotations;
 use tt::{
     buffer::{TokenBuffer, TokenTreeRef},
-    Leaf, Punct, Spacing, Span,
+    Leaf, Punct, Spacing, SpanAnchor, SyntaxContext,
 };

-use crate::syntax_bridge::SpanData;
-
 use super::syntax_node_to_token_tree;

 fn check_punct_spacing(fixture: &str) {
+    type SpanData = tt::SpanData<DummyFile, DummyCtx>;
+
     #[derive(PartialEq, Eq, Clone, Copy, Debug)]
     struct DummyFile;
-    impl Span for DummyFile {
+    impl SpanAnchor for DummyFile {
         const DUMMY: Self = DummyFile;
     }

+    #[derive(PartialEq, Eq, Clone, Copy, Debug)]
+    struct DummyCtx;
+    impl SyntaxContext for DummyCtx {
+        const DUMMY: Self = DummyCtx;
+    }
+
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
     let subtree =
         syntax_node_to_token_tree(source_file.syntax(), DummyFile, 0.into(), &Default::default());
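The `DUMMY` constants these test-only types provide are not incidental: the generic `Span` impl for `SpanData` (further down in this diff) assembles its own placeholder span from them in const context. A dependency-free sketch of that composition, with all type names illustrative rather than the real API:

```rust
// Illustrative, dependency-free reconstruction of the DUMMY plumbing.
trait SpanAnchor: Copy {
    const DUMMY: Self;
}
trait SyntaxContext: Copy {
    const DUMMY: Self;
}
trait Span: Copy {
    const DUMMY: Self;
}

#[derive(Clone, Copy, Debug)]
struct SpanData<A, C> {
    range: (u32, u32),
    anchor: A,
    ctx: C,
}

// The span's placeholder is assembled from its pieces' placeholders.
impl<A: SpanAnchor, C: SyntaxContext> Span for SpanData<A, C> {
    const DUMMY: Self = SpanData { range: (0, 0), anchor: A::DUMMY, ctx: C::DUMMY };
}

#[derive(Clone, Copy, Debug)]
struct DummyFile;
impl SpanAnchor for DummyFile {
    const DUMMY: Self = DummyFile;
}

#[derive(Clone, Copy, Debug)]
struct DummyCtx;
impl SyntaxContext for DummyCtx {
    const DUMMY: Self = DummyCtx;
}

fn main() {
    let span = <SpanData<DummyFile, DummyCtx> as Span>::DUMMY;
    println!("{span:?}");
}
```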
@@ -120,11 +120,13 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
     Ok(())
 }

+/*
+
 #[cfg(test)]
 mod tests {
     use tt::{
-        Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Span, Subtree, TokenId,
-        TokenTree,
+        Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, SpanAnchor, Subtree,
+        TokenId, TokenTree,
     };

     use super::*;
@@ -176,3 +178,4 @@ mod tests {
         assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION));
     }
 }
+*/
@@ -39,7 +39,7 @@ use std::collections::{HashMap, VecDeque};

 use serde::{Deserialize, Serialize};
 use text_size::TextRange;
-use tt::Span;
+use tt::{Span, SyntaxContext};

 use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS};

@@ -47,25 +47,30 @@ pub trait SerializableSpan<const L: usize>: Span {
     fn into_u32(self) -> [u32; L];
     fn from_u32(input: [u32; L]) -> Self;
 }
-impl SerializableSpan<1> for tt::TokenId {
-    fn into_u32(self) -> [u32; 1] {
-        [self.0]
-    }
-    fn from_u32([input]: [u32; 1]) -> Self {
-        tt::TokenId(input)
-    }
-}
+// impl SerializableSpan<1> for tt::TokenId {
+//     fn into_u32(self) -> [u32; 1] {
+//         [self.0]
+//     }
+//     fn from_u32([input]: [u32; 1]) -> Self {
+//         tt::TokenId(input)
+//     }
+// }

-impl<FileId> SerializableSpan<3> for tt::SpanData<FileId>
+impl<Anchor, Ctx> SerializableSpan<3> for tt::SpanData<Anchor, Ctx>
 where
-    FileId: From<u32> + Into<u32>,
+    Anchor: From<u32> + Into<u32>,
     Self: Span,
+    Ctx: SyntaxContext,
 {
     fn into_u32(self) -> [u32; 3] {
         [self.anchor.into(), self.range.start().into(), self.range.end().into()]
     }
     fn from_u32([file_id, start, end]: [u32; 3]) -> Self {
-        tt::SpanData { anchor: file_id.into(), range: TextRange::new(start.into(), end.into()) }
+        tt::SpanData {
+            anchor: file_id.into(),
+            range: TextRange::new(start.into(), end.into()),
+            ctx: Ctx::DUMMY,
+        }
     }
 }
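Note what this encoding implies: only the anchor and the range cross the process boundary, and the syntax context is reconstituted as `Ctx::DUMMY` on the far side, so it does not survive a round trip. A self-contained sketch of the same scheme with local stand-in types (not the real rust-analyzer types; assumes only the `text-size` crate):

```rust
use text_size::{TextRange, TextSize};

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct FileId(u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Ctx(u32);
const DUMMY_CTX: Ctx = Ctx(0);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Span {
    anchor: FileId,
    range: TextRange,
    ctx: Ctx,
}

// Three u32s on the wire: [anchor, range.start, range.end]. The syntax
// context is deliberately not serialized; it is re-created as a dummy.
fn into_u32(span: Span) -> [u32; 3] {
    [span.anchor.0, span.range.start().into(), span.range.end().into()]
}

fn from_u32([anchor, start, end]: [u32; 3]) -> Span {
    Span {
        anchor: FileId(anchor),
        range: TextRange::new(TextSize::new(start), TextSize::new(end)),
        ctx: DUMMY_CTX,
    }
}

fn main() {
    let span = Span {
        anchor: FileId(7),
        range: TextRange::new(TextSize::new(4), TextSize::new(9)),
        ctx: Ctx(42),
    };
    let decoded = from_u32(into_u32(span));
    // Anchor and range round-trip; the context does not.
    assert_eq!(decoded.anchor, span.anchor);
    assert_eq!(decoded.range, span.range);
    assert_eq!(decoded.ctx, DUMMY_CTX);
}
```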
@@ -210,7 +210,7 @@ mod tests {

     use cfg::CfgExpr;
     use hir::HirFileId;
-    use ide_db::base_db::span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID};
+    use ide_db::base_db::span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
     use mbe::syntax_node_to_token_tree;
     use syntax::{
         ast::{self, AstNode},
@@ -221,7 +221,7 @@ mod tests {
         let cfg_expr = {
             let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-            let tt = syntax_node_to_token_tree(
+            let tt = syntax_node_to_token_tree::<_, SyntaxContextId>(
                 tt.syntax(),
                 SpanAnchor { file_id: HirFileId::from(0), ast_id: ROOT_ERASED_FILE_AST_ID },
                 TextSize::new(0),
@@ -32,27 +32,37 @@ impl TokenId {
         Self::UNSPECIFIED
     }
 }
-impl Span for TokenId {
-    const DUMMY: Self = TokenId(!0);
-}

 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
-pub struct SpanData<Anchor> {
+pub struct SpanData<Anchor, Ctx> {
     /// The text range of this span, relative to the anchor.
+    /// We need the anchor for incrementality, as storing absolute ranges will require
+    /// recomputation on every change in a file at all times.
     pub range: TextRange,
     pub anchor: Anchor,
+    /// The syntax context of the span.
+    pub ctx: Ctx,
 }

-impl<Anchor: Span> Span for SpanData<Anchor> {
-    const DUMMY: Self =
-        SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY };
+impl<Anchor: SpanAnchor, Ctx: SyntaxContext> Span for SpanData<Anchor, Ctx> {
+    const DUMMY: Self = SpanData {
+        range: TextRange::empty(TextSize::new(0)),
+        anchor: Anchor::DUMMY,
+        ctx: Ctx::DUMMY,
+    };
+}
+
+pub trait SpanAnchor: std::fmt::Debug + Copy + Sized + Eq {
+    const DUMMY: Self;
 }

 pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
     const DUMMY: Self;
 }
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct SyntaxContext(pub u32);
+pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq {
+    const DUMMY: Self;
+}

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum TokenTree<S> {
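What the new `ctx` field buys, in one example: two spans can cover the identical range under the identical anchor and still be distinguished by their syntax context, which is exactly the property hygiene needs. A self-contained sketch with a local copy of the `SpanData` shape introduced above (`FileAnchor` and `ExpansionCtx` are invented for illustration; assumes the `text-size` crate):

```rust
use text_size::{TextRange, TextSize};

// Local copy of the SpanData shape from the diff above.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct SpanData<Anchor, Ctx> {
    range: TextRange,
    anchor: Anchor,
    ctx: Ctx,
}

// Invented types, just to make the example compile.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct FileAnchor(u32);
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ExpansionCtx(u32);

fn main() {
    let range = TextRange::new(TextSize::new(0), TextSize::new(3));
    // Identical range and anchor, but produced by different macro expansions:
    let a = SpanData { range, anchor: FileAnchor(1), ctx: ExpansionCtx(1) };
    let b = SpanData { range, anchor: FileAnchor(1), ctx: ExpansionCtx(2) };
    assert_ne!(a, b); // the syntax context keeps them apart
}
```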