mirror of https://github.com/rust-lang/rust-analyzer
synced 2025-01-13 21:54:42 +00:00

Split out syntax-bridge into a separate crate

parent 670a5ab4a9
commit d2dd4f6d5f

30 changed files with 268 additions and 140 deletions
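
For downstream crates the migration is mechanical: the syntax-tree <-> token-tree conversion API that `mbe` used to re-export now lives in the new `syntax-bridge` crate, and the dummy test span utilities moved into a public `dummy_test_span_utils` module. A minimal sketch of the consumer-side change, assuming only the re-exported names visible in the diff below (the helper function here is illustrative, not part of the commit):

    // Before this commit, conversions were re-exported through `mbe`:
    // use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};

    // After this commit, the same items come from `syntax-bridge`:
    use syntax::{ast, AstNode};
    use syntax_bridge::{
        dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
        syntax_node_to_token_tree, DocCommentDesugarMode,
    };

    // Illustrative helper (hypothetical name): lower parsed source to a token
    // tree with dummy spans, mirroring how the moved test code calls the API.
    fn source_to_token_tree(source: &str) -> tt::Subtree<span::Span> {
        let file = ast::SourceFile::parse(source, span::Edition::CURRENT).ok().unwrap();
        syntax_node_to_token_tree(file.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::Mbe)
    }
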
Cargo.lock: 24 changes (generated)

@@ -148,10 +148,10 @@ dependencies = [
  "derive_arbitrary",
  "expect-test",
  "intern",
- "mbe",
  "oorandom",
  "rustc-hash",
  "syntax",
+ "syntax-bridge",
  "tt",
 ]
 
@@ -554,6 +554,7 @@ dependencies = [
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-fixture",
  "test-utils",
  "tracing",
@@ -582,6 +583,7 @@ dependencies = [
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "tracing",
  "triomphe",
  "tt",
@@ -1056,6 +1058,7 @@ dependencies = [
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-utils",
  "tracing",
  "tt",
@@ -1350,7 +1353,6 @@ dependencies = [
  "expect-test",
  "intern",
  "libloading",
- "mbe",
  "memmap2",
  "object 0.33.0",
  "paths",
@@ -1360,6 +1362,7 @@ dependencies = [
  "snap",
  "span",
  "stdx",
+ "syntax-bridge",
  "tt",
 ]
 
@@ -1665,7 +1668,6 @@ dependencies = [
  "load-cargo",
  "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "lsp-types",
- "mbe",
  "memchr",
  "mimalloc",
  "nohash-hasher",
@@ -1685,6 +1687,7 @@ dependencies = [
  "serde_json",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-fixture",
  "test-utils",
  "tikv-jemallocator",
@@ -1967,6 +1970,21 @@ dependencies = [
  "triomphe",
 ]
 
+[[package]]
+name = "syntax-bridge"
+version = "0.0.0"
+dependencies = [
+ "intern",
+ "parser",
+ "rustc-hash",
+ "span",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
 [[package]]
 name = "test-fixture"
 version = "0.0.0"

@@ -77,6 +77,7 @@ salsa = { path = "./crates/salsa", version = "0.0.0" }
 span = { path = "./crates/span", version = "0.0.0" }
 stdx = { path = "./crates/stdx", version = "0.0.0" }
 syntax = { path = "./crates/syntax", version = "0.0.0" }
+syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" }
 text-edit = { path = "./crates/text-edit", version = "0.0.0" }
 toolchain = { path = "./crates/toolchain", version = "0.0.0" }
 tt = { path = "./crates/tt", version = "0.0.0" }

@@ -28,7 +28,7 @@ arbitrary = "1.3.2"
 derive_arbitrary = "1.3.2"
 
 # local deps
-mbe.workspace = true
+syntax-bridge.workspace = true
 syntax.workspace = true
 
 [lints]

@@ -1,8 +1,11 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
 use intern::Symbol;
-use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
 use syntax::{ast, AstNode, Edition};
+use syntax_bridge::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 

@@ -52,7 +52,7 @@ expect-test.workspace = true
 # local deps
 test-utils.workspace = true
 test-fixture.workspace = true
-
+syntax-bridge.workspace = true
 [features]
 in-rust-tree = ["hir-expand/in-rust-tree"]
 

@@ -657,9 +657,9 @@ mod tests {
     use triomphe::Arc;
 
     use hir_expand::span_map::{RealSpanMap, SpanMap};
-    use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
     use span::FileId;
     use syntax::{ast, AstNode, TextRange};
+    use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
 
     use crate::attr::{DocAtom, DocExpr};
 
@@ -1201,7 +1201,6 @@ macro_rules! m {
 
     #[test]
     fn test_meta_doc_comments() {
-        cov_mark::check!(test_meta_doc_comments);
         check(
             r#"
 macro_rules! m {

@@ -317,9 +317,9 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
         _: Span,
         _: Span,
     ) -> Result<Subtree, ProcMacroExpansionError> {
-        let (parse, _) = ::mbe::token_tree_to_syntax_node(
+        let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
             subtree,
-            ::mbe::TopEntryPoint::MacroItems,
+            syntax_bridge::TopEntryPoint::MacroItems,
             span::Edition::CURRENT,
         );
         if parse.errors().is_empty() {

@@ -33,6 +33,7 @@ mbe.workspace = true
 limit.workspace = true
 span.workspace = true
 parser.workspace = true
+syntax-bridge.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"

@@ -6,14 +6,12 @@ use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Interned, Symbol};
 
-use mbe::{
-    desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode,
-    Punct,
-};
+use mbe::{DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
 use span::{Span, SyntaxContextId};
 use syntax::unescape;
 use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode};
+use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode};
 use triomphe::ThinArc;
 
 use crate::name::Name;

@@ -2,10 +2,10 @@
 
 use intern::sym;
 use itertools::izip;
-use mbe::DocCommentDesugarMode;
 use rustc_hash::FxHashSet;
 use span::{MacroCallId, Span};
 use stdx::never;
+use syntax_bridge::DocCommentDesugarMode;
 use tracing::debug;
 
 use crate::{
@@ -209,9 +209,9 @@ struct BasicAdtInfo {
 }
 
 fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
-    let (parsed, tm) = &mbe::token_tree_to_syntax_node(
+    let (parsed, tm) = &syntax_bridge::token_tree_to_syntax_node(
         tt,
-        mbe::TopEntryPoint::MacroItems,
+        syntax_bridge::TopEntryPoint::MacroItems,
         parser::Edition::CURRENT_FIXME,
     );
     let macro_items = ast::MacroItems::cast(parsed.syntax_node())
@@ -268,7 +268,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
             match this {
                 Some(it) => {
                     param_type_set.insert(it.as_name());
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         it.syntax(),
                         tm,
                         call_site,
@@ -282,7 +282,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
             };
             let bounds = match &param {
                 ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| {
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         it.syntax(),
                         tm,
                         call_site,
@@ -295,7 +295,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
             let ty = param
                 .ty()
                 .map(|ty| {
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         ty.syntax(),
                         tm,
                         call_site,
@@ -316,7 +316,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
     let where_clause = if let Some(w) = where_clause {
         w.predicates()
             .map(|it| {
-                mbe::syntax_node_to_token_tree(
+                syntax_bridge::syntax_node_to_token_tree(
                     it.syntax(),
                     tm,
                     call_site,
@@ -353,7 +353,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
                 param_type_set.contains(&name).then_some(p)
             })
             .map(|it| {
-                mbe::syntax_node_to_token_tree(
+                syntax_bridge::syntax_node_to_token_tree(
                     it.syntax(),
                     tm,
                     call_site,

@@ -4,13 +4,14 @@ use base_db::AnchoredPath;
 use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Symbol};
-use mbe::{parse_exprs_with_sep, parse_to_token_tree, DelimiterKind};
+use mbe::{expect_fragment, DelimiterKind};
 use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
 use stdx::format_to;
 use syntax::{
     format_smolstr,
     unescape::{unescape_byte, unescape_char, unescape_unicode, Mode},
 };
+use syntax_bridge::parse_to_token_tree;
 
 use crate::{
     builtin::quote::{dollar_crate, quote},
@@ -228,20 +229,22 @@ fn assert_expand(
     span: Span,
 ) -> ExpandResult<tt::Subtree> {
     let call_site_span = span_with_call_site_ctxt(db, span, id);
-    let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME);
+    let mut iter = ::tt::iter::TtIter::new(tt);
+
+    let cond = expect_fragment(
+        &mut iter,
+        parser::PrefixEntryPoint::Expr,
+        db.crate_graph()[id.lookup(db).krate].edition,
+        tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
+    );
+    _ = iter.expect_char(',');
+    let rest = iter.as_slice();
 
     let dollar_crate = dollar_crate(span);
-    let expanded = match &*args {
-        [cond, panic_args @ ..] => {
-            let comma = tt::Subtree {
-                delimiter: tt::Delimiter::invisible_spanned(call_site_span),
-                token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
-                    char: ',',
-                    spacing: tt::Spacing::Alone,
-                    span: call_site_span,
-                }))]),
-            };
-            let cond = cond.clone();
-            let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
+    let expanded = match cond.value {
+        Some(cond) => {
+            let panic_args = rest.iter().cloned();
             let mac = if use_panic_2021(db, span) {
                 quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) }
             } else {
@@ -253,10 +256,13 @@ fn assert_expand(
             }
         }}
         }
-        [] => quote! {call_site_span =>{}},
+        None => quote! {call_site_span =>{}},
     };
 
-    ExpandResult::ok(expanded)
+    match cond.err {
+        Some(err) => ExpandResult::new(expanded, err.into()),
+        None => ExpandResult::ok(expanded),
+    }
 }
 
 fn file_expand(

@@ -3,10 +3,11 @@
 use base_db::{salsa, CrateId, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex};
+use mbe::MatchedArmIndex;
 use rustc_hash::FxHashSet;
 use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
 use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
+use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
 use triomphe::Arc;
 
 use crate::{
@@ -165,7 +166,7 @@ pub fn expand_speculative(
     // Build the subtree and token mapping for the speculative args
     let (mut tt, undo_info) = match loc.kind {
         MacroCallKind::FnLike { .. } => (
-            mbe::syntax_node_to_token_tree(
+            syntax_bridge::syntax_node_to_token_tree(
                 speculative_args,
                 span_map,
                 span,
@@ -178,7 +179,7 @@ pub fn expand_speculative(
             SyntaxFixupUndoInfo::NONE,
         ),
         MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
-            mbe::syntax_node_to_token_tree(
+            syntax_bridge::syntax_node_to_token_tree(
                 speculative_args,
                 span_map,
                 span,
@@ -213,7 +214,7 @@ pub fn expand_speculative(
             fixups.remove.extend(censor_cfg);
 
             (
-                mbe::syntax_node_to_token_tree_modified(
+                syntax_bridge::syntax_node_to_token_tree_modified(
                     speculative_args,
                     span_map,
                     fixups.append,
@@ -459,7 +460,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
         return dummy_tt(kind);
     }
 
-    let mut tt = mbe::syntax_node_to_token_tree(
+    let mut tt = syntax_bridge::syntax_node_to_token_tree(
         tt.syntax(),
         map.as_ref(),
         span,
@@ -515,7 +516,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
     fixups.remove.extend(censor_cfg);
 
     (
-        mbe::syntax_node_to_token_tree_modified(
+        syntax_bridge::syntax_node_to_token_tree_modified(
             syntax,
             map,
             fixups.append,
@@ -720,13 +721,13 @@ fn token_tree_to_syntax_node(
     edition: parser::Edition,
 ) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
     let entry_point = match expand_to {
-        ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
-        ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
-        ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
-        ExpandTo::Type => mbe::TopEntryPoint::Type,
-        ExpandTo::Expr => mbe::TopEntryPoint::Expr,
+        ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts,
+        ExpandTo::Items => syntax_bridge::TopEntryPoint::MacroItems,
+        ExpandTo::Pattern => syntax_bridge::TopEntryPoint::Pattern,
+        ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
+        ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
     };
-    mbe::token_tree_to_syntax_node(tt, entry_point, edition)
+    syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition)
 }
 
 fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {

@@ -2,10 +2,10 @@
 
 use base_db::CrateId;
 use intern::sym;
-use mbe::DocCommentDesugarMode;
 use span::{Edition, MacroCallId, Span, SyntaxContextId};
 use stdx::TupleExt;
 use syntax::{ast, AstNode};
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
@@ -112,7 +112,7 @@ impl DeclarativeMacroExpander {
             ast::Macro::MacroRules(macro_rules) => (
                 match macro_rules.token_tree() {
                     Some(arg) => {
-                        let tt = mbe::syntax_node_to_token_tree(
+                        let tt = syntax_bridge::syntax_node_to_token_tree(
                             arg.syntax(),
                             map.as_ref(),
                             map.span_for_range(
@@ -135,14 +135,14 @@ impl DeclarativeMacroExpander {
                 let span =
                     map.span_for_range(macro_def.macro_token().unwrap().text_range());
                 let args = macro_def.args().map(|args| {
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         args.syntax(),
                         map.as_ref(),
                         span,
                         DocCommentDesugarMode::Mbe,
                     )
                 });
-                let body = mbe::syntax_node_to_token_tree(
+                let body = syntax_bridge::syntax_node_to_token_tree(
                     body.syntax(),
                     map.as_ref(),
                     span,

@@ -19,9 +19,9 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use mbe::DocCommentDesugarMode;
 use span::SyntaxContextId;
 use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
@@ -82,7 +82,7 @@ pub fn expand_eager_macro_input(
         return ExpandResult { value: None, err };
     };
 
-    let mut subtree = mbe::syntax_node_to_token_tree(
+    let mut subtree = syntax_bridge::syntax_node_to_token_tree(
         &expanded_eager_input,
         arg_map,
         span,

@@ -2,7 +2,6 @@
 //! fix up syntax errors in the code we're passing to them.
 
 use intern::sym;
-use mbe::DocCommentDesugarMode;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
 use span::{
@@ -14,6 +13,7 @@ use syntax::{
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 use tt::Spacing;
 
@@ -76,7 +76,8 @@ pub(crate) fn fixup_syntax(
         if can_handle_error(&node) && has_error_to_handle(&node) {
             remove.insert(node.clone().into());
             // the node contains an error node, we have to completely replace it by something valid
-            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site, mode);
+            let original_tree =
+                syntax_bridge::syntax_node_to_token_tree(&node, span_map, call_site, mode);
             let idx = original.len() as u32;
             original.push(original_tree);
             let span = span_map.span_for_range(node_range);
@@ -434,9 +435,9 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
 #[cfg(test)]
 mod tests {
     use expect_test::{expect, Expect};
-    use mbe::DocCommentDesugarMode;
     use span::{Edition, EditionedFileId, FileId};
     use syntax::TextRange;
+    use syntax_bridge::DocCommentDesugarMode;
     use triomphe::Arc;
 
     use crate::{
@@ -483,7 +484,7 @@ mod tests {
             span_map.span_for_range(TextRange::empty(0.into())),
             DocCommentDesugarMode::Mbe,
         );
-        let mut tt = mbe::syntax_node_to_token_tree_modified(
+        let mut tt = syntax_bridge::syntax_node_to_token_tree_modified(
             &parsed.syntax_node(),
             span_map.as_ref(),
             fixups.append,
@@ -498,9 +499,9 @@ mod tests {
         expect.assert_eq(&actual);
 
         // the fixed-up tree should be syntactically valid
-        let (parse, _) = mbe::token_tree_to_syntax_node(
+        let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
             &tt,
-            ::mbe::TopEntryPoint::MacroItems,
+            syntax_bridge::TopEntryPoint::MacroItems,
             parser::Edition::CURRENT,
         );
         assert!(
@@ -513,7 +514,7 @@ mod tests {
 
         // the fixed-up + reversed version should be equivalent to the original input
         // modulo token IDs and `Punct`s' spacing.
-        let original_as_tt = mbe::syntax_node_to_token_tree(
+        let original_as_tt = syntax_bridge::syntax_node_to_token_tree(
             &parsed.syntax_node(),
             span_map.as_ref(),
             span_map.span_for_range(TextRange::empty(0.into())),

@@ -28,13 +28,13 @@ pub enum SpanMapRef<'a> {
     RealSpanMap(&'a RealSpanMap),
 }
 
-impl mbe::SpanMapper<Span> for SpanMap {
+impl syntax_bridge::SpanMapper<Span> for SpanMap {
     fn span_for(&self, range: TextRange) -> Span {
         self.span_for_range(range)
     }
 }
 
-impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
+impl syntax_bridge::SpanMapper<Span> for SpanMapRef<'_> {
     fn span_for(&self, range: TextRange) -> Span {
         self.span_for_range(range)
     }
 }

@@ -25,6 +25,7 @@ tt.workspace = true
 stdx.workspace = true
 span.workspace = true
 intern.workspace = true
+syntax-bridge.workspace = true
 
 [dev-dependencies]
 test-utils.workspace = true

@@ -7,11 +7,15 @@ use syntax::{
     ast::{self, HasName},
     AstNode,
 };
+use syntax_bridge::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
 use test_utils::{bench, bench_fixture, skip_slow_tests};
 
 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro, DocCommentDesugarMode, DummyTestSpanMap, DUMMY,
+    DeclarativeMacro,
 };
 
 #[test]

@@ -8,13 +8,12 @@
 
 mod expander;
 mod parser;
-mod syntax_bridge;
-mod to_parser_input;
 
 #[cfg(test)]
 mod benchmark;
 
 use span::{Edition, Span, SyntaxContextId};
+use syntax_bridge::to_parser_input;
 use tt::iter::TtIter;
 use tt::DelimSpan;
 
@@ -23,18 +22,8 @@ use std::sync::Arc;
 
 use crate::parser::{MetaTemplate, MetaVarKind, Op};
 
-// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
-pub use ::parser::TopEntryPoint;
 pub use tt::{Delimiter, DelimiterKind, Punct};
 
-pub use crate::syntax_bridge::{
-    desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree,
-    parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified,
-    token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
-};
-
-pub use crate::syntax_bridge::dummy_test_span_utils::*;
-
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum ParseError {
     UnexpectedToken(Box<str>),
@@ -361,7 +350,7 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
     }
 }
 
-fn expect_fragment(
+pub fn expect_fragment(
     tt_iter: &mut TtIter<'_, Span>,
     entry_point: ::parser::PrefixEntryPoint,
     edition: ::parser::Edition,
@@ -369,7 +358,7 @@ fn expect_fragment(
 ) -> ExpandResult<Option<tt::TokenTree<Span>>> {
     use ::parser;
     let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
-    let parser_input = to_parser_input::to_parser_input(edition, &buffer);
+    let parser_input = to_parser_input(edition, &buffer);
     let tree_traversal = entry_point.parse(&parser_input, edition);
     let mut cursor = buffer.begin();
     let mut error = false;

@@ -19,23 +19,24 @@ snap.workspace = true
 
 stdx.workspace = true
 tt.workspace = true
-mbe.workspace = true
+syntax-bridge.workspace = true
 paths.workspace = true
 base-db.workspace = true
 span.workspace = true
 proc-macro-api.workspace = true
-ra-ap-rustc_lexer.workspace = true
 intern.workspace = true
 
+ra-ap-rustc_lexer.workspace = true
+
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test.workspace = true
 
 # used as proc macro test targets
 proc-macro-test.path = "./proc-macro-test"
 
 [features]
 sysroot-abi = []
-in-rust-tree = ["mbe/in-rust-tree", "tt/in-rust-tree","sysroot-abi"]
+in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"]
 
 [lints]
 workspace = true

@@ -126,9 +126,12 @@ pub(super) mod token_stream {
     /// change these errors into `LexError`s later.
     impl<S: Copy + fmt::Debug> TokenStream<S> {
         pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
-            let subtree =
-                mbe::parse_to_token_tree_static_span(span::Edition::CURRENT_FIXME, call_site, src)
-                    .ok_or("lexing error")?;
+            let subtree = syntax_bridge::parse_to_token_tree_static_span(
+                span::Edition::CURRENT_FIXME,
+                call_site,
+                src,
+            )
+            .ok_or("lexing error")?;
 
             Ok(TokenStream::with_subtree(subtree))
         }

@@ -9,7 +9,8 @@ use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};
 
 fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
     crate::server_impl::TokenStream::with_subtree(
-        mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(),
+        syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
+            .unwrap(),
     )
 }
 
@@ -19,7 +20,7 @@ fn parse_string_spanned(
     src: &str,
 ) -> crate::server_impl::TokenStream<Span> {
     crate::server_impl::TokenStream::with_subtree(
-        mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
+        syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
     )
 }
 

@@ -82,7 +82,7 @@ xshell.workspace = true
 
 test-utils.workspace = true
 test-fixture.workspace = true
-mbe.workspace = true
+syntax-bridge.workspace = true
 
 [features]
 jemalloc = ["jemallocator", "profile/jemalloc"]

@@ -263,11 +263,14 @@ mod tests {
     use super::*;
 
     use ide::Edition;
-    use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
     use syntax::{
         ast::{self, AstNode},
         SmolStr,
     };
+    use syntax_bridge::{
+        dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+        syntax_node_to_token_tree, DocCommentDesugarMode,
+    };
 
     fn check(cfg: &str, expected_features: &[&str]) {
         let cfg_expr = {

@@ -79,6 +79,10 @@ impl SyntaxContextId {
 #[derive(Copy, Clone, Hash, PartialEq, Eq)]
 pub struct SyntaxContextData {
     /// Invariant: Only [`SyntaxContextId::ROOT`] has a [`None`] outer expansion.
+    // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of
+    // MacroCallId is reserved anyways so we can do bit tagging here just fine.
+    // The bigger issue is that that will cause interning to now create completely separate chains
+    // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent.
     pub outer_expn: Option<MacroCallId>,
     pub outer_transparency: Transparency,
     pub parent: SyntaxContextId,

crates/syntax-bridge/Cargo.toml: 33 changes (new file)

@@ -0,0 +1,33 @@
+[package]
+name = "syntax-bridge"
+version = "0.0.0"
+description = "TBD"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash.workspace = true
+tracing.workspace = true
+
+# local deps
+syntax.workspace = true
+parser.workspace = true
+tt.workspace = true
+stdx.workspace = true
+span.workspace = true
+intern.workspace = true
+
+[dev-dependencies]
+test-utils.workspace = true
+
+[features]
+in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"]
+
+[lints]
+workspace = true

@@ -14,11 +14,13 @@ use syntax::{
 };
 use tt::{
     buffer::{Cursor, TokenBuffer},
-    iter::TtIter,
     token_to_literal,
 };
 
-use crate::to_parser_input::to_parser_input;
+mod to_parser_input;
+pub use to_parser_input::to_parser_input;
+// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
+pub use ::parser::TopEntryPoint;
+
 #[cfg(test)]
 mod tests;
@@ -43,7 +45,7 @@ impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM {
 }
 
 /// Dummy things for testing where spans don't matter.
-pub(crate) mod dummy_test_span_utils {
+pub mod dummy_test_span_utils {
 
     use span::{Span, SyntaxContextId};
 
@@ -211,50 +213,6 @@ where
     Some(convert_tokens(&mut conv))
 }
 
-/// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep(
-    tt: &tt::Subtree<span::Span>,
-    sep: char,
-    span: span::Span,
-    edition: Edition,
-) -> Vec<tt::Subtree<span::Span>> {
-    if tt.token_trees.is_empty() {
-        return Vec::new();
-    }
-
-    let mut iter = TtIter::new(tt);
-    let mut res = Vec::new();
-
-    while iter.peek_n(0).is_some() {
-        let expanded = crate::expect_fragment(
-            &mut iter,
-            parser::PrefixEntryPoint::Expr,
-            edition,
-            tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
-        );
-
-        res.push(match expanded.value {
-            None => break,
-            Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }),
-        });
-
-        let mut fork = iter.clone();
-        if fork.expect_char(sep).is_err() {
-            break;
-        }
-        iter = fork;
-    }
-
-    if iter.peek_n(0).is_some() {
-        res.push(tt::Subtree {
-            delimiter: tt::Delimiter::invisible_spanned(span),
-            token_trees: iter.cloned().collect(),
-        });
-    }
-
-    res
-}
-
 fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
 where
     C: TokenConverter<S>,
@@ -479,7 +437,6 @@ fn convert_doc_comment<S: Copy>(
     span: S,
     mode: DocCommentDesugarMode,
 ) -> Option<Vec<tt::TokenTree<S>>> {
-    cov_mark::hit!(test_meta_doc_comments);
     let comment = ast::Comment::cast(token.clone())?;
     let doc = comment.kind().doc?;

crates/syntax-bridge/src/tests.rs: 104 changes (new file)

@@ -0,0 +1,104 @@
+use rustc_hash::FxHashMap;
+use span::Span;
+use syntax::{ast, AstNode};
+use test_utils::extract_annotations;
+use tt::{
+    buffer::{TokenBuffer, TokenTreeRef},
+    Leaf, Punct, Spacing,
+};
+
+use crate::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
+
+fn check_punct_spacing(fixture: &str) {
+    let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap();
+    let subtree = syntax_node_to_token_tree(
+        source_file.syntax(),
+        DummyTestSpanMap,
+        DUMMY,
+        DocCommentDesugarMode::Mbe,
+    );
+    let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
+        .into_iter()
+        .map(|(range, annotation)| {
+            let spacing = match annotation.as_str() {
+                "Alone" => Spacing::Alone,
+                "Joint" => Spacing::Joint,
+                a => panic!("unknown annotation: {a}"),
+            };
+            (range, spacing)
+        })
+        .collect();
+
+    let buf = TokenBuffer::from_subtree(&subtree);
+    let mut cursor = buf.begin();
+    while !cursor.eof() {
+        while let Some(token_tree) = cursor.token_tree() {
+            if let TokenTreeRef::Leaf(
+                Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. }),
+                _,
+            ) = token_tree
+            {
+                if let Some(expected) = annotations.remove(range) {
+                    assert_eq!(expected, *spacing);
+                }
+            }
+            cursor = cursor.bump_subtree();
+        }
+        cursor = cursor.bump();
+    }
+
+    assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}");
+}
+
+#[test]
+fn punct_spacing() {
+    check_punct_spacing(
+        r#"
+fn main() {
+0+0;
+//^ Alone
+0+(0);
+//^ Alone
+0<=0;
+//^ Joint
+// ^ Alone
+0<=(0);
+// ^ Alone
+a=0;
+//^ Alone
+a=(0);
+//^ Alone
+a+=0;
+//^ Joint
+// ^ Alone
+a+=(0);
+// ^ Alone
+a&&b;
+//^ Joint
+// ^ Alone
+a&&(b);
+// ^ Alone
+foo::bar;
+// ^ Joint
+// ^ Alone
+use foo::{bar,baz,};
+// ^ Alone
+// ^ Alone
+// ^ Alone
+struct Struct<'a> {};
+// ^ Joint
+// ^ Joint
+Struct::<0>;
+// ^ Alone
+Struct::<{0}>;
+// ^ Alone
+;;
+//^ Joint
+// ^ Alone
+}
+"#,
+    );
+}

@@ -8,7 +8,7 @@ use syntax::{SyntaxKind, SyntaxKind::*, T};
 
 use tt::buffer::TokenBuffer;
 
-pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(
+pub fn to_parser_input<S: Copy + fmt::Debug>(
     edition: Edition,
     buffer: &TokenBuffer<'_, S>,
 ) -> parser::Input {