Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-11-10 07:04:22 +00:00
Auto merge of #17799 - Veykril:syntax-bridge, r=Veykril
Split out syntax-bridge into a separate crate

This functionality is not really tied to mbe macros, so imo it has no place in that crate.
Commit: fbed308ebe
30 changed files with 268 additions and 140 deletions
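Most of the hunks below are mechanical: conversion helpers such as `syntax_node_to_token_tree`, `token_tree_to_syntax_node`, `parse_to_token_tree`, and `DocCommentDesugarMode` move out of `mbe` into the new `syntax-bridge` crate, and every caller swaps its import path. A minimal sketch of the pattern that repeats throughout the diff:

```rust
// Before this commit, the token-tree conversion helpers were
// re-exported from the `mbe` crate:
// use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};

// After this commit, the same helpers come from `syntax-bridge`,
// and `mbe` keeps only the macro-by-example expander itself.
use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
```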
Cargo.lock (generated, 24 changes)
@@ -148,10 +148,10 @@ dependencies = [
  "derive_arbitrary",
  "expect-test",
  "intern",
- "mbe",
  "oorandom",
  "rustc-hash",
  "syntax",
+ "syntax-bridge",
  "tt",
 ]
@@ -554,6 +554,7 @@ dependencies = [
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-fixture",
  "test-utils",
  "tracing",
@@ -582,6 +583,7 @@ dependencies = [
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "tracing",
  "triomphe",
  "tt",
@@ -1056,6 +1058,7 @@ dependencies = [
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-utils",
  "tracing",
  "tt",
@@ -1349,7 +1352,6 @@ dependencies = [
  "expect-test",
  "intern",
  "libloading",
- "mbe",
  "memmap2",
  "object 0.33.0",
  "paths",
@@ -1359,6 +1361,7 @@ dependencies = [
  "snap",
  "span",
  "stdx",
+ "syntax-bridge",
  "tt",
 ]
@@ -1664,7 +1667,6 @@ dependencies = [
  "load-cargo",
  "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "lsp-types",
- "mbe",
  "memchr",
  "mimalloc",
  "nohash-hasher",
@@ -1684,6 +1686,7 @@ dependencies = [
  "serde_json",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-fixture",
  "test-utils",
  "tikv-jemallocator",
@@ -1966,6 +1969,21 @@ dependencies = [
  "triomphe",
 ]
 
+[[package]]
+name = "syntax-bridge"
+version = "0.0.0"
+dependencies = [
+ "intern",
+ "parser",
+ "rustc-hash",
+ "span",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
 [[package]]
 name = "test-fixture"
 version = "0.0.0"

@@ -77,6 +77,7 @@ salsa = { path = "./crates/salsa", version = "0.0.0" }
 span = { path = "./crates/span", version = "0.0.0" }
 stdx = { path = "./crates/stdx", version = "0.0.0" }
 syntax = { path = "./crates/syntax", version = "0.0.0" }
+syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" }
 text-edit = { path = "./crates/text-edit", version = "0.0.0" }
 toolchain = { path = "./crates/toolchain", version = "0.0.0" }
 tt = { path = "./crates/tt", version = "0.0.0" }

@@ -28,7 +28,7 @@ arbitrary = "1.3.2"
 derive_arbitrary = "1.3.2"
 
 # local deps
-mbe.workspace = true
+syntax-bridge.workspace = true
 syntax.workspace = true
 
 [lints]

@@ -1,8 +1,11 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
 use intern::Symbol;
-use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
 use syntax::{ast, AstNode, Edition};
+use syntax_bridge::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};

@@ -52,7 +52,7 @@ expect-test.workspace = true
 # local deps
 test-utils.workspace = true
 test-fixture.workspace = true
-
+syntax-bridge.workspace = true
 
 [features]
 in-rust-tree = ["hir-expand/in-rust-tree"]

@@ -657,9 +657,9 @@ mod tests {
     use triomphe::Arc;
 
     use hir_expand::span_map::{RealSpanMap, SpanMap};
-    use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
     use span::FileId;
     use syntax::{ast, AstNode, TextRange};
+    use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
 
     use crate::attr::{DocAtom, DocExpr};

@@ -1201,7 +1201,6 @@ macro_rules! m {
 
 #[test]
 fn test_meta_doc_comments() {
-    cov_mark::check!(test_meta_doc_comments);
     check(
         r#"
 macro_rules! m {

@@ -317,9 +317,9 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
         _: Span,
         _: Span,
     ) -> Result<Subtree, ProcMacroExpansionError> {
-        let (parse, _) = ::mbe::token_tree_to_syntax_node(
+        let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
             subtree,
-            ::mbe::TopEntryPoint::MacroItems,
+            syntax_bridge::TopEntryPoint::MacroItems,
             span::Edition::CURRENT,
         );
         if parse.errors().is_empty() {

@@ -33,6 +33,7 @@ mbe.workspace = true
 limit.workspace = true
 span.workspace = true
 parser.workspace = true
+syntax-bridge.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"

@@ -6,14 +6,12 @@ use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Interned, Symbol};
 
-use mbe::{
-    desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode,
-    Punct,
-};
+use mbe::{DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
 use span::{Span, SyntaxContextId};
 use syntax::unescape;
 use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode};
+use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode};
 use triomphe::ThinArc;
 
 use crate::name::Name;

@@ -2,10 +2,10 @@
 
 use intern::sym;
 use itertools::izip;
-use mbe::DocCommentDesugarMode;
 use rustc_hash::FxHashSet;
 use span::{MacroCallId, Span};
 use stdx::never;
+use syntax_bridge::DocCommentDesugarMode;
 use tracing::debug;
 
 use crate::{
@@ -209,9 +209,9 @@ struct BasicAdtInfo {
 }
 
 fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
-    let (parsed, tm) = &mbe::token_tree_to_syntax_node(
+    let (parsed, tm) = &syntax_bridge::token_tree_to_syntax_node(
         tt,
-        mbe::TopEntryPoint::MacroItems,
+        syntax_bridge::TopEntryPoint::MacroItems,
         parser::Edition::CURRENT_FIXME,
     );
     let macro_items = ast::MacroItems::cast(parsed.syntax_node())
@@ -268,7 +268,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
             match this {
                 Some(it) => {
                     param_type_set.insert(it.as_name());
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         it.syntax(),
                         tm,
                         call_site,
@@ -282,7 +282,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
             };
             let bounds = match &param {
                 ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| {
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         it.syntax(),
                         tm,
                         call_site,
@@ -295,7 +295,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
             let ty = param
                 .ty()
                 .map(|ty| {
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         ty.syntax(),
                         tm,
                         call_site,
@@ -316,7 +316,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
     let where_clause = if let Some(w) = where_clause {
         w.predicates()
             .map(|it| {
-                mbe::syntax_node_to_token_tree(
+                syntax_bridge::syntax_node_to_token_tree(
                     it.syntax(),
                     tm,
                     call_site,
@@ -353,7 +353,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
                 param_type_set.contains(&name).then_some(p)
             })
             .map(|it| {
-                mbe::syntax_node_to_token_tree(
+                syntax_bridge::syntax_node_to_token_tree(
                     it.syntax(),
                     tm,
                     call_site,

@@ -4,13 +4,14 @@ use base_db::AnchoredPath;
 use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Symbol};
-use mbe::{parse_exprs_with_sep, parse_to_token_tree, DelimiterKind};
+use mbe::{expect_fragment, DelimiterKind};
 use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
 use stdx::format_to;
 use syntax::{
     format_smolstr,
     unescape::{unescape_byte, unescape_char, unescape_unicode, Mode},
 };
+use syntax_bridge::parse_to_token_tree;
 
 use crate::{
     builtin::quote::{dollar_crate, quote},
@@ -228,20 +229,22 @@ fn assert_expand(
     span: Span,
 ) -> ExpandResult<tt::Subtree> {
     let call_site_span = span_with_call_site_ctxt(db, span, id);
-    let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME);
+
+    let mut iter = ::tt::iter::TtIter::new(tt);
+
+    let cond = expect_fragment(
+        &mut iter,
+        parser::PrefixEntryPoint::Expr,
+        db.crate_graph()[id.lookup(db).krate].edition,
+        tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
+    );
+    _ = iter.expect_char(',');
+    let rest = iter.as_slice();
+
     let dollar_crate = dollar_crate(span);
-    let expanded = match &*args {
-        [cond, panic_args @ ..] => {
-            let comma = tt::Subtree {
-                delimiter: tt::Delimiter::invisible_spanned(call_site_span),
-                token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
-                    char: ',',
-                    spacing: tt::Spacing::Alone,
-                    span: call_site_span,
-                }))]),
-            };
-            let cond = cond.clone();
-            let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
+    let expanded = match cond.value {
+        Some(cond) => {
+            let panic_args = rest.iter().cloned();
             let mac = if use_panic_2021(db, span) {
                 quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) }
             } else {
@@ -253,10 +256,13 @@ fn assert_expand(
             }
         }}
         }
-        [] => quote! {call_site_span =>{}},
+        None => quote! {call_site_span =>{}},
     };
 
-    ExpandResult::ok(expanded)
+    match cond.err {
+        Some(err) => ExpandResult::new(expanded, err.into()),
+        None => ExpandResult::ok(expanded),
+    }
 }
 
 fn file_expand(

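These `assert_expand` hunks are the one real behavior change in the commit: instead of splitting the whole argument list into comma-separated expressions with `parse_exprs_with_sep` (deleted from the bridge further down), the builtin now parses only the leading condition with `expect_fragment`, consumes a single `,`, and forwards the remaining tokens to the panic macro untouched, while any parse error travels with the expansion instead of being dropped. A self-contained sketch of that value-plus-deferred-error shape; the struct here is an illustrative stand-in, not the real `ExpandResult` API:

```rust
// Illustrative stand-in: expansion yields a best-effort value plus an
// optional error, rather than failing outright.
struct ExpandResult<T> {
    value: T,
    err: Option<String>,
}

impl<T> ExpandResult<T> {
    fn ok(value: T) -> Self {
        ExpandResult { value, err: None }
    }
    fn new(value: T, err: String) -> Self {
        ExpandResult { value, err: Some(err) }
    }
}

fn main() {
    // Suppose parsing the assert! condition failed but expansion still
    // produced a usable placeholder, as in the hunk above.
    let cond: ExpandResult<&str> = ExpandResult::new("{}", "expected expression".to_owned());
    let expanded = match cond.err {
        Some(err) => ExpandResult::new(cond.value, err),
        None => ExpandResult::ok(cond.value),
    };
    assert_eq!(expanded.value, "{}");
    assert!(expanded.err.is_some());
}
```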
@@ -3,10 +3,11 @@
 use base_db::{salsa, CrateId, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex};
+use mbe::MatchedArmIndex;
 use rustc_hash::FxHashSet;
 use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
 use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
+use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
 use triomphe::Arc;
 
 use crate::{
@@ -165,7 +166,7 @@ pub fn expand_speculative(
     // Build the subtree and token mapping for the speculative args
     let (mut tt, undo_info) = match loc.kind {
         MacroCallKind::FnLike { .. } => (
-            mbe::syntax_node_to_token_tree(
+            syntax_bridge::syntax_node_to_token_tree(
                 speculative_args,
                 span_map,
                 span,
@@ -178,7 +179,7 @@ pub fn expand_speculative(
             SyntaxFixupUndoInfo::NONE,
         ),
         MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
-            mbe::syntax_node_to_token_tree(
+            syntax_bridge::syntax_node_to_token_tree(
                 speculative_args,
                 span_map,
                 span,
@@ -213,7 +214,7 @@ pub fn expand_speculative(
             fixups.remove.extend(censor_cfg);
 
             (
-                mbe::syntax_node_to_token_tree_modified(
+                syntax_bridge::syntax_node_to_token_tree_modified(
                     speculative_args,
                     span_map,
                     fixups.append,
@@ -459,7 +460,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
         return dummy_tt(kind);
     }
 
-    let mut tt = mbe::syntax_node_to_token_tree(
+    let mut tt = syntax_bridge::syntax_node_to_token_tree(
         tt.syntax(),
         map.as_ref(),
         span,
@@ -515,7 +516,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
         fixups.remove.extend(censor_cfg);
 
         (
-            mbe::syntax_node_to_token_tree_modified(
+            syntax_bridge::syntax_node_to_token_tree_modified(
                 syntax,
                 map,
                 fixups.append,
@@ -720,13 +721,13 @@ fn token_tree_to_syntax_node(
     edition: parser::Edition,
 ) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
     let entry_point = match expand_to {
-        ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
-        ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
-        ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
-        ExpandTo::Type => mbe::TopEntryPoint::Type,
-        ExpandTo::Expr => mbe::TopEntryPoint::Expr,
+        ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts,
+        ExpandTo::Items => syntax_bridge::TopEntryPoint::MacroItems,
+        ExpandTo::Pattern => syntax_bridge::TopEntryPoint::Pattern,
+        ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
+        ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
     };
-    mbe::token_tree_to_syntax_node(tt, entry_point, edition)
+    syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition)
 }
 
 fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {

@@ -2,10 +2,10 @@
 
 use base_db::CrateId;
 use intern::sym;
-use mbe::DocCommentDesugarMode;
 use span::{Edition, MacroCallId, Span, SyntaxContextId};
 use stdx::TupleExt;
 use syntax::{ast, AstNode};
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
@@ -112,7 +112,7 @@ impl DeclarativeMacroExpander {
             ast::Macro::MacroRules(macro_rules) => (
                 match macro_rules.token_tree() {
                     Some(arg) => {
-                        let tt = mbe::syntax_node_to_token_tree(
+                        let tt = syntax_bridge::syntax_node_to_token_tree(
                             arg.syntax(),
                             map.as_ref(),
                             map.span_for_range(
@@ -135,14 +135,14 @@ impl DeclarativeMacroExpander {
                 let span =
                     map.span_for_range(macro_def.macro_token().unwrap().text_range());
                 let args = macro_def.args().map(|args| {
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         args.syntax(),
                         map.as_ref(),
                         span,
                         DocCommentDesugarMode::Mbe,
                     )
                 });
-                let body = mbe::syntax_node_to_token_tree(
+                let body = syntax_bridge::syntax_node_to_token_tree(
                     body.syntax(),
                     map.as_ref(),
                     span,

@@ -19,9 +19,9 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use mbe::DocCommentDesugarMode;
 use span::SyntaxContextId;
 use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
@@ -82,7 +82,7 @@ pub fn expand_eager_macro_input(
         return ExpandResult { value: None, err };
     };
 
-    let mut subtree = mbe::syntax_node_to_token_tree(
+    let mut subtree = syntax_bridge::syntax_node_to_token_tree(
         &expanded_eager_input,
         arg_map,
         span,

@@ -2,7 +2,6 @@
 //! fix up syntax errors in the code we're passing to them.
 
 use intern::sym;
-use mbe::DocCommentDesugarMode;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
 use span::{
@@ -14,6 +13,7 @@ use syntax::{
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 use tt::Spacing;
 
@@ -76,7 +76,8 @@ pub(crate) fn fixup_syntax(
         if can_handle_error(&node) && has_error_to_handle(&node) {
             remove.insert(node.clone().into());
             // the node contains an error node, we have to completely replace it by something valid
-            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site, mode);
+            let original_tree =
+                syntax_bridge::syntax_node_to_token_tree(&node, span_map, call_site, mode);
             let idx = original.len() as u32;
             original.push(original_tree);
             let span = span_map.span_for_range(node_range);
@@ -434,9 +435,9 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
 #[cfg(test)]
 mod tests {
     use expect_test::{expect, Expect};
-    use mbe::DocCommentDesugarMode;
     use span::{Edition, EditionedFileId, FileId};
     use syntax::TextRange;
+    use syntax_bridge::DocCommentDesugarMode;
     use triomphe::Arc;
 
     use crate::{
@@ -483,7 +484,7 @@ mod tests {
             span_map.span_for_range(TextRange::empty(0.into())),
             DocCommentDesugarMode::Mbe,
         );
-        let mut tt = mbe::syntax_node_to_token_tree_modified(
+        let mut tt = syntax_bridge::syntax_node_to_token_tree_modified(
             &parsed.syntax_node(),
             span_map.as_ref(),
             fixups.append,
@@ -498,9 +499,9 @@ mod tests {
         expect.assert_eq(&actual);
 
         // the fixed-up tree should be syntactically valid
-        let (parse, _) = mbe::token_tree_to_syntax_node(
+        let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
             &tt,
-            ::mbe::TopEntryPoint::MacroItems,
+            syntax_bridge::TopEntryPoint::MacroItems,
             parser::Edition::CURRENT,
         );
         assert!(
@@ -513,7 +514,7 @@ mod tests {
 
         // the fixed-up + reversed version should be equivalent to the original input
         // modulo token IDs and `Punct`s' spacing.
-        let original_as_tt = mbe::syntax_node_to_token_tree(
+        let original_as_tt = syntax_bridge::syntax_node_to_token_tree(
             &parsed.syntax_node(),
             span_map.as_ref(),
             span_map.span_for_range(TextRange::empty(0.into())),

@@ -28,13 +28,13 @@ pub enum SpanMapRef<'a> {
     RealSpanMap(&'a RealSpanMap),
 }
 
-impl mbe::SpanMapper<Span> for SpanMap {
+impl syntax_bridge::SpanMapper<Span> for SpanMap {
     fn span_for(&self, range: TextRange) -> Span {
         self.span_for_range(range)
     }
 }
 
-impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
+impl syntax_bridge::SpanMapper<Span> for SpanMapRef<'_> {
     fn span_for(&self, range: TextRange) -> Span {
         self.span_for_range(range)
     }

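Since `SpanMapper` now lives in `syntax-bridge`, any custom span map implements the trait against the new path. A minimal sketch, assuming only `span_for` is required, as the hunk above suggests; `ConstantSpanMap` is hypothetical, purely to show the shape (`SpanMap`/`SpanMapRef` above are the real implementors):

```rust
use span::Span;
use syntax::TextRange;

/// Hypothetical map that assigns every range the same span,
/// purely to illustrate the trait contract.
struct ConstantSpanMap(Span);

impl syntax_bridge::SpanMapper<Span> for ConstantSpanMap {
    fn span_for(&self, _range: TextRange) -> Span {
        self.0
    }
}
```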
@@ -25,6 +25,7 @@ tt.workspace = true
 stdx.workspace = true
 span.workspace = true
 intern.workspace = true
+syntax-bridge.workspace = true
 
 [dev-dependencies]
 test-utils.workspace = true

@@ -7,11 +7,15 @@ use syntax::{
     ast::{self, HasName},
     AstNode,
 };
+use syntax_bridge::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
 use test_utils::{bench, bench_fixture, skip_slow_tests};
 
 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro, DocCommentDesugarMode, DummyTestSpanMap, DUMMY,
+    DeclarativeMacro,
 };
 
 #[test]

@@ -8,13 +8,12 @@
 
 mod expander;
 mod parser;
-mod syntax_bridge;
-mod to_parser_input;
 
 #[cfg(test)]
 mod benchmark;
 
 use span::{Edition, Span, SyntaxContextId};
+use syntax_bridge::to_parser_input;
 use tt::iter::TtIter;
 use tt::DelimSpan;
@@ -23,18 +22,8 @@ use std::sync::Arc;
 
 use crate::parser::{MetaTemplate, MetaVarKind, Op};
 
-// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
-pub use ::parser::TopEntryPoint;
 pub use tt::{Delimiter, DelimiterKind, Punct};
 
-pub use crate::syntax_bridge::{
-    desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree,
-    parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified,
-    token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
-};
-
-pub use crate::syntax_bridge::dummy_test_span_utils::*;
-
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum ParseError {
     UnexpectedToken(Box<str>),
@@ -361,7 +350,7 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
     }
 }
 
-fn expect_fragment(
+pub fn expect_fragment(
     tt_iter: &mut TtIter<'_, Span>,
     entry_point: ::parser::PrefixEntryPoint,
     edition: ::parser::Edition,
@@ -369,7 +358,7 @@ fn expect_fragment(
 ) -> ExpandResult<Option<tt::TokenTree<Span>>> {
     use ::parser;
     let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
-    let parser_input = to_parser_input::to_parser_input(edition, &buffer);
+    let parser_input = to_parser_input(edition, &buffer);
     let tree_traversal = entry_point.parse(&parser_input, edition);
     let mut cursor = buffer.begin();
     let mut error = false;

@@ -19,23 +19,24 @@ snap.workspace = true
 
 stdx.workspace = true
 tt.workspace = true
-mbe.workspace = true
+syntax-bridge.workspace = true
 paths.workspace = true
 base-db.workspace = true
 span.workspace = true
 proc-macro-api.workspace = true
-ra-ap-rustc_lexer.workspace = true
 intern.workspace = true
 
+ra-ap-rustc_lexer.workspace = true
+
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test.workspace = true
 
 # used as proc macro test targets
 proc-macro-test.path = "./proc-macro-test"
 
 [features]
 sysroot-abi = []
-in-rust-tree = ["mbe/in-rust-tree", "tt/in-rust-tree","sysroot-abi"]
+in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"]
 
 [lints]
 workspace = true

@@ -126,9 +126,12 @@ pub(super) mod token_stream {
     /// change these errors into `LexError`s later.
     impl<S: Copy + fmt::Debug> TokenStream<S> {
         pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
-            let subtree =
-                mbe::parse_to_token_tree_static_span(span::Edition::CURRENT_FIXME, call_site, src)
-                    .ok_or("lexing error")?;
+            let subtree = syntax_bridge::parse_to_token_tree_static_span(
+                span::Edition::CURRENT_FIXME,
+                call_site,
+                src,
+            )
+            .ok_or("lexing error")?;
 
             Ok(TokenStream::with_subtree(subtree))
         }

@@ -9,7 +9,8 @@ use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};
 
 fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
     crate::server_impl::TokenStream::with_subtree(
-        mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(),
+        syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
+            .unwrap(),
     )
 }
 
@@ -19,7 +20,7 @@ fn parse_string_spanned(
     src: &str,
 ) -> crate::server_impl::TokenStream<Span> {
     crate::server_impl::TokenStream::with_subtree(
-        mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
+        syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
     )
 }
 

@@ -82,7 +82,7 @@ xshell.workspace = true
 
 test-utils.workspace = true
 test-fixture.workspace = true
-mbe.workspace = true
+syntax-bridge.workspace = true
 
 [features]
 jemalloc = ["jemallocator", "profile/jemalloc"]

@@ -263,11 +263,14 @@ mod tests {
     use super::*;
 
     use ide::Edition;
-    use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
     use syntax::{
         ast::{self, AstNode},
         SmolStr,
     };
+    use syntax_bridge::{
+        dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+        syntax_node_to_token_tree, DocCommentDesugarMode,
+    };
 
     fn check(cfg: &str, expected_features: &[&str]) {
         let cfg_expr = {

@@ -79,6 +79,10 @@ impl SyntaxContextId {
 #[derive(Copy, Clone, Hash, PartialEq, Eq)]
 pub struct SyntaxContextData {
     /// Invariant: Only [`SyntaxContextId::ROOT`] has a [`None`] outer expansion.
+    // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of
+    // MacroCallId is reserved anyways so we can do bit tagging here just fine.
+    // The bigger issue is that that will cause interning to now create completely separate chains
+    // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent.
     pub outer_expn: Option<MacroCallId>,
     pub outer_transparency: Transparency,
     pub parent: SyntaxContextId,

crates/syntax-bridge/Cargo.toml (new file, 33 lines)
@@ -0,0 +1,33 @@
+[package]
+name = "syntax-bridge"
+version = "0.0.0"
+description = "TBD"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash.workspace = true
+tracing.workspace = true
+
+# local deps
+syntax.workspace = true
+parser.workspace = true
+tt.workspace = true
+stdx.workspace = true
+span.workspace = true
+intern.workspace = true
+
+[dev-dependencies]
+test-utils.workspace = true
+
+[features]
+in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"]
+
+[lints]
+workspace = true

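With this manifest, a workspace crate can pull in the bridge via `syntax-bridge.workspace = true` and convert both ways between syntax trees and token trees. A minimal sketch of a round trip using the entry points visible in this diff (`syntax_node_to_token_tree`, `token_tree_to_syntax_node`, the re-exported `TopEntryPoint`, and the now-public `dummy_test_span_utils`); it additionally assumes the `syntax`, `span`, and `parser` crates are available, as they are within this workspace:

```rust
use syntax::{ast, AstNode};
use syntax_bridge::{
    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
    syntax_node_to_token_tree, token_tree_to_syntax_node, DocCommentDesugarMode, TopEntryPoint,
};

fn main() {
    let file = ast::SourceFile::parse("fn main() {}", span::Edition::CURRENT).ok().unwrap();

    // Syntax tree -> token tree, desugaring doc comments the way
    // mbe expansion expects.
    let subtree = syntax_node_to_token_tree(
        file.syntax(),
        DummyTestSpanMap,
        DUMMY,
        DocCommentDesugarMode::Mbe,
    );

    // Token tree -> syntax tree again, parsed as top-level items.
    let (parse, _span_map) =
        token_tree_to_syntax_node(&subtree, TopEntryPoint::MacroItems, parser::Edition::CURRENT);
    assert!(parse.errors().is_empty());
}
```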
@@ -14,11 +14,13 @@ use syntax::{
 };
 use tt::{
     buffer::{Cursor, TokenBuffer},
-    iter::TtIter,
     token_to_literal,
 };
 
-use crate::to_parser_input::to_parser_input;
+mod to_parser_input;
+pub use to_parser_input::to_parser_input;
+// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
+pub use ::parser::TopEntryPoint;
 
 #[cfg(test)]
 mod tests;
@@ -43,7 +45,7 @@ impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM {
 }
 
 /// Dummy things for testing where spans don't matter.
-pub(crate) mod dummy_test_span_utils {
+pub mod dummy_test_span_utils {
 
     use span::{Span, SyntaxContextId};
 
@@ -211,50 +213,6 @@ where
     Some(convert_tokens(&mut conv))
 }
 
-/// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep(
-    tt: &tt::Subtree<span::Span>,
-    sep: char,
-    span: span::Span,
-    edition: Edition,
-) -> Vec<tt::Subtree<span::Span>> {
-    if tt.token_trees.is_empty() {
-        return Vec::new();
-    }
-
-    let mut iter = TtIter::new(tt);
-    let mut res = Vec::new();
-
-    while iter.peek_n(0).is_some() {
-        let expanded = crate::expect_fragment(
-            &mut iter,
-            parser::PrefixEntryPoint::Expr,
-            edition,
-            tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
-        );
-
-        res.push(match expanded.value {
-            None => break,
-            Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }),
-        });
-
-        let mut fork = iter.clone();
-        if fork.expect_char(sep).is_err() {
-            break;
-        }
-        iter = fork;
-    }
-
-    if iter.peek_n(0).is_some() {
-        res.push(tt::Subtree {
-            delimiter: tt::Delimiter::invisible_spanned(span),
-            token_trees: iter.cloned().collect(),
-        });
-    }
-
-    res
-}
-
 fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
 where
     C: TokenConverter<S>,
@@ -479,7 +437,6 @@ fn convert_doc_comment<S: Copy>(
     span: S,
     mode: DocCommentDesugarMode,
 ) -> Option<Vec<tt::TokenTree<S>>> {
-    cov_mark::hit!(test_meta_doc_comments);
     let comment = ast::Comment::cast(token.clone())?;
     let doc = comment.kind().doc?;

crates/syntax-bridge/src/tests.rs (new file, 104 lines)
@@ -0,0 +1,104 @@
+use rustc_hash::FxHashMap;
+use span::Span;
+use syntax::{ast, AstNode};
+use test_utils::extract_annotations;
+use tt::{
+    buffer::{TokenBuffer, TokenTreeRef},
+    Leaf, Punct, Spacing,
+};
+
+use crate::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
+
+fn check_punct_spacing(fixture: &str) {
+    let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap();
+    let subtree = syntax_node_to_token_tree(
+        source_file.syntax(),
+        DummyTestSpanMap,
+        DUMMY,
+        DocCommentDesugarMode::Mbe,
+    );
+    let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
+        .into_iter()
+        .map(|(range, annotation)| {
+            let spacing = match annotation.as_str() {
+                "Alone" => Spacing::Alone,
+                "Joint" => Spacing::Joint,
+                a => panic!("unknown annotation: {a}"),
+            };
+            (range, spacing)
+        })
+        .collect();
+
+    let buf = TokenBuffer::from_subtree(&subtree);
+    let mut cursor = buf.begin();
+    while !cursor.eof() {
+        while let Some(token_tree) = cursor.token_tree() {
+            if let TokenTreeRef::Leaf(
+                Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. }),
+                _,
+            ) = token_tree
+            {
+                if let Some(expected) = annotations.remove(range) {
+                    assert_eq!(expected, *spacing);
+                }
+            }
+            cursor = cursor.bump_subtree();
+        }
+        cursor = cursor.bump();
+    }
+
+    assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}");
+}
+
+#[test]
+fn punct_spacing() {
+    check_punct_spacing(
+        r#"
+fn main() {
+    0+0;
+   //^ Alone
+    0+(0);
+   //^ Alone
+    0<=0;
+   //^ Joint
+   // ^ Alone
+    0<=(0);
+   // ^ Alone
+    a=0;
+   //^ Alone
+    a=(0);
+   //^ Alone
+    a+=0;
+   //^ Joint
+   // ^ Alone
+    a+=(0);
+   // ^ Alone
+    a&&b;
+   //^ Joint
+   // ^ Alone
+    a&&(b);
+   // ^ Alone
+    foo::bar;
+   //  ^ Joint
+   //   ^ Alone
+    use foo::{bar,baz,};
+   //         ^ Alone
+   //             ^ Alone
+   //              ^ Alone
+    struct Struct<'a> {};
+   //            ^ Joint
+   //             ^ Joint
+    Struct::<0>;
+   //      ^ Alone
+    Struct::<{0}>;
+   //      ^ Alone
+    ;;
+  //^ Joint
+  // ^ Alone
+}
+"#,
+    );
+}

@@ -8,7 +8,7 @@ use syntax::{SyntaxKind, SyntaxKind::*, T};
 
 use tt::buffer::TokenBuffer;
 
-pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(
+pub fn to_parser_input<S: Copy + fmt::Debug>(
     edition: Edition,
     buffer: &TokenBuffer<'_, S>,
 ) -> parser::Input {