diff --git a/Cargo.lock b/Cargo.lock index b98a1195d8..99acda7c0b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -148,10 +148,10 @@ dependencies = [ "derive_arbitrary", "expect-test", "intern", - "mbe", "oorandom", "rustc-hash", "syntax", + "syntax-bridge", "tt", ] @@ -554,6 +554,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "test-fixture", "test-utils", "tracing", @@ -582,6 +583,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "tracing", "triomphe", "tt", @@ -1056,6 +1058,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "test-utils", "tracing", "tt", @@ -1350,7 +1353,6 @@ dependencies = [ "expect-test", "intern", "libloading", - "mbe", "memmap2", "object 0.33.0", "paths", @@ -1360,6 +1362,7 @@ dependencies = [ "snap", "span", "stdx", + "syntax-bridge", "tt", ] @@ -1665,7 +1668,6 @@ dependencies = [ "load-cargo", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", - "mbe", "memchr", "mimalloc", "nohash-hasher", @@ -1685,6 +1687,7 @@ dependencies = [ "serde_json", "stdx", "syntax", + "syntax-bridge", "test-fixture", "test-utils", "tikv-jemallocator", @@ -1967,6 +1970,21 @@ dependencies = [ "triomphe", ] +[[package]] +name = "syntax-bridge" +version = "0.0.0" +dependencies = [ + "intern", + "parser", + "rustc-hash", + "span", + "stdx", + "syntax", + "test-utils", + "tracing", + "tt", +] + [[package]] name = "test-fixture" version = "0.0.0" diff --git a/Cargo.toml b/Cargo.toml index c2f601a91b..d56d941dbf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -77,6 +77,7 @@ salsa = { path = "./crates/salsa", version = "0.0.0" } span = { path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } +syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" } text-edit = { path = "./crates/text-edit", version = "0.0.0" } toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index faf93f62c6..0076b67476 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -28,7 +28,7 @@ arbitrary = "1.3.2" derive_arbitrary = "1.3.2" # local deps -mbe.workspace = true +syntax-bridge.workspace = true syntax.workspace = true [lints] diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index 597023a792..6d87d83ad9 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -1,8 +1,11 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; use intern::Symbol; -use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY}; use syntax::{ast, AstNode, Edition}; +use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 8ac2d00313..7c58dd221d 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -52,7 +52,7 @@ expect-test.workspace = true # local deps test-utils.workspace = true test-fixture.workspace = true - +syntax-bridge.workspace = true [features] in-rust-tree = ["hir-expand/in-rust-tree"] diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index ba88495e14..198dc93f6b 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -657,9 +657,9 @@ mod tests { use triomphe::Arc; use 
hir_expand::span_map::{RealSpanMap, SpanMap}; - use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode}; use span::FileId; use syntax::{ast, AstNode, TextRange}; + use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; use crate::attr::{DocAtom, DocExpr}; diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 64b37d2d06..fc1460870c 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1201,7 +1201,6 @@ macro_rules! m { #[test] fn test_meta_doc_comments() { - cov_mark::check!(test_meta_doc_comments); check( r#" macro_rules! m { diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index d34f0afc3e..b430e2cefb 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -317,9 +317,9 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander { _: Span, _: Span, ) -> Result { - let (parse, _) = ::mbe::token_tree_to_syntax_node( + let (parse, _) = syntax_bridge::token_tree_to_syntax_node( subtree, - ::mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, span::Edition::CURRENT, ); if parse.errors().is_empty() { diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index ca05618aec..ff4b468f6c 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -33,6 +33,7 @@ mbe.workspace = true limit.workspace = true span.workspace = true parser.workspace = true +syntax-bridge.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 777e415418..79cfeb4cf1 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -6,14 +6,12 @@ use cfg::CfgExpr; use either::Either; use intern::{sym, Interned, Symbol}; -use mbe::{ - desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode, - Punct, -}; +use mbe::{DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; use span::{Span, SyntaxContextId}; use syntax::unescape; use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode}; +use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode}; use triomphe::ThinArc; use crate::name::Name; diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs index f560d3bfd1..7d3e8deaf0 100644 --- a/crates/hir-expand/src/builtin/derive_macro.rs +++ b/crates/hir-expand/src/builtin/derive_macro.rs @@ -2,10 +2,10 @@ use intern::sym; use itertools::izip; -use mbe::DocCommentDesugarMode; use rustc_hash::FxHashSet; use span::{MacroCallId, Span}; use stdx::never; +use syntax_bridge::DocCommentDesugarMode; use tracing::debug; use crate::{ @@ -209,9 +209,9 @@ struct BasicAdtInfo { } fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result { - let (parsed, tm) = &mbe::token_tree_to_syntax_node( + let (parsed, tm) = &syntax_bridge::token_tree_to_syntax_node( tt, - mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, parser::Edition::CURRENT_FIXME, ); let macro_items = ast::MacroItems::cast(parsed.syntax_node()) @@ -268,7 +268,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( it.syntax(), tm, call_site, @@ -282,7 +282,7 @@ fn parse_adt(tt: 
&tt::Subtree, call_site: Span) -> Result it.type_bound_list().map(|it| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( it.syntax(), tm, call_site, @@ -295,7 +295,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result Result Result ExpandResult { let call_site_span = span_with_call_site_ctxt(db, span, id); - let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME); + + let mut iter = ::tt::iter::TtIter::new(tt); + + let cond = expect_fragment( + &mut iter, + parser::PrefixEntryPoint::Expr, + db.crate_graph()[id.lookup(db).krate].edition, + tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close }, + ); + _ = iter.expect_char(','); + let rest = iter.as_slice(); + let dollar_crate = dollar_crate(span); - let expanded = match &*args { - [cond, panic_args @ ..] => { - let comma = tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(call_site_span), - token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { - char: ',', - spacing: tt::Spacing::Alone, - span: call_site_span, - }))]), - }; - let cond = cond.clone(); - let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); + let expanded = match cond.value { + Some(cond) => { + let panic_args = rest.iter().cloned(); let mac = if use_panic_2021(db, span) { quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) } } else { @@ -253,10 +256,13 @@ fn assert_expand( } }} } - [] => quote! {call_site_span =>{}}, + None => quote! {call_site_span =>{}}, }; - ExpandResult::ok(expanded) + match cond.err { + Some(err) => ExpandResult::new(expanded, err.into()), + None => ExpandResult::ok(expanded), + } } fn file_expand( diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 01a35660a9..584f9631e3 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -3,10 +3,11 @@ use base_db::{salsa, CrateId, SourceDatabase}; use either::Either; use limit::Limit; -use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex}; +use mbe::MatchedArmIndex; use rustc_hash::FxHashSet; use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId}; use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T}; +use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; use triomphe::Arc; use crate::{ @@ -165,7 +166,7 @@ pub fn expand_speculative( // Build the subtree and token mapping for the speculative args let (mut tt, undo_info) = match loc.kind { MacroCallKind::FnLike { .. } => ( - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( speculative_args, span_map, span, @@ -178,7 +179,7 @@ pub fn expand_speculative( SyntaxFixupUndoInfo::NONE, ), MacroCallKind::Attr { .. 
} if loc.def.is_attribute_derive() => ( - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( speculative_args, span_map, span, @@ -213,7 +214,7 @@ pub fn expand_speculative( fixups.remove.extend(censor_cfg); ( - mbe::syntax_node_to_token_tree_modified( + syntax_bridge::syntax_node_to_token_tree_modified( speculative_args, span_map, fixups.append, @@ -459,7 +460,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { return dummy_tt(kind); } - let mut tt = mbe::syntax_node_to_token_tree( + let mut tt = syntax_bridge::syntax_node_to_token_tree( tt.syntax(), map.as_ref(), span, @@ -515,7 +516,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { fixups.remove.extend(censor_cfg); ( - mbe::syntax_node_to_token_tree_modified( + syntax_bridge::syntax_node_to_token_tree_modified( syntax, map, fixups.append, @@ -720,13 +721,13 @@ fn token_tree_to_syntax_node( edition: parser::Edition, ) -> (Parse, ExpansionSpanMap) { let entry_point = match expand_to { - ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts, - ExpandTo::Items => mbe::TopEntryPoint::MacroItems, - ExpandTo::Pattern => mbe::TopEntryPoint::Pattern, - ExpandTo::Type => mbe::TopEntryPoint::Type, - ExpandTo::Expr => mbe::TopEntryPoint::Expr, + ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts, + ExpandTo::Items => syntax_bridge::TopEntryPoint::MacroItems, + ExpandTo::Pattern => syntax_bridge::TopEntryPoint::Pattern, + ExpandTo::Type => syntax_bridge::TopEntryPoint::Type, + ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr, }; - mbe::token_tree_to_syntax_node(tt, entry_point, edition) + syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition) } fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> { diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 48851af3fd..b1a6eed2fb 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -2,10 +2,10 @@ use base_db::CrateId; use intern::sym; -use mbe::DocCommentDesugarMode; use span::{Edition, MacroCallId, Span, SyntaxContextId}; use stdx::TupleExt; use syntax::{ast, AstNode}; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -112,7 +112,7 @@ impl DeclarativeMacroExpander { ast::Macro::MacroRules(macro_rules) => ( match macro_rules.token_tree() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( + let tt = syntax_bridge::syntax_node_to_token_tree( arg.syntax(), map.as_ref(), map.span_for_range( @@ -135,14 +135,14 @@ impl DeclarativeMacroExpander { let span = map.span_for_range(macro_def.macro_token().unwrap().text_range()); let args = macro_def.args().map(|args| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( args.syntax(), map.as_ref(), span, DocCommentDesugarMode::Mbe, ) }); - let body = mbe::syntax_node_to_token_tree( + let body = syntax_bridge::syntax_node_to_token_tree( body.syntax(), map.as_ref(), span, diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 5385b44532..3528b2dde7 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -19,9 +19,9 @@ //! //! 
See the full discussion : use base_db::CrateId; -use mbe::DocCommentDesugarMode; use span::SyntaxContextId; use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -82,7 +82,7 @@ pub fn expand_eager_macro_input( return ExpandResult { value: None, err }; }; - let mut subtree = mbe::syntax_node_to_token_tree( + let mut subtree = syntax_bridge::syntax_node_to_token_tree( &expanded_eager_input, arg_map, span, diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 71579d2f87..b6d5828da9 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -2,7 +2,6 @@ //! fix up syntax errors in the code we're passing to them. use intern::sym; -use mbe::DocCommentDesugarMode; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::SmallVec; use span::{ @@ -14,6 +13,7 @@ use syntax::{ ast::{self, AstNode, HasLoopBody}, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, }; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use tt::Spacing; @@ -76,7 +76,8 @@ pub(crate) fn fixup_syntax( if can_handle_error(&node) && has_error_to_handle(&node) { remove.insert(node.clone().into()); // the node contains an error node, we have to completely replace it by something valid - let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site, mode); + let original_tree = + syntax_bridge::syntax_node_to_token_tree(&node, span_map, call_site, mode); let idx = original.len() as u32; original.push(original_tree); let span = span_map.span_for_range(node_range); @@ -434,9 +435,9 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { #[cfg(test)] mod tests { use expect_test::{expect, Expect}; - use mbe::DocCommentDesugarMode; use span::{Edition, EditionedFileId, FileId}; use syntax::TextRange; + use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -483,7 +484,7 @@ mod tests { span_map.span_for_range(TextRange::empty(0.into())), DocCommentDesugarMode::Mbe, ); - let mut tt = mbe::syntax_node_to_token_tree_modified( + let mut tt = syntax_bridge::syntax_node_to_token_tree_modified( &parsed.syntax_node(), span_map.as_ref(), fixups.append, @@ -498,9 +499,9 @@ mod tests { expect.assert_eq(&actual); // the fixed-up tree should be syntactically valid - let (parse, _) = mbe::token_tree_to_syntax_node( + let (parse, _) = syntax_bridge::token_tree_to_syntax_node( &tt, - ::mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, parser::Edition::CURRENT, ); assert!( @@ -513,7 +514,7 @@ mod tests { // the fixed-up + reversed version should be equivalent to the original input // modulo token IDs and `Punct`s' spacing. 
- let original_as_tt = mbe::syntax_node_to_token_tree( + let original_as_tt = syntax_bridge::syntax_node_to_token_tree( &parsed.syntax_node(), span_map.as_ref(), span_map.span_for_range(TextRange::empty(0.into())), diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs index 3be88ee9da..740c27b89c 100644 --- a/crates/hir-expand/src/span_map.rs +++ b/crates/hir-expand/src/span_map.rs @@ -28,13 +28,13 @@ pub enum SpanMapRef<'a> { RealSpanMap(&'a RealSpanMap), } -impl mbe::SpanMapper for SpanMap { +impl syntax_bridge::SpanMapper for SpanMap { fn span_for(&self, range: TextRange) -> Span { self.span_for_range(range) } } -impl mbe::SpanMapper for SpanMapRef<'_> { +impl syntax_bridge::SpanMapper for SpanMapRef<'_> { fn span_for(&self, range: TextRange) -> Span { self.span_for_range(range) } diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml index 57834623e8..e441c0ec3d 100644 --- a/crates/mbe/Cargo.toml +++ b/crates/mbe/Cargo.toml @@ -25,6 +25,7 @@ tt.workspace = true stdx.workspace = true span.workspace = true intern.workspace = true +syntax-bridge.workspace = true [dev-dependencies] test-utils.workspace = true diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index b6db4d2e76..43604eb232 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -7,11 +7,15 @@ use syntax::{ ast::{self, HasName}, AstNode, }; +use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; use test_utils::{bench, bench_fixture, skip_slow_tests}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, - syntax_node_to_token_tree, DeclarativeMacro, DocCommentDesugarMode, DummyTestSpanMap, DUMMY, + DeclarativeMacro, }; #[test] diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 568490d573..88785537c7 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -8,13 +8,12 @@ mod expander; mod parser; -mod syntax_bridge; -mod to_parser_input; #[cfg(test)] mod benchmark; use span::{Edition, Span, SyntaxContextId}; +use syntax_bridge::to_parser_input; use tt::iter::TtIter; use tt::DelimSpan; @@ -23,18 +22,8 @@ use std::sync::Arc; use crate::parser::{MetaTemplate, MetaVarKind, Op}; -// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces -pub use ::parser::TopEntryPoint; pub use tt::{Delimiter, DelimiterKind, Punct}; -pub use crate::syntax_bridge::{ - desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree, - parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified, - token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper, -}; - -pub use crate::syntax_bridge::dummy_test_span_utils::*; - #[derive(Debug, PartialEq, Eq, Clone)] pub enum ParseError { UnexpectedToken(Box), @@ -361,7 +350,7 @@ impl From> for ValueResult { } } -fn expect_fragment( +pub fn expect_fragment( tt_iter: &mut TtIter<'_, Span>, entry_point: ::parser::PrefixEntryPoint, edition: ::parser::Edition, @@ -369,7 +358,7 @@ fn expect_fragment( ) -> ExpandResult>> { use ::parser; let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice()); - let parser_input = to_parser_input::to_parser_input(edition, &buffer); + let parser_input = to_parser_input(edition, &buffer); let tree_traversal = entry_point.parse(&parser_input, edition); let mut cursor = buffer.begin(); let mut error = false; diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index 673b5bd78a..b3221443bd 100644 --- 
a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -19,23 +19,24 @@ snap.workspace = true stdx.workspace = true tt.workspace = true -mbe.workspace = true +syntax-bridge.workspace = true paths.workspace = true base-db.workspace = true span.workspace = true proc-macro-api.workspace = true -ra-ap-rustc_lexer.workspace = true intern.workspace = true +ra-ap-rustc_lexer.workspace = true + [dev-dependencies] -expect-test = "1.4.0" +expect-test.workspace = true # used as proc macro test targets proc-macro-test.path = "./proc-macro-test" [features] sysroot-abi = [] -in-rust-tree = ["mbe/in-rust-tree", "tt/in-rust-tree","sysroot-abi"] +in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"] [lints] workspace = true diff --git a/crates/proc-macro-srv/src/server_impl/token_stream.rs b/crates/proc-macro-srv/src/server_impl/token_stream.rs index cdf93fa425..4d8d496418 100644 --- a/crates/proc-macro-srv/src/server_impl/token_stream.rs +++ b/crates/proc-macro-srv/src/server_impl/token_stream.rs @@ -126,9 +126,12 @@ pub(super) mod token_stream { /// change these errors into `LexError`s later. impl TokenStream { pub(crate) fn from_str(src: &str, call_site: S) -> Result, String> { - let subtree = - mbe::parse_to_token_tree_static_span(span::Edition::CURRENT_FIXME, call_site, src) - .ok_or("lexing error")?; + let subtree = syntax_bridge::parse_to_token_tree_static_span( + span::Edition::CURRENT_FIXME, + call_site, + src, + ) + .ok_or("lexing error")?; Ok(TokenStream::with_subtree(subtree)) } diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs index 70eff51cad..d63e04ae07 100644 --- a/crates/proc-macro-srv/src/tests/utils.rs +++ b/crates/proc-macro-srv/src/tests/utils.rs @@ -9,7 +9,8 @@ use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv}; fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree( - mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(), + syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src) + .unwrap(), ) } @@ -19,7 +20,7 @@ fn parse_string_spanned( src: &str, ) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree( - mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(), + syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(), ) } diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index bc1b13a649..c07f9ee506 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -82,7 +82,7 @@ xshell.workspace = true test-utils.workspace = true test-fixture.workspace = true -mbe.workspace = true +syntax-bridge.workspace = true [features] jemalloc = ["jemallocator", "profile/jemalloc"] diff --git a/crates/rust-analyzer/src/target_spec.rs b/crates/rust-analyzer/src/target_spec.rs index 67e1bad528..965fd415e9 100644 --- a/crates/rust-analyzer/src/target_spec.rs +++ b/crates/rust-analyzer/src/target_spec.rs @@ -263,11 +263,14 @@ mod tests { use super::*; use ide::Edition; - use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY}; use syntax::{ ast::{self, AstNode}, SmolStr, }; + use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, + }; fn check(cfg: &str, expected_features: &[&str]) { let cfg_expr = { diff --git 
a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs index e8c558355c..874480c59f 100644 --- a/crates/span/src/hygiene.rs +++ b/crates/span/src/hygiene.rs @@ -79,6 +79,10 @@ impl SyntaxContextId { #[derive(Copy, Clone, Hash, PartialEq, Eq)] pub struct SyntaxContextData { /// Invariant: Only [`SyntaxContextId::ROOT`] has a [`None`] outer expansion. + // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of + // MacroCallId is reserved anyways so we can do bit tagging here just fine. + // The bigger issue is that that will cause interning to now create completely separate chains + // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent. pub outer_expn: Option, pub outer_transparency: Transparency, pub parent: SyntaxContextId, diff --git a/crates/syntax-bridge/Cargo.toml b/crates/syntax-bridge/Cargo.toml new file mode 100644 index 0000000000..b4f59ae216 --- /dev/null +++ b/crates/syntax-bridge/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "syntax-bridge" +version = "0.0.0" +description = "TBD" + +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true + +[lib] +doctest = false + +[dependencies] +rustc-hash.workspace = true +tracing.workspace = true + +# local deps +syntax.workspace = true +parser.workspace = true +tt.workspace = true +stdx.workspace = true +span.workspace = true +intern.workspace = true + +[dev-dependencies] +test-utils.workspace = true + +[features] +in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"] + +[lints] +workspace = true diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/syntax-bridge/src/lib.rs similarity index 96% rename from crates/mbe/src/syntax_bridge.rs rename to crates/syntax-bridge/src/lib.rs index a29efdd4ef..b0afd245c5 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/syntax-bridge/src/lib.rs @@ -14,11 +14,13 @@ use syntax::{ }; use tt::{ buffer::{Cursor, TokenBuffer}, - iter::TtIter, token_to_literal, }; -use crate::to_parser_input::to_parser_input; +mod to_parser_input; +pub use to_parser_input::to_parser_input; +// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces +pub use ::parser::TopEntryPoint; #[cfg(test)] mod tests; @@ -43,7 +45,7 @@ impl> SpanMapper for &SM { } /// Dummy things for testing where spans don't matter. 
-pub(crate) mod dummy_test_span_utils { +pub mod dummy_test_span_utils { use span::{Span, SyntaxContextId}; @@ -211,50 +213,6 @@ where Some(convert_tokens(&mut conv)) } -/// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep( - tt: &tt::Subtree, - sep: char, - span: span::Span, - edition: Edition, -) -> Vec> { - if tt.token_trees.is_empty() { - return Vec::new(); - } - - let mut iter = TtIter::new(tt); - let mut res = Vec::new(); - - while iter.peek_n(0).is_some() { - let expanded = crate::expect_fragment( - &mut iter, - parser::PrefixEntryPoint::Expr, - edition, - tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close }, - ); - - res.push(match expanded.value { - None => break, - Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }), - }); - - let mut fork = iter.clone(); - if fork.expect_char(sep).is_err() { - break; - } - iter = fork; - } - - if iter.peek_n(0).is_some() { - res.push(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(span), - token_trees: iter.cloned().collect(), - }); - } - - res -} - fn convert_tokens(conv: &mut C) -> tt::Subtree where C: TokenConverter, @@ -479,7 +437,6 @@ fn convert_doc_comment( span: S, mode: DocCommentDesugarMode, ) -> Option>> { - cov_mark::hit!(test_meta_doc_comments); let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; diff --git a/crates/syntax-bridge/src/tests.rs b/crates/syntax-bridge/src/tests.rs new file mode 100644 index 0000000000..7b8e3f2b49 --- /dev/null +++ b/crates/syntax-bridge/src/tests.rs @@ -0,0 +1,104 @@ +use rustc_hash::FxHashMap; +use span::Span; +use syntax::{ast, AstNode}; +use test_utils::extract_annotations; +use tt::{ + buffer::{TokenBuffer, TokenTreeRef}, + Leaf, Punct, Spacing, +}; + +use crate::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; + +fn check_punct_spacing(fixture: &str) { + let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap(); + let subtree = syntax_node_to_token_tree( + source_file.syntax(), + DummyTestSpanMap, + DUMMY, + DocCommentDesugarMode::Mbe, + ); + let mut annotations: FxHashMap<_, _> = extract_annotations(fixture) + .into_iter() + .map(|(range, annotation)| { + let spacing = match annotation.as_str() { + "Alone" => Spacing::Alone, + "Joint" => Spacing::Joint, + a => panic!("unknown annotation: {a}"), + }; + (range, spacing) + }) + .collect(); + + let buf = TokenBuffer::from_subtree(&subtree); + let mut cursor = buf.begin(); + while !cursor.eof() { + while let Some(token_tree) = cursor.token_tree() { + if let TokenTreeRef::Leaf( + Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. 
}), + _, + ) = token_tree + { + if let Some(expected) = annotations.remove(range) { + assert_eq!(expected, *spacing); + } + } + cursor = cursor.bump_subtree(); + } + cursor = cursor.bump(); + } + + assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}"); +} + +#[test] +fn punct_spacing() { + check_punct_spacing( + r#" +fn main() { + 0+0; + //^ Alone + 0+(0); + //^ Alone + 0<=0; + //^ Joint + // ^ Alone + 0<=(0); + // ^ Alone + a=0; + //^ Alone + a=(0); + //^ Alone + a+=0; + //^ Joint + // ^ Alone + a+=(0); + // ^ Alone + a&&b; + //^ Joint + // ^ Alone + a&&(b); + // ^ Alone + foo::bar; + // ^ Joint + // ^ Alone + use foo::{bar,baz,}; + // ^ Alone + // ^ Alone + // ^ Alone + struct Struct<'a> {}; + // ^ Joint + // ^ Joint + Struct::<0>; + // ^ Alone + Struct::<{0}>; + // ^ Alone + ;; + //^ Joint + // ^ Alone +} + "#, + ); +} diff --git a/crates/mbe/src/to_parser_input.rs b/crates/syntax-bridge/src/to_parser_input.rs similarity index 98% rename from crates/mbe/src/to_parser_input.rs rename to crates/syntax-bridge/src/to_parser_input.rs index c35b28527a..2c54899268 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/syntax-bridge/src/to_parser_input.rs @@ -8,7 +8,7 @@ use syntax::{SyntaxKind, SyntaxKind::*, T}; use tt::buffer::TokenBuffer; -pub(crate) fn to_parser_input( +pub fn to_parser_input( edition: Edition, buffer: &TokenBuffer<'_, S>, ) -> parser::Input {