diff --git a/Cargo.lock b/Cargo.lock
index c82dc0bc9a..90fcb2ac27 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -888,6 +888,7 @@ name = "mbe"
 version = "0.0.0"
 dependencies = [
  "cov-mark",
+ "expect-test",
  "log",
  "parser",
  "profile",
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml
index 0769c436f6..411bb75dbc 100644
--- a/crates/mbe/Cargo.toml
+++ b/crates/mbe/Cargo.toml
@@ -13,6 +13,7 @@ cov-mark = "2.0.0-pre.1"
 rustc-hash = "1.1.0"
 smallvec = "1.2.0"
 log = "0.4.8"
+expect-test = "1.1"
 
 syntax = { path = "../syntax", version = "0.0.0" }
 parser = { path = "../parser", version = "0.0.0" }
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 59fc8f8c77..6aa034abdc 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -15,14 +15,14 @@ use tt::buffer::{Cursor, TokenBuffer};
 use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
 use crate::{ExpandError, TokenMap};
 
-/// Convert the syntax node to a `TokenTree` with the censored nodes excluded (what macro
+/// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
     syntax_node_to_token_tree_censored(node, None)
 }
 
-/// Convert the syntax node to a `TokenTree` with the censored nodes excluded (what macro
-/// will consume).
+/// Convert the syntax node to a `TokenTree` (what macro will consume)
+/// with the censored range excluded.
 pub fn syntax_node_to_token_tree_censored(
     node: &SyntaxNode,
     censor: Option<TextRange>,
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index 705cf5a2b1..c2a1696b3d 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -228,3 +228,44 @@ fn debug_dump_ignore_spaces(node: &syntax::SyntaxNode) -> String {
 
     buf
 }
+
+#[test]
+fn test_node_to_tt_censor() {
+    use syntax::ast::{AttrsOwner, ModuleItemOwner};
+
+    let source = r##"
+#[attr0]
+#[attr1]
+#[attr2]
+struct Struct {
+    field: ()
+}
+"##;
+    let source_file = ast::SourceFile::parse(&source).ok().unwrap();
+    let item = source_file.items().next().unwrap();
+    let attr = item.attrs().nth(1).unwrap();
+
+    let (tt, _) =
+        syntax_node_to_token_tree_censored(item.syntax(), Some(attr.syntax().text_range()));
+    expect_test::expect![[r##"# [attr0] # [attr2] struct Struct {field : ()}"##]]
+        .assert_eq(&tt.to_string());
+
+    let source = r##"
+#[derive(Derive0)]
+#[derive(Derive1)]
+#[derive(Derive2)]
+struct Struct {
+    field: ()
+}
+"##;
+    let source_file = ast::SourceFile::parse(&source).ok().unwrap();
+    let item = source_file.items().next().unwrap();
+    let attr = item.attrs().nth(1).unwrap();
+
+    let (tt, _) = syntax_node_to_token_tree_censored(
+        item.syntax(),
+        Some(attr.syntax().text_range().cover_offset(0.into())),
+    );
+    expect_test::expect![[r##"# [derive (Derive2)] struct Struct {field : ()}"##]]
+        .assert_eq(&tt.to_string());
+}
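
For context, a minimal usage sketch (not part of the diff) of the `syntax_node_to_token_tree_censored` API exercised above: it lowers an item to a token tree while skipping one attribute's text range, roughly what attribute-macro expansion needs. The helper name `item_to_tt_without_attr`, the exact imports, and the `tt` dependency are illustrative assumptions, not code from this change.

use mbe::syntax_node_to_token_tree_censored;
use syntax::ast::{self, AstNode, AttrsOwner};

/// Lower `item` to a token tree, dropping the tokens of its `attr_index`-th attribute
/// (e.g. the attribute that is currently being expanded). Hypothetical helper for illustration.
fn item_to_tt_without_attr(item: &ast::Item, attr_index: usize) -> Option<tt::Subtree> {
    let attr = item.attrs().nth(attr_index)?;
    // `Some(range)` makes the bridge skip every token that falls inside that range.
    let (subtree, _token_map) =
        syntax_node_to_token_tree_censored(item.syntax(), Some(attr.syntax().text_range()));
    Some(subtree)
}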