Auto merge of #17153 - Veykril:doc-comment-desugaring, r=Veykril

fix: Fix doc comment desugaring for proc-macros

Fixes https://github.com/rust-lang/rust-analyzer/issues/16259
commit f216be4a07 by bors, 2024-04-27 11:32:40 +00:00
14 changed files with 228 additions and 52 deletions
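
In short: rustc desugars doc comments into `#[doc = ...]` attributes carrying a plain, escaped string literal before handing them to proc-macros, while rust-analyzer previously always used the MBE-style raw-string form. The two forms are value-equal but token-different, which is what tripped up proc-macros. An illustrative contrast (the comment text is taken from the new test in this commit):

// What a proc-macro now receives from rust-analyzer (escaped string literal,
// matching rustc's desugaring):
#[doc = " doc string \\n with newline"]
struct S;

// What rust-analyzer used to hand it (raw string literal, MBE style):
// #[doc = r" doc string \n with newline"]
// Same string value, different token.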

View file

@@ -1,6 +1,6 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
-use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
+use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
use syntax::{ast, AstNode, Edition};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
@@ -8,7 +8,12 @@ use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
+let tt = syntax_node_to_token_tree(
+tt.syntax(),
+DummyTestSpanMap,
+DUMMY,
+DocCommentDesugarMode::ProcMacro,
+);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}
@@ -16,7 +21,12 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
fn check_dnf(input: &str, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
+let tt = syntax_node_to_token_tree(
+tt.syntax(),
+DummyTestSpanMap,
+DUMMY,
+DocCommentDesugarMode::ProcMacro,
+);
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
@@ -25,7 +35,12 @@ fn check_dnf(input: &str, expect: Expect) {
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
+let tt = syntax_node_to_token_tree(
+tt.syntax(),
+DummyTestSpanMap,
+DUMMY,
+DocCommentDesugarMode::ProcMacro,
+);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -36,7 +51,12 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
+let tt = syntax_node_to_token_tree(
+tt.syntax(),
+DummyTestSpanMap,
+DUMMY,
+DocCommentDesugarMode::ProcMacro,
+);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();

View file

@@ -5,7 +5,7 @@ use triomphe::Arc;
use base_db::FileId;
use hir_expand::span_map::{RealSpanMap, SpanMap};
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
use syntax::{ast, AstNode, TextRange};
use crate::attr::{DocAtom, DocExpr};
@@ -18,6 +18,7 @@ fn assert_parse_result(input: &str, expected: DocExpr) {
tt.syntax(),
map.as_ref(),
map.span_for_range(TextRange::empty(0.into())),
+DocCommentDesugarMode::ProcMacro,
);
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);

View file

@@ -186,3 +186,33 @@ fn#0:1@45..47#0# foo#0:1@48..51#0#(#0:1@51..52#0#&#0:1@52..53#0#self#0:1@53..57#
}#0:1@76..77#0#"#]],
);
}
+#[test]
+fn attribute_macro_doc_desugaring() {
+check(
+r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+/// doc string \n with newline
+/**
+MultiLines Doc
+MultiLines Doc
+*/
+#[doc = "doc attr"]
+struct S;
+"#,
+expect![[r##"
+#[proc_macros::identity]
+/// doc string \n with newline
+/**
+MultiLines Doc
+MultiLines Doc
+*/
+#[doc = "doc attr"]
+struct S;
+#[doc = " doc string \\n with newline"]
+#[doc = "\n MultiLines Doc\n MultiLines Doc\n"]
+#[doc = "doc attr"] struct S;"##]],
+);
+}
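
The `\\n` in the first expected `#[doc]` attribute is `str::escape_debug` at work: the comment body contains a literal backslash followed by `n`, and escaping doubles the backslash. A standalone check of that behavior (illustrative, not part of the commit):

fn main() {
    // The doc comment body contains a literal backslash followed by 'n'.
    let body = r"doc string \n with newline";
    // escape_debug doubles the backslash, so the emitted literal shows `\\n`,
    // exactly as the expectation above records.
    assert_eq!(body.escape_debug().to_string(), r"doc string \\n with newline");
}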

View file

@@ -5,7 +5,7 @@ use base_db::CrateId;
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
-use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
+use mbe::{syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode, Punct};
use smallvec::{smallvec, SmallVec};
use span::{Span, SyntaxContextId};
use syntax::unescape;
@@ -239,7 +239,12 @@ impl Attr {
span,
})))
} else if let Some(tt) = ast.token_tree() {
-let tree = syntax_node_to_token_tree(tt.syntax(), span_map, span);
+let tree = syntax_node_to_token_tree(
+tt.syntax(),
+span_map,
+span,
+DocCommentDesugarMode::ProcMacro,
+);
Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None

View file

@@ -1,6 +1,7 @@
//! Builtin derives.
use itertools::izip;
+use mbe::DocCommentDesugarMode;
use rustc_hash::FxHashSet;
use span::{MacroCallId, Span};
use stdx::never;
@@ -262,7 +263,12 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
match this {
Some(it) => {
param_type_set.insert(it.as_name());
-mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)
+mbe::syntax_node_to_token_tree(
+it.syntax(),
+tm,
+call_site,
+DocCommentDesugarMode::ProcMacro,
+)
}
None => {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
@@ -270,15 +276,27 @@
}
};
let bounds = match &param {
-ast::TypeOrConstParam::Type(it) => it
-.type_bound_list()
-.map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)),
+ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| {
+mbe::syntax_node_to_token_tree(
+it.syntax(),
+tm,
+call_site,
+DocCommentDesugarMode::ProcMacro,
+)
+}),
ast::TypeOrConstParam::Const(_) => None,
};
let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
-.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm, call_site))
+.map(|ty| {
+mbe::syntax_node_to_token_tree(
+ty.syntax(),
+tm,
+call_site,
+DocCommentDesugarMode::ProcMacro,
+)
+})
.unwrap_or_else(|| {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
});
@@ -292,7 +310,14 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
let where_clause = if let Some(w) = where_clause {
w.predicates()
-.map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site))
+.map(|it| {
+mbe::syntax_node_to_token_tree(
+it.syntax(),
+tm,
+call_site,
+DocCommentDesugarMode::ProcMacro,
+)
+})
.collect()
} else {
vec![]
@@ -322,7 +347,14 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandEr
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
-.map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site))
+.map(|it| {
+mbe::syntax_node_to_token_tree(
+it.syntax(),
+tm,
+call_site,
+DocCommentDesugarMode::ProcMacro,
+)
+})
.collect();
let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, where_clause, associated_types })

View file

@@ -3,7 +3,7 @@
use base_db::{salsa, CrateId, FileId, SourceDatabase};
use either::Either;
use limit::Limit;
-use mbe::{syntax_node_to_token_tree, MatchedArmIndex};
+use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex};
use rustc_hash::FxHashSet;
use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
@@ -156,11 +156,25 @@ pub fn expand_speculative(
// Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => (
-mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
+mbe::syntax_node_to_token_tree(
+speculative_args,
+span_map,
+span,
+if loc.def.is_proc_macro() {
+DocCommentDesugarMode::ProcMacro
+} else {
+DocCommentDesugarMode::Mbe
+},
+),
SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
-mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
+mbe::syntax_node_to_token_tree(
+speculative_args,
+span_map,
+span,
+DocCommentDesugarMode::ProcMacro,
+),
SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Derive { derive_attr_index: index, .. }
@@ -176,7 +190,12 @@ pub fn expand_speculative(
let censor_cfg =
cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default();
-let mut fixups = fixup::fixup_syntax(span_map, speculative_args, span);
+let mut fixups = fixup::fixup_syntax(
+span_map,
+speculative_args,
+span,
+DocCommentDesugarMode::ProcMacro,
+);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
@@ -191,6 +210,7 @@
fixups.append,
fixups.remove,
span,
+DocCommentDesugarMode::ProcMacro,
),
fixups.undo_info,
)
@@ -212,7 +232,12 @@
}?;
match attr.token_tree() {
Some(token_tree) => {
-let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map, span);
+let mut tree = syntax_node_to_token_tree(
+token_tree.syntax(),
+span_map,
+span,
+DocCommentDesugarMode::ProcMacro,
+);
tree.delimiter = tt::Delimiter::invisible_spanned(span);
Some(tree)
@@ -432,7 +457,16 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
return dummy_tt(kind);
}
-let mut tt = mbe::syntax_node_to_token_tree(tt.syntax(), map.as_ref(), span);
+let mut tt = mbe::syntax_node_to_token_tree(
+tt.syntax(),
+map.as_ref(),
+span,
+if loc.def.is_proc_macro() {
+DocCommentDesugarMode::ProcMacro
+} else {
+DocCommentDesugarMode::Mbe
+},
+);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter.kind = tt::DelimiterKind::Invisible;
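
The pre-existing comment above is the reason for the delimiter tweak: for `m!(a + b)`, an MBE matcher runs against the parenthesized group, while a proc-macro receives the bare `a + b` tokens. A minimal model of that switch (hypothetical simplified types, not rust-analyzer's real `tt` API):

#[derive(Debug, Clone, Copy, PartialEq)]
enum DelimiterKind { Parenthesis, Invisible }

struct Subtree { delimiter: DelimiterKind, tokens: Vec<&'static str> }

fn main() {
    // `m!(a + b)` lexes as a parenthesized token tree.
    let mut tt = Subtree { delimiter: DelimiterKind::Parenthesis, tokens: vec!["a", "+", "b"] };
    let is_proc_macro = true; // stand-in for `loc.def.is_proc_macro()`
    if is_proc_macro {
        // Proc macros expect their input without the surrounding parentheses.
        tt.delimiter = DelimiterKind::Invisible;
    }
    assert_eq!(tt.delimiter, DelimiterKind::Invisible);
    assert_eq!(tt.tokens, ["a", "+", "b"]); // the tokens themselves are untouched
}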
@@ -469,7 +503,8 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
let (mut tt, undo_info) = {
let syntax = item_node.syntax();
let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default();
-let mut fixups = fixup::fixup_syntax(map.as_ref(), syntax, span);
+let mut fixups =
+fixup::fixup_syntax(map.as_ref(), syntax, span, DocCommentDesugarMode::ProcMacro);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
@@ -484,6 +519,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
fixups.append,
fixups.remove,
span,
+DocCommentDesugarMode::ProcMacro,
),
fixups.undo_info,
)

View file

@@ -2,6 +2,7 @@
use std::sync::OnceLock;
use base_db::{CrateId, VersionReq};
+use mbe::DocCommentDesugarMode;
use span::{Edition, MacroCallId, Span, SyntaxContextId};
use stdx::TupleExt;
use syntax::{ast, AstNode};
@@ -158,6 +159,7 @@ impl DeclarativeMacroExpander {
map.span_for_range(
macro_rules.macro_rules_token().unwrap().text_range(),
),
+DocCommentDesugarMode::Mbe,
);
mbe::DeclarativeMacro::parse_macro_rules(&tt, edition, new_meta_vars)
@@ -175,6 +177,7 @@
arg.syntax(),
map.as_ref(),
map.span_for_range(macro_def.macro_token().unwrap().text_range()),
+DocCommentDesugarMode::Mbe,
);
mbe::DeclarativeMacro::parse_macro2(&tt, edition, new_meta_vars)

View file

@@ -19,6 +19,7 @@
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
+use mbe::DocCommentDesugarMode;
use span::SyntaxContextId;
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;
@@ -80,7 +81,12 @@ pub fn expand_eager_macro_input(
return ExpandResult { value: None, err };
};
-let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, span);
+let mut subtree = mbe::syntax_node_to_token_tree(
+&expanded_eager_input,
+arg_map,
+span,
+DocCommentDesugarMode::Mbe,
+);
subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible;

View file

@@ -1,6 +1,7 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
+use mbe::DocCommentDesugarMode;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use span::{ErasedFileAstId, Span, SpanAnchor, FIXUP_ERASED_FILE_AST_ID_MARKER};
@@ -49,6 +50,7 @@ pub(crate) fn fixup_syntax(
span_map: SpanMapRef<'_>,
node: &SyntaxNode,
call_site: Span,
+mode: DocCommentDesugarMode,
) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut remove = FxHashSet::<SyntaxElement>::default();
@@ -70,7 +72,7 @@
if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid
-let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
+let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site, mode);
let idx = original.len() as u32;
original.push(original_tree);
let span = span_map.span_for_range(node_range);
@@ -360,6 +362,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
mod tests {
use base_db::FileId;
use expect_test::{expect, Expect};
+use mbe::DocCommentDesugarMode;
use syntax::TextRange;
use triomphe::Arc;
@@ -402,6 +405,7 @@ mod tests {
span_map.as_ref(),
&parsed.syntax_node(),
span_map.span_for_range(TextRange::empty(0.into())),
+DocCommentDesugarMode::Mbe,
);
let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(),
@@ -409,6 +413,7 @@
fixups.append,
fixups.remove,
span_map.span_for_range(TextRange::empty(0.into())),
+DocCommentDesugarMode::Mbe,
);
let actual = format!("{tt}\n");
@@ -436,6 +441,7 @@
&parsed.syntax_node(),
span_map.as_ref(),
span_map.span_for_range(TextRange::empty(0.into())),
+DocCommentDesugarMode::Mbe,
);
assert!(
check_subtree_eq(&tt, &original_as_tt),

View file

@@ -10,7 +10,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests};
use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator},
-syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanMap, DUMMY,
+syntax_node_to_token_tree, DeclarativeMacro, DocCommentDesugarMode, DummyTestSpanMap, DUMMY,
};
#[test]
@@ -78,6 +78,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<Span>> {
rule.token_tree().unwrap().syntax(),
DummyTestSpanMap,
DUMMY,
+DocCommentDesugarMode::Mbe,
);
(id, def_tt)
})

View file

@@ -34,7 +34,7 @@ pub use tt::{Delimiter, DelimiterKind, Punct};
pub use crate::syntax_bridge::{
parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span,
syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node,
-SpanMapper,
+DocCommentDesugarMode, SpanMapper,
};
pub use crate::syntax_bridge::dummy_test_span_utils::*;

View file

@@ -69,18 +69,28 @@ pub(crate) mod dummy_test_span_utils {
}
}
+/// Doc comment desugaring differs between mbe and proc-macros.
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum DocCommentDesugarMode {
+/// Desugars doc comments as quoted raw strings
+Mbe,
+/// Desugars doc comments as quoted strings
+ProcMacro,
+}
/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
/// subtree's spans.
pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
node: &SyntaxNode,
map: SpanMap,
span: SpanData<Ctx>,
+mode: DocCommentDesugarMode,
) -> tt::Subtree<SpanData<Ctx>>
where
SpanData<Ctx>: Copy + fmt::Debug,
SpanMap: SpanMapper<SpanData<Ctx>>,
{
-let mut c = Converter::new(node, map, Default::default(), Default::default(), span);
+let mut c = Converter::new(node, map, Default::default(), Default::default(), span, mode);
convert_tokens(&mut c)
}
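
Why a mode flag rather than normalizing somewhere else: both desugarings spell the same string value, but a macro sees the source token, not the value, so the literal form must match what rustc would emit for each consumer. A quick standalone demonstration:

fn main() {
    // Raw and escaped literals are value-equal...
    assert_eq!(r#" say "hi" "#, " say \"hi\" ");
    // ...but `r#" say "hi" "#` and `" say \"hi\" "` are different tokens, and a
    // proc-macro inspecting `#[doc = ...]` can observe that difference.
}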
@@ -93,12 +103,13 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
remove: FxHashSet<SyntaxElement>,
call_site: SpanData<Ctx>,
+mode: DocCommentDesugarMode,
) -> tt::Subtree<SpanData<Ctx>>
where
SpanMap: SpanMapper<SpanData<Ctx>>,
SpanData<Ctx>: Copy + fmt::Debug,
{
-let mut c = Converter::new(node, map, append, remove, call_site);
+let mut c = Converter::new(node, map, append, remove, call_site, mode);
convert_tokens(&mut c)
}
@@ -165,7 +176,8 @@
if lexed.errors().next().is_some() {
return None;
}
-let mut conv = RawConverter { lexed, anchor, pos: 0, ctx };
+let mut conv =
+RawConverter { lexed, anchor, pos: 0, ctx, mode: DocCommentDesugarMode::ProcMacro };
Some(convert_tokens(&mut conv))
}
@@ -178,7 +190,8 @@
if lexed.errors().next().is_some() {
return None;
}
-let mut conv = StaticRawConverter { lexed, pos: 0, span };
+let mut conv =
+StaticRawConverter { lexed, pos: 0, span, mode: DocCommentDesugarMode::ProcMacro };
Some(convert_tokens(&mut conv))
}
@@ -405,7 +418,7 @@ fn is_single_token_op(kind: SyntaxKind) -> bool {
/// That is, strips leading `///` (or `/**`, etc)
/// and strips the ending `*/`
/// And then quote the string, which is needed to convert to `tt::Literal`
-fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
+fn doc_comment_text(comment: &ast::Comment, mode: DocCommentDesugarMode) -> SmolStr {
let prefix_len = comment.prefix().len();
let mut text = &comment.text()[prefix_len..];
@@ -414,26 +427,34 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
text = &text[0..text.len() - 2];
}
-let mut num_of_hashes = 0;
-let mut count = 0;
-for ch in text.chars() {
-count = match ch {
-'"' => 1,
-'#' if count > 0 => count + 1,
-_ => 0,
-};
-num_of_hashes = num_of_hashes.max(count);
-}
+let text = match mode {
+DocCommentDesugarMode::Mbe => {
+let mut num_of_hashes = 0;
+let mut count = 0;
+for ch in text.chars() {
+count = match ch {
+'"' => 1,
+'#' if count > 0 => count + 1,
+_ => 0,
+};
+num_of_hashes = num_of_hashes.max(count);
+}
-// Quote raw string with delimiters
-// Note that `tt::Literal` expect an escaped string
-let text = format!("r{delim}\"{text}\"{delim}", delim = "#".repeat(num_of_hashes));
+// Quote raw string with delimiters
+// Note that `tt::Literal` expect an escaped string
+format!(r#"r{delim}"{text}"{delim}"#, delim = "#".repeat(num_of_hashes))
+}
+// Quote string with delimiters
+// Note that `tt::Literal` expect an escaped string
+DocCommentDesugarMode::ProcMacro => format!(r#""{}""#, text.escape_debug()),
+};
text.into()
}
fn convert_doc_comment<S: Copy>(
token: &syntax::SyntaxToken,
span: S,
+mode: DocCommentDesugarMode,
) -> Option<Vec<tt::TokenTree<S>>> {
cov_mark::hit!(test_meta_doc_comments);
let comment = ast::Comment::cast(token.clone())?;
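
For a side-by-side view of the two arms, here is a standalone re-implementation of the desugaring above (a sketch following the same rules; `desugar` is a hypothetical helper, not rust-analyzer API):

fn desugar(text: &str, proc_macro: bool) -> String {
    if proc_macro {
        // ProcMacro arm: plain string literal with escaped contents.
        format!(r#""{}""#, text.escape_debug())
    } else {
        // Mbe arm: raw string literal; find the longest quote-then-hashes run
        // so the chosen delimiter cannot be terminated early by the body.
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in text.chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = num_of_hashes.max(count);
        }
        format!(r#"r{delim}"{text}"{delim}"#, delim = "#".repeat(num_of_hashes))
    }
}

fn main() {
    let body = r##"a "# b"##; // a comment body containing `"#`
    assert_eq!(desugar(body, false), r####"r##"a "# b"##"####); // two hashes needed
    assert_eq!(desugar(body, true), r##""a \"# b""##); // quote escaped instead
}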
@@ -451,7 +472,7 @@
};
let mk_doc_literal = |comment: &ast::Comment| {
-let lit = tt::Literal { text: doc_comment_text(comment), span };
+let lit = tt::Literal { text: doc_comment_text(comment, mode), span };
tt::TokenTree::from(tt::Leaf::from(lit))
};
@@ -479,12 +500,14 @@ struct RawConverter<'a, Ctx> {
pos: usize,
anchor: SpanAnchor,
ctx: Ctx,
+mode: DocCommentDesugarMode,
}
/// A raw token (straight from lexer) converter that gives every token the same span.
struct StaticRawConverter<'a, S> {
lexed: parser::LexedStr<'a>,
pos: usize,
span: S,
+mode: DocCommentDesugarMode,
}
trait SrcToken<Ctx, S> {
@@ -553,7 +576,7 @@
span: SpanData<Ctx>,
) -> Option<Vec<tt::TokenTree<SpanData<Ctx>>>> {
let text = self.lexed.text(token);
-convert_doc_comment(&doc_comment(text), span)
+convert_doc_comment(&doc_comment(text), span, self.mode)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -592,7 +615,7 @@
fn convert_doc_comment(&self, &token: &usize, span: S) -> Option<Vec<tt::TokenTree<S>>> {
let text = self.lexed.text(token);
-convert_doc_comment(&doc_comment(text), span)
+convert_doc_comment(&doc_comment(text), span, self.mode)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -634,6 +657,7 @@ struct Converter<SpanMap, S> {
append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
remove: FxHashSet<SyntaxElement>,
call_site: S,
+mode: DocCommentDesugarMode,
}
impl<SpanMap, S> Converter<SpanMap, S> {
@@ -643,6 +667,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
remove: FxHashSet<SyntaxElement>,
call_site: S,
+mode: DocCommentDesugarMode,
) -> Self {
let mut this = Converter {
current: None,
@@ -654,6 +679,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
remove,
call_site,
current_leaves: vec![],
+mode,
};
let first = this.next_token();
this.current = first;
@@ -755,7 +781,7 @@
{
type Token = SynToken<S>;
fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
-convert_doc_comment(token.token(), span)
+convert_doc_comment(token.token(), span, self.mode)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {

View file

@@ -7,11 +7,16 @@ use tt::{
Leaf, Punct, Spacing,
};
-use crate::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
+use crate::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
fn check_punct_spacing(fixture: &str) {
let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap();
-let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY);
+let subtree = syntax_node_to_token_tree(
+source_file.syntax(),
+DummyTestSpanMap,
+DUMMY,
+DocCommentDesugarMode::Mbe,
+);
let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
.into_iter()
.map(|(range, annotation)| {

View file

@@ -211,7 +211,7 @@ mod tests {
use super::*;
use ide::Edition;
-use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
+use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
use syntax::{
ast::{self, AstNode},
SmolStr,
@@ -221,7 +221,12 @@ mod tests {
let cfg_expr = {
let source_file = ast::SourceFile::parse(cfg, Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY);
+let tt = syntax_node_to_token_tree(
+tt.syntax(),
+&DummyTestSpanMap,
+DUMMY,
+DocCommentDesugarMode::Mbe,
+);
CfgExpr::parse(&tt)
};