Merge pull request #18861 from ChayimFriedman2/await-edition

fix: Make edition per-token, not per-file
Lukas Wirth 2025-01-09 10:20:11 +00:00 committed by GitHub
commit 3fe50ef83c
34 changed files with 480 additions and 316 deletions

View file

@ -5,7 +5,8 @@ use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum Edition {
Edition2015,
// The syntax context stuff needs the discriminants to start from 0 and be consecutive.
Edition2015 = 0,
Edition2018,
Edition2021,
Edition2024,
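
(Not part of the diff.) A minimal, self-contained Rust sketch of why the discriminants must start at 0 and be consecutive: elsewhere in this PR the per-edition root SyntaxContextId is simply the edition discriminant reinterpreted as an intern id, and is_root tests for the contiguous 0..=LATEST prefix. The types below are simplified stand-ins for the real span crate items.

// Simplified stand-ins; assumes the root-id scheme introduced later in this diff.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
enum Edition {
    Edition2015 = 0,
    Edition2018,
    Edition2021,
    Edition2024,
}

impl Edition {
    const LATEST: Edition = Edition::Edition2024;
}

struct SyntaxContextId(u32);

impl SyntaxContextId {
    // One reserved root context per edition: intern id == discriminant.
    const fn root(edition: Edition) -> Self {
        SyntaxContextId(edition as u32)
    }
    // Every id in the contiguous 0..=LATEST range is a root.
    fn is_root(self) -> bool {
        self.0 <= Edition::LATEST as u32
    }
}

fn main() {
    assert_eq!(SyntaxContextId::root(Edition::Edition2015).0, 0);
    assert_eq!(SyntaxContextId::root(Edition::Edition2018).0, 1);
    assert_eq!(SyntaxContextId::root(Edition::Edition2021).0, 2);
    assert!(SyntaxContextId::root(Edition::Edition2024).is_root());
    assert!(!SyntaxContextId(4).is_root()); // first freshly interned, non-root context
}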

View file

@ -15,7 +15,7 @@ use hir_expand::{name::Name, ExpandError, InFile};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, MacroFileId};
use span::{Edition, MacroFileId, SyntaxContextData};
use syntax::{ast, AstPtr, SyntaxNodePtr};
use triomphe::Arc;
use tt::TextRange;
@ -37,15 +37,22 @@ use crate::{
/// A wrapper around [`span::SyntaxContextId`] that is intended only for comparisons.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct HygieneId(pub(crate) span::SyntaxContextId);
pub struct HygieneId(span::SyntaxContextId);
impl HygieneId {
pub const ROOT: Self = Self(span::SyntaxContextId::ROOT);
// The edition doesn't matter here, we only use this for comparisons and to lookup the macro.
pub const ROOT: Self = Self(span::SyntaxContextId::root(Edition::Edition2015));
pub fn new(ctx: span::SyntaxContextId) -> Self {
pub fn new(mut ctx: span::SyntaxContextId) -> Self {
// See `Name` for why we're doing that.
ctx.remove_root_edition();
Self(ctx)
}
pub(crate) fn lookup(self, db: &dyn DefDatabase) -> SyntaxContextData {
db.lookup_intern_syntax_context(self.0)
}
pub(crate) fn is_root(self) -> bool {
self.0.is_root()
}
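
(Not part of the diff.) A hedged sketch, with a simplified SyntaxContextId, of the comparison problem that the remove_root_edition call in HygieneId::new works around: once there is one root context per edition, two hygienically equivalent root contexts coming from files with different editions would otherwise compare unequal.

// Illustrative stand-ins; the real types live in the span and hir-expand crates.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct SyntaxContextId(u32);

const LATEST_EDITION: u32 = 3; // Edition2024 discriminant

impl SyntaxContextId {
    fn is_root(self) -> bool {
        self.0 <= LATEST_EDITION
    }
    // Collapse every per-edition root onto the Edition2015 root (id 0).
    fn remove_root_edition(&mut self) {
        if self.is_root() {
            self.0 = 0;
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct HygieneId(SyntaxContextId);

impl HygieneId {
    fn new(mut ctx: SyntaxContextId) -> Self {
        ctx.remove_root_edition();
        Self(ctx)
    }
}

fn main() {
    let root_2015 = SyntaxContextId(0);
    let root_2021 = SyntaxContextId(2);
    assert_ne!(root_2015, root_2021); // the raw ids differ per edition...
    assert_eq!(HygieneId::new(root_2015), HygieneId::new(root_2021)); // ...but hygiene treats all roots alike
}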

View file

@ -2460,7 +2460,7 @@ impl ExprCollector<'_> {
None => HygieneId::ROOT,
Some(span_map) => {
let ctx = span_map.span_at(span_start).ctx;
HygieneId(self.db.lookup_intern_syntax_context(ctx).opaque_and_semitransparent)
HygieneId::new(self.db.lookup_intern_syntax_context(ctx).opaque_and_semitransparent)
}
}
}

View file

@ -10,7 +10,7 @@ use hir_expand::{
ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
};
use limit::Limit;
use span::SyntaxContextId;
use span::{Edition, SyntaxContextId};
use syntax::{ast, Parse};
use triomphe::Arc;
@ -60,7 +60,7 @@ impl Expander {
pub fn syntax_context(&self) -> SyntaxContextId {
// FIXME:
SyntaxContextId::ROOT
SyntaxContextId::root(Edition::CURRENT)
}
pub fn enter_expand<T: ast::AstNode>(

View file

@ -270,7 +270,7 @@ m!();
// AstId: 2
pub macro m2 { ... }
// AstId: 3, SyntaxContext: 0, ExpandTo: Items
// AstId: 3, SyntaxContext: 2, ExpandTo: Items
m!(...);
"#]],
);

View file

@ -35,9 +35,9 @@ macro_rules! f {
};
}
struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1#
map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..93#1#std#0:1@93..96#1#::#0:1@96..98#1#collections#0:1@98..109#1#::#0:1@109..111#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
}#0:1@132..133#1#
struct#0:1@58..64#4# MyTraitMap2#0:2@31..42#2# {#0:1@72..73#4#
map#0:1@86..89#4#:#0:1@89..90#4# #0:1@89..90#4#::#0:1@91..93#4#std#0:1@93..96#4#::#0:1@96..98#4#collections#0:1@98..109#4#::#0:1@109..111#4#HashSet#0:1@111..118#4#<#0:1@118..119#4#(#0:1@119..120#4#)#0:1@120..121#4#>#0:1@121..122#4#,#0:1@122..123#4#
}#0:1@132..133#4#
"#]],
);
}
@ -75,12 +75,12 @@ macro_rules! f {
};
}
fn#0:2@30..32#0# main#0:2@33..37#0#(#0:2@37..38#0#)#0:2@38..39#0# {#0:2@40..41#0#
1#0:2@50..51#0#;#0:2@51..52#0#
1.0#0:2@61..64#0#;#0:2@64..65#0#
(#0:2@74..75#0#(#0:2@75..76#0#1#0:2@76..77#0#,#0:2@77..78#0# )#0:2@78..79#0#,#0:2@79..80#0# )#0:2@80..81#0#.#0:2@81..82#0#0#0:2@82..85#0#.#0:2@82..85#0#0#0:2@82..85#0#;#0:2@85..86#0#
let#0:2@95..98#0# x#0:2@99..100#0# =#0:2@101..102#0# 1#0:2@103..104#0#;#0:2@104..105#0#
}#0:2@110..111#0#
fn#0:2@30..32#2# main#0:2@33..37#2#(#0:2@37..38#2#)#0:2@38..39#2# {#0:2@40..41#2#
1#0:2@50..51#2#;#0:2@51..52#2#
1.0#0:2@61..64#2#;#0:2@64..65#2#
(#0:2@74..75#2#(#0:2@75..76#2#1#0:2@76..77#2#,#0:2@77..78#2# )#0:2@78..79#2#,#0:2@79..80#2# )#0:2@80..81#2#.#0:2@81..82#2#0#0:2@82..85#2#.#0:2@82..85#2#0#0:2@82..85#2#;#0:2@85..86#2#
let#0:2@95..98#2# x#0:2@99..100#2# =#0:2@101..102#2# 1#0:2@103..104#2#;#0:2@104..105#2#
}#0:2@110..111#2#
"#]],
@ -171,7 +171,7 @@ fn main(foo: ()) {
}
fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#0#;
/* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#2#;
}
}
@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
struct#1:1@59..65#1# Foo#0:2@32..35#0#(#1:1@70..71#1#u32#0:2@41..44#0#)#1:1@74..75#1#;#1:1@75..76#1#
struct#1:1@59..65#4# Foo#0:2@32..35#2#(#1:1@70..71#4#u32#0:2@41..44#2#)#1:1@74..75#4#;#1:1@75..76#4#
"#]],
);
}
@ -423,10 +423,10 @@ m! { foo, bar }
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
impl#\1# Bar#\1# {#\1#
fn#\1# foo#\0#(#\1#)#\1# {#\1#}#\1#
fn#\1# bar#\0#(#\1#)#\1# {#\1#}#\1#
}#\1#
impl#\4# Bar#\4# {#\4#
fn#\4# foo#\2#(#\4#)#\4# {#\4#}#\4#
fn#\4# bar#\2#(#\4#)#\4# {#\4#}#\4#
}#\4#
"#]],
);
}

View file

@ -358,6 +358,7 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
subtree,
syntax_bridge::TopEntryPoint::MacroItems,
&mut |_| span::Edition::CURRENT,
span::Edition::CURRENT,
);
if parse.errors().is_empty() {

View file

@ -181,9 +181,9 @@ fn foo(&self) {
self.0. 1;
}
fn#0:1@45..47#0# foo#0:1@48..51#0#(#0:1@51..52#0#&#0:1@52..53#0#self#0:1@53..57#0# )#0:1@57..58#0# {#0:1@59..60#0#
self#0:1@65..69#0# .#0:1@69..70#0#0#0:1@70..71#0#.#0:1@71..72#0#1#0:1@73..74#0#;#0:1@74..75#0#
}#0:1@76..77#0#"#]],
fn#0:1@45..47#2# foo#0:1@48..51#2#(#0:1@51..52#2#&#0:1@52..53#2#self#0:1@53..57#2# )#0:1@57..58#2# {#0:1@59..60#2#
self#0:1@65..69#2# .#0:1@69..70#2#0#0:1@70..71#2#.#0:1@71..72#2#1#0:1@73..74#2#;#0:1@74..75#2#
}#0:1@76..77#2#"#]],
);
}

View file

@ -74,7 +74,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
let proc_macros = if krate.is_proc_macro {
db.proc_macros()
.for_crate(def_map.krate, db.syntax_context(tree_id.file_id()))
.for_crate(def_map.krate, db.syntax_context(tree_id.file_id(), krate.edition))
.unwrap_or_default()
} else {
Default::default()

View file

@ -324,7 +324,7 @@ impl Resolver {
if n_segments <= 1 {
let mut hygiene_info = if !hygiene_id.is_root() {
let ctx = db.lookup_intern_syntax_context(hygiene_id.0);
let ctx = hygiene_id.lookup(db);
ctx.outer_expn.map(|expansion| {
let expansion = db.lookup_intern_macro_call(expansion);
(ctx.parent, expansion.def)

View file

@ -4,7 +4,7 @@ use intern::sym;
use itertools::{izip, Itertools};
use parser::SyntaxKind;
use rustc_hash::FxHashSet;
use span::{MacroCallId, Span, SyntaxContextId};
use span::{Edition, MacroCallId, Span, SyntaxContextId};
use stdx::never;
use syntax_bridge::DocCommentDesugarMode;
use tracing::debug;
@ -33,7 +33,7 @@ macro_rules! register_builtin {
}
impl BuiltinDeriveExpander {
pub fn expander(&self) -> fn(Span, &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
pub fn expander(&self) -> fn(&dyn ExpandDatabase, Span, &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
}
@ -58,8 +58,8 @@ impl BuiltinDeriveExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id);
self.expander()(span, tt)
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
self.expander()(db, span, tt)
}
}
@ -226,8 +226,12 @@ struct AdtParam {
}
// FIXME: This whole thing needs a refactor. Each derive requires its special values, and the result is a mess.
fn parse_adt(tt: &tt::TopSubtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
let (adt, tm) = to_adt_syntax(tt, call_site)?;
fn parse_adt(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
call_site: Span,
) -> Result<BasicAdtInfo, ExpandError> {
let (adt, tm) = to_adt_syntax(db, tt, call_site)?;
parse_adt_from_syntax(&adt, &tm, call_site)
}
@ -382,12 +386,14 @@ fn parse_adt_from_syntax(
}
fn to_adt_syntax(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
call_site: Span,
) -> Result<(ast::Adt, span::SpanMap<SyntaxContextId>), ExpandError> {
let (parsed, tm) = syntax_bridge::token_tree_to_syntax_node(
let (parsed, tm) = crate::db::token_tree_to_syntax_node(
db,
tt,
syntax_bridge::TopEntryPoint::MacroItems,
crate::ExpandTo::Items,
parser::Edition::CURRENT_FIXME,
);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
@ -446,12 +452,13 @@ fn name_to_token(
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
db: &dyn ExpandDatabase,
invoc_span: Span,
tt: &tt::TopSubtree,
trait_path: tt::TopSubtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let info = match parse_adt(tt, invoc_span) {
let info = match parse_adt(db, tt, invoc_span) {
Ok(info) => info,
Err(e) => {
return ExpandResult::new(
@ -520,14 +527,22 @@ fn expand_simple_derive_with_parsed(
}
}
fn copy_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn copy_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
expand_simple_derive(db, span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
}
fn clone_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn clone_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::clone::Clone }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@ -576,9 +591,13 @@ fn and_and(span: Span) -> tt::TopSubtree {
quote! {span => #and& }
}
fn default_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn default_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::default::Default }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
@ -615,9 +634,13 @@ fn default_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtre
})
}
fn debug_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn debug_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
@ -687,9 +710,13 @@ fn debug_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree>
})
}
fn hash_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn hash_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::hash::Hash }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {span =>};
@ -734,14 +761,22 @@ fn hash_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree>
})
}
fn eq_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn eq_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
}
fn partial_eq_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn partial_eq_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {span =>};
@ -811,9 +846,13 @@ fn self_and_other_patterns(
(self_patterns, other_patterns)
}
fn ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn ord_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@ -869,9 +908,13 @@ fn ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
})
}
fn partial_ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn partial_ord_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@ -932,8 +975,12 @@ fn partial_ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSu
})
}
fn coerce_pointee_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
let (adt, _span_map) = match to_adt_syntax(tt, span) {
fn coerce_pointee_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let (adt, _span_map) = match to_adt_syntax(db, tt, span) {
Ok(it) => it,
Err(err) => {
return ExpandResult::new(tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), err);

View file

@ -69,7 +69,7 @@ impl BuiltinFnLikeExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id);
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@ -86,7 +86,7 @@ impl EagerExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id);
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@ -221,7 +221,7 @@ fn assert_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let mut iter = tt.iter();
@ -342,7 +342,7 @@ fn panic_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
let call_site_span = span_with_call_site_ctxt(db, span, id);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::panic_2021.clone()
@ -373,7 +373,7 @@ fn unreachable_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
let call_site_span = span_with_call_site_ctxt(db, span, id);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::unreachable_2021.clone()

View file

@ -225,7 +225,7 @@ mod tests {
use ::tt::IdentIsRaw;
use expect_test::expect;
use intern::Symbol;
use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use span::{Edition, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use syntax::{TextRange, TextSize};
use super::quote;
@ -239,7 +239,7 @@ mod tests {
),
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
};
#[test]
@ -276,8 +276,8 @@ mod tests {
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:#?}");
expect![[r#"
SUBTREE $$ 937550:0@0..0#0 937550:0@0..0#0
IDENT hello 937550:0@0..0#0"#]]
SUBTREE $$ 937550:0@0..0#2 937550:0@0..0#2
IDENT hello 937550:0@0..0#2"#]]
.assert_eq(&t);
}

View file

@ -5,7 +5,7 @@ use either::Either;
use limit::Limit;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
use triomphe::Arc;
@ -136,12 +136,12 @@ pub trait ExpandDatabase: SourceDatabase {
macro_call: MacroCallId,
) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>>;
#[ra_salsa::transparent]
fn syntax_context(&self, file: HirFileId) -> SyntaxContextId;
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContextId;
}
fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId) -> SyntaxContextId {
fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContextId {
match file.repr() {
HirFileIdRepr::FileId(_) => SyntaxContextId::ROOT,
HirFileIdRepr::FileId(_) => SyntaxContextId::root(edition),
HirFileIdRepr::MacroFile(m) => {
db.macro_arg_considering_derives(m.macro_call_id, &m.macro_call_id.lookup(db).kind)
.2
@ -273,9 +273,9 @@ pub fn expand_speculative(
loc.krate,
&tt,
attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call),
span_with_call_site_ctxt(db, span, actual_macro_call),
span_with_mixed_site_ctxt(db, span, actual_macro_call),
span_with_def_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_call_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_mixed_site_ctxt(db, span, actual_macro_call, loc.def.edition),
)
}
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
@ -300,7 +300,7 @@ pub fn expand_speculative(
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) =
token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to, loc.def.edition);
let syntax_node = node.syntax_node();
let token = rev_tmap
@ -346,6 +346,7 @@ fn parse_macro_expansion(
macro_expand(db, macro_file.macro_call_id, loc);
let (parse, mut rev_token_map) = token_tree_to_syntax_node(
db,
match &tt {
CowArc::Arc(it) => it,
CowArc::Owned(it) => it,
@ -699,9 +700,9 @@ fn expand_proc_macro(
loc.krate,
&macro_arg,
attr_arg,
span_with_def_site_ctxt(db, span, id),
span_with_call_site_ctxt(db, span, id),
span_with_mixed_site_ctxt(db, span, id),
span_with_def_site_ctxt(db, span, id, loc.def.edition),
span_with_call_site_ctxt(db, span, id, loc.def.edition),
span_with_mixed_site_ctxt(db, span, id, loc.def.edition),
)
};
@ -715,7 +716,8 @@ fn expand_proc_macro(
ExpandResult { value: Arc::new(tt), err }
}
fn token_tree_to_syntax_node(
pub(crate) fn token_tree_to_syntax_node(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
expand_to: ExpandTo,
edition: parser::Edition,
@ -727,7 +729,12 @@ fn token_tree_to_syntax_node(
ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
};
syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition)
syntax_bridge::token_tree_to_syntax_node(
tt,
entry_point,
&mut |ctx| ctx.lookup(db).edition,
edition,
)
}
fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
@ -751,5 +758,7 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
}
fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
db.intern_syntax_context(SyntaxContextData::root());
for edition in Edition::iter() {
db.intern_syntax_context(SyntaxContextData::root(edition));
}
}
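
(Not part of the diff.) A toy interner sketch of what this setup loop is assumed to guarantee: interning one root SyntaxContextData per edition, in discriminant order, during database setup reserves exactly the intern ids 0..=LATEST, so SyntaxContextId::root(edition) can be computed without consulting the database. The Interner and Edition::iter below are illustrative stand-ins, not the real salsa machinery.

// Toy interner standing in for `db.intern_syntax_context`.
struct Interner<T> {
    items: Vec<T>,
}

impl<T> Interner<T> {
    fn new() -> Self {
        Interner { items: Vec::new() }
    }
    fn intern(&mut self, value: T) -> u32 {
        let id = self.items.len() as u32;
        self.items.push(value);
        id
    }
}

#[derive(Clone, Copy)]
enum Edition {
    Edition2015,
    Edition2018,
    Edition2021,
    Edition2024,
}

impl Edition {
    fn iter() -> impl Iterator<Item = Edition> {
        [Edition::Edition2015, Edition::Edition2018, Edition::Edition2021, Edition::Edition2024]
            .into_iter()
    }
}

struct SyntaxContextData {
    edition: Edition, // heavily simplified
}

fn setup_syntax_context_root(interner: &mut Interner<SyntaxContextData>) {
    for edition in Edition::iter() {
        let id = interner.intern(SyntaxContextData { edition });
        // The id handed out matches the edition discriminant, which is what
        // `SyntaxContextId::root(edition)` relies on.
        assert_eq!(id, edition as u32);
    }
}

fn main() {
    let mut interner = Interner::new();
    setup_syntax_context_root(&mut interner);
    assert_eq!(interner.items.len(), 4);
    assert!(matches!(interner.items[2].edition, Edition::Edition2021));
}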

View file

@ -2,7 +2,7 @@
use base_db::CrateId;
use intern::sym;
use span::{Edition, MacroCallId, Span, SyntaxContextId};
use span::{Edition, HirFileIdRepr, MacroCallId, Span, SyntaxContextId};
use stdx::TupleExt;
use syntax::{ast, AstNode};
use syntax_bridge::DocCommentDesugarMode;
@ -20,6 +20,7 @@ use crate::{
pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro,
pub transparency: Transparency,
edition: Edition,
}
impl DeclarativeMacroExpander {
@ -40,7 +41,7 @@ impl DeclarativeMacroExpander {
.mac
.expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency, self.edition),
span,
loc.def.edition,
)
@ -159,6 +160,10 @@ impl DeclarativeMacroExpander {
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
Arc::new(DeclarativeMacroExpander { mac, transparency })
let edition = ctx_edition(match id.file_id.repr() {
HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id.lookup(db).ctxt,
HirFileIdRepr::FileId(file) => SyntaxContextId::root(file.edition()),
});
Arc::new(DeclarativeMacroExpander { mac, transparency, edition })
}
}

View file

@ -380,14 +380,14 @@ impl InFile<TextRange> {
) -> (FileRange, SyntaxContextId) {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
(FileRange { file_id, range: self.value }, SyntaxContextId::root(file_id.edition()))
}
HirFileIdRepr::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
(loc.kind.original_call_range(db), SyntaxContextId::ROOT)
(loc.kind.original_call_range(db), SyntaxContextId::root(loc.def.edition))
}
}
}
@ -432,9 +432,10 @@ impl InFile<TextRange> {
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
}
HirFileIdRepr::FileId(file_id) => Some((
FileRange { file_id, range: self.value },
SyntaxContextId::root(file_id.edition()),
)),
HirFileIdRepr::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
}

View file

@ -380,7 +380,7 @@ pub(crate) fn reverse_fixups(tt: &mut TopSubtree, undo_info: &SyntaxFixupUndoInf
let span = |file_id| Span {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(span::Edition::Edition2015),
};
delimiter.open = span(delimiter.open.anchor.file_id);
delimiter.close = span(delimiter.close.anchor.file_id);
@ -562,6 +562,7 @@ mod tests {
let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
&tt,
syntax_bridge::TopEntryPoint::MacroItems,
&mut |_| parser::Edition::CURRENT,
parser::Edition::CURRENT,
);
assert!(

View file

@ -24,26 +24,37 @@
use std::iter;
use span::{MacroCallId, Span, SyntaxContextData, SyntaxContextId};
use span::{Edition, MacroCallId, Span, SyntaxContextData, SyntaxContextId};
use crate::db::{ExpandDatabase, InternSyntaxContextQuery};
pub use span::Transparency;
pub fn span_with_def_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
pub fn span_with_def_site_ctxt(
db: &dyn ExpandDatabase,
span: Span,
expn_id: MacroCallId,
edition: Edition,
) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque, edition)
}
pub fn span_with_call_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
pub fn span_with_call_site_ctxt(
db: &dyn ExpandDatabase,
span: Span,
expn_id: MacroCallId,
edition: Edition,
) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent, edition)
}
pub fn span_with_mixed_site_ctxt(
db: &dyn ExpandDatabase,
span: Span,
expn_id: MacroCallId,
edition: Edition,
) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent, edition)
}
fn span_with_ctxt_from_mark(
@ -51,8 +62,12 @@ fn span_with_ctxt_from_mark(
span: Span,
expn_id: MacroCallId,
transparency: Transparency,
edition: Edition,
) -> Span {
Span { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
Span {
ctx: apply_mark(db, SyntaxContextId::root(edition), expn_id, transparency, edition),
..span
}
}
pub(super) fn apply_mark(
@ -60,9 +75,10 @@ pub(super) fn apply_mark(
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
edition: Edition,
) -> SyntaxContextId {
if transparency == Transparency::Opaque {
return apply_mark_internal(db, ctxt, call_id, transparency);
return apply_mark_internal(db, ctxt, call_id, transparency, edition);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
@ -73,7 +89,7 @@ pub(super) fn apply_mark(
};
if call_site_ctxt.is_root() {
return apply_mark_internal(db, ctxt, call_id, transparency);
return apply_mark_internal(db, ctxt, call_id, transparency, edition);
}
// Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
@ -86,9 +102,9 @@ pub(super) fn apply_mark(
//
// See the example at `test/ui/hygiene/legacy_interaction.rs`.
for (call_id, transparency) in ctxt.marks(db) {
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency, edition);
}
apply_mark_internal(db, call_site_ctxt, call_id, transparency)
apply_mark_internal(db, call_site_ctxt, call_id, transparency, edition)
}
fn apply_mark_internal(
@ -96,6 +112,7 @@ fn apply_mark_internal(
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
edition: Edition,
) -> SyntaxContextId {
use base_db::ra_salsa;
@ -108,13 +125,14 @@ fn apply_mark_internal(
if transparency >= Transparency::Opaque {
let parent = opaque;
opaque = ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
(parent, call_id, transparency),
(parent, call_id, transparency, edition),
|new_opaque| SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
edition,
},
);
}
@ -123,13 +141,14 @@ fn apply_mark_internal(
let parent = opaque_and_semitransparent;
opaque_and_semitransparent =
ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
(parent, call_id, transparency),
(parent, call_id, transparency, edition),
|new_opaque_and_semitransparent| SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
edition,
},
);
}
@ -141,6 +160,7 @@ fn apply_mark_internal(
parent,
opaque,
opaque_and_semitransparent,
edition,
})
}

View file

@ -273,10 +273,9 @@ fn convert_path(
res
}
}
ast::PathSegmentKind::SelfTypeKw => ModPath::from_segments(
PathKind::Plain,
Some(Name::new_symbol(sym::Self_.clone(), SyntaxContextId::ROOT)),
),
ast::PathSegmentKind::SelfTypeKw => {
ModPath::from_segments(PathKind::Plain, Some(Name::new_symbol_root(sym::Self_.clone())))
}
ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()),
ast::PathSegmentKind::SelfKw => handle_super_kw(0)?,
ast::PathSegmentKind::SuperKw => handle_super_kw(1)?,

View file

@ -80,11 +80,20 @@ impl Name {
Name { symbol: Symbol::intern(text), ctx: () }
}
pub fn new(text: &str, ctx: SyntaxContextId) -> Name {
pub fn new(text: &str, mut ctx: SyntaxContextId) -> Name {
// For comparisons etc. we remove the edition, because sometimes we search for some `Name`
// and we don't know which edition it came from.
// Can't do that for all `SyntaxContextId`s because it breaks Salsa.
ctx.remove_root_edition();
_ = ctx;
Self::new_text(text)
}
pub fn new_root(text: &str) -> Name {
// The edition doesn't matter for hygiene.
Self::new(text, SyntaxContextId::root(Edition::Edition2015))
}
pub fn new_tuple_field(idx: usize) -> Name {
Name { symbol: Symbol::intern(&idx.to_string()), ctx: () }
}

View file

@ -12,7 +12,6 @@ use hir_def::{
};
use hir_expand::{mod_path::PathKind, name::Name};
use hir_ty::{db::HirDatabase, method_resolution};
use span::SyntaxContextId;
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@ -328,9 +327,7 @@ fn doc_modpath_from_str(link: &str) -> Option<ModPath> {
};
let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() {
Ok(idx) => Name::new_tuple_field(idx),
Err(_) => {
Name::new(segment.split_once('<').map_or(segment, |it| it.0), SyntaxContextId::ROOT)
}
Err(_) => Name::new_root(segment.split_once('<').map_or(segment, |it| it.0)),
});
Some(ModPath::from_segments(kind, parts))
};

View file

@ -83,7 +83,7 @@ use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use span::{Edition, EditionedFileId, FileId, MacroCallId, SyntaxContextId};
use span::{Edition, EditionedFileId, FileId, MacroCallId};
use stdx::{format_to, impl_from, never};
use syntax::{
ast::{self, HasAttrs as _, HasGenericParams, HasName},
@ -4833,7 +4833,7 @@ impl Type {
let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?;
let iterator_item = db
.trait_data(iterator_trait)
.associated_type_by_name(&Name::new_symbol(sym::Item.clone(), SyntaxContextId::ROOT))?;
.associated_type_by_name(&Name::new_symbol_root(sym::Item.clone()))?;
self.normalize_trait_assoc_type(db, &[], iterator_item.into())
}

View file

@ -986,7 +986,10 @@ impl<'db> SemanticsImpl<'db> {
process_expansion_for_token(&mut stack, include)?;
}
None => {
stack.push((file_id.into(), smallvec![(token, SyntaxContextId::ROOT)]));
stack.push((
file_id.into(),
smallvec![(token, SyntaxContextId::root(file_id.edition()))],
));
}
}
@ -1597,7 +1600,7 @@ impl<'db> SemanticsImpl<'db> {
self.db.upcast(),
&ModPath::from_segments(
hir_def::path::PathKind::Plain,
segments.into_iter().map(|it| Name::new(&it, SyntaxContextId::ROOT)),
segments.into_iter().map(|it| Name::new_root(&it)),
),
);
Some(items.iter_items().map(|(item, _)| item.into()))

View file

@ -399,4 +399,38 @@ fn f(s@m::Struct {
"#,
)
}
#[test]
fn editions_between_macros() {
check_diagnostics(
r#"
//- /edition2015.rs crate:edition2015 edition:2015
#[macro_export]
macro_rules! pass_expr_thorough {
($e:expr) => { $e };
}
//- /edition2018.rs crate:edition2018 deps:edition2015 edition:2018
async fn bar() {}
async fn foo() {
edition2015::pass_expr_thorough!(bar().await);
}
"#,
);
check_diagnostics(
r#"
//- /edition2018.rs crate:edition2018 edition:2018
pub async fn bar() {}
#[macro_export]
macro_rules! make_await {
() => { async { $crate::bar().await }; };
}
//- /edition2015.rs crate:edition2015 deps:edition2018 edition:2015
fn foo() {
edition2018::make_await!();
}
"#,
);
}
}

View file

@ -369,7 +369,8 @@ pub fn expect_fragment<'t>(
) -> ExpandResult<tt::TokenTreesView<'t, Span>> {
use ::parser;
let buffer = tt_iter.remaining();
let parser_input = to_parser_input(edition, buffer);
// FIXME: Pass the correct edition per token. Due to the split between mbe and hir-expand it's complicated.
let parser_input = to_parser_input(buffer, &mut |_ctx| edition);
let tree_traversal = entry_point.parse(&parser_input, edition);
let mut cursor = buffer.cursor();
let mut error = false;

View file

@ -26,7 +26,7 @@ fn check_(
file_id: EditionedFileId::new(FileId::from_raw(0), def_edition),
ast_id: ErasedFileAstId::from_raw(0),
},
SyntaxContextId::ROOT,
SyntaxContextId::root(Edition::CURRENT),
decl,
)
.unwrap();
@ -39,16 +39,20 @@ fn check_(
file_id: EditionedFileId::new(FileId::from_raw(1), call_edition),
ast_id: ErasedFileAstId::from_raw(0),
};
let arg_tt =
syntax_bridge::parse_to_token_tree(call_edition, call_anchor, SyntaxContextId::ROOT, arg)
.unwrap();
let arg_tt = syntax_bridge::parse_to_token_tree(
call_edition,
call_anchor,
SyntaxContextId::root(Edition::CURRENT),
arg,
)
.unwrap();
let res = mac.expand(
&arg_tt,
|_| (),
Span {
range: TextRange::up_to(TextSize::of(arg)),
anchor: call_anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
def_edition,
);
@ -59,7 +63,12 @@ fn check_(
if render_debug {
format_to!(expect_res, "{:#?}\n\n", res.value.0);
}
let (node, _) = syntax_bridge::token_tree_to_syntax_node(&res.value.0, parse, def_edition);
let (node, _) = syntax_bridge::token_tree_to_syntax_node(
&res.value.0,
parse,
&mut |_| def_edition,
def_edition,
);
format_to!(
expect_res,
"{}",
@ -106,25 +115,25 @@ fn token_mapping_smoke_test() {
struct MyTraitMap2
"#,
expect![[r#"
SUBTREE $$ 1:0@0..20#0 1:0@0..20#0
IDENT struct 0:0@34..40#0
IDENT MyTraitMap2 1:0@8..19#0
SUBTREE {} 0:0@48..49#0 0:0@100..101#0
IDENT map 0:0@58..61#0
PUNCH : [alone] 0:0@61..62#0
PUNCH : [joint] 0:0@63..64#0
PUNCH : [alone] 0:0@64..65#0
IDENT std 0:0@65..68#0
PUNCH : [joint] 0:0@68..69#0
PUNCH : [alone] 0:0@69..70#0
IDENT collections 0:0@70..81#0
PUNCH : [joint] 0:0@81..82#0
PUNCH : [alone] 0:0@82..83#0
IDENT HashSet 0:0@83..90#0
PUNCH < [alone] 0:0@90..91#0
SUBTREE () 0:0@91..92#0 0:0@92..93#0
PUNCH > [joint] 0:0@93..94#0
PUNCH , [alone] 0:0@94..95#0
SUBTREE $$ 1:0@0..20#2 1:0@0..20#2
IDENT struct 0:0@34..40#2
IDENT MyTraitMap2 1:0@8..19#2
SUBTREE {} 0:0@48..49#2 0:0@100..101#2
IDENT map 0:0@58..61#2
PUNCH : [alone] 0:0@61..62#2
PUNCH : [joint] 0:0@63..64#2
PUNCH : [alone] 0:0@64..65#2
IDENT std 0:0@65..68#2
PUNCH : [joint] 0:0@68..69#2
PUNCH : [alone] 0:0@69..70#2
IDENT collections 0:0@70..81#2
PUNCH : [joint] 0:0@81..82#2
PUNCH : [alone] 0:0@82..83#2
IDENT HashSet 0:0@83..90#2
PUNCH < [alone] 0:0@90..91#2
SUBTREE () 0:0@91..92#2 0:0@92..93#2
PUNCH > [joint] 0:0@93..94#2
PUNCH , [alone] 0:0@94..95#2
struct MyTraitMap2 {
map: ::std::collections::HashSet<()>,
@ -153,28 +162,28 @@ fn main() {
}
"#,
expect![[r#"
SUBTREE $$ 1:0@0..63#0 1:0@0..63#0
IDENT fn 1:0@1..3#0
IDENT main 1:0@4..8#0
SUBTREE () 1:0@8..9#0 1:0@9..10#0
SUBTREE {} 1:0@11..12#0 1:0@61..62#0
LITERAL Integer 1 1:0@17..18#0
PUNCH ; [alone] 1:0@18..19#0
LITERAL Float 1.0 1:0@24..27#0
PUNCH ; [alone] 1:0@27..28#0
SUBTREE () 1:0@33..34#0 1:0@39..40#0
SUBTREE () 1:0@34..35#0 1:0@37..38#0
LITERAL Integer 1 1:0@35..36#0
PUNCH , [alone] 1:0@36..37#0
PUNCH , [alone] 1:0@38..39#0
PUNCH . [alone] 1:0@40..41#0
LITERAL Float 0.0 1:0@41..44#0
PUNCH ; [alone] 1:0@44..45#0
IDENT let 1:0@50..53#0
IDENT x 1:0@54..55#0
PUNCH = [alone] 1:0@56..57#0
LITERAL Integer 1 1:0@58..59#0
PUNCH ; [alone] 1:0@59..60#0
SUBTREE $$ 1:0@0..63#2 1:0@0..63#2
IDENT fn 1:0@1..3#2
IDENT main 1:0@4..8#2
SUBTREE () 1:0@8..9#2 1:0@9..10#2
SUBTREE {} 1:0@11..12#2 1:0@61..62#2
LITERAL Integer 1 1:0@17..18#2
PUNCH ; [alone] 1:0@18..19#2
LITERAL Float 1.0 1:0@24..27#2
PUNCH ; [alone] 1:0@27..28#2
SUBTREE () 1:0@33..34#2 1:0@39..40#2
SUBTREE () 1:0@34..35#2 1:0@37..38#2
LITERAL Integer 1 1:0@35..36#2
PUNCH , [alone] 1:0@36..37#2
PUNCH , [alone] 1:0@38..39#2
PUNCH . [alone] 1:0@40..41#2
LITERAL Float 0.0 1:0@41..44#2
PUNCH ; [alone] 1:0@44..45#2
IDENT let 1:0@50..53#2
IDENT x 1:0@54..55#2
PUNCH = [alone] 1:0@56..57#2
LITERAL Integer 1 1:0@58..59#2
PUNCH ; [alone] 1:0@59..60#2
fn main(){
1;
@ -200,14 +209,14 @@ fn expr_2021() {
const { 1 },
"#,
expect![[r#"
SUBTREE $$ 1:0@0..25#0 1:0@0..25#0
IDENT _ 1:0@5..6#0
PUNCH ; [joint] 0:0@36..37#0
SUBTREE () 0:0@34..35#0 0:0@34..35#0
IDENT const 1:0@12..17#0
SUBTREE {} 1:0@18..19#0 1:0@22..23#0
LITERAL Integer 1 1:0@20..21#0
PUNCH ; [alone] 0:0@39..40#0
SUBTREE $$ 1:0@0..25#2 1:0@0..25#2
IDENT _ 1:0@5..6#2
PUNCH ; [joint] 0:0@36..37#2
SUBTREE () 0:0@34..35#2 0:0@34..35#2
IDENT const 1:0@12..17#2
SUBTREE {} 1:0@18..19#2 1:0@22..23#2
LITERAL Integer 1 1:0@20..21#2
PUNCH ; [alone] 0:0@39..40#2
_;
(const {
@ -228,13 +237,13 @@ fn expr_2021() {
expect![[r#"
ExpandError {
inner: (
1:0@5..6#0,
1:0@5..6#2,
NoMatchingRule,
),
}
SUBTREE $$ 1:0@0..8#0 1:0@0..8#0
PUNCH ; [alone] 0:0@39..40#0
SUBTREE $$ 1:0@0..8#2 1:0@0..8#2
PUNCH ; [alone] 0:0@39..40#2
;"#]],
);
@ -252,13 +261,13 @@ fn expr_2021() {
expect![[r#"
ExpandError {
inner: (
1:0@5..10#0,
1:0@5..10#2,
NoMatchingRule,
),
}
SUBTREE $$ 1:0@0..18#0 1:0@0..18#0
PUNCH ; [alone] 0:0@39..40#0
SUBTREE $$ 1:0@0..18#2 1:0@0..18#2
PUNCH ; [alone] 0:0@39..40#2
;"#]],
);
@ -278,26 +287,26 @@ fn expr_2021() {
break 'foo bar,
"#,
expect![[r#"
SUBTREE $$ 1:0@0..76#0 1:0@0..76#0
LITERAL Integer 4 1:0@5..6#0
PUNCH ; [joint] 0:0@41..42#0
LITERAL Str literal 1:0@12..21#0
PUNCH ; [joint] 0:0@41..42#0
SUBTREE () 0:0@39..40#0 0:0@39..40#0
IDENT funcall 1:0@27..34#0
SUBTREE () 1:0@34..35#0 1:0@35..36#0
PUNCH ; [joint] 0:0@41..42#0
SUBTREE () 0:0@39..40#0 0:0@39..40#0
IDENT future 1:0@42..48#0
PUNCH . [alone] 1:0@48..49#0
IDENT await 1:0@49..54#0
PUNCH ; [joint] 0:0@41..42#0
SUBTREE () 0:0@39..40#0 0:0@39..40#0
IDENT break 1:0@60..65#0
PUNCH ' [joint] 1:0@66..67#0
IDENT foo 1:0@67..70#0
IDENT bar 1:0@71..74#0
PUNCH ; [alone] 0:0@44..45#0
SUBTREE $$ 1:0@0..76#2 1:0@0..76#2
LITERAL Integer 4 1:0@5..6#2
PUNCH ; [joint] 0:0@41..42#2
LITERAL Str literal 1:0@12..21#2
PUNCH ; [joint] 0:0@41..42#2
SUBTREE () 0:0@39..40#2 0:0@39..40#2
IDENT funcall 1:0@27..34#2
SUBTREE () 1:0@34..35#2 1:0@35..36#2
PUNCH ; [joint] 0:0@41..42#2
SUBTREE () 0:0@39..40#2 0:0@39..40#2
IDENT future 1:0@42..48#2
PUNCH . [alone] 1:0@48..49#2
IDENT await 1:0@49..54#2
PUNCH ; [joint] 0:0@41..42#2
SUBTREE () 0:0@39..40#2 0:0@39..40#2
IDENT break 1:0@60..65#2
PUNCH ' [joint] 1:0@66..67#2
IDENT foo 1:0@67..70#2
IDENT bar 1:0@71..74#2
PUNCH ; [alone] 0:0@44..45#2
4;
"literal";
@ -319,13 +328,13 @@ fn expr_2021() {
expect![[r#"
ExpandError {
inner: (
1:0@5..6#0,
1:0@5..6#2,
NoMatchingRule,
),
}
SUBTREE $$ 1:0@0..8#0 1:0@0..8#0
PUNCH ; [alone] 0:0@44..45#0
SUBTREE $$ 1:0@0..8#2 1:0@0..8#2
PUNCH ; [alone] 0:0@44..45#2
;"#]],
);

View file

@ -159,7 +159,7 @@ type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str)
#[cfg(test)]
mod tests {
use intern::{sym, Symbol};
use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize};
use span::{Edition, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize};
use tt::{
Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, TopSubtree,
TopSubtreeBuilder,
@ -180,12 +180,12 @@ mod tests {
open: Span {
range: TextRange::empty(TextSize::new(0)),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
close: Span {
range: TextRange::empty(TextSize::new(19)),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
kind: DelimiterKind::Invisible,
});
@ -196,7 +196,7 @@ mod tests {
span: Span {
range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
is_raw: tt::IdentIsRaw::No,
}
@ -208,7 +208,7 @@ mod tests {
span: Span {
range: TextRange::at(TextSize::new(5), TextSize::of("r#Foo")),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
is_raw: tt::IdentIsRaw::Yes,
}
@ -219,7 +219,7 @@ mod tests {
span: Span {
range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
kind: tt::LitKind::Str,
suffix: None,
@ -229,7 +229,7 @@ mod tests {
span: Span {
range: TextRange::at(TextSize::new(13), TextSize::of('@')),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
spacing: Spacing::Joint,
}));
@ -238,7 +238,7 @@ mod tests {
Span {
range: TextRange::at(TextSize::new(14), TextSize::of('{')),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
);
builder.push(Leaf::Literal(Literal {
@ -246,7 +246,7 @@ mod tests {
span: Span {
range: TextRange::at(TextSize::new(15), TextSize::of("0u32")),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
},
kind: tt::LitKind::Integer,
suffix: Some(sym::u32.clone()),
@ -254,7 +254,7 @@ mod tests {
builder.close(Span {
range: TextRange::at(TextSize::new(19), TextSize::of('}')),
anchor,
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
});
builder.build()

View file

@ -440,7 +440,7 @@ mod tests {
file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
ast_id: span::ErasedFileAstId::from_raw(0),
},
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(span::Edition::CURRENT),
};
let s = TokenStream {
token_trees: vec![
@ -482,7 +482,7 @@ mod tests {
file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
ast_id: span::ErasedFileAstId::from_raw(0),
},
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(span::Edition::CURRENT),
};
let subtree_paren_a = vec![
tt::TokenTree::Subtree(tt::Subtree {

View file

@ -12,7 +12,7 @@ fn test_derive_empty() {
"DeriveEmpty",
r#"struct S;"#,
expect!["SUBTREE $$ 1 1"],
expect!["SUBTREE $$ 42:2@0..100#0 42:2@0..100#0"],
expect!["SUBTREE $$ 42:2@0..100#2 42:2@0..100#2"],
);
}
@ -29,12 +29,12 @@ fn test_derive_error() {
LITERAL Str #[derive(DeriveError)] struct S ; 1
PUNCH ; [alone] 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
IDENT compile_error 42:2@0..100#0
PUNCH ! [alone] 42:2@0..100#0
SUBTREE () 42:2@0..100#0 42:2@0..100#0
LITERAL Str #[derive(DeriveError)] struct S ; 42:2@0..100#0
PUNCH ; [alone] 42:2@0..100#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
IDENT compile_error 42:2@0..100#2
PUNCH ! [alone] 42:2@0..100#2
SUBTREE () 42:2@0..100#2 42:2@0..100#2
LITERAL Str #[derive(DeriveError)] struct S ; 42:2@0..100#2
PUNCH ; [alone] 42:2@0..100#2"#]],
);
}
@ -53,14 +53,14 @@ fn test_fn_like_macro_noop() {
PUNCH , [alone] 1
SUBTREE [] 1 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
IDENT ident 42:2@0..5#0
PUNCH , [alone] 42:2@5..6#0
LITERAL Integer 0 42:2@7..8#0
PUNCH , [alone] 42:2@8..9#0
LITERAL Integer 1 42:2@10..11#0
PUNCH , [alone] 42:2@11..12#0
SUBTREE [] 42:2@13..14#0 42:2@14..15#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
IDENT ident 42:2@0..5#2
PUNCH , [alone] 42:2@5..6#2
LITERAL Integer 0 42:2@7..8#2
PUNCH , [alone] 42:2@8..9#2
LITERAL Integer 1 42:2@10..11#2
PUNCH , [alone] 42:2@11..12#2
SUBTREE [] 42:2@13..14#2 42:2@14..15#2"#]],
);
}
@ -75,10 +75,10 @@ fn test_fn_like_macro_clone_ident_subtree() {
PUNCH , [alone] 1
SUBTREE [] 1 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
IDENT ident 42:2@0..5#0
PUNCH , [alone] 42:2@5..6#0
SUBTREE [] 42:2@7..8#0 42:2@7..8#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
IDENT ident 42:2@0..5#2
PUNCH , [alone] 42:2@5..6#2
SUBTREE [] 42:2@7..8#2 42:2@7..8#2"#]],
);
}
@ -91,8 +91,8 @@ fn test_fn_like_macro_clone_raw_ident() {
SUBTREE $$ 1 1
IDENT r#async 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
IDENT r#async 42:2@0..7#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
IDENT r#async 42:2@0..7#2"#]],
);
}
@ -105,8 +105,8 @@ fn test_fn_like_fn_like_span_join() {
SUBTREE $$ 1 1
IDENT r#joined 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
IDENT r#joined 42:2@0..11#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
IDENT r#joined 42:2@0..11#2"#]],
);
}
@ -121,10 +121,10 @@ fn test_fn_like_fn_like_span_ops() {
IDENT resolved_at_def_site 1
IDENT start_span 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
IDENT set_def_site 41:1@0..150#0
IDENT resolved_at_def_site 42:2@13..33#0
IDENT start_span 42:2@34..34#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
IDENT set_def_site 41:1@0..150#2
IDENT resolved_at_def_site 42:2@13..33#2
IDENT start_span 42:2@34..34#2"#]],
);
}
@ -143,14 +143,14 @@ fn test_fn_like_mk_literals() {
LITERAL Integer 123i64 1
LITERAL Integer 123 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
LITERAL ByteStr byte_string 42:2@0..100#0
LITERAL Char c 42:2@0..100#0
LITERAL Str string 42:2@0..100#0
LITERAL Float 3.14f64 42:2@0..100#0
LITERAL Float 3.14 42:2@0..100#0
LITERAL Integer 123i64 42:2@0..100#0
LITERAL Integer 123 42:2@0..100#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
LITERAL ByteStr byte_string 42:2@0..100#2
LITERAL Char c 42:2@0..100#2
LITERAL Str string 42:2@0..100#2
LITERAL Float 3.14f64 42:2@0..100#2
LITERAL Float 3.14 42:2@0..100#2
LITERAL Integer 123i64 42:2@0..100#2
LITERAL Integer 123 42:2@0..100#2"#]],
);
}
@ -164,9 +164,9 @@ fn test_fn_like_mk_idents() {
IDENT standard 1
IDENT r#raw 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
IDENT standard 42:2@0..100#0
IDENT r#raw 42:2@0..100#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
IDENT standard 42:2@0..100#2
IDENT r#raw 42:2@0..100#2"#]],
);
}
@ -198,27 +198,27 @@ fn test_fn_like_macro_clone_literals() {
PUNCH , [alone] 1
LITERAL CStr null 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
LITERAL Integer 1u16 42:2@0..4#0
PUNCH , [alone] 42:2@4..5#0
LITERAL Integer 2_u32 42:2@6..11#0
PUNCH , [alone] 42:2@11..12#0
PUNCH - [alone] 42:2@13..14#0
LITERAL Integer 4i64 42:2@14..18#0
PUNCH , [alone] 42:2@18..19#0
LITERAL Float 3.14f32 42:2@20..27#0
PUNCH , [alone] 42:2@27..28#0
LITERAL Str hello bridge 42:2@29..43#0
PUNCH , [alone] 42:2@43..44#0
LITERAL Str suffixedsuffix 42:2@45..61#0
PUNCH , [alone] 42:2@61..62#0
LITERAL StrRaw(2) raw 42:2@63..73#0
PUNCH , [alone] 42:2@73..74#0
LITERAL Char a 42:2@75..78#0
PUNCH , [alone] 42:2@78..79#0
LITERAL Byte b 42:2@80..84#0
PUNCH , [alone] 42:2@84..85#0
LITERAL CStr null 42:2@86..93#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
LITERAL Integer 1u16 42:2@0..4#2
PUNCH , [alone] 42:2@4..5#2
LITERAL Integer 2_u32 42:2@6..11#2
PUNCH , [alone] 42:2@11..12#2
PUNCH - [alone] 42:2@13..14#2
LITERAL Integer 4i64 42:2@14..18#2
PUNCH , [alone] 42:2@18..19#2
LITERAL Float 3.14f32 42:2@20..27#2
PUNCH , [alone] 42:2@27..28#2
LITERAL Str hello bridge 42:2@29..43#2
PUNCH , [alone] 42:2@43..44#2
LITERAL Str suffixedsuffix 42:2@45..61#2
PUNCH , [alone] 42:2@61..62#2
LITERAL StrRaw(2) raw 42:2@63..73#2
PUNCH , [alone] 42:2@73..74#2
LITERAL Char a 42:2@75..78#2
PUNCH , [alone] 42:2@78..79#2
LITERAL Byte b 42:2@80..84#2
PUNCH , [alone] 42:2@84..85#2
LITERAL CStr null 42:2@86..93#2"#]],
);
}
@ -239,12 +239,12 @@ fn test_attr_macro() {
LITERAL Str #[attr_error(some arguments)] mod m {} 1
PUNCH ; [alone] 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
IDENT compile_error 42:2@0..100#0
PUNCH ! [alone] 42:2@0..100#0
SUBTREE () 42:2@0..100#0 42:2@0..100#0
LITERAL Str #[attr_error(some arguments)] mod m {} 42:2@0..100#0
PUNCH ; [alone] 42:2@0..100#0"#]],
SUBTREE $$ 42:2@0..100#2 42:2@0..100#2
IDENT compile_error 42:2@0..100#2
PUNCH ! [alone] 42:2@0..100#2
SUBTREE () 42:2@0..100#2 42:2@0..100#2
LITERAL Str #[attr_error(some arguments)] mod m {} 42:2@0..100#2
PUNCH ; [alone] 42:2@0..100#2"#]],
);
}

View file

@ -76,7 +76,7 @@ fn assert_expand_impl(
file_id: EditionedFileId::current_edition(FileId::from_raw(41)),
ast_id: ErasedFileAstId::from_raw(1),
},
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(span::Edition::CURRENT),
};
let call_site = Span {
range: TextRange::new(0.into(), 100.into()),
@ -84,7 +84,7 @@ fn assert_expand_impl(
file_id: EditionedFileId::current_edition(FileId::from_raw(42)),
ast_id: ErasedFileAstId::from_raw(2),
},
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(span::Edition::CURRENT),
};
let mixed_site = call_site;

View file

@ -26,7 +26,7 @@ use crate::InternId;
#[cfg(feature = "ra-salsa")]
use ra_salsa::{InternId, InternValue};
use crate::MacroCallId;
use crate::{Edition, MacroCallId};
/// Interned [`SyntaxContextData`].
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -59,11 +59,20 @@ impl fmt::Display for SyntaxContextId {
}
impl SyntaxContextId {
#[inline]
pub fn remove_root_edition(&mut self) {
if self.is_root() {
*self = Self::root(Edition::Edition2015);
}
}
/// The root context, which is the parent of all other contexts. All [`FileId`]s have this context.
pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
pub const fn root(edition: Edition) -> Self {
SyntaxContextId(unsafe { InternId::new_unchecked(edition as u32) })
}
pub fn is_root(self) -> bool {
self == Self::ROOT
self.into_u32() <= Edition::LATEST as u32
}
/// Deconstruct a `SyntaxContextId` into a raw `u32`.
@ -89,6 +98,7 @@ pub struct SyntaxContextData {
// per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent.
pub outer_expn: Option<MacroCallId>,
pub outer_transparency: Transparency,
pub edition: Edition,
pub parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
pub opaque: SyntaxContextId,
@ -98,10 +108,10 @@ pub struct SyntaxContextData {
#[cfg(feature = "ra-salsa")]
impl InternValue for SyntaxContextData {
type Key = (SyntaxContextId, Option<MacroCallId>, Transparency);
type Key = (SyntaxContextId, Option<MacroCallId>, Transparency, Edition);
fn into_key(&self) -> Self::Key {
(self.parent, self.outer_expn, self.outer_transparency)
(self.parent, self.outer_expn, self.outer_transparency, self.edition)
}
}
@ -118,13 +128,14 @@ impl std::fmt::Debug for SyntaxContextData {
}
impl SyntaxContextData {
pub fn root() -> Self {
pub fn root(edition: Edition) -> Self {
SyntaxContextData {
outer_expn: None,
outer_transparency: Transparency::Opaque,
parent: SyntaxContextId::ROOT,
opaque: SyntaxContextId::ROOT,
opaque_and_semitransparent: SyntaxContextId::ROOT,
parent: SyntaxContextId::root(edition),
opaque: SyntaxContextId::root(edition),
opaque_and_semitransparent: SyntaxContextId::root(edition),
edition,
}
}
}

View file

@ -208,7 +208,7 @@ impl RealSpanMap {
Span {
range: range - offset,
anchor: SpanAnchor { file_id: self.file_id, ast_id },
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(self.file_id.edition()),
}
}
}

View file

@ -1,6 +1,6 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
use std::fmt;
use std::{fmt, hash::Hash};
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
@ -58,7 +58,7 @@ pub mod dummy_test_span_utils {
),
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
};
pub struct DummyTestSpanMap;
@ -74,7 +74,7 @@ pub mod dummy_test_span_utils {
),
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
}
}
}
@ -141,15 +141,16 @@ where
pub fn token_tree_to_syntax_node<Ctx>(
tt: &tt::TopSubtree<SpanData<Ctx>>,
entry_point: parser::TopEntryPoint,
edition: parser::Edition,
span_to_edition: &mut dyn FnMut(Ctx) -> Edition,
top_edition: Edition,
) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
where
SpanData<Ctx>: Copy + fmt::Debug,
Ctx: PartialEq,
Ctx: Copy + fmt::Debug + PartialEq + PartialEq + Eq + Hash,
{
let buffer = tt.view().strip_invisible();
let parser_input = to_parser_input(edition, buffer);
let parser_output = entry_point.parse(&parser_input, edition);
let parser_input = to_parser_input(buffer, span_to_edition);
// It matters what edition we parse with even when we escape all identifiers correctly.
let parser_output = entry_point.parse(&parser_input, top_edition);
let mut tree_sink = TtTreeSink::new(buffer.cursor());
for event in parser_output.iter() {
match event {

View file

@ -2,17 +2,20 @@
//! format that works for our parser.
use std::fmt;
use std::hash::Hash;
use span::Edition;
use rustc_hash::FxHashMap;
use span::{Edition, SpanData};
use syntax::{SyntaxKind, SyntaxKind::*, T};
pub fn to_parser_input<S: Copy + fmt::Debug>(
edition: Edition,
buffer: tt::TokenTreesView<'_, S>,
pub fn to_parser_input<Ctx: Copy + fmt::Debug + PartialEq + Eq + Hash>(
buffer: tt::TokenTreesView<'_, SpanData<Ctx>>,
span_to_edition: &mut dyn FnMut(Ctx) -> Edition,
) -> parser::Input {
let mut res = parser::Input::default();
let mut current = buffer.cursor();
let mut syntax_context_to_edition_cache = FxHashMap::default();
while !current.eof() {
let tt = current.token_tree();
@ -57,20 +60,25 @@ pub fn to_parser_input<S: Copy + fmt::Debug>(
res.was_joint();
}
}
tt::Leaf::Ident(ident) => match ident.sym.as_str() {
"_" => res.push(T![_]),
i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
_ if ident.is_raw.yes() => res.push(IDENT),
text => match SyntaxKind::from_keyword(text, edition) {
Some(kind) => res.push(kind),
None => {
let contextual_keyword =
SyntaxKind::from_contextual_keyword(text, edition)
.unwrap_or(SyntaxKind::IDENT);
res.push_ident(contextual_keyword);
}
},
},
tt::Leaf::Ident(ident) => {
let edition = *syntax_context_to_edition_cache
.entry(ident.span.ctx)
.or_insert_with(|| span_to_edition(ident.span.ctx));
match ident.sym.as_str() {
"_" => res.push(T![_]),
i if i.starts_with('\'') => res.push(LIFETIME_IDENT),
_ if ident.is_raw.yes() => res.push(IDENT),
text => match SyntaxKind::from_keyword(text, edition) {
Some(kind) => res.push(kind),
None => {
let contextual_keyword =
SyntaxKind::from_contextual_keyword(text, edition)
.unwrap_or(SyntaxKind::IDENT);
res.push_ident(contextual_keyword);
}
},
}
}
tt::Leaf::Punct(punct) => {
let kind = SyntaxKind::from_char(punct.char)
.unwrap_or_else(|| panic!("{punct:#?} is not a valid punct"));
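
(Not part of the diff.) A self-contained sketch of the per-identifier edition lookup that to_parser_input now performs, using illustrative stand-in types: each identifier is classified with the edition of its own syntax context, and the span_to_edition callback result is cached per context. This is what lets an `await` coming from a 2018-edition call site be treated as a keyword even when it flows through a 2015-edition macro, as in the editions_between_macros test above.

use std::collections::HashMap;

// Illustrative stand-ins; the real code uses span::SyntaxContextId and parser::SyntaxKind.
type SyntaxContextId = u32;

#[derive(Clone, Copy)]
enum Edition {
    Edition2015,
    Edition2018,
}

#[derive(Debug, PartialEq)]
enum TokenKind {
    Ident,
    AwaitKw,
}

struct Ident {
    text: &'static str,
    ctx: SyntaxContextId,
}

// Mirror of the caching in `to_parser_input`: resolve the edition once per
// syntax context, then pick the token kind per identifier.
fn classify(
    idents: &[Ident],
    span_to_edition: &mut dyn FnMut(SyntaxContextId) -> Edition,
) -> Vec<TokenKind> {
    let mut cache: HashMap<SyntaxContextId, Edition> = HashMap::new();
    let mut kinds = Vec::new();
    for ident in idents {
        let edition = *cache.entry(ident.ctx).or_insert_with(|| span_to_edition(ident.ctx));
        let kind = match (ident.text, edition) {
            ("await", Edition::Edition2018) => TokenKind::AwaitKw,
            _ => TokenKind::Ident,
        };
        kinds.push(kind);
    }
    kinds
}

fn main() {
    // One `await` token from a 2015-edition macro definition (ctx 0) and one
    // from a 2018-edition call site (ctx 1), mixed in the same token stream.
    let tokens = [Ident { text: "await", ctx: 0 }, Ident { text: "await", ctx: 1 }];
    let editions: HashMap<SyntaxContextId, Edition> =
        HashMap::from([(0, Edition::Edition2015), (1, Edition::Edition2018)]);
    let kinds = classify(&tokens, &mut |ctx| editions[&ctx]);
    assert_eq!(kinds, [TokenKind::Ident, TokenKind::AwaitKw]);
}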