Deduplicate dummy test span maps

Lukas Wirth 2023-11-25 15:10:31 +01:00
parent c43078f99d
commit 6208960c48
12 changed files with 99 additions and 139 deletions
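In short: several crates (cfg, mbe, rust-analyzer) each defined their own throwaway span types and a no-op `SpanMapper` for tests (`DummyFile`, `DummyCtx`, `NoOpMap`). This commit moves a single copy into `mbe::syntax_bridge::dummy_test_span_utils` and re-exports it, so tests import `DummyTestSpanMap` and friends instead. A minimal sketch of a call site after the change; the helper name `parse_first_token_tree` is hypothetical, not from the diff:

```rust
use mbe::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
use syntax::{ast, AstNode};

// Parse a source string and convert its first token tree using the shared
// dummy span map, mirroring what the updated cfg tests do below.
fn parse_first_token_tree(input: &str) -> tt::Subtree<DummyTestSpanData> {
    let source_file = ast::SourceFile::parse(input).ok().unwrap();
    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
    syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap)
}
```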

View file

@@ -243,6 +243,9 @@ impl CrateDisplayName
     }
 }
 
+// FIXME: These should not be defined in here? Why does base db know about proc-macros
+// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub struct ProcMacroId(pub u32);
@@ -324,7 +327,9 @@ pub struct CrateData
     pub dependencies: Vec<Dependency>,
     pub origin: CrateOrigin,
     pub is_proc_macro: bool,
-    // FIXME: These things should not be per crate! These are more per workspace crate graph level things
+    // FIXME: These things should not be per crate! These are more per workspace crate graph level
+    // things. This info does need to be somewhat present though as to prevent deduplication from
+    // happening across different workspaces with different layouts.
     pub target_layout: TargetLayoutLoadResult,
     pub channel: Option<ReleaseChannel>,
 }

View file

@@ -4,6 +4,7 @@
 mod input;
 mod change;
+// FIXME: Is this purely a test util mod? Consider #[cfg(test)] gating it.
 pub mod fixture;
 pub mod span;
@@ -13,14 +14,13 @@ use rustc_hash::FxHashSet;
 use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
 use triomphe::Arc;
 
-pub use crate::input::DependencyKind;
 pub use crate::{
     change::Change,
     input::{
         CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
-        Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
-        ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
-        ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+        DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
+        ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
+        ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
     },
 };
 pub use salsa::{self, Cancelled};
@@ -69,8 +69,7 @@ pub trait FileLoader {
 /// model. Everything else in rust-analyzer is derived from these queries.
 #[salsa::query_group(SourceDatabaseStorage)]
 pub trait SourceDatabase: FileLoader + std::fmt::Debug {
-    // Parses the file into the syntax tree.
-    #[salsa::invoke(parse_query)]
+    /// Parses the file into the syntax tree.
     fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
 
     /// The crate graph.
@@ -82,7 +81,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
     fn proc_macros(&self) -> Arc<ProcMacros>;
 }
 
-fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
     let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
     let text = db.file_text(file_id);
     SourceFile::parse(&text)

View file

@@ -1,3 +1,5 @@
+/// File and span related types.
+// FIXME: This should probably be moved into its own crate.
 use std::fmt;
 
 use salsa::InternId;
@@ -29,10 +31,10 @@ impl SyntaxContext for SyntaxContextId {
 }
 
 // inherent trait impls please tyvm
 impl SyntaxContextId {
-    // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
+    // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
     // currently (which kind of makes sense but we need it here!)
     pub const ROOT: Self = SyntaxContextId(unsafe { core::mem::transmute(1) });
-    // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
+    // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
     // currently (which kind of makes sense but we need it here!)
     pub const SELF_REF: Self = SyntaxContextId(unsafe { core::mem::transmute(!0u32) });

View file

@@ -1,34 +1,14 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
-use mbe::{syntax_node_to_token_tree, SpanMapper};
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
 use syntax::{ast, AstNode};
-use tt::{SpanAnchor, SyntaxContext};
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct DummyFile;
-impl SpanAnchor for DummyFile {
-    const DUMMY: Self = DummyFile;
-}
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-struct DummyCtx;
-impl SyntaxContext for DummyCtx {
-    const DUMMY: Self = DummyCtx;
-}
-
-struct NoOpMap;
-
-impl SpanMapper<tt::SpanData<DummyFile, DummyCtx>> for NoOpMap {
-    fn span_for(&self, range: syntax::TextRange) -> tt::SpanData<DummyFile, DummyCtx> {
-        tt::SpanData { range, anchor: DummyFile, ctx: DummyCtx }
-    }
-}
-
 fn assert_parse_result(input: &str, expected: CfgExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }
@@ -36,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
 fn check_dnf(input: &str, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
     expect.assert_eq(&actual);
@@ -45,7 +25,7 @@ fn check_dnf(input: &str, expect: Expect) {
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -56,7 +36,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();

View file

@@ -6,35 +6,13 @@ use syntax::{
     AstNode, SmolStr,
 };
 use test_utils::{bench, bench_fixture, skip_slow_tests};
-use tt::{Span, SpanAnchor, SyntaxContext};
+use tt::Span;
 
 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro, SpanMapper,
+    syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap,
 };
 
-type SpanData = tt::SpanData<DummyFile, DummyCtx>;
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
-struct DummyFile;
-impl SpanAnchor for DummyFile {
-    const DUMMY: Self = DummyFile;
-}
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug)]
-struct DummyCtx;
-impl SyntaxContext for DummyCtx {
-    const DUMMY: Self = DummyCtx;
-}
-
-struct NoOpMap;
-
-impl SpanMapper<SpanData> for NoOpMap {
-    fn span_for(&self, range: syntax::TextRange) -> SpanData {
-        SpanData { range, anchor: DummyFile, ctx: DummyCtx }
-    }
-}
-
 #[test]
 fn benchmark_parse_macro_rules() {
     if skip_slow_tests() {
@@ -70,14 +48,14 @@ fn benchmark_expand_macro_rules() {
     assert_eq!(hash, 69413);
 }
 
-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData>> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
     macro_rules_fixtures_tt()
         .into_iter()
         .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
         .collect()
 }
 
-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
     let fixture = bench_fixture::numerous_macro_rules();
     let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@@ -87,7 +65,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
         .filter_map(ast::MacroRules::cast)
         .map(|rule| {
             let id = rule.name().unwrap().to_string();
-            let def_tt = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), NoOpMap);
+            let def_tt =
+                syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
             (id, def_tt)
         })
         .collect()
@@ -95,8 +74,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
 /// Generate random invocation fixtures from rules
 fn invocation_fixtures(
-    rules: &FxHashMap<String, DeclarativeMacro<SpanData>>,
-) -> Vec<(String, tt::Subtree<SpanData>)> {
+    rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
+) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
     let mut seed = 123456789;
     let mut res = Vec::new();
@@ -118,8 +97,8 @@ fn invocation_fixtures(
         loop {
             let mut subtree = tt::Subtree {
                 delimiter: tt::Delimiter {
-                    open: SpanData::DUMMY,
-                    close: SpanData::DUMMY,
+                    open: DummyTestSpanData::DUMMY,
+                    close: DummyTestSpanData::DUMMY,
                     kind: tt::DelimiterKind::Invisible,
                 },
                 token_trees: vec![],
@@ -141,7 +120,11 @@ fn invocation_fixtures(
     }
     return res;
 
-    fn collect_from_op(op: &Op<SpanData>, parent: &mut tt::Subtree<SpanData>, seed: &mut usize) {
+    fn collect_from_op(
+        op: &Op<DummyTestSpanData>,
+        parent: &mut tt::Subtree<DummyTestSpanData>,
+        seed: &mut usize,
+    ) {
         return match op {
             Op::Var { kind, .. } => match kind.as_ref() {
                 Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -227,22 +210,35 @@ fn invocation_fixtures(
         *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
         *seed
     }
-    fn make_ident(ident: &str) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into()
-    }
-    fn make_punct(char: char) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone })
-            .into()
-    }
-    fn make_literal(lit: &str) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into()
-    }
+    fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Ident(tt::Ident { span: DummyTestSpanData::DUMMY, text: SmolStr::new(ident) })
+            .into()
+    }
+    fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Punct(tt::Punct {
+            span: DummyTestSpanData::DUMMY,
+            char,
+            spacing: tt::Spacing::Alone,
+        })
+        .into()
+    }
+    fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Literal(tt::Literal {
+            span: DummyTestSpanData::DUMMY,
+            text: SmolStr::new(lit),
+        })
+        .into()
+    }
     fn make_subtree(
         kind: tt::DelimiterKind,
-        token_trees: Option<Vec<tt::TokenTree<SpanData>>>,
-    ) -> tt::TokenTree<SpanData> {
+        token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
+    ) -> tt::TokenTree<DummyTestSpanData> {
         tt::Subtree {
-            delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind },
+            delimiter: tt::Delimiter {
+                open: DummyTestSpanData::DUMMY,
+                close: DummyTestSpanData::DUMMY,
+                kind,
+            },
             token_trees: token_trees.unwrap_or_default(),
         }
         .into()

View file

@@ -40,6 +40,8 @@ pub use crate::{
     token_map::TokenMap,
 };
 
+pub use crate::syntax_bridge::dummy_test_span_utils::*;
+
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum ParseError {
     UnexpectedToken(Box<str>),

View file

@@ -33,6 +33,34 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
     }
 }
 
+pub(crate) mod dummy_test_span_utils {
+    use super::*;
+
+    pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
+
+    #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+    pub struct DummyTestSpanAnchor;
+    impl tt::SpanAnchor for DummyTestSpanAnchor {
+        const DUMMY: Self = DummyTestSpanAnchor;
+    }
+    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+    pub struct DummyTestSyntaxContext;
+    impl SyntaxContext for DummyTestSyntaxContext {
+        const DUMMY: Self = DummyTestSyntaxContext;
+    }
+
+    pub struct DummyTestSpanMap;
+
+    impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
+        fn span_for(
+            &self,
+            range: syntax::TextRange,
+        ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
+            tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
+        }
+    }
+}
+
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 /// TODO: Flesh out the doc comment more thoroughly
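Aside: the blanket `impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM` at the top of this hunk is what lets callers hand the map over either by value or by reference, and both styles appear in this commit (the cfg and mbe tests pass `DummyTestSpanMap`, cargo_target_spec passes `&DummyTestSpanMap`). A sketch, with `to_subtree` being a hypothetical helper:

```rust
use mbe::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
use syntax::SyntaxNode;

fn to_subtree(node: &SyntaxNode) -> tt::Subtree<DummyTestSpanData> {
    // By value, as in the cfg and mbe tests...
    let _by_value = syntax_node_to_token_tree(node, DummyTestSpanMap);
    // ...and by reference, which compiles thanks to the blanket impl above.
    syntax_node_to_token_tree(node, &DummyTestSpanMap)
}
```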

View file

@@ -4,38 +4,14 @@ use syntax::{ast, AstNode};
 use test_utils::extract_annotations;
 use tt::{
     buffer::{TokenBuffer, TokenTreeRef},
-    Leaf, Punct, Spacing, SpanAnchor, SyntaxContext,
+    Leaf, Punct, Spacing,
 };
 
-use crate::SpanMapper;
-
-use super::syntax_node_to_token_tree;
+use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
 
 fn check_punct_spacing(fixture: &str) {
-    type SpanData = tt::SpanData<DummyFile, DummyCtx>;
-
-    #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
-    struct DummyFile;
-    impl SpanAnchor for DummyFile {
-        const DUMMY: Self = DummyFile;
-    }
-    #[derive(PartialEq, Eq, Clone, Copy, Debug)]
-    struct DummyCtx;
-    impl SyntaxContext for DummyCtx {
-        const DUMMY: Self = DummyCtx;
-    }
-
-    struct NoOpMap;
-
-    impl SpanMapper<SpanData> for NoOpMap {
-        fn span_for(&self, range: syntax::TextRange) -> SpanData {
-            SpanData { range, anchor: DummyFile, ctx: DummyCtx }
-        }
-    }
-
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
-    let subtree = syntax_node_to_token_tree(source_file.syntax(), NoOpMap);
+    let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
     let mut annotations: HashMap<_, _> = extract_annotations(fixture)
         .into_iter()
         .map(|(range, annotation)| {
@@ -53,7 +29,7 @@ fn check_punct_spacing(fixture: &str) {
     while !cursor.eof() {
         while let Some(token_tree) = cursor.token_tree() {
             if let TokenTreeRef::Leaf(
-                Leaf::Punct(Punct { spacing, span: SpanData { range, .. }, .. }),
+                Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }),
                 _,
             ) = token_tree
             {

View file

@@ -18,11 +18,13 @@ fn main() -> std::io::Result<()> {
     run()
 }
 
+#[cfg(not(FALSE))]
 #[cfg(not(feature = "sysroot-abi"))]
 fn run() -> io::Result<()> {
     panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
 }
 
+#[cfg(FALSE)]
 #[cfg(feature = "sysroot-abi")]
 fn run() -> io::Result<()> {
     use proc_macro_api::msg::{self, Message};
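The two added attributes are the usual trick for temporarily hard-disabling one side of a feature-gated pair: no cfg named `FALSE` is ever set, so `#[cfg(FALSE)]` always evaluates to false (compiling the real `sysroot-abi` server out while the span rework is in flight) and `#[cfg(not(FALSE))]` always to true. A standalone illustration of the mechanism:

```rust
// No cfg named `FALSE` exists, so this item is always compiled in...
#[cfg(not(FALSE))]
fn run() {
    println!("always the variant that gets compiled");
}

// ...and this one is always compiled out, whatever features are enabled.
#[cfg(FALSE)]
fn run() {
    println!("never compiled");
}

fn main() {
    run(); // resolves to the first item
}
```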

View file

@@ -10,6 +10,7 @@
 //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
 //!   rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
 
+#![cfg(FALSE)] // TODO
 #![cfg(any(feature = "sysroot-abi", rust_analyzer))]
 #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
 #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]

View file

@@ -209,26 +209,17 @@ mod tests {
     use super::*;
 
     use cfg::CfgExpr;
-    use hir_def::tt::{self, Span};
-    use mbe::{syntax_node_to_token_tree, SpanMapper};
+    use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
     use syntax::{
         ast::{self, AstNode},
         SmolStr,
     };
 
-    struct NoOpMap;
-
-    impl SpanMapper<tt::SpanData> for NoOpMap {
-        fn span_for(&self, _: syntax::TextRange) -> tt::SpanData {
-            tt::SpanData::DUMMY
-        }
-    }
-
     fn check(cfg: &str, expected_features: &[&str]) {
         let cfg_expr = {
             let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-            let tt = syntax_node_to_token_tree(tt.syntax(), &NoOpMap);
+            let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap);
             CfgExpr::parse(&tt)
         };

View file

@@ -7,31 +7,9 @@
 use std::fmt;
 
 use stdx::impl_from;
+use text_size::{TextRange, TextSize};
 
 pub use smol_str::SmolStr;
-pub use text_size::{TextRange, TextSize};
-
-/// Represents identity of the token.
-///
-/// For hygiene purposes, we need to track which expanded tokens originated from
-/// which source tokens. We do it by assigning an distinct identity to each
-/// source token and making sure that identities are preserved during macro
-/// expansion.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(pub u32);
-
-impl fmt::Debug for TokenId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-impl TokenId {
-    pub const UNSPECIFIED: TokenId = TokenId(!0);
-    pub const fn unspecified() -> TokenId {
-        Self::UNSPECIFIED
-    }
-}
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 pub struct SpanData<Anchor, Ctx> {
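With `TokenId` removed, token identity now lives in the generic `tt::SpanData<Anchor, Ctx>`, whose fields (`range`, `anchor`, `ctx`) are visible in the `DummyTestSpanMap::span_for` hunk above. A hedged sketch of constructing one by hand with the dummy test types from this commit (assuming the `text_size` crate is in scope; the exact re-exports may differ):

```rust
use mbe::{DummyTestSpanAnchor, DummyTestSyntaxContext};
use text_size::{TextRange, TextSize};

fn main() {
    // Construction mirrors DummyTestSpanMap::span_for in syntax_bridge.
    let span = tt::SpanData {
        range: TextRange::new(TextSize::from(0), TextSize::from(3)),
        anchor: DummyTestSpanAnchor,
        ctx: DummyTestSyntaxContext,
    };
    println!("{span:?}"); // SpanData derives Debug
}
```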