Deduplicate dummy test span maps
commit 6208960c48 (parent c43078f99d)

12 changed files with 99 additions and 139 deletions
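Overview note: the commit removes four hand-rolled copies of a no-op span map (DummyFile/DummyCtx anchors plus a NoOpMap mapper, duplicated across the cfg tests, the mbe benchmarks, the mbe syntax-bridge tests, and rust-analyzer's cargo_target_spec tests) and replaces them with one shared module, mbe::syntax_bridge::dummy_test_span_utils. A self-contained sketch of the helper being centralized; TextRange is faked as a pair of offsets so the snippet compiles on its own:

    // Sketch of the deduplicated test helper. The real SpanMapper,
    // SpanData, and TextRange types live in the mbe, tt, and
    // text-size crates respectively.
    type TextRange = (u32, u32);

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct SpanData<Anchor, Ctx> {
        range: TextRange,
        anchor: Anchor,
        ctx: Ctx,
    }

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct DummyTestSpanAnchor;

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct DummyTestSyntaxContext;

    trait SpanMapper<S> {
        fn span_for(&self, range: TextRange) -> S;
    }

    struct DummyTestSpanMap;

    // Attaches a dummy anchor and hygiene context to any range, which
    // is all a span-agnostic test needs.
    impl SpanMapper<SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
        fn span_for(&self, range: TextRange) -> SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
            SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
        }
    }

    fn main() {
        let span = DummyTestSpanMap.span_for((0, 5));
        assert_eq!(span.range, (0, 5));
    }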
crates/base-db/src/input.rs

@@ -243,6 +243,9 @@ impl CrateDisplayName {
     }
 }
 
+// FIXME: These should not be defined in here? Why does base db know about proc-macros
+// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
+
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub struct ProcMacroId(pub u32);
 

@@ -324,7 +327,9 @@ pub struct CrateData {
     pub dependencies: Vec<Dependency>,
     pub origin: CrateOrigin,
     pub is_proc_macro: bool,
-    // FIXME: These things should not be per crate! These are more per workspace crate graph level things
+    // FIXME: These things should not be per crate! These are more per workspace crate graph level
+    // things. This info does need to be somewhat present though as to prevent deduplication from
+    // happening across different workspaces with different layouts.
     pub target_layout: TargetLayoutLoadResult,
     pub channel: Option<ReleaseChannel>,
 }
crates/base-db/src/lib.rs

@@ -4,6 +4,7 @@
 
 mod input;
 mod change;
+// FIXME: Is this purely a test util mod? Consider #[cfg(test)] gating it.
 pub mod fixture;
 pub mod span;
 

@@ -13,14 +14,13 @@ use rustc_hash::FxHashSet;
 use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
 use triomphe::Arc;
 
-pub use crate::input::DependencyKind;
 pub use crate::{
     change::Change,
     input::{
         CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
-        Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
-        ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
-        ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+        DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
+        ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
+        ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
     },
 };
 pub use salsa::{self, Cancelled};

@@ -69,8 +69,7 @@ pub trait FileLoader {
 /// model. Everything else in rust-analyzer is derived from these queries.
 #[salsa::query_group(SourceDatabaseStorage)]
 pub trait SourceDatabase: FileLoader + std::fmt::Debug {
-    // Parses the file into the syntax tree.
-    #[salsa::invoke(parse_query)]
+    /// Parses the file into the syntax tree.
     fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
 
     /// The crate graph.

@@ -82,7 +81,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
     fn proc_macros(&self) -> Arc<ProcMacros>;
 }
 
-fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
     let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
     let text = db.file_text(file_id);
     SourceFile::parse(&text)
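Note on the parse_query → parse rename above: under #[salsa::query_group], a query method without an explicit #[salsa::invoke(...)] attribute is implemented by the free function of the same name in the same module, so dropping the attribute forces the free function to be renamed to match the trait method. A minimal sketch of that convention, using a hypothetical query group in the salsa 0.17-style API (names are illustrative, not the real base-db trait):

    #[salsa::query_group(DemoStorage)]
    trait DemoDatabase {
        #[salsa::input]
        fn text(&self) -> String;

        // No #[salsa::invoke], so salsa dispatches to the free fn
        // `length` below.
        fn length(&self) -> usize;
    }

    // Must share the trait method's name once the invoke attribute is gone.
    fn length(db: &dyn DemoDatabase) -> usize {
        db.text().len()
    }

    #[salsa::database(DemoStorage)]
    #[derive(Default)]
    struct DemoDb {
        storage: salsa::Storage<DemoDb>,
    }
    impl salsa::Database for DemoDb {}

    fn main() {
        let mut db = DemoDb::default();
        db.set_text("hi".to_owned());
        assert_eq!(db.length(), 2);
    }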
crates/base-db/src/span.rs

@@ -1,3 +1,5 @@
+/// File and span related types.
+// FIXME: This should probably be moved into its own crate.
 use std::fmt;
 
 use salsa::InternId;

@@ -29,10 +31,10 @@ impl SyntaxContext for SyntaxContextId {
 }
 // inherent trait impls please tyvm
 impl SyntaxContextId {
-    // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
+    // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
     // currently (which kind of makes sense but we need it here!)
     pub const ROOT: Self = SyntaxContextId(unsafe { core::mem::transmute(1) });
-    // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context
+    // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
     // currently (which kind of makes sense but we need it here!)
     pub const SELF_REF: Self = SyntaxContextId(unsafe { core::mem::transmute(!0u32) });
 
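The "very much UB" comments refer to conjuring a salsa::InternId in const context. The transmute leans on the assumption that InternId is a newtype over NonZeroU32, which is why 1 and !0u32 (both non-zero) are the chosen bit patterns. A sketch of that layout assumption with a stand-in type, since the real InternId field is private; FakeInternId is not the real salsa type:

    use std::num::NonZeroU32;

    // Stand-in mirroring the assumed layout of salsa::InternId.
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct FakeInternId(NonZeroU32);

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct FakeSyntaxContextId(FakeInternId);

    impl FakeSyntaxContextId {
        // The safe const spelling the comment wishes salsa offered.
        const ROOT: Self = match NonZeroU32::new(1) {
            Some(n) => Self(FakeInternId(n)),
            None => unreachable!(),
        };
    }

    fn main() {
        // Same size as a bare u32 thanks to the NonZeroU32 niche.
        assert_eq!(std::mem::size_of::<FakeSyntaxContextId>(), 4);
        let _ = FakeSyntaxContextId::ROOT;
    }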
crates/cfg/src/tests.rs

@@ -1,34 +1,14 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
-use mbe::{syntax_node_to_token_tree, SpanMapper};
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
 use syntax::{ast, AstNode};
-use tt::{SpanAnchor, SyntaxContext};
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct DummyFile;
-impl SpanAnchor for DummyFile {
-    const DUMMY: Self = DummyFile;
-}
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-struct DummyCtx;
-impl SyntaxContext for DummyCtx {
-    const DUMMY: Self = DummyCtx;
-}
-
-struct NoOpMap;
-
-impl SpanMapper<tt::SpanData<DummyFile, DummyCtx>> for NoOpMap {
-    fn span_for(&self, range: syntax::TextRange) -> tt::SpanData<DummyFile, DummyCtx> {
-        tt::SpanData { range, anchor: DummyFile, ctx: DummyCtx }
-    }
-}
-
 fn assert_parse_result(input: &str, expected: CfgExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }

@@ -36,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
 fn check_dnf(input: &str, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
     expect.assert_eq(&actual);

@@ -45,7 +25,7 @@ fn check_dnf(input: &str, expect: Expect) {
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();

@@ -56,7 +36,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
crates/mbe/src/benchmark.rs

@@ -6,35 +6,13 @@ use syntax::{
     AstNode, SmolStr,
 };
 use test_utils::{bench, bench_fixture, skip_slow_tests};
-use tt::{Span, SpanAnchor, SyntaxContext};
+use tt::Span;
 
 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro, SpanMapper,
+    syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap,
 };
 
-type SpanData = tt::SpanData<DummyFile, DummyCtx>;
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
-struct DummyFile;
-impl SpanAnchor for DummyFile {
-    const DUMMY: Self = DummyFile;
-}
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug)]
-struct DummyCtx;
-impl SyntaxContext for DummyCtx {
-    const DUMMY: Self = DummyCtx;
-}
-
-struct NoOpMap;
-
-impl SpanMapper<SpanData> for NoOpMap {
-    fn span_for(&self, range: syntax::TextRange) -> SpanData {
-        SpanData { range, anchor: DummyFile, ctx: DummyCtx }
-    }
-}
-
 #[test]
 fn benchmark_parse_macro_rules() {
     if skip_slow_tests() {

@@ -70,14 +48,14 @@ fn benchmark_expand_macro_rules() {
     assert_eq!(hash, 69413);
 }
 
-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData>> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
     macro_rules_fixtures_tt()
         .into_iter()
         .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
         .collect()
 }
 
-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
     let fixture = bench_fixture::numerous_macro_rules();
     let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
 

@@ -87,7 +65,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
         .filter_map(ast::MacroRules::cast)
         .map(|rule| {
             let id = rule.name().unwrap().to_string();
-            let def_tt = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), NoOpMap);
+            let def_tt =
+                syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
             (id, def_tt)
         })
         .collect()

@@ -95,8 +74,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
 
 /// Generate random invocation fixtures from rules
 fn invocation_fixtures(
-    rules: &FxHashMap<String, DeclarativeMacro<SpanData>>,
-) -> Vec<(String, tt::Subtree<SpanData>)> {
+    rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
+) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
     let mut seed = 123456789;
     let mut res = Vec::new();
 

@@ -118,8 +97,8 @@ fn invocation_fixtures(
         loop {
             let mut subtree = tt::Subtree {
                 delimiter: tt::Delimiter {
-                    open: SpanData::DUMMY,
-                    close: SpanData::DUMMY,
+                    open: DummyTestSpanData::DUMMY,
+                    close: DummyTestSpanData::DUMMY,
                     kind: tt::DelimiterKind::Invisible,
                 },
                 token_trees: vec![],

@@ -141,7 +120,11 @@ fn invocation_fixtures(
         }
         return res;
 
-    fn collect_from_op(op: &Op<SpanData>, parent: &mut tt::Subtree<SpanData>, seed: &mut usize) {
+    fn collect_from_op(
+        op: &Op<DummyTestSpanData>,
+        parent: &mut tt::Subtree<DummyTestSpanData>,
+        seed: &mut usize,
+    ) {
         return match op {
             Op::Var { kind, .. } => match kind.as_ref() {
                 Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),

@@ -227,22 +210,35 @@ fn invocation_fixtures(
         *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
         *seed
     }
-    fn make_ident(ident: &str) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into()
-    }
-    fn make_punct(char: char) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone })
+    fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Ident(tt::Ident { span: DummyTestSpanData::DUMMY, text: SmolStr::new(ident) })
+            .into()
+    }
+    fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Punct(tt::Punct {
+            span: DummyTestSpanData::DUMMY,
+            char,
+            spacing: tt::Spacing::Alone,
+        })
             .into()
     }
-    fn make_literal(lit: &str) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into()
+    fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Literal(tt::Literal {
+            span: DummyTestSpanData::DUMMY,
+            text: SmolStr::new(lit),
+        })
+        .into()
     }
     fn make_subtree(
         kind: tt::DelimiterKind,
-        token_trees: Option<Vec<tt::TokenTree<SpanData>>>,
-    ) -> tt::TokenTree<SpanData> {
+        token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
+    ) -> tt::TokenTree<DummyTestSpanData> {
         tt::Subtree {
-            delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind },
+            delimiter: tt::Delimiter {
+                open: DummyTestSpanData::DUMMY,
+                close: DummyTestSpanData::DUMMY,
+                kind,
+            },
             token_trees: token_trees.unwrap_or_default(),
         }
         .into()
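The benchmark changes are mechanical: the file-local SpanData alias gives way to the shared DummyTestSpanData, and placeholder spans come from the DUMMY associated constant that the span traits in tt expose. The constant-based pattern in miniature (simplified trait; the real bounds live in the tt crate):

    // Span-like types expose a const DUMMY so fixtures can be built
    // without real source positions (simplified from tt's traits).
    trait Span: Copy {
        const DUMMY: Self;
    }

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct DummySpan;

    impl Span for DummySpan {
        const DUMMY: Self = DummySpan;
    }

    #[derive(Debug)]
    struct Delimiter<S> {
        open: S,
        close: S,
    }

    // Generic fixture code never needs a concrete location.
    fn invisible_delimiter<S: Span>() -> Delimiter<S> {
        Delimiter { open: S::DUMMY, close: S::DUMMY }
    }

    fn main() {
        let d: Delimiter<DummySpan> = invisible_delimiter();
        assert_eq!(d.open, DummySpan::DUMMY);
    }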
crates/mbe/src/lib.rs

@@ -40,6 +40,8 @@ pub use crate::{
     token_map::TokenMap,
 };
 
+pub use crate::syntax_bridge::dummy_test_span_utils::*;
+
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum ParseError {
     UnexpectedToken(Box<str>),
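The glob re-export above is what lets the downstream test code in this commit write use mbe::{..., DummyTestSpanMap} even though the helpers are defined in an inner module. The shape of that pattern, trimmed down (module names mirror the diff):

    // lib.rs: flatten a pub(crate) helper module into the public API.
    mod syntax_bridge {
        pub(crate) mod dummy_test_span_utils {
            pub struct DummyTestSpanMap;
        }
    }

    pub use crate::syntax_bridge::dummy_test_span_utils::*;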
crates/mbe/src/syntax_bridge.rs

@@ -33,6 +33,34 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
     }
 }
 
+pub(crate) mod dummy_test_span_utils {
+    use super::*;
+
+    pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
+
+    #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+    pub struct DummyTestSpanAnchor;
+    impl tt::SpanAnchor for DummyTestSpanAnchor {
+        const DUMMY: Self = DummyTestSpanAnchor;
+    }
+    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+    pub struct DummyTestSyntaxContext;
+    impl SyntaxContext for DummyTestSyntaxContext {
+        const DUMMY: Self = DummyTestSyntaxContext;
+    }
+
+    pub struct DummyTestSpanMap;
+
+    impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
+        fn span_for(
+            &self,
+            range: syntax::TextRange,
+        ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
+            tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
+        }
+    }
+}
+
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 /// TODO: Flesh out the doc comment more thoroughly
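With dummy_test_span_utils in place, every consumer in this diff follows the same shape. A condensed sketch assembled from the cfg test hunks (the parse and cast calls are the same syntax-crate APIs used above; good enough for tests that ignore locations):

    use mbe::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
    use syntax::{ast, AstNode};

    // Parse a fragment, pick out its token tree, and convert it with
    // dummy spans attached to every token.
    fn parse_token_tree(input: &str) -> tt::Subtree<DummyTestSpanData> {
        let source_file = ast::SourceFile::parse(input).ok().unwrap();
        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
        syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap)
    }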
crates/mbe/src/syntax_bridge/tests.rs

@@ -4,38 +4,14 @@ use syntax::{ast, AstNode};
 use test_utils::extract_annotations;
 use tt::{
     buffer::{TokenBuffer, TokenTreeRef},
-    Leaf, Punct, Spacing, SpanAnchor, SyntaxContext,
+    Leaf, Punct, Spacing,
 };
 
-use crate::SpanMapper;
-
-use super::syntax_node_to_token_tree;
+use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
 
 fn check_punct_spacing(fixture: &str) {
-    type SpanData = tt::SpanData<DummyFile, DummyCtx>;
-
-    #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
-    struct DummyFile;
-    impl SpanAnchor for DummyFile {
-        const DUMMY: Self = DummyFile;
-    }
-
-    #[derive(PartialEq, Eq, Clone, Copy, Debug)]
-    struct DummyCtx;
-    impl SyntaxContext for DummyCtx {
-        const DUMMY: Self = DummyCtx;
-    }
-
-    struct NoOpMap;
-
-    impl SpanMapper<SpanData> for NoOpMap {
-        fn span_for(&self, range: syntax::TextRange) -> SpanData {
-            SpanData { range, anchor: DummyFile, ctx: DummyCtx }
-        }
-    }
-
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
-    let subtree = syntax_node_to_token_tree(source_file.syntax(), NoOpMap);
+    let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
     let mut annotations: HashMap<_, _> = extract_annotations(fixture)
         .into_iter()
         .map(|(range, annotation)| {

@@ -53,7 +29,7 @@ fn check_punct_spacing(fixture: &str) {
     while !cursor.eof() {
         while let Some(token_tree) = cursor.token_tree() {
             if let TokenTreeRef::Leaf(
-                Leaf::Punct(Punct { spacing, span: SpanData { range, .. }, .. }),
+                Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }),
                 _,
             ) = token_tree
             {
crates/proc-macro-srv-cli/src/main.rs

@@ -18,11 +18,13 @@ fn main() -> std::io::Result<()> {
     run()
 }
 
-#[cfg(not(FALSE))]
+#[cfg(not(feature = "sysroot-abi"))]
 fn run() -> io::Result<()> {
     panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
 }
 
-#[cfg(FALSE)]
+#[cfg(feature = "sysroot-abi")]
 fn run() -> io::Result<()> {
     use proc_macro_api::msg::{self, Message};
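The two run definitions carry mutually exclusive cfg attributes, so exactly one body is compiled in: without the sysroot-abi feature the binary becomes a panicking stub rather than a pile of missing-dependency build errors. The same switch in miniature (the demo feature name is illustrative):

    // Exactly one of these compiles into the binary.
    #[cfg(not(feature = "demo"))]
    fn run() -> Result<(), String> {
        Err("built without the `demo` feature".into())
    }

    #[cfg(feature = "demo")]
    fn run() -> Result<(), String> {
        Ok(())
    }

    fn main() {
        if let Err(err) = run() {
            eprintln!("{err}");
        }
    }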
crates/proc-macro-srv/src/lib.rs

@@ -10,6 +10,7 @@
 //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
 //!   rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
 
-#![cfg(FALSE)] // TODO
+#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
 #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
 #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
crates/rust-analyzer/src/cargo_target_spec.rs

@@ -209,26 +209,17 @@ mod tests {
     use super::*;
 
     use cfg::CfgExpr;
-    use hir_def::tt::{self, Span};
-    use mbe::{syntax_node_to_token_tree, SpanMapper};
+    use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
     use syntax::{
         ast::{self, AstNode},
         SmolStr,
     };
 
-    struct NoOpMap;
-
-    impl SpanMapper<tt::SpanData> for NoOpMap {
-        fn span_for(&self, _: syntax::TextRange) -> tt::SpanData {
-            tt::SpanData::DUMMY
-        }
-    }
-
     fn check(cfg: &str, expected_features: &[&str]) {
         let cfg_expr = {
             let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-            let tt = syntax_node_to_token_tree(tt.syntax(), &NoOpMap);
+            let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap);
             CfgExpr::parse(&tt)
         };
 
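Note that check above passes &DummyTestSpanMap by reference; this compiles because syntax_bridge provides a forwarding impl for references, visible in the hunk header impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM earlier in this diff. Reduced to its essentials, with u32 standing in for the real span type:

    trait SpanMapper<S> {
        fn span_for(&self, range: u32) -> S;
    }

    // Forwarding impl: if SM maps spans, so does &SM.
    impl<S, SM: SpanMapper<S>> SpanMapper<S> for &SM {
        fn span_for(&self, range: u32) -> S {
            SM::span_for(self, range)
        }
    }

    struct NoOp;
    impl SpanMapper<u32> for NoOp {
        fn span_for(&self, range: u32) -> u32 {
            range
        }
    }

    fn convert<S, M: SpanMapper<S>>(map: M, range: u32) -> S {
        map.span_for(range)
    }

    fn main() {
        assert_eq!(convert(NoOp, 7), 7); // by value
        assert_eq!(convert(&NoOp, 7), 7); // by reference, via the forwarding impl
    }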
crates/tt/src/lib.rs

@@ -7,31 +7,9 @@
 use std::fmt;
 
 use stdx::impl_from;
-use text_size::{TextRange, TextSize};
 
 pub use smol_str::SmolStr;
-
-/// Represents identity of the token.
-///
-/// For hygiene purposes, we need to track which expanded tokens originated from
-/// which source tokens. We do it by assigning an distinct identity to each
-/// source token and making sure that identities are preserved during macro
-/// expansion.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(pub u32);
-
-impl fmt::Debug for TokenId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-impl TokenId {
-    pub const UNSPECIFIED: TokenId = TokenId(!0);
-    pub const fn unspecified() -> TokenId {
-        Self::UNSPECIFIED
-    }
-}
+pub use text_size::{TextRange, TextSize};
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 pub struct SpanData<Anchor, Ctx> {
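The tt hunk deletes the old TokenId identity type: token identity is now carried by the generic SpanData record (a range plus an anchor plus a hygiene context) rather than a bare u32 index. A stand-in for the migration this implies, with a hypothetical minimal leaf type; the real tt::Ident is generic over its span in the same way:

    // Before: identity was a bare index.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct TokenId(u32);

    #[derive(Debug)]
    struct IdentOld {
        text: String,
        span: TokenId,
    }

    // After: identity is whatever span type the tree is generic over,
    // so the same leaf works for real spans and for test dummies.
    #[derive(Debug)]
    struct IdentNew<S> {
        text: String,
        span: S,
    }

    fn main() {
        let old = IdentOld { text: "foo".into(), span: TokenId(0) };
        let new = IdentNew { text: "foo".into(), span: ((0u32, 3u32), "dummy-anchor") };
        println!("{old:?} -> {new:?}");
    }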