internal: Remove span trait

Lukas Wirth 2024-03-21 20:08:30 +01:00
parent 2ad14b8069
commit cd8eb0fe6d
11 changed files with 52 additions and 55 deletions
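
The pattern throughout is mechanical: the `tt::Span` marker trait (which only bundled `std::fmt::Debug + Copy + Sized + Eq`) is deleted, and each use site now states the bounds it actually relies on, usually `Copy` plus `fmt::Debug` where tokens get formatted. A minimal sketch of the before/after style, using made-up names (`FakeSpan`, `debug_print`) purely for illustration:

    use std::fmt;

    // Before: a marker trait that only bundled the bounds below.
    //     pub trait Span: std::fmt::Debug + Copy + Sized + Eq {}
    // After: no trait; each use site spells out what it relies on.
    fn debug_print<S: Copy + fmt::Debug>(span: S) {
        // `Copy` lets spans be passed by value; `Debug` is only
        // required where a span is actually formatted.
        println!("{span:?}");
    }

    fn main() {
        // Any plain Copy + Debug type can act as a span.
        #[derive(Copy, Clone, Debug, PartialEq, Eq)]
        struct FakeSpan(u32);

        debug_print(FakeSpan(42));
    }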

Cargo.lock (generated)
View file

@@ -2115,7 +2115,6 @@ name = "tt"
 version = "0.0.0"
 dependencies = [
  "smol_str",
- "span",
  "stdx",
  "text-size",
 ]

View file

@@ -623,6 +623,10 @@ impl CrateGraph {
         }
         id_map
     }
+
+    pub fn shrink_to_fit(&mut self) {
+        self.arena.shrink_to_fit();
+    }
 }
 
 impl ops::Index<CrateId> for CrateGraph {
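
The new `CrateGraph::shrink_to_fit` just forwards to the arena's `shrink_to_fit`, the usual way to drop the spare capacity a growable container keeps after construction. A rough sketch of the same idea with a plain `Vec` standing in for the arena:

    fn main() {
        // Growing a Vec doubles its capacity, so after construction it
        // usually holds more memory than its contents need.
        let mut crate_names: Vec<String> = Vec::new();
        for i in 0..1000 {
            crate_names.push(format!("crate{i}"));
        }
        let before = crate_names.capacity();

        // Once the set is final, hand the spare capacity back.
        crate_names.shrink_to_fit();
        assert!(crate_names.capacity() <= before);
        assert_eq!(crate_names.len(), 1000);
    }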

View file

@@ -1,5 +1,7 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
+use std::fmt;
+
 use rustc_hash::{FxHashMap, FxHashSet};
 use span::{SpanAnchor, SpanData, SpanMap};
 use stdx::{never, non_empty_vec::NonEmptyVec};
@@ -9,30 +11,27 @@ use syntax::{
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
 };
-use tt::{
-    buffer::{Cursor, TokenBuffer},
-    Span,
-};
+use tt::buffer::{Cursor, TokenBuffer};
 
 use crate::{to_parser_input::to_parser_input, tt_iter::TtIter};
 
 #[cfg(test)]
 mod tests;
 
-pub trait SpanMapper<S: Span> {
+pub trait SpanMapper<S> {
     fn span_for(&self, range: TextRange) -> S;
 }
 
 impl<S> SpanMapper<SpanData<S>> for SpanMap<S>
 where
-    SpanData<S>: Span,
+    SpanData<S>: Copy,
 {
     fn span_for(&self, range: TextRange) -> SpanData<S> {
         self.span_at(range.start())
     }
 }
 
-impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
+impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM {
     fn span_for(&self, range: TextRange) -> S {
         SM::span_for(self, range)
     }
@@ -78,8 +77,7 @@ pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
     span: SpanData<Ctx>,
 ) -> tt::Subtree<SpanData<Ctx>>
 where
-    SpanData<Ctx>: Span,
-    Ctx: Copy,
+    SpanData<Ctx>: Copy + fmt::Debug,
     SpanMap: SpanMapper<SpanData<Ctx>>,
 {
     let mut c = Converter::new(node, map, Default::default(), Default::default(), span);
@@ -98,8 +96,7 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
 ) -> tt::Subtree<SpanData<Ctx>>
 where
     SpanMap: SpanMapper<SpanData<Ctx>>,
-    SpanData<Ctx>: Span,
-    Ctx: Copy,
+    SpanData<Ctx>: Copy + fmt::Debug,
 {
     let mut c = Converter::new(node, map, append, remove, call_site);
     convert_tokens(&mut c)
@@ -124,8 +121,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
     entry_point: parser::TopEntryPoint,
 ) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
 where
-    SpanData<Ctx>: Span,
-    Ctx: Copy,
+    SpanData<Ctx>: Copy + fmt::Debug,
 {
     let buffer = match tt {
         tt::Subtree {
@@ -161,7 +157,7 @@ pub fn parse_to_token_tree<Ctx>(
     text: &str,
 ) -> Option<tt::Subtree<SpanData<Ctx>>>
 where
-    SpanData<Ctx>: Span,
+    SpanData<Ctx>: Copy + fmt::Debug,
     Ctx: Copy,
 {
     let lexed = parser::LexedStr::new(text);
@@ -175,7 +171,7 @@ where
 /// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
 pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
 where
-    S: Span,
+    S: Copy + fmt::Debug,
 {
     let lexed = parser::LexedStr::new(text);
     if lexed.errors().next().is_some() {
@@ -186,11 +182,10 @@ where
 }
 
 /// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep<S: Span>(
-    tt: &tt::Subtree<S>,
-    sep: char,
-    span: S,
-) -> Vec<tt::Subtree<S>> {
+pub fn parse_exprs_with_sep<S>(tt: &tt::Subtree<S>, sep: char, span: S) -> Vec<tt::Subtree<S>>
+where
+    S: Copy + fmt::Debug,
+{
     if tt.token_trees.is_empty() {
         return Vec::new();
     }
@@ -226,7 +221,8 @@ pub fn parse_exprs_with_sep<S: Span>(
 fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
 where
     C: TokenConverter<S>,
-    S: Span,
+    S: Copy + fmt::Debug,
+    C::Token: fmt::Debug,
 {
     let entry = tt::SubtreeBuilder {
         delimiter: tt::Delimiter::invisible_spanned(conv.call_site()),
@@ -485,7 +481,7 @@ struct StaticRawConverter<'a, S> {
     span: S,
 }
 
-trait SrcToken<Ctx, S>: std::fmt::Debug {
+trait SrcToken<Ctx, S> {
     fn kind(&self, ctx: &Ctx) -> SyntaxKind;
 
     fn to_char(&self, ctx: &Ctx) -> Option<char>;
@@ -525,7 +521,7 @@ impl<S, Ctx> SrcToken<RawConverter<'_, Ctx>, S> for usize {
     }
 }
 
-impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
+impl<S: Copy> SrcToken<StaticRawConverter<'_, S>, S> for usize {
     fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
         ctx.lexed.kind(*self)
     }
@@ -541,7 +537,7 @@ impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
 
 impl<Ctx: Copy> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
 where
-    SpanData<Ctx>: Span,
+    SpanData<Ctx>: Copy,
 {
     type Token = usize;
 
@@ -584,7 +580,7 @@ where
 
 impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
 where
-    S: Span,
+    S: Copy,
 {
     type Token = usize;
 
@@ -709,7 +705,7 @@ impl<S> SynToken<S> {
     }
 }
 
-impl<SpanMap, S: std::fmt::Debug> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
+impl<SpanMap, S> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
     fn kind(&self, _ctx: &Converter<SpanMap, S>) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
@@ -748,7 +744,7 @@ impl<SpanMap, S: std::fmt::Debug> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
 
 impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
 where
-    S: Span,
+    S: Copy,
     SpanMap: SpanMapper<S>,
 {
     type Token = SynToken<S>;
@@ -828,7 +824,7 @@ where
 
 struct TtTreeSink<'a, Ctx>
 where
-    SpanData<Ctx>: Span,
+    SpanData<Ctx>: Copy,
 {
     buf: String,
     cursor: Cursor<'a, SpanData<Ctx>>,
@@ -839,7 +835,7 @@ where
 
 impl<'a, Ctx> TtTreeSink<'a, Ctx>
 where
-    SpanData<Ctx>: Span,
+    SpanData<Ctx>: Copy,
 {
     fn new(cursor: Cursor<'a, SpanData<Ctx>>) -> Self {
         TtTreeSink {
@@ -871,7 +867,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
 
 impl<Ctx> TtTreeSink<'_, Ctx>
 where
-    SpanData<Ctx>: Span,
+    SpanData<Ctx>: Copy,
 {
     /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
     /// This occurs when a float literal is used as a field access.
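
Across this file the `S: Span` bounds become `S: Copy` (with `fmt::Debug` added only where the converter actually formats tokens), and `SpanMapper` itself no longer constrains `S` at all. A self-contained sketch of what implementing the relaxed `SpanMapper` looks like; `FixedSpan`, `FixedSpanMap`, and the `TextRange` alias are stand-ins for illustration, not types from the crate:

    use std::ops::Range;

    // Stand-in for the real `TextRange`, to keep the example self-contained.
    type TextRange = Range<u32>;

    // The trait as it reads after this commit: no `Span` super-bound on `S`.
    trait SpanMapper<S> {
        fn span_for(&self, range: TextRange) -> S;
    }

    // Hypothetical mapper that hands out the same fixed span for every range.
    #[derive(Copy, Clone, Debug)]
    struct FixedSpan(u32);

    struct FixedSpanMap(FixedSpan);

    impl SpanMapper<FixedSpan> for FixedSpanMap {
        fn span_for(&self, _range: TextRange) -> FixedSpan {
            self.0
        }
    }

    fn main() {
        let map = FixedSpanMap(FixedSpan(7));
        println!("{:?}", map.span_for(0..4));
    }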

View file

@@ -1,11 +1,13 @@
 //! Convert macro-by-example tokens which are specific to macro expansion into a
 //! format that works for our parser.
 
+use std::fmt;
+
 use syntax::{SyntaxKind, SyntaxKind::*, T};
-use tt::{buffer::TokenBuffer, Span};
+use tt::buffer::TokenBuffer;
 
-pub(crate) fn to_parser_input<S: Span>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
+pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
     let mut res = parser::Input::default();
 
     let mut current = buffer.begin();
 

View file

@@ -1,9 +1,10 @@
 //! A "Parser" structure for token trees. We use this when parsing a declarative
 //! macro definition into a list of patterns and templates.
 
+use core::fmt;
+
 use smallvec::{smallvec, SmallVec};
 use syntax::SyntaxKind;
-use tt::Span;
 
 use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
 
@@ -12,7 +13,7 @@ pub(crate) struct TtIter<'a, S> {
     pub(crate) inner: std::slice::Iter<'a, tt::TokenTree<S>>,
 }
 
-impl<'a, S: Span> TtIter<'a, S> {
+impl<'a, S: Copy> TtIter<'a, S> {
     pub(crate) fn new(subtree: &'a tt::Subtree<S>) -> TtIter<'a, S> {
         TtIter { inner: subtree.token_trees.iter() }
     }
@@ -130,7 +131,12 @@ impl<'a, S: Span> TtIter<'a, S> {
             _ => Ok(smallvec![first]),
         }
     }
+    pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
+        self.inner.as_slice().get(n)
+    }
+}
 
+impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> {
     pub(crate) fn expect_fragment(
         &mut self,
         entry_point: parser::PrefixEntryPoint,
@@ -185,10 +191,6 @@ impl<'a, S: Span> TtIter<'a, S> {
         };
         ExpandResult { value: res, err }
     }
-
-    pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
-        self.inner.as_slice().get(n)
-    }
 }
 
 impl<'a, S> Iterator for TtIter<'a, S> {
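
`TtIter`'s inherent impl is split in two so that only `expect_fragment`, which feeds tokens through the parser and needs to format them, requires `S: Copy + fmt::Debug`; everything else keeps the weaker `S: Copy`. A toy version of that split-impl-by-bounds pattern (the `Iter` type here is illustrative only):

    use std::fmt;

    struct Iter<S> {
        items: Vec<S>,
    }

    // Methods that only move values around need just `Copy`.
    impl<S: Copy> Iter<S> {
        fn peek_n(&self, n: usize) -> Option<S> {
            self.items.get(n).copied()
        }
    }

    // Methods that format values live in a second impl with the extra bound,
    // so callers whose span type lacks `Debug` can still use the rest of the API.
    impl<S: Copy + fmt::Debug> Iter<S> {
        fn dump(&self) {
            for item in &self.items {
                println!("{item:?}");
            }
        }
    }

    fn main() {
        let it = Iter { items: vec![1u32, 2, 3] };
        assert_eq!(it.peek_n(1), Some(2));
        it.dump();
    }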

View file

@@ -88,8 +88,6 @@ impl std::fmt::Debug for TokenId {
     }
 }
 
-impl tt::Span for TokenId {}
-
 #[derive(Serialize, Deserialize, Debug)]
 pub struct FlatTree {
     subtree: Vec<u32>,

View file

@@ -52,7 +52,7 @@ use crate::server::TokenStream;
 // see `build.rs`
 include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
 
-trait ProcMacroSrvSpan: tt::Span {
+trait ProcMacroSrvSpan: Copy {
     type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
     fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
 }

View file

@@ -101,6 +101,8 @@ pub(super) struct TokenStreamBuilder<S> {
 
 /// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
 pub(super) mod token_stream {
+    use core::fmt;
+
     use super::{TokenStream, TokenTree};
 
     /// An iterator over `TokenStream`'s `TokenTree`s.
@@ -122,7 +124,7 @@ pub(super) mod token_stream {
     ///
     /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
     /// change these errors into `LexError`s later.
-    impl<S: tt::Span> TokenStream<S> {
+    impl<S: Copy + fmt::Debug> TokenStream<S> {
         pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
             let subtree =
                 mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?;

View file

@@ -734,6 +734,8 @@ pub fn ws_to_crate_graph(
         });
         proc_macro_paths.push(crate_proc_macros);
     }
 
+    crate_graph.shrink_to_fit();
+    proc_macro_paths.shrink_to_fit();
     (crate_graph, proc_macro_paths, layouts, toolchains)
 }

View file

@@ -17,8 +17,5 @@ text-size.workspace = true
 
 stdx.workspace = true
 
-# FIXME: Remove this dependency once the `Span` trait is gone (that is once Span::DUMMY has been removed)
-span.workspace = true
-
 [lints]
 workspace = true

View file

@@ -11,18 +11,13 @@ use stdx::impl_from;
 pub use smol_str::SmolStr;
 pub use text_size::{TextRange, TextSize};
 
-pub trait Span: std::fmt::Debug + Copy + Sized + Eq {}
-
-impl<Ctx> Span for span::SpanData<Ctx> where span::SpanData<Ctx>: std::fmt::Debug + Copy + Sized + Eq
-{}
-
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum TokenTree<S> {
     Leaf(Leaf<S>),
     Subtree(Subtree<S>),
 }
 impl_from!(Leaf<S>, Subtree<S> for TokenTree);
-impl<S: Span> TokenTree<S> {
+impl<S: Copy> TokenTree<S> {
     pub fn empty(span: S) -> Self {
         Self::Subtree(Subtree {
             delimiter: Delimiter::invisible_spanned(span),
@@ -72,7 +67,7 @@ pub struct Subtree<S> {
     pub token_trees: Box<[TokenTree<S>]>,
 }
 
-impl<S: Span> Subtree<S> {
+impl<S: Copy> Subtree<S> {
     pub fn empty(span: DelimSpan<S>) -> Self {
         Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: Box::new([]) }
     }
@@ -114,7 +109,7 @@ pub struct Delimiter<S> {
     pub kind: DelimiterKind,
 }
 
-impl<S: Span> Delimiter<S> {
+impl<S: Copy> Delimiter<S> {
     pub const fn invisible_spanned(span: S) -> Self {
         Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
     }
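
With the blanket `Span` trait gone from `tt`, the token-tree types accept any `Copy` span. A self-contained mirror of the `Delimiter` shape above, showing that the `const fn` constructor still works under the plain `Copy` bound (the types here are illustrative copies, not the real ones from the crate):

    // Illustrative copies of the shapes above, not the real `tt` types.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum DelimiterKind {
        Invisible,
    }

    #[derive(Debug, Clone, Copy)]
    struct Delimiter<S> {
        open: S,
        close: S,
        kind: DelimiterKind,
    }

    impl<S: Copy> Delimiter<S> {
        // Still usable as a `const fn`: it only copies `span` into both fields.
        const fn invisible_spanned(span: S) -> Self {
            Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
        }
    }

    fn main() {
        // Any `Copy` type works as the span parameter now.
        let d = Delimiter::invisible_spanned(42u32);
        assert_eq!(d.kind, DelimiterKind::Invisible);
        assert_eq!((d.open, d.close), (42, 42));
    }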