Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 05:23:24 +00:00
Remove tt -> ast -> tt round trips in attrs lowering

commit e320004dad (parent f09020567e)
7 changed files with 94 additions and 72 deletions
@@ -18,28 +18,6 @@ pub enum CfgAtom {
     KeyValue { key: SmolStr, value: SmolStr },
 }
 
-impl CfgAtom {
-    /// Returns `true` when the atom comes from the target specification.
-    ///
-    /// If this returns `true`, then changing this atom requires changing the compilation target. If
-    /// it returns `false`, the atom might come from a build script or the build system.
-    pub fn is_target_defined(&self) -> bool {
-        match self {
-            CfgAtom::Flag(flag) => matches!(&**flag, "unix" | "windows"),
-            CfgAtom::KeyValue { key, value: _ } => matches!(
-                &**key,
-                "target_arch"
-                    | "target_os"
-                    | "target_env"
-                    | "target_family"
-                    | "target_endian"
-                    | "target_pointer_width"
-                    | "target_vendor" // NOTE: `target_feature` is left out since it can be configured via `-Ctarget-feature`
-            ),
-        }
-    }
-}
-
 impl fmt::Display for CfgAtom {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
@@ -131,11 +131,9 @@ impl CfgDiff {
     /// of both.
     pub fn new(enable: Vec<CfgAtom>, disable: Vec<CfgAtom>) -> Option<CfgDiff> {
         let mut occupied = FxHashSet::default();
-        for item in enable.iter().chain(disable.iter()) {
-            if !occupied.insert(item) {
-                // was present
-                return None;
-            }
-        }
+        if enable.iter().chain(disable.iter()).any(|item| !occupied.insert(item)) {
+            // was present
+            return None;
+        }
 
         Some(CfgDiff { enable, disable })
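The rewritten check works because `HashSet::insert` returns `false` when the value is already present, so a single `any` call over both lists finds any atom that occurs more than once across enable/disable. Below is a minimal standalone sketch of the same idiom, using `std::collections::HashSet` in place of `FxHashSet` (the `insert` contract is the same):

    use std::collections::HashSet;

    // Returns true when `items` contains a duplicate, mirroring the
    // `any(|item| !occupied.insert(item))` check in `CfgDiff::new`.
    fn has_duplicate<T: std::hash::Hash + Eq>(items: &[T]) -> bool {
        let mut occupied = HashSet::new();
        // `insert` returns false if the value was already in the set.
        items.iter().any(|item| !occupied.insert(item))
    }

    fn main() {
        assert!(!has_duplicate(&["unix", "windows"]));
        assert!(has_duplicate(&["unix", "unix"]));
    }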
@@ -32,6 +32,7 @@ use crate::{
     VariantId,
 };
 
+/// Desugared attributes of an item post `cfg_attr` expansion.
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct Attrs(RawAttrs);
 
@@ -228,7 +229,6 @@ pub enum DocAtom {
     KeyValue { key: SmolStr, value: SmolStr },
 }
 
-// Adapted from `CfgExpr` parsing code
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum DocExpr {
     Invalid,
@@ -448,10 +448,7 @@ impl AttrsWithOwner {
                 let map = db.fields_attrs_source_map(id.parent);
                 let file_id = id.parent.file_id(db);
                 let root = db.parse_or_expand(file_id);
-                let owner = match &map[id.local_id] {
-                    Either::Left(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
-                    Either::Right(it) => ast::AnyHasAttrs::new(it.to_node(&root)),
-                };
+                let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
                 InFile::new(file_id, owner)
             }
             AttrDefId::AdtId(adt) => match adt {
@@ -634,7 +631,7 @@ fn attrs_from_item_tree_assoc<'db, N: ItemTreeModItemNode>(
 pub(crate) fn fields_attrs_source_map(
     db: &dyn DefDatabase,
     def: VariantId,
-) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>> {
+) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
     let mut res = ArenaMap::default();
     let child_source = def.child_source(db);
 
@@ -643,7 +640,7 @@ pub(crate) fn fields_attrs_source_map(
             idx,
             variant
                 .as_ref()
-                .either(|l| Either::Left(AstPtr::new(l)), |r| Either::Right(AstPtr::new(r))),
+                .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
         );
     }
 
@@ -194,7 +194,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
     fn fields_attrs_source_map(
         &self,
         def: VariantId,
-    ) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>>;
+    ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
 
     #[salsa::invoke(AttrsWithOwner::attrs_query)]
     fn attrs(&self, def: AttrDefId) -> Attrs;
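The three signature hunks above flip `Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>` into `AstPtr<Either<ast::TupleField, ast::RecordField>>`, so a field pointer can be resolved with one `to_node` call instead of matching on the `Either` first (compare the `AttrsWithOwner` hunk). The following is a rough, self-contained sketch of that call-site difference; `Ptr`, `TupleField`, and `RecordField` are toy stand-ins, not the rust-analyzer types:

    // Toy stand-ins for illustration only; `Ptr` plays the role of `AstPtr`.
    #[derive(Debug, Clone)]
    enum Either<L, R> {
        Left(L),
        Right(R),
    }

    #[derive(Debug, Clone)]
    struct TupleField(String);
    #[derive(Debug, Clone)]
    struct RecordField(String);

    // Pretend this stores a position in a tree; here it just carries the node.
    struct Ptr<N> {
        node: N,
    }

    impl<N: Clone> Ptr<N> {
        fn to_node(&self, _root: &()) -> N {
            self.node.clone()
        }
    }

    fn main() {
        // Old shape: the caller must match on the Either before it can resolve.
        let old: Either<Ptr<TupleField>, Ptr<RecordField>> =
            Either::Left(Ptr { node: TupleField("0: u32".into()) });
        let _resolved_old = match &old {
            Either::Left(ptr) => format!("{:?}", ptr.to_node(&())),
            Either::Right(ptr) => format!("{:?}", ptr.to_node(&())),
        };

        // New shape: one resolve call; any matching happens on the resolved node.
        let new: Ptr<Either<TupleField, RecordField>> =
            Ptr { node: Either::Left(TupleField("0: u32".into())) };
        let _resolved_new = format!("{:?}", new.to_node(&()));
    }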
@@ -117,14 +117,10 @@ impl RawAttrs {
                     None => return smallvec![attr.clone()],
                 };
                 let index = attr.id;
-                let attrs =
-                    parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
-                        let tree = Subtree {
-                            delimiter: tt::Delimiter::invisible_spanned(attr.first()?.first_span()),
-                            token_trees: attr.to_vec(),
-                        };
-                        Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
-                    });
+                let attrs = parts
+                    .enumerate()
+                    .take(1 << AttrId::CFG_ATTR_BITS)
+                    .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
 
                 let cfg_options = &crate_graph[krate].cfg_options;
                 let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
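In the `RawAttrs` hunk above, each comma-separated part of an expanded `cfg_attr` is now handed to `Attr::from_tt` as a plain token slice instead of being wrapped in a fresh invisible-delimiter `Subtree`; the parts are enumerated so the sub-attribute index can be recorded via `with_cfg_attr`, and capped at `1 << AttrId::CFG_ATTR_BITS` entries, presumably because only that many sub-attribute indices fit in the id encoding. A simplified sketch of that enumerate/take/filter_map shape (the constant and types here are illustrative assumptions, not rust-analyzer's internals):

    // Sketch of how one `cfg_attr(predicate, attr1, attr2, ...)` expands into
    // indexed sub-attributes. CFG_ATTR_BITS and the types are illustrative
    // assumptions, not rust-analyzer's internal encoding.
    const CFG_ATTR_BITS: usize = 7;

    fn expand_cfg_attr(sub_attrs: Vec<&str>) -> Vec<(usize, String)> {
        sub_attrs
            .into_iter()
            .enumerate()
            .take(1 << CFG_ATTR_BITS) // keep at most 128 expanded attributes
            .map(|(idx, attr)| (idx, attr.to_string()))
            .collect()
    }

    fn main() {
        // #[cfg_attr(feature = "std", derive(Debug), allow(unused))]
        let expanded = expand_cfg_attr(vec!["derive(Debug)", "allow(unused)"]);
        assert_eq!(expanded[0], (0, "derive(Debug)".to_string()));
        assert_eq!(expanded[1], (1, "allow(unused)".to_string()));
    }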
@@ -222,12 +218,41 @@ impl Attr {
         Some(Attr { id, path, input, span })
     }
 
-    fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
-        // FIXME: Unecessary roundtrip tt -> ast -> tt
-        let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
-        let ast = ast::Meta::cast(parse.syntax_node())?;
-
-        Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
+    fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
+        dbg!(tt);
+        let span = tt.first()?.first_span();
+        let path_end = tt
+            .iter()
+            .position(|tt| {
+                !matches!(
+                    tt,
+                    tt::TokenTree::Leaf(
+                        tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
+                    )
+                )
+            })
+            .unwrap_or_else(|| tt.len());
+
+        let (path, input) = tt.split_at(path_end);
+        let path = Interned::new(ModPath::from_tt(db, path)?);
+
+        let input = match input.get(0) {
+            Some(tt::TokenTree::Subtree(tree)) => {
+                Some(Interned::new(AttrInput::TokenTree(Box::new(tree.clone()))))
+            }
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => {
+                let input = match input.get(1) {
+                    Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text, .. }))) => {
+                        //FIXME the trimming here isn't quite right, raw strings are not handled
+                        Some(Interned::new(AttrInput::Literal(text.trim_matches('"').into())))
+                    }
+                    _ => None,
+                };
+                input
+            }
+            _ => None,
+        };
+        Some(Attr { id, path, input, span })
     }
 
     pub fn path(&self) -> &ModPath {
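The new `Attr::from_tt` above scans the flat token list, takes the leading run of identifiers and `:`/`$` punctuation as the attribute path, and interprets whatever follows as the input: a delimited subtree (as in `#[derive(Debug)]`) or `= literal` (as in `#[doc = "..."]`). Here is a simplified, runnable sketch of that splitting step using a toy token model in place of the real `tt` types:

    // Toy token model; the real code works on `tt::TokenTree` leaves and subtrees.
    #[derive(Debug, Clone, PartialEq)]
    enum Tok {
        Ident(String),
        Punct(char),
        Literal(String),
        Group(Vec<Tok>), // stands in for a delimited tt::Subtree
    }

    #[derive(Debug, PartialEq)]
    enum Input {
        None,
        Group(Vec<Tok>),  // e.g. `#[derive(Debug)]`
        Literal(String),  // e.g. `#[doc = "text"]`
    }

    // Split the tokens of an attribute into its path and its input at the first
    // token that can no longer be part of a path (not an ident, `:` or `$`).
    fn split_attr(tokens: &[Tok]) -> (Vec<Tok>, Input) {
        let path_end = tokens
            .iter()
            .position(|t| !matches!(t, Tok::Ident(_) | Tok::Punct(':' | '$')))
            .unwrap_or(tokens.len());
        let (path, rest) = tokens.split_at(path_end);
        let input = match rest {
            [Tok::Group(g), ..] => Input::Group(g.clone()),
            [Tok::Punct('='), Tok::Literal(lit), ..] => {
                Input::Literal(lit.trim_matches('"').to_string())
            }
            _ => Input::None,
        };
        (path.to_vec(), input)
    }

    fn main() {
        // `#[doc = "hi"]` -> path `doc`, literal input `hi`
        let tokens = vec![
            Tok::Ident("doc".into()),
            Tok::Punct('='),
            Tok::Literal("\"hi\"".into()),
        ];
        let (path, input) = split_attr(&tokens);
        assert_eq!(path, vec![Tok::Ident("doc".into())]);
        assert_eq!(input, Input::Literal("hi".into()));
    }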
@@ -277,29 +302,8 @@ impl Attr {
             .token_trees
             .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
             .filter_map(move |tts| {
-                if tts.is_empty() {
-                    return None;
-                }
-                // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
-                // here or maybe just parse a mod path from a token tree directly
-                let subtree = tt::Subtree {
-                    delimiter: tt::Delimiter::invisible_spanned(tts.first()?.first_span()),
-                    token_trees: tts.to_vec(),
-                };
-                let (parse, span_map) =
-                    mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
-                let meta = ast::Meta::cast(parse.syntax_node())?;
-                // Only simple paths are allowed.
-                if meta.eq_token().is_some() || meta.expr().is_some() || meta.token_tree().is_some()
-                {
-                    return None;
-                }
-                let path = meta.path()?;
-                let call_site = span_map.span_at(path.syntax().text_range().start());
-                Some((
-                    ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
-                    call_site,
-                ))
+                let span = tts.first()?.first_span();
+                Some((ModPath::from_tt(db, tts)?, span))
             });
 
         Some(paths)
@@ -10,6 +10,7 @@ use crate::{
     hygiene::{marks_rev, SyntaxContextExt, Transparency},
     name::{known, AsName, Name},
     span_map::SpanMapRef,
+    tt,
 };
 use base_db::CrateId;
 use smallvec::SmallVec;
@@ -53,6 +54,10 @@ impl ModPath {
         convert_path(db, None, path, span_map)
     }
 
+    pub fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
+        convert_path_tt(db, tt)
+    }
+
     pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
         let segments = segments.into_iter().collect();
         ModPath { kind, segments }
@@ -281,6 +286,46 @@ fn convert_path(
     Some(mod_path)
 }
 
+fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModPath> {
+    let mut leafs = tt.iter().filter_map(|tt| match tt {
+        tt::TokenTree::Leaf(leaf) => Some(leaf),
+        tt::TokenTree::Subtree(_) => None,
+    });
+    let mut segments = smallvec::smallvec![];
+    let kind = match leafs.next()? {
+        tt::Leaf::Punct(tt::Punct { char: ':', .. }) => match leafs.next()? {
+            tt::Leaf::Punct(tt::Punct { char: ':', .. }) => PathKind::Abs,
+            _ => return None,
+        },
+        tt::Leaf::Ident(tt::Ident { text, span }) if text == "$crate" => {
+            resolve_crate_root(db, span.ctx).map(PathKind::DollarCrate).unwrap_or(PathKind::Crate)
+        }
+        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "self" => PathKind::Super(0),
+        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "super" => {
+            let mut deg = 1;
+            while let Some(tt::Leaf::Ident(tt::Ident { text, .. })) = leafs.next() {
+                if text != "super" {
+                    segments.push(Name::new_text_dont_use(text.clone()));
+                    break;
+                }
+                deg += 1;
+            }
+            PathKind::Super(deg)
+        }
+        tt::Leaf::Ident(tt::Ident { text, .. }) if text == "crate" => PathKind::Crate,
+        tt::Leaf::Ident(ident) => {
+            segments.push(Name::new_text_dont_use(ident.text.clone()));
+            PathKind::Plain
+        }
+        _ => return None,
+    };
+    segments.extend(leafs.filter_map(|leaf| match leaf {
+        ::tt::Leaf::Ident(ident) => Some(Name::new_text_dont_use(ident.text.clone())),
+        _ => None,
+    }));
+    Some(ModPath { kind, segments })
+}
+
 pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
     // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
     // we don't want to pretend that the `macro_rules!` definition is in the `macro`
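`convert_path_tt` decides the `PathKind` from the first leaf (`::` for absolute paths, `$crate`, `self`, `super`, `crate`, or a plain identifier) and collects the remaining identifiers as segments. A simplified, self-contained sketch of that classification, with toy types standing in for `tt::Leaf` and `ModPath` (`$crate` and hygiene are ignored here):

    // Toy stand-ins for PathKind and ModPath, for illustration only.
    #[derive(Debug, PartialEq)]
    enum PathKind { Plain, Abs, Crate, Super(u8) }

    #[derive(Debug, PartialEq)]
    struct Path { kind: PathKind, segments: Vec<String> }

    // Classify a path from its identifier segments; `::` separators are already
    // dropped, much as the real code skips punctuation after the leading `::` check.
    fn classify(leading_colons: bool, idents: &[&str]) -> Path {
        let mut segments = Vec::new();
        let mut iter = idents.iter();
        let kind = if leading_colons {
            PathKind::Abs
        } else {
            match iter.next() {
                Some(&"crate") => PathKind::Crate,
                Some(&"self") => PathKind::Super(0),
                Some(&"super") => {
                    let mut deg = 1;
                    for &seg in iter.by_ref() {
                        if seg != "super" {
                            segments.push(seg.to_string());
                            break;
                        }
                        deg += 1;
                    }
                    PathKind::Super(deg)
                }
                Some(first) => {
                    segments.push(first.to_string());
                    PathKind::Plain
                }
                None => PathKind::Plain,
            }
        };
        segments.extend(iter.map(|s| s.to_string()));
        Path { kind, segments }
    }

    fn main() {
        assert_eq!(classify(true, &["std", "mem"]).kind, PathKind::Abs);
        assert_eq!(classify(false, &["crate", "attrs"]).kind, PathKind::Crate);
        assert_eq!(
            classify(false, &["super", "super", "db"]),
            Path { kind: PathKind::Super(2), segments: vec!["db".to_string()] }
        );
    }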
@@ -206,7 +206,7 @@ impl server::TokenStream for TokenIdServer {
             stream: if subtree.token_trees.is_empty() {
                 None
             } else {
-                Some(subtree.token_trees.into_iter().collect())
+                Some(subtree.token_trees)
             },
             span: bridge::DelimSpan::from_single(subtree.delimiter.open),
         }),