Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-25 19:35:06 +00:00
Reduce allocations in attribute collection
Commit cc04cfc982 (parent c08df0f1f5)
9 changed files with 61 additions and 70 deletions
@@ -664,7 +664,7 @@ fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag:
             let attr = node
                 .doc_comments_and_attrs()
                 .nth((*invoc_attr_index) as usize)
-                .and_then(Either::right)
+                .and_then(Either::left)
                 .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
             (
                 ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),

@@ -525,38 +525,36 @@ impl AttrsWithOwner {
 
 fn inner_attributes(
     syntax: &SyntaxNode,
-) -> Option<(impl Iterator<Item = ast::Attr>, impl Iterator<Item = ast::Comment>)> {
-    let (attrs, docs) = match_ast! {
+) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
+    let node = match_ast! {
         match syntax {
-            ast::SourceFile(it) => (it.attrs(), ast::DocCommentIter::from_syntax_node(it.syntax())),
-            ast::ExternBlock(it) => {
-                let extern_item_list = it.extern_item_list()?;
-                (extern_item_list.attrs(), ast::DocCommentIter::from_syntax_node(extern_item_list.syntax()))
-            },
-            ast::Fn(it) => {
-                let body = it.body()?;
-                let stmt_list = body.stmt_list()?;
-                (stmt_list.attrs(), ast::DocCommentIter::from_syntax_node(body.syntax()))
-            },
-            ast::Impl(it) => {
-                let assoc_item_list = it.assoc_item_list()?;
-                (assoc_item_list.attrs(), ast::DocCommentIter::from_syntax_node(assoc_item_list.syntax()))
-            },
-            ast::Module(it) => {
-                let item_list = it.item_list()?;
-                (item_list.attrs(), ast::DocCommentIter::from_syntax_node(item_list.syntax()))
-            },
-            // FIXME: BlockExpr's only accept inner attributes in specific cases
-            // Excerpt from the reference:
-            // Block expressions accept outer and inner attributes, but only when they are the outer
-            // expression of an expression statement or the final expression of another block expression.
-            ast::BlockExpr(_it) => return None,
+            ast::SourceFile(_) => syntax.clone(),
+            ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+            ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+            ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+            ast::Module(it) => it.item_list()?.syntax().clone(),
+            ast::BlockExpr(it) => {
+                use syntax::SyntaxKind::{BLOCK_EXPR, EXPR_STMT};
+                // Block expressions accept outer and inner attributes, but only when they are the outer
+                // expression of an expression statement or the final expression of another block expression.
+                let may_carry_attributes = matches!(
+                    it.syntax().parent().map(|it| it.kind()),
+                    Some(BLOCK_EXPR | EXPR_STMT)
+                );
+                if !may_carry_attributes {
+                    return None
+                }
+                syntax.clone()
+            },
             _ => return None,
         }
     };
-    let attrs = attrs.filter(|attr| attr.kind().is_inner());
-    let docs = docs.filter(|doc| doc.is_inner());
-    Some((attrs, docs))
+
+    let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
+        Either::Left(attr) => attr.kind().is_inner(),
+        Either::Right(comment) => comment.is_inner(),
+    });
+    Some(attrs)
 }
 
 #[derive(Debug)]
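
A usage sketch of the rewritten inner_attributes path (hypothetical helper name, assuming the workspace's syntax and either crates and the AttrDocCommentIter added later in this commit): one pass over the owning node's children yields attributes and doc comments already interleaved in source order, so a caller can keep only the inner ones without building two separate iterators.

    // Minimal sketch, not part of the commit: list a file's inner attributes
    // and inner doc comments with the new single-pass iterator.
    use either::Either;
    use syntax::{ast, AstNode, AstToken, SourceFile};

    fn list_inner_attrs_and_docs(text: &str) {
        // Parse the source text into a syntax tree.
        let file: SourceFile = SourceFile::parse(text).tree();
        // Walk attributes and doc comments in one pass, in source order.
        for el in ast::AttrDocCommentIter::from_syntax_node(file.syntax()) {
            match el {
                Either::Left(attr) if attr.kind().is_inner() => {
                    println!("inner attr: {}", attr.syntax().text());
                }
                Either::Right(comment) if comment.is_inner() => {
                    println!("inner doc comment: {}", comment.text());
                }
                _ => {}
            }
        }
    }
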
@@ -833,24 +831,16 @@ fn attrs_from_item_tree<N: ItemTreeNode>(id: ItemTreeId<N>, db: &dyn DefDatabase
 fn collect_attrs(
     owner: &dyn ast::HasAttrs,
 ) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
-    let (inner_attrs, inner_docs) = inner_attributes(owner.syntax())
-        .map_or((None, None), |(attrs, docs)| (Some(attrs), Some(docs)));
-
-    let outer_attrs = owner.attrs().filter(|attr| attr.kind().is_outer());
-    let attrs = outer_attrs
-        .chain(inner_attrs.into_iter().flatten())
-        .map(|attr| (attr.syntax().text_range().start(), Either::Left(attr)));
-
-    let outer_docs =
-        ast::DocCommentIter::from_syntax_node(owner.syntax()).filter(ast::Comment::is_outer);
-    let docs = outer_docs
-        .chain(inner_docs.into_iter().flatten())
-        .map(|docs_text| (docs_text.syntax().text_range().start(), Either::Right(docs_text)));
-    // sort here by syntax node offset because the source can have doc attributes and doc strings be interleaved
-    docs.chain(attrs)
-        .sorted_by_key(|&(offset, _)| offset)
+    let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten();
+    let outer_attrs =
+        ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el {
+            Either::Left(attr) => attr.kind().is_outer(),
+            Either::Right(comment) => comment.is_outer(),
+        });
+    outer_attrs
+        .chain(inner_attrs)
         .enumerate()
-        .map(|(id, (_, attr))| (AttrId { ast_index: id as u32 }, attr))
+        .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr))
 }
 
 pub(crate) fn variants_attrs_source_map(
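
The mechanical changes in the hunks below follow from the item type flip above: AttrId::ast_index is assigned by enumerating the combined attribute/doc-comment stream, and lookup sites recover the attribute with doc_comments_and_attrs().nth(ast_index), where the attribute case is now Either::Left rather than Either::Right. A minimal sketch of that round trip (hypothetical helper name, assuming the workspace's syntax and either crates):

    use either::Either;
    use syntax::ast::{self, HasDocComments};

    // Recover the n-th attribute-or-doc-comment of an item and keep only the
    // attribute case; with the new Either<ast::Attr, ast::Comment> item type,
    // that is the Left variant.
    fn attr_by_index(item: &ast::Item, ast_index: usize) -> Option<ast::Attr> {
        item.doc_comments_and_attrs().nth(ast_index).and_then(Either::left)
    }
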
@@ -117,7 +117,7 @@ impl ChildBySource for ItemScope {
             |(ast_id, calls)| {
                 let adt = ast_id.to_node(db.upcast());
                 calls.for_each(|(attr_id, calls)| {
-                    if let Some(Either::Right(attr)) =
+                    if let Some(Either::Left(attr)) =
                         adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize)
                     {
                         res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, calls.into()));

@@ -157,7 +157,7 @@ pub fn expand_speculative(
     let attr = item
         .doc_comments_and_attrs()
         .nth(invoc_attr_index as usize)
-        .and_then(Either::right)?;
+        .and_then(Either::left)?;
     match attr.token_tree() {
         Some(token_tree) => {
             let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());

@@ -323,7 +323,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
             ast::Item::cast(node.clone())?
                 .doc_comments_and_attrs()
                 .nth(invoc_attr_index as usize)
-                .and_then(Either::right)
+                .and_then(Either::left)
                 .map(|attr| attr.syntax().clone())
                 .into_iter()
                 .collect()

@@ -191,7 +191,7 @@ fn make_hygiene_info(
         .to_node(db)
         .doc_comments_and_attrs()
         .nth(invoc_attr_index as usize)
-        .and_then(Either::right)?
+        .and_then(Either::left)?
         .token_tree()?;
     Some(InFile::new(ast_id.file_id, tt))
 }

@@ -205,7 +205,7 @@ impl HirFileId {
         .to_node(db)
         .doc_comments_and_attrs()
         .nth(invoc_attr_index as usize)
-        .and_then(Either::right)?
+        .and_then(Either::left)?
         .token_tree()?;
     Some(InFile::new(ast_id.file_id, tt))
 }

@@ -382,7 +382,7 @@ impl MacroCallKind {
                 .doc_comments_and_attrs()
                 .nth(derive_attr_index as usize)
                 .expect("missing derive")
-                .expect_right("derive is a doc comment?")
+                .expect_left("derive is a doc comment?")
                 .syntax()
                 .text_range()
             }

@@ -391,7 +391,7 @@ impl MacroCallKind {
                 .doc_comments_and_attrs()
                 .nth(invoc_attr_index as usize)
                 .expect("missing attribute")
-                .expect_right("attribute macro is a doc comment?")
+                .expect_left("attribute macro is a doc comment?")
                 .syntax()
                 .text_range(),
         };

@@ -483,7 +483,7 @@ impl ExpansionInfo {
                 let attr = item
                     .doc_comments_and_attrs()
                     .nth(*invoc_attr_index as usize)
-                    .and_then(Either::right)?;
+                    .and_then(Either::left)?;
                 match attr.token_tree() {
                     Some(token_tree)
                         if token_tree.syntax().text_range().contains_range(token_range) =>

@@ -27,8 +27,8 @@ pub use self::{
     operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
     token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix},
     traits::{
-        DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams, HasLoopBody,
-        HasModuleItem, HasName, HasTypeBounds, HasVisibility,
+        AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams,
+        HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
     },
 };
 

@@ -160,14 +160,9 @@ impl ast::Attr {
     }
 
     pub fn kind(&self) -> AttrKind {
-        let first_token = self.syntax().first_token();
-        let first_token_kind = first_token.as_ref().map(SyntaxToken::kind);
-        let second_token_kind =
-            first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind);
-
-        match (first_token_kind, second_token_kind) {
-            (Some(T![#]), Some(T![!])) => AttrKind::Inner,
-            _ => AttrKind::Outer,
+        match self.excl_token() {
+            Some(_) => AttrKind::Inner,
+            None => AttrKind::Outer,
         }
     }
 
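
A usage sketch of the simplified kind() (hypothetical helper name, assuming the workspace's syntax crate): an attribute is Inner exactly when it carries a `!` token, i.e. `#![...]` versus `#[...]`, so checking excl_token() replaces the manual inspection of the first two tokens.

    use syntax::{ast::HasAttrs, SourceFile};

    // Count the inner (`#![...]`) attributes of a source file.
    fn count_inner_attrs(text: &str) -> usize {
        let file = SourceFile::parse(text).tree();
        file.attrs().filter(|attr| attr.kind().is_inner()).count()
    }

    // For example, count_inner_attrs("#![allow(dead_code)]\nfn f() {}") would
    // be expected to return 1.
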
@@ -76,8 +76,8 @@ pub trait HasDocComments: HasAttrs {
     fn doc_comments(&self) -> DocCommentIter {
         DocCommentIter { iter: self.syntax().children_with_tokens() }
     }
-    fn doc_comments_and_attrs(&self) -> AttrCommentIter {
-        AttrCommentIter { iter: self.syntax().children_with_tokens() }
+    fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
+        AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
     }
 }
 

@@ -113,17 +113,23 @@ impl Iterator for DocCommentIter {
     }
 }
 
-pub struct AttrCommentIter {
+pub struct AttrDocCommentIter {
     iter: SyntaxElementChildren,
 }
 
-impl Iterator for AttrCommentIter {
-    type Item = Either<ast::Comment, ast::Attr>;
+impl AttrDocCommentIter {
+    pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> AttrDocCommentIter {
+        AttrDocCommentIter { iter: syntax_node.children_with_tokens() }
+    }
+}
+
+impl Iterator for AttrDocCommentIter {
+    type Item = Either<ast::Attr, ast::Comment>;
     fn next(&mut self) -> Option<Self::Item> {
         self.iter.by_ref().find_map(|el| match el {
-            SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Right),
+            SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
             SyntaxElement::Token(tok) => {
-                ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Left)
+                ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
             }
         })
     }
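
A consumer-side sketch of the new iterator (hypothetical helper name, assuming the workspace's syntax and either crates): because next() is a single find_map over children_with_tokens(), items come back interleaved in source order, with attributes on the Left and doc comments on the Right.

    use either::Either;
    use syntax::{ast, ast::HasDocComments, AstNode, AstToken};

    // Describe an item's attributes and doc comments in the order they
    // appear in the source text.
    fn describe_attrs_and_docs(item: &ast::Item) -> Vec<String> {
        item.doc_comments_and_attrs()
            .map(|el| match el {
                Either::Left(attr) => format!("attr: {}", attr.syntax().text()),
                Either::Right(doc) => format!("doc: {}", doc.text()),
            })
            .collect()
    }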