Mirror of https://github.com/rust-lang/rust-analyzer
Synced 2024-11-15 09:27:27 +00:00
Merge #11382

11382: fix: Fix `cfg_attr` invalidating derive identifier IDE functionalities r=Veykril a=Veykril

Proper fix for https://github.com/rust-analyzer/rust-analyzer/issues/11298

bors r+

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
Commit fd3942eb62: 11 changed files with 75 additions and 85 deletions
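
Note on the bug being fixed: the regression test added near the end of this diff pins down the shape that broke. The sketch below is a stand-alone illustration rather than code from the repository; `any()` stands in for the disabled feature flag the test uses, and the type is made up. The point is that once a `cfg_attr` line sits in front of the `#[derive(...)]`, the derive attribute is no longer at the raw position the IDE layer had recorded for it, so goto-definition and related features on the derive identifier went stale.

// Illustrative reproduction, not repository code. `any()` is always false,
// standing in for a disabled feature such as `feature = "false"` in the test.
#[cfg_attr(any(), derive(Debug))]
#[derive(Clone, Copy)]
struct Foo;

fn main() {
    let foo = Foo;
    let _first = foo;
    let _second = foo; // compiles only because Foo really is Copy
}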

@@ -664,7 +664,7 @@ fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag:
             let attr = node
                 .doc_comments_and_attrs()
                 .nth((*invoc_attr_index) as usize)
-                .and_then(Either::right)
+                .and_then(Either::left)
                 .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
             (
                 ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),

@@ -364,9 +364,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.resolve_derive_ident(derive, ident)
     }
 
-    // FIXME: use this instead?
-    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
-
     pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
         self.imp.record_literal_missing_fields(literal)
     }

@@ -931,7 +928,6 @@ impl<'db> SemanticsImpl<'db> {
             file.with_value(derive.clone()),
         )?;
         let attrs = adt_def.attrs(self.db);
-        // FIXME: https://github.com/rust-analyzer/rust-analyzer/issues/11298
         let mut derive_paths = attrs.get(attr_id)?.parse_path_comma_token_tree()?;
 
         let derive_idx = tt

@@ -73,8 +73,8 @@ impl ops::Deref for RawAttrs {
     }
 }
 impl Attrs {
-    pub fn get(&self, AttrId { ast_index, .. }: AttrId) -> Option<&Attr> {
-        (**self).get(ast_index as usize)
+    pub fn get(&self, id: AttrId) -> Option<&Attr> {
+        (**self).iter().find(|attr| attr.id == id)
     }
 }
 
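The `Attrs::get` change above is the core of the fix: an attribute is now located by comparing stored `AttrId`s instead of indexing the slice with `ast_index`, which is no longer reliable once `cfg_attr` expansion leaves gaps in the sequence; the next hunk drops the positional `ops::Index<AttrId>` impl for the same reason. A rough stand-alone sketch of the difference, with simplified stand-ins for `Attr` and `AttrId` (the real hir_def types carry more data):

// Simplified stand-ins, not the hir_def types.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct AttrId {
    ast_index: u32,
}

#[derive(Debug)]
struct Attr {
    id: AttrId,
    path: &'static str,
}

// New behaviour: find the attribute whose stored id matches.
fn get(attrs: &[Attr], id: AttrId) -> Option<&Attr> {
    attrs.iter().find(|attr| attr.id == id)
}

fn main() {
    // Pretend the attribute with index 1 was a cfg_attr that expanded to
    // nothing, so only ids 0 and 2 survive in the collected list.
    let attrs = [
        Attr { id: AttrId { ast_index: 0 }, path: "rustc_builtin_macro" },
        Attr { id: AttrId { ast_index: 2 }, path: "derive" },
    ];
    assert_eq!(get(&attrs, AttrId { ast_index: 2 }).unwrap().path, "derive");
    // The old positional lookup misses it:
    assert!(attrs.get(2).is_none());
}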

@@ -89,14 +89,6 @@ impl ops::Deref for Attrs {
     }
 }
 
-impl ops::Index<AttrId> for Attrs {
-    type Output = Attr;
-
-    fn index(&self, AttrId { ast_index, .. }: AttrId) -> &Self::Output {
-        &(**self)[ast_index as usize]
-    }
-}
-
 impl ops::Deref for AttrsWithOwner {
     type Target = Attrs;
 

@@ -110,7 +102,7 @@ impl RawAttrs {
 
     pub(crate) fn new(db: &dyn DefDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
         let entries = collect_attrs(owner)
-            .flat_map(|(id, attr)| match attr {
+            .filter_map(|(id, attr)| match attr {
                 Either::Left(attr) => {
                     attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
                 }

@@ -525,38 +517,36 @@ impl AttrsWithOwner {
 
 fn inner_attributes(
     syntax: &SyntaxNode,
-) -> Option<(impl Iterator<Item = ast::Attr>, impl Iterator<Item = ast::Comment>)> {
-    let (attrs, docs) = match_ast! {
+) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
+    let node = match_ast! {
         match syntax {
-            ast::SourceFile(it) => (it.attrs(), ast::DocCommentIter::from_syntax_node(it.syntax())),
-            ast::ExternBlock(it) => {
-                let extern_item_list = it.extern_item_list()?;
-                (extern_item_list.attrs(), ast::DocCommentIter::from_syntax_node(extern_item_list.syntax()))
-            },
-            ast::Fn(it) => {
-                let body = it.body()?;
-                let stmt_list = body.stmt_list()?;
-                (stmt_list.attrs(), ast::DocCommentIter::from_syntax_node(body.syntax()))
-            },
-            ast::Impl(it) => {
-                let assoc_item_list = it.assoc_item_list()?;
-                (assoc_item_list.attrs(), ast::DocCommentIter::from_syntax_node(assoc_item_list.syntax()))
-            },
-            ast::Module(it) => {
-                let item_list = it.item_list()?;
-                (item_list.attrs(), ast::DocCommentIter::from_syntax_node(item_list.syntax()))
-            },
-            // FIXME: BlockExpr's only accept inner attributes in specific cases
-            // Excerpt from the reference:
-            // Block expressions accept outer and inner attributes, but only when they are the outer
-            // expression of an expression statement or the final expression of another block expression.
-            ast::BlockExpr(_it) => return None,
+            ast::SourceFile(_) => syntax.clone(),
+            ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+            ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+            ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+            ast::Module(it) => it.item_list()?.syntax().clone(),
+            ast::BlockExpr(it) => {
+                use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT};
+                // Block expressions accept outer and inner attributes, but only when they are the outer
+                // expression of an expression statement or the final expression of another block expression.
+                let may_carry_attributes = matches!(
+                    it.syntax().parent().map(|it| it.kind()),
+                    Some(BLOCK_EXPR | EXPR_STMT)
+                );
+                if !may_carry_attributes {
+                    return None
+                }
+                syntax.clone()
+            },
             _ => return None,
         }
     };
-    let attrs = attrs.filter(|attr| attr.kind().is_inner());
-    let docs = docs.filter(|doc| doc.is_inner());
-    Some((attrs, docs))
+
+    let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
+        Either::Left(attr) => attr.kind().is_inner(),
+        Either::Right(comment) => comment.is_inner(),
+    });
+    Some(attrs)
 }
 
 #[derive(Debug)]

@@ -833,24 +823,16 @@ fn attrs_from_item_tree<N: ItemTreeNode>(id: ItemTreeId<N>, db: &dyn DefDatabase
 fn collect_attrs(
     owner: &dyn ast::HasAttrs,
 ) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
-    let (inner_attrs, inner_docs) = inner_attributes(owner.syntax())
-        .map_or((None, None), |(attrs, docs)| (Some(attrs), Some(docs)));
-
-    let outer_attrs = owner.attrs().filter(|attr| attr.kind().is_outer());
-    let attrs = outer_attrs
-        .chain(inner_attrs.into_iter().flatten())
-        .map(|attr| (attr.syntax().text_range().start(), Either::Left(attr)));
-
-    let outer_docs =
-        ast::DocCommentIter::from_syntax_node(owner.syntax()).filter(ast::Comment::is_outer);
-    let docs = outer_docs
-        .chain(inner_docs.into_iter().flatten())
-        .map(|docs_text| (docs_text.syntax().text_range().start(), Either::Right(docs_text)));
-    // sort here by syntax node offset because the source can have doc attributes and doc strings be interleaved
-    docs.chain(attrs)
-        .sorted_by_key(|&(offset, _)| offset)
+    let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten();
+    let outer_attrs =
+        ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el {
+            Either::Left(attr) => attr.kind().is_outer(),
+            Either::Right(comment) => comment.is_outer(),
+        });
+    outer_attrs
+        .chain(inner_attrs)
         .enumerate()
-        .map(|(id, (_, attr))| (AttrId { ast_index: id as u32 }, attr))
+        .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr))
 }
 
 pub(crate) fn variants_attrs_source_map(
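Taken together, the two hunks above make `inner_attributes` and `collect_attrs` walk one interleaved stream of attributes and doc comments (`AttrDocCommentIter`) in source order and hand out `ast_index` values straight from `enumerate()`, instead of building separate attribute and doc-comment streams and merging them with an offset sort. As far as this diff shows, the invariant that buys is that a stored `ast_index` can be used directly as the `.nth()` position in `doc_comments_and_attrs()` at the call sites changed elsewhere in the commit. A toy sketch of that invariant with stand-in types, not rust-analyzer code:

// Stand-in for the interleaved attribute / doc-comment stream.
#[derive(Debug, Clone, PartialEq)]
enum Piece {
    DocComment(&'static str),
    Attr(&'static str),
}

// One pass in source order; ids come straight from enumerate().
fn collect(pieces: &[Piece]) -> Vec<(u32, Piece)> {
    pieces.iter().cloned().enumerate().map(|(i, p)| (i as u32, p)).collect()
}

fn main() {
    let source_order = [
        Piece::DocComment("/// A copyable type."),
        Piece::Attr("#[derive(Copy)]"),
        Piece::Attr("#[repr(C)]"),
    ];
    let collected = collect(&source_order);
    let (id, _) =
        collected.iter().find(|(_, p)| *p == Piece::Attr("#[repr(C)]")).unwrap();
    // The stored id is exactly the item's position in the original stream,
    // which is what the `.nth(index)` call sites rely on.
    assert_eq!(source_order.get(*id as usize), Some(&Piece::Attr("#[repr(C)]")));
    println!("ids line up with nth() positions");
}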

@@ -117,7 +117,7 @@ impl ChildBySource for ItemScope {
             |(ast_id, calls)| {
                 let adt = ast_id.to_node(db.upcast());
                 calls.for_each(|(attr_id, calls)| {
-                    if let Some(Either::Right(attr)) =
+                    if let Some(Either::Left(attr)) =
                         adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize)
                     {
                         res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, calls.into()));

@@ -157,7 +157,7 @@ pub fn expand_speculative(
             let attr = item
                 .doc_comments_and_attrs()
                 .nth(invoc_attr_index as usize)
-                .and_then(Either::right)?;
+                .and_then(Either::left)?;
             match attr.token_tree() {
                 Some(token_tree) => {
                     let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());

@@ -323,7 +323,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
             ast::Item::cast(node.clone())?
                 .doc_comments_and_attrs()
                 .nth(invoc_attr_index as usize)
-                .and_then(Either::right)
+                .and_then(Either::left)
                 .map(|attr| attr.syntax().clone())
                 .into_iter()
                 .collect()

@@ -191,7 +191,7 @@ fn make_hygiene_info(
                 .to_node(db)
                 .doc_comments_and_attrs()
                 .nth(invoc_attr_index as usize)
-                .and_then(Either::right)?
+                .and_then(Either::left)?
                 .token_tree()?;
             Some(InFile::new(ast_id.file_id, tt))
         }

@@ -205,7 +205,7 @@ impl HirFileId {
                     .to_node(db)
                     .doc_comments_and_attrs()
                     .nth(invoc_attr_index as usize)
-                    .and_then(Either::right)?
+                    .and_then(Either::left)?
                     .token_tree()?;
                 Some(InFile::new(ast_id.file_id, tt))
             }

@@ -382,7 +382,7 @@ impl MacroCallKind {
                     .doc_comments_and_attrs()
                     .nth(derive_attr_index as usize)
                     .expect("missing derive")
-                    .expect_right("derive is a doc comment?")
+                    .expect_left("derive is a doc comment?")
                     .syntax()
                     .text_range()
             }

@@ -391,7 +391,7 @@ impl MacroCallKind {
                     .doc_comments_and_attrs()
                     .nth(invoc_attr_index as usize)
                     .expect("missing attribute")
-                    .expect_right("attribute macro is a doc comment?")
+                    .expect_left("attribute macro is a doc comment?")
                     .syntax()
                     .text_range(),
             };

@@ -483,7 +483,7 @@ impl ExpansionInfo {
                 let attr = item
                     .doc_comments_and_attrs()
                     .nth(*invoc_attr_index as usize)
-                    .and_then(Either::right)?;
+                    .and_then(Either::left)?;
                 match attr.token_tree() {
                     Some(token_tree)
                         if token_tree.syntax().text_range().contains_range(token_range) =>

@@ -1364,10 +1364,21 @@ impl Twait for Stwuct {
     fn goto_def_derive_input() {
         check(
             r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+pub macro Copy {}
+       // ^^^^
+#[derive(Copy$0)]
+struct Foo;
+"#,
+        );
+        check(
+            r#"
 //- minicore:derive
 #[rustc_builtin_macro]
 pub macro Copy {}
        // ^^^^
+#[cfg_attr(feature = "false", derive)]
 #[derive(Copy$0)]
 struct Foo;
 "#,

@@ -27,8 +27,8 @@ pub use self::{
     operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
     token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix},
     traits::{
-        DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams, HasLoopBody,
-        HasModuleItem, HasName, HasTypeBounds, HasVisibility,
+        AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams,
+        HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
     },
 };
 

@@ -160,14 +160,9 @@ impl ast::Attr {
     }
 
     pub fn kind(&self) -> AttrKind {
-        let first_token = self.syntax().first_token();
-        let first_token_kind = first_token.as_ref().map(SyntaxToken::kind);
-        let second_token_kind =
-            first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind);
-
-        match (first_token_kind, second_token_kind) {
-            (Some(T![#]), Some(T![!])) => AttrKind::Inner,
-            _ => AttrKind::Outer,
+        match self.excl_token() {
+            Some(_) => AttrKind::Inner,
+            None => AttrKind::Outer,
         }
     }
 
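The simplified `ast::Attr::kind` above leans on the grammar rule that an attribute is inner exactly when a `!` follows the `#`; checking `excl_token()` states that directly instead of peeking at the first two raw tokens. A stand-alone sketch of the same rule applied to attribute text (the `classify` helper is hypothetical, not rust-analyzer API):

#[derive(Debug, PartialEq)]
enum AttrKind {
    Inner,
    Outer,
}

// Hypothetical helper: an attribute is inner iff a `!` follows the `#`.
fn classify(attr_text: &str) -> AttrKind {
    match attr_text.trim_start().strip_prefix('#') {
        Some(rest) if rest.trim_start().starts_with('!') => AttrKind::Inner,
        _ => AttrKind::Outer,
    }
}

fn main() {
    assert_eq!(classify("#![no_std]"), AttrKind::Inner);
    assert_eq!(classify("#[derive(Copy)]"), AttrKind::Outer);
    println!("inner/outer matches the `!` rule");
}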

@@ -76,8 +76,8 @@ pub trait HasDocComments: HasAttrs {
     fn doc_comments(&self) -> DocCommentIter {
         DocCommentIter { iter: self.syntax().children_with_tokens() }
     }
-    fn doc_comments_and_attrs(&self) -> AttrCommentIter {
-        AttrCommentIter { iter: self.syntax().children_with_tokens() }
+    fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
+        AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
     }
 }
 

@@ -113,17 +113,23 @@ impl Iterator for DocCommentIter {
     }
 }
 
-pub struct AttrCommentIter {
+pub struct AttrDocCommentIter {
     iter: SyntaxElementChildren,
 }
 
-impl Iterator for AttrCommentIter {
-    type Item = Either<ast::Comment, ast::Attr>;
+impl AttrDocCommentIter {
+    pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> AttrDocCommentIter {
+        AttrDocCommentIter { iter: syntax_node.children_with_tokens() }
+    }
+}
+
+impl Iterator for AttrDocCommentIter {
+    type Item = Either<ast::Attr, ast::Comment>;
     fn next(&mut self) -> Option<Self::Item> {
         self.iter.by_ref().find_map(|el| match el {
-            SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Right),
+            SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
             SyntaxElement::Token(tok) => {
-                ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Left)
+                ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
             }
         })
     }
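
The renamed iterator is what the rest of the commit leans on: `doc_comments_and_attrs()` now yields `Either<ast::Attr, ast::Comment>` with attributes on the `Left`, which is why every `Either::right` / `expect_right` call site earlier in the diff flips to the `left` variant. A stand-alone sketch of the same classify-into-Either pattern on plain data (the local `Either` and `Element` are simplified stand-ins for the `either` crate's type and rowan's `SyntaxElement`):

// Simplified stand-ins; the real iterator works on syntax tree elements.
#[derive(Debug)]
enum Either<L, R> {
    Left(L),
    Right(R),
}

enum Element {
    Node(&'static str),  // stands in for SyntaxElement::Node
    Token(&'static str), // stands in for SyntaxElement::Token
}

// Mirrors the find_map in the new iterator: skip anything that is neither
// an attribute-like node nor a doc-comment-like token.
fn next_attr_or_doc(
    iter: &mut impl Iterator<Item = Element>,
) -> Option<Either<&'static str, &'static str>> {
    iter.find_map(|el| match el {
        Element::Node(node) if node.starts_with('#') => Some(Either::Left(node)),
        Element::Token(tok) if tok.starts_with("///") => Some(Either::Right(tok)),
        _ => None,
    })
}

fn main() {
    let mut children = vec![
        Element::Token("/// A copyable type."),
        Element::Node("#[derive(Copy)]"),
        Element::Node("struct Foo;"),
        Element::Token("// not a doc comment"),
    ]
    .into_iter();
    while let Some(item) = next_attr_or_doc(&mut children) {
        println!("{item:?}");
    }
}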