Auto merge of #18108 - ChayimFriedman2:lint-level-cfg, r=Veykril

fix: Handle lint attributes that are under `#[cfg_attr]`

I forgot about `cfg_attr` while working on #18099, although the original code didn't handle it either (case lints specifically were correct, by virtue of using hir attrs).
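
For illustration, a sketch of the attribute shapes this change covers, mirroring the new `cfged_lint_attrs` test fixture below (the `cool_feature` feature name is just the test's example):

```rust
// Assuming the crate is compiled with `--cfg feature="cool_feature"`:

// Condition is false: the `allow` is discarded, so the case lint still fires.
#[cfg_attr(any(), allow(non_snake_case))]
fn FOO() {}

// Condition is true: the `allow` applies and the lint is suppressed.
#[cfg_attr(feature = "cool_feature", allow(non_snake_case))]
fn BAZ() {}

// Nested `cfg_attr`s are walked recursively before the lint attribute is found.
#[cfg_attr(feature = "cool_feature", cfg_attr(all(), allow(non_snake_case)))]
fn QUX() {}
```

In short, lint attributes gathered from a `cfg_attr` are discarded when the condition is false or cannot be checked, and kept otherwise.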
bors 2024-09-20 06:54:53 +00:00
commit c467115fd9
5 changed files with 120 additions and 16 deletions

View file

@@ -6,7 +6,7 @@ use cfg::{CfgAtom, CfgExpr};
 use intern::{sym, Symbol};
 use rustc_hash::FxHashSet;
 use syntax::{
-    ast::{self, Attr, HasAttrs, Meta, VariantList},
+    ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
     AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
 };
 use tracing::{debug, warn};
@@ -17,7 +17,7 @@ fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<bool> {
     if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
         return None;
     }
-    let cfg = parse_from_attr_meta(attr.meta()?)?;
+    let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?;
     let enabled = db.crate_graph()[krate].cfg_options.check(&cfg) != Some(false);
     Some(enabled)
 }
@@ -26,7 +26,15 @@ fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<bool> {
     if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
         return None;
     }
-    let cfg_expr = parse_from_attr_meta(attr.meta()?)?;
+    check_cfg_attr_value(db, &attr.token_tree()?, krate)
+}
+
+pub fn check_cfg_attr_value(
+    db: &dyn ExpandDatabase,
+    attr: &TokenTree,
+    krate: CrateId,
+) -> Option<bool> {
+    let cfg_expr = parse_from_attr_token_tree(attr)?;
     let enabled = db.crate_graph()[krate].cfg_options.check(&cfg_expr) != Some(false);
     Some(enabled)
 }
@@ -238,8 +246,7 @@ pub(crate) fn process_cfg_attrs(
     Some(remove)
 }
 /// Parses a `cfg` attribute from the meta
-fn parse_from_attr_meta(meta: Meta) -> Option<CfgExpr> {
-    let tt = meta.token_tree()?;
+fn parse_from_attr_token_tree(tt: &TokenTree) -> Option<CfgExpr> {
     let mut iter = tt
         .token_trees_and_tokens()
         .filter(is_not_whitespace)
@@ -328,7 +335,7 @@ mod tests {
     use expect_test::{expect, Expect};
     use syntax::{ast::Attr, AstNode, SourceFile};

-    use crate::cfg_process::parse_from_attr_meta;
+    use crate::cfg_process::parse_from_attr_token_tree;

     fn check_dnf_from_syntax(input: &str, expect: Expect) {
         let parse = SourceFile::parse(input, span::Edition::CURRENT);
@@ -342,7 +349,7 @@ mod tests {
         let node = node.clone_subtree();
         assert_eq!(node.syntax().text_range().start(), 0.into());
-        let cfg = parse_from_attr_meta(node.meta().unwrap()).unwrap();
+        let cfg = parse_from_attr_token_tree(&node.meta().unwrap().token_tree().unwrap()).unwrap();
         let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
         expect.assert_eq(&actual);
     }

View file

@@ -53,6 +53,7 @@ use crate::{
 };

 pub use crate::{
+    cfg_process::check_cfg_attr_value,
     files::{AstId, ErasedAstId, FileRange, InFile, InMacroFile, InRealFile},
     prettify_macro_expansion_::prettify_macro_expansion,
 };

View file

@@ -386,6 +386,19 @@ impl<'db> SemanticsImpl<'db> {
         Some(node)
     }

+    pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
+        let file_id = self.find_file(attr.syntax()).file_id;
+        let krate = match file_id.repr() {
+            HirFileIdRepr::FileId(file_id) => {
+                self.file_to_module_defs(file_id.file_id()).next()?.krate().id
+            }
+            HirFileIdRepr::MacroFile(macro_file) => {
+                self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate
+            }
+        };
+        hir_expand::check_cfg_attr_value(self.db.upcast(), attr, krate)
+    }
+
     /// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy
     /// expansions.
     pub fn expand_allowed_builtins(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {

View file

@@ -997,6 +997,28 @@ fn BAR() {
         );
     }

+    #[test]
+    fn cfged_lint_attrs() {
+        check_diagnostics(
+            r#"
+//- /lib.rs cfg:feature=cool_feature
+#[cfg_attr(any(), allow(non_snake_case))]
+fn FOO() {}
+// ^^^ 💡 warn: Function `FOO` should have snake_case name, e.g. `foo`
+
+#[cfg_attr(non_existent, allow(non_snake_case))]
+fn BAR() {}
+// ^^^ 💡 warn: Function `BAR` should have snake_case name, e.g. `bar`
+
+#[cfg_attr(feature = "cool_feature", allow(non_snake_case))]
+fn BAZ() {}
+
+#[cfg_attr(feature = "cool_feature", cfg_attr ( all ( ) , allow ( non_snake_case ) ) ) ]
+fn QUX() {}
+"#,
+        );
+    }
+
     #[test]
     fn allow_with_comment() {
         check_diagnostics(

View file

@@ -76,8 +76,9 @@ mod handlers {
 #[cfg(test)]
 mod tests;

-use std::{collections::hash_map, sync::LazyLock};
+use std::{collections::hash_map, iter, sync::LazyLock};

+use either::Either;
 use hir::{db::ExpandDatabase, diagnostics::AnyDiagnostic, Crate, HirFileId, InFile, Semantics};
 use ide_db::{
     assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
@@ -92,7 +93,7 @@ use ide_db::{
 use itertools::Itertools;
 use syntax::{
     ast::{self, AstNode, HasAttrs},
-    AstPtr, Edition, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange,
+    AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, TextRange, T,
 };

 // FIXME: Make this an enum
@@ -647,6 +648,7 @@ fn find_outline_mod_lint_severity(
     let mut result = None;
     let lint_groups = lint_groups(&diag.code);
     lint_attrs(
+        sema,
         ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
         edition,
     )
@@ -763,7 +765,7 @@ fn fill_lint_attrs(
         if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
             // Insert this node's attributes into any outline descendant, including this node.
-            lint_attrs(ancestor, edition).for_each(|(lint, severity)| {
+            lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
                 if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
                     diag_severity = Some(severity);
                 }
@@ -793,7 +795,7 @@ fn fill_lint_attrs(
             cache_stack.pop();
             return diag_severity;
         } else if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
-            lint_attrs(ancestor, edition).for_each(|(lint, severity)| {
+            lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
                 if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
                     diag_severity = Some(severity);
                 }
@@ -809,20 +811,27 @@ fn fill_lint_attrs(
     }
 }

-fn lint_attrs(
+fn lint_attrs<'a>(
+    sema: &'a Semantics<'a, RootDatabase>,
     ancestor: ast::AnyHasAttrs,
     edition: Edition,
-) -> impl Iterator<Item = (SmolStr, Severity)> {
+) -> impl Iterator<Item = (SmolStr, Severity)> + 'a {
     ancestor
         .attrs_including_inner()
         .filter_map(|attr| {
             attr.as_simple_call().and_then(|(name, value)| match &*name {
-                "allow" | "expect" => Some((Severity::Allow, value)),
-                "warn" => Some((Severity::Warning, value)),
-                "forbid" | "deny" => Some((Severity::Error, value)),
+                "allow" | "expect" => Some(Either::Left(iter::once((Severity::Allow, value)))),
+                "warn" => Some(Either::Left(iter::once((Severity::Warning, value)))),
+                "forbid" | "deny" => Some(Either::Left(iter::once((Severity::Error, value)))),
+                "cfg_attr" => {
+                    let mut lint_attrs = Vec::new();
+                    cfg_attr_lint_attrs(sema, &value, &mut lint_attrs);
+                    Some(Either::Right(lint_attrs.into_iter()))
+                }
                 _ => None,
             })
         })
+        .flatten()
         .flat_map(move |(severity, lints)| {
             parse_tt_as_comma_sep_paths(lints, edition).into_iter().flat_map(move |lints| {
                 // Rejoin the idents with `::`, so we have no spaces in between.
@@ -836,6 +845,58 @@ fn lint_attrs(
         })
 }

+fn cfg_attr_lint_attrs(
+    sema: &Semantics<'_, RootDatabase>,
+    value: &ast::TokenTree,
+    lint_attrs: &mut Vec<(Severity, ast::TokenTree)>,
+) {
+    let prev_len = lint_attrs.len();
+
+    let mut iter = value.token_trees_and_tokens().filter(|it| match it {
+        NodeOrToken::Node(_) => true,
+        NodeOrToken::Token(it) => !it.kind().is_trivia(),
+    });
+
+    // Skip the condition.
+    for value in &mut iter {
+        if value.as_token().is_some_and(|it| it.kind() == T![,]) {
+            break;
+        }
+    }
+
+    while let Some(value) = iter.next() {
+        if let Some(token) = value.as_token() {
+            if token.kind() == SyntaxKind::IDENT {
+                let severity = match token.text() {
+                    "allow" | "expect" => Some(Severity::Allow),
+                    "warn" => Some(Severity::Warning),
+                    "forbid" | "deny" => Some(Severity::Error),
+                    "cfg_attr" => {
+                        if let Some(NodeOrToken::Node(value)) = iter.next() {
+                            cfg_attr_lint_attrs(sema, &value, lint_attrs);
+                        }
+                        None
+                    }
+                    _ => None,
+                };
+                if let Some(severity) = severity {
+                    let lints = iter.next();
+                    if let Some(NodeOrToken::Node(lints)) = lints {
+                        lint_attrs.push((severity, lints));
+                    }
+                }
+            }
+        }
+    }
+
+    if prev_len != lint_attrs.len() {
+        if let Some(false) | None = sema.check_cfg_attr(value) {
+            // Discard the attributes when the condition is false.
+            lint_attrs.truncate(prev_len);
+        }
+    }
+}
+
 fn lint_groups(lint: &DiagnosticCode) -> &'static [&'static str] {
     match lint {
         DiagnosticCode::RustcLint(name) => {