Auto merge of #18099 - ChayimFriedman2:diag-only-necessary, r=Veykril

Use more correct handling of lint attributes

The previous analysis was top-down and worked on a single file at a time (expanding macros as it went). The new analysis is bottom-up, starting from the diagnostics and climbing up the syntax and module tree.

While this is more efficient (and in fact efficiency was the motivation for this work), it turned out the old code was already fast enough, so there is little performance gain. Luckily, the rewrite also fixes a correctness problem: the attributes of outline parent modules were not respected by the previous analysis. Case lints specifically did their own analysis to accommodate that, but it was limited to them alone. The new analysis works for all kinds of lints, present and future.
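For illustration, using the same test-fixture notation as this PR's tests (`//- /path` marks a file), here is the kind of layout the old analysis got wrong; the lint name is just an example:

```rust
//- /lib.rs
#[allow(unused_variables)]
mod foo; // outline module: this attribute must also apply inside foo.rs

//- /foo.rs
fn f() {
    // The parent module's `#[allow]` was previously ignored here
    // (only case lints had a special workaround).
    let x = 1;
}
```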

It was basically impossible to fix the old analysis without rewriting it, because navigating the module hierarchy has to happen bottom-up; and once we have a bottom-up analysis anyway (including syntax analysis, since modules can be nested in other syntax elements, including macros), it makes sense to use only that kind of analysis.

A few other bugs (not fundamental to the previous analysis) are also fixed, e.g. the overwriting of lint levels (as in `#[allow(lint)] mod foo { #[warn(lint)] mod bar; }`).
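Concretely, a sketch of the override case that now works as expected; the innermost attribute wins:

```rust
#[allow(non_snake_case)]
mod foo {
    #[warn(non_snake_case)]
    mod bar {
        // The inner `#[warn]` must override the outer `#[allow]`,
        // so this declaration gets a warning.
        fn BAD_NAME() {}
    }
}
```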

After this PR is merged I intend to work on an editor command that does workspace-wide diagnostics analysis (that is, `rust-analyzer diagnostics`, but run from your editor and without spawning a new process that has to analyze the workspace from scratch). This can be useful to users who do not want to enable check-on-save because of its overhead but still want to see workspace-wide diagnostics from r-a (and to maintainers of rust-analyzer).

Closes #18086.
Closes #18081.
Fixes #18056.
Committed by bors, 2024-09-12 12:39:27 +00:00, in commit fd243cd0fb.
6 changed files with 410 additions and 286 deletions.


@@ -69,7 +69,7 @@ use la_arena::Arena;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to;
use syntax::{ast, SmolStr};
use syntax::{ast, AstNode, SmolStr, SyntaxNode};
use triomphe::Arc;
use tt::TextRange;
@@ -291,7 +291,7 @@ impl ModuleOrigin {
/// Returns a node which defines this module.
/// That is, a file or a `mod foo {}` with items.
fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
match self {
&ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
let sf = db.parse(definition).tree();
@@ -728,6 +728,16 @@ pub enum ModuleSource {
BlockExpr(ast::BlockExpr),
}
impl ModuleSource {
pub fn node(&self) -> SyntaxNode {
match self {
ModuleSource::SourceFile(it) => it.syntax().clone(),
ModuleSource::Module(it) => it.syntax().clone(),
ModuleSource::BlockExpr(it) => it.syntax().clone(),
}
}
}
/// See `sub_namespace_match()`.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum MacroSubNs {


@@ -16,15 +16,14 @@ mod case_conv;
use std::fmt;
use hir_def::{
data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, AttrDefId, ConstId,
EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId,
StaticId, StructId, TraitId, TypeAliasId,
data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, ConstId, EnumId,
EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId,
StructId, TraitId, TypeAliasId,
};
use hir_expand::{
name::{AsName, Name},
HirFileId, HirFileIdExt, MacroFileIdExt,
HirFileId, HirFileIdExt,
};
use intern::sym;
use stdx::{always, never};
use syntax::{
ast::{self, HasName},
@@ -36,14 +35,6 @@ use crate::db::HirDatabase;
use self::case_conv::{to_camel_case, to_lower_snake_case, to_upper_snake_case};
mod allow {
pub(super) const BAD_STYLE: &str = "bad_style";
pub(super) const NONSTANDARD_STYLE: &str = "nonstandard_style";
pub(super) const NON_SNAKE_CASE: &str = "non_snake_case";
pub(super) const NON_UPPER_CASE_GLOBAL: &str = "non_upper_case_globals";
pub(super) const NON_CAMEL_CASE_TYPES: &str = "non_camel_case_types";
}
pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
let _p = tracing::info_span!("incorrect_case").entered();
let mut validator = DeclValidator::new(db);
@@ -160,92 +151,7 @@ impl<'a> DeclValidator<'a> {
}
}
/// Checks whether not following the convention is allowed for this item.
fn allowed(&self, id: AttrDefId, allow_name: &str, recursing: bool) -> bool {
let is_allowed = |def_id| {
let attrs = self.db.attrs(def_id);
// don't bug the user about directly no_mangle annotated stuff, they can't do anything about it
(!recursing && attrs.by_key(&sym::no_mangle).exists())
|| attrs.by_key(&sym::allow).tt_values().any(|tt| {
let allows = tt.to_string();
allows.contains(allow_name)
|| allows.contains(allow::BAD_STYLE)
|| allows.contains(allow::NONSTANDARD_STYLE)
})
};
let db = self.db.upcast();
let file_id_is_derive = || {
match id {
AttrDefId::ModuleId(m) => {
m.def_map(db)[m.local_id].origin.file_id().map(Into::into)
}
AttrDefId::FunctionId(f) => Some(f.lookup(db).id.file_id()),
AttrDefId::StaticId(sid) => Some(sid.lookup(db).id.file_id()),
AttrDefId::ConstId(cid) => Some(cid.lookup(db).id.file_id()),
AttrDefId::TraitId(tid) => Some(tid.lookup(db).id.file_id()),
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(db).id.file_id()),
AttrDefId::ImplId(iid) => Some(iid.lookup(db).id.file_id()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(db).id.file_id()),
AttrDefId::ExternCrateId(id) => Some(id.lookup(db).id.file_id()),
AttrDefId::UseId(id) => Some(id.lookup(db).id.file_id()),
// These warnings should not explore macro definitions at all
AttrDefId::MacroId(_) => None,
AttrDefId::AdtId(aid) => match aid {
AdtId::StructId(sid) => Some(sid.lookup(db).id.file_id()),
AdtId::EnumId(eid) => Some(eid.lookup(db).id.file_id()),
// Unions aren't yet supported
AdtId::UnionId(_) => None,
},
AttrDefId::FieldId(_) => None,
AttrDefId::EnumVariantId(_) => None,
AttrDefId::TypeAliasId(_) => None,
AttrDefId::GenericParamId(_) => None,
}
.map_or(false, |file_id| {
matches!(file_id.macro_file(), Some(file_id) if file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast()))
})
};
let parent = || {
match id {
AttrDefId::ModuleId(m) => m.containing_module(db).map(|v| v.into()),
AttrDefId::FunctionId(f) => Some(f.lookup(db).container.into()),
AttrDefId::StaticId(sid) => Some(sid.lookup(db).container.into()),
AttrDefId::ConstId(cid) => Some(cid.lookup(db).container.into()),
AttrDefId::TraitId(tid) => Some(tid.lookup(db).container.into()),
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(db).container.into()),
AttrDefId::ImplId(iid) => Some(iid.lookup(db).container.into()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(db).container.into()),
AttrDefId::ExternCrateId(id) => Some(id.lookup(db).container.into()),
AttrDefId::UseId(id) => Some(id.lookup(db).container.into()),
// These warnings should not explore macro definitions at all
AttrDefId::MacroId(_) => None,
AttrDefId::AdtId(aid) => match aid {
AdtId::StructId(sid) => Some(sid.lookup(db).container.into()),
AdtId::EnumId(eid) => Some(eid.lookup(db).container.into()),
// Unions aren't yet supported
AdtId::UnionId(_) => None,
},
AttrDefId::FieldId(_) => None,
AttrDefId::EnumVariantId(_) => None,
AttrDefId::TypeAliasId(_) => None,
AttrDefId::GenericParamId(_) => None,
}
.is_some_and(|mid| self.allowed(mid, allow_name, true))
};
is_allowed(id)
// FIXME: this is a hack to avoid false positives in derive macros currently
|| file_id_is_derive()
// go upwards one step or give up
|| parent()
}
fn validate_module(&mut self, module_id: ModuleId) {
// Check whether non-snake case identifiers are allowed for this module.
if self.allowed(module_id.into(), allow::NON_SNAKE_CASE, false) {
return;
}
// Check the module name.
let Some(module_name) = module_id.name(self.db.upcast()) else { return };
let Some(module_name_replacement) =
@@ -270,11 +176,6 @@ impl<'a> DeclValidator<'a> {
}
fn validate_trait(&mut self, trait_id: TraitId) {
// Check whether non-snake case identifiers are allowed for this trait.
if self.allowed(trait_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
}
// Check the trait name.
let data = self.db.trait_data(trait_id);
self.create_incorrect_case_diagnostic_for_item_name(
@@ -292,11 +193,6 @@ impl<'a> DeclValidator<'a> {
return;
}
// Check whether non-snake case identifiers are allowed for this function.
if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
return;
}
// Check the function name.
// Skipped if function is an associated item of a trait implementation.
if !self.is_trait_impl_container(container) {
@@ -389,17 +285,13 @@ impl<'a> DeclValidator<'a> {
fn validate_struct(&mut self, struct_id: StructId) {
// Check the structure name.
let non_camel_case_allowed =
self.allowed(struct_id.into(), allow::NON_CAMEL_CASE_TYPES, false);
if !non_camel_case_allowed {
let data = self.db.struct_data(struct_id);
self.create_incorrect_case_diagnostic_for_item_name(
struct_id,
&data.name,
CaseType::UpperCamelCase,
IdentType::Structure,
);
}
let data = self.db.struct_data(struct_id);
self.create_incorrect_case_diagnostic_for_item_name(
struct_id,
&data.name,
CaseType::UpperCamelCase,
IdentType::Structure,
);
// Check the field names.
self.validate_struct_fields(struct_id);
@@ -407,10 +299,6 @@ impl<'a> DeclValidator<'a> {
/// Check incorrect names for struct fields.
fn validate_struct_fields(&mut self, struct_id: StructId) {
if self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false) {
return;
}
let data = self.db.struct_data(struct_id);
let VariantData::Record(fields) = data.variant_data.as_ref() else {
return;
@@ -484,11 +372,6 @@ impl<'a> DeclValidator<'a> {
fn validate_enum(&mut self, enum_id: EnumId) {
let data = self.db.enum_data(enum_id);
// Check whether non-camel case names are allowed for this enum.
if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
}
// Check the enum name.
self.create_incorrect_case_diagnostic_for_item_name(
enum_id,
@@ -653,10 +536,6 @@ impl<'a> DeclValidator<'a> {
return;
}
if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
let data = self.db.const_data(const_id);
let Some(name) = &data.name else {
return;
@@ -676,10 +555,6 @@ impl<'a> DeclValidator<'a> {
return;
}
if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
self.create_incorrect_case_diagnostic_for_item_name(
static_id,
&data.name,
@@ -695,11 +570,6 @@ impl<'a> DeclValidator<'a> {
return;
}
// Check whether non-snake case identifiers are allowed for this type alias.
if self.allowed(type_alias_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
}
// Check the type alias name.
let data = self.db.type_alias_data(type_alias_id);
self.create_incorrect_case_diagnostic_for_item_name(


@@ -13,7 +13,7 @@ use either::Either;
use hir_def::{
hir::Expr,
lower::LowerCtx,
nameres::MacroSubNs,
nameres::{MacroSubNs, ModuleOrigin},
path::ModPath,
resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability,
@@ -32,7 +32,7 @@ use intern::Symbol;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use span::{EditionedFileId, FileId};
use span::{EditionedFileId, FileId, HirFileIdRepr};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
@@ -323,6 +323,47 @@ impl<'db> SemanticsImpl<'db> {
tree
}
pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
let module = self.file_to_module_defs(file_id.file_id()).next()?;
let def_map = self.db.crate_def_map(module.krate().id);
match def_map[module.id.local_id].origin {
ModuleOrigin::CrateRoot { .. } => None,
ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
let file_id = declaration_tree_id.file_id();
let in_file = InFile::new(file_id, declaration);
let node = in_file.to_node(self.db.upcast());
let root = find_root(node.syntax());
self.cache(root, file_id);
Some(in_file.with_value(node.syntax().clone()))
}
_ => unreachable!("FileId can only belong to a file module"),
}
}
HirFileIdRepr::MacroFile(macro_file) => {
let node = self
.db
.lookup_intern_macro_call(macro_file.macro_call_id)
.to_node(self.db.upcast());
let root = find_root(&node.value);
self.cache(root, node.file_id);
Some(node)
}
}
}
/// Returns the `SyntaxNode` of the module. If this is a file module, returns
/// the `SyntaxNode` of the *definition* file, not of the *declaration*.
pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
let def_map = module.id.def_map(self.db.upcast());
let definition = def_map[module.id.local_id].origin.definition_source(self.db.upcast());
let definition = definition.map(|it| it.node());
let root_node = find_root(&definition.value);
self.cache(root_node, definition.file_id);
definition
}
pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
let node = self.db.parse_or_expand(file_id);
self.cache(node.clone(), file_id);


@@ -593,7 +593,7 @@ mod CheckBadStyle {
}
mod F {
//^ 💡 warn: Module `F` should have snake_case name, e.g. `f`
//^ 💡 error: Module `F` should have snake_case name, e.g. `f`
#![deny(non_snake_case)]
fn CheckItWorksWithModAttr() {}
//^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: Function `CheckItWorksWithModAttr` should have snake_case name, e.g. `check_it_works_with_mod_attr`
@@ -856,4 +856,69 @@ fn func() {
"#,
);
}
#[test]
fn override_lint_level() {
check_diagnostics(
r#"
#[warn(nonstandard_style)]
fn foo() {
let BAR;
// ^^^ 💡 warn: Variable `BAR` should have snake_case name, e.g. `bar`
#[allow(non_snake_case)]
let FOO;
}
#[warn(nonstandard_style)]
fn foo() {
let BAR;
// ^^^ 💡 warn: Variable `BAR` should have snake_case name, e.g. `bar`
#[expect(non_snake_case)]
let FOO;
#[allow(non_snake_case)]
struct qux;
// ^^^ 💡 warn: Structure `qux` should have CamelCase name, e.g. `Qux`
fn BAZ() {
// ^^^ 💡 error: Function `BAZ` should have snake_case name, e.g. `baz`
#![forbid(bad_style)]
}
}
"#,
);
}
#[test]
fn different_files() {
check_diagnostics(
r#"
//- /lib.rs
#![expect(nonstandard_style)]
mod BAD_CASE;
fn BAD_CASE() {}
//- /BAD_CASE.rs
mod OtherBadCase;
// ^^^^^^^^^^^^ 💡 error: Module `OtherBadCase` should have snake_case name, e.g. `other_bad_case`
//- /BAD_CASE/OtherBadCase.rs
#![deny(non_snake_case)]
fn FOO() {}
// ^^^ 💡 error: Function `FOO` should have snake_case name, e.g. `foo`
#[allow(bad_style)]
mod FINE_WITH_BAD_CASE;
//- /BAD_CASE/OtherBadCase/FINE_WITH_BAD_CASE.rs
struct QUX;
const foo: i32 = 0;
fn BAR() {
let BAZ;
}
"#,
);
}
}


@@ -76,9 +76,9 @@ mod handlers {
#[cfg(test)]
mod tests;
use std::sync::LazyLock;
use std::{collections::hash_map, sync::LazyLock};
use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
use hir::{diagnostics::AnyDiagnostic, HirFileId, InFile, Semantics};
use ide_db::{
assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
base_db::SourceDatabase,
@@ -89,10 +89,10 @@ use ide_db::{
syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
};
use stdx::never;
use itertools::Itertools;
use syntax::{
ast::{self, AstNode},
AstPtr, Edition, SyntaxNode, SyntaxNodePtr, TextRange,
ast::{self, AstNode, HasAttrs},
AstPtr, Edition, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange,
};
// FIXME: Make this an enum
@@ -476,8 +476,9 @@ pub fn semantic_diagnostics(
|| ctx.config.disable_experimental && d.experimental)
});
let mut diagnostics_of_range = res
let mut lints = res
.iter_mut()
.filter(|it| matches!(it.code, DiagnosticCode::Clippy(_) | DiagnosticCode::RustcLint(_)))
.filter_map(|it| {
Some((
it.main_node.map(|ptr| {
@@ -486,23 +487,16 @@
it,
))
})
.collect::<FxHashMap<_, _>>();
.collect::<Vec<_>>();
if diagnostics_of_range.is_empty() {
return res;
}
let mut rustc_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
let mut clippy_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
// FIXME: This becomes quite expensive for big files
// The edition isn't accurate (each diagnostic may have its own edition due to macros),
// but that's okay since it's only used for error recovery.
handle_lint_attributes(
&ctx.sema,
parse.syntax(),
&mut rustc_stack,
&mut clippy_stack,
&mut diagnostics_of_range,
ctx.edition,
&mut FxHashMap::default(),
&mut lints,
&mut Vec::new(),
file_id.edition(),
);
res.retain(|d| d.severity != Severity::Allow);
@@ -536,150 +530,267 @@ fn build_group_dict(
all_groups: &'static [&'static str],
prefix: &'static str,
) -> FxHashMap<&'static str, Vec<&'static str>> {
let mut r: FxHashMap<&str, Vec<&str>> = FxHashMap::default();
let mut map_with_prefixes: FxHashMap<&str, Vec<&str>> = FxHashMap::default();
for g in lint_group {
for child in g.children {
r.entry(child.strip_prefix(prefix).unwrap())
.or_default()
.push(g.lint.label.strip_prefix(prefix).unwrap());
let mut add_children = |label: &'static str| {
for child in g.children {
map_with_prefixes.entry(child).or_default().push(label);
}
};
add_children(g.lint.label);
if g.lint.label == "nonstandard_style" {
// Also add `bad_style`, which for some reason isn't listed in the groups.
add_children("bad_style");
}
}
for (lint, groups) in r.iter_mut() {
for (lint, groups) in map_with_prefixes.iter_mut() {
groups.push(lint);
groups.extend_from_slice(all_groups);
}
r
map_with_prefixes.into_iter().map(|(k, v)| (k.strip_prefix(prefix).unwrap(), v)).collect()
}
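For intuition, here is a minimal self-contained sketch of the dict-building idea, with toy data in place of the real lint-group tables:

```rust
use std::collections::HashMap;

// Map each lint to every name that can configure it: the groups containing it,
// the lint's own name, and the catch-all groups (e.g. `warnings`).
fn build_group_dict_sketch(
    lint_groups: &[(&'static str, &'static [&'static str])], // (group label, children)
    all_groups: &'static [&'static str],
) -> HashMap<&'static str, Vec<&'static str>> {
    let mut map: HashMap<&'static str, Vec<&'static str>> = HashMap::new();
    for &(label, children) in lint_groups {
        for &child in children {
            map.entry(child).or_default().push(label);
        }
    }
    for (&lint, groups) in map.iter_mut() {
        groups.push(lint);
        groups.extend_from_slice(all_groups);
    }
    map
}

fn main() {
    let dict = build_group_dict_sketch(
        &[("nonstandard_style", &["non_snake_case", "non_camel_case_types"])],
        &["warnings"],
    );
    // Prints ["nonstandard_style", "non_snake_case", "warnings"].
    println!("{:?}", dict["non_snake_case"]);
}
```

A single lookup then yields every attribute name whose `allow`/`warn`/`deny` can affect a given lint, which is what the severity handling below relies on.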
fn handle_lint_attributes(
sema: &Semantics<'_, RootDatabase>,
root: &SyntaxNode,
rustc_stack: &mut FxHashMap<String, Vec<Severity>>,
clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
diagnostics_of_range: &mut FxHashMap<InFile<SyntaxNode>, &mut Diagnostic>,
cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
diagnostics: &mut [(InFile<SyntaxNode>, &mut Diagnostic)],
cache_stack: &mut Vec<HirFileId>,
edition: Edition,
) {
let _g = tracing::info_span!("handle_lint_attributes").entered();
let file_id = sema.hir_file_for(root);
let preorder = root.preorder();
for ev in preorder {
match ev {
syntax::WalkEvent::Enter(node) => {
for attr in node.children().filter_map(ast::Attr::cast) {
parse_lint_attribute(
attr,
rustc_stack,
clippy_stack,
|stack, severity| {
stack.push(severity);
},
edition,
);
}
if let Some(it) =
diagnostics_of_range.get_mut(&InFile { file_id, value: node.clone() })
{
const EMPTY_LINTS: &[&str] = &[];
let (names, stack) = match it.code {
DiagnosticCode::RustcLint(name) => (
RUSTC_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |it| &**it),
&mut *rustc_stack,
),
DiagnosticCode::Clippy(name) => (
CLIPPY_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |it| &**it),
&mut *clippy_stack,
),
_ => continue,
};
for &name in names {
if let Some(s) = stack.get(name).and_then(|it| it.last()) {
it.severity = *s;
}
}
}
if let Some(item) = ast::Item::cast(node.clone()) {
if let Some(me) = sema.expand_attr_macro(&item) {
for stack in [&mut *rustc_stack, &mut *clippy_stack] {
stack
.entry("__RA_EVERY_LINT".to_owned())
.or_default()
.push(Severity::Allow);
}
handle_lint_attributes(
sema,
&me,
rustc_stack,
clippy_stack,
diagnostics_of_range,
edition,
);
for stack in [&mut *rustc_stack, &mut *clippy_stack] {
stack.entry("__RA_EVERY_LINT".to_owned()).or_default().pop();
}
}
}
if let Some(mc) = ast::MacroCall::cast(node) {
if let Some(me) = sema.expand(&mc) {
handle_lint_attributes(
sema,
&me,
rustc_stack,
clippy_stack,
diagnostics_of_range,
edition,
);
}
}
}
syntax::WalkEvent::Leave(node) => {
for attr in node.children().filter_map(ast::Attr::cast) {
parse_lint_attribute(
attr,
rustc_stack,
clippy_stack,
|stack, severity| {
if stack.pop() != Some(severity) {
never!("Mismatched serevity in walking lint attributes");
}
},
edition,
);
}
}
for (node, diag) in diagnostics {
let mut diag_severity = fill_lint_attrs(sema, node, cache, cache_stack, diag, edition);
if let outline_diag_severity @ Some(_) =
find_outline_mod_lint_severity(sema, node, diag, edition)
{
diag_severity = outline_diag_severity;
}
if let Some(diag_severity) = diag_severity {
diag.severity = diag_severity;
}
}
}
fn parse_lint_attribute(
attr: ast::Attr,
rustc_stack: &mut FxHashMap<String, Vec<Severity>>,
clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
job: impl Fn(&mut Vec<Severity>, Severity),
fn find_outline_mod_lint_severity(
sema: &Semantics<'_, RootDatabase>,
node: &InFile<SyntaxNode>,
diag: &Diagnostic,
edition: Edition,
) {
let Some((tag, args_tt)) = attr.as_simple_call() else {
return;
};
let severity = match tag.as_str() {
"allow" => Severity::Allow,
"warn" => Severity::Warning,
"forbid" | "deny" => Severity::Error,
_ => return,
};
for lint in parse_tt_as_comma_sep_paths(args_tt, edition).into_iter().flatten() {
if let Some(lint) = lint.as_single_name_ref() {
job(rustc_stack.entry(lint.to_string()).or_default(), severity);
) -> Option<Severity> {
let mod_node = node.value.ancestors().find_map(ast::Module::cast)?;
if mod_node.item_list().is_some() {
// Inline modules will be handled by `fill_lint_attrs()`.
return None;
}
let mod_def = sema.to_module_def(&mod_node)?;
let module_source_file = sema.module_definition_node(mod_def);
let mut result = None;
let lint_groups = lint_groups(&diag.code);
lint_attrs(
ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
edition,
)
.for_each(|(lint, severity)| {
if lint_groups.contains(&&*lint) {
result = Some(severity);
}
if let Some(tool) = lint.qualifier().and_then(|it| it.as_single_name_ref()) {
if let Some(name_ref) = &lint.segment().and_then(|it| it.name_ref()) {
if tool.to_string() == "clippy" {
job(clippy_stack.entry(name_ref.to_string()).or_default(), severity);
});
result
}
#[derive(Debug, Clone, Copy)]
struct SeverityAttr {
severity: Severity,
/// This field counts how far we are from the main node. Larger values mean farther away.
///
/// Note this isn't exact: there can be gaps between values (created when merging severity maps).
/// The important invariant is that an attr closer to the main node has a smaller value.
///
/// This is necessary, even though we take care to never overwrite a value from deeper nesting,
/// because of lint groups. For example, in the following code:
/// ```
/// #[warn(non_snake_case)]
/// mod foo {
///     #[allow(nonstandard_style)]
///     mod bar;
/// }
/// ```
/// we don't want to warn on non-snake-case names inside `bar`. If we are traversing this for the
/// first time, everything is fine, because we set `diag_severity` on the first matching group
/// and never overwrite it afterwards. But if `bar` is cached, the cache will contain both
/// `#[warn(non_snake_case)]` and `#[allow(nonstandard_style)]`, and without this field we would
/// have no way of differentiating between the two.
depth: u32,
}
fn fill_lint_attrs(
sema: &Semantics<'_, RootDatabase>,
node: &InFile<SyntaxNode>,
cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
cache_stack: &mut Vec<HirFileId>,
diag: &Diagnostic,
edition: Edition,
) -> Option<Severity> {
let mut collected_lint_attrs = FxHashMap::<SmolStr, SeverityAttr>::default();
let mut diag_severity = None;
let mut ancestors = node.value.ancestors().peekable();
let mut depth = 0;
loop {
let ancestor = ancestors.next().expect("we always return from top-level nodes");
depth += 1;
if ancestors.peek().is_none() {
// We don't want to insert too many nodes into the cache, but top-level nodes (i.e. outline
// modules or macro expansions) need to touch the database anyway, so they are a good fit to cache.
if let Some(cached) = cache.get_mut(&node.file_id) {
// This node (and everything above it) is already cached; the attribute is either here or nowhere.
// Workaround for the borrow checker.
let cached = std::mem::take(cached);
cached.iter().for_each(|(lint, severity)| {
for item in &*cache_stack {
let node_cache_entry = cache
.get_mut(item)
.expect("we always insert cached nodes into the cache map");
let lint_cache_entry = node_cache_entry.entry(lint.clone());
if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
// Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
// overwrite top attrs.
lint_cache_entry.insert(SeverityAttr {
severity: severity.severity,
depth: severity.depth + depth,
});
}
}
});
let all_matching_groups = lint_groups(&diag.code)
.iter()
.filter_map(|lint_group| cached.get(&**lint_group));
let cached_severity =
all_matching_groups.min_by_key(|it| it.depth).map(|it| it.severity);
cache.insert(node.file_id, cached);
return diag_severity.or(cached_severity);
}
// Insert this node's descendants' attributes into any outline descendant, but not including this node.
// This must come before inserting this node's own attributes to preserve order.
collected_lint_attrs.drain().for_each(|(lint, severity)| {
if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
diag_severity = Some(severity.severity);
}
for item in &*cache_stack {
let node_cache_entry = cache
.get_mut(item)
.expect("we always insert cached nodes into the cache map");
let lint_cache_entry = node_cache_entry.entry(lint.clone());
if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
// Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
// overwrite top attrs.
lint_cache_entry.insert(severity);
}
}
});
cache_stack.push(node.file_id);
cache.insert(node.file_id, FxHashMap::default());
if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
// Insert this node's attributes into any outline descendant, including this node.
lint_attrs(ancestor, edition).for_each(|(lint, severity)| {
if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
diag_severity = Some(severity);
}
for item in &*cache_stack {
let node_cache_entry = cache
.get_mut(item)
.expect("we always insert cached nodes into the cache map");
let lint_cache_entry = node_cache_entry.entry(lint.clone());
if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
// Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
// overwrite top attrs.
lint_cache_entry.insert(SeverityAttr { severity, depth });
}
}
});
}
let parent_node = sema.find_parent_file(node.file_id);
if let Some(parent_node) = parent_node {
let parent_severity =
fill_lint_attrs(sema, &parent_node, cache, cache_stack, diag, edition);
if diag_severity.is_none() {
diag_severity = parent_severity;
}
}
cache_stack.pop();
return diag_severity;
} else if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
lint_attrs(ancestor, edition).for_each(|(lint, severity)| {
if diag_severity.is_none() && lint_groups(&diag.code).contains(&&*lint) {
diag_severity = Some(severity);
}
let lint_cache_entry = collected_lint_attrs.entry(lint);
if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
// Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
// overwrite top attrs.
lint_cache_entry.insert(SeverityAttr { severity, depth });
}
});
}
}
}
fn lint_attrs(
ancestor: ast::AnyHasAttrs,
edition: Edition,
) -> impl Iterator<Item = (SmolStr, Severity)> {
ancestor
.attrs_including_inner()
.filter_map(|attr| {
attr.as_simple_call().and_then(|(name, value)| match &*name {
"allow" | "expect" => Some((Severity::Allow, value)),
"warn" => Some((Severity::Warning, value)),
"forbid" | "deny" => Some((Severity::Error, value)),
_ => None,
})
})
.flat_map(move |(severity, lints)| {
parse_tt_as_comma_sep_paths(lints, edition).into_iter().flat_map(move |lints| {
// Rejoin the idents with `::`, so we have no spaces in between.
lints.into_iter().map(move |lint| {
(
lint.segments().filter_map(|segment| segment.name_ref()).join("::").into(),
severity,
)
})
})
})
}
fn lint_groups(lint: &DiagnosticCode) -> &'static [&'static str] {
match lint {
DiagnosticCode::RustcLint(name) => {
RUSTC_LINT_GROUPS_DICT.get(name).map(|it| &**it).unwrap_or_default()
}
DiagnosticCode::Clippy(name) => {
CLIPPY_LINT_GROUPS_DICT.get(name).map(|it| &**it).unwrap_or_default()
}
_ => &[],
}
}
fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist {
let mut res = unresolved_fix(id, label, target);
res.source_change = Some(source_change);


@@ -75,6 +75,33 @@ pub trait HasAttrs: AstNode {
fn has_atom_attr(&self, atom: &str) -> bool {
self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
}
/// Returns all attributes of this node, including inner attributes that may not be directly under this node
/// but under a child.
fn attrs_including_inner(self) -> impl Iterator<Item = ast::Attr>
where
Self: Sized,
{
let inner_attrs_node = if let Some(it) =
support::child::<ast::BlockExpr>(self.syntax()).and_then(|it| it.stmt_list())
{
Some(it.syntax)
} else if let Some(it) = support::child::<ast::MatchArmList>(self.syntax()) {
Some(it.syntax)
} else if let Some(it) = support::child::<ast::AssocItemList>(self.syntax()) {
Some(it.syntax)
} else if let Some(it) = support::child::<ast::ItemList>(self.syntax()) {
Some(it.syntax)
} else if let Some(it) = support::child::<ast::ExternItemList>(self.syntax()) {
Some(it.syntax)
} else if let Some(it) = support::child::<ast::MacroItems>(self.syntax()) {
Some(it.syntax)
} else {
None
};
self.attrs().chain(inner_attrs_node.into_iter().flat_map(|it| support::children(&it)))
}
}
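A small usage sketch of the new `attrs_including_inner()`; this assumes the `syntax` crate's public parsing entry point and `Edition::CURRENT`:

```rust
use syntax::{
    ast::{self, HasAttrs},
    AstNode, Edition, SourceFile,
};

fn demo() {
    // Assumes `SourceFile::parse(text, edition)` as exposed by the syntax crate.
    let file = SourceFile::parse("fn f() { #![allow(unused)] }", Edition::CURRENT).tree();
    let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();
    // `attrs()` yields only outer attributes, so it is empty here, while
    // `attrs_including_inner()` also yields the inner `#![allow(unused)]`
    // that lives under the function's body block.
    for attr in func.attrs_including_inner() {
        println!("{attr}");
    }
}
```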
pub trait HasDocComments: HasAttrs {