mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-27 05:23:24 +00:00

Auto merge of #16470 - Veykril:clippy-disallow, r=lnicola

internal: Lint debug prints and disallowed types with clippy

commit 66cec4d11a: 64 changed files with 170 additions and 229 deletions

.github/workflows/ci.yaml (vendored): 2 changes

@@ -105,7 +105,7 @@ jobs:
       - name: clippy
         if: matrix.os == 'ubuntu-latest'
-        run: cargo clippy --all-targets
+        run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr

   # Weird targets to catch non-portable code
   rust-cross:

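For context: these `-D` flags escalate lints to hard errors in CI, while the workspace lint table further down only sets them to warn, so local builds stay usable. A minimal sketch (hypothetical function) of code that the new `-D clippy::dbg_macro` flag would reject:

```rust
// Hypothetical example: with `-D clippy::dbg_macro`, this fails the CI clippy step.
fn frobnicate(x: u32) -> u32 {
    let doubled = dbg!(x * 2); // denied: dbg! is a debugging aid, not for committed code
    doubled
}
```
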
Cargo.lock (generated): 1 change

@@ -1283,6 +1283,7 @@ dependencies = [
  "object 0.32.0",
  "paths",
  "profile",
+ "rustc-hash",
  "serde",
  "serde_json",
  "snap",

@@ -164,6 +164,8 @@ len_without_is_empty = "allow"
 enum_variant_names = "allow"
 # Builder pattern disagrees
 new_ret_no_self = "allow"
+# Has a bunch of false positives
+useless_asref = "allow"

 ## Following lints should be tackled at some point
 borrowed_box = "allow"
@@ -178,9 +180,12 @@ type_complexity = "allow"
 wrong_self_convention = "allow"

 ## warn at following lints
+# CI raises these to deny
 dbg_macro = "warn"
 todo = "warn"
-unimplemented = "allow"
+print_stdout = "warn"
+print_stderr = "warn"
+
 rc_buffer = "warn"
 # FIXME enable this, we use this pattern a lot so its annoying work ...
 # str_to_string = "warn"

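The two newly warned print lints flag any direct terminal output. For a language server that speaks LSP over stdio this matters: stray output on stdout corrupts the protocol stream. A hedged sketch (hypothetical function) of what they catch:

```rust
// Both lines warn locally and fail CI, where the lints are raised to deny.
fn report(msg: &str) {
    println!("status: {msg}"); // clippy::print_stdout
    eprintln!("error: {msg}"); // clippy::print_stderr
}
```
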
clippy.toml (new file): 5 additions

@@ -0,0 +1,5 @@
+disallowed-types = [
+    { path = "std::collections::HashMap", reason = "use FxHashMap" },
+    { path = "std::collections::HashSet", reason = "use FxHashSet" },
+    { path = "std::collections::hash_map::RandomState", reason = "use BuildHasherDefault<FxHasher>"}
+]

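This new config drives `clippy::disallowed_types`: naming any listed path in code produces a lint carrying the given reason, which is what motivates the many HashMap-to-FxHashMap swaps below. A minimal sketch of the replacement pattern (FxHashMap is rustc-hash's alias for std's HashMap with the non-randomized Fx hasher):

```rust
use rustc_hash::FxHashMap;

// `use std::collections::HashMap;` would now lint with "use FxHashMap".
// The replacement keeps the same API but supplies the hasher via default():
fn count_words(text: &str) -> FxHashMap<&str, usize> {
    let mut counts = FxHashMap::default(); // no `new()`: default() builds the Fx hasher
    for word in text.split_whitespace() {
        *counts.entry(word).or_insert(0) += 1;
    }
    counts
}
```
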
@@ -493,9 +493,7 @@ impl CargoActor {
             // Skip certain kinds of messages to only spend time on what's useful
             JsonMessage::Cargo(message) => match message {
                 cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
-                    self.sender
-                        .send(CargoMessage::CompilerArtifact(Box::new(artifact)))
-                        .unwrap();
+                    self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
                 }
                 cargo_metadata::Message::CompilerMessage(msg) => {
                     self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
@@ -539,8 +537,9 @@ impl CargoActor {
         }
     }
 }

+#[allow(clippy::large_enum_variant)]
 enum CargoMessage {
-    CompilerArtifact(Box<cargo_metadata::Artifact>),
+    CompilerArtifact(cargo_metadata::Artifact),
     Diagnostic(Diagnostic),
 }

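The allow annotation documents a deliberate trade-off: clippy warns when one enum variant is much larger than the rest, because every value of the enum is as big as its largest variant. A standalone sketch of the two options (illustrative sizes, not the real types):

```rust
// clippy::large_enum_variant suggests boxing the big variant...
enum MessageBoxed {
    Big(Box<[u8; 1024]>), // pointer-sized in the enum; payload lives on the heap
    Small(u8),
}

// ...while the allow accepts the size cost to avoid the extra allocation.
#[allow(clippy::large_enum_variant)]
enum MessageInline {
    Big([u8; 1024]), // every value is ~1 KiB, even `Small`
    Small(u8),
}
```
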
@@ -1980,10 +1980,7 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)>
     let ast_lit = lit.literal()?;
     let mut hir_lit: Literal = ast_lit.kind().into();
     if lit.minus_token().is_some() {
-        let Some(h) = hir_lit.negate() else {
-            return None;
-        };
-        hir_lit = h;
+        hir_lit = hir_lit.negate()?;
     }
     Some((hir_lit, ast_lit))
 }

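This and several hunks below apply the same simplification: inside a function returning `Option`, a `let ... else { return None; }` block is equivalent to the `?` operator, which returns `None` early on its own. A minimal standalone sketch:

```rust
fn negate_verbose(n: Option<i32>) -> Option<i32> {
    let Some(v) = n else {
        return None;
    };
    Some(-v)
}

fn negate_concise(n: Option<i32>) -> Option<i32> {
    let v = n?; // returns None early when `n` is None
    Some(-v)
}

fn main() {
    assert_eq!(negate_verbose(Some(3)), negate_concise(Some(3)));
    assert_eq!(negate_verbose(None), negate_concise(None));
}
```
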
@@ -222,17 +222,15 @@ impl ItemScope {
         self.declarations.iter().copied()
     }

-    pub fn extern_crate_decls(
-        &self,
-    ) -> impl Iterator<Item = ExternCrateId> + ExactSizeIterator + '_ {
+    pub fn extern_crate_decls(&self) -> impl ExactSizeIterator<Item = ExternCrateId> + '_ {
         self.extern_crate_decls.iter().copied()
     }

-    pub fn use_decls(&self) -> impl Iterator<Item = UseId> + ExactSizeIterator + '_ {
+    pub fn use_decls(&self) -> impl ExactSizeIterator<Item = UseId> + '_ {
         self.use_decls.iter().copied()
     }

-    pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
+    pub fn impls(&self) -> impl ExactSizeIterator<Item = ImplId> + '_ {
         self.impls.iter().copied()
     }

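These signature changes rely on `ExactSizeIterator` having `Iterator` as a supertrait: the associated `Item` type can be bound once on the subtrait, so the double bound is redundant. A sketch showing both spellings compile and behave identically:

```rust
// Old style: spell out both traits and bind Item on Iterator.
fn doubled_verbose(xs: &[i32]) -> impl Iterator<Item = i32> + ExactSizeIterator + '_ {
    xs.iter().map(|x| x * 2)
}

// New style: one bound, Item bound directly on the subtrait.
fn doubled_concise(xs: &[i32]) -> impl ExactSizeIterator<Item = i32> + '_ {
    xs.iter().map(|x| x * 2)
}

fn main() {
    assert_eq!(doubled_concise(&[1, 2, 3]).len(), 3); // len() comes from ExactSizeIterator
    assert!(doubled_verbose(&[1, 2, 3]).eq(doubled_concise(&[1, 2, 3])));
}
```
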
@@ -1,8 +1,7 @@
-use std::collections::HashMap;
-
 use chalk_ir::{AdtId, TyKind};
 use either::Either;
 use hir_def::db::DefDatabase;
+use rustc_hash::FxHashMap;
 use test_fixture::WithFixture;
 use triomphe::Arc;

@@ -16,7 +15,7 @@ use crate::{
 mod closure;

 fn current_machine_data_layout() -> String {
-    project_model::target_data_layout::get(None, None, &HashMap::default()).unwrap()
+    project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap()
 }

 fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {

@@ -10,7 +10,7 @@ mod regression;
 mod simple;
 mod traits;

-use std::{collections::HashMap, env};
+use std::env;

 use base_db::{FileRange, SourceDatabaseExt};
 use expect_test::Expect;
@@ -25,6 +25,7 @@ use hir_def::{
 };
 use hir_expand::{db::ExpandDatabase, InFile};
 use once_cell::race::OnceBool;
+use rustc_hash::FxHashMap;
 use stdx::format_to;
 use syntax::{
     ast::{self, AstNode, HasName},
@@ -90,9 +91,9 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
     let (db, files) = TestDB::with_many_files(ra_fixture);

     let mut had_annotations = false;
-    let mut mismatches = HashMap::new();
-    let mut types = HashMap::new();
-    let mut adjustments = HashMap::<_, Vec<_>>::new();
+    let mut mismatches = FxHashMap::default();
+    let mut types = FxHashMap::default();
+    let mut adjustments = FxHashMap::<_, Vec<_>>::default();
     for (file_id, annotations) in db.extract_annotations() {
         for (range, expected) in annotations {
             let file_range = FileRange { file_id, range };

@@ -187,7 +187,7 @@ struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, Ch

 impl Drop for LoggingRustIrDatabaseLoggingOnDrop<'_> {
     fn drop(&mut self) {
-        eprintln!("chalk program:\n{}", self.0);
+        tracing::info!("chalk program:\n{}", self.0);
     }
 }

@@ -546,9 +546,7 @@ impl AnyDiagnostic {
                     source_map.pat_syntax(pat).expect("unexpected synthetic");

                 // cast from Either<Pat, SelfParam> -> Either<_, Pat>
-                let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
-                    return None;
-                };
+                let ptr = AstPtr::try_from_raw(value.syntax_node_ptr())?;
                 InFile { file_id, value: ptr }
             }
         };

@@ -27,9 +27,7 @@ use crate::{
 pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let comment = ctx.find_token_at_offset::<ast::Comment>()?;
     // Only allow doc comments
-    let Some(placement) = comment.kind().doc else {
-        return None;
-    };
+    let placement = comment.kind().doc?;

     // Only allow comments which are alone on their line
     if let Some(prev) = comment.syntax().prev_token() {

@@ -1,7 +1,4 @@
-use std::{
-    collections::{HashMap, HashSet},
-    iter,
-};
+use std::iter;

 use hir::{HasSource, HirFileIdExt, ModuleSource};
 use ide_db::{
@@ -9,6 +6,7 @@ use ide_db::{
     base_db::FileId,
     defs::{Definition, NameClass, NameRefClass},
     search::{FileReference, SearchScope},
+    FxHashMap, FxHashSet,
 };
 use itertools::Itertools;
 use smallvec::SmallVec;
@@ -235,9 +233,9 @@ impl Module {
     fn get_usages_and_record_fields(
         &self,
         ctx: &AssistContext<'_>,
-    ) -> (HashMap<FileId, Vec<(TextRange, String)>>, Vec<SyntaxNode>) {
+    ) -> (FxHashMap<FileId, Vec<(TextRange, String)>>, Vec<SyntaxNode>) {
         let mut adt_fields = Vec::new();
-        let mut refs: HashMap<FileId, Vec<(TextRange, String)>> = HashMap::new();
+        let mut refs: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default();

         //Here impl is not included as each item inside impl will be tied to the parent of
         //implementing block(a struct, enum, etc), if the parent is in selected module, it will
@@ -320,7 +318,7 @@ impl Module {
         &self,
         ctx: &AssistContext<'_>,
         node_def: Definition,
-        refs_in_files: &mut HashMap<FileId, Vec<(TextRange, String)>>,
+        refs_in_files: &mut FxHashMap<FileId, Vec<(TextRange, String)>>,
     ) {
         for (file_id, references) in node_def.usages(&ctx.sema).all() {
             let source_file = ctx.sema.parse(file_id);
@@ -400,7 +398,7 @@ impl Module {
         ctx: &AssistContext<'_>,
     ) -> Vec<TextRange> {
         let mut import_paths_to_be_removed: Vec<TextRange> = vec![];
-        let mut node_set: HashSet<String> = HashSet::new();
+        let mut node_set: FxHashSet<String> = FxHashSet::default();

         for item in self.body_items.clone() {
             for x in item.syntax().descendants() {

@@ -1,7 +1,5 @@
-use std::collections::HashSet;
-
 use hir::{self, HasCrate, HasVisibility};
-use ide_db::path_transform::PathTransform;
+use ide_db::{path_transform::PathTransform, FxHashSet};
 use syntax::{
     ast::{
         self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
@@ -71,7 +69,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'

     let sema_field_ty = ctx.sema.resolve_type(&field_ty)?;
     let mut methods = vec![];
-    let mut seen_names = HashSet::new();
+    let mut seen_names = FxHashSet::default();

     for ty in sema_field_ty.autoderef(ctx.db()) {
         let krate = ty.krate(ctx.db());

@@ -502,9 +502,7 @@ fn generate_args_for_impl(
     trait_params: &Option<GenericParamList>,
     old_trait_args: &FxHashSet<String>,
 ) -> Option<ast::GenericArgList> {
-    let Some(old_impl_args) = old_impl_gpl.map(|gpl| gpl.to_generic_args().generic_args()) else {
-        return None;
-    };
+    let old_impl_args = old_impl_gpl.map(|gpl| gpl.to_generic_args().generic_args())?;
     // Create pairs of the args of `self_ty` and corresponding `field_ty` to
     // form the substitution list
     let mut arg_substs = FxHashMap::default();

@@ -3,12 +3,12 @@
 // - Remove unused aliases if there are no longer any users, see inline_call.rs.

 use hir::{HasSource, PathResolution};
+use ide_db::FxHashMap;
 use ide_db::{
     defs::Definition, imports::insert_use::ast_to_remove_for_path_in_use_stmt,
     search::FileReference,
 };
 use itertools::Itertools;
-use std::collections::HashMap;
 use syntax::{
     ast::{self, make, HasGenericParams, HasName},
     ted, AstNode, NodeOrToken, SyntaxNode,
@@ -189,14 +189,14 @@ fn inline(alias_def: &ast::TypeAlias, alias_instance: &ast::PathType) -> Option<
     Some(repl)
 }

-struct LifetimeMap(HashMap<String, ast::Lifetime>);
+struct LifetimeMap(FxHashMap<String, ast::Lifetime>);

 impl LifetimeMap {
     fn new(
         instance_args: &Option<ast::GenericArgList>,
         alias_generics: &ast::GenericParamList,
     ) -> Option<Self> {
-        let mut inner = HashMap::new();
+        let mut inner = FxHashMap::default();

         let wildcard_lifetime = make::lifetime("'_");
         let lifetimes = alias_generics
@@ -231,14 +231,14 @@ impl LifetimeMap {
     }
 }

-struct ConstAndTypeMap(HashMap<String, SyntaxNode>);
+struct ConstAndTypeMap(FxHashMap<String, SyntaxNode>);

 impl ConstAndTypeMap {
     fn new(
         instance_args: &Option<ast::GenericArgList>,
         alias_generics: &ast::GenericParamList,
     ) -> Option<Self> {
-        let mut inner = HashMap::new();
+        let mut inner = FxHashMap::default();
         let instance_generics = generic_args_to_const_and_type_generics(instance_args);
         let alias_generics = generic_param_list_to_const_and_type_generics(alias_generics);

@@ -1,5 +1,6 @@
 use hir::Type;
-use std::{collections::HashMap, iter::successors};
+use ide_db::FxHashMap;
+use std::iter::successors;
 use syntax::{
     algo::neighbor,
     ast::{self, AstNode, HasName},
@@ -95,7 +96,7 @@ fn contains_placeholder(a: &ast::MatchArm) -> bool {
 }

 fn are_same_types(
-    current_arm_types: &HashMap<String, Option<Type>>,
+    current_arm_types: &FxHashMap<String, Option<Type>>,
     arm: &ast::MatchArm,
     ctx: &AssistContext<'_>,
 ) -> bool {
@@ -114,11 +115,11 @@ fn are_same_types(
 fn get_arm_types(
     context: &AssistContext<'_>,
     arm: &ast::MatchArm,
-) -> HashMap<String, Option<Type>> {
-    let mut mapping: HashMap<String, Option<Type>> = HashMap::new();
+) -> FxHashMap<String, Option<Type>> {
+    let mut mapping: FxHashMap<String, Option<Type>> = FxHashMap::default();

     fn recurse(
-        map: &mut HashMap<String, Option<Type>>,
+        map: &mut FxHashMap<String, Option<Type>>,
         ctx: &AssistContext<'_>,
         pat: &Option<ast::Pat>,
     ) {

@@ -1,11 +1,11 @@
-use std::collections::{hash_map::Entry, HashMap};
+use std::collections::hash_map::Entry;

 use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
 use ide_db::{
     base_db::FileRange,
     defs::Definition,
     search::{FileReference, ReferenceCategory, SearchScope},
-    RootDatabase,
+    FxHashMap, RootDatabase,
 };
 use syntax::{ast, AstNode};
 use text_edit::TextRange;
@@ -44,7 +44,7 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
     let uses = uses_up.chain(uses_down).collect::<Vec<_>>();

     // Maps use nodes to the scope that we should search through to find
-    let mut search_scopes = HashMap::<Module, Vec<SearchScope>>::new();
+    let mut search_scopes = FxHashMap::<Module, Vec<SearchScope>>::default();

     // iterator over all unused use trees
     let mut unused = uses

@@ -47,9 +47,7 @@ pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext<'
         return None;
     }

-    let Some(ok_type) = unwrap_result_type(type_ref) else {
-        return None;
-    };
+    let ok_type = unwrap_result_type(type_ref)?;

     acc.add(
         AssistId("unwrap_result_return_type", AssistKind::RefactorRewrite),

@@ -1,9 +1,7 @@
 //! This module contains functions to suggest names for expressions, functions and other items

-use std::collections::HashSet;
-
 use hir::Semantics;
-use ide_db::RootDatabase;
+use ide_db::{FxHashSet, RootDatabase};
 use itertools::Itertools;
 use stdx::to_lower_snake_case;
 use syntax::{
@@ -78,7 +76,7 @@ pub(crate) fn for_unique_generic_name(
             ast::GenericParam::TypeParam(t) => t.name().unwrap().to_string(),
             p => p.to_string(),
         })
-        .collect::<HashSet<_>>();
+        .collect::<FxHashSet<_>>();
     let mut name = name.to_string();
     let base_len = name.len();
     let mut count = 0;

@@ -241,6 +241,7 @@ fn unescape(s: &str) -> String {
     s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
 }

+#[allow(clippy::print_stderr)]
 fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
     let file_content = std::fs::read_to_string(path).unwrap();
     let mut clippy_lints: Vec<ClippyLint> = Vec::new();

@@ -73,8 +73,6 @@ mod handlers {
 #[cfg(test)]
 mod tests;

-use std::collections::HashMap;
-
 use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
 use ide_db::{
     assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
@@ -413,18 +411,18 @@ pub fn diagnostics(

 // `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros

-static RUSTC_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> =
+static RUSTC_LINT_GROUPS_DICT: Lazy<FxHashMap<&str, Vec<&str>>> =
     Lazy::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], ""));

-static CLIPPY_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> =
+static CLIPPY_LINT_GROUPS_DICT: Lazy<FxHashMap<&str, Vec<&str>>> =
     Lazy::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::"));

 fn build_group_dict(
     lint_group: &'static [LintGroup],
     all_groups: &'static [&'static str],
     prefix: &'static str,
-) -> HashMap<&'static str, Vec<&'static str>> {
-    let mut r: HashMap<&str, Vec<&str>> = HashMap::new();
+) -> FxHashMap<&'static str, Vec<&'static str>> {
+    let mut r: FxHashMap<&str, Vec<&str>> = FxHashMap::default();
     for g in lint_group {
         for child in g.children {
             r.entry(child.strip_prefix(prefix).unwrap())

@@ -1,3 +1,4 @@
+#![allow(clippy::print_stderr)]
 #[cfg(not(feature = "in-rust-tree"))]
 mod sourcegen;

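Note the two attribute forms used across this commit: `#![allow(...)]` (inner attribute, as above) exempts a whole file or crate, while `#[allow(...)]` (outer attribute, as in several hunks above and below) scopes the exception to a single item. A minimal sketch of a file using both:

```rust
#![allow(clippy::print_stderr)] // inner attribute: applies to the whole file

#[allow(clippy::print_stdout)] // outer attribute: applies to the next item only
fn dump(s: &str) {
    println!("{s}");
}

fn log(s: &str) {
    eprintln!("{s}"); // covered by the file-level allow
}
```
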
@@ -706,7 +706,7 @@ where
 // we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
 // of code that can make the decision to not match.
 thread_local! {
-    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
+    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = const { Cell::new(false) };
 }

 fn recording_match_fail_reasons() -> bool {

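The `const { ... }` initializer form of `thread_local!` is evaluated at compile time, which lets the macro skip the runtime lazy-initialization check that a plain initializer needs; clippy suggests it where the expression is const-evaluable. A standalone sketch:

```rust
use std::cell::Cell;

thread_local! {
    // Const-evaluated initializer: no lazy-init bookkeeping on each access.
    static COUNTER: Cell<u32> = const { Cell::new(0) };
}

fn main() {
    COUNTER.with(|c| c.set(c.get() + 1));
    COUNTER.with(|c| assert_eq!(c.get(), 1));
}
```
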
@@ -113,6 +113,7 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
     expected.assert_eq(&actual);
 }

+#[allow(clippy::print_stdout)]
 fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snippet: &str) {
     let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
     println!(

@@ -29,9 +29,6 @@ fn check_external_docs(
     let web_url = links.web_url;
     let local_url = links.local_url;

-    println!("web_url: {:?}", web_url);
-    println!("local_url: {:?}", local_url);
-
     match (expect_web_url, web_url) {
         (Some(expect), Some(url)) => expect.assert_eq(&url),
         (None, None) => (),

@@ -9,8 +9,6 @@
 //! at the index that the match starts at and its tree parent is
 //! resolved to the search element definition, we get a reference.

-use std::collections::HashMap;
-
 use hir::{DescendPreference, PathResolution, Semantics};
 use ide_db::{
     base_db::FileId,
@@ -79,7 +77,7 @@ pub(crate) fn find_all_refs(
                     .collect(),
             )
         })
-        .collect::<HashMap<_, Vec<_>, _>>();
+        .collect::<IntMap<_, Vec<_>>>();
     let declaration = match def {
         Definition::Module(module) => {
             Some(NavigationTarget::from_module_to_decl(sema.db, module))

@@ -1,14 +1,12 @@
 //! This module provides `StaticIndex` which is used for powering
 //! read-only code browsers and emitting LSIF

-use std::collections::HashMap;
-
 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
-use ide_db::helpers::get_definition;
 use ide_db::{
     base_db::{FileId, FileRange, SourceDatabaseExt},
     defs::Definition,
-    FxHashSet, RootDatabase,
+    helpers::get_definition,
+    FxHashMap, FxHashSet, RootDatabase,
 };
 use syntax::{AstNode, SyntaxKind::*, TextRange, T};

@@ -31,7 +29,7 @@ pub struct StaticIndex<'a> {
     pub tokens: TokenStore,
     analysis: &'a Analysis,
     db: &'a RootDatabase,
-    def_map: HashMap<Definition, TokenId>,
+    def_map: FxHashMap<Definition, TokenId>,
 }

 #[derive(Debug)]
@@ -232,14 +230,13 @@ impl StaticIndex<'_> {
 #[cfg(test)]
 mod tests {
     use crate::{fixture, StaticIndex};
-    use ide_db::base_db::FileRange;
-    use std::collections::HashSet;
+    use ide_db::{base_db::FileRange, FxHashSet};
     use syntax::TextSize;

     fn check_all_ranges(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
         let s = StaticIndex::compute(&analysis);
-        let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
+        let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect();
         for f in s.files {
             for (range, _) in f.tokens {
                 let it = FileRange { file_id: f.file_id, range };
@@ -258,7 +255,7 @@ mod tests {
     fn check_definitions(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
         let s = StaticIndex::compute(&analysis);
-        let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
+        let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect();
         for (_, t) in s.tokens.iter() {
             if let Some(t) = t.definition {
                 if t.range.start() == TextSize::from(0) {

@@ -55,13 +55,12 @@ impl Limit {
                 if other <= old_max || old_max == 0 {
                     break;
                 }
-                if self
-                    .max
-                    .compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed)
-                    .is_ok()
-                {
-                    eprintln!("new max: {other}");
-                }
+                _ = self.max.compare_exchange_weak(
+                    old_max,
+                    other,
+                    Ordering::Relaxed,
+                    Ordering::Relaxed,
+                );
             }

             Ok(())

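This hunk drops the `eprintln!` and with it the need to inspect the result: `_ = expr` explicitly discards a value that would otherwise trigger must-use warnings. A hedged sketch of the idiom (hypothetical function, simplified from the loop above):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

fn bump_max(max: &AtomicUsize, old_max: usize, other: usize) {
    // `compare_exchange_weak` returns a #[must_use] Result; `_ = ...` documents
    // that the outcome is intentionally ignored, since on failure the enclosing
    // retry loop simply reloads the current value and tries again.
    _ = max.compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed);
}
```
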
@@ -1,5 +1,4 @@
-use std::collections::HashMap;
-
+use rustc_hash::FxHashMap;
 use syntax::{ast, AstNode};
 use test_utils::extract_annotations;
 use tt::{
@@ -12,7 +11,7 @@ use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMM
 fn check_punct_spacing(fixture: &str) {
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
     let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY);
-    let mut annotations: HashMap<_, _> = extract_annotations(fixture)
+    let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
         .into_iter()
         .map(|(range, annotation)| {
             let spacing = match annotation.as_str() {

@@ -1,5 +1,6 @@
 //! This module greps parser's code for specially formatted comments and turns
 //! them into tests.
+#![allow(clippy::disallowed_types, clippy::print_stdout)]

 use std::{
     collections::HashMap,

@@ -23,6 +23,7 @@ serde.workspace = true
 serde_json = { workspace = true, features = ["unbounded_depth"] }
 tracing.workspace = true
 triomphe.workspace = true
+rustc-hash.workspace = true
 memmap2 = "0.5.4"
 snap = "1.1.0"
 indexmap = "2.1.0"

@@ -35,10 +35,11 @@
 //! as we don't have bincode in Cargo.toml yet, lets stick with serde_json for
 //! the time being.

-use std::collections::{HashMap, VecDeque};
+use std::collections::VecDeque;

 use indexmap::IndexSet;
 use la_arena::RawIdx;
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
 use text_size::TextRange;
@@ -129,7 +130,7 @@ impl FlatTree {
         span_data_table: &mut SpanDataIndexMap,
     ) -> FlatTree {
         let mut w = Writer {
-            string_table: HashMap::new(),
+            string_table: FxHashMap::default(),
             work: VecDeque::new(),
             span_data_table,

@@ -158,7 +159,7 @@ impl FlatTree {

     pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
         let mut w = Writer {
-            string_table: HashMap::new(),
+            string_table: FxHashMap::default(),
             work: VecDeque::new(),
             span_data_table: &mut (),

@@ -340,7 +341,7 @@ impl InternableSpan for Span {

 struct Writer<'a, 'span, S: InternableSpan> {
     work: VecDeque<(usize, &'a tt::Subtree<S>)>,
-    string_table: HashMap<&'a str, u32>,
+    string_table: FxHashMap<&'a str, u32>,
     span_data_table: &'span mut S::Table,

     subtree: Vec<SubtreeRepr>,

@@ -1,6 +1,8 @@
 //! A standalone binary for `proc-macro-srv`.
 //! Driver for proc macro server
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+#![allow(clippy::print_stderr)]
+
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_driver as _;

@@ -2,6 +2,7 @@

 #![warn(rust_2018_idioms, unused_lifetimes)]
 #![feature(proc_macro_span, proc_macro_def_site)]
+#![allow(clippy::all)]

 use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};

@@ -21,7 +21,7 @@ pub use countme;
 /// almost zero.
 pub use countme::Count;

-thread_local!(static IN_SCOPE: RefCell<bool> = RefCell::new(false));
+thread_local!(static IN_SCOPE: RefCell<bool> = const { RefCell::new(false) });

 /// Allows to check if the current code is within some dynamic scope, can be
 /// useful during debugging to figure out why a function is called.

|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(feature = "cpu_profiler"))]
|
#[cfg(not(feature = "cpu_profiler"))]
|
||||||
|
#[allow(clippy::print_stderr)]
|
||||||
{
|
{
|
||||||
eprintln!(
|
eprintln!(
|
||||||
r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."#
|
r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."#
|
||||||
|
|
|
@@ -1,4 +1,7 @@
 //! Like `std::time::Instant`, but also measures memory & CPU cycles.
+
+#![allow(clippy::print_stderr)]
+
 use std::{
     fmt,
     time::{Duration, Instant},

@@ -399,7 +399,7 @@ impl CargoWorkspace {
         CargoWorkspace { packages, targets, workspace_root, target_directory }
     }

-    pub fn packages(&self) -> impl Iterator<Item = Package> + ExactSizeIterator + '_ {
+    pub fn packages(&self) -> impl ExactSizeIterator<Item = Package> + '_ {
         self.packages.iter().map(|(id, _pkg)| id)
     }

@@ -57,7 +57,7 @@ impl Stitched {
         self.by_name("proc_macro")
     }

-    pub(crate) fn crates(&self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + '_ {
+    pub(crate) fn crates(&self) -> impl ExactSizeIterator<Item = SysrootCrate> + '_ {
         self.crates.iter().map(|(id, _data)| id)
     }

@@ -3,7 +3,9 @@
 //! Based on cli flags, either spawns an LSP server, or runs a batch analysis

 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::print_stdout, clippy::print_stderr)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_driver as _;

@@ -1,5 +1,7 @@
 //! Various batch processing tasks, intended primarily for debugging.

+#![allow(clippy::print_stdout, clippy::print_stderr)]
+
 mod analysis_stats;
 mod diagnostics;
 pub mod flags;

@@ -1,6 +1,5 @@
 //! LSIF (language server index format) generator

-use std::collections::HashMap;
 use std::env;
 use std::time::Instant;

@@ -16,6 +15,7 @@ use ide_db::{
 use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
 use lsp_types::{self, lsif};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
+use rustc_hash::FxHashMap;
 use vfs::{AbsPathBuf, Vfs};

 use crate::{
@@ -35,10 +35,10 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {

 struct LsifManager<'a> {
     count: i32,
-    token_map: HashMap<TokenId, Id>,
-    range_map: HashMap<FileRange, Id>,
-    file_map: HashMap<FileId, Id>,
-    package_map: HashMap<PackageInformation, Id>,
+    token_map: FxHashMap<TokenId, Id>,
+    range_map: FxHashMap<FileRange, Id>,
+    file_map: FxHashMap<FileId, Id>,
+    package_map: FxHashMap<PackageInformation, Id>,
     analysis: &'a Analysis,
     db: &'a RootDatabase,
     vfs: &'a Vfs,
@@ -57,10 +57,10 @@ impl LsifManager<'_> {
     fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> {
         LsifManager {
             count: 0,
-            token_map: HashMap::default(),
-            range_map: HashMap::default(),
-            file_map: HashMap::default(),
-            package_map: HashMap::default(),
+            token_map: FxHashMap::default(),
+            range_map: FxHashMap::default(),
+            file_map: FxHashMap::default(),
+            package_map: FxHashMap::default(),
             analysis,
             db,
             vfs,
@@ -215,7 +215,7 @@ impl LsifManager<'_> {
             out_v: result_set_id.into(),
         }));
         let mut edges = token.references.iter().fold(
-            HashMap::<_, Vec<lsp_types::NumberOrString>>::new(),
+            FxHashMap::<_, Vec<lsp_types::NumberOrString>>::default(),
             |mut edges, it| {
                 let entry = edges.entry((it.range.file_id, it.is_definition)).or_default();
                 entry.push((*self.range_map.get(&it.range).unwrap()).into());

@@ -1,8 +1,6 @@
 //! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.

-use std::{
-    cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf,
-};
+use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf};

 use hir::{Change, Crate};
 use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
@@ -10,6 +8,7 @@ use profile::StopWatch;
 use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};

 use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use rustc_hash::FxHashMap;
 use triomphe::Arc;
 use vfs::{AbsPathBuf, FileId};
 use walkdir::WalkDir;
@@ -27,7 +26,7 @@ struct Tester {

 fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
     thread_local! {
-        static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new());
+        static LEAK_STORE: RefCell<FxHashMap<String, DiagnosticCode>> = RefCell::new(FxHashMap::default());
     }
     LEAK_STORE.with_borrow_mut(|s| match s.get(code) {
         Some(c) => *c,
@@ -39,9 +38,9 @@ fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
     })
 }

-fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> {
+fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> FxHashMap<DiagnosticCode, usize> {
     let text = read_to_string(p).unwrap();
-    let mut result = HashMap::new();
+    let mut result = FxHashMap::default();
     {
         let mut text = &*text;
         while let Some(p) = text.find("error[E") {
@@ -106,7 +105,7 @@ impl Tester {
         let expected = if stderr_path.exists() {
             detect_errors_from_rustc_stderr_file(stderr_path)
         } else {
-            HashMap::new()
+            FxHashMap::default()
         };
         let text = read_to_string(&p).unwrap();
         let mut change = Change::new();
@@ -125,7 +124,7 @@ impl Tester {
         self.host.apply_change(change);
         let diagnostic_config = DiagnosticsConfig::test_sample();

-        let mut actual = HashMap::new();
+        let mut actual = FxHashMap::default();
         let panicked = match std::panic::catch_unwind(|| {
             self.host
                 .analysis()

@@ -1,10 +1,6 @@
 //! SCIP generator

-use std::{
-    collections::{HashMap, HashSet},
-    path::PathBuf,
-    time::Instant,
-};
+use std::{path::PathBuf, time::Instant};

 use ide::{
     LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile,
@@ -12,6 +8,7 @@ use ide::{
 };
 use ide_db::LineIndexDatabase;
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use rustc_hash::{FxHashMap, FxHashSet};
 use scip::types as scip_types;

 use crate::{
@@ -76,9 +73,10 @@ impl flags::Scip {
         };
         let mut documents = Vec::new();

-        let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
-        let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();
-        let mut tokens_to_enclosing_symbol: HashMap<TokenId, Option<String>> = HashMap::new();
+        let mut symbols_emitted: FxHashSet<TokenId> = FxHashSet::default();
+        let mut tokens_to_symbol: FxHashMap<TokenId, String> = FxHashMap::default();
+        let mut tokens_to_enclosing_symbol: FxHashMap<TokenId, Option<String>> =
+            FxHashMap::default();

         for StaticIndexedFile { file_id, tokens, .. } in si.files {
             let mut local_count = 0;

@@ -1,9 +1,9 @@
 //! This module provides the functionality needed to convert diagnostics from
 //! `cargo check` json format to the LSP diagnostic format.
-use std::collections::HashMap;

 use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
 use itertools::Itertools;
+use rustc_hash::FxHashMap;
 use stdx::format_to;
 use vfs::{AbsPath, AbsPathBuf};

@@ -186,7 +186,7 @@ fn map_rust_child_diagnostic(
         return MappedRustChildDiagnostic::MessageLine(rd.message.clone());
     }

-    let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new();
+    let mut edit_map: FxHashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = FxHashMap::default();
     let mut suggested_replacements = Vec::new();
     let mut is_preferred = true;
     for &span in &spans {

@@ -1,6 +1,8 @@
 //! rust-analyzer extensions to the LSP.

-use std::{collections::HashMap, path::PathBuf};
+#![allow(clippy::disallowed_types)]
+
+use std::path::PathBuf;

 use ide_db::line_index::WideEncoding;
 use lsp_types::request::Request;
@@ -9,6 +11,7 @@ use lsp_types::{
     PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
 };
 use lsp_types::{PositionEncodingKind, Url};
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};

 use crate::line_index::PositionEncoding;
@@ -448,12 +451,16 @@ pub struct CodeActionData {
 #[serde(rename_all = "camelCase")]
 pub struct SnippetWorkspaceEdit {
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub changes: Option<HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
+    pub changes: Option<FxHashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub document_changes: Option<Vec<SnippetDocumentChangeOperation>>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub change_annotations:
-        Option<HashMap<lsp_types::ChangeAnnotationIdentifier, lsp_types::ChangeAnnotation>>,
+    pub change_annotations: Option<
+        std::collections::HashMap<
+            lsp_types::ChangeAnnotationIdentifier,
+            lsp_types::ChangeAnnotation,
+        >,
+    >,
 }

 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]

@@ -179,6 +179,7 @@ impl Node {
         self.go(0, filter)
     }

+    #[allow(clippy::print_stderr)]
     fn go(&self, level: usize, filter: &WriteFilter) {
         if self.duration > filter.longer_than && level < filter.depth {
             let duration = ms(self.duration);

@@ -9,6 +9,7 @@
 //! be sure without a real client anyway.

 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::disallowed_types)]

 #[cfg(not(feature = "in-rust-tree"))]
 mod sourcegen;

@ -1,3 +1,4 @@
|
||||||
|
#![allow(clippy::disallowed_types, clippy::print_stderr)]
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashSet,
|
collections::HashSet,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
|
@ -78,8 +79,6 @@ fn files_are_tidy() {
|
||||||
match extension {
|
match extension {
|
||||||
"rs" => {
|
"rs" => {
|
||||||
let text = sh.read_file(&path).unwrap();
|
let text = sh.read_file(&path).unwrap();
|
||||||
check_todo(&path, &text);
|
|
||||||
check_dbg(&path, &text);
|
|
||||||
check_test_attrs(&path, &text);
|
check_test_attrs(&path, &text);
|
||||||
check_trailing_ws(&path, &text);
|
check_trailing_ws(&path, &text);
|
||||||
tidy_docs.visit(&path, &text);
|
tidy_docs.visit(&path, &text);
|
||||||

@@ -205,74 +204,6 @@ Zlib OR Apache-2.0 OR MIT
     assert_eq!(licenses, expected);
 }

-fn check_todo(path: &Path, text: &str) {
-    let need_todo = &[
-        // This file itself obviously needs to use todo (<- like this!).
-        "tests/tidy.rs",
-        // Some of our assists generate `todo!()`.
-        "handlers/add_turbo_fish.rs",
-        "handlers/generate_function.rs",
-        "handlers/add_missing_match_arms.rs",
-        "handlers/replace_derive_with_manual_impl.rs",
-        // To support generating `todo!()` in assists, we have `expr_todo()` in
-        // `ast::make`.
-        "ast/make.rs",
-        // The documentation in string literals may contain anything for its own purposes
-        "ide-db/src/generated/lints.rs",
-        "ide-assists/src/utils/gen_trait_fn_body.rs",
-        "ide-assists/src/tests/generated.rs",
-        // The tests for missing fields
-        "ide-diagnostics/src/handlers/missing_fields.rs",
-    ];
-    if need_todo.iter().any(|p| path.ends_with(p)) {
-        return;
-    }
-    if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
-        // Generated by an assist
-        if text.contains("${0:todo!()}") {
-            return;
-        }
-
-        panic!(
-            "\nTODO markers or todo! macros should not be committed to the master branch,\n\
-             use FIXME instead\n\
-             {}\n",
-            path.display(),
-        )
-    }
-}
-
-fn check_dbg(path: &Path, text: &str) {
-    let need_dbg = &[
-        // This file itself obviously needs to use dbg.
-        "slow-tests/tidy.rs",
-        // Assists to remove `dbg!()`
-        "handlers/remove_dbg.rs",
-        // We have .dbg postfix
-        "ide-completion/src/completions/postfix.rs",
-        "ide-completion/src/completions/keyword.rs",
-        "ide-completion/src/tests/expression.rs",
-        "ide-completion/src/tests/proc_macros.rs",
-        // The documentation in string literals may contain anything for its own purposes
-        "ide-completion/src/lib.rs",
-        "ide-db/src/generated/lints.rs",
-        // test for doc test for remove_dbg
-        "src/tests/generated.rs",
-        // `expect!` string can contain `dbg!` (due to .dbg postfix)
-        "ide-completion/src/tests/special.rs",
-    ];
-    if need_dbg.iter().any(|p| path.ends_with(p)) {
-        return;
-    }
-    if text.contains("dbg!") {
-        panic!(
-            "\ndbg! macros should not be committed to the master branch,\n\
-             {}\n",
-            path.display(),
-        )
-    }
-}
-
 fn check_test_attrs(path: &Path, text: &str) {
     let ignore_rule =
         "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";
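The two functions removed above were hand-rolled textual scans; their job is taken over by clippy's built-in `todo` and `dbg_macro` lints, which CI denies, so the per-file allowlists become local `#[allow]` attributes in the affected files. A sketch of the replacement mechanism (the function bodies are illustrative):

```rust
// Illustrative sketch: instead of grepping source text for "todo!" and
// "dbg!", CI denies the corresponding clippy lints; legitimate uses opt
// out locally instead of being listed in a central allowlist.
#[allow(dead_code)]
#[allow(clippy::todo)]
fn unfinished_feature() {
    todo!("intentionally unfinished; a FIXME tracks the real work")
}

#[allow(clippy::dbg_macro)]
fn trace_value(x: i32) -> i32 {
    dbg!(x) // deliberate: this helper exists to print the value it returns
}

fn main() {
    assert_eq!(trace_value(3), 3);
}
```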

@@ -167,6 +167,7 @@ pub fn add_preamble(generator: &'static str, mut text: String) -> String {

 /// Checks that the `file` has the specified `contents`. If that is not the
 /// case, updates the file and then fails the test.
+#[allow(clippy::print_stderr)]
 pub fn ensure_file_contents(file: &Path, contents: &str) {
     if let Ok(old_contents) = fs::read_to_string(file) {
         if normalize_newlines(&old_contents) == normalize_newlines(contents) {

@@ -54,12 +54,13 @@ use core::any::{Any, TypeId};
 use core::hash::BuildHasherDefault;
 use core::marker::PhantomData;

-use ::std::collections::hash_map::{self, HashMap};
+use ::std::collections::hash_map;

 /// Raw access to the underlying `HashMap`.
 ///
 /// This alias is provided for convenience because of the ugly third generic parameter.
-pub type RawMap<A> = HashMap<TypeId, Box<A>, BuildHasherDefault<TypeIdHasher>>;
+#[allow(clippy::disallowed_types)] // Uses a custom hasher
+pub type RawMap<A> = hash_map::HashMap<TypeId, Box<A>, BuildHasherDefault<TypeIdHasher>>;

 /// A collection containing zero or one values for any given type and allowing convenient,
 /// type-safe access to those values.
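The `disallowed-types` entry targets `HashMap`'s default `RandomState`, not `HashMap` itself, so an alias that plugs in an explicit hasher gets a scoped, documented allow, as above. A sketch of the same idea in isolation, using `FxHasher` as the stand-in custom hasher (the alias name is hypothetical):

```rust
use std::collections::HashMap;
use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

// Hypothetical alias: the lint objects to the default RandomState hasher,
// so a map with a deterministic custom hasher carries a scoped allow.
#[allow(clippy::disallowed_types)]
type FxMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;

fn main() {
    let mut m: FxMap<&str, u32> = FxMap::default();
    m.insert("nodes", 3);
    assert_eq!(m["nodes"], 3);
}
```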

@@ -23,12 +23,14 @@ pub fn is_ci() -> bool {
 }

 #[must_use]
+#[allow(clippy::print_stderr)]
 pub fn timeit(label: &'static str) -> impl Drop {
     let start = Instant::now();
     defer(move || eprintln!("{}: {:.2?}", label, start.elapsed()))
 }

 /// Prints backtrace to stderr, useful for debugging.
+#[allow(clippy::print_stderr)]
 pub fn print_backtrace() {
     #[cfg(feature = "backtrace")]
     eprintln!("{:?}", backtrace::Backtrace::new());
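`timeit` works by returning a guard whose `Drop` impl prints the elapsed time, so timing ends when the guard leaves scope. A self-contained sketch of the pattern, assuming a local `defer` helper shaped like stdx's:

```rust
use std::time::Instant;

// Assumed helper: a guard that runs its closure exactly once, on Drop.
struct Defer<F: FnOnce()>(Option<F>);

impl<F: FnOnce()> Drop for Defer<F> {
    fn drop(&mut self) {
        if let Some(f) = self.0.take() {
            f();
        }
    }
}

fn defer<F: FnOnce()>(f: F) -> Defer<F> {
    Defer(Some(f))
}

#[must_use]
#[allow(clippy::print_stderr)]
fn timeit(label: &'static str) -> impl Drop {
    let start = Instant::now();
    defer(move || eprintln!("{}: {:.2?}", label, start.elapsed()))
}

fn main() {
    let _guard = timeit("main");
    // ... work happens here; the timing line prints on scope exit.
}
```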

@@ -18,6 +18,7 @@ pub struct PanicContext {
 }

 impl PanicContext {
+    #[allow(clippy::print_stderr)]
     fn init() {
         let default_hook = panic::take_hook();
         let hook = move |panic_info: &panic::PanicInfo<'_>| {

@@ -43,7 +44,7 @@ impl Drop for PanicContext {

 fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
     thread_local! {
-        static CTX: RefCell<Vec<String>> = RefCell::new(Vec::new());
+        static CTX: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };
     }
     CTX.with(|ctx| f(&mut ctx.borrow_mut()));
 }
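The `const { ... }` initializer is the const-initialized form of `thread_local!`: because the value is built in a const context, accesses can skip the lazy-initialization check of the plain form. A minimal sketch:

```rust
use std::cell::RefCell;

thread_local! {
    // Const-initialized: Vec::new and RefCell::new are const fns, so no
    // per-access lazy-init branch is needed.
    static STACK: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };
}

fn push_frame(frame: &str) {
    STACK.with(|s| s.borrow_mut().push(frame.to_string()));
}

fn main() {
    push_frame("request: textDocument/definition");
    STACK.with(|s| assert_eq!(s.borrow().len(), 1));
}
```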

@@ -14,8 +14,7 @@ pub fn shuffle<T>(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) {
 }

 pub fn seed() -> u64 {
-    use std::collections::hash_map::RandomState;
     use std::hash::{BuildHasher, Hasher};
-
-    RandomState::new().build_hasher().finish()
+    #[allow(clippy::disallowed_types)]
+    std::collections::hash_map::RandomState::new().build_hasher().finish()
 }
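The change keeps the same trick while satisfying the lint: `RandomState` seeds itself from OS entropy, so hashing nothing still yields a process-unique `u64`, a cheap seed source without a `rand` dependency. A sketch with the allow scoped to a statement (the `main` usage is illustrative):

```rust
fn seed() -> u64 {
    use std::hash::{BuildHasher, Hasher};
    // RandomState is disallowed workspace-wide, but here its randomness is
    // exactly what we want, so the allow is scoped to this one statement.
    #[allow(clippy::disallowed_types)]
    let state = std::collections::hash_map::RandomState::new();
    state.build_hasher().finish()
}

fn main() {
    // Two independently built hashers give two independent seeds.
    println!("{:#x} {:#x}", seed(), seed());
}
```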

@@ -46,6 +46,7 @@ impl CheckReparse {
         Some(CheckReparse { text, edit, edited_text })
     }

+    #[allow(clippy::print_stderr)]
     pub fn run(&self) {
         let parse = SourceFile::parse(&self.text);
         let new_parse = parse.reparse(&self.edit);

@@ -3,14 +3,12 @@
 //! Specifically, it generates the `SyntaxKind` enum and a number of newtype
 //! wrappers around `SyntaxNode` which implement `syntax::AstNode`.

-use std::{
-    collections::{BTreeSet, HashSet},
-    fmt::Write,
-};
+use std::{collections::BTreeSet, fmt::Write};

 use itertools::Itertools;
 use proc_macro2::{Punct, Spacing};
 use quote::{format_ident, quote};
+use rustc_hash::FxHashSet;
 use ungrammar::{Grammar, Rule};

 use crate::tests::ast_src::{

@@ -278,7 +276,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
         }
     });

-    let defined_nodes: HashSet<_> = node_names.collect();
+    let defined_nodes: FxHashSet<_> = node_names.collect();

     for node in kinds
         .nodes
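`FxHashSet` is a drop-in replacement for std's `HashSet` with a faster, non-cryptographic hasher, which is fine for compiler-internal keys like node names. A sketch (the node names are illustrative):

```rust
use rustc_hash::FxHashSet;

fn main() {
    // Same API as std's HashSet; only the hasher differs.
    let defined_nodes: FxHashSet<&str> =
        ["SourceFile", "FnDef", "StructDef"].into_iter().collect();
    assert!(defined_nodes.contains("FnDef"));
}
```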

@@ -7,6 +7,7 @@
 //! * marks (see the eponymous module).

 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::print_stderr)]

 mod assert_linear;
 pub mod bench_fixture;

@@ -424,7 +425,7 @@ pub fn format_diff(chunks: Vec<dissimilar::Chunk<'_>>) -> String {
 ///
 /// A benchmark test looks like this:
 ///
-/// ```
+/// ```ignore
 /// #[test]
 /// fn benchmark_foo() {
 ///     if skip_slow_tests() { return; }

@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: dff0b009e82ef06a
+lsp/ext.rs hash: c0bf3acd7a9e72b2

 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:

@@ -374,7 +374,7 @@ impl<T> Arena<T> {
     /// ```
     pub fn iter(
         &self,
-    ) -> impl Iterator<Item = (Idx<T>, &T)> + ExactSizeIterator + DoubleEndedIterator + Clone {
+    ) -> impl ExactSizeIterator<Item = (Idx<T>, &T)> + DoubleEndedIterator + Clone {
         self.data.iter().enumerate().map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value))
     }

@@ -394,7 +394,7 @@ impl<T> Arena<T> {
     /// ```
     pub fn iter_mut(
         &mut self,
-    ) -> impl Iterator<Item = (Idx<T>, &mut T)> + ExactSizeIterator + DoubleEndedIterator {
+    ) -> impl ExactSizeIterator<Item = (Idx<T>, &mut T)> + DoubleEndedIterator {
         self.data
             .iter_mut()
             .enumerate()

@@ -414,7 +414,7 @@ impl<T> Arena<T> {
     /// assert_eq!(iterator.next(), Some(&40));
     /// assert_eq!(iterator.next(), Some(&60));
     /// ```
-    pub fn values(&self) -> impl Iterator<Item = &T> + ExactSizeIterator + DoubleEndedIterator {
+    pub fn values(&self) -> impl ExactSizeIterator<Item = &T> + DoubleEndedIterator {
         self.data.iter()
     }

@@ -432,9 +432,7 @@ impl<T> Arena<T> {
     ///
     /// assert_eq!(arena[idx1], 10);
     /// ```
-    pub fn values_mut(
-        &mut self,
-    ) -> impl Iterator<Item = &mut T> + ExactSizeIterator + DoubleEndedIterator {
+    pub fn values_mut(&mut self) -> impl ExactSizeIterator<Item = &mut T> + DoubleEndedIterator {
         self.data.iter_mut()
     }

@@ -73,17 +73,17 @@ impl<T, V> ArenaMap<Idx<T>, V> {
     }

     /// Returns an iterator over the values in the map.
-    pub fn values(&self) -> impl Iterator<Item = &V> + DoubleEndedIterator {
+    pub fn values(&self) -> impl DoubleEndedIterator<Item = &V> {
         self.v.iter().filter_map(|o| o.as_ref())
     }

     /// Returns an iterator over mutable references to the values in the map.
-    pub fn values_mut(&mut self) -> impl Iterator<Item = &mut V> + DoubleEndedIterator {
+    pub fn values_mut(&mut self) -> impl DoubleEndedIterator<Item = &mut V> {
         self.v.iter_mut().filter_map(|o| o.as_mut())
     }

     /// Returns an iterator over the arena indexes and values in the map.
-    pub fn iter(&self) -> impl Iterator<Item = (Idx<T>, &V)> + DoubleEndedIterator {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Idx<T>, &V)> {
         self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?)))
     }
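These signature rewrites describe the same iterators: naming `Item` on the most specific subtrait, instead of on a separate bare `Iterator` bound, is shorter and gives callers identical capabilities. A sketch contrasting the two spellings:

```rust
// Old spelling: Item named on Iterator, ExactSizeIterator added separately.
fn old_style(v: &[u32]) -> impl Iterator<Item = &u32> + ExactSizeIterator + DoubleEndedIterator {
    v.iter()
}

// New spelling: Item named directly on the subtrait; same bounds, less noise.
fn new_style(v: &[u32]) -> impl ExactSizeIterator<Item = &u32> + DoubleEndedIterator {
    v.iter()
}

fn main() {
    let data = [10, 20, 30];
    assert_eq!(new_style(&data).len(), 3); // ExactSizeIterator::len
    assert_eq!(new_style(&data).rev().next(), Some(&30)); // DoubleEndedIterator
    assert_eq!(old_style(&data).count(), 3);
}
```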

@@ -41,6 +41,9 @@
 //!
 //! {"jsonrpc": "2.0", "method": "exit", "params": null}
 //! ```

+#![allow(clippy::print_stderr)]

 use std::error::Error;

 use lsp_types::OneOf;

@@ -5,6 +5,7 @@
 //! Run with `RUST_LOG=lsp_server=debug` to see all the messages.

 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::print_stdout, clippy::disallowed_types)]

 mod error;
 mod msg;

@@ -9,6 +9,7 @@
 //! `.cargo/config`.

 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::print_stderr, clippy::print_stdout)]

 mod flags;