9453: Add first-class limits. r=matklad,lnicola a=rbartlensky

Partially fixes #9286.

This introduces a new `Limits` structure which is passed as an input
to `SourceDatabase`. This makes limits accessible almost everywhere in
the code, since most places have a database in scope.

One downside of this approach is that whenever you query limits, you essentially do an `Arc::clone`, which is less than ideal.

Let me know if I missed anything, or if you'd like me to take a different approach!
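
A minimal sketch of the intended call-site pattern, using the `check`/`inner` API from the new `crates/limit` crate in this PR (the constant value mirrors the token limit below; the `guard_token_count` helper is purely illustrative):

```rust
use limit::Limit;

// Illustrative bound; real call sites (macro expansion, name resolution, autoderef)
// each define their own constant.
const TOKEN_LIMIT: Limit = Limit::new(524288);

fn guard_token_count(count: usize) -> Result<(), String> {
    // `check` returns Err(()) once `count` exceeds the configured upper bound.
    if TOKEN_LIMIT.check(count).is_err() {
        return Err(format!(
            "macro invocation exceeds token limit: produced {} tokens, limit is {}",
            count,
            TOKEN_LIMIT.inner(),
        ));
    }
    Ok(())
}
```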

Co-authored-by: Robert Bartlensky <bartlensky.robert@gmail.com>
commit 0bee7cb716
bors[bot] 2021-07-22 10:33:05 +00:00, committed by GitHub
16 changed files with 84 additions and 24 deletions

Cargo.lock (generated, 8 additions)

@@ -488,6 +488,7 @@ dependencies = [
  "indexmap",
  "itertools",
  "la-arena",
+ "limit",
  "log",
  "mbe",
  "once_cell",
@@ -509,6 +510,7 @@ dependencies = [
  "either",
  "expect-test",
  "la-arena",
+ "limit",
  "log",
  "mbe",
  "parser",
@@ -535,6 +537,7 @@ dependencies = [
  "hir_expand",
  "itertools",
  "la-arena",
+ "limit",
  "log",
  "once_cell",
  "profile",
@@ -641,6 +644,7 @@ dependencies = [
  "fst",
  "hir",
  "itertools",
+ "limit",
  "log",
  "once_cell",
  "profile",
@@ -793,6 +797,10 @@ dependencies = [
  "cc",
 ]
 
+[[package]]
+name = "limit"
+version = "0.0.0"
+
 [[package]]
 name = "lock_api"
 version = "0.4.4"


@@ -31,6 +31,7 @@ hir_expand = { path = "../hir_expand", version = "0.0.0" }
 mbe = { path = "../mbe", version = "0.0.0" }
 cfg = { path = "../cfg", version = "0.0.0" }
 tt = { path = "../tt", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }


@@ -15,6 +15,7 @@ use hir_expand::{
     ast_id_map::AstIdMap, hygiene::Hygiene, AstId, ExpandResult, HirFileId, InFile, MacroDefId,
 };
 use la_arena::{Arena, ArenaMap};
+use limit::Limit;
 use profile::Count;
 use rustc_hash::FxHashMap;
 use syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
@@ -53,10 +54,10 @@ pub struct Expander {
 }
 
 #[cfg(test)]
-const EXPANSION_RECURSION_LIMIT: usize = 32;
+const EXPANSION_RECURSION_LIMIT: Limit = Limit::new(32);
 #[cfg(not(test))]
-const EXPANSION_RECURSION_LIMIT: usize = 128;
+const EXPANSION_RECURSION_LIMIT: Limit = Limit::new(128);
 
 impl CfgExpander {
     pub(crate) fn new(
@@ -99,7 +100,7 @@ impl Expander {
         db: &dyn DefDatabase,
         macro_call: ast::MacroCall,
     ) -> Result<ExpandResult<Option<(Mark, T)>>, UnresolvedMacro> {
-        if self.recursion_limit + 1 > EXPANSION_RECURSION_LIMIT {
+        if EXPANSION_RECURSION_LIMIT.check(self.recursion_limit + 1).is_err() {
             cov_mark::hit!(your_stack_belongs_to_me);
             return Ok(ExpandResult::str_err(
                 "reached recursion limit during macro expansion".into(),


@@ -19,6 +19,7 @@ use hir_expand::{
 use hir_expand::{InFile, MacroCallLoc};
 use itertools::Itertools;
 use la_arena::Idx;
+use limit::Limit;
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::ast;
@@ -49,9 +50,9 @@ use crate::{
     UnresolvedMacro,
 };
 
-const GLOB_RECURSION_LIMIT: usize = 100;
-const EXPANSION_DEPTH_LIMIT: usize = 128;
-const FIXED_POINT_LIMIT: usize = 8192;
+const GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
+const EXPANSION_DEPTH_LIMIT: Limit = Limit::new(128);
+const FIXED_POINT_LIMIT: Limit = Limit::new(8192);
 
 pub(super) fn collect_defs(
     db: &dyn DefDatabase,
@@ -356,7 +357,7 @@ impl DefCollector<'_> {
             }
             i += 1;
-            if i == FIXED_POINT_LIMIT {
+            if FIXED_POINT_LIMIT.check(i).is_err() {
                 log::error!("name resolution is stuck");
                 break 'outer;
             }
@@ -925,7 +926,7 @@ impl DefCollector<'_> {
         import_type: ImportType,
         depth: usize,
     ) {
-        if depth > GLOB_RECURSION_LIMIT {
+        if GLOB_RECURSION_LIMIT.check(depth).is_err() {
            // prevent stack overflows (but this shouldn't be possible)
            panic!("infinite recursion in glob imports!");
        }
@@ -1158,7 +1159,7 @@ impl DefCollector<'_> {
        macro_call_id: MacroCallId,
        depth: usize,
    ) {
-        if depth > EXPANSION_DEPTH_LIMIT {
+        if EXPANSION_DEPTH_LIMIT.check(depth).is_err() {
            cov_mark::hit!(macro_expansion_overflow);
            log::warn!("macro expansion is too deep");
            return;


@@ -1,11 +1,12 @@
 //! This module resolves `mod foo;` declaration to file.
 use base_db::{AnchoredPath, FileId};
 use hir_expand::name::Name;
+use limit::Limit;
 use syntax::SmolStr;
 
 use crate::{db::DefDatabase, HirFileId};
 
-const MOD_DEPTH_LIMIT: u32 = 32;
+const MOD_DEPTH_LIMIT: Limit = Limit::new(32);
 
 #[derive(Clone, Debug)]
 pub(super) struct ModDir {
@@ -25,7 +26,7 @@ impl ModDir {
     }
 
     fn child(&self, dir_path: DirPath, root_non_dir_owner: bool) -> Option<ModDir> {
         let depth = self.depth + 1;
-        if depth > MOD_DEPTH_LIMIT {
+        if MOD_DEPTH_LIMIT.check(depth as usize).is_err() {
             log::error!("MOD_DEPTH_LIMIT exceeded");
             cov_mark::hit!(circular_mods);
             return None;


@@ -21,6 +21,7 @@ parser = { path = "../parser", version = "0.0.0" }
 profile = { path = "../profile", version = "0.0.0" }
 tt = { path = "../tt", version = "0.0.0" }
 mbe = { path = "../mbe", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }


@@ -3,6 +3,7 @@
 use std::sync::Arc;
 
 use base_db::{salsa, SourceDatabase};
+use limit::Limit;
 use mbe::{ExpandError, ExpandResult};
 use parser::FragmentKind;
 use syntax::{
@@ -21,7 +22,7 @@ use crate::{
 ///
 /// If an invocation produces more tokens than this limit, it will not be stored in the database and
 /// an error will be emitted.
-const TOKEN_LIMIT: usize = 524288;
+const TOKEN_LIMIT: Limit = Limit::new(524288);
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
@@ -356,10 +357,12 @@ fn macro_expand_with_arg(
     let ExpandResult { value: tt, err } = macro_rules.expand(db, id, &macro_arg.0);
     // Set a hard limit for the expanded tt
     let count = tt.count();
-    if count > TOKEN_LIMIT {
+    // XXX: Make ExpandResult a real error and use .map_err instead?
+    if TOKEN_LIMIT.check(count).is_err() {
         return ExpandResult::str_err(format!(
             "macro invocation exceeds token limit: produced {} tokens, limit is {}",
-            count, TOKEN_LIMIT,
+            count,
+            TOKEN_LIMIT.inner(),
         ));
     }


@@ -29,6 +29,7 @@ hir_expand = { path = "../hir_expand", version = "0.0.0" }
 base_db = { path = "../base_db", version = "0.0.0" }
 profile = { path = "../profile", version = "0.0.0" }
 syntax = { path = "../syntax", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }


@@ -9,6 +9,7 @@ use base_db::CrateId;
 use chalk_ir::{cast::Cast, fold::Fold, interner::HasInterner, VariableKind};
 use hir_def::lang_item::LangItemTarget;
 use hir_expand::name::name;
+use limit::Limit;
 use log::{info, warn};
 
 use crate::{
@@ -17,6 +18,8 @@ use crate::{
     Ty, TyBuilder, TyKind,
 };
 
+const AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10);
+
 pub(crate) enum AutoderefKind {
     Builtin,
     Overloaded,
@@ -63,7 +66,7 @@ impl Iterator for Autoderef<'_> {
             return Some((self.ty.clone(), 0));
         }
 
-        if self.steps.len() >= AUTODEREF_RECURSION_LIMIT {
+        if AUTODEREF_RECURSION_LIMIT.check(self.steps.len() + 1).is_err() {
             return None;
         }
@@ -87,8 +90,6 @@ impl Iterator for Autoderef<'_> {
     }
 }
 
-const AUTODEREF_RECURSION_LIMIT: usize = 10;
-
 // FIXME: replace uses of this with Autoderef above
 pub fn autoderef<'a>(
     db: &'a dyn HirDatabase,
@@ -99,7 +100,7 @@ pub fn autoderef<'a>(
     successors(Some(ty), move |ty| {
         deref(db, krate?, InEnvironment { goal: ty, environment: environment.clone() })
     })
-    .take(AUTODEREF_RECURSION_LIMIT)
+    .take(AUTODEREF_RECURSION_LIMIT.inner())
 }
 
 pub(crate) fn deref(


@@ -65,7 +65,7 @@ pub(crate) fn replace_derive_with_manual_impl(
         current_crate,
         NameToImport::Exact(trait_name.to_string()),
         items_locator::AssocItemSearch::Exclude,
-        Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT),
+        Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
     )
     .filter_map(|item| match ModuleDef::from(item.as_module_def_id()?) {
         ModuleDef::Trait(trait_) => Some(trait_),


@@ -187,7 +187,7 @@ pub fn resolve_completion_edits(
         current_crate,
         NameToImport::Exact(imported_name),
         items_locator::AssocItemSearch::Include,
-        Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT),
+        Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
     )
     .filter_map(|candidate| {
         current_module


@@ -26,6 +26,7 @@ profile = { path = "../profile", version = "0.0.0" }
 # ide should depend only on the top-level `hir` package. if you need
 # something from some `hir_xxx` subpackage, reexport the API via `hir`.
 hir = { path = "../hir", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }


@@ -282,7 +282,7 @@ fn path_applicable_imports(
         //
         // see also an ignored test under FIXME comment in the qualify_path.rs module
         AssocItemSearch::Exclude,
-        Some(DEFAULT_QUERY_SEARCH_LIMIT),
+        Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
     )
     .filter_map(|item| {
         let mod_path = mod_path(item)?;
@@ -299,7 +299,7 @@ fn path_applicable_imports(
         current_crate,
         path_candidate.name.clone(),
         AssocItemSearch::Include,
-        Some(DEFAULT_QUERY_SEARCH_LIMIT),
+        Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
     )
     .filter_map(|item| {
         import_for_item(
@@ -445,7 +445,7 @@ fn trait_applicable_items(
         current_crate,
         trait_candidate.assoc_item_name.clone(),
         AssocItemSearch::AssocItemsOnly,
-        Some(DEFAULT_QUERY_SEARCH_LIMIT),
+        Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
     )
     .filter_map(|input| item_as_assoc(db, input))
     .filter_map(|assoc| {


@@ -7,6 +7,7 @@ use hir::{
     import_map::{self, ImportKind},
     AsAssocItem, Crate, ItemInNs, ModuleDef, Semantics,
 };
+use limit::Limit;
 use syntax::{ast, AstNode, SyntaxKind::NAME};
 
 use crate::{
@@ -17,7 +18,7 @@ use crate::{
 };
 
 /// A value to use, when uncertain which limit to pick.
-pub const DEFAULT_QUERY_SEARCH_LIMIT: usize = 40;
+pub const DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
 
 /// Three possible ways to search for the name in associated and/or other items.
 #[derive(Debug, Clone, Copy)]

crates/limit/Cargo.toml (new file, 9 lines)

@@ -0,0 +1,9 @@
+[package]
+name = "limit"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+authors = ["rust-analyzer developers"]
+edition = "2018"
+
+[dependencies]

crates/limit/src/lib.rs (new file, 31 lines)

@@ -0,0 +1,31 @@
+//! limit defines a struct to enforce limits.
+
+/// Represents a struct used to enforce a numerical limit.
+pub struct Limit {
+    upper_bound: usize,
+}
+
+impl Limit {
+    /// Creates a new limit.
+    #[inline]
+    pub const fn new(upper_bound: usize) -> Self {
+        Self { upper_bound }
+    }
+
+    /// Gets the underlying numeric limit.
+    #[inline]
+    pub const fn inner(&self) -> usize {
+        self.upper_bound
+    }
+
+    /// Checks whether the given value is below the limit.
+    /// Returns `Ok` when `other` is below `self`, and `Err` otherwise.
+    #[inline]
+    pub const fn check(&self, other: usize) -> Result<(), ()> {
+        if other > self.upper_bound {
+            Err(())
+        } else {
+            Ok(())
+        }
+    }
+}
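
For context, a minimal sketch of the iterator-capping side of this API (the constant value mirrors the autoderef change above; the doubling iterator is just an illustrative stand-in):

```rust
use limit::Limit;
use std::iter::successors;

const AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10);

// Cap an otherwise unbounded iterator at the configured limit,
// the same pattern the autoderef change in this commit uses via `.take(...)`.
fn capped_powers_of_two() -> impl Iterator<Item = u64> {
    successors(Some(1u64), |n| n.checked_mul(2)).take(AUTODEREF_RECURSION_LIMIT.inner())
}
```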