mirror of
https://github.com/rust-lang/rust-analyzer
synced 2024-12-27 21:43:37 +00:00
Auto merge of #17203 - kilpkonn:collapse_terms, r=Veykril
Fix OOM caused by term search

The issue came from the multi Cartesian product for exprs with many (25+) arguments, each having multiple options. The solution is twofold:

### Avoid blowing up in the Cartesian product

**Before, the logic was:**
1. Find expressions for each argument/param - there may be many
2. Take the Cartesian product (which blows up in some cases)
3. If there are more than 2 options, throw them away by squashing them to `Many`

**Now the logic is:**
1. Find expressions for each argument/param and squash them to `Many` if there are more than 2, as otherwise we are guaranteed to also have more than 2 after taking the product, which would mean squashing them anyway
2. Take the Cartesian product lazily on the iterator
3. Start consuming it one by one
4. If there are more than 2 options, throw them away by squashing them to `Many` (same as before)

This is also why I had to update some tests: the expressions get squashed to `Many` more eagerly.

### Use fuel to avoid long search times and high memory usage

Now all the tactics use `should_continue: Fn() -> bool` to check whether they should keep iterating _(similarly to chalk)_. This reduces search times by an order of magnitude, for example from ~139ms/hole to ~14ms/hole for the `ripgrep` crate. Slightly fewer expressions are found, but I think the speed gain is worth it for usability. Also note that syntactic hits decrease more because of the squashing, so you simply need to run the search multiple times to get full terms. The worst-case search times (for example on the `nalgebra` crate, because it has tons of generics) are now mostly under 200ms.

Benchmarks on the `ripgrep` crate.

Before:
```
Tail Expr syntactic hits: 291/1692 (17%)
Tail Exprs found: 1253/1692 (74%)
Term search avg time: 139ms
```

After:
```
Tail Expr syntactic hits: 239/1692 (14%)
Tail Exprs found: 1226/1692 (72%)
Term search avg time: 14ms
```
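To make the two ideas concrete, here is a minimal, self-contained sketch of the fuel-based `should_continue` pattern and of eager squashing. Only the shape of the closure mirrors the actual change in `term_search`; the loop, the threshold value, the candidate strings, and the `"<many>"` placeholder are illustrative stand-ins, not rust-analyzer APIs.

```rust
use std::cell::Cell;

fn main() {
    // Fuel counter; 400 is the default used for assists in this PR.
    let fuel = Cell::new(400u64);

    // Same closure shape as in `term_search`: report whether fuel was left
    // *before* this call, and burn one unit.
    let should_continue = || {
        let remaining = fuel.get();
        fuel.set(remaining.saturating_sub(1));
        remaining > 0
    };

    // Stand-in for the search loop: every round (and, in the real tactics,
    // every candidate gated via `.filter(|_| should_continue())`) consumes
    // fuel, so the search terminates even on huge search spaces.
    let mut rounds = 0u64;
    while should_continue() {
        rounds += 1;
    }
    assert_eq!(rounds, 400);

    // Eager squashing sketch: collapse a long candidate list to a single
    // `Many`-like placeholder *before* taking a Cartesian product, so the
    // product can never blow up. Threshold and values are illustrative.
    let many_threshold = 2;
    let candidates = vec!["a", "b", "c", "d"];
    let squashed =
        if candidates.len() > many_threshold { vec!["<many>"] } else { candidates };
    println!("{squashed:?}"); // ["<many>"]
}
```

The real change threads `should_continue` into every tactic and additionally gates each iterator chain with it, which is where the order-of-magnitude latency reduction quoted above comes from.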
This commit is contained in commit 9db1258dbe.

17 changed files with 141 additions and 37 deletions
```diff
@@ -127,6 +127,13 @@ impl LookupTable {
             self.types_wishlist.insert(ty.clone());
         }
 
+        // Collapse suggestions if there are many
+        if let Some(res) = &res {
+            if res.len() > self.many_threshold {
+                return Some(vec![Expr::Many(ty.clone())]);
+            }
+        }
+
         res
     }
 
@@ -158,6 +165,13 @@ impl LookupTable {
             self.types_wishlist.insert(ty.clone());
         }
 
+        // Collapse suggestions if there are many
+        if let Some(res) = &res {
+            if res.len() > self.many_threshold {
+                return Some(vec![Expr::Many(ty.clone())]);
+            }
+        }
+
         res
     }
 
@@ -255,13 +269,13 @@ pub struct TermSearchConfig {
     pub enable_borrowcheck: bool,
     /// Indicate when to squash multiple trees to `Many` as there are too many to keep track
     pub many_alternatives_threshold: usize,
-    /// Depth of the search eg. number of cycles to run
-    pub depth: usize,
+    /// Fuel for term search in "units of work"
+    pub fuel: u64,
 }
 
 impl Default for TermSearchConfig {
     fn default() -> Self {
-        Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 6 }
+        Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 400 }
     }
 }
 
@@ -280,8 +294,7 @@ impl Default for TermSearchConfig {
 /// transformation tactics. For example functions take as from set of types (arguments) to some
 /// type (return type). Other transformations include methods on type, type constructors and
 /// projections to struct fields (field access).
-/// 3. Once we manage to find path to type we are interested in we continue for single round to see
-/// if we can find more paths that take us to the `goal` type.
+/// 3. If we run out of fuel (term search takes too long) we stop iterating.
 /// 4. Return all the paths (type trees) that take us to the `goal` type.
 ///
 /// Note that there are usually more ways we can get to the `goal` type but some are discarded to
@@ -297,21 +310,31 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
     });
 
     let mut lookup = LookupTable::new(ctx.config.many_alternatives_threshold, ctx.goal.clone());
+    let fuel = std::cell::Cell::new(ctx.config.fuel);
 
+    let should_continue = &|| {
+        let remaining = fuel.get();
+        fuel.set(remaining.saturating_sub(1));
+        if remaining == 0 {
+            tracing::debug!("fuel exhausted");
+        }
+        remaining > 0
+    };
+
     // Try trivial tactic first, also populates lookup table
     let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
     // Use well known types tactic before iterations as it does not depend on other tactics
     solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
 
-    for _ in 0..ctx.config.depth {
+    while should_continue() {
         lookup.new_round();
 
-        solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup));
-        solutions.extend(tactics::free_function(ctx, &defs, &mut lookup));
-        solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup));
-        solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup));
-        solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup));
-        solutions.extend(tactics::make_tuple(ctx, &defs, &mut lookup));
+        solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup, should_continue));
+        solutions.extend(tactics::free_function(ctx, &defs, &mut lookup, should_continue));
+        solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup, should_continue));
+        solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup, should_continue));
+        solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup, should_continue));
+        solutions.extend(tactics::make_tuple(ctx, &defs, &mut lookup, should_continue));
 
         // Discard not interesting `ScopeDef`s for speedup
        for def in lookup.exhausted_scopedefs() {
```
```diff
@@ -211,13 +211,13 @@ impl Expr {
                 }
             }
             Expr::Method { func, target, params, .. } => {
-                if target.contains_many_in_illegal_pos() {
+                if self.contains_many_in_illegal_pos(db) {
                     return Ok(many_formatter(&target.ty(db)));
                 }
 
                 let func_name = func.name(db).display(db.upcast()).to_string();
                 let self_param = func.self_param(db).unwrap();
-                let target = target.gen_source_code(
+                let target_str = target.gen_source_code(
                     sema_scope,
                     many_formatter,
                     prefer_no_std,
@@ -236,9 +236,12 @@ impl Expr {
                     Some(trait_) => {
                         let trait_name = mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?;
                         let target = match self_param.access(db) {
-                            crate::Access::Shared => format!("&{target}"),
-                            crate::Access::Exclusive => format!("&mut {target}"),
-                            crate::Access::Owned => target,
+                            crate::Access::Shared if !target.is_many() => format!("&{target_str}"),
+                            crate::Access::Exclusive if !target.is_many() => {
+                                format!("&mut {target_str}")
+                            }
+                            crate::Access::Owned => target_str,
+                            _ => many_formatter(&target.ty(db)),
                         };
                         let res = match args.is_empty() {
                             true => format!("{trait_name}::{func_name}({target})",),
@@ -246,7 +249,7 @@ impl Expr {
                         };
                         Ok(res)
                     }
-                    None => Ok(format!("{target}.{func_name}({args})")),
+                    None => Ok(format!("{target_str}.{func_name}({args})")),
                 }
             }
             Expr::Variant { variant, generics, params } => {
@@ -381,7 +384,7 @@ impl Expr {
                 Ok(res)
             }
             Expr::Field { expr, field } => {
-                if expr.contains_many_in_illegal_pos() {
+                if expr.contains_many_in_illegal_pos(db) {
                     return Ok(many_formatter(&expr.ty(db)));
                 }
 
@@ -395,7 +398,7 @@ impl Expr {
                 Ok(format!("{strukt}.{field}"))
             }
             Expr::Reference(expr) => {
-                if expr.contains_many_in_illegal_pos() {
+                if expr.contains_many_in_illegal_pos(db) {
                     return Ok(many_formatter(&expr.ty(db)));
                 }
 
@@ -466,10 +469,15 @@ impl Expr {
     /// macro!().bar()
     /// &macro!()
     /// ```
-    fn contains_many_in_illegal_pos(&self) -> bool {
+    fn contains_many_in_illegal_pos(&self, db: &dyn HirDatabase) -> bool {
         match self {
-            Expr::Method { target, .. } => target.contains_many_in_illegal_pos(),
-            Expr::Field { expr, .. } => expr.contains_many_in_illegal_pos(),
+            Expr::Method { target, func, .. } => {
+                match func.as_assoc_item(db).and_then(|it| it.container_or_implemented_trait(db)) {
+                    Some(_) => false,
+                    None => target.is_many(),
+                }
+            }
+            Expr::Field { expr, .. } => expr.contains_many_in_illegal_pos(db),
             Expr::Reference(target) => target.is_many(),
             Expr::Many(_) => true,
             _ => false,
```
```diff
@@ -4,6 +4,7 @@
 //! * `ctx` - Context for the term search
 //! * `defs` - Set of items in scope at term search target location
 //! * `lookup` - Lookup table for types
+//! * `should_continue` - Function that indicates when to stop iterating
 //! And they return iterator that yields type trees that unify with the `goal` type.
 
 use std::iter;
@@ -97,16 +98,19 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
 /// * `ctx` - Context for the term search
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
+/// * `should_continue` - Function that indicates when to stop iterating
 pub(super) fn type_constructor<'a, DB: HirDatabase>(
     ctx: &'a TermSearchCtx<'a, DB>,
     defs: &'a FxHashSet<ScopeDef>,
     lookup: &'a mut LookupTable,
+    should_continue: &'a dyn std::ops::Fn() -> bool,
 ) -> impl Iterator<Item = Expr> + 'a {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
     fn variant_helper(
         db: &dyn HirDatabase,
         lookup: &mut LookupTable,
+        should_continue: &dyn std::ops::Fn() -> bool,
         parent_enum: Enum,
         variant: Variant,
         config: &TermSearchConfig,
@@ -152,6 +156,7 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
             .chain((non_default_type_params_len == 0).then_some(Vec::new()));
 
         generic_params
+            .filter(|_| should_continue())
             .filter_map(move |generics| {
                 // Insert default type params
                 let mut g = generics.into_iter();
@@ -194,8 +199,14 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
     defs.iter()
         .filter_map(move |def| match def {
             ScopeDef::ModuleDef(ModuleDef::Variant(it)) => {
-                let variant_exprs =
-                    variant_helper(db, lookup, it.parent_enum(db), *it, &ctx.config);
+                let variant_exprs = variant_helper(
+                    db,
+                    lookup,
+                    should_continue,
+                    it.parent_enum(db),
+                    *it,
+                    &ctx.config,
+                );
                 if variant_exprs.is_empty() {
                     return None;
                 }
@@ -213,7 +224,9 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
                 let exprs: Vec<(Type, Vec<Expr>)> = enum_
                     .variants(db)
                     .into_iter()
-                    .flat_map(|it| variant_helper(db, lookup, *enum_, it, &ctx.config))
+                    .flat_map(|it| {
+                        variant_helper(db, lookup, should_continue, *enum_, it, &ctx.config)
+                    })
                     .collect();
 
                 if exprs.is_empty() {
@@ -271,6 +284,7 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
                     .chain((non_default_type_params_len == 0).then_some(Vec::new()));
 
                 let exprs = generic_params
+                    .filter(|_| should_continue())
                     .filter_map(|generics| {
                         // Insert default type params
                         let mut g = generics.into_iter();
@@ -345,10 +359,12 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
 /// * `ctx` - Context for the term search
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
+/// * `should_continue` - Function that indicates when to stop iterating
 pub(super) fn free_function<'a, DB: HirDatabase>(
     ctx: &'a TermSearchCtx<'a, DB>,
     defs: &'a FxHashSet<ScopeDef>,
     lookup: &'a mut LookupTable,
+    should_continue: &'a dyn std::ops::Fn() -> bool,
 ) -> impl Iterator<Item = Expr> + 'a {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
@@ -390,6 +406,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
                 .permutations(non_default_type_params_len);
 
             let exprs: Vec<_> = generic_params
+                .filter(|_| should_continue())
                 .filter_map(|generics| {
                     // Insert default type params
                     let mut g = generics.into_iter();
@@ -474,10 +491,12 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
 /// * `ctx` - Context for the term search
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
+/// * `should_continue` - Function that indicates when to stop iterating
 pub(super) fn impl_method<'a, DB: HirDatabase>(
     ctx: &'a TermSearchCtx<'a, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
     lookup: &'a mut LookupTable,
+    should_continue: &'a dyn std::ops::Fn() -> bool,
 ) -> impl Iterator<Item = Expr> + 'a {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
@@ -554,6 +573,7 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
                 .permutations(non_default_fn_type_params_len);
 
             let exprs: Vec<_> = generic_params
+                .filter(|_| should_continue())
                 .filter_map(|generics| {
                     // Insert default type params
                     let mut g = generics.into_iter();
@@ -645,10 +665,12 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
 /// * `ctx` - Context for the term search
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
+/// * `should_continue` - Function that indicates when to stop iterating
 pub(super) fn struct_projection<'a, DB: HirDatabase>(
     ctx: &'a TermSearchCtx<'a, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
     lookup: &'a mut LookupTable,
+    should_continue: &'a dyn std::ops::Fn() -> bool,
 ) -> impl Iterator<Item = Expr> + 'a {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
@@ -656,6 +678,7 @@ pub(super) fn struct_projection<'a, DB: HirDatabase>(
         .new_types(NewTypesKey::StructProjection)
         .into_iter()
         .map(|ty| (ty.clone(), lookup.find(db, &ty).expect("Expr not in lookup")))
+        .filter(|_| should_continue())
         .flat_map(move |(ty, targets)| {
             ty.fields(db).into_iter().filter_map(move |(field, filed_ty)| {
                 if !field.is_visible_from(db, module) {
@@ -716,10 +739,12 @@ pub(super) fn famous_types<'a, DB: HirDatabase>(
 /// * `ctx` - Context for the term search
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
+/// * `should_continue` - Function that indicates when to stop iterating
 pub(super) fn impl_static_method<'a, DB: HirDatabase>(
     ctx: &'a TermSearchCtx<'a, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
     lookup: &'a mut LookupTable,
+    should_continue: &'a dyn std::ops::Fn() -> bool,
 ) -> impl Iterator<Item = Expr> + 'a {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
@@ -728,6 +753,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
         .clone()
         .into_iter()
         .chain(iter::once(ctx.goal.clone()))
+        .filter(|_| should_continue())
         .flat_map(|ty| {
             Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
         })
@@ -801,6 +827,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
                 .permutations(non_default_fn_type_params_len);
 
             let exprs: Vec<_> = generic_params
+                .filter(|_| should_continue())
                 .filter_map(|generics| {
                     // Insert default type params
                     let mut g = generics.into_iter();
@@ -884,10 +911,12 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
 /// * `ctx` - Context for the term search
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
+/// * `should_continue` - Function that indicates when to stop iterating
 pub(super) fn make_tuple<'a, DB: HirDatabase>(
     ctx: &'a TermSearchCtx<'a, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
     lookup: &'a mut LookupTable,
+    should_continue: &'a dyn std::ops::Fn() -> bool,
 ) -> impl Iterator<Item = Expr> + 'a {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
@@ -896,6 +925,7 @@ pub(super) fn make_tuple<'a, DB: HirDatabase>(
         .types_wishlist()
         .clone()
         .into_iter()
+        .filter(|_| should_continue())
         .filter(|ty| ty.is_tuple())
         .filter_map(move |ty| {
             // Double check to not contain unknown
@@ -915,6 +945,7 @@ pub(super) fn make_tuple<'a, DB: HirDatabase>(
             let exprs: Vec<Expr> = param_exprs
                 .into_iter()
                 .multi_cartesian_product()
+                .filter(|_| should_continue())
                 .map(|params| {
                     let tys: Vec<Type> = params.iter().map(|it| it.ty(db)).collect();
                     let tuple_ty = Type::new_tuple(module.krate().into(), &tys);
```
```diff
@@ -16,4 +16,5 @@ pub struct AssistConfig {
     pub prefer_no_std: bool,
     pub prefer_prelude: bool,
     pub assist_emit_must_use: bool,
+    pub term_search_fuel: u64,
 }
```
```diff
@@ -1,5 +1,5 @@
 //! Term search assist
-use hir::term_search::TermSearchCtx;
+use hir::term_search::{TermSearchConfig, TermSearchCtx};
 use ide_db::{
     assists::{AssistId, AssistKind, GroupLabel},
     famous_defs::FamousDefs,
@@ -34,7 +34,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
         sema: &ctx.sema,
         scope: &scope,
         goal: target_ty,
-        config: Default::default(),
+        config: TermSearchConfig { fuel: ctx.config.term_search_fuel, ..Default::default() },
     };
     let paths = hir::term_search::term_search(&term_search_ctx);
 
```
```diff
@@ -31,6 +31,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
     prefer_no_std: false,
     prefer_prelude: true,
     assist_emit_must_use: false,
+    term_search_fuel: 400,
 };
 
 pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
@@ -46,6 +47,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
     prefer_no_std: false,
     prefer_prelude: true,
     assist_emit_must_use: false,
+    term_search_fuel: 400,
 };
 
 pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
@@ -61,6 +63,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
     prefer_no_std: false,
     prefer_prelude: true,
     assist_emit_must_use: false,
+    term_search_fuel: 400,
 };
 
 pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
```
```diff
@@ -353,7 +353,7 @@ pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>)
         config: hir::term_search::TermSearchConfig {
             enable_borrowcheck: false,
             many_alternatives_threshold: 1,
-            depth: 6,
+            fuel: 200,
         },
     };
     let exprs = hir::term_search::term_search(&term_search_ctx);
```
```diff
@@ -15,6 +15,7 @@ pub struct CompletionConfig {
     pub enable_self_on_the_fly: bool,
     pub enable_private_editable: bool,
     pub enable_term_search: bool,
+    pub term_search_fuel: u64,
     pub full_function_signatures: bool,
     pub callable: Option<CallableSnippets>,
     pub snippet_cap: Option<SnippetCap>,
```
```diff
@@ -1853,8 +1853,7 @@ fn f() { A { bar: b$0 }; }
                 fn baz() [type]
                 ex baz() [type]
                 ex bar() [type]
-                ex A { bar: baz() }.bar [type]
-                ex A { bar: bar() }.bar [type]
+                ex A { bar: ... }.bar [type]
                 st A []
                 fn f() []
             "#]],
@@ -1992,8 +1991,8 @@ fn main() {
 }
 "#,
         expect![[r#"
-            ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
             ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify]
+            ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
             lc m [local]
             lc t [local]
             lc &t [type+local]
@@ -2042,8 +2041,8 @@ fn main() {
 }
 "#,
         expect![[r#"
-            ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify]
             ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify]
+            ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify]
             lc m [local]
             lc t [local]
             lc &mut t [type+local]
```
```diff
@@ -80,6 +80,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
     },
     snippets: Vec::new(),
     limit: None,
+    term_search_fuel: 200,
 };
 
 pub(crate) fn completion_list(ra_fixture: &str) -> String {
```
```diff
@@ -1,6 +1,6 @@
 use hir::{
     db::ExpandDatabase,
-    term_search::{term_search, TermSearchCtx},
+    term_search::{term_search, TermSearchConfig, TermSearchCtx},
     ClosureStyle, HirDisplay,
 };
 use ide_db::{
@@ -47,7 +47,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
         sema: &ctx.sema,
         scope: &scope,
         goal: d.expected.clone(),
-        config: Default::default(),
+        config: TermSearchConfig { fuel: ctx.config.term_search_fuel, ..Default::default() },
     };
     let paths = term_search(&term_search_ctx);
 
@@ -274,7 +274,7 @@ impl Foo for Baz {
 }
 fn asd() -> Bar {
     let a = Baz;
-    Foo::foo(a)
+    Foo::foo(_)
 }
 ",
     );
@@ -363,7 +363,7 @@ impl Foo for A {
 }
 fn main() {
     let a = A;
-    let c: Bar = Foo::foo(&a);
+    let c: Bar = Foo::foo(_);
 }"#,
     );
 }
```
```diff
@@ -232,6 +232,7 @@ pub struct DiagnosticsConfig {
     pub insert_use: InsertUseConfig,
     pub prefer_no_std: bool,
     pub prefer_prelude: bool,
+    pub term_search_fuel: u64,
 }
 
 impl DiagnosticsConfig {
@@ -256,6 +257,7 @@ impl DiagnosticsConfig {
             },
             prefer_no_std: false,
             prefer_prelude: true,
+            term_search_fuel: 400,
         }
     }
 }
```
```diff
@@ -986,6 +986,7 @@ impl flags::AnalysisStats {
                     prefer_no_std: false,
                     prefer_prelude: true,
                     style_lints: false,
+                    term_search_fuel: 400,
                 },
                 ide::AssistResolveStrategy::All,
                 file_id,
```
```diff
@@ -454,6 +454,9 @@ config_data! {
     /// Local configurations can be overridden for every crate by placing a `rust-analyzer.toml` on crate root.
     /// A config is searched for by traversing a "config tree" in a bottom up fashion. It is chosen by the nearest first principle.
     local: struct LocalDefaultConfigData <- LocalConfigInput -> {
+        /// Term search fuel in "units of work" for assists (Defaults to 400).
+        assist_termSearch_fuel: usize = 400,
+
         /// Toggles the additional completions that automatically add imports when completed.
         /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
         completion_autoimport_enable: bool = true,
@@ -515,6 +518,8 @@ config_data! {
         }"#).unwrap(),
         /// Whether to enable term search based snippets like `Some(foo.bar().baz())`.
         completion_termSearch_enable: bool = false,
+        /// Term search fuel in "units of work" for autocompletion (Defaults to 200).
+        completion_termSearch_fuel: usize = 200,
 
         /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
         highlightRelated_breakPoints_enable: bool = true,
@@ -1015,6 +1020,7 @@ impl Config {
             prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
             assist_emit_must_use: self.assist_emitMustUse().to_owned(),
             prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
+            term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
         }
     }
 
@@ -1048,6 +1054,7 @@ impl Config {
             snippets: self.snippets.clone().to_vec(),
             limit: self.completion_limit(source_root).to_owned(),
             enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
+            term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
             prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
         }
     }
@@ -1067,6 +1074,7 @@ impl Config {
             prefer_no_std: self.imports_preferNoStd(source_root).to_owned(),
             prefer_prelude: self.imports_preferPrelude(source_root).to_owned(),
             style_lints: self.diagnostics_styleLints_enable().to_owned(),
+            term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64,
         }
     }
     pub fn expand_proc_attr_macros(&self) -> bool {
```
```diff
@@ -153,6 +153,7 @@ fn integrated_completion_benchmark() {
         prefer_no_std: false,
         prefer_prelude: true,
         limit: None,
+        term_search_fuel: 200,
     };
     let position =
         FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -197,6 +198,7 @@ fn integrated_completion_benchmark() {
         prefer_no_std: false,
         prefer_prelude: true,
         limit: None,
+        term_search_fuel: 200,
     };
     let position =
         FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -239,6 +241,7 @@ fn integrated_completion_benchmark() {
         prefer_no_std: false,
         prefer_prelude: true,
         limit: None,
+        term_search_fuel: 200,
     };
     let position =
         FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -295,6 +298,7 @@ fn integrated_diagnostics_benchmark() {
         },
         prefer_no_std: false,
         prefer_prelude: false,
+        term_search_fuel: 400,
     };
     host.analysis()
         .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id)
```
```diff
@@ -9,6 +9,11 @@ for enum variants.
 --
 Placeholder expression to use for missing expressions in assists.
 --
+[[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `400`)::
++
+--
+Term search fuel in "units of work" for assists (Defaults to 400).
+--
 [[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`)::
 +
 --
@@ -373,6 +378,11 @@ Custom completion snippets.
 --
 Whether to enable term search based snippets like `Some(foo.bar().baz())`.
 --
+[[rust-analyzer.completion.termSearch.fuel]]rust-analyzer.completion.termSearch.fuel (default: `200`)::
++
+--
+Term search fuel in "units of work" for autocompletion (Defaults to 200).
+--
 [[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`)::
 +
 --
```
```diff
@@ -534,6 +534,12 @@
                         "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
                     ]
                 },
+                "rust-analyzer.assist.termSearch.fuel": {
+                    "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 400).",
+                    "default": 400,
+                    "type": "integer",
+                    "minimum": 0
+                },
                 "rust-analyzer.cachePriming.enable": {
                     "markdownDescription": "Warm up caches on project load.",
                     "default": true,
@@ -930,6 +936,12 @@
                     "default": false,
                     "type": "boolean"
                 },
+                "rust-analyzer.completion.termSearch.fuel": {
+                    "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 200).",
+                    "default": 200,
+                    "type": "integer",
+                    "minimum": 0
+                },
                 "rust-analyzer.diagnostics.disabled": {
                     "markdownDescription": "List of rust-analyzer diagnostics to disable.",
                     "default": [],
```