Add quantified trees to reduce autocomplete options

Tavo Annus 2024-01-03 22:11:08 +02:00
parent bdbdd83ec1
commit a946970e2d
16 changed files with 300 additions and 94 deletions


@@ -12,7 +12,7 @@ authors = ["rust-analyzer team"]
 [profile.dev]
 # Disabling debug info speeds up builds a bunch,
 # and we don't rely on it for debugging that much.
-debug = 0
+debug = 2
 [profile.dev.package]
 # These speed up local tests.


@@ -12,13 +12,6 @@ pub use type_tree::TypeTree;
 mod tactics;
-/// # Maximum amount of variations to take per type
-///
-/// This is to speed up term search as there may be huge amount of variations of arguments for
-/// function, even when the return type is always the same. The idea is to take first n and call it
-/// a day.
-const MAX_VARIATIONS: usize = 10;
 /// Key for lookup table to query new types reached.
 #[derive(Debug, Hash, PartialEq, Eq)]
 enum NewTypesKey {
@@ -26,6 +19,52 @@ enum NewTypesKey {
     StructProjection,
 }
+#[derive(Debug)]
+enum AlternativeTrees {
+    Few(FxHashSet<TypeTree>),
+    Many(Type),
+}
+impl AlternativeTrees {
+    pub fn new(
+        threshold: usize,
+        ty: Type,
+        trees: impl Iterator<Item = TypeTree>,
+    ) -> AlternativeTrees {
+        let mut it = AlternativeTrees::Few(Default::default());
+        it.extend_with_threshold(threshold, ty, trees);
+        it
+    }
+    pub fn trees(&self) -> Vec<TypeTree> {
+        match self {
+            AlternativeTrees::Few(trees) => trees.iter().cloned().collect(),
+            AlternativeTrees::Many(ty) => vec![TypeTree::Many(ty.clone())],
+        }
+    }
+    pub fn extend_with_threshold(
+        &mut self,
+        threshold: usize,
+        ty: Type,
+        mut trees: impl Iterator<Item = TypeTree>,
+    ) {
+        match self {
+            AlternativeTrees::Few(tts) => {
+                while let Some(it) = trees.next() {
+                    if tts.len() > threshold {
+                        *self = AlternativeTrees::Many(ty);
+                        break;
+                    }
+                    tts.insert(it);
+                }
+            }
+            AlternativeTrees::Many(_) => (),
+        }
+    }
+}
 /// # Lookup table for term search
 ///
 /// Lookup table keeps all the state during term search.
@@ -38,7 +77,7 @@ enum NewTypesKey {
 #[derive(Default, Debug)]
 struct LookupTable {
     /// All the `TypeTree`s in "value" produce the type of "key"
-    data: FxHashMap<Type, FxHashSet<TypeTree>>,
+    data: FxHashMap<Type, AlternativeTrees>,
     /// New types reached since last query by the `NewTypesKey`
     new_types: FxHashMap<NewTypesKey, Vec<Type>>,
     /// ScopeDefs that are not interesting any more
@@ -49,6 +88,8 @@ struct LookupTable {
     rounds_since_sopedef_hit: FxHashMap<ScopeDef, u32>,
     /// Types queried but not present
     types_wishlist: FxHashSet<Type>,
+    /// Threshold to squash trees to `Many`
+    many_threshold: usize,
 }
 impl LookupTable {
@@ -65,7 +106,7 @@ impl LookupTable {
         self.data
             .iter()
             .find(|(t, _)| t.could_unify_with_deeply(db, ty))
-            .map(|(_, tts)| tts.iter().cloned().collect())
+            .map(|(_, tts)| tts.trees())
     }
     /// Same as find but automatically creates shared reference of types in the lookup
@@ -76,7 +117,7 @@ impl LookupTable {
         self.data
             .iter()
             .find(|(t, _)| t.could_unify_with_deeply(db, ty))
-            .map(|(_, tts)| tts.iter().cloned().collect())
+            .map(|(_, tts)| tts.trees())
             .or_else(|| {
                 self.data
                     .iter()
@@ -84,7 +125,10 @@ impl LookupTable {
                         Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, &ty)
                     })
                    .map(|(_, tts)| {
-                        tts.iter().map(|tt| TypeTree::Reference(Box::new(tt.clone()))).collect()
+                        tts.trees()
+                            .into_iter()
+                            .map(|tt| TypeTree::Reference(Box::new(tt)))
+                            .collect()
                    })
             })
     }
@@ -96,9 +140,12 @@ impl LookupTable {
     /// but they clearly do not unify themselves.
     fn insert(&mut self, ty: Type, trees: impl Iterator<Item = TypeTree>) {
         match self.data.get_mut(&ty) {
-            Some(it) => it.extend(trees.take(MAX_VARIATIONS)),
+            Some(it) => it.extend_with_threshold(self.many_threshold, ty, trees),
             None => {
-                self.data.insert(ty.clone(), trees.take(MAX_VARIATIONS).collect());
+                self.data.insert(
+                    ty.clone(),
+                    AlternativeTrees::new(self.many_threshold, ty.clone(), trees),
+                );
                 for it in self.new_types.values_mut() {
                     it.push(ty.clone());
                 }
@@ -175,11 +222,15 @@ pub struct TermSearchCtx<'a, DB: HirDatabase> {
 pub struct TermSearchConfig {
     /// Enable borrow checking, this guarantees the outputs of the `term_search` to borrow-check
     pub enable_borrowcheck: bool,
+    /// Indicate when to squash multiple trees to `Many` as there are too many to keep track
+    pub many_alternatives_threshold: usize,
+    /// Depth of the search eg. number of cycles to run
+    pub depth: usize,
 }
 impl Default for TermSearchConfig {
     fn default() -> Self {
-        Self { enable_borrowcheck: true }
+        Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 5 }
     }
 }
@@ -225,7 +276,7 @@ pub fn term_search<DB: HirDatabase>(ctx: TermSearchCtx<'_, DB>) -> Vec<TypeTree>
     let mut solution_found = !solutions.is_empty();
-    for _ in 0..5 {
+    for _ in 0..ctx.config.depth {
         lookup.new_round();
         solutions.extend(tactics::type_constructor(&ctx, &defs, &mut lookup));
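
For context, the squashing behaviour introduced above (keep at most `many_alternatives_threshold` concrete trees per type, then collapse to a single `Many` placeholder) can be sketched standalone. This is a minimal illustration with plain std types standing in for rust-analyzer's `TypeTree` and `Type`; the names are illustrative, not part of the commit:

    use std::collections::HashSet;

    // Illustrative stand-in for `AlternativeTrees`: keep distinct alternatives in a set
    // until the threshold is crossed, then collapse to a single `Many` marker.
    #[derive(Debug)]
    enum Alternatives {
        Few(HashSet<String>),
        Many,
    }

    impl Alternatives {
        fn extend_with_threshold(&mut self, threshold: usize, items: impl Iterator<Item = String>) {
            if let Alternatives::Few(set) = self {
                for item in items {
                    if set.len() > threshold {
                        *self = Alternatives::Many;
                        return;
                    }
                    set.insert(item);
                }
            }
        }
    }

    fn main() {
        let mut alts = Alternatives::Few(HashSet::new());
        alts.extend_with_threshold(1, ["a", "b", "c"].into_iter().map(String::from));
        // With threshold 1 the set holds "a" and "b"; when "c" arrives the set is
        // already past the limit, so everything collapses to `Many`.
        println!("{alts:?}"); // prints: Many
    }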


@@ -21,7 +21,7 @@ use crate::{
 use crate::term_search::{TermSearchConfig, TypeTree};
-use super::{LookupTable, NewTypesKey, TermSearchCtx, MAX_VARIATIONS};
+use super::{LookupTable, NewTypesKey, TermSearchCtx};
 /// # Trivial tactic
 ///
@@ -194,7 +194,6 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
                 param_trees
                     .into_iter()
                     .multi_cartesian_product()
-                    .take(MAX_VARIATIONS)
                     .map(|params| TypeTree::Variant {
                         variant,
                         generics: generics.clone(),
@@ -315,7 +314,6 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
                 param_trees
                     .into_iter()
                     .multi_cartesian_product()
-                    .take(MAX_VARIATIONS)
                     .map(|params| TypeTree::Struct {
                         strukt: *it,
                         generics: generics.clone(),
@@ -440,7 +438,6 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
                 param_trees
                     .into_iter()
                     .multi_cartesian_product()
-                    .take(MAX_VARIATIONS)
                     .map(|params| TypeTree::Function {
                         func: *it,
                         generics: generics.clone(),
@@ -603,7 +600,6 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
                 let fn_trees: Vec<TypeTree> = std::iter::once(target_type_trees)
                     .chain(param_trees.into_iter())
                     .multi_cartesian_product()
-                    .take(MAX_VARIATIONS)
                     .map(|params| TypeTree::Function { func: it, generics: Vec::new(), params })
                     .collect();
@@ -822,7 +818,6 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
                 param_trees
                     .into_iter()
                     .multi_cartesian_product()
-                    .take(MAX_VARIATIONS)
                     .map(|params| TypeTree::Function {
                         func: it,
                         generics: generics.clone(),


@@ -109,6 +109,8 @@ pub enum TypeTree {
     Field { type_tree: Box<TypeTree>, field: Field },
     /// Passing type as reference (with `&`)
     Reference(Box<TypeTree>),
+    /// Indicates possibility of many different options that all evaluate to `ty`
+    Many(Type),
 }
 impl TypeTree {
@@ -117,7 +119,11 @@ impl TypeTree {
     /// Note that trait imports are not added to generated code.
     /// To make sure that the code is valid, callee has to also ensure that all the traits listed
     /// by `traits_used` method are also imported.
-    pub fn gen_source_code(&self, sema_scope: &SemanticsScope<'_>) -> String {
+    pub fn gen_source_code(
+        &self,
+        sema_scope: &SemanticsScope<'_>,
+        many_formatter: &mut dyn FnMut(&Type) -> String,
+    ) -> String {
         let db = sema_scope.db;
         match self {
             TypeTree::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
@@ -128,9 +134,15 @@ impl TypeTree {
             TypeTree::Function { func, params, .. } => {
                 if let Some(self_param) = func.self_param(db) {
                     let func_name = func.name(db).display(db.upcast()).to_string();
-                    let target = params.first().expect("no self param").gen_source_code(sema_scope);
-                    let args =
-                        params.iter().skip(1).map(|f| f.gen_source_code(sema_scope)).join(", ");
+                    let target = params
+                        .first()
+                        .expect("no self param")
+                        .gen_source_code(sema_scope, many_formatter);
+                    let args = params
+                        .iter()
+                        .skip(1)
+                        .map(|f| f.gen_source_code(sema_scope, many_formatter))
+                        .join(", ");
                     match func.as_assoc_item(db).unwrap().containing_trait_or_trait_impl(db) {
                         Some(trait_) => {
@@ -149,7 +161,10 @@ impl TypeTree {
                         None => format!("{target}.{func_name}({args})"),
                     }
                 } else {
-                    let args = params.iter().map(|f| f.gen_source_code(sema_scope)).join(", ");
+                    let args = params
+                        .iter()
+                        .map(|f| f.gen_source_code(sema_scope, many_formatter))
+                        .join(", ");
                     match func.as_assoc_item(db).map(|it| it.container(db)) {
                         Some(container) => {
@@ -194,7 +209,10 @@ impl TypeTree {
                 };
                 let inner = match variant.kind(db) {
                     StructKind::Tuple => {
-                        let args = params.iter().map(|f| f.gen_source_code(sema_scope)).join(", ");
+                        let args = params
+                            .iter()
+                            .map(|f| f.gen_source_code(sema_scope, many_formatter))
+                            .join(", ");
                         format!("{generics_str}({args})")
                     }
                     StructKind::Record => {
@@ -206,7 +224,7 @@ impl TypeTree {
                                 format!(
                                     "{}: {}",
                                     f.name(db).display(db.upcast()).to_string(),
-                                    a.gen_source_code(sema_scope)
+                                    a.gen_source_code(sema_scope, many_formatter)
                                 )
                             })
                            .join(", ");
@@ -222,7 +240,10 @@ impl TypeTree {
                 let generics = non_default_generics(db, (*strukt).into(), generics);
                 let inner = match strukt.kind(db) {
                     StructKind::Tuple => {
-                        let args = params.iter().map(|a| a.gen_source_code(sema_scope)).join(", ");
+                        let args = params
+                            .iter()
+                            .map(|a| a.gen_source_code(sema_scope, many_formatter))
+                            .join(", ");
                         format!("({args})")
                     }
                     StructKind::Record => {
@@ -234,7 +255,7 @@ impl TypeTree {
                                 format!(
                                     "{}: {}",
                                     f.name(db).display(db.upcast()).to_string(),
-                                    a.gen_source_code(sema_scope)
+                                    a.gen_source_code(sema_scope, many_formatter)
                                 )
                             })
                            .join(", ");
@@ -254,14 +275,15 @@ impl TypeTree {
                 format!("{prefix}{inner}")
             }
             TypeTree::Field { type_tree, field } => {
-                let strukt = type_tree.gen_source_code(sema_scope);
+                let strukt = type_tree.gen_source_code(sema_scope, many_formatter);
                 let field = field.name(db).display(db.upcast()).to_string();
                 format!("{strukt}.{field}")
             }
             TypeTree::Reference(type_tree) => {
-                let inner = type_tree.gen_source_code(sema_scope);
+                let inner = type_tree.gen_source_code(sema_scope, many_formatter);
                 format!("&{inner}")
             }
+            TypeTree::Many(ty) => many_formatter(ty),
         }
     }
@@ -292,6 +314,7 @@ impl TypeTree {
                 field.ty_with_generics(db, type_tree.ty(db).type_arguments())
             }
             TypeTree::Reference(it) => it.ty(db),
+            TypeTree::Many(ty) => ty.clone(),
         }
     }
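
The `many_formatter` parameter added above lets each caller decide how a `Many` node is rendered; the rest of this commit passes `todo!()` for the assist, `_` for the typed-hole fix, and snippet placeholders for completions. A minimal standalone sketch of that callback pattern, with a toy `Node` type standing in for `TypeTree` (illustrative only):

    // Toy stand-in for `TypeTree`: rendering delegates `Many` nodes to a caller-supplied closure.
    enum Node {
        Lit(i32),
        Many(&'static str), // a type name stands in for `hir::Type`
    }

    fn render(node: &Node, many_formatter: &mut dyn FnMut(&str) -> String) -> String {
        match node {
            Node::Lit(v) => v.to_string(),
            Node::Many(ty) => many_formatter(ty),
        }
    }

    fn main() {
        let mut as_todo = |_: &str| String::from("todo!()");
        let mut as_hole = |_: &str| String::from("_");
        println!("{}", render(&Node::Many("i32"), &mut as_todo)); // todo!()
        println!("{}", render(&Node::Many("i32"), &mut as_hole)); // _
        println!("{}", render(&Node::Lit(42), &mut as_todo));     // 42
    }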


@@ -36,8 +36,9 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
         return None;
     }
+    let mut formatter = |_: &hir::Type| String::from("todo!()");
     for path in paths.iter().unique() {
-        let code = path.gen_source_code(&scope);
+        let code = path.gen_source_code(&scope, &mut formatter);
         acc.add_group(
             &GroupLabel(String::from("Term search")),
             AssistId("term_search", AssistKind::Generate),


@@ -162,10 +162,11 @@ impl Completions {
         &mut self,
         ctx: &CompletionContext<'_>,
         expr: &hir::term_search::TypeTree,
-        path_ctx: &PathCompletionCtx,
     ) {
-        let item = render_type_tree(ctx, expr, path_ctx);
-        item.add_to(self, ctx.db);
+        match render_type_tree(ctx, expr) {
+            Some(item) => item.add_to(self, ctx.db),
+            None => (),
+        }
     }
     pub(crate) fn add_crate_roots(
@@ -698,10 +699,10 @@ pub(super) fn complete_name_ref(
     ctx: &CompletionContext<'_>,
     NameRefContext { nameref, kind }: &NameRefContext,
 ) {
+    expr::complete_expr(acc, ctx);
     match kind {
         NameRefKind::Path(path_ctx) => {
             flyimport::import_on_the_fly_path(acc, ctx, path_ctx);
-            expr::complete_expr(acc, ctx, path_ctx);
             match &path_ctx.kind {
                 PathKind::Expr { expr_ctx } => {


@@ -332,7 +332,6 @@ pub(crate) fn complete_expr_path(
 pub(crate) fn complete_expr(
     acc: &mut Completions,
     ctx: &CompletionContext<'_>,
-    path_ctx: &PathCompletionCtx,
 ) {
     let _p = profile::span("complete_expr");
     if !ctx.qualifier_ctx.none() {
@@ -349,11 +348,15 @@ pub(crate) fn complete_expr(
             sema: &ctx.sema,
             scope: &ctx.scope,
             goal: ty.clone(),
-            config: hir::term_search::TermSearchConfig { enable_borrowcheck: false },
+            config: hir::term_search::TermSearchConfig {
+                enable_borrowcheck: false,
+                many_alternatives_threshold: 1,
+                depth: 2,
+            },
         };
         let exprs = hir::term_search::term_search(term_search_ctx);
         for expr in exprs {
-            acc.add_expr(ctx, &expr, path_ctx);
+            acc.add_expr(ctx, &expr);
         }
     }
 }


@@ -17,7 +17,7 @@ use ide_db::{
     imports::import_assets::LocatedImport,
     RootDatabase, SnippetCap, SymbolKind,
 };
-use syntax::{format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange};
+use syntax::{ast, AstNode, SmolStr, SyntaxKind, TextRange};
 use text_edit::TextEdit;
 use crate::{
@@ -275,21 +275,50 @@ pub(crate) fn render_resolution_with_import_pat(
 pub(crate) fn render_type_tree(
     ctx: &CompletionContext<'_>,
     expr: &hir::term_search::TypeTree,
-    path_ctx: &PathCompletionCtx,
-) -> Builder {
-    let mut item = CompletionItem::new(
-        CompletionItemKind::Snippet,
-        ctx.source_range(),
-        expr.gen_source_code(&ctx.scope),
-    );
+) -> Option<Builder> {
+    let mut i = 1;
+    let mut snippet_formatter = |ty: &hir::Type| {
+        let arg_name = ty
+            .as_adt()
+            .and_then(|adt| adt.name(ctx.db).as_text())
+            .map(|s| stdx::to_lower_snake_case(s.as_str()))
+            .unwrap_or_else(|| String::from("_"));
+        let res = format!("${{{i}:{arg_name}}}");
+        i += 1;
+        res
+    };
+    let mut label_formatter = |ty: &hir::Type| {
+        ty.as_adt()
+            .and_then(|adt| adt.name(ctx.db).as_text())
+            .map(|s| stdx::to_lower_snake_case(s.as_str()))
+            .unwrap_or_else(|| String::from("_"))
+    };
+    let label = expr.gen_source_code(&ctx.scope, &mut label_formatter);
+    let source_range = match &ctx.expected_name {
+        Some(name_or_ref) => name_or_ref.syntax().text_range(),
+        None => match ctx.original_token.parent() {
+            Some(node) => match node.ancestors().find_map(|n| ast::Path::cast(n)) {
+                Some(path) => path.syntax().text_range(),
+                None => node.text_range(),
+            },
+            None => ctx.source_range(),
+        },
+    };
+    let mut item = CompletionItem::new(CompletionItemKind::Snippet, source_range, label);
+    let snippet = format!("{}$0", expr.gen_source_code(&ctx.scope, &mut snippet_formatter));
+    let edit = TextEdit::replace(source_range, snippet);
+    item.snippet_edit(ctx.config.snippet_cap?, edit);
     item.set_relevance(crate::CompletionRelevance {
         type_match: Some(crate::item::CompletionRelevanceTypeMatch::CouldUnify),
         ..Default::default()
     });
-    path_ref_match(ctx, path_ctx, &expr.ty(ctx.sema.db), &mut item);
-    item
+    Some(item)
 }
 fn scope_def_to_name(
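
In the renderer above, the same term is rendered twice: once with `label_formatter` (plain lower-snake-case names) for the completion label, and once with `snippet_formatter` (numbered `${n:name}` tabstops plus a trailing `$0` cursor marker) for the inserted text edit. A small sketch of the two outputs for a hypothetical term `Foo::new(<many Bar>)`, using `to_lowercase` in place of `stdx::to_lower_snake_case`:

    fn main() {
        let mut i = 1;
        let mut snippet_formatter = |ty: &str| {
            // numbered tabstop, e.g. "${1:bar}"
            let res = format!("${{{i}:{}}}", ty.to_lowercase());
            i += 1;
            res
        };
        let label_formatter = |ty: &str| ty.to_lowercase();

        let label = format!("Foo::new({})", label_formatter("Bar"));
        let snippet = format!("Foo::new({})$0", snippet_formatter("Bar"));

        assert_eq!(label, "Foo::new(bar)");
        assert_eq!(snippet, "Foo::new(${1:bar})$0");
    }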


@@ -97,6 +97,11 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
             kw unsafe
             kw while
             kw while let
+            sn ifletlocal
+            sn letlocal
+            sn matcharm
+            sn param1
+            sn param2
         "#]],
     );
 }
@@ -238,9 +243,11 @@ fn complete_in_block() {
             kw use
             kw while
             kw while let
+            sn false
             sn macro_rules
             sn pd
             sn ppd
+            sn true
         "#]],
     )
 }
@@ -682,7 +689,9 @@ fn main() {
 }
 "#,
         expect![[r#"
             fn test() fn() -> Zulu
+            sn Zulu
+            sn Zulu::test()
         "#]],
     );
 }


@@ -316,6 +316,15 @@ fn func() {
             bn RecordV {} RecordV { field$1 }$0
             bn TupleV() TupleV($1)$0
             bn UnitV UnitV$0
+            sn ()
+            sn CONST
+            sn Enum::UnitV
+            sn STATIC
+            sn Unit
+            sn false
+            sn func()
+            sn function()
+            sn true
         "#]],
     );
 }
@@ -558,10 +567,12 @@ fn foo() {
 }
 "#,
         expect![[r#"
             bn A A$0
             bn B {} B { r#type$1 }$0
             bn struct {} r#struct { r#type$1 }$0
             bn type r#type$0
+            sn Enum::A
+            sn Enum::r#type
         "#]],
     );
 }
@@ -586,6 +597,7 @@ fn f(t: Ty) {
 "#,
         expect![[r#"
             ct ABC const ABC: Self
+            sn t
         "#]],
     );
@@ -608,6 +620,7 @@ fn f(e: MyEnum) {
         expect![[r#"
             ct A pub const A: i32
             ct B pub const B: i32
+            sn e
         "#]],
     );
@@ -633,6 +646,7 @@ fn f(u: U) {
         expect![[r#"
             ct C pub const C: i32
             ct D pub const D: i32
+            sn u
         "#]],
     );
@@ -652,6 +666,7 @@ fn f(v: u32) {
 "#,
         expect![[r#"
             ct MIN pub const MIN: Self
+            sn v
         "#]],
     );
 }
@@ -763,6 +778,7 @@ fn f(x: EnumAlias<u8>) {
         expect![[r#"
             bn Tuple() Tuple($1)$0
             bn Unit Unit$0
+            sn x
         "#]],
     );
 }


@@ -192,6 +192,8 @@ fn main() {
             bt u32 u32
             kw crate::
             kw self::
+            sn Foo::default()
+            sn foo
         "#]],
     );
     check(


@@ -1230,6 +1230,10 @@ fn here_we_go() {
 "#,
         expect![[r#"
             st Bar (alias Qux) Bar
+            sn ()
+            sn false
+            sn here_we_go()
+            sn true
         "#]],
     );
 }
@@ -1284,6 +1288,10 @@ fn here_we_go() {
             kw unsafe
             kw while
             kw while let
+            sn ()
+            sn false
+            sn here_we_go()
+            sn true
         "#]],
     );
 }


@@ -70,18 +70,27 @@ fn fn_return_type() {
 fn x<'lt, T, const C: usize>() -> $0
 "#,
         expect![[r#"
             en Enum Enum
             ma makro!() macro_rules! makro
             md module
             st Record Record
             st Tuple Tuple
             st Unit Unit
             tt Trait
             tp T
             un Union Union
             bt u32 u32
             kw crate::
             kw self::
+            sn ()
+            sn C
+            sn CONST
+            sn Enum::UnitV
+            sn STATIC
+            sn Unit
+            sn false
+            sn function()
+            sn true
         "#]],
     );
 }
@@ -100,18 +109,27 @@ fn foo() -> B$0 {
 }
 "#,
         expect![[r#"
             en Enum Enum
             ma makro!() macro_rules! makro
             md module
             st Record Record
             st Tuple Tuple
             st Unit Unit
             tt Trait
             un Union Union
             bt u32 u32
             it ()
             kw crate::
             kw self::
+            sn ()
+            sn CONST
+            sn Enum::UnitV
+            sn STATIC
+            sn Unit
+            sn false
+            sn foo()
+            sn function()
+            sn true
         "#]],
     )
 }
@@ -204,18 +222,26 @@ fn f2(x: u64) -> $0 {
 }
 "#,
         expect![[r#"
             en Enum Enum
             ma makro!() macro_rules! makro
             md module
             st Record Record
             st Tuple Tuple
             st Unit Unit
             tt Trait
             un Union Union
             bt u32 u32
             it u64
             kw crate::
             kw self::
+            sn ()
+            sn CONST
+            sn Enum::UnitV
+            sn STATIC
+            sn Unit
+            sn false
+            sn function()
+            sn true
         "#]],
     );
 }
@@ -319,18 +345,27 @@ fn foo<'lt, T, const C: usize>() {
 }
 "#,
         expect![[r#"
             en Enum Enum
             ma makro!() macro_rules! makro
             md module
             st Record Record
             st Tuple Tuple
             st Unit Unit
             tt Trait
             tp T
             un Union Union
             bt u32 u32
             kw crate::
             kw self::
+            sn ()
+            sn C
+            sn CONST
+            sn Enum::UnitV
+            sn STATIC
+            sn Unit
+            sn false
+            sn function()
+            sn true
         "#]],
     );
     check(
@@ -341,14 +376,23 @@ fn foo<'lt, T, const C: usize>() {
 }
 "#,
         expect![[r#"
             en Enum Enum
             ma makro!() macro_rules! makro
             md module
             st Record Record
             st Tuple Tuple
             st Unit Unit
             tt Trait
             un Union Union
+            sn ()
+            sn C
+            sn CONST
+            sn Enum::UnitV
+            sn STATIC
+            sn Unit
+            sn false
+            sn function()
+            sn true
         "#]],
     );
 }


@@ -44,12 +44,14 @@ fn fixes(sema: &Semantics<'_, RootDatabase>, d: &hir::TypedHole) -> Option<Vec<A
         d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
     let scope = sema.scope(d.expr.value.to_node(&root).syntax())?;
-    let ctx = TermSearchCtx { sema, scope: &scope, goal: d.expected.clone(), config: Default::default() };
+    let ctx =
+        TermSearchCtx { sema, scope: &scope, goal: d.expected.clone(), config: Default::default() };
     let paths = term_search(ctx);
     let mut assists = vec![];
+    let mut formatter = |_: &hir::Type| String::from("_");
     for path in paths.into_iter().unique() {
-        let code = path.gen_source_code(&scope);
+        let code = path.gen_source_code(&scope, &mut formatter);
         assists.push(Assist {
             id: AssistId("typed-hole", AssistKind::QuickFix),


@@ -412,6 +412,7 @@ impl flags::AnalysisStats {
                     goal: target_ty,
                     config: hir::term_search::TermSearchConfig {
                         enable_borrowcheck: true,
+                        ..Default::default()
                     },
                 };
                 let found_terms = hir::term_search::term_search(ctx);
@@ -427,9 +428,10 @@ impl flags::AnalysisStats {
                     s.chars().into_iter().filter(|c| !c.is_whitespace()).collect()
                 }
+                let mut formatter = |_: &hir::Type| syntax::ast::make::ext::expr_todo().to_string();
                 let mut syntax_hit_found = false;
                 for term in found_terms {
-                    let generated = term.gen_source_code(&scope);
+                    let generated = term.gen_source_code(&scope, &mut formatter);
                     syntax_hit_found |= trim(&original_text) == trim(&generated);
                     // Validate if type-checks


@@ -569,6 +569,26 @@ impl fmt::Display for NameOrNameRef {
     }
 }
+impl ast::AstNode for NameOrNameRef {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF)
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            SyntaxKind::NAME => NameOrNameRef::Name(ast::Name { syntax }),
+            SyntaxKind::NAME_REF => NameOrNameRef::NameRef(ast::NameRef { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            NameOrNameRef::NameRef(it) => it.syntax(),
+            NameOrNameRef::Name(it) => it.syntax(),
+        }
+    }
+}
 impl NameOrNameRef {
     pub fn text(&self) -> TokenText<'_> {
         match self {