Working resolve completion imports prototype

This commit is contained in:
Kirill Bulatov 2020-11-30 22:28:19 +02:00
parent 48acd7d455
commit 6d2d279389
9 changed files with 78 additions and 46 deletions

View file

@@ -257,14 +257,18 @@ impl CompletionItem {
pub fn ref_match(&self) -> Option<(Mutability, CompletionScore)> { pub fn ref_match(&self) -> Option<(Mutability, CompletionScore)> {
self.ref_match self.ref_match
} }
pub fn import_to_add(&self) -> Option<&ImportToAdd> {
self.import_to_add.as_ref()
}
} }
/// An extra import to add after the completion is applied. /// An extra import to add after the completion is applied.
#[derive(Clone)] #[derive(Debug, Clone)]
pub(crate) struct ImportToAdd { pub struct ImportToAdd {
pub(crate) import_path: ModPath, pub import_path: ModPath,
pub(crate) import_scope: ImportScope, pub import_scope: ImportScope,
pub(crate) merge_behaviour: Option<MergeBehaviour>, pub merge_behaviour: Option<MergeBehaviour>,
} }
/// A helper to make `CompletionItem`s. /// A helper to make `CompletionItem`s.

View file

@@ -18,7 +18,7 @@ use crate::{completions::Completions, context::CompletionContext, item::Completi
pub use crate::{ pub use crate::{
config::CompletionConfig, config::CompletionConfig,
item::{CompletionItem, CompletionItemKind, CompletionScore, InsertTextFormat}, item::{CompletionItem, CompletionItemKind, CompletionScore, ImportToAdd, InsertTextFormat},
}; };
//FIXME: split the following feature into fine-grained features. //FIXME: split the following feature into fine-grained features.

View file

@@ -80,7 +80,8 @@ pub use crate::{
}, },
}; };
pub use completion::{ pub use completion::{
CompletionConfig, CompletionItem, CompletionItemKind, CompletionScore, InsertTextFormat, CompletionConfig, CompletionItem, CompletionItemKind, CompletionScore, ImportToAdd,
InsertTextFormat,
}; };
pub use ide_db::{ pub use ide_db::{
call_info::CallInfo, call_info::CallInfo,

View file

@@ -0,0 +1,2 @@
#[derive(Debug, Default)]
pub struct CompletionResolveActions {}

View file

@@ -7,7 +7,7 @@ use std::{sync::Arc, time::Instant};
use crossbeam_channel::{unbounded, Receiver, Sender}; use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle; use flycheck::FlycheckHandle;
use ide::{Analysis, AnalysisHost, Change, FileId}; use ide::{Analysis, AnalysisHost, Change, FileId, ImportToAdd};
use ide_db::base_db::{CrateId, VfsPath}; use ide_db::base_db::{CrateId, VfsPath};
use lsp_types::{SemanticTokens, Url}; use lsp_types::{SemanticTokens, Url};
use parking_lot::{Mutex, RwLock}; use parking_lot::{Mutex, RwLock};
@@ -69,6 +69,7 @@ pub(crate) struct GlobalState {
pub(crate) config: Config, pub(crate) config: Config,
pub(crate) analysis_host: AnalysisHost, pub(crate) analysis_host: AnalysisHost,
pub(crate) diagnostics: DiagnosticCollection, pub(crate) diagnostics: DiagnosticCollection,
pub(crate) additional_imports: FxHashMap<String, ImportToAdd>,
pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>, pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>, pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>, pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
@@ -121,6 +122,7 @@ impl GlobalState {
config, config,
analysis_host, analysis_host,
diagnostics: Default::default(), diagnostics: Default::default(),
additional_imports: FxHashMap::default(),
mem_docs: FxHashMap::default(), mem_docs: FxHashMap::default(),
semantic_tokens_cache: Arc::new(Default::default()), semantic_tokens_cache: Arc::new(Default::default()),
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))), vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),

View file

@@ -11,6 +11,7 @@ use ide::{
FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,
RangeInfo, Runnable, RunnableKind, SearchScope, TextEdit, RangeInfo, Runnable, RunnableKind, SearchScope, TextEdit,
}; };
use ide_db::helpers::{insert_use, mod_path_to_ast};
use itertools::Itertools; use itertools::Itertools;
use lsp_server::ErrorCode; use lsp_server::ErrorCode;
use lsp_types::{ use lsp_types::{
@@ -24,6 +25,7 @@ use lsp_types::{
SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit, SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
}; };
use project_model::TargetKind; use project_model::TargetKind;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::to_value; use serde_json::to_value;
use stdx::{format_to, split_once}; use stdx::{format_to, split_once};
@@ -535,10 +537,11 @@ pub(crate) fn handle_runnables(
} }
pub(crate) fn handle_completion( pub(crate) fn handle_completion(
snap: GlobalStateSnapshot, global_state: &mut GlobalState,
params: lsp_types::CompletionParams, params: lsp_types::CompletionParams,
) -> Result<Option<lsp_types::CompletionResponse>> { ) -> Result<Option<lsp_types::CompletionResponse>> {
let _p = profile::span("handle_completion"); let _p = profile::span("handle_completion");
let snap = global_state.snapshot();
let position = from_proto::file_position(&snap, params.text_document_position)?; let position = from_proto::file_position(&snap, params.text_document_position)?;
let completion_triggered_after_single_colon = { let completion_triggered_after_single_colon = {
let mut res = false; let mut res = false;
@@ -568,22 +571,68 @@ pub(crate) fn handle_completion(
}; };
let line_index = snap.analysis.file_line_index(position.file_id)?; let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id); let line_endings = snap.file_line_endings(position.file_id);
let mut additional_imports = FxHashMap::default();
let items: Vec<CompletionItem> = items let items: Vec<CompletionItem> = items
.into_iter() .into_iter()
.flat_map(|item| to_proto::completion_item(&line_index, line_endings, item)) .flat_map(|item| {
let import_to_add = item.import_to_add().cloned();
let new_completion_items = to_proto::completion_item(&line_index, line_endings, item);
if let Some(import_to_add) = import_to_add {
for new_item in &new_completion_items {
additional_imports.insert(new_item.label.clone(), import_to_add.clone());
}
}
new_completion_items
})
.map(|mut item| {
item.data = Some(position.file_id.0.into());
item
})
.collect(); .collect();
global_state.additional_imports = additional_imports;
let completion_list = lsp_types::CompletionList { is_incomplete: true, items }; let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
Ok(Some(completion_list.into())) Ok(Some(completion_list.into()))
} }
pub(crate) fn handle_resolve_completion( pub(crate) fn handle_resolve_completion(
snap: GlobalStateSnapshot, global_state: &mut GlobalState,
original_completion: CompletionItem, mut original_completion: lsp_types::CompletionItem,
) -> Result<CompletionItem> { ) -> Result<lsp_types::CompletionItem> {
// TODO kb slow, takes over 130ms
let _p = profile::span("handle_resolve_completion"); let _p = profile::span("handle_resolve_completion");
// TODO kb use the field to detect it's for autocompletion and do the insert logic
let _data = dbg!(original_completion).data; if let Some(import_data) =
global_state.additional_imports.get(dbg!(original_completion.label.as_str()))
{
let rewriter = insert_use::insert_use(
&import_data.import_scope,
mod_path_to_ast(&import_data.import_path),
import_data.merge_behaviour,
);
if let Some((old_ast, file_id)) =
// TODO kb for file_id, better use &str and then cast to u32?
rewriter
.rewrite_root()
.zip(original_completion.data.as_ref().and_then(|value| Some(value.as_u64()? as u32)))
{
let snap = global_state.snapshot();
let mut import_insert = TextEdit::builder();
algo::diff(&old_ast, &rewriter.rewrite(&old_ast)).into_text_edit(&mut import_insert);
let line_index = snap.analysis.file_line_index(FileId(file_id))?;
let line_endings = snap.file_line_endings(FileId(file_id));
let text_edit = import_insert.finish();
let mut new_edits = original_completion.additional_text_edits.unwrap_or_default();
for indel in text_edit {
new_edits.push(to_proto::text_edit(&line_index, line_endings, indel));
}
original_completion.additional_text_edits = Some(new_edits);
}
}
Ok(original_completion) Ok(original_completion)
} }

View file

@@ -36,6 +36,7 @@ mod thread_pool;
mod document; mod document;
pub mod lsp_ext; pub mod lsp_ext;
pub mod config; pub mod config;
mod completions;
use ide::AnalysisHost; use ide::AnalysisHost;
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;

View file

@@ -436,6 +436,10 @@ impl GlobalState {
handlers::handle_matching_brace(s.snapshot(), p) handlers::handle_matching_brace(s.snapshot(), p)
})? })?
.on_sync::<lsp_ext::MemoryUsage>(|s, p| handlers::handle_memory_usage(s, p))? .on_sync::<lsp_ext::MemoryUsage>(|s, p| handlers::handle_memory_usage(s, p))?
.on_sync::<lsp_types::request::Completion>(handlers::handle_completion)?
.on_sync::<lsp_types::request::ResolveCompletionItem>(
handlers::handle_resolve_completion,
)?
.on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status) .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
.on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree) .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
.on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro) .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
@@ -453,8 +457,6 @@ impl GlobalState {
.on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition) .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
.on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation) .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
.on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition) .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
.on::<lsp_types::request::Completion>(handlers::handle_completion)
.on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_resolve_completion)
.on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens) .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
.on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve) .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
.on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range) .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)

View file

@@ -231,35 +231,6 @@ pub(crate) fn completion_item(
None => vec![res], None => vec![res],
}; };
// TODO kb need to get this logic away and store for the later resolve request
/*
let mut label = self.label;
let mut lookup = self.lookup;
let mut insert_text = self.insert_text;
let mut text_edits = TextEdit::builder();
if let Some((import_path, import_scope, merge_behaviour)) = completion_item.import_data.as_ref() {
let import = mod_path_to_ast(&import_path);
let mut import_path_without_last_segment = import_path;
let _ = import_path_without_last_segment.segments.pop();
if !import_path_without_last_segment.segments.is_empty() {
if lookup.is_none() {
lookup = Some(label.clone());
}
if insert_text.is_none() {
insert_text = Some(label.clone());
}
label = format!("{}::{}", import_path_without_last_segment, label);
}
let rewriter = insert_use(&import_scope, import, merge_behaviour);
if let Some(old_ast) = rewriter.rewrite_root() {
algo::diff(&old_ast, &rewriter.rewrite(&old_ast)).into_text_edit(&mut text_edits);
}
}
*/
for mut r in all_results.iter_mut() { for mut r in all_results.iter_mut() {
r.insert_text_format = Some(insert_text_format(completion_item.insert_text_format())); r.insert_text_format = Some(insert_text_format(completion_item.insert_text_format()));
} }