feature: Add an UnindexedProject notification and a corresponding setting.

David Barsky 2023-11-07 16:26:32 -05:00
parent ca44e1ee9f
commit 6330b028b3
14 changed files with 262 additions and 8 deletions

View file

@@ -359,7 +359,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
         &'a self,
         ctor: &'a rustc_pattern_analysis::constructor::Constructor<Self>,
         ty: &'a Self::Ty,
-    ) -> impl Iterator<Item = Self::Ty> + ExactSizeIterator + Captures<'a> {
+    ) -> impl ExactSizeIterator<Item = Self::Ty> + Captures<'a> {
         let single = |ty| smallvec![ty];
         let tys: SmallVec<[_; 2]> = match ctor {
             Struct | Variant(_) | UnionField => match ty.kind(Interner) {

View file

@@ -478,6 +478,9 @@ config_data! {
         /// Whether to show `can't find Cargo.toml` error message.
         notifications_cargoTomlNotFound: bool = "true",
+        /// Whether to send an UnindexedProject notification to the client.
+        notifications_unindexedProject: bool = "false",
+
         /// How many worker threads in the main loop. The default `null` means to pick automatically.
         numThreads: Option<usize> = "null",
@@ -745,6 +748,7 @@ pub enum FilesWatcher {
 #[derive(Debug, Clone)]
 pub struct NotificationsConfig {
     pub cargo_toml_not_found: bool,
+    pub unindexed_project: bool,
 }
 
 #[derive(Debug, Clone)]
@@ -1220,7 +1224,10 @@ impl Config {
     }
 
     pub fn notifications(&self) -> NotificationsConfig {
-        NotificationsConfig { cargo_toml_not_found: self.data.notifications_cargoTomlNotFound }
+        NotificationsConfig {
+            cargo_toml_not_found: self.data.notifications_cargoTomlNotFound,
+            unindexed_project: self.data.notifications_unindexedProject,
+        }
     }
 
     pub fn cargo_autoreload(&self) -> bool {

View file

@@ -33,7 +33,7 @@ use crate::{
     mem_docs::MemDocs,
     op_queue::OpQueue,
     reload,
-    task_pool::TaskPool,
+    task_pool::{TaskPool, TaskQueue},
 };
 
 // Enforces drop order
@@ -126,6 +126,17 @@ pub(crate) struct GlobalState {
         OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
     pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>,
     pub(crate) prime_caches_queue: OpQueue,
+
+    /// A deferred task queue.
+    ///
+    /// This queue is used for doing database-dependent work inside of sync
+    /// handlers, as accessing the database may block latency-sensitive
+    /// interactions and should be moved away from the main thread.
+    ///
+    /// For certain features, such as [`lsp_ext::UnindexedProjectParams`],
+    /// this queue should run only *after* [`GlobalState::process_changes`] has
+    /// been called.
+    pub(crate) deferred_task_queue: TaskQueue,
 }
 
 /// An immutable snapshot of the world's state at a point in time.
@@ -165,6 +176,11 @@ impl GlobalState {
             Handle { handle, receiver }
         };
 
+        let task_queue = {
+            let (sender, receiver) = unbounded();
+            TaskQueue { sender, receiver }
+        };
+
         let mut analysis_host = AnalysisHost::new(config.lru_parse_query_capacity());
         if let Some(capacities) = config.lru_query_capacities() {
             analysis_host.update_lru_capacities(capacities);
@@ -208,6 +224,8 @@ impl GlobalState {
             fetch_proc_macros_queue: OpQueue::default(),
             prime_caches_queue: OpQueue::default(),
+            deferred_task_queue: task_queue,
         };
 
         // Apply any required database inputs from the config.
         this.update_configuration(config);

View file

@@ -70,7 +70,15 @@ pub(crate) fn handle_did_open_text_document(
         if already_exists {
            tracing::error!("duplicate DidOpenTextDocument: {}", path);
         }
         state.vfs.write().0.set_file_contents(path, Some(params.text_document.text.into_bytes()));
+
+        if state.config.notifications().unindexed_project {
+            tracing::debug!("queuing task");
+            let _ = state
+                .deferred_task_queue
+                .sender
+                .send(crate::main_loop::QueuedTask::CheckIfIndexed(params.text_document.uri));
+        }
     }
     Ok(())
 }

View file

@@ -703,3 +703,16 @@ pub struct CompletionImport {
 pub struct ClientCommandOptions {
     pub commands: Vec<String>,
 }
+
+pub enum UnindexedProject {}
+
+impl Notification for UnindexedProject {
+    type Params = UnindexedProjectParams;
+    const METHOD: &'static str = "rust-analyzer/unindexedProject";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct UnindexedProjectParams {
+    pub text_documents: Vec<TextDocumentIdentifier>,
+}

View file

@@ -1,5 +1,6 @@
 //! The main loop of `rust-analyzer` responsible for dispatching LSP
 //! requests/replies and notifications back to the client.
+use crate::lsp::ext;
 use std::{
     fmt,
     time::{Duration, Instant},
@@ -56,6 +57,7 @@ pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> {
 enum Event {
     Lsp(lsp_server::Message),
     Task(Task),
+    QueuedTask(QueuedTask),
     Vfs(vfs::loader::Message),
     Flycheck(flycheck::Message),
 }
@@ -67,13 +69,20 @@ impl fmt::Display for Event {
             Event::Task(_) => write!(f, "Event::Task"),
             Event::Vfs(_) => write!(f, "Event::Vfs"),
             Event::Flycheck(_) => write!(f, "Event::Flycheck"),
+            Event::QueuedTask(_) => write!(f, "Event::QueuedTask"),
         }
     }
 }
 
+#[derive(Debug)]
+pub(crate) enum QueuedTask {
+    CheckIfIndexed(lsp_types::Url),
+}
+
 #[derive(Debug)]
 pub(crate) enum Task {
     Response(lsp_server::Response),
+    ClientNotification(ext::UnindexedProjectParams),
     Retry(lsp_server::Request),
     Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
     PrimeCaches(PrimeCachesProgress),
@@ -115,6 +124,7 @@ impl fmt::Debug for Event {
         match self {
             Event::Lsp(it) => fmt::Debug::fmt(it, f),
             Event::Task(it) => fmt::Debug::fmt(it, f),
+            Event::QueuedTask(it) => fmt::Debug::fmt(it, f),
             Event::Vfs(it) => fmt::Debug::fmt(it, f),
             Event::Flycheck(it) => fmt::Debug::fmt(it, f),
         }
@@ -193,6 +203,9 @@ impl GlobalState {
             recv(self.task_pool.receiver) -> task =>
                 Some(Event::Task(task.unwrap())),
 
+            recv(self.deferred_task_queue.receiver) -> task =>
+                Some(Event::QueuedTask(task.unwrap())),
+
             recv(self.fmt_pool.receiver) -> task =>
                 Some(Event::Task(task.unwrap())),
@@ -211,7 +224,7 @@
             .entered();
         let event_dbg_msg = format!("{event:?}");
-        tracing::debug!("{:?} handle_event({})", loop_start, event_dbg_msg);
+        tracing::debug!(?loop_start, ?event, "handle_event");
 
         if tracing::enabled!(tracing::Level::INFO) {
             let task_queue_len = self.task_pool.handle.len();
             if task_queue_len > 0 {
@@ -226,6 +239,16 @@ impl GlobalState {
                 lsp_server::Message::Notification(not) => self.on_notification(not)?,
                 lsp_server::Message::Response(resp) => self.complete_request(resp),
             },
+            Event::QueuedTask(task) => {
+                let _p =
+                    tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/queued_task")
+                        .entered();
+                self.handle_queued_task(task);
+                // Coalesce multiple task events into one loop turn
+                while let Ok(task) = self.deferred_task_queue.receiver.try_recv() {
+                    self.handle_queued_task(task);
+                }
+            }
             Event::Task(task) => {
                 let _p = tracing::span!(tracing::Level::INFO, "GlobalState::handle_event/task")
                     .entered();
@@ -498,6 +521,9 @@ impl GlobalState {
     fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) {
         match task {
            Task::Response(response) => self.respond(response),
+            Task::ClientNotification(params) => {
+                self.send_notification::<lsp_ext::UnindexedProject>(params)
+            }
             // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work.
             Task::Retry(req) if !self.is_completed(&req) => self.on_request(req),
             Task::Retry(_) => (),
@@ -638,6 +664,31 @@ impl GlobalState {
         }
     }
 
+    fn handle_queued_task(&mut self, task: QueuedTask) {
+        match task {
+            QueuedTask::CheckIfIndexed(uri) => {
+                let snap = self.snapshot();
+
+                self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
+                    let _p = tracing::span!(tracing::Level::INFO, "GlobalState::check_if_indexed")
+                        .entered();
+                    tracing::debug!(?uri, "handling uri");
+                    let id = from_proto::file_id(&snap, &uri).expect("unable to get FileId");
+                    if let Ok(crates) = &snap.analysis.crates_for(id) {
+                        if crates.is_empty() {
+                            let params = ext::UnindexedProjectParams {
+                                text_documents: vec![lsp_types::TextDocumentIdentifier { uri }],
+                            };
+                            sender.send(Task::ClientNotification(params)).unwrap();
+                        } else {
+                            tracing::debug!(?uri, "is indexed");
+                        }
+                    }
+                });
+            }
+        }
+    }
+
     fn handle_flycheck_msg(&mut self, message: flycheck::Message) {
         match message {
             flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {

View file

@@ -4,6 +4,8 @@
 use crossbeam_channel::Sender;
 use stdx::thread::{Pool, ThreadIntent};
 
+use crate::main_loop::QueuedTask;
+
 pub(crate) struct TaskPool<T> {
     sender: Sender<T>,
     pool: Pool,
@@ -40,3 +42,12 @@ impl<T> TaskPool<T> {
         self.pool.len()
     }
 }
+
+/// `TaskQueue`, like its name suggests, queues tasks.
+///
+/// This should only be used if a task must run after [`GlobalState::process_changes`]
+/// has been called.
+pub(crate) struct TaskQueue {
+    pub(crate) sender: crossbeam_channel::Sender<QueuedTask>,
+    pub(crate) receiver: crossbeam_channel::Receiver<QueuedTask>,
+}
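
An aside, not part of the commit: `TaskQueue` is just an unbounded crossbeam channel whose receiver the main loop polls next to its other event sources, draining any backlog with `try_recv` in a single turn. A minimal sketch of that queue-and-drain pattern (the `QueuedTask` variant and URIs here are illustrative stand-ins, assuming only the `crossbeam-channel` crate):

```rust
// Standalone sketch of the queue-and-drain pattern used by the deferred task queue.
use crossbeam_channel::{unbounded, Receiver, Sender};

#[derive(Debug)]
enum QueuedTask {
    // Illustrative stand-in for rust-analyzer's `main_loop::QueuedTask::CheckIfIndexed(Url)`.
    CheckIfIndexed(String),
}

struct TaskQueue {
    sender: Sender<QueuedTask>,
    receiver: Receiver<QueuedTask>,
}

fn main() {
    let (sender, receiver) = unbounded();
    let queue = TaskQueue { sender, receiver };

    // A sync handler enqueues work instead of touching the database directly.
    queue.sender.send(QueuedTask::CheckIfIndexed("file:///a.rs".into())).unwrap();
    queue.sender.send(QueuedTask::CheckIfIndexed("file:///b.rs".into())).unwrap();

    // The event loop later drains whatever has accumulated in one turn,
    // mirroring the `try_recv` coalescing loop in `main_loop.rs` above.
    while let Ok(task) = queue.receiver.try_recv() {
        println!("handling {task:?}");
    }
}
```

The design keeps sync notification handlers cheap: they only push onto the channel, and the database-dependent work happens later on a worker thread, after `process_changes` has run.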

View file

@@ -30,7 +30,7 @@ use lsp_types::{
     PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
     TextDocumentPositionParams, WorkDoneProgressParams,
 };
-use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams};
+use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams, UnindexedProject};
 use serde_json::json;
 use stdx::format_to_acc;
 use test_utils::skip_slow_tests;
@@ -587,6 +587,66 @@ fn main() {{}}
     );
 }
 
+#[test]
+fn test_opening_a_file_outside_of_indexed_workspace() {
+    if skip_slow_tests() {
+        return;
+    }
+
+    let tmp_dir = TestDir::new();
+    let path = tmp_dir.path();
+
+    let project = json!({
+        "roots": [path],
+        "crates": [ {
+            "root_module": path.join("src/crate_one/lib.rs"),
+            "deps": [],
+            "edition": "2015",
+            "cfg": [ "cfg_atom_1", "feature=\"cfg_1\""],
+        } ]
+    });
+
+    let code = format!(
+        r#"
+//- /rust-project.json
+{project}
+
+//- /src/crate_one/lib.rs
+mod bar;
+
+fn main() {{}}
+"#,
+    );
+
+    let server = Project::with_fixture(&code)
+        .tmp_dir(tmp_dir)
+        .with_config(serde_json::json!({
+            "notifications": {
+                "unindexedProject": true
+            },
+        }))
+        .server()
+        .wait_until_workspace_is_loaded();
+
+    let uri = server.doc_id("src/crate_two/lib.rs").uri;
+    server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {
+        text_document: TextDocumentItem {
+            uri: uri.clone(),
+            language_id: "rust".to_string(),
+            version: 0,
+            text: "/// Docs\nfn foo() {}".to_string(),
+        },
+    });
+
+    let expected = json!({
+        "textDocuments": [
+            {
+                "uri": uri
+            }
+        ]
+    });
+    server.expect_notification::<UnindexedProject>(expected);
+}
+
 #[test]
 fn diagnostics_dont_block_typing() {
     if skip_slow_tests() {

View file

@@ -9,7 +9,7 @@ use std::{
 use crossbeam_channel::{after, select, Receiver};
 use lsp_server::{Connection, Message, Notification, Request};
 use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url};
-use rust_analyzer::{config::Config, lsp, main_loop, tracing};
+use rust_analyzer::{config::Config, lsp, main_loop};
 use serde::Serialize;
 use serde_json::{json, to_string_pretty, Value};
 use test_utils::FixtureWithProjectMeta;
@@ -91,7 +91,7 @@ impl Project<'_> {
         static INIT: Once = Once::new();
         INIT.call_once(|| {
-            let _ = tracing::Config {
+            let _ = rust_analyzer::tracing::Config {
                 writer: TestWriter::default(),
                 // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually
                 // useful information in there for debugging.
@@ -214,6 +214,40 @@ impl Server {
         self.send_notification(r)
     }
 
+    pub(crate) fn expect_notification<N>(&self, expected: Value)
+    where
+        N: lsp_types::notification::Notification,
+        N::Params: Serialize,
+    {
+        while let Some(Message::Notification(actual)) =
+            recv_timeout(&self.client.receiver).unwrap_or_else(|_| panic!("timed out"))
+        {
+            if actual.method == N::METHOD {
+                let actual = actual
+                    .clone()
+                    .extract::<Value>(N::METHOD)
+                    .expect("was not able to extract notification");
+                tracing::debug!(?actual, "got notification");
+                if let Some((expected_part, actual_part)) = find_mismatch(&expected, &actual) {
+                    panic!(
+                        "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+                        to_string_pretty(&expected).unwrap(),
+                        to_string_pretty(&actual).unwrap(),
+                        to_string_pretty(expected_part).unwrap(),
+                        to_string_pretty(actual_part).unwrap(),
+                    );
+                } else {
+                    tracing::debug!("successfully matched notification");
+                    return;
+                }
+            } else {
+                continue;
+            }
+        }
+        panic!("never got expected notification");
+    }
+
     #[track_caller]
     pub(crate) fn request<R>(&self, params: R::Params, expected_resp: Value)
     where

View file

@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: c0bf3acd7a9e72b2
+lsp/ext.rs hash: 8be79cc3b7f10ad7
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
@@ -445,6 +445,25 @@ Reloads project information (that is, re-executes `cargo metadata`).
 
 Rebuilds build scripts and proc-macros, and runs the build scripts to reseed the build data.
 
+## Unindexed Project
+
+**Experimental Client Capability:** `{ "unindexedProject": boolean }`
+
+**Method:** `rust-analyzer/unindexedProject`
+
+**Notification:**
+
+```typescript
+interface UnindexedProjectParams {
+    /// A list of documents that rust-analyzer has determined are not indexed.
+    textDocuments: lc.TextDocumentIdentifier[]
+}
+```
+
+This notification is sent from the server to the client. The client is expected
+to determine the appropriate owners of `textDocuments` and update `linkedProjects`
+if an owner can be determined successfully.
+
 ## Server Status
 
 **Experimental Client Capability:** `{ "serverStatusNotification": boolean }`
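
An aside, not part of the commit: to make the `Unindexed Project` extension documented above concrete, here is a sketch of what opting in and receiving the notification might look like on the wire, built with `serde_json` (both values are illustrative; the URI is hypothetical):

```rust
// Standalone sketch (assumes the `serde_json` crate).
use serde_json::json;

fn main() {
    // What a client would advertise during `initialize` to signal support for
    // the extension, per the "Experimental Client Capability" line above.
    let client_capabilities = json!({
        "experimental": { "unindexedProject": true }
    });

    // The full JSON-RPC message the server would then send for an unindexed file.
    let notification = json!({
        "jsonrpc": "2.0",
        "method": "rust-analyzer/unindexedProject",
        "params": {
            "textDocuments": [
                { "uri": "file:///home/user/scratch/src/lib.rs" }
            ]
        }
    });

    // `{:#}` pretty-prints a `serde_json::Value`.
    println!("{client_capabilities:#}\n{notification:#}");
}
```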

View file

@@ -745,6 +745,11 @@ Sets the LRU capacity of the specified queries.
 --
 Whether to show `can't find Cargo.toml` error message.
 --
+[[rust-analyzer.notifications.unindexedProject]]rust-analyzer.notifications.unindexedProject (default: `false`)::
++
+--
+Whether to send an UnindexedProject notification to the client.
+--
 [[rust-analyzer.numThreads]]rust-analyzer.numThreads (default: `null`)::
 +
 --

View file

@@ -1468,6 +1468,11 @@
                     "default": true,
                     "type": "boolean"
                 },
+                "rust-analyzer.notifications.unindexedProject": {
+                    "markdownDescription": "Whether to send an UnindexedProject notification to the client.",
+                    "default": false,
+                    "type": "boolean"
+                },
                 "rust-analyzer.numThreads": {
                     "markdownDescription": "How many worker threads in the main loop. The default `null` means to pick automatically.",
                     "default": null,

View file

@@ -234,6 +234,23 @@ export class Ctx implements RustAnalyzerExtensionApi {
                     this.outputChannel!.show();
                 }),
             );
+            this.pushClientCleanup(
+                this._client.onNotification(ra.unindexedProject, async (params) => {
+                    if (this.config.discoverProjectRunner) {
+                        const command = `${this.config.discoverProjectRunner}.discoverWorkspaceCommand`;
+                        log.info(`running command: ${command}`);
+                        const uris = params.textDocuments.map((doc) =>
+                            vscode.Uri.parse(doc.uri, true),
+                        );
+                        const projects: JsonProject[] = await vscode.commands.executeCommand(
+                            command,
+                            uris,
+                        );
+                        this.setWorkspaces(projects);
+                        await this.notifyRustAnalyzer();
+                    }
+                }),
+            );
         }
         return this._client;
     }

View file

@@ -220,3 +220,9 @@ export type RecursiveMemoryLayoutNode = {
 export type RecursiveMemoryLayout = {
     nodes: RecursiveMemoryLayoutNode[];
 };
+
+export const unindexedProject = new lc.NotificationType<UnindexedProjectParams>(
+    "rust-analyzer/unindexedProject",
+);
+
+export type UnindexedProjectParams = { textDocuments: lc.TextDocumentIdentifier[] };