Add minimal support for cargo scripts

This commit is contained in:
hkalbasi 2023-08-14 01:30:59 +03:30 committed by Lukas Wirth
parent 50bdeaad07
commit 2f828073aa
8 changed files with 238 additions and 16 deletions

View file

@ -305,6 +305,10 @@ impl CargoWorkspace {
.collect(),
);
}
if cargo_toml.extension().is_some_and(|x| x == "rs") {
// TODO: enable `+nightly` for cargo scripts
other_options.push("-Zscript".to_owned());
}
meta.other_options(other_options);
// FIXME: Fetching metadata is a slow process, as it might require

View file

@ -2,7 +2,7 @@
//! metadata` or `rust-project.json`) into representation stored in the salsa
//! database -- `CrateGraph`.
use std::{collections::VecDeque, fmt, fs, iter, sync};
use std::{collections::VecDeque, fmt, fs, io::BufRead, iter, sync};
use anyhow::{format_err, Context};
use base_db::{
@ -115,9 +115,55 @@ pub enum ProjectWorkspace {
target_layout: TargetLayoutLoadResult,
/// A set of cfg overrides for the files.
cfg_overrides: CfgOverrides,
/// Is this file a cargo script file?
cargo_script: Option<CargoWorkspace>,
},
}
/// Tracks the cargo toml parts in cargo scripts, to detect if they
/// changed and reload workspace in that case.
///
/// Maps each tracked script's path to the last-seen text of its embedded
/// cargo manifest block (the empty string when no block was found).
pub struct CargoScriptTomls(pub FxHashMap<AbsPathBuf, String>);
impl CargoScriptTomls {
    /// Extracts the embedded manifest from a cargo script file: the lines
    /// between a `//! ```cargo` opener and a closing `//! ````, kept verbatim
    /// (including the `//!` prefix), one per line.
    ///
    /// Returns `None` if the file cannot be read or no closed manifest block
    /// exists. The extracted text is only used for change detection, not for
    /// parsing, so the `//!` prefixes are harmless.
    fn extract_toml_part(p: &AbsPath) -> Option<String> {
        let mut r = String::new();
        let f = std::fs::File::open(p).ok()?;
        let f = std::io::BufReader::new(f);
        let mut started = false;
        for line in f.lines() {
            let line = line.ok()?;
            if started {
                if line.trim() == "//! ```" {
                    return Some(r);
                }
                r += &line;
                // Keep line boundaries in the snapshot; without a separator,
                // e.g. `//! a` + `//! b` compares equal to `//! ab` and a real
                // manifest change would be missed by `need_reload`.
                r.push('\n');
            } else if line.trim() == "//! ```cargo" {
                started = true;
            }
        }
        None
    }

    /// Starts tracking `p`, remembering its current manifest part
    /// (empty if none could be extracted).
    pub fn track_file(&mut self, p: AbsPathBuf) {
        let toml = CargoScriptTomls::extract_toml_part(&p).unwrap_or_default();
        self.0.insert(p, toml);
    }

    /// Returns `true` when the manifest part of an already-tracked file has
    /// changed since the last check, updating the stored snapshot as a side
    /// effect. Untracked files never request a reload.
    pub fn need_reload(&mut self, p: &AbsPath) -> bool {
        let Some(prev) = self.0.get_mut(p) else {
            return false; // File is not tracked
        };
        let next = CargoScriptTomls::extract_toml_part(p).unwrap_or_default();
        if *prev == next {
            return false;
        }
        *prev = next;
        true
    }
}
impl fmt::Debug for ProjectWorkspace {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Make sure this isn't too verbose.
@ -174,10 +220,12 @@ impl fmt::Debug for ProjectWorkspace {
toolchain,
target_layout,
cfg_overrides,
cargo_script,
} => f
.debug_struct("DetachedFiles")
.field("n_files", &files.len())
.field("sysroot", &sysroot.is_ok())
.field("cargo_script", &cargo_script.is_some())
.field("n_rustc_cfg", &rustc_cfg.len())
.field("toolchain", &toolchain)
.field("data_layout", &target_layout)
@ -431,6 +479,7 @@ impl ProjectWorkspace {
pub fn load_detached_files(
detached_files: Vec<AbsPathBuf>,
config: &CargoConfig,
cargo_script_tomls: &mut CargoScriptTomls,
) -> anyhow::Result<ProjectWorkspace> {
let dir = detached_files
.first()
@ -469,6 +518,23 @@ impl ProjectWorkspace {
None,
&config.extra_env,
);
let cargo_toml = ManifestPath::try_from(detached_files[0].clone()).unwrap();
let meta = CargoWorkspace::fetch_metadata(
&cargo_toml,
cargo_toml.parent(),
config,
sysroot_ref,
&|_| (),
)
.with_context(|| {
format!("Failed to read Cargo metadata from Cargo.toml file {cargo_toml}")
})?;
let cargo = CargoWorkspace::new(meta);
for file in &detached_files {
cargo_script_tomls.track_file(file.clone());
}
Ok(ProjectWorkspace::DetachedFiles {
files: detached_files,
sysroot,
@ -476,6 +542,7 @@ impl ProjectWorkspace {
toolchain,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
cargo_script: Some(cargo),
})
}
@ -788,14 +855,27 @@ impl ProjectWorkspace {
toolchain: _,
target_layout: _,
cfg_overrides,
cargo_script,
} => (
detached_files_to_crate_graph(
rustc_cfg.clone(),
load,
files,
sysroot.as_ref().ok(),
cfg_overrides,
),
if let Some(cargo) = cargo_script {
cargo_to_crate_graph(
load,
None,
cargo,
sysroot.as_ref().ok(),
rustc_cfg.clone(),
cfg_overrides,
&WorkspaceBuildScripts::default(),
)
} else {
detached_files_to_crate_graph(
rustc_cfg.clone(),
load,
files,
sysroot.as_ref().ok(),
cfg_overrides,
)
},
sysroot,
),
};
@ -873,6 +953,7 @@ impl ProjectWorkspace {
files,
sysroot,
rustc_cfg,
cargo_script,
toolchain,
target_layout,
cfg_overrides,
@ -881,6 +962,7 @@ impl ProjectWorkspace {
files: o_files,
sysroot: o_sysroot,
rustc_cfg: o_rustc_cfg,
cargo_script: o_cargo_script,
toolchain: o_toolchain,
target_layout: o_target_layout,
cfg_overrides: o_cfg_overrides,
@ -892,6 +974,7 @@ impl ProjectWorkspace {
&& toolchain == o_toolchain
&& target_layout == o_target_layout
&& cfg_overrides == o_cfg_overrides
&& cargo_script == o_cargo_script
}
_ => false,
}

View file

@ -82,6 +82,7 @@ impl Tester {
toolchain: None,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: Default::default(),
cargo_script: None,
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: false,

View file

@ -18,7 +18,9 @@ use parking_lot::{
RwLockWriteGuard,
};
use proc_macro_api::ProcMacroServer;
use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
use project_model::{
CargoScriptTomls, CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts,
};
use rustc_hash::{FxHashMap, FxHashSet};
use triomphe::Arc;
use vfs::{AnchoredPathBuf, ChangedFile, Vfs};
@ -144,6 +146,7 @@ pub(crate) struct GlobalState {
/// this queue should run only *after* [`GlobalState::process_changes`] has
/// been called.
pub(crate) deferred_task_queue: TaskQueue,
pub(crate) cargo_script_tomls: Arc<Mutex<CargoScriptTomls>>,
}
/// An immutable snapshot of the world's state at a point in time.
@ -240,6 +243,7 @@ impl GlobalState {
prime_caches_queue: OpQueue::default(),
deferred_task_queue: task_queue,
cargo_script_tomls: Arc::new(Mutex::new(CargoScriptTomls(FxHashMap::default()))),
};
// Apply any required database inputs from the config.
this.update_configuration(config);
@ -322,7 +326,11 @@ impl GlobalState {
if file.is_created_or_deleted() {
workspace_structure_change.get_or_insert((path, false)).1 |=
self.crate_graph_file_dependencies.contains(vfs_path);
} else if reload::should_refresh_for_change(&path, file.kind()) {
} else if reload::should_refresh_for_change(
&path,
file.kind(),
&mut self.cargo_script_tomls.lock(),
) {
workspace_structure_change.get_or_insert((path.clone(), false));
}
}

View file

@ -150,7 +150,11 @@ pub(crate) fn handle_did_save_text_document(
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
if reload::should_refresh_for_change(
abs_path,
ChangeKind::Modify,
&mut state.cargo_script_tomls.lock(),
) {
state
.fetch_workspaces_queue
.request_op(format!("workspace vfs file change saved {abs_path}"), false);

View file

@ -25,7 +25,7 @@ use ide_db::{
use itertools::Itertools;
use load_cargo::{load_proc_macro, ProjectFolders};
use proc_macro_api::ProcMacroServer;
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use project_model::{CargoScriptTomls, ProjectWorkspace, WorkspaceBuildScripts};
use stdx::{format_to, thread::ThreadIntent};
use triomphe::Arc;
use vfs::{AbsPath, AbsPathBuf, ChangeKind};
@ -206,6 +206,7 @@ impl GlobalState {
let linked_projects = self.config.linked_or_discovered_projects();
let detached_files = self.config.detached_files().to_vec();
let cargo_config = self.config.cargo();
let cargo_script_tomls = self.cargo_script_tomls.clone();
move |sender| {
let progress = {
@ -258,6 +259,7 @@ impl GlobalState {
workspaces.push(project_model::ProjectWorkspace::load_detached_files(
detached_files,
&cargo_config,
&mut cargo_script_tomls.lock(),
));
}
@ -758,7 +760,15 @@ pub fn ws_to_crate_graph(
(crate_graph, proc_macro_paths, layouts, toolchains)
}
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
pub(crate) fn should_refresh_for_change(
path: &AbsPath,
change_kind: ChangeKind,
cargo_script_tomls: &mut CargoScriptTomls,
) -> bool {
if cargo_script_tomls.need_reload(path) {
return true;
}
const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];

View file

@ -117,6 +117,104 @@ fn f() {
);
}
// End-to-end check of cargo-script support: the embedded manifest declares
// `dependency` (but not `dependency2`), so completion on the first `use` line
// should offer `SpecialHashMap` while the second should not; after rewriting
// the manifest to declare `dependency2` instead, the two expectations swap.
#[test]
fn completes_items_from_standard_library_in_cargo_script() {
if skip_slow_tests() {
return;
}
// Fixture: two local crates plus a cargo script whose doc-comment manifest
// pulls in only `dependency`.
let server = Project::with_fixture(
r#"
//- /dependency/Cargo.toml
[package]
name = "dependency"
version = "0.1.0"
//- /dependency/src/lib.rs
pub struct SpecialHashMap;
//- /dependency2/Cargo.toml
[package]
name = "dependency2"
version = "0.1.0"
//- /dependency2/src/lib.rs
pub struct SpecialHashMap2;
//- /src/lib.rs
#!/usr/bin/env -S cargo +nightly -Zscript
//! ```cargo
//! [dependencies]
//! dependency = { path = "../dependency" }
//! ```
use dependency::Spam;
use dependency2::Spam;
"#,
)
.with_config(serde_json::json!({
"cargo": { "sysroot": "discover" },
}))
.server()
.wait_until_workspace_is_loaded();
// Position (7, 18) sits inside the path of `use dependency::Spam;`
// — the declared dependency must be resolvable.
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(7, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(res.to_string().contains("SpecialHashMap"));
// (8, 18) is inside `use dependency2::Spam;` — not declared, so no
// `SpecialHashMap`/`SpecialHashMap2` completions expected.
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(8, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(!res.to_string().contains("SpecialHashMap"));
// Swap the manifest to depend on `dependency2` only; the save notification
// should trigger a workspace reload via the manifest-change detection.
server.write_file_and_save(
"src/lib.rs",
r#"#!/usr/bin/env -S cargo +nightly -Zscript
//! ```cargo
//! [dependencies]
//! dependency2 = { path = "../dependency2" }
//! ```
use dependency::Spam;
use dependency2::Spam;
"#
.to_owned(),
);
let server = server.wait_until_workspace_is_loaded();
// NOTE(review): fixed 3s sleep to let the reload settle — presumably a
// workaround for lacking a reload-complete signal; potentially flaky/slow,
// confirm whether an explicit synchronization point exists.
std::thread::sleep(std::time::Duration::from_secs(3));
// Expectations are now inverted: `dependency` is gone from the manifest...
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(7, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(!res.to_string().contains("SpecialHashMap"));
// ...and `dependency2` is now available.
let res = server.send_request::<Completion>(CompletionParams {
text_document_position: TextDocumentPositionParams::new(
server.doc_id("src/lib.rs"),
Position::new(8, 18),
),
context: None,
partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(),
});
assert!(res.to_string().contains("SpecialHashMap"));
}
#[test]
fn test_runnables_project() {
if skip_slow_tests() {

View file

@ -125,7 +125,7 @@ impl Project<'_> {
}
let mut config = Config::new(
tmp_dir_path,
tmp_dir_path.clone(),
lsp_types::ClientCapabilities {
workspace: Some(lsp_types::WorkspaceClientCapabilities {
did_change_watched_files: Some(
@ -185,10 +185,14 @@ impl Project<'_> {
roots,
None,
);
config.update(self.config).expect("invalid config");
// TODO: don't hardcode src/lib.rs as detached file
let mut c = self.config;
let p = tmp_dir_path.join("src/lib.rs").to_string();
c["detachedFiles"] = serde_json::json!([p]);
config.update(c).expect("invalid config");
config.rediscover_workspaces();
Server::new(tmp_dir, config)
Server::new(tmp_dir.keep(), config)
}
}
@ -374,6 +378,16 @@ impl Server {
pub(crate) fn path(&self) -> &Utf8Path {
self.dir.path()
}
/// Writes `text` to `path` (relative to the server's temp dir) on disk and
/// then notifies the server with a `textDocument/didSave`, mimicking an
/// editor save so save-triggered reload logic runs.
pub(crate) fn write_file_and_save(&self, path: &str, text: String) {
    let on_disk = self.dir.path().join(path);
    fs::write(&on_disk, &text).unwrap();
    let params = lsp_types::DidSaveTextDocumentParams {
        text_document: self.doc_id(path),
        text: Some(text),
    };
    self.notification::<lsp_types::notification::DidSaveTextDocument>(params)
}
}
impl Drop for Server {