fix: Fix sourceroot construction for virtual manifests

Lukas Wirth 2024-12-12 12:58:18 +01:00
parent b707b5a117
commit ab46e97188
7 changed files with 75 additions and 75 deletions


@@ -15,9 +15,7 @@ use ide_db::{
 };
 use itertools::Itertools;
 use proc_macro_api::{MacroDylib, ProcMacroServer};
-use project_model::{
-    CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind,
-};
+use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
 use span::Span;
 use vfs::{
     file_set::FileSetConfig,
@@ -244,6 +242,9 @@ impl ProjectFolders {
                    }
                }

+                if dirs.include.is_empty() {
+                    continue;
+                }
                vfs::loader::Entry::Directories(dirs)
            };

@@ -258,43 +259,6 @@ impl ProjectFolders {
            fsc.add_file_set(file_set_roots)
        }

-        // register the workspace manifest as well, note that this currently causes duplicates for
-        // non-virtual cargo workspaces! We ought to fix that
-        for ws in workspaces.iter() {
-            let mut file_set_roots: Vec<VfsPath> = vec![];
-            let mut entries = vec![];
-
-            if let Some(manifest) = ws.manifest().map(|it| it.to_path_buf()) {
-                file_set_roots.push(VfsPath::from(manifest.to_owned()));
-                entries.push(manifest.to_owned());
-            }
-            for buildfile in ws.buildfiles() {
-                file_set_roots.push(VfsPath::from(buildfile.to_owned()));
-                entries.push(buildfile.to_owned());
-            }
-
-            // In case of detached files we do **not** look for a rust-analyzer.toml.
-            if !matches!(ws.kind, ProjectWorkspaceKind::DetachedFile { .. }) {
-                let ws_root = ws.workspace_root();
-                let ratoml_path = {
-                    let mut p = ws_root.to_path_buf();
-                    p.push("rust-analyzer.toml");
-                    p
-                };
-                file_set_roots.push(VfsPath::from(ratoml_path.to_owned()));
-                entries.push(ratoml_path.to_owned());
-            }
-
-            if !file_set_roots.is_empty() {
-                let entry = vfs::loader::Entry::Files(entries);
-                res.watch.push(res.load.len());
-                res.load.push(entry);
-                local_filesets.push(fsc.len() as u64);
-                fsc.add_file_set(file_set_roots)
-            }
-        }
-
        if let Some(user_config_path) = user_config_dir_path {
            let ratoml_path = {
                let mut p = user_config_path.to_path_buf();
@@ -303,7 +267,7 @@ impl ProjectFolders {
            };

            let file_set_roots = vec![VfsPath::from(ratoml_path.to_owned())];
-            let entry = vfs::loader::Entry::Files(vec![ratoml_path.to_owned()]);
+            let entry = vfs::loader::Entry::Files(vec![ratoml_path]);

            res.watch.push(res.load.len());
            res.load.push(entry);


@@ -33,6 +33,7 @@ pub struct CargoWorkspace {
    workspace_root: AbsPathBuf,
    target_directory: AbsPathBuf,
    manifest_path: ManifestPath,
+    is_virtual_workspace: bool,
 }

 impl ops::Index<Package> for CargoWorkspace {
@@ -384,13 +385,20 @@ impl CargoWorkspace {
            .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
    }

-    pub fn new(mut meta: cargo_metadata::Metadata, manifest_path: ManifestPath) -> CargoWorkspace {
+    pub fn new(
+        mut meta: cargo_metadata::Metadata,
+        ws_manifest_path: ManifestPath,
+    ) -> CargoWorkspace {
        let mut pkg_by_id = FxHashMap::default();
        let mut packages = Arena::default();
        let mut targets = Arena::default();

        let ws_members = &meta.workspace_members;

+        let workspace_root = AbsPathBuf::assert(meta.workspace_root);
+        let target_directory = AbsPathBuf::assert(meta.target_directory);
+        let mut is_virtual_workspace = true;
+
        meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
        for meta_pkg in meta.packages {
            let cargo_metadata::Package {
@@ -429,12 +437,13 @@ impl CargoWorkspace {
            let is_local = source.is_none();
            let is_member = ws_members.contains(&id);

-            let manifest = AbsPathBuf::assert(manifest_path);
+            let manifest = ManifestPath::try_from(AbsPathBuf::assert(manifest_path)).unwrap();
+            is_virtual_workspace &= manifest != ws_manifest_path;
            let pkg = packages.alloc(PackageData {
                id: id.repr.clone(),
                name,
                version,
-                manifest: manifest.clone().try_into().unwrap(),
+                manifest: manifest.clone(),
                targets: Vec::new(),
                is_local,
                is_member,
@@ -468,7 +477,7 @@ impl CargoWorkspace {
                        // modified manifest file into a special target dir which is then used as
                        // the source path. We don't want that, we want the original here so map it
                        // back
-                        manifest.clone()
+                        manifest.clone().into()
                    } else {
                        AbsPathBuf::assert(src_path)
                    },
@@ -493,11 +502,14 @@ impl CargoWorkspace {
            packages[source].active_features.extend(node.features);
        }

-        let workspace_root = AbsPathBuf::assert(meta.workspace_root);
-
-        let target_directory = AbsPathBuf::assert(meta.target_directory);
-
-        CargoWorkspace { packages, targets, workspace_root, target_directory, manifest_path }
+        CargoWorkspace {
+            packages,
+            targets,
+            workspace_root,
+            target_directory,
+            manifest_path: ws_manifest_path,
+            is_virtual_workspace,
+        }
    }

    pub fn packages(&self) -> impl ExactSizeIterator<Item = Package> + '_ {
@@ -579,6 +591,10 @@ impl CargoWorkspace {
    fn is_unique(&self, name: &str) -> bool {
        self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
    }
+
+    pub fn is_virtual_workspace(&self) -> bool {
+        self.is_virtual_workspace
+    }
 }

 fn find_list_of_build_targets(
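
A minimal standalone sketch (not part of the commit, illustrative names and plain std paths instead of the project-model types): the `is_virtual_workspace &= manifest != ws_manifest_path` fold above amounts to checking that no package in the metadata is defined by the workspace manifest itself, which is what Cargo calls a virtual manifest (a root Cargo.toml with a [workspace] section but no [package]).

use std::path::Path;

// The workspace is virtual iff no package manifest is the workspace manifest itself.
fn is_virtual_workspace(ws_manifest: &Path, package_manifests: &[&Path]) -> bool {
    package_manifests.iter().all(|m| *m != ws_manifest)
}

fn main() {
    let ws = Path::new("/repo/Cargo.toml");
    let members = [Path::new("/repo/crates/a/Cargo.toml"), Path::new("/repo/crates/b/Cargo.toml")];
    assert!(is_virtual_workspace(ws, &members)); // virtual: the root manifest defines no package
    assert!(!is_virtual_workspace(ws, &[ws, members[0]])); // root manifest also defines a package
}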


@@ -29,6 +29,12 @@ impl TryFrom<AbsPathBuf> for ManifestPath {
    }
 }

+impl From<ManifestPath> for AbsPathBuf {
+    fn from(it: ManifestPath) -> Self {
+        it.file
+    }
+}
+
 impl ManifestPath {
    // Shadow `parent` from `Deref`.
    pub fn parent(&self) -> &AbsPath {
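
The new `From<ManifestPath> for AbsPathBuf` impl is what lets the `manifest.clone().into()` call in the cargo_workspace hunk above compile: constructing the newtype stays fallible (`TryFrom`), while unwrapping it back into the plain path is infallible. A self-contained sketch of that pattern with stand-in types; the validation rule in `try_from` is made up for illustration and is not the real ManifestPath invariant.

// Stand-ins for AbsPathBuf / ManifestPath.
#[derive(Debug, PartialEq)]
struct AbsPathBufLike(String);
#[derive(Debug, Clone, PartialEq)]
struct ManifestPathLike { file: String }

impl TryFrom<AbsPathBufLike> for ManifestPathLike {
    type Error = AbsPathBufLike;
    fn try_from(p: AbsPathBufLike) -> Result<Self, Self::Error> {
        // Fallible direction: only accept paths that look like a manifest (illustrative check).
        if p.0.ends_with("Cargo.toml") { Ok(ManifestPathLike { file: p.0 }) } else { Err(p) }
    }
}

impl From<ManifestPathLike> for AbsPathBufLike {
    // Infallible direction: every validated manifest already wraps a valid path.
    fn from(it: ManifestPathLike) -> Self { AbsPathBufLike(it.file) }
}

fn main() {
    let manifest = ManifestPathLike::try_from(AbsPathBufLike("/repo/Cargo.toml".into())).unwrap();
    let back: AbsPathBufLike = manifest.clone().into();
    assert_eq!(back, AbsPathBufLike("/repo/Cargo.toml".into()));
}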


@@ -11,8 +11,9 @@ use base_db::{
 };
 use cfg::{CfgAtom, CfgDiff, CfgOptions};
 use intern::{sym, Symbol};
+use itertools::Itertools;
 use paths::{AbsPath, AbsPathBuf};
-use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_hash::FxHashMap;
 use semver::Version;
 use span::{Edition, FileId};
 use toolchain::Tool;
@@ -41,7 +42,9 @@ pub type FileLoader<'a> = &'a mut dyn for<'b> FnMut(&'b AbsPath) -> Option<FileI
 pub struct PackageRoot {
    /// Is from the local filesystem and may be edited
    pub is_local: bool,
+    /// Directories to include
    pub include: Vec<AbsPathBuf>,
+    /// Directories to exclude
    pub exclude: Vec<AbsPathBuf>,
 }

@@ -553,17 +556,6 @@ impl ProjectWorkspace {
        }
    }

-    pub fn buildfiles(&self) -> Vec<AbsPathBuf> {
-        match &self.kind {
-            ProjectWorkspaceKind::Json(project) => project
-                .crates()
-                .filter_map(|(_, krate)| krate.build.as_ref().map(|build| build.build_file.clone()))
-                .map(|build_file| self.workspace_root().join(build_file))
-                .collect(),
-            _ => vec![],
-        }
-    }
-
    pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
        self.sysroot.discover_proc_macro_srv()
    }
@@ -608,15 +600,25 @@ impl ProjectWorkspace {
        match &self.kind {
            ProjectWorkspaceKind::Json(project) => project
                .crates()
-                .map(|(_, krate)| PackageRoot {
-                    is_local: krate.is_workspace_member,
-                    include: krate.include.clone(),
-                    exclude: krate.exclude.clone(),
+                .map(|(_, krate)| {
+                    let build_files = project
+                        .crates()
+                        .filter_map(|(_, krate)| {
+                            krate.build.as_ref().map(|build| build.build_file.clone())
+                        })
+                        // FIXME: PackageRoots dont allow specifying files, only directories
+                        .filter_map(|build_file| {
+                            self.workspace_root().join(build_file).parent().map(ToOwned::to_owned)
+                        });
+                    PackageRoot {
+                        is_local: krate.is_workspace_member,
+                        include: krate.include.iter().cloned().chain(build_files).collect(),
+                        exclude: krate.exclude.clone(),
+                    }
                })
-                .collect::<FxHashSet<_>>()
-                .into_iter()
                .chain(mk_sysroot())
-                .collect::<Vec<_>>(),
+                .unique()
+                .collect(),
            ProjectWorkspaceKind::Cargo {
                cargo,
                rustc,
@@ -671,6 +673,11 @@ impl ProjectWorkspace {
                    exclude: Vec::new(),
                })
            }))
+            .chain(cargo.is_virtual_workspace().then(|| PackageRoot {
+                is_local: true,
+                include: vec![cargo.workspace_root().to_path_buf()],
+                exclude: Vec::new(),
+            }))
            .collect()
        }
            ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => {
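
Two iterator idioms carry the new `to_roots` logic above: `bool::then(..)` produces an `Option` that `chain` appends only when the flag is set (the virtual-workspace root in the Cargo branch), and `Itertools::unique` replaces the old `FxHashSet` round-trip for deduplicating roots in the JSON branch. A minimal sketch with strings in place of `PackageRoot`, assuming the itertools crate is available:

use itertools::Itertools;

// Hypothetical simplification: crate roots as strings instead of PackageRoot values.
fn roots(crate_roots: Vec<String>, ws_root: &str, is_virtual_workspace: bool) -> Vec<String> {
    crate_roots
        .into_iter()
        // `then` yields Some(ws_root) only for virtual workspaces; `chain`
        // accepts the Option because Option implements IntoIterator.
        .chain(is_virtual_workspace.then(|| ws_root.to_owned()))
        // Dedup by value, keeping the first occurrence.
        .unique()
        .collect()
}

fn main() {
    let r = roots(vec!["a".into(), "a".into(), "b".into()], "ws", true);
    assert_eq!(r, ["a", "b", "ws"]);
}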


@@ -51,7 +51,9 @@ fn actual_main() -> anyhow::Result<ExitCode> {
        }
    }

-    setup_logging(flags.log_file.clone())?;
+    if let Err(e) = setup_logging(flags.log_file.clone()) {
+        eprintln!("Failed to setup logging: {e:#}");
+    }

    let verbosity = flags.verbosity();


@@ -827,6 +827,7 @@ impl Config {
        let mut should_update = false;

        if let Some(change) = change.user_config_change {
+            tracing::info!("updating config from user config toml: {:#}", change);
            if let Ok(table) = toml::from_str(&change) {
                let mut toml_errors = vec![];
                validate_toml_table(
@@ -919,7 +920,7 @@ impl Config {
                RatomlFileKind::Crate => {
                    if let Some(text) = text {
                        let mut toml_errors = vec![];
-                        tracing::info!("updating ra-toml config: {:#}", text);
+                        tracing::info!("updating ra-toml crate config: {:#}", text);
                        match toml::from_str(&text) {
                            Ok(table) => {
                                validate_toml_table(
@@ -961,6 +962,7 @@ impl Config {
                }
                RatomlFileKind::Workspace => {
                    if let Some(text) = text {
+                        tracing::info!("updating ra-toml workspace config: {:#}", text);
                        let mut toml_errors = vec![];
                        match toml::from_str(&text) {
                            Ok(table) => {


@@ -417,8 +417,10 @@ impl GlobalState {
                })
                .collect_vec();

-            for (file_id, (_change_kind, vfs_path)) in modified_ratoml_files {
+            for (file_id, (change_kind, vfs_path)) in modified_ratoml_files {
+                tracing::info!(%vfs_path, ?change_kind, "Processing rust-analyzer.toml changes");
                if vfs_path.as_path() == user_config_abs_path {
+                    tracing::info!(%vfs_path, ?change_kind, "Use config rust-analyzer.toml changes");
                    change.change_user_config(Some(db.file_text(file_id)));
                    continue;
                }
@@ -430,12 +432,14 @@ impl GlobalState {
                    if !sr.is_library {
                        let entry = if workspace_ratoml_paths.contains(&vfs_path) {
+                            tracing::info!(%vfs_path, ?sr_id, "workspace rust-analyzer.toml changes");
                            change.change_workspace_ratoml(
                                sr_id,
                                vfs_path.clone(),
                                Some(db.file_text(file_id)),
                            )
                        } else {
+                            tracing::info!(%vfs_path, ?sr_id, "crate rust-analyzer.toml changes");
                            change.change_ratoml(
                                sr_id,
                                vfs_path.clone(),
@@ -446,7 +450,7 @@ impl GlobalState {
                        if let Some((kind, old_path, old_text)) = entry {
                            // SourceRoot has more than 1 RATOML files. In this case lexicographically smaller wins.
                            if old_path < vfs_path {
-                                span!(Level::ERROR, "Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect.");
+                                tracing::error!("Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect.");
                                // Put the old one back in.
                                match kind {
                                    RatomlFileKind::Crate => {
@@ -459,8 +463,7 @@ impl GlobalState {
                            }
                        }
                    } else {
-                        // Mapping to a SourceRoot should always end up in `Ok`
-                        span!(Level::ERROR, "Mapping to SourceRootId failed.");
+                        tracing::info!(%vfs_path, "Ignoring library rust-analyzer.toml");
                    }
                }

            change.change_source_root_parent_map(self.local_roots_parent_map.clone());
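
The last two hunks also fix a logging bug: `span!(Level::ERROR, ..)` only constructs a span and by itself emits no log event, so the old messages never showed up in the logs, whereas `tracing::error!` / `tracing::info!` emit events directly. A small sketch of the difference, assuming the tracing crate; output only appears once a subscriber is installed.

use tracing::{span, Level};

fn demo(vfs_path: &str) {
    // Old pattern: creates a span named "no_effect", immediately drops it,
    // and records no event -- nothing is logged.
    let _span = span!(Level::ERROR, "no_effect");

    // New pattern: these emit events that a subscriber will actually print.
    tracing::error!("Two `rust-analyzer.toml` files were found inside the same crate. {vfs_path} has no effect.");
    tracing::info!(%vfs_path, "Ignoring library rust-analyzer.toml");
}

fn main() {
    // e.g. with tracing_subscriber::fmt().init() installed beforehand
    demo("a/rust-analyzer.toml");
}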