Auto merge of #18080 - Veykril:dedup, r=Veykril

Remove crate graph deduplication logic

Fixes https://github.com/rust-lang/rust-analyzer/issues/17748
bors 2024-09-11 11:47:17 +00:00
commit cfe8e376c9
21 changed files with 132 additions and 19759 deletions

View file

@@ -3,11 +3,15 @@
use std::fmt;
use rustc_hash::FxHashMap;
use salsa::Durability;
use triomphe::Arc;
use vfs::FileId;
use crate::{CrateGraph, SourceDatabaseFileInputExt, SourceRoot, SourceRootDatabase, SourceRootId};
use crate::{
CrateGraph, CrateId, CrateWorkspaceData, SourceDatabaseFileInputExt, SourceRoot,
SourceRootDatabase, SourceRootId,
};
/// Encapsulate a bunch of raw `.set` calls on the database.
#[derive(Default)]
@@ -15,6 +19,7 @@ pub struct FileChange {
pub roots: Option<Vec<SourceRoot>>,
pub files_changed: Vec<(FileId, Option<String>)>,
pub crate_graph: Option<CrateGraph>,
pub ws_data: Option<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>,
}
impl fmt::Debug for FileChange {
@@ -50,6 +55,10 @@ impl FileChange {
self.crate_graph = Some(graph);
}
pub fn set_ws_data(&mut self, data: FxHashMap<CrateId, Arc<CrateWorkspaceData>>) {
self.ws_data = Some(data);
}
pub fn apply(self, db: &mut dyn SourceRootDatabase) {
let _p = tracing::info_span!("FileChange::apply").entered();
if let Some(roots) = self.roots {
@@ -74,6 +83,9 @@ impl FileChange {
if let Some(crate_graph) = self.crate_graph {
db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH);
}
if let Some(data) = self.ws_data {
db.set_crate_workspace_data_with_durability(Arc::new(data), Durability::HIGH);
}
}
}
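
A minimal usage sketch of the new input (a sketch, not part of the diff; `graph`, `crate_id`, `data`, and `db` are placeholders):

    // Populate the new per-crate workspace data alongside the crate graph;
    // apply() then sets both inputs with Durability::HIGH.
    let mut change = FileChange::default();
    change.set_crate_graph(graph);
    let mut ws_data = FxHashMap::default();
    ws_data.insert(crate_id, Arc::new(data)); // data: CrateWorkspaceData
    change.set_ws_data(ws_data);
    change.apply(db);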

View file

@@ -491,22 +491,15 @@ impl CrateGraph {
.for_each(|(_, data)| data.dependencies.sort_by_key(|dep| dep.crate_id));
}
/// Extends this crate graph by adding a complete disjoint second crate
/// Extends this crate graph by adding a complete second crate
/// graph and adjusts the ids in the [`ProcMacroPaths`] accordingly.
///
/// This will deduplicate the crates of the graph where possible.
/// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id.
/// If the crate dependencies were sorted, the resulting graph from this `extend` call will also
/// have the crate dependencies sorted.
///
/// Returns a mapping from `other`'s crate ids to the new crate ids in `self`.
/// Returns a map from `other`'s IDs to the new IDs in `self`.
pub fn extend(
&mut self,
mut other: CrateGraph,
proc_macros: &mut ProcMacroPaths,
merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool,
) -> FxHashMap<CrateId, CrateId> {
let m = self.len();
let topo = other.crates_in_topological_order();
let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
for topo in topo {
@@ -514,20 +507,13 @@ impl CrateGraph {
crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
let res = self
.arena
.iter_mut()
.take(m)
.find_map(|(id, data)| merge((id, data), (topo, crate_data)).then_some(id));
let new_id =
if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) };
let new_id = self.arena.alloc(crate_data.clone());
id_map.insert(topo, new_id);
}
*proc_macros =
mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect();
id_map
}
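
With the `merge` callback removed, every crate from `other` is now appended unconditionally. A hypothetical caller sketch (`graph`, `other`, `proc_macros`, and `old_id` are placeholders):

    // The returned map translates `other`'s crate IDs into `self`'s arena.
    let id_map = graph.extend(other, &mut proc_macros);
    let new_id = id_map[&old_id]; // old_id was valid in `other`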

View file

@@ -5,11 +5,12 @@ mod input;
use std::panic;
use rustc_hash::FxHashMap;
use salsa::Durability;
use span::EditionedFileId;
use syntax::{ast, Parse, SourceFile, SyntaxError};
use triomphe::Arc;
use vfs::FileId;
use vfs::{AbsPathBuf, FileId};
pub use crate::{
change::FileChange,
@@ -74,19 +75,30 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
// FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
#[salsa::input]
fn data_layout(&self, krate: CrateId) -> TargetLayoutLoadResult;
#[salsa::input]
fn toolchain(&self, krate: CrateId) -> Option<Version>;
fn crate_workspace_data(&self) -> Arc<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>;
#[salsa::transparent]
fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>;
}
/// Crate-related data shared by the whole workspace.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct CrateWorkspaceData {
/// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
pub proc_macro_cwd: Option<AbsPathBuf>,
// FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
pub data_layout: TargetLayoutLoadResult,
/// Toolchain version used to compile the crate.
pub toolchain: Option<Version>,
}
fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> {
db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
db.crate_workspace_data()
.get(&krate)?
.toolchain
.as_ref()
.and_then(|v| ReleaseChannel::from_str(&v.pre))
}
fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
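
The three formerly separate per-crate salsa inputs (`data_layout`, `toolchain`, and the proc-macro working directory) now travel together in one high-durability map. A minimal lookup sketch under the new API (`db` and `krate` are placeholders):

    // One map keyed by CrateId replaces the per-crate input queries.
    let ws_data = db.crate_workspace_data();
    let toolchain = ws_data.get(&krate).and_then(|it| it.toolchain.clone());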

View file

@@ -117,7 +117,7 @@ impl Attrs {
}
impl Attrs {
pub fn by_key<'attrs>(&'attrs self, key: &'attrs Symbol) -> AttrQuery<'_> {
pub fn by_key<'attrs>(&'attrs self, key: &'attrs Symbol) -> AttrQuery<'attrs> {
AttrQuery { attrs: self, key }
}
@@ -594,7 +594,7 @@ impl<'attr> AttrQuery<'attr> {
/// #[doc(html_root_url = "url")]
/// ^^^^^^^^^^^^^ key
/// ```
pub fn find_string_value_in_tt(self, key: &'attr Symbol) -> Option<&str> {
pub fn find_string_value_in_tt(self, key: &'attr Symbol) -> Option<&'attr str> {
self.tt_values().find_map(|tt| {
let name = tt.token_trees.iter()
.skip_while(|tt| !matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == *key))

View file

@@ -204,7 +204,7 @@ impl Body {
pub fn blocks<'a>(
&'a self,
db: &'a dyn DefDatabase,
) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + '_ {
) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + 'a {
self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block)))
}
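
These hunks only replace elided lifetimes (`'_`, bare `&str`) with the named lifetime already present on the signature. An illustrative sketch of the pattern, not taken from the PR: when the return type borrows from parameters sharing one lifetime, naming `'a` keeps the bound explicit:

    // Both slices share 'a, so the returned iterator is tied to 'a explicitly.
    fn pairs<'a>(xs: &'a [u32], ys: &'a [u32]) -> impl Iterator<Item = (u32, u32)> + 'a {
        xs.iter().copied().zip(ys.iter().copied())
    }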

View file

@@ -320,6 +320,7 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
_: Span,
_: Span,
_: Span,
_: Option<String>,
) -> Result<Subtree, ProcMacroExpansionError> {
let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
subtree,

View file

@@ -1,10 +1,10 @@
//! Defines a unit of change that can be applied to the database to get the next
//! state. Changes are transactional.
use base_db::{
salsa::Durability, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootDatabase,
TargetLayoutLoadResult, Version,
salsa::Durability, CrateGraph, CrateId, CrateWorkspaceData, FileChange, SourceRoot,
SourceRootDatabase,
};
use la_arena::RawIdx;
use rustc_hash::FxHashMap;
use span::FileId;
use triomphe::Arc;
@@ -14,8 +14,6 @@ use crate::{db::ExpandDatabase, proc_macro::ProcMacros};
pub struct ChangeWithProcMacros {
pub source_change: FileChange,
pub proc_macros: Option<ProcMacros>,
pub toolchains: Option<Vec<Option<Version>>>,
pub target_data_layouts: Option<Vec<TargetLayoutLoadResult>>,
}
impl ChangeWithProcMacros {
@@ -28,46 +26,25 @@ impl ChangeWithProcMacros {
if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
}
if let Some(target_data_layouts) = self.target_data_layouts {
for (id, val) in target_data_layouts.into_iter().enumerate() {
db.set_data_layout_with_durability(
CrateId::from_raw(RawIdx::from(id as u32)),
val,
Durability::HIGH,
);
}
}
if let Some(toolchains) = self.toolchains {
for (id, val) in toolchains.into_iter().enumerate() {
db.set_toolchain_with_durability(
CrateId::from_raw(RawIdx::from(id as u32)),
val,
Durability::HIGH,
);
}
}
}
pub fn change_file(&mut self, file_id: FileId, new_text: Option<String>) {
self.source_change.change_file(file_id, new_text)
}
pub fn set_crate_graph(&mut self, graph: CrateGraph) {
self.source_change.set_crate_graph(graph)
pub fn set_crate_graph(
&mut self,
graph: CrateGraph,
ws_data: FxHashMap<CrateId, Arc<CrateWorkspaceData>>,
) {
self.source_change.set_crate_graph(graph);
self.source_change.set_ws_data(ws_data);
}
pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
self.proc_macros = Some(proc_macros);
}
pub fn set_toolchains(&mut self, toolchains: Vec<Option<Version>>) {
self.toolchains = Some(toolchains);
}
pub fn set_target_data_layouts(&mut self, target_data_layouts: Vec<TargetLayoutLoadResult>) {
self.target_data_layouts = Some(target_data_layouts);
}
pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
self.source_change.set_roots(roots)
}

View file

@@ -29,6 +29,7 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: Option<String>,
) -> Result<tt::Subtree, ProcMacroExpansionError>;
}
@@ -234,8 +235,18 @@ impl CustomProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
{
match proc_macro.expander.expand(
tt,
attr_arg,
env,
def_site,
call_site,
mixed_site,
db.crate_workspace_data()[&calling_crate]
.proc_macro_cwd
.as_ref()
.map(ToString::to_string),
) {
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
// Don't discard the item in case something unexpected happened while expanding attributes
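
Implementors of `ProcMacroExpander` now receive the invoking crate's working directory explicitly instead of recovering it from `CARGO_RUSTC_CURRENT_DIR`/`CARGO_MANIFEST_DIR`. A hypothetical no-op implementor sketch (the struct name and unused bindings are placeholders):

    #[derive(Debug)]
    struct NoopExpander;

    impl ProcMacroExpander for NoopExpander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            _attrs: Option<&tt::Subtree>,
            _env: &Env,
            _def_site: Span,
            _call_site: Span,
            _mixed_site: Span,
            _current_dir: Option<String>, // new: usually the cargo workspace root
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            Ok(subtree.clone())
        }
    }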

View file

@@ -11,8 +11,8 @@ pub fn target_data_layout_query(
db: &dyn HirDatabase,
krate: CrateId,
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
match db.data_layout(krate) {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) {
match &db.crate_workspace_data()[&krate].data_layout {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
Ok(it) => Ok(Arc::new(it)),
Err(e) => {
Err(match e {
@@ -42,6 +42,6 @@
}.into())
}
},
Err(e) => Err(e),
Err(e) => Err(e.clone()),
}
}

View file

@@ -57,7 +57,7 @@ mod view_item_tree;
mod view_memory_layout;
mod view_mir;
use std::panic::UnwindSafe;
use std::{iter, panic::UnwindSafe};
use cfg::CfgOptions;
use fetch_crates::CrateInfo;
@@ -65,7 +65,8 @@ use hir::{sym, ChangeWithProcMacros};
use ide_db::{
base_db::{
salsa::{self, ParallelDatabase},
CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceRootDatabase, VfsPath,
CrateOrigin, CrateWorkspaceData, Env, FileLoader, FileSet, SourceDatabase,
SourceRootDatabase, VfsPath,
},
prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
};
@@ -256,9 +257,16 @@ impl Analysis {
CrateOrigin::Local { repo: None, name: None },
);
change.change_file(file_id, Some(text));
change.set_crate_graph(crate_graph);
change.set_target_data_layouts(vec![Err("fixture has no layout".into())]);
change.set_toolchains(vec![None]);
let ws_data = crate_graph
.iter()
.zip(iter::repeat(Arc::new(CrateWorkspaceData {
proc_macro_cwd: None,
data_layout: Err("fixture has no layout".into()),
toolchain: None,
})))
.collect();
change.set_crate_graph(crate_graph, ws_data);
host.apply_change(change);
(host.analysis(), file_id)
}

View file

@@ -10,7 +10,7 @@ use hir_expand::proc_macro::{
ProcMacros,
};
use ide_db::{
base_db::{CrateGraph, Env, SourceRoot, SourceRootId},
base_db::{CrateGraph, CrateWorkspaceData, Env, SourceRoot, SourceRootId},
prime_caches, ChangeWithProcMacros, FxHashMap, RootDatabase,
};
use itertools::Itertools;
@@ -447,12 +447,16 @@ fn load_crate_graph(
let source_roots = source_root_config.partition(vfs);
analysis_change.set_roots(source_roots);
let num_crates = crate_graph.len();
analysis_change.set_crate_graph(crate_graph);
let ws_data = crate_graph
.iter()
.zip(iter::repeat(From::from(CrateWorkspaceData {
proc_macro_cwd: None,
data_layout: target_layout.clone(),
toolchain: toolchain.clone(),
})))
.collect();
analysis_change.set_crate_graph(crate_graph, ws_data);
analysis_change.set_proc_macros(proc_macros);
analysis_change
.set_target_data_layouts(iter::repeat(target_layout.clone()).take(num_crates).collect());
analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect());
db.apply_change(analysis_change);
db
@@ -489,8 +493,17 @@ impl ProcMacroExpander for Expander {
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: Option<String>,
) -> Result<tt::Subtree<Span>, ProcMacroExpansionError> {
match self.0.expand(subtree, attrs, env.clone(), def_site, call_site, mixed_site) {
match self.0.expand(
subtree,
attrs,
env.clone(),
def_site,
call_site,
mixed_site,
current_dir,
) {
Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),

View file

@@ -152,10 +152,9 @@ impl ProcMacro {
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: Option<String>,
) -> Result<Result<tt::Subtree<Span>, PanicMessage>, ServerError> {
let version = self.process.version();
let current_dir =
env.get("CARGO_RUSTC_CURRENT_DIR").or_else(|| env.get("CARGO_MANIFEST_DIR"));
let mut span_data_table = SpanDataIndexMap::default();
let def_site = span_data_table.insert_full(def_site).0;

View file

@@ -45,39 +45,6 @@ fn load_cargo_with_overrides(
to_crate_graph(project_workspace)
}
fn load_cargo_with_fake_sysroot(
file_map: &mut FxHashMap<AbsPathBuf, FileId>,
file: &str,
) -> (CrateGraph, ProcMacroPaths) {
let meta: Metadata = get_test_json_file(file);
let manifest_path =
ManifestPath::try_from(AbsPathBuf::try_from(meta.workspace_root.clone()).unwrap()).unwrap();
let cargo_workspace = CargoWorkspace::new(meta, manifest_path);
let project_workspace = ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
cargo: cargo_workspace,
build_scripts: WorkspaceBuildScripts::default(),
rustc: Err(None),
cargo_config_extra_env: Default::default(),
error: None,
},
sysroot: get_fake_sysroot(),
rustc_cfg: Vec::new(),
cfg_overrides: Default::default(),
toolchain: None,
target_layout: Err("target_data_layout not loaded".into()),
};
project_workspace.to_crate_graph(
&mut {
|path| {
let len = file_map.len();
Some(*file_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
}
},
&Default::default(),
)
}
fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) {
let data = get_test_json_file(file);
let project = rooted_project_json(data);
@@ -253,34 +220,6 @@ fn rust_project_is_proc_macro_has_proc_macro_dep() {
crate_data.dependencies.iter().find(|&dep| dep.name.deref() == "proc_macro").unwrap();
}
#[test]
fn crate_graph_dedup_identical() {
let (mut crate_graph, proc_macros) =
load_cargo_with_fake_sysroot(&mut Default::default(), "regex-metadata.json");
crate_graph.sort_deps();
let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone());
crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |(_, a), (_, b)| a == b);
assert!(crate_graph.iter().eq(d_crate_graph.iter()));
assert_eq!(proc_macros, d_proc_macros);
}
#[test]
fn crate_graph_dedup() {
let path_map = &mut Default::default();
let (mut crate_graph, _proc_macros) =
load_cargo_with_fake_sysroot(path_map, "ripgrep-metadata.json");
assert_eq!(crate_graph.iter().count(), 81);
crate_graph.sort_deps();
let (regex_crate_graph, mut regex_proc_macros) =
load_cargo_with_fake_sysroot(path_map, "regex-metadata.json");
assert_eq!(regex_crate_graph.iter().count(), 60);
crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b);
assert_eq!(crate_graph.iter().count(), 118);
}
#[test]
// FIXME Remove the ignore
#[ignore = "requires nightly until the sysroot ships a cargo workspace for library on stable"]

View file

@@ -1456,7 +1456,7 @@ fn sysroot_to_crate_graph(
// Remove all crates except the ones we are interested in to keep the sysroot graph small.
let removed_mapping = cg.remove_crates_except(&marker_set);
let mapping = crate_graph.extend(cg, &mut pm, |(_, a), (_, b)| a == b);
let mapping = crate_graph.extend(cg, &mut pm);
// Map the id through the removal mapping first, then through the crate graph extension mapping.
pub_deps.iter_mut().for_each(|(_, cid, _)| {

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -16,8 +16,9 @@
use std::{iter, mem};
use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros, ProcMacrosBuilder};
use ide::CrateId;
use ide_db::{
base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version},
base_db::{salsa::Durability, CrateGraph, CrateWorkspaceData, ProcMacroPaths},
FxHashMap,
};
use itertools::Itertools;
@@ -692,7 +693,7 @@ impl GlobalState {
})
.collect();
let (crate_graph, proc_macro_paths, layouts, toolchains) = {
let (crate_graph, proc_macro_paths, ws_data) = {
// Create crate graph from all the workspaces
let vfs = &mut self.vfs.write().0;
@@ -721,9 +722,7 @@ impl GlobalState {
.collect(),
);
}
change.set_crate_graph(crate_graph);
change.set_target_data_layouts(layouts);
change.set_toolchains(toolchains);
change.set_crate_graph(crate_graph, ws_data);
self.analysis_host.apply_change(change);
self.report_progress(
"Building CrateGraph",
@@ -863,51 +862,27 @@ pub fn ws_to_crate_graph(
workspaces: &[ProjectWorkspace],
extra_env: &FxHashMap<String, String>,
mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>,
) -> (CrateGraph, Vec<ProcMacroPaths>, Vec<Result<Arc<str>, Arc<str>>>, Vec<Option<Version>>) {
) -> (CrateGraph, Vec<ProcMacroPaths>, FxHashMap<CrateId, Arc<CrateWorkspaceData>>) {
let mut crate_graph = CrateGraph::default();
let mut proc_macro_paths = Vec::default();
let mut layouts = Vec::default();
let mut toolchains = Vec::default();
let e = Err(Arc::from("missing layout"));
let mut ws_data = FxHashMap::default();
for ws in workspaces {
let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
let num_layouts = layouts.len();
let num_toolchains = toolchains.len();
let ProjectWorkspace { toolchain, target_layout, .. } = ws;
let mapping = crate_graph.extend(
other,
&mut crate_proc_macros,
|(cg_id, cg_data), (_o_id, o_data)| {
// if the newly created crate graph's layout is equal to the crate of the merged graph, then
// we can merge the crates.
let id = cg_id.into_raw().into_u32() as usize;
layouts[id] == *target_layout && toolchains[id] == *toolchain && cg_data == o_data
},
);
let mapping = crate_graph.extend(other, &mut crate_proc_macros);
// Populate the side tables for the newly merged crates
mapping.values().for_each(|val| {
let idx = val.into_raw().into_u32() as usize;
// we only need to consider crates that were not merged and remapped, as the
// ones that were remapped already have the correct layout and toolchain
if idx >= num_layouts {
if layouts.len() <= idx {
layouts.resize(idx + 1, e.clone());
}
layouts[idx].clone_from(target_layout);
}
if idx >= num_toolchains {
if toolchains.len() <= idx {
toolchains.resize(idx + 1, None);
}
toolchains[idx].clone_from(toolchain);
}
});
ws_data.extend(mapping.values().copied().zip(iter::repeat(Arc::new(CrateWorkspaceData {
toolchain: toolchain.clone(),
data_layout: target_layout.clone(),
proc_macro_cwd: Some(ws.workspace_root().to_owned()),
}))));
proc_macro_paths.push(crate_proc_macros);
}
crate_graph.shrink_to_fit();
proc_macro_paths.shrink_to_fit();
(crate_graph, proc_macro_paths, layouts, toolchains)
(crate_graph, proc_macro_paths, ws_data)
}
pub(crate) fn should_refresh_for_change(
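
A caller sketch for the new shape (`workspaces`, `extra_env`, `vfs_file_id`, and `change` are placeholders): the two side vectors are replaced by one per-crate map that is handed to `set_crate_graph` together with the graph:

    // ws_data carries layout, toolchain, and proc-macro cwd per crate;
    // proc_macro_paths still feeds the proc-macro loader.
    let (crate_graph, proc_macro_paths, ws_data) =
        ws_to_crate_graph(&workspaces, &extra_env, |path| vfs_file_id(path));
    change.set_crate_graph(crate_graph, ws_data);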

View file

@@ -1,126 +0,0 @@
use std::path::PathBuf;
use project_model::{
CargoWorkspace, ManifestPath, Metadata, ProjectWorkspace, ProjectWorkspaceKind, Sysroot,
WorkspaceBuildScripts,
};
use rust_analyzer::ws_to_crate_graph;
use rustc_hash::FxHashMap;
use serde::de::DeserializeOwned;
use vfs::{AbsPathBuf, FileId};
fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace {
let meta: Metadata = get_test_json_file(file);
let manifest_path =
ManifestPath::try_from(AbsPathBuf::try_from(meta.workspace_root.clone()).unwrap()).unwrap();
let cargo_workspace = CargoWorkspace::new(meta, manifest_path);
ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
cargo: cargo_workspace,
build_scripts: WorkspaceBuildScripts::default(),
rustc: Err(None),
cargo_config_extra_env: Default::default(),
error: None,
},
sysroot: get_fake_sysroot(),
rustc_cfg: Vec::new(),
cfg_overrides: Default::default(),
toolchain: None,
target_layout: Err("target_data_layout not loaded".into()),
}
}
fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let file = base.join("tests/test_data").join(file);
let data = std::fs::read_to_string(file).unwrap();
let mut json = data.parse::<serde_json::Value>().unwrap();
fixup_paths(&mut json);
return serde_json::from_value(json).unwrap();
fn fixup_paths(val: &mut serde_json::Value) {
match val {
serde_json::Value::String(s) => replace_root(s, true),
serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
}
}
}
}
fn replace_root(s: &mut String, direction: bool) {
if direction {
let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
*s = s.replace("$ROOT$", root)
} else {
let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
*s = s.replace(root, "$ROOT$")
}
}
fn get_fake_sysroot_path() -> PathBuf {
let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
base.join("../project-model/test_data/fake-sysroot")
}
fn get_fake_sysroot() -> Sysroot {
let sysroot_path = get_fake_sysroot_path();
// there's no `libexec/` directory with a `proc-macro-srv` binary in that
// fake sysroot, so we give them both the same path:
let sysroot_dir = AbsPathBuf::assert_utf8(sysroot_path);
let sysroot_src_dir = sysroot_dir.clone();
Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir))
}
#[test]
fn test_deduplicate_origin_dev() {
let path_map = &mut FxHashMap::default();
let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");
let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");
let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| {
let len = path_map.len();
Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
});
let mut crates_named_p2 = vec![];
for id in crate_graph.iter() {
let krate = &crate_graph[id];
if let Some(name) = krate.display_name.as_ref() {
if name.to_string() == "p2" {
crates_named_p2.push(krate);
}
}
}
assert_eq!(crates_named_p2.len(), 1);
let p2 = crates_named_p2[0];
assert!(p2.origin.is_local());
}
#[test]
fn test_deduplicate_origin_dev_rev() {
let path_map = &mut FxHashMap::default();
let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");
let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");
let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| {
let len = path_map.len();
Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
});
let mut crates_named_p2 = vec![];
for id in crate_graph.iter() {
let krate = &crate_graph[id];
if let Some(name) = krate.display_name.as_ref() {
if name.to_string() == "p2" {
crates_named_p2.push(krate);
}
}
}
assert_eq!(crates_named_p2.len(), 1);
let p2 = crates_named_p2[0];
assert!(p2.origin.is_local());
}

View file

@@ -1,140 +0,0 @@
{
"packages": [
{
"name": "p1",
"version": "0.1.0",
"id": "p1 0.1.0 (path+file:///example_project/p1)",
"license": null,
"license_file": null,
"description": null,
"source": null,
"dependencies": [
{
"name": "p2",
"source": null,
"req": "*",
"kind": null,
"rename": null,
"optional": false,
"uses_default_features": true,
"features": [],
"target": null,
"registry": null,
"path": "$ROOT$example_project/p2"
}
],
"targets": [
{
"kind": [
"lib"
],
"crate_types": [
"lib"
],
"name": "p1",
"src_path": "$ROOT$example_project/p1/src/lib.rs",
"edition": "2021",
"doc": true,
"doctest": true,
"test": true
}
],
"features": {},
"manifest_path": "$ROOT$example_project/p1/Cargo.toml",
"metadata": null,
"publish": null,
"authors": [],
"categories": [],
"keywords": [],
"readme": null,
"repository": null,
"homepage": null,
"documentation": null,
"edition": "2021",
"links": null,
"default_run": null,
"rust_version": null
},
{
"name": "p2",
"version": "0.1.0",
"id": "p2 0.1.0 (path+file:///example_project/p2)",
"license": null,
"license_file": null,
"description": null,
"source": null,
"dependencies": [],
"targets": [
{
"kind": [
"lib"
],
"crate_types": [
"lib"
],
"name": "p2",
"src_path": "$ROOT$example_project/p2/src/lib.rs",
"edition": "2021",
"doc": true,
"doctest": true,
"test": true
}
],
"features": {},
"manifest_path": "$ROOT$example_project/p2/Cargo.toml",
"metadata": null,
"publish": null,
"authors": [],
"categories": [],
"keywords": [],
"readme": null,
"repository": null,
"homepage": null,
"documentation": null,
"edition": "2021",
"links": null,
"default_run": null,
"rust_version": null
}
],
"workspace_members": [
"p1 0.1.0 (path+file:///example_project/p1)"
],
"workspace_default_members": [
"p1 0.1.0 (path+file:///example_project/p1)"
],
"resolve": {
"nodes": [
{
"id": "p1 0.1.0 (path+file:///example_project/p1)",
"dependencies": [
"p2 0.1.0 (path+file:///example_project/p2)"
],
"deps": [
{
"name": "p2",
"pkg": "p2 0.1.0 (path+file:///example_project/p2)",
"dep_kinds": [
{
"kind": null,
"target": null
}
]
}
],
"features": []
},
{
"id": "p2 0.1.0 (path+file:///example_project/p2)",
"dependencies": [],
"deps": [],
"features": []
}
],
"root": "p1 0.1.0 (path+file:///example_project/p1)"
},
"target_directory": "$ROOT$example_project/p1/target",
"version": 1,
"workspace_root": "$ROOT$example_project/p1",
"metadata": null
}

View file

@@ -1,66 +0,0 @@
{
"packages": [
{
"name": "p2",
"version": "0.1.0",
"id": "p2 0.1.0 (path+file:///example_project/p2)",
"license": null,
"license_file": null,
"description": null,
"source": null,
"dependencies": [],
"targets": [
{
"kind": [
"lib"
],
"crate_types": [
"lib"
],
"name": "p2",
"src_path": "$ROOT$example_project/p2/src/lib.rs",
"edition": "2021",
"doc": true,
"doctest": true,
"test": true
}
],
"features": {},
"manifest_path": "$ROOT$example_project/p2/Cargo.toml",
"metadata": null,
"publish": null,
"authors": [],
"categories": [],
"keywords": [],
"readme": null,
"repository": null,
"homepage": null,
"documentation": null,
"edition": "2021",
"links": null,
"default_run": null,
"rust_version": null
}
],
"workspace_members": [
"p2 0.1.0 (path+file:///example_project/p2)"
],
"workspace_default_members": [
"p2 0.1.0 (path+file:///example_project/p2)"
],
"resolve": {
"nodes": [
{
"id": "p2 0.1.0 (path+file:///example_project/p2)",
"dependencies": [],
"deps": [],
"features": []
}
],
"root": "p2 0.1.0 (path+file:///example_project/p2)"
},
"target_directory": "$ROOT$example_project/p2/target",
"version": 1,
"workspace_root": "$ROOT$example_project/p2",
"metadata": null
}

View file

@@ -2,8 +2,8 @@
use std::{iter, mem, str::FromStr, sync};
use base_db::{
CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileChange,
FileSet, LangCrateOrigin, SourceRoot, SourceRootDatabase, Version, VfsPath,
CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, CrateWorkspaceData, Dependency,
Env, FileChange, FileSet, LangCrateOrigin, SourceRoot, SourceRootDatabase, Version, VfsPath,
};
use cfg::CfgOptions;
use hir_expand::{
@@ -354,16 +354,20 @@ impl ChangeFixture {
};
roots.push(root);
let mut change = ChangeWithProcMacros {
source_change,
proc_macros: Some(proc_macros.build()),
toolchains: Some(iter::repeat(toolchain).take(crate_graph.len()).collect()),
target_data_layouts: Some(
iter::repeat(target_data_layout).take(crate_graph.len()).collect(),
),
};
let mut change =
ChangeWithProcMacros { source_change, proc_macros: Some(proc_macros.build()) };
change.source_change.set_roots(roots);
change.source_change.set_ws_data(
crate_graph
.iter()
.zip(iter::repeat(From::from(CrateWorkspaceData {
proc_macro_cwd: None,
data_layout: target_data_layout,
toolchain,
})))
.collect(),
);
change.source_change.set_crate_graph(crate_graph);
ChangeFixture { file_position, files, change }
@@ -567,6 +571,7 @@ impl ProcMacroExpander for IdentityProcMacroExpander {
_: Span,
_: Span,
_: Span,
_: Option<String>,
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
@@ -584,6 +589,7 @@ impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
_: Span,
_: Span,
_: Span,
_: Option<String>,
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
attrs
.cloned()
@@ -602,6 +608,7 @@ impl ProcMacroExpander for MirrorProcMacroExpander {
_: Span,
_: Span,
_: Span,
_: Option<String>,
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
fn traverse(input: &Subtree<Span>) -> Subtree<Span> {
let mut token_trees = vec![];
@@ -632,6 +639,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
_: Span,
_: Span,
_: Span,
_: Option<String>,
) -> Result<Subtree<Span>, ProcMacroExpansionError> {
return Ok(traverse(input));