add analysis-bench to benchmark incremental analysis

Can be used like this:

```
$ cargo run --release -p ra_cli -- \
  analysis-bench ../chalk/ \
  --complete ../chalk/chalk-engine/src/logic.rs:94:0

loading: 225.970093ms

from scratch:   8.492373325s
no change:      445.265µs
trivial change: 95.631242ms
```

Or like this:

```
$ cargo run --release -p ra_cli -- \
  analysis-bench ../chalk/ \
  --highlight ../chalk/chalk-engine/src/logic.rs

loading: 209.873484ms

from scratch:   9.504916942s
no change:      7.731119ms
trivial change: 124.984039ms
```

"from scratch" includes initial analysis of the relevant bits of the
project

"no change" just asks the same question for the second time. It
measures overhead on assembling the answer outside of salsa.

"trivial change" doesn't do an actual salsa change, it just advances
the revision. This test how fast is salsa at validating things.
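
For reference, the three numbers come from running the same query three
times against a single analysis host (see the `do_work` helper added in
this commit). Below is a minimal, illustrative sketch of that pattern;
`bench`, `Db` and `bump_revision` are stand-ins for the real
`AnalysisHost`/salsa API, not the committed code:

```
use std::time::Instant;

// Illustrative only: `Db` and `bump_revision` stand in for the real
// `AnalysisHost` and `salsa_runtime().next_revision()`.
fn bench<Db, T>(
    db: &mut Db,
    bump_revision: impl Fn(&mut Db),
    query: impl Fn(&Db) -> T,
) -> T {
    let t = Instant::now();
    query(db); // "from scratch": computes and caches everything
    eprintln!("from scratch:   {:?}", t.elapsed());

    let t = Instant::now();
    query(db); // "no change": fully cached, measures overhead outside salsa
    eprintln!("no change:      {:?}", t.elapsed());

    let t = Instant::now();
    bump_revision(db); // "trivial change": no edit, only a new revision
    let res = query(db); // salsa re-validates instead of recomputing
    eprintln!("trivial change: {:?}", t.elapsed());
    res
}

fn main() {
    // toy "database", just to make the sketch runnable
    let mut db = 2u32;
    let answer = bench(&mut db, |_db| (), |db| db * 21);
    assert_eq!(answer, 42);
}
```

The committed `do_work` helper in the diff below follows the same
three-block shape against `host.analysis()`.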
Aleksey Kladov 2019-06-16 19:19:38 +03:00
parent b81caed43f
commit 6314e62cfb
9 changed files with 245 additions and 81 deletions


@@ -8,7 +8,7 @@ use ra_db::{
CrateGraph, FileId, SourceRootId,
};
use ra_ide_api::{AnalysisHost, AnalysisChange};
use ra_project_model::ProjectWorkspace;
use ra_project_model::{ProjectWorkspace, ProjectRoot};
use ra_vfs::{Vfs, VfsChange};
use vfs_filter::IncludeRustFiles;
@@ -21,13 +21,11 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
SourceRootId(r.0)
}
pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, Vec<SourceRootId>)> {
pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, FxHashMap<SourceRootId, ProjectRoot>)> {
let root = std::env::current_dir()?.join(root);
let ws = ProjectWorkspace::discover(root.as_ref())?;
let mut roots = Vec::new();
roots.push(IncludeRustFiles::member(root.clone()));
roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
let (mut vfs, roots) = Vfs::new(roots);
let project_roots = ws.to_roots();
let (mut vfs, roots) = Vfs::new(IncludeRustFiles::from_roots(project_roots.clone()).collect());
let crate_graph = ws.to_crate_graph(&mut |path: &Path| {
let vfs_file = vfs.load(path);
log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
@@ -35,17 +33,27 @@ pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, Vec<SourceRootId>)> {
});
log::debug!("crate graph: {:?}", crate_graph);
let local_roots = roots
.into_iter()
.filter(|r| vfs.root2path(*r).starts_with(&root))
.map(vfs_root_to_id)
.collect();
let host = load(root.as_path(), crate_graph, &mut vfs);
Ok((host, local_roots))
let source_roots = roots
.iter()
.map(|&vfs_root| {
let source_root_id = vfs_root_to_id(vfs_root);
let project_root = project_roots
.iter()
.find(|it| it.path() == &vfs.root2path(vfs_root))
.unwrap()
.clone();
(source_root_id, project_root)
})
.collect::<FxHashMap<_, _>>();
let host = load(&source_roots, crate_graph, &mut vfs);
Ok((host, source_roots))
}
pub fn load(project_root: &Path, crate_graph: CrateGraph, vfs: &mut Vfs) -> AnalysisHost {
pub fn load(
source_roots: &FxHashMap<SourceRootId, ProjectRoot>,
crate_graph: CrateGraph,
vfs: &mut Vfs,
) -> AnalysisHost {
let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
let mut host = AnalysisHost::new(lru_cap);
let mut analysis_change = AnalysisChange::new();
@@ -60,8 +68,8 @@ pub fn load(project_root: &Path, crate_graph: CrateGraph, vfs: &mut Vfs) -> Anal
for change in vfs.commit_changes() {
match change {
VfsChange::AddRoot { root, files } => {
let is_local = vfs.root2path(root).starts_with(&project_root);
let source_root_id = vfs_root_to_id(root);
let is_local = source_roots[&source_root_id].is_member();
log::debug!(
"loaded source root {:?} with path {:?}",
source_root_id,
@@ -106,7 +114,7 @@ mod tests {
let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
let (host, roots) = load_cargo(path).unwrap();
let mut n_crates = 0;
for root in roots {
for (root, _) in roots {
for _krate in Crate::source_root_crates(host.raw_database(), root) {
n_crates += 1;
}


@@ -1,54 +1,54 @@
use std::path::PathBuf;
use ra_project_model::ProjectRoot;
use ra_vfs::{RootEntry, Filter, RelativePath};
/// `IncludeRustFiles` is used to convert
/// from `ProjectRoot` to `RootEntry` for VFS
pub struct IncludeRustFiles {
root: ProjectRoot,
}
impl IncludeRustFiles {
pub fn from_roots<R>(roots: R) -> impl Iterator<Item = RootEntry>
where
R: IntoIterator<Item = ProjectRoot>,
{
roots.into_iter().map(IncludeRustFiles::from_root)
}
pub fn from_root(root: ProjectRoot) -> RootEntry {
IncludeRustFiles::from(root).into()
}
#[allow(unused)]
pub fn external(path: PathBuf) -> RootEntry {
IncludeRustFiles::from_root(ProjectRoot::new(path, false))
}
pub fn member(path: PathBuf) -> RootEntry {
IncludeRustFiles::from_root(ProjectRoot::new(path, true))
}
}
impl Filter for IncludeRustFiles {
fn include_dir(&self, dir_path: &RelativePath) -> bool {
self.root.include_dir(dir_path)
}
fn include_file(&self, file_path: &RelativePath) -> bool {
self.root.include_file(file_path)
}
}
impl std::convert::From<ProjectRoot> for IncludeRustFiles {
fn from(v: ProjectRoot) -> IncludeRustFiles {
IncludeRustFiles { root: v }
}
}
impl std::convert::From<IncludeRustFiles> for RootEntry {
fn from(v: IncludeRustFiles) -> RootEntry {
let path = v.root.path().clone();
RootEntry::new(path, Box::new(v))
}
}
use std::path::PathBuf;
use ra_project_model::ProjectRoot;
use ra_vfs::{RootEntry, Filter, RelativePath};
/// `IncludeRustFiles` is used to convert
/// from `ProjectRoot` to `RootEntry` for VFS
pub struct IncludeRustFiles {
root: ProjectRoot,
}
impl IncludeRustFiles {
pub fn from_roots<R>(roots: R) -> impl Iterator<Item = RootEntry>
where
R: IntoIterator<Item = ProjectRoot>,
{
roots.into_iter().map(IncludeRustFiles::from_root)
}
pub fn from_root(root: ProjectRoot) -> RootEntry {
IncludeRustFiles::from(root).into()
}
#[allow(unused)]
pub fn external(path: PathBuf) -> RootEntry {
IncludeRustFiles::from_root(ProjectRoot::new(path, false))
}
pub fn member(path: PathBuf) -> RootEntry {
IncludeRustFiles::from_root(ProjectRoot::new(path, true))
}
}
impl Filter for IncludeRustFiles {
fn include_dir(&self, dir_path: &RelativePath) -> bool {
self.root.include_dir(dir_path)
}
fn include_file(&self, file_path: &RelativePath) -> bool {
self.root.include_file(file_path)
}
}
impl From<ProjectRoot> for IncludeRustFiles {
fn from(v: ProjectRoot) -> IncludeRustFiles {
IncludeRustFiles { root: v }
}
}
impl From<IncludeRustFiles> for RootEntry {
fn from(v: IncludeRustFiles) -> RootEntry {
let path = v.root.path().clone();
RootEntry::new(path, Box::new(v))
}
}


@@ -0,0 +1,92 @@
use std::{
path::{PathBuf, Path},
time::Instant,
};
use ra_db::{SourceDatabase, salsa::Database};
use ra_ide_api::{AnalysisHost, Analysis, LineCol, FilePosition};
use crate::Result;
pub(crate) enum Op {
Highlight { path: PathBuf },
Complete { path: PathBuf, line: u32, column: u32 },
}
pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
let start = Instant::now();
eprint!("loading: ");
let (host, roots) = ra_batch::load_cargo(path)?;
let db = host.raw_database();
eprintln!("{:?}\n", start.elapsed());
let file_id = {
let path = match &op {
Op::Highlight { path } => path,
Op::Complete { path, .. } => path,
};
let path = std::env::current_dir()?.join(path).canonicalize()?;
roots
.iter()
.find_map(|(source_root_id, project_root)| {
if project_root.is_member() {
for (rel_path, file_id) in &db.source_root(*source_root_id).files {
let abs_path = rel_path.to_path(project_root.path());
if abs_path == path {
return Some(*file_id);
}
}
}
None
})
.ok_or_else(|| format!("Can't find {:?}", path))?
};
match op {
Op::Highlight { .. } => {
let res = do_work(&host, |analysis| {
analysis.diagnostics(file_id).unwrap();
analysis.highlight_as_html(file_id, false).unwrap()
});
if verbose {
println!("\n{}", res);
}
}
Op::Complete { line, column, .. } => {
let offset = host
.analysis()
.file_line_index(file_id)
.offset(LineCol { line, col_utf16: column });
let file_position = FilePosition { file_id, offset };
let res = do_work(&host, |analysis| analysis.completions(file_position));
if verbose {
println!("\n{:#?}", res);
}
}
}
Ok(())
}
fn do_work<F: Fn(&Analysis) -> T, T>(host: &AnalysisHost, work: F) -> T {
{
let start = Instant::now();
eprint!("from scratch: ");
work(&host.analysis());
eprintln!("{:?}", start.elapsed());
}
{
let start = Instant::now();
eprint!("no change: ");
work(&host.analysis());
eprintln!("{:?}", start.elapsed());
}
{
let start = Instant::now();
eprint!("trivial change: ");
host.raw_database().salsa_runtime().next_revision();
let res = work(&host.analysis());
eprintln!("{:?}", start.elapsed());
res
}
}


@@ -1,4 +1,4 @@
use std::{collections::HashSet, time::Instant, fmt::Write};
use std::{collections::HashSet, time::Instant, fmt::Write, path::Path};
use ra_db::SourceDatabase;
use ra_hir::{Crate, ModuleDef, Ty, ImplItem, HasSource};
@@ -6,20 +6,23 @@ use ra_syntax::AstNode;
use crate::Result;
pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
pub fn run(verbose: bool, path: &Path, only: Option<&str>) -> Result<()> {
let db_load_time = Instant::now();
let (host, roots) = ra_batch::load_cargo(path.as_ref())?;
let (host, roots) = ra_batch::load_cargo(path)?;
let db = host.raw_database();
println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed());
let analysis_time = Instant::now();
let mut num_crates = 0;
let mut visited_modules = HashSet::new();
let mut visit_queue = Vec::new();
for root in roots {
for krate in Crate::source_root_crates(db, root) {
num_crates += 1;
let module = krate.root_module(db).expect("crate in source root without root module");
visit_queue.push(module);
for (source_root_id, project_root) in roots {
if project_root.is_member() {
for krate in Crate::source_root_crates(db, source_root_id) {
num_crates += 1;
let module =
krate.root_module(db).expect("crate in source root without root module");
visit_queue.push(module);
}
}
}
println!("Crates in this dir: {}", num_crates);


@ -1,4 +1,5 @@
mod analysis_stats;
mod analysis_bench;
use std::{io::Read, error::Error};
@@ -26,6 +27,27 @@ fn main() -> Result<()> {
.arg(Arg::with_name("only").short("o").takes_value(true))
.arg(Arg::with_name("path")),
)
.subcommand(
SubCommand::with_name("analysis-bench")
.arg(Arg::with_name("verbose").short("v").long("verbose"))
.arg(
Arg::with_name("highlight")
.long("highlight")
.takes_value(true)
.conflicts_with("complete")
.value_name("PATH")
.help("highlight this file"),
)
.arg(
Arg::with_name("complete")
.long("complete")
.takes_value(true)
.conflicts_with("highlight")
.value_name("PATH:LINE:COLUMN")
.help("compute completions at this location"),
)
.arg(Arg::with_name("path").value_name("PATH").help("project to analyze")),
)
.get_matches();
match matches.subcommand() {
("parse", Some(matches)) => {
@@ -51,7 +73,25 @@ fn main() -> Result<()> {
let verbose = matches.is_present("verbose");
let path = matches.value_of("path").unwrap_or("");
let only = matches.value_of("only");
analysis_stats::run(verbose, path, only)?;
analysis_stats::run(verbose, path.as_ref(), only)?;
}
("analysis-bench", Some(matches)) => {
let verbose = matches.is_present("verbose");
let path = matches.value_of("path").unwrap_or("");
let op = if let Some(path) = matches.value_of("highlight") {
analysis_bench::Op::Highlight { path: path.into() }
} else if let Some(path_line_col) = matches.value_of("complete") {
let (path_line, column) = rsplit_at_char(path_line_col, ':')?;
let (path, line) = rsplit_at_char(path_line, ':')?;
analysis_bench::Op::Complete {
path: path.into(),
line: line.parse()?,
column: column.parse()?,
}
} else {
panic!("either --highlight or --complete must be set")
};
analysis_bench::run(verbose, path.as_ref(), op)?;
}
_ => unreachable!(),
}
@@ -68,3 +108,8 @@ fn read_stdin() -> Result<String> {
std::io::stdin().read_to_string(&mut buff)?;
Ok(buff)
}
fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> {
let idx = s.rfind(c).ok_or_else(|| format!("no `{}` in {}", c, s))?;
Ok((&s[..idx], &s[idx + 1..]))
}


@@ -276,7 +276,7 @@ impl AnalysisHost {
pub fn collect_garbage(&mut self) {
self.db.collect_garbage();
}
pub fn raw_database(&self) -> &impl hir::db::HirDatabase {
pub fn raw_database(&self) -> &(impl hir::db::HirDatabase + salsa::Database) {
&self.db
}
}


@@ -10,7 +10,9 @@ pub struct LineIndex {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
/// Zero-based
pub line: u32,
/// Zero-based
pub col_utf16: u32,
}


@@ -37,6 +37,7 @@ pub enum ProjectWorkspace {
/// `ProjectRoot` describes a workspace root folder.
/// Which may be an external dependency, or a member of
/// the current workspace.
#[derive(Clone)]
pub struct ProjectRoot {
/// Path to the root folder
path: PathBuf,


@@ -147,3 +147,16 @@ RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more tha
```
In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
To measure time for from-scratch analysis, use something like this:
```
$ cargo run --release -p ra_cli -- analysis-stats ../chalk/
```
For measuring time of incremental analysis, use either of these:
```
$ cargo run --release -p ra_cli -- analysis-bench ../chalk/ --highlight ../chalk/chalk-engine/src/logic.rs
$ cargo run --release -p ra_cli -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0
```