5387: Add --memory-usage to analysis-bench r=matklad a=jonas-schievink



Co-authored-by: Jonas Schievink <jonasschievink@gmail.com>
bors[bot], 2020-07-15 12:07:05 +00:00, committed by GitHub
commit 2c67ca0146
5 changed files with 44 additions and 22 deletions
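
In short: `Command::Bench` gains a `memory_usage: bool` field, the flag is parsed in the bench arm of the argument parser, `main` forwards it into `cli::analysis_bench`, and the per-query memory report that `analysis-stats` already printed moves into a shared `print_memory_usage(host, vfs)` helper at the crate root so both commands can use it. A minimal sketch of the resulting flow, pieced together from the hunks below (surrounding code elided):

    // args: parse the new flag and carry it on the bench command
    let memory_usage = matches.contains("--memory-usage");
    Command::Bench { memory_usage, path, what, load_output_dirs, with_proc_macro }

    // analysis_bench: after the benchmark work, optionally dump the report;
    // print_memory_usage consumes host and vfs so it can drop and measure them
    if memory_usage {
        print_memory_usage(host, vfs);
    }

As the help text notes, the statistics require building with `--features jemalloc`.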


@@ -35,6 +35,7 @@ pub(crate) enum Command {
         with_proc_macro: bool,
     },
     Bench {
+        memory_usage: bool,
         path: PathBuf,
         what: BenchWhat,
         load_output_dirs: bool,
@@ -165,7 +166,7 @@ USAGE:
 FLAGS:
     -o, --only             Only analyze items matching this path
     -h, --help             Prints help information
-        --memory-usage     Collect memory usage statistics (requires `--feature jemalloc`)
+        --memory-usage     Collect memory usage statistics (requires `--features jemalloc`)
         --randomize        Randomize order in which crates, modules, and items are processed
         --parallel         Run type inference in parallel
         --load-output-dirs Load OUT_DIR values by running `cargo check` before analysis
@@ -220,6 +221,7 @@ USAGE:
 FLAGS:
     -h, --help             Prints help information
+        --memory-usage     Collect memory usage statistics (requires `--features jemalloc`)
         --load-output-dirs Load OUT_DIR values by running `cargo check` before analysis
         --with-proc-macro  Use ra-proc-macro-srv for proc-macro expanding
     -v, --verbose
@@ -251,9 +253,10 @@ ARGS:
                         "exactly one of `--highlight`, `--complete` or `--goto-def` must be set"
                     ),
                 };
+                let memory_usage = matches.contains("--memory-usage");
                 let load_output_dirs = matches.contains("--load-output-dirs");
                 let with_proc_macro = matches.contains("--with-proc-macro");
-                Command::Bench { path, what, load_output_dirs, with_proc_macro }
+                Command::Bench { memory_usage, path, what, load_output_dirs, with_proc_macro }
             }
             "diagnostics" => {
                 if matches.contains(["-h", "--help"]) {


@@ -49,11 +49,12 @@ fn main() -> Result<()> {
             load_output_dirs,
             with_proc_macro,
         )?,
-        args::Command::Bench { path, what, load_output_dirs, with_proc_macro } => {
+        args::Command::Bench { memory_usage, path, what, load_output_dirs, with_proc_macro } => {
             cli::analysis_bench(
                 args.verbosity,
                 path.as_ref(),
                 what,
+                memory_usage,
                 load_output_dirs,
                 with_proc_macro,
             )?


@@ -10,7 +10,10 @@ use ra_db::{
 use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CompletionConfig, FilePosition, LineCol};
 use vfs::AbsPathBuf;
-use crate::cli::{load_cargo::load_cargo, Verbosity};
+use crate::{
+    cli::{load_cargo::load_cargo, Verbosity},
+    print_memory_usage,
+};
 pub enum BenchWhat {
     Highlight { path: AbsPathBuf },
@@ -44,6 +47,7 @@ pub fn analysis_bench(
     verbosity: Verbosity,
     path: &Path,
     what: BenchWhat,
+    memory_usage: bool,
     load_output_dirs: bool,
     with_proc_macro: bool,
 ) -> Result<()> {
@@ -99,6 +103,11 @@ pub fn analysis_bench(
             }
         }
     }
+    if memory_usage {
+        print_memory_usage(host, vfs);
+    }
     Ok(())
 }


@@ -21,7 +21,10 @@ use ra_db::{
 use ra_syntax::AstNode;
 use stdx::format_to;
-use crate::cli::{load_cargo::load_cargo, progress_report::ProgressReport, Result, Verbosity};
+use crate::{
+    cli::{load_cargo::load_cargo, progress_report::ProgressReport, Result, Verbosity},
+    print_memory_usage,
+};
 /// Need to wrap Snapshot to provide `Clone` impl for `map_with`
 struct Snap<DB>(DB);
@@ -43,7 +46,7 @@ pub fn analysis_stats(
     with_proc_macro: bool,
 ) -> Result<()> {
     let db_load_time = Instant::now();
-    let (mut host, vfs) = load_cargo(path, load_output_dirs, with_proc_macro)?;
+    let (host, vfs) = load_cargo(path, load_output_dirs, with_proc_macro)?;
     let db = host.raw_database();
     println!("Database loaded {:?}", db_load_time.elapsed());
     let analysis_time = Instant::now();
@@ -273,22 +276,7 @@ pub fn analysis_stats(
     println!("Total: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage());
     if memory_usage {
-        let mut mem = host.per_query_memory_usage();
-        let before = ra_prof::memory_usage();
-        drop(vfs);
-        let vfs = before.allocated - ra_prof::memory_usage().allocated;
-        mem.push(("VFS".into(), vfs));
-        let before = ra_prof::memory_usage();
-        drop(host);
-        mem.push(("Unaccounted".into(), before.allocated - ra_prof::memory_usage().allocated));
-        mem.push(("Remaining".into(), ra_prof::memory_usage().allocated));
-        for (name, bytes) in mem {
-            println!("{:>8} {}", bytes, name)
-        }
+        print_memory_usage(host, vfs);
     }
     Ok(())


@@ -40,7 +40,9 @@ use serde::de::DeserializeOwned;
 pub type Result<T, E = Box<dyn std::error::Error + Send + Sync>> = std::result::Result<T, E>;
 pub use crate::{caps::server_capabilities, main_loop::main_loop};
+use ra_ide::AnalysisHost;
 use std::fmt;
+use vfs::Vfs;
 pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> {
     let res = T::deserialize(&json)
@@ -67,3 +69,22 @@ impl fmt::Display for LspError {
 }
 impl std::error::Error for LspError {}
+fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
+    let mut mem = host.per_query_memory_usage();
+    let before = ra_prof::memory_usage();
+    drop(vfs);
+    let vfs = before.allocated - ra_prof::memory_usage().allocated;
+    mem.push(("VFS".into(), vfs));
+    let before = ra_prof::memory_usage();
+    drop(host);
+    mem.push(("Unaccounted".into(), before.allocated - ra_prof::memory_usage().allocated));
+    mem.push(("Remaining".into(), ra_prof::memory_usage().allocated));
+    for (name, bytes) in mem {
+        println!("{:>8} {}", bytes, name);
+    }
+}
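
A note on the helper above: `per_query_memory_usage` apparently needs mutable access to the host (hence the `mut host` parameter), and the function takes `host` and `vfs` by value so it can drop each one and attribute the bytes that dropping frees. The pattern is: snapshot the allocator's `allocated` counter, drop a value, subtract the new reading; whatever is still allocated at the end is reported as "Remaining". A standalone sketch of that drop-and-diff measurement, assuming only the `ra_prof::memory_usage()` API used above (`measure_drop` and `big_value` are hypothetical names, not part of the change):

    // Report how many heap bytes a value was holding by dropping it and
    // diffing the allocator's "currently allocated" counter before/after.
    // Per the help text, meaningful numbers require `--features jemalloc`.
    fn measure_drop<T>(name: &str, big_value: T) {
        let before = ra_prof::memory_usage();
        drop(big_value);
        let freed = before.allocated - ra_prof::memory_usage().allocated;
        println!("{:>8} {}", freed, name);
    }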