Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-26 04:53:34 +00:00)

commit 56c10ca981 (parent e1a76671af)

internal: add JSON formatting for hprof

8 changed files with 186 additions and 58 deletions
@@ -556,7 +556,7 @@ impl Module {
         acc: &mut Vec<AnyDiagnostic>,
         style_lints: bool,
     ) {
-        let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
+        let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered();
         let edition = db.crate_graph()[self.id.krate()].edition;
         let def_map = self.id.def_map(db.upcast());
         for diag in def_map.diagnostics() {
@@ -137,6 +137,7 @@ fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
         filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()),
         chalk_filter: env::var("CHALK_DEBUG").ok(),
         profile_filter: env::var("RA_PROFILE").ok(),
+        json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
     }
     .init()?;
 
@@ -34,6 +34,7 @@ mod handlers {
 
 pub mod tracing {
     pub mod config;
+    pub mod json;
     pub use config::Config;
     pub mod hprof;
 }
@@ -1,17 +1,20 @@
 //! Simple logger that logs either to stderr or to a file, using `tracing_subscriber`
 //! filter syntax and `tracing_appender` for non blocking output.
 
-use std::io;
+use std::io::{self};
 
 use anyhow::Context;
 use tracing::level_filters::LevelFilter;
 use tracing_subscriber::{
-    filter::Targets, fmt::MakeWriter, layer::SubscriberExt, util::SubscriberInitExt, Layer,
-    Registry,
+    filter::{filter_fn, Targets},
+    fmt::MakeWriter,
+    layer::SubscriberExt,
+    Layer, Registry,
 };
 use tracing_tree::HierarchicalLayer;
 
 use crate::tracing::hprof;
+use crate::tracing::json;
 
 #[derive(Debug)]
 pub struct Config<T> {
@@ -34,6 +37,12 @@ pub struct Config<T> {
     /// env RA_PROFILE=*@3>10  // dump everything, up to depth 3, if it takes more than 10
     /// ```
     pub profile_filter: Option<String>,
+
+    /// Filtering syntax, set in a shell:
+    /// ```
+    /// env RA_PROFILE_JSON=foo|bar|baz
+    /// ```
+    pub json_profile_filter: Option<String>,
 }
 
 impl<T> Config<T>
@@ -41,7 +50,7 @@ where
     T: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
 {
     pub fn init(self) -> anyhow::Result<()> {
-        let filter: Targets = self
+        let targets_filter: Targets = self
             .filter
             .parse()
             .with_context(|| format!("invalid log filter: `{}`", self.filter))?;
@@ -52,30 +61,58 @@ where
             .with_target(false)
             .with_ansi(false)
             .with_writer(writer)
-            .with_filter(filter);
+            .with_filter(targets_filter);
 
-        let mut chalk_layer = None;
-        if let Some(chalk_filter) = self.chalk_filter {
-            let level: LevelFilter =
-                chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
-
-            let chalk_filter = Targets::new()
-                .with_target("chalk_solve", level)
-                .with_target("chalk_ir", level)
-                .with_target("chalk_recursive", level);
-            chalk_layer = Some(
-                HierarchicalLayer::default()
-                    .with_indent_lines(true)
-                    .with_ansi(false)
-                    .with_indent_amount(2)
-                    .with_writer(io::stderr)
-                    .with_filter(chalk_filter),
-            );
-        };
+        let chalk_layer = match self.chalk_filter {
+            Some(chalk_filter) => {
+                let level: LevelFilter =
+                    chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
+
+                let chalk_filter = Targets::new()
+                    .with_target("chalk_solve", level)
+                    .with_target("chalk_ir", level)
+                    .with_target("chalk_recursive", level);
+                // TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch.
+                HierarchicalLayer::default()
+                    .with_indent_lines(true)
+                    .with_ansi(false)
+                    .with_indent_amount(2)
+                    .with_writer(io::stderr)
+                    .with_filter(chalk_filter)
+                    .boxed()
+            }
+            None => None::<HierarchicalLayer>.with_filter(LevelFilter::OFF).boxed(),
+        };
 
-        let profiler_layer = self.profile_filter.map(|spec| hprof::layer(&spec));
+        // TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch.
+        let profiler_layer = match self.profile_filter {
+            Some(spec) => Some(hprof::SpanTree::new(&spec)).with_filter(LevelFilter::INFO),
+            None => None.with_filter(LevelFilter::OFF),
+        };
 
-        Registry::default().with(ra_fmt_layer).with(chalk_layer).with(profiler_layer).try_init()?;
+        let json_profiler_layer = match self.json_profile_filter {
+            Some(spec) => {
+                let filter = json::JsonFilter::from_spec(&spec);
+                let filter = filter_fn(move |metadata| {
+                    let allowed = match &filter.allowed_names {
+                        Some(names) => names.contains(metadata.name()),
+                        None => true,
+                    };
+
+                    allowed && metadata.is_span()
+                });
+                Some(json::TimingLayer::new(std::io::stderr).with_filter(filter))
+            }
+            None => None,
+        };
+
+        let subscriber = Registry::default()
+            .with(ra_fmt_layer)
+            .with(json_profiler_layer)
+            .with(profiler_layer)
+            .with(chalk_layer);
+
+        tracing::subscriber::set_global_default(subscriber)?;
 
         Ok(())
     }
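A note on the layer-toggling pattern above: `tracing_subscriber` implements `Layer` for `Option<L>`, so a disabled layer can simply be `None`. Each `match` just has to give both arms one common type: the chalk match does that by `.boxed()`-ing each arm, while the profiler match keeps both arms as the same `Filtered<Option<_>, LevelFilter, _>` by calling `.with_filter(...)` on the `None` arm too (hence the TODO comments). A minimal standalone sketch of the non-boxed variant, outside rust-analyzer (the `APP_VERBOSE` toggle and the fmt layer are illustrative assumptions, not taken from this commit):

```rust
use tracing_subscriber::{
    filter::LevelFilter, fmt, layer::SubscriberExt, util::SubscriberInitExt, Layer, Registry,
};

fn main() {
    // Hypothetical toggle; rust-analyzer reads RA_PROFILE / RA_PROFILE_JSON instead.
    let verbose = std::env::var("APP_VERBOSE").is_ok();

    // Both arms evaluate to the same `Filtered<Option<_>, LevelFilter, _>` type,
    // so the match type-checks; with `LevelFilter::OFF` the `None` arm is inert.
    let debug_layer = match verbose {
        true => Some(fmt::layer().with_target(true)).with_filter(LevelFilter::DEBUG),
        false => None.with_filter(LevelFilter::OFF),
    };

    Registry::default().with(debug_layer).init();
}
```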
@@ -33,6 +33,7 @@
 
 use std::{
     fmt::Write,
+    marker::PhantomData,
     mem,
     time::{Duration, Instant},
 };
@@ -50,53 +51,42 @@ use tracing_subscriber::{
     Layer, Registry,
 };
 
-use crate::tracing::hprof;
-
 pub fn init(spec: &str) -> tracing::subscriber::DefaultGuard {
-    let subscriber = Registry::default().with(layer(spec));
+    let subscriber = Registry::default().with(SpanTree::new(spec));
     tracing::subscriber::set_default(subscriber)
 }
 
-pub fn layer<S>(spec: &str) -> impl Layer<S>
+#[derive(Debug)]
+pub(crate) struct SpanTree<S> {
+    aggregate: bool,
+    write_filter: WriteFilter,
+    _inner: PhantomData<fn(S)>,
+}
+
+impl<S> SpanTree<S>
 where
     S: Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span>,
 {
-    let (write_filter, allowed_names) = WriteFilter::from_spec(spec);
-
-    // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like
-    // span depth or duration are not filtered here: that only occurs at write time.
-    let profile_filter = filter::filter_fn(move |metadata| {
-        let allowed = match &allowed_names {
-            Some(names) => names.contains(metadata.name()),
-            None => true,
-        };
-
-        allowed
-            && metadata.is_span()
-            && metadata.level() >= &Level::INFO
-            && !metadata.target().starts_with("salsa")
-            && metadata.name() != "compute_exhaustiveness_and_usefulness"
-            && !metadata.target().starts_with("chalk")
-    });
-
-    hprof::SpanTree::default().aggregate(true).spec_filter(write_filter).with_filter(profile_filter)
-}
-
-#[derive(Default, Debug)]
-pub(crate) struct SpanTree {
-    aggregate: bool,
-    write_filter: WriteFilter,
-}
-
-impl SpanTree {
-    /// Merge identical sibling spans together.
-    pub(crate) fn aggregate(self, yes: bool) -> SpanTree {
-        SpanTree { aggregate: yes, ..self }
-    }
-
-    /// Add a write-time filter for span duration or tree depth.
-    pub(crate) fn spec_filter(self, write_filter: WriteFilter) -> SpanTree {
-        SpanTree { write_filter, ..self }
+    pub(crate) fn new(spec: &str) -> impl Layer<S> {
+        let (write_filter, allowed_names) = WriteFilter::from_spec(spec);
+
+        // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like
+        // span depth or duration are not filtered here: that only occurs at write time.
+        let profile_filter = filter::filter_fn(move |metadata| {
+            let allowed = match &allowed_names {
+                Some(names) => names.contains(metadata.name()),
+                None => true,
+            };
+
+            allowed
+                && metadata.is_span()
+                && metadata.level() >= &Level::INFO
+                && !metadata.target().starts_with("salsa")
+                && metadata.name() != "compute_exhaustiveness_and_usefulness"
+                && !metadata.target().starts_with("chalk")
+        });
+
+        Self { aggregate: true, write_filter, _inner: PhantomData }.with_filter(profile_filter)
     }
 }
@@ -136,7 +126,7 @@ impl<'a> Visit for DataVisitor<'a> {
     }
 }
 
-impl<S> Layer<S> for SpanTree
+impl<S> Layer<S> for SpanTree<S>
 where
     S: Subscriber + for<'span> LookupSpan<'span>,
 {
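Both `SpanTree<S>` above and `TimingLayer<S, W>` in the new `json.rs` below carry an `_inner: PhantomData<fn(S)>` field. Using `fn(S)` rather than `S` means the struct never owns an `S`, so it stays `Send`/`Sync` regardless of the subscriber type while still being able to name `S` in its `Layer<S>` impl. A tiny standalone sketch of that property (the `PerSubscriber` name is made up for illustration):

```rust
use std::marker::PhantomData;

// Mirrors the `_inner: PhantomData<fn(S)>` pattern from the diff: the struct is
// generic over `S` but never stores one.
struct PerSubscriber<S> {
    _inner: PhantomData<fn(S)>,
}

fn assert_send_sync<T: Send + Sync>() {}

fn main() {
    // `*mut ()` is neither Send nor Sync, yet the wrapper still is, because a
    // fn pointer (and therefore `PhantomData<fn(S)>`) is always Send + Sync.
    assert_send_sync::<PerSubscriber<*mut ()>>();
}
```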
crates/rust-analyzer/src/tracing/json.rs (new file, 90 lines)
@@ -0,0 +1,90 @@
+//! A [tracing_subscriber::layer::Layer] that exports newline-delimited JSON.
+//!
+//! Usage:
+//!
+//! ```rust
+//! let layer = json::TimingLayer::new(std::io::stderr);
+//! Registry::default().with(layer).init();
+//! ```
+
+use std::{io::Write as _, marker::PhantomData, time::Instant};
+
+use ide_db::FxHashSet;
+use tracing::{
+    span::{Attributes, Id},
+    Event, Subscriber,
+};
+use tracing_subscriber::{fmt::MakeWriter, layer::Context, registry::LookupSpan, Layer};
+
+struct JsonData {
+    name: &'static str,
+    start: std::time::Instant,
+}
+
+impl JsonData {
+    fn new(name: &'static str) -> Self {
+        Self { name, start: Instant::now() }
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct TimingLayer<S, W> {
+    writer: W,
+    _inner: PhantomData<fn(S)>,
+}
+
+impl<S, W> TimingLayer<S, W> {
+    pub(crate) fn new(writer: W) -> Self {
+        Self { writer, _inner: PhantomData }
+    }
+}
+
+impl<S, W> Layer<S> for TimingLayer<S, W>
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+    W: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
+{
+    fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+        let span = ctx.span(id).unwrap();
+
+        let data = JsonData::new(attrs.metadata().name());
+        span.extensions_mut().insert(data);
+    }
+
+    fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
+
+    fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+        #[derive(serde::Serialize)]
+        struct JsonDataInner {
+            name: &'static str,
+            elapsed_ms: u128,
+        }
+
+        let span = ctx.span(&id).unwrap();
+        let Some(data) = span.extensions_mut().remove::<JsonData>() else {
+            return;
+        };
+
+        let data = JsonDataInner { name: data.name, elapsed_ms: data.start.elapsed().as_millis() };
+        let mut out = serde_json::to_string(&data).expect("Unable to serialize data");
+        out.push('\n');
+        self.writer.make_writer().write_all(out.as_bytes()).expect("Unable to write data");
+    }
+}
+
+#[derive(Default, Clone, Debug)]
+pub(crate) struct JsonFilter {
+    pub(crate) allowed_names: Option<FxHashSet<String>>,
+}
+
+impl JsonFilter {
+    pub(crate) fn from_spec(spec: &str) -> Self {
+        let allowed_names = if spec == "*" {
+            None
+        } else {
+            Some(FxHashSet::from_iter(spec.split('|').map(String::from)))
+        };
+
+        Self { allowed_names }
+    }
+}
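Not part of the commit, but for context: each closed span leaves `TimingLayer` as one `{"name": ..., "elapsed_ms": ...}` object per line on stderr. A throwaway consumer sketch that aggregates such a stream, assuming it has been piped to stdin (the `serde`, `serde_json`, and `anyhow` crate choices are assumptions made for this sketch):

```rust
use std::{collections::HashMap, io::BufRead};

/// Matches the `JsonDataInner` shape serialized by `TimingLayer::on_close`.
#[derive(serde::Deserialize)]
struct Record {
    name: String,
    elapsed_ms: u128,
}

fn main() -> anyhow::Result<()> {
    // (total elapsed ms, number of spans) per span name.
    let mut totals: HashMap<String, (u128, u64)> = HashMap::new();
    for line in std::io::stdin().lock().lines() {
        let record: Record = serde_json::from_str(&line?)?;
        let entry = totals.entry(record.name).or_default();
        entry.0 += record.elapsed_ms;
        entry.1 += 1;
    }
    for (name, (total_ms, count)) in totals {
        println!("{name}: {total_ms} ms across {count} span(s)");
    }
    Ok(())
}
```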
@@ -103,6 +103,7 @@ impl Project<'_> {
                 filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()),
                 chalk_filter: std::env::var("CHALK_DEBUG").ok(),
                 profile_filter: std::env::var("RA_PROFILE").ok(),
+                json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
             };
         });
 
@@ -178,7 +178,15 @@ RA_PROFILE=foo|bar|baz // enabled only selected entries
 RA_PROFILE=*@3>10  // dump everything, up to depth 3, if it takes more than 10 ms
 ```
 
-In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
+Some rust-analyzer contributors have `export RA_PROFILE='*>10'` in their shell profile.
+
+For machine-readable JSON output, we have the `RA_PROFILE_JSON` env variable. We support
+filtering only by span name:
+
+```
+RA_PROFILE_JSON=*                                                   // dump everything
+RA_PROFILE_JSON="vfs_load|parallel_prime_caches|discover_command"   // dump selected spans
+```
 
 We also have a "counting" profiler which counts number of instances of popular structs.
 It is enabled by `RA_COUNT=1`.
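To illustrate the JSON output: each closed span is written to stderr as one object per line, with fields following the `JsonDataInner` struct in the new `json.rs` (span names and timings below are illustrative, not measured):

```
{"name":"parallel_prime_caches","elapsed_ms":9103}
{"name":"vfs_load","elapsed_ms":152}
{"name":"discover_command","elapsed_ms":640}
```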