Add more docs

This commit is contained in:
Aleksey Kladov 2021-05-22 16:53:47 +03:00
parent e6776c3e1b
commit 188b0f96f9
20 changed files with 72 additions and 40 deletions

View file

@ -1,5 +1,8 @@
//! FIXME: write short doc here
//! Re-exports various subcrates databases so that the calling code can depend
//! only on `hir`. This breaks abstraction boundary a bit, it would be cool if
//! we didn't do that.
//!
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::*;
pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, HygieneFrameQuery, InternMacroQuery,

View file

@ -1,4 +1,8 @@
//! FIXME: write short doc here
//! Re-export diagnostics such that clients of `hir` don't have to depend on
//! low-level crates.
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
pub use hir_def::diagnostics::{
InactiveCode, UnresolvedMacroCall, UnresolvedModule, UnresolvedProcMacro,
};

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here
//! See [`NavigationTarget`].
use std::fmt;

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use rustc_hash::FxHashSet;
use syntax::{
@ -29,6 +27,10 @@ pub struct Fold {
pub kind: FoldKind,
}
// Feature: Folding
//
// Defines folding regions for curly braced blocks, runs of consecutive import
// statements, and `region` / `endregion` comment markers.
pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
let mut res = vec![];
let mut visited_comments = FxHashSet::default();

View file

@ -1,4 +1,4 @@
//! See `AssistContext`
//! See [`AssistContext`].
use std::mem;

View file

@ -1,4 +1,5 @@
//! See `PathTransform`
//! See [`PathTransform`].
use hir::{HirDisplay, SemanticsScope};
use ide_db::helpers::mod_path_to_ast;
use rustc_hash::FxHashMap;

View file

@ -124,5 +124,5 @@ impl Drop for CpuSpan {
}
pub fn memory_usage() -> MemoryUsage {
MemoryUsage::current()
MemoryUsage::now()
}

View file

@ -1,4 +1,6 @@
//! FIXME: write short doc here
//! Like [`std::time::Instant`], but for memory.
//!
//! Measures the total size of all currently allocated objects.
use std::fmt;
use cfg_if::cfg_if;
@ -22,7 +24,7 @@ impl std::ops::Sub for MemoryUsage {
}
impl MemoryUsage {
pub fn current() -> MemoryUsage {
pub fn now() -> MemoryUsage {
cfg_if! {
if #[cfg(all(feature = "jemalloc", not(target_env = "msvc")))] {
jemalloc_ctl::epoch::advance().unwrap();

View file

@ -44,7 +44,7 @@ impl StopWatch {
}
pub fn memory(mut self, yes: bool) -> StopWatch {
if yes {
self.memory = Some(MemoryUsage::current());
self.memory = Some(MemoryUsage::now());
}
self
}
@ -58,7 +58,7 @@ impl StopWatch {
#[cfg(not(target_os = "linux"))]
let instructions = None;
let memory = self.memory.map(|it| MemoryUsage::current() - it);
let memory = self.memory.map(|it| MemoryUsage::now() - it);
StopWatchSpan { time, instructions, memory }
}
}

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here
//! See [`CargoWorkspace`].
use std::path::PathBuf;
use std::{convert::TryInto, ops, process::Command, sync::Arc};
@ -12,10 +12,9 @@ use rustc_hash::FxHashMap;
use serde::Deserialize;
use serde_json::from_value;
use crate::build_data::BuildDataConfig;
use crate::utf8_stdout;
use crate::{build_data::BuildDataConfig, utf8_stdout};
/// `CargoWorkspace` represents the logical structure of, well, a Cargo
/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
/// workspace. It pretty closely mirrors `cargo metadata` output.
///
/// Note that internally, rust-analyzer uses a different structure:

View file

@ -1,4 +1,19 @@
//! FIXME: write short doc here
//! In rust-analyzer, we maintain a strict separation between pure abstract
//! semantic project model and a concrete model of a particular build system.
//!
//! Pure model is represented by the [`base_db::CrateGraph`] from another crate.
//!
//! In this crate, we are concerned with "real world" project models.
//!
//! Specifically, here we have a representation for a Cargo project
//! ([`CargoWorkspace`]) and for manually specified layout ([`ProjectJson`]).
//!
//! Roughly, the things we do here are:
//!
//! * Project discovery (where's the relevant Cargo.toml for the current dir).
//! * Custom build steps (`build.rs` code generation and compilation of
//! procedural macros).
//! * Lowering of concrete model to a [`base_db::CrateGraph`]
mod cargo_workspace;
mod cfg_flag;

View file

@ -1,4 +1,9 @@
//! FIXME: write short doc here
//! `rust-project.json` file format.
//!
//! This format is spiritually a serialization of [`base_db::CrateGraph`]. The
//! idea here is that people who do not use Cargo, can instead teach their build
//! system to generate `rust-project.json` which can be ingested by
//! rust-analyzer.
use std::path::PathBuf;

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here
//! Fuzzing for from-scratch parsing.
#![no_main]
use libfuzzer_sys::fuzz_target;

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here
//! Fuzzing for incremental parsing.
#![no_main]
use libfuzzer_sys::fuzz_target;

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here
//! Collection of assorted algorithms for syntax trees.
use std::{hash::BuildHasherDefault, ops::RangeInclusive};

View file

@ -1,4 +1,6 @@
//! FIXME: write short doc here
//! Some infrastructure for fuzzy testing.
//!
//! We don't normally run fuzzing, so this is hopelessly bitrotten :(
use std::{
convert::TryInto,

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here
//! See [`TextTreeSink`].
use std::mem;

View file

@ -1,4 +1,13 @@
//! FIXME: write short doc here
//! In rust-analyzer, syntax trees are transient objects.
//!
//! That means that we create trees when we need them, and tear them down to
//! save memory. In this architecture, hanging on to a particular syntax node
//! for a long time is inadvisable, as that keeps the whole tree resident.
//!
//! Instead, we provide a [`SyntaxNodePtr`] type, which stores information about
//! *location* of a particular syntax node in a tree. It's a small type which can
//! be cheaply stored, and which can be resolved to a real [`SyntaxNode`] when
//! necessary.
use std::{
hash::{Hash, Hasher},

View file

@ -1,5 +1,6 @@
//! FIXME: write short doc here
//! Stateful iteration over token trees.
//!
//! We use this as the source of tokens for the parser.
use crate::{Leaf, Subtree, TokenTree};
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
@ -104,7 +105,7 @@ impl<'t> TokenBuffer<'t> {
/// Creates a cursor referencing the first token in the buffer and able to
/// traverse until the end of the buffer.
pub fn begin(&self) -> Cursor {
pub fn begin(&self) -> Cursor<'_> {
Cursor::create(self, EntryPtr(EntryId(0), 0))
}

View file

@ -392,18 +392,7 @@ impl TidyDocs {
)
}
let poorly_documented = [
"hir",
"hir_expand",
"ide",
"mbe",
"parser",
"profile",
"project_model",
"syntax",
"tt",
"hir_ty",
];
let poorly_documented = ["hir_expand", "mbe", "parser", "hir_ty"];
let mut has_fixmes =
poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();