Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-26 04:53:34 +00:00)
Add more docs
Commit 188b0f96f9, parent e6776c3e1b
20 changed files with 72 additions and 40 deletions
@@ -1,5 +1,8 @@
-//! FIXME: write short doc here
+//! Re-exports various subcrates' databases so that the calling code can depend
+//! only on `hir`. This breaks the abstraction boundary a bit; it would be cool
+//! if we didn't do that.
+//!
+//! But we need this for at least LRU caching at the query level.
 pub use hir_def::db::*;
 pub use hir_expand::db::{
     AstDatabase, AstDatabaseStorage, AstIdMapQuery, HygieneFrameQuery, InternMacroQuery,
@@ -1,4 +1,8 @@
-//! FIXME: write short doc here
+//! Re-export diagnostics such that clients of `hir` don't have to depend on
+//! low-level crates.
+//!
+//! This probably isn't the best way to do this -- ideally, diagnostics should
+//! be expressed in terms of hir types themselves.
 pub use hir_def::diagnostics::{
     InactiveCode, UnresolvedMacroCall, UnresolvedModule, UnresolvedProcMacro,
 };
@@ -1,4 +1,4 @@
-//! FIXME: write short doc here
+//! See [`NavigationTarget`].
 
 use std::fmt;
@@ -1,5 +1,3 @@
-//! FIXME: write short doc here
-
 use rustc_hash::FxHashSet;
 
 use syntax::{
@@ -29,6 +27,10 @@ pub struct Fold {
     pub kind: FoldKind,
 }
 
+// Feature: Folding
+//
+// Defines folding regions for curly braced blocks, runs of consecutive import
+// statements, and `region` / `endregion` comment markers.
 pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
     let mut res = vec![];
     let mut visited_comments = FxHashSet::default();
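To make the new `Feature: Folding` comment concrete, here is an illustrative input file, not taken from the diff; the `// region:` / `// endregion` marker spelling is an assumption:

```rust
// Illustrative input only: each commented construct is a candidate fold.

// A run of consecutive imports folds into a single region.
use std::collections::HashMap;
use std::collections::HashSet;

// region: helpers  -- assumed marker spelling; folds until the matching endregion
fn unique_len(words: &[&str]) -> usize {
    // Curly-braced blocks such as this function body fold as well.
    words.iter().copied().collect::<HashSet<&str>>().len()
}
// endregion

fn main() {
    let counts: HashMap<&str, usize> = HashMap::from([("alpha", 1), ("beta", 2)]);
    println!("{} keys, {} unique", counts.len(), unique_len(&["a", "b", "a"]));
}
```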
@@ -1,4 +1,4 @@
-//! See `AssistContext`
+//! See [`AssistContext`].
 
 use std::mem;
@@ -1,4 +1,5 @@
-//! See `PathTransform`
+//! See [`PathTransform`].
 
 use hir::{HirDisplay, SemanticsScope};
 use ide_db::helpers::mod_path_to_ast;
 use rustc_hash::FxHashMap;
@@ -124,5 +124,5 @@ impl Drop for CpuSpan {
 }
 
 pub fn memory_usage() -> MemoryUsage {
-    MemoryUsage::current()
+    MemoryUsage::now()
 }
@@ -1,4 +1,6 @@
-//! FIXME: write short doc here
+//! Like [`std::time::Instant`], but for memory.
+//!
+//! Measures the total size of all currently allocated objects.
 use std::fmt;
 
 use cfg_if::cfg_if;
@@ -22,7 +24,7 @@ impl std::ops::Sub for MemoryUsage {
 }
 
 impl MemoryUsage {
-    pub fn current() -> MemoryUsage {
+    pub fn now() -> MemoryUsage {
         cfg_if! {
             if #[cfg(all(feature = "jemalloc", not(target_env = "msvc")))] {
                 jemalloc_ctl::epoch::advance().unwrap();
@@ -44,7 +44,7 @@ impl StopWatch {
     }
     pub fn memory(mut self, yes: bool) -> StopWatch {
         if yes {
-            self.memory = Some(MemoryUsage::current());
+            self.memory = Some(MemoryUsage::now());
         }
         self
     }
@@ -58,7 +58,7 @@ impl StopWatch {
         #[cfg(not(target_os = "linux"))]
         let instructions = None;
 
-        let memory = self.memory.map(|it| MemoryUsage::current() - it);
+        let memory = self.memory.map(|it| MemoryUsage::now() - it);
         StopWatchSpan { time, instructions, memory }
     }
 }
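The `current()` → `now()` rename matches the `Instant` analogy in the new module docs: snapshot before, snapshot after, subtract, which is exactly what `StopWatch` does above. A minimal self-contained sketch of that pattern (the byte counter is a stand-in, not the crate's real allocator query):

```rust
// Sketch of the snapshot-and-subtract pattern behind MemoryUsage::now();
// `current_allocated_bytes` stands in for the allocator statistic the real
// implementation reads via jemalloc_ctl or the OS.
use std::ops::Sub;

#[derive(Copy, Clone, Debug)]
struct MemorySnapshot {
    allocated: isize, // bytes; signed so a difference can go negative
}

impl MemorySnapshot {
    fn now() -> MemorySnapshot {
        MemorySnapshot { allocated: current_allocated_bytes() }
    }
}

impl Sub for MemorySnapshot {
    type Output = MemorySnapshot;
    fn sub(self, rhs: MemorySnapshot) -> MemorySnapshot {
        MemorySnapshot { allocated: self.allocated - rhs.allocated }
    }
}

// Stand-in only: always reports zero.
fn current_allocated_bytes() -> isize {
    0
}

fn main() {
    let before = MemorySnapshot::now();
    let _data: Vec<u64> = (0..1_000_000).collect();
    let delta = MemorySnapshot::now() - before;
    println!("allocated roughly {} bytes more", delta.allocated);
}
```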
@@ -1,4 +1,4 @@
-//! FIXME: write short doc here
+//! See [`CargoWorkspace`].
 
 use std::path::PathBuf;
 use std::{convert::TryInto, ops, process::Command, sync::Arc};
@@ -12,10 +12,9 @@ use rustc_hash::FxHashMap;
 use serde::Deserialize;
 use serde_json::from_value;
 
-use crate::build_data::BuildDataConfig;
-use crate::utf8_stdout;
+use crate::{build_data::BuildDataConfig, utf8_stdout};
 
-/// `CargoWorkspace` represents the logical structure of, well, a Cargo
+/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
 /// workspace. It pretty closely mirrors `cargo metadata` output.
 ///
 /// Note that internally, rust analyzer uses a different structure:
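Since the doc says `CargoWorkspace` "pretty closely mirrors `cargo metadata` output", that output is easy to inspect directly. A rough sketch, assuming `cargo` is on `PATH` and a `serde_json` dependency is available; this is not the crate's own loading code:

```rust
// Dump the package names that `cargo metadata` reports for the current
// workspace -- the raw data a CargoWorkspace-like model is built from.
use std::process::Command;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let output = Command::new("cargo")
        .args(["metadata", "--format-version", "1", "--no-deps"])
        .output()?;
    if !output.status.success() {
        return Err(String::from_utf8_lossy(&output.stderr).to_string().into());
    }
    let metadata: serde_json::Value = serde_json::from_slice(&output.stdout)?;
    for package in metadata["packages"].as_array().into_iter().flatten() {
        println!("{}", package["name"].as_str().unwrap_or("?"));
    }
    Ok(())
}
```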
@@ -1,4 +1,19 @@
-//! FIXME: write short doc here
+//! In rust-analyzer, we maintain a strict separation between the pure, abstract
+//! semantic project model and a concrete model of a particular build system.
+//!
+//! The pure model is represented by the [`base_db::CrateGraph`] from another crate.
+//!
+//! In this crate, we are concerned with "real world" project models.
+//!
+//! Specifically, here we have a representation for a Cargo project
+//! ([`CargoWorkspace`]) and for a manually specified layout ([`ProjectJson`]).
+//!
+//! Roughly, the things we do here are:
+//!
+//! * Project discovery (where's the relevant Cargo.toml for the current dir).
+//! * Custom build steps (`build.rs` code generation and compilation of
+//!   procedural macros).
+//! * Lowering of the concrete model to a [`base_db::CrateGraph`].
 
 mod cargo_workspace;
 mod cfg_flag;
@@ -1,4 +1,9 @@
-//! FIXME: write short doc here
+//! `rust-project.json` file format.
+//!
+//! This format is spiritually a serialization of [`base_db::CrateGraph`]. The
+//! idea here is that people who do not use Cargo can instead teach their build
+//! system to generate `rust-project.json`, which can be ingested by
+//! rust-analyzer.
 
 use std::path::PathBuf;
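For flavor, here is a hedged model of what such a file might carry. The field names (`crates`, `root_module`, `edition`, `deps`, `crate`) are assumptions based on the "serialization of `CrateGraph`" framing, not the authoritative schema, and the sketch assumes `serde` (with derive) and `serde_json` dependencies:

```rust
// Rough, illustrative model of a rust-project.json file: the crate graph is a
// flat list of crates, and dependency edges refer to other crates by index.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct ProjectJson {
    crates: Vec<Crate>,
}

#[derive(Deserialize, Debug)]
struct Crate {
    root_module: std::path::PathBuf, // path to the crate's root source file
    edition: String,                 // e.g. "2018"
    deps: Vec<Dep>,                  // edges of the crate graph
}

#[derive(Deserialize, Debug)]
struct Dep {
    #[serde(rename = "crate")]
    krate: usize, // index into `crates`, i.e. a node id in the graph
    name: String, // the name the dependency is imported under
}

fn main() -> serde_json::Result<()> {
    let text = r#"{
        "crates": [
            { "root_module": "library/src/lib.rs", "edition": "2018", "deps": [] },
            { "root_module": "app/src/main.rs", "edition": "2018",
              "deps": [{ "crate": 0, "name": "library" }] }
        ]
    }"#;
    let project: ProjectJson = serde_json::from_str(text)?;
    println!("{} crates, first root: {:?}", project.crates.len(), project.crates[0].root_module);
    Ok(())
}
```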
@@ -1,4 +1,4 @@
-//! FIXME: write short doc here
+//! Fuzzing for from-scratch parsing.
 
 #![no_main]
 use libfuzzer_sys::fuzz_target;
@@ -1,4 +1,4 @@
-//! FIXME: write short doc here
+//! Fuzzing for incremental parsing.
 
 #![no_main]
 use libfuzzer_sys::fuzz_target;
@@ -1,4 +1,4 @@
-//! FIXME: write short doc here
+//! Collection of assorted algorithms for syntax trees.
 
 use std::{hash::BuildHasherDefault, ops::RangeInclusive};
@@ -1,4 +1,6 @@
-//! FIXME: write short doc here
+//! Some infrastructure for fuzz testing.
+//!
+//! We don't normally run fuzzing, so this is hopelessly bitrotten :(
 
 use std::{
     convert::TryInto,
@@ -1,4 +1,4 @@
-//! FIXME: write short doc here
+//! See [`TextTreeSink`].
 
 use std::mem;
@@ -1,4 +1,13 @@
-//! FIXME: write short doc here
+//! In rust-analyzer, syntax trees are transient objects.
+//!
+//! That means that we create trees when we need them, and tear them down to
+//! save memory. In this architecture, hanging on to a particular syntax node
+//! for a long time is inadvisable, as that keeps the whole tree resident.
+//!
+//! Instead, we provide a [`SyntaxNodePtr`] type, which stores information about
+//! the *location* of a particular syntax node in a tree. It's a small type which
+//! can be cheaply stored, and which can be resolved to a real [`SyntaxNode`]
+//! when necessary.
 
 use std::{
     hash::{Hash, Hasher},
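The comment describes a common pattern: keep a cheap, tree-independent location instead of a node, and re-resolve it on demand. A self-contained sketch of that idea (this is not the real `SyntaxNodePtr`/rowan API):

```rust
// Instead of holding a node from a transient tree, remember only where it was,
// and re-find it in a freshly built tree when needed.
#[derive(Debug)]
struct Node {
    text: String,
    range: std::ops::Range<usize>,
}

/// Stand-in "tree": every whitespace-separated token is a node.
fn build_tree(source: &str) -> Vec<Node> {
    let mut nodes = Vec::new();
    let mut start = 0;
    for token in source.split_whitespace() {
        let begin = source[start..].find(token).unwrap() + start;
        nodes.push(Node { text: token.to_string(), range: begin..begin + token.len() });
        start = begin + token.len();
    }
    nodes
}

/// Cheap, tree-independent "pointer": just the node's range in the source.
#[derive(Copy, Clone, Debug)]
struct NodePtr {
    start: usize,
    end: usize,
}

impl NodePtr {
    fn new(node: &Node) -> NodePtr {
        NodePtr { start: node.range.start, end: node.range.end }
    }
    /// Resolve against a (possibly re-built) tree for the same source text.
    fn to_node<'t>(&self, tree: &'t [Node]) -> Option<&'t Node> {
        tree.iter().find(|n| n.range.start == self.start && n.range.end == self.end)
    }
}

fn main() {
    let source = "fn main() { println!(\"hi\") }";
    let ptr = NodePtr::new(&build_tree(source)[1]); // tree dropped right away
    let fresh_tree = build_tree(source);            // ...rebuilt later on demand
    println!("{:?}", ptr.to_node(&fresh_tree).map(|n| &n.text));
}
```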
@@ -1,5 +1,6 @@
-//! FIXME: write short doc here
+//! Stateful iteration over token trees.
+//!
+//! We use this as the source of tokens for the parser.
 use crate::{Leaf, Subtree, TokenTree};
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
@@ -104,7 +105,7 @@ impl<'t> TokenBuffer<'t> {
 
     /// Creates a cursor referencing the first token in the buffer and able to
     /// traverse until the end of the buffer.
-    pub fn begin(&self) -> Cursor {
+    pub fn begin(&self) -> Cursor<'_> {
         Cursor::create(self, EntryPtr(EntryId(0), 0))
     }
@@ -392,18 +392,7 @@ impl TidyDocs {
         )
     }
 
-    let poorly_documented = [
-        "hir",
-        "hir_expand",
-        "ide",
-        "mbe",
-        "parser",
-        "profile",
-        "project_model",
-        "syntax",
-        "tt",
-        "hir_ty",
-    ];
+    let poorly_documented = ["hir_expand", "mbe", "parser", "hir_ty"];
 
     let mut has_fixmes =
         poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();