Move doc comment handling into ide-db

Author: Lukas Wirth
Date:   2023-09-02 16:26:48 +02:00
parent 0bf0563a00
commit b1575528c0
26 changed files with 342 additions and 266 deletions

@@ -14,12 +14,11 @@ use hir_expand::{
attrs::{collect_attrs, Attr, AttrId, RawAttrs},
HirFileId, InFile,
};
-use itertools::Itertools;
use la_arena::{ArenaMap, Idx, RawIdx};
use mbe::DelimiterKind;
use syntax::{
-ast::{self, HasAttrs, IsString},
-AstPtr, AstToken, SmolStr, TextRange, TextSize,
+ast::{self, HasAttrs},
+AstPtr, SmolStr,
};
use triomphe::Arc;
@@ -33,26 +32,6 @@ use crate::{
LocalFieldId, Lookup, MacroId, VariantId,
};
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Documentation(String);
impl Documentation {
pub fn new(s: String) -> Self {
Documentation(s)
}
pub fn as_str(&self) -> &str {
&self.0
}
}
impl From<Documentation> for String {
fn from(Documentation(string): Documentation) -> Self {
string
}
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Attrs(RawAttrs);
@@ -221,33 +200,6 @@ impl Attrs {
self.by_key("lang").string_value().and_then(|it| LangItem::from_str(it))
}
pub fn docs(&self) -> Option<Documentation> {
let docs = self.by_key("doc").attrs().filter_map(|attr| attr.string_value());
let indent = doc_indent(self);
let mut buf = String::new();
for doc in docs {
// str::lines doesn't yield anything for the empty string
if !doc.is_empty() {
buf.extend(Itertools::intersperse(
doc.lines().map(|line| {
line.char_indices()
.nth(indent)
.map_or(line, |(offset, _)| &line[offset..])
.trim_end()
}),
"\n",
));
}
buf.push('\n');
}
buf.pop();
if buf.is_empty() {
None
} else {
Some(Documentation(buf))
}
}
pub fn has_doc_hidden(&self) -> bool {
self.by_key("doc").tt_values().any(|tt| {
tt.delimiter.kind == DelimiterKind::Parenthesis &&
@@ -574,62 +526,6 @@ impl AttrsWithOwner {
AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
}
pub fn docs_with_rangemap(
&self,
db: &dyn DefDatabase,
) -> Option<(Documentation, DocsRangeMap)> {
let docs =
self.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
let indent = doc_indent(self);
let mut buf = String::new();
let mut mapping = Vec::new();
for (doc, idx) in docs {
if !doc.is_empty() {
let mut base_offset = 0;
for raw_line in doc.split('\n') {
let line = raw_line.trim_end();
let line_len = line.len();
let (offset, line) = match line.char_indices().nth(indent) {
Some((offset, _)) => (offset, &line[offset..]),
None => (0, line),
};
let buf_offset = buf.len();
buf.push_str(line);
mapping.push((
TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
idx,
TextRange::at(
(base_offset + offset).try_into().ok()?,
line_len.try_into().ok()?,
),
));
buf.push('\n');
base_offset += raw_line.len() + 1;
}
} else {
buf.push('\n');
}
}
buf.pop();
if buf.is_empty() {
None
} else {
Some((Documentation(buf), DocsRangeMap { mapping, source_map: self.source_map(db) }))
}
}
}
fn doc_indent(attrs: &Attrs) -> usize {
attrs
.by_key("doc")
.attrs()
.filter_map(|attr| attr.string_value())
.flat_map(|s| s.lines())
.filter(|line| !line.chars().all(|c| c.is_whitespace()))
.map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
.min()
.unwrap_or(0)
}
#[derive(Debug)]
@@ -673,7 +569,7 @@ impl AttrSourceMap {
self.source_of_id(attr.id)
}
-fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
+pub fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
let ast_idx = id.ast_index();
let file_id = match self.mod_def_site_file_id {
Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
@@ -687,69 +583,6 @@ impl AttrSourceMap {
}
}
/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
#[derive(Debug)]
pub struct DocsRangeMap {
source_map: AttrSourceMap,
// (docstring-line-range, attr_index, attr-string-range)
// a mapping from the text range of a line of the [`Documentation`] to the attribute index and
// the original (untrimmed) syntax doc line
mapping: Vec<(TextRange, AttrId, TextRange)>,
}
impl DocsRangeMap {
/// Maps a [`TextRange`] relative to the documentation string back to its AST range
pub fn map(&self, range: TextRange) -> Option<InFile<TextRange>> {
let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
if !line_docs_range.contains_range(range) {
return None;
}
let relative_range = range - line_docs_range.start();
let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
match source {
Either::Left(attr) => {
let string = get_doc_string_in_attr(attr)?;
let text_range = string.open_quote_text_range()?;
let range = TextRange::at(
text_range.end() + original_line_src_range.start() + relative_range.start(),
string.syntax().text_range().len().min(range.len()),
);
Some(InFile { file_id, value: range })
}
Either::Right(comment) => {
let text_range = comment.syntax().text_range();
let range = TextRange::at(
text_range.start()
+ TextSize::try_from(comment.prefix().len()).ok()?
+ original_line_src_range.start()
+ relative_range.start(),
text_range.len().min(range.len()),
);
Some(InFile { file_id, value: range })
}
}
}
}
fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
match it.expr() {
// #[doc = lit]
Some(ast::Expr::Literal(lit)) => match lit.kind() {
ast::LiteralKind::String(it) => Some(it),
_ => None,
},
// #[cfg_attr(..., doc = "", ...)]
None => {
// FIXME: See highlight injection for what to do here
None
}
_ => None,
}
}
#[derive(Debug, Clone, Copy)]
pub struct AttrQuery<'attr> {
attrs: &'attr Attrs,
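
The net effect of this file's changes: hir-def no longer knows about `Documentation` at all; doc-string assembly moves into the new `ide_db::documentation` module added later in this diff, and `AttrSourceMap::source_of_id` becomes `pub` so that module can still map doc ranges back to attributes. A rough caller-side sketch of the new API (it assumes a `RootDatabase` and a `hir::Module` from an existing analysis session and is not part of the patch itself):

use hir::HasAttrs;
use ide_db::documentation::{docs_with_rangemap, Documentation, HasDocs};
use ide_db::RootDatabase;

// `docs` now comes from ide-db's `HasDocs` trait instead of `hir::HasAttrs`.
fn module_docs(db: &RootDatabase, module: hir::Module) -> Option<Documentation> {
    module.docs(db)
}

// The former `AttrsWithOwner::docs_with_rangemap` method is now a free function
// that takes the attrs as a parameter.
fn module_doc_ranges(db: &RootDatabase, module: hir::Module) {
    let attrs = module.attrs(db);
    if let Some((docs, range_map)) = docs_with_rangemap(db, &attrs) {
        // `range_map.map(range)` translates a range inside `docs` back to the source file.
        let _ = (docs.as_str(), range_map);
    }
}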

@@ -1,7 +1,7 @@
//! Attributes & documentation for hir types.
use hir_def::{
-attr::{AttrsWithOwner, Documentation},
+attr::AttrsWithOwner,
item_scope::ItemInNs,
path::{ModPath, Path},
per_ns::Namespace,
@@ -20,7 +20,6 @@ use crate::{
pub trait HasAttrs {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
@@ -43,10 +42,6 @@ macro_rules! impl_has_attrs {
let def = AttrDefId::$def_id(self.into());
db.attrs_with_owner(def)
}
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
let def = AttrDefId::$def_id(self.into());
db.attrs(def).docs()
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
@@ -82,9 +77,6 @@ macro_rules! impl_has_attrs_enum {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
$enum::$variant(self).attrs(db)
}
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
$enum::$variant(self).docs(db)
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
@@ -109,14 +101,6 @@ impl HasAttrs for AssocItem {
}
}
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
match self {
AssocItem::Function(it) => it.docs(db),
AssocItem::Const(it) => it.docs(db),
AssocItem::TypeAlias(it) => it.docs(db),
}
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
@@ -136,23 +120,6 @@ impl HasAttrs for ExternCrateDecl {
let def = AttrDefId::ExternCrateId(self.into());
db.attrs_with_owner(def)
}
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
let crate_docs = self.resolved_crate(db)?.root_module().attrs(db).docs().map(String::from);
let def = AttrDefId::ExternCrateId(self.into());
let decl_docs = db.attrs(def).docs().map(String::from);
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(decl_docs), None) => Some(decl_docs),
(None, Some(crate_docs)) => Some(crate_docs),
(Some(mut decl_docs), Some(crate_docs)) => {
decl_docs.push('\n');
decl_docs.push('\n');
decl_docs += &crate_docs;
Some(decl_docs)
}
}
.map(Documentation::new)
}
fn resolve_doc_path(
self,
db: &dyn HirDatabase,

@@ -115,7 +115,7 @@ pub use crate::{
pub use {
cfg::{CfgAtom, CfgExpr, CfgOptions},
hir_def::{
-attr::{builtin::AttributeTemplate, Attrs, AttrsWithOwner, Documentation},
+attr::{builtin::AttributeTemplate, AttrSourceMap, Attrs, AttrsWithOwner},
data::adt::StructKind,
find_path::PrefixKind,
import_map,
@@ -130,7 +130,7 @@ pub use {
{AdtId, ModuleDefId},
},
hir_expand::{
-attrs::Attr,
+attrs::{Attr, AttrId},
name::{known, Name},
ExpandResult, HirFileId, InFile, MacroFile, Origin,
},

@@ -1,6 +1,6 @@
//! Completion for derives
-use hir::{HasAttrs, ScopeDef};
-use ide_db::SymbolKind;
+use hir::ScopeDef;
+use ide_db::{documentation::HasDocs, SymbolKind};
use itertools::Itertools;
use syntax::SmolStr;

@@ -1,5 +1,5 @@
//! Completion for lints
-use ide_db::{generated::lints::Lint, SymbolKind};
+use ide_db::{documentation::Documentation, generated::lints::Lint, SymbolKind};
use syntax::ast;
use crate::{context::CompletionContext, item::CompletionItem, Completions};
@@ -55,7 +55,7 @@ pub(super) fn complete_lint(
_ => name.to_owned(),
};
let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label);
-item.documentation(hir::Documentation::new(description.to_owned()));
+item.documentation(Documentation::new(description.to_owned()));
item.add_to(acc, ctx.db)
}
}

@@ -1,7 +1,7 @@
//! Completion for extern crates
-use hir::{HasAttrs, Name};
-use ide_db::SymbolKind;
+use hir::Name;
+use ide_db::{documentation::HasDocs, SymbolKind};
use crate::{context::CompletionContext, CompletionItem, CompletionItemKind};

@@ -33,8 +33,8 @@
use hir::{self, HasAttrs};
use ide_db::{
-path_transform::PathTransform, syntax_helpers::insert_whitespace_into_node,
-traits::get_missing_assoc_items, SymbolKind,
+documentation::HasDocs, path_transform::PathTransform,
+syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind,
};
use syntax::{
ast::{self, edit_in_place::AttrsOwnerEdit, HasTypeBounds},

@@ -2,8 +2,12 @@
mod format_like;
-use hir::{Documentation, HasAttrs};
-use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap};
+use ide_db::{
+documentation::{Documentation, HasDocs},
+imports::insert_use::ImportScope,
+ty_filter::TryEnum,
+SnippetCap,
+};
use syntax::{
ast::{self, make, AstNode, AstToken},
SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},

@@ -1,7 +1,6 @@
//! This file provides snippet completions, like `pd` => `eprintln!(...)`.
-use hir::Documentation;
-use ide_db::{imports::insert_use::ImportScope, SnippetCap};
+use ide_db::{documentation::Documentation, imports::insert_use::ImportScope, SnippetCap};
use crate::{
context::{ExprCtx, ItemListKind, PathCompletionCtx, Qualified},

@@ -2,8 +2,11 @@
use std::fmt;
-use hir::{Documentation, Mutability};
-use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind};
+use hir::Mutability;
+use ide_db::{
+documentation::Documentation, imports::import_assets::LocatedImport, RootDatabase, SnippetCap,
+SymbolKind,
+};
use itertools::Itertools;
use smallvec::SmallVec;
use stdx::{impl_from, never};

@@ -12,7 +12,10 @@ pub(crate) mod literal;
use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
use ide_db::{
-helpers::item_name, imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind,
+documentation::{Documentation, HasDocs},
+helpers::item_name,
+imports::import_assets::LocatedImport,
+RootDatabase, SnippetCap, SymbolKind,
};
use syntax::{AstNode, SmolStr, SyntaxKind, TextRange};
@@ -114,7 +117,7 @@ impl<'a> RenderContext<'a> {
}
// FIXME: remove this
-fn docs(&self, def: impl HasAttrs) -> Option<hir::Documentation> {
+fn docs(&self, def: impl HasDocs) -> Option<Documentation> {
def.docs(self.db())
}
}
@@ -409,7 +412,7 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
}
}
-fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<hir::Documentation> {
+fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation> {
use hir::ModuleDef::*;
match resolution {
ScopeDef::ModuleDef(Module(it)) => it.docs(db),

@@ -1,7 +1,10 @@
//! Renderer for `enum` variants.
-use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind};
-use ide_db::SymbolKind;
+use hir::{db::HirDatabase, StructKind};
+use ide_db::{
+documentation::{Documentation, HasDocs},
+SymbolKind,
+};
use crate::{
context::{CompletionContext, PathCompletionCtx, PathKind},

@@ -1,7 +1,7 @@
//! Renderer for macro invocations.
-use hir::{Documentation, HirDisplay};
-use ide_db::SymbolKind;
+use hir::HirDisplay;
+use ide_db::{documentation::Documentation, SymbolKind};
use syntax::SmolStr;
use crate::{

@@ -1,7 +1,7 @@
//! Renderer for patterns.
-use hir::{db::HirDatabase, HasAttrs, Name, StructKind};
-use ide_db::SnippetCap;
+use hir::{db::HirDatabase, Name, StructKind};
+use ide_db::{documentation::HasDocs, SnippetCap};
use itertools::Itertools;
use syntax::SmolStr;
@@ -103,7 +103,7 @@ fn build_completion(
label: SmolStr,
lookup: SmolStr,
pat: String,
-def: impl HasAttrs + Copy,
+def: impl HasDocs + Copy,
adt_ty: hir::Type,
// Missing in context of match statement completions
is_variant_missing: bool,

@@ -0,0 +1,250 @@
use either::Either;
use hir::{
db::{DefDatabase, HirDatabase},
AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
};
use itertools::Itertools;
use syntax::{
ast::{self, IsString},
AstToken,
};
use text_edit::{TextRange, TextSize};
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Documentation(String);
impl Documentation {
pub fn new(s: String) -> Self {
Documentation(s)
}
pub fn as_str(&self) -> &str {
&self.0
}
}
impl From<Documentation> for String {
fn from(Documentation(string): Documentation) -> Self {
string
}
}
pub trait HasDocs: HasAttrs {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
}
macro_rules! impl_has_docs {
($($def:ident,)*) => {$(
impl HasDocs for hir::$def {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
docs_from_attrs(&self.attrs(db)).map(Documentation)
}
}
)*};
}
impl_has_docs![
Field,
Variant,
Static,
Const,
Trait,
TraitAlias,
TypeAlias,
Macro,
Function,
Adt,
Module,
GenericParam,
Impl,
];
macro_rules! impl_has_docs_enum {
($($variant:ident),* for $enum:ident) => {$(
impl HasDocs for hir::$variant {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
hir::$enum::$variant(self).docs(db)
}
}
)*};
}
impl_has_docs_enum![Struct, Union, Enum for Adt];
impl_has_docs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam];
impl HasDocs for hir::AssocItem {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
match self {
hir::AssocItem::Function(it) => it.docs(db),
hir::AssocItem::Const(it) => it.docs(db),
hir::AssocItem::TypeAlias(it) => it.docs(db),
}
}
}
impl HasDocs for hir::ExternCrateDecl {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
let crate_docs =
docs_from_attrs(&self.resolved_crate(db)?.root_module().attrs(db)).map(String::from);
let decl_docs = docs_from_attrs(&self.attrs(db)).map(String::from);
match (decl_docs, crate_docs) {
(None, None) => None,
(Some(decl_docs), None) => Some(decl_docs),
(None, Some(crate_docs)) => Some(crate_docs),
(Some(mut decl_docs), Some(crate_docs)) => {
decl_docs.push('\n');
decl_docs.push('\n');
decl_docs += &crate_docs;
Some(decl_docs)
}
}
.map(Documentation::new)
}
}
/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
#[derive(Debug)]
pub struct DocsRangeMap {
source_map: AttrSourceMap,
// (docstring-line-range, attr_index, attr-string-range)
// a mapping from the text range of a line of the [`Documentation`] to the attribute index and
// the original (untrimmed) syntax doc line
mapping: Vec<(TextRange, AttrId, TextRange)>,
}
impl DocsRangeMap {
/// Maps a [`TextRange`] relative to the documentation string back to its AST range
pub fn map(&self, range: TextRange) -> Option<InFile<TextRange>> {
let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
if !line_docs_range.contains_range(range) {
return None;
}
let relative_range = range - line_docs_range.start();
let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
match source {
Either::Left(attr) => {
let string = get_doc_string_in_attr(attr)?;
let text_range = string.open_quote_text_range()?;
let range = TextRange::at(
text_range.end() + original_line_src_range.start() + relative_range.start(),
string.syntax().text_range().len().min(range.len()),
);
Some(InFile { file_id, value: range })
}
Either::Right(comment) => {
let text_range = comment.syntax().text_range();
let range = TextRange::at(
text_range.start()
+ TextSize::try_from(comment.prefix().len()).ok()?
+ original_line_src_range.start()
+ relative_range.start(),
text_range.len().min(range.len()),
);
Some(InFile { file_id, value: range })
}
}
}
}
fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
match it.expr() {
// #[doc = lit]
Some(ast::Expr::Literal(lit)) => match lit.kind() {
ast::LiteralKind::String(it) => Some(it),
_ => None,
},
// #[cfg_attr(..., doc = "", ...)]
None => {
// FIXME: See highlight injection for what to do here
None
}
_ => None,
}
}
pub fn docs_with_rangemap(
db: &dyn DefDatabase,
attrs: &AttrsWithOwner,
) -> Option<(Documentation, DocsRangeMap)> {
let docs =
attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
let indent = doc_indent(attrs);
let mut buf = String::new();
let mut mapping = Vec::new();
for (doc, idx) in docs {
if !doc.is_empty() {
let mut base_offset = 0;
for raw_line in doc.split('\n') {
let line = raw_line.trim_end();
let line_len = line.len();
let (offset, line) = match line.char_indices().nth(indent) {
Some((offset, _)) => (offset, &line[offset..]),
None => (0, line),
};
let buf_offset = buf.len();
buf.push_str(line);
mapping.push((
TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
idx,
TextRange::at(
(base_offset + offset).try_into().ok()?,
line_len.try_into().ok()?,
),
));
buf.push('\n');
base_offset += raw_line.len() + 1;
}
} else {
buf.push('\n');
}
}
buf.pop();
if buf.is_empty() {
None
} else {
Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) }))
}
}
pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
let docs = attrs.by_key("doc").attrs().filter_map(|attr| attr.string_value());
let indent = doc_indent(attrs);
let mut buf = String::new();
for doc in docs {
// str::lines doesn't yield anything for the empty string
if !doc.is_empty() {
buf.extend(Itertools::intersperse(
doc.lines().map(|line| {
line.char_indices()
.nth(indent)
.map_or(line, |(offset, _)| &line[offset..])
.trim_end()
}),
"\n",
));
}
buf.push('\n');
}
buf.pop();
if buf.is_empty() {
None
} else {
Some(buf)
}
}
fn doc_indent(attrs: &hir::Attrs) -> usize {
attrs
.by_key("doc")
.attrs()
.filter_map(|attr| attr.string_value())
.flat_map(|s| s.lines())
.filter(|line| !line.chars().all(|c| c.is_whitespace()))
.map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
.min()
.unwrap_or(0)
}
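
For reference, the trimming that `docs_from_attrs` and `doc_indent` above perform works like this: find the smallest leading-whitespace width over all non-blank doc lines, then strip that many leading characters (and any trailing whitespace) from every line. A standalone illustration of that behaviour, with invented example data:

fn doc_indent<'a>(lines: impl Iterator<Item = &'a str>) -> usize {
    lines
        .filter(|line| !line.chars().all(char::is_whitespace))
        .map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
        .min()
        .unwrap_or(0)
}

fn main() {
    // Doc attribute values as they look for `/// ...` comments: each keeps the space after `///`.
    let doc = [" Frobnicates the widget.", "", "     let x = 1;"];
    let indent = doc_indent(doc.iter().copied());
    assert_eq!(indent, 1);
    let trimmed: Vec<&str> = doc
        .iter()
        .copied()
        .map(|line| {
            line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..]).trim_end()
        })
        .collect();
    assert_eq!(trimmed, ["Frobnicates the widget.", "", "    let x = 1;"]);
}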

@@ -22,6 +22,7 @@ pub mod symbol_index;
pub mod traits;
pub mod ty_filter;
pub mod use_trivial_constructor;
+pub mod documentation;
pub mod imports {
pub mod import_assets;

@@ -16,6 +16,7 @@ use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasA
use ide_db::{
base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase},
defs::{Definition, NameClass, NameRefClass},
+documentation::{docs_with_rangemap, Documentation},
helpers::pick_best_token,
RootDatabase,
};
@@ -171,7 +172,7 @@ pub(crate) fn external_docs(
/// Extracts all links from a given markdown text returning the definition text range, link-text
/// and the namespace if known.
pub(crate) fn extract_definitions_from_docs(
-docs: &hir::Documentation,
+docs: &Documentation,
) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
Parser::new_with_broken_link_callback(
docs.as_str(),
@@ -297,7 +298,7 @@ impl DocCommentToken {
let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
let (attributes, def) = doc_attributes(sema, &node)?;
-let (docs, doc_mapping) = attributes.docs_with_rangemap(sema.db)?;
+let (docs, doc_mapping) = docs_with_rangemap(sema.db, &attributes)?;
let (in_expansion_range, link, ns) =
extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
let mapped = doc_mapping.map(range)?;

@@ -1,10 +1,11 @@
use std::ffi::OsStr;
use expect_test::{expect, Expect};
-use hir::{HasAttrs, Semantics};
+use hir::Semantics;
use ide_db::{
base_db::{FilePosition, FileRange},
defs::Definition,
+documentation::{Documentation, HasDocs},
RootDatabase,
};
use itertools::Itertools;
@@ -78,7 +79,7 @@ fn check_doc_links(ra_fixture: &str) {
fn def_under_cursor(
sema: &Semantics<'_, RootDatabase>,
position: &FilePosition,
-) -> (Definition, hir::Documentation) {
+) -> (Definition, Documentation) {
let (docs, def) = sema
.parse(position.file_id)
.syntax()
@@ -96,7 +97,7 @@ fn def_under_cursor(
fn node_to_def(
sema: &Semantics<'_, RootDatabase>,
node: &SyntaxNode,
-) -> Option<Option<(Option<hir::Documentation>, Definition)>> {
+) -> Option<Option<(Option<Documentation>, Definition)>> {
Some(match_ast! {
match node {
ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Module(def))),

@@ -3,12 +3,13 @@ use std::fmt::Display;
use either::Either;
use hir::{
-Adt, AsAssocItem, AttributeTemplate, CaptureKind, HasAttrs, HasSource, HirDisplay, Layout,
-LayoutError, Semantics, TypeInfo,
+Adt, AsAssocItem, AttributeTemplate, CaptureKind, HasSource, HirDisplay, Layout, LayoutError,
+Semantics, TypeInfo,
};
use ide_db::{
base_db::SourceDatabase,
defs::Definition,
+documentation::{Documentation, HasDocs},
famous_defs::FamousDefs,
generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
syntax_helpers::insert_whitespace_into_node,
@@ -616,9 +617,9 @@ fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option<Mark
markup(Some(docs.replace('*', "\\*")), desc, None)
}
-fn label_and_docs<D>(db: &RootDatabase, def: D) -> (String, Option<hir::Documentation>)
+fn label_and_docs<D>(db: &RootDatabase, def: D) -> (String, Option<Documentation>)
where
-D: HasAttrs + HirDisplay,
+D: HasDocs + HirDisplay,
{
let label = def.display(db).to_string();
let docs = def.docs(db);
@@ -631,9 +632,9 @@ fn label_and_layout_info_and_docs<D, E, E2>(
config: &HoverConfig,
layout_extractor: E,
layout_offset_extractor: E2,
-) -> (String, Option<hir::Documentation>)
+) -> (String, Option<Documentation>)
where
-D: HasAttrs + HirDisplay,
+D: HasDocs + HirDisplay,
E: Fn(&D) -> Result<Layout, LayoutError>,
E2: Fn(&Layout) -> Option<u64>,
{
@@ -657,9 +658,9 @@ fn label_value_and_layout_info_and_docs<D, E, E2, E3, V>(
value_extractor: E,
layout_extractor: E2,
layout_tag_extractor: E3,
-) -> (String, Option<hir::Documentation>)
+) -> (String, Option<Documentation>)
where
-D: HasAttrs + HirDisplay,
+D: HasDocs + HirDisplay,
E: Fn(&D) -> Option<V>,
E2: Fn(&D) -> Result<Layout, LayoutError>,
E3: Fn(&Layout) -> Option<usize>,
@@ -686,9 +687,9 @@ fn label_value_and_docs<D, E, V>(
db: &RootDatabase,
def: D,
value_extractor: E,
-) -> (String, Option<hir::Documentation>)
+) -> (String, Option<Documentation>)
where
-D: HasAttrs + HirDisplay,
+D: HasDocs + HirDisplay,
E: Fn(&D) -> Option<V>,
V: Display,
{

@@ -111,7 +111,7 @@ pub use crate::{
HighlightConfig, HlRange,
},
};
-pub use hir::{Documentation, Semantics};
+pub use hir::Semantics;
pub use ide_assists::{
Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
};
@@ -124,6 +124,7 @@ pub use ide_db::{
Cancelled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange,
SourceRoot, SourceRootId,
},
+documentation::Documentation,
label::Label,
line_index::{LineCol, LineIndex},
search::{ReferenceCategory, SearchScope},

@@ -4,14 +4,15 @@ use std::fmt;
use either::Either;
use hir::{
-symbols::FileSymbol, AssocItem, Documentation, FieldSource, HasAttrs, HasContainer, HasSource,
-HirDisplay, HirFileId, InFile, LocalSource, ModuleSource,
+symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, HirDisplay, HirFileId,
+InFile, LocalSource, ModuleSource,
};
use ide_db::{
base_db::{FileId, FileRange},
-SymbolKind,
+defs::Definition,
+documentation::{Documentation, HasDocs},
+RootDatabase, SymbolKind,
};
-use ide_db::{defs::Definition, RootDatabase};
use stdx::never;
use syntax::{
ast::{self, HasName},
@@ -327,7 +328,7 @@ impl ToNavFromAst for hir::TraitAlias {
impl<D> TryToNav for D
where
-D: HasSource + ToNavFromAst + Copy + HasAttrs + HirDisplay,
+D: HasSource + ToNavFromAst + Copy + HasDocs + HirDisplay,
D::Ast: ast::HasName,
{
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {

@@ -7,6 +7,7 @@ use ide_assists::utils::test_related_attribute;
use ide_db::{
base_db::{FilePosition, FileRange},
defs::Definition,
+documentation::docs_from_attrs,
helpers::visit_file_defs,
search::SearchScope,
FxHashMap, FxHashSet, RootDatabase, SymbolKind,
@@ -496,7 +497,7 @@ const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
&["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
-attrs.docs().map_or(false, |doc| {
+docs_from_attrs(attrs).map_or(false, |doc| {
let mut in_code_block = false;
for line in String::from(doc).lines() {
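
`has_runnable_doc_test` itself keeps its shape; only its docs source changes from `attrs.docs()` to `docs_from_attrs(attrs)`. As a simplified, self-contained sketch of the fence check it performs (an approximation for illustration only, ignoring `~~~` fences and other details of the real implementation):

const RUNNABLE_FENCE_ATTRS: &[&str] =
    &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];

fn has_runnable_doc_fence(doc: &str) -> bool {
    let mut in_code_block = false;
    for line in doc.lines() {
        if let Some(attrs) = line.trim_start().strip_prefix("```") {
            // Only opening fences carry attributes; the block counts as runnable when
            // every comma/space separated attribute is in the allow-list above.
            if !in_code_block
                && attrs
                    .split(|c: char| c == ',' || c.is_whitespace())
                    .all(|attr| RUNNABLE_FENCE_ATTRS.contains(&attr))
            {
                return true;
            }
            in_code_block = !in_code_block;
        }
    }
    false
}

fn main() {
    assert!(has_runnable_doc_fence("```\nassert!(true);\n```"));
    assert!(!has_runnable_doc_fence("```text\nnot code\n```"));
}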

@@ -4,12 +4,11 @@
use std::collections::BTreeSet;
use either::Either;
-use hir::{
-AssocItem, GenericParam, HasAttrs, HirDisplay, ModuleDef, PathResolution, Semantics, Trait,
-};
+use hir::{AssocItem, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait};
use ide_db::{
active_parameter::{callable_for_node, generic_def_for_node},
base_db::FilePosition,
+documentation::HasDocs,
FxIndexMap,
};
use stdx::format_to;

@@ -5,8 +5,8 @@ use std::mem;
use either::Either;
use hir::{InFile, Semantics};
use ide_db::{
-active_parameter::ActiveParameter, base_db::FileId, defs::Definition, rust_doc::is_rust_fence,
-SymbolKind,
+active_parameter::ActiveParameter, base_db::FileId, defs::Definition,
+documentation::docs_with_rangemap, rust_doc::is_rust_fence, SymbolKind,
};
use syntax::{
ast::{self, AstNode, IsString, QuoteOffsets},
@@ -118,7 +118,7 @@ pub(super) fn doc_comment(
let src_file_id = src_file_id.into();
// Extract intra-doc links and emit highlights for them.
-if let Some((docs, doc_mapping)) = attributes.docs_with_rangemap(sema.db) {
+if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) {
extract_definitions_from_docs(&docs)
.into_iter()
.filter_map(|(range, link, ns)| {

@@ -12,7 +12,10 @@ use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase};
use load_cargo::SourceRootConfig;
use lsp_types::{SemanticTokens, Url};
use nohash_hasher::IntMap;
-use parking_lot::{Mutex, RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
+use parking_lot::{
+MappedRwLockReadGuard, Mutex, RwLock, RwLockReadGuard, RwLockUpgradableReadGuard,
+RwLockWriteGuard,
+};
use proc_macro_api::ProcMacroServer;
use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
use rustc_hash::{FxHashMap, FxHashSet};
@@ -438,12 +441,16 @@ impl Drop for GlobalState {
}
impl GlobalStateSnapshot {
+fn vfs_read(&self) -> MappedRwLockReadGuard<'_, vfs::Vfs> {
+RwLockReadGuard::map(self.vfs.read(), |(it, _)| it)
+}
pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<FileId> {
-url_to_file_id(&self.vfs.read().0, url)
+url_to_file_id(&self.vfs_read(), url)
}
pub(crate) fn file_id_to_url(&self, id: FileId) -> Url {
-file_id_to_url(&self.vfs.read().0, id)
+file_id_to_url(&self.vfs_read(), id)
}
pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
@@ -459,7 +466,7 @@ impl GlobalStateSnapshot {
}
pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
-let mut base = self.vfs.read().0.file_path(path.anchor);
+let mut base = self.vfs_read().file_path(path.anchor);
base.pop();
let path = base.join(&path.path).unwrap();
let path = path.as_path().unwrap();
@@ -467,7 +474,7 @@ impl GlobalStateSnapshot {
}
pub(crate) fn file_id_to_file_path(&self, file_id: FileId) -> vfs::VfsPath {
-self.vfs.read().0.file_path(file_id)
+self.vfs_read().file_path(file_id)
}
pub(crate) fn cargo_target_for_crate_root(
@@ -475,7 +482,7 @@ impl GlobalStateSnapshot {
crate_id: CrateId,
) -> Option<(&CargoWorkspace, Target)> {
let file_id = self.analysis.crate_root(crate_id).ok()?;
-let path = self.vfs.read().0.file_path(file_id);
+let path = self.vfs_read().file_path(file_id);
let path = path.as_path()?;
self.workspaces.iter().find_map(|ws| match ws {
ProjectWorkspace::Cargo { cargo, .. } => {
@@ -487,7 +494,7 @@ impl GlobalStateSnapshot {
}
pub(crate) fn vfs_memory_usage(&self) -> usize {
-self.vfs.read().0.memory_usage()
+self.vfs_read().memory_usage()
}
}
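
The `GlobalStateSnapshot` changes are a separate cleanup in the same commit: the repeated `self.vfs.read().0` accesses are funneled through a `vfs_read` helper built on parking_lot's guard mapping, so callers only ever see the `Vfs` half of the tuple. A standalone illustration of that pattern (the `Vfs` and `Snapshot` types here are stand-ins, not the real ones):

use parking_lot::{MappedRwLockReadGuard, RwLock, RwLockReadGuard};

struct Vfs {
    files: Vec<String>,
}

struct Snapshot {
    // The real field pairs the VFS with change metadata; a bool stands in for it here.
    vfs: RwLock<(Vfs, bool)>,
}

impl Snapshot {
    // `RwLockReadGuard::map` narrows the guard from the tuple to its first field.
    fn vfs_read(&self) -> MappedRwLockReadGuard<'_, Vfs> {
        RwLockReadGuard::map(self.vfs.read(), |(vfs, _)| vfs)
    }
}

fn main() {
    let snapshot =
        Snapshot { vfs: RwLock::new((Vfs { files: vec!["main.rs".to_owned()] }, false)) };
    // The mapped guard derefs straight to `Vfs`.
    assert_eq!(snapshot.vfs_read().files.len(), 1);
}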

@@ -1,4 +1,5 @@
-//! Transforms markdown
+//! Transforms rust like doc content to markdown, replacing rustdoc fences and removing rustdoc code
+//! block comments.
use ide_db::rust_doc::is_rust_fence;
const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];