⬆️ rust-analyzer

Laurențiu Nicola 2022-08-30 14:51:24 +03:00
parent 31519bb394
commit 3e358a6827
74 changed files with 2091 additions and 951 deletions

Cargo.lock (generated)

@ -247,20 +247,6 @@ dependencies = [
"cfg-if", "cfg-if",
] ]
[[package]]
name = "crossbeam"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c"
dependencies = [
"cfg-if",
"crossbeam-channel",
"crossbeam-deque",
"crossbeam-epoch",
"crossbeam-queue",
"crossbeam-utils",
]
[[package]] [[package]]
name = "crossbeam-channel" name = "crossbeam-channel"
version = "0.5.6" version = "0.5.6"
@ -296,16 +282,6 @@ dependencies = [
"scopeguard", "scopeguard",
] ]
[[package]]
name = "crossbeam-queue"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cd42583b04998a5363558e5f9291ee5a5ff6b49944332103f251e7479a82aa7"
dependencies = [
"cfg-if",
"crossbeam-utils",
]
[[package]] [[package]]
name = "crossbeam-utils" name = "crossbeam-utils"
version = "0.8.11" version = "0.8.11"
@ -728,6 +704,7 @@ dependencies = [
"ide-db", "ide-db",
"itertools", "itertools",
"parser", "parser",
"stdx",
"syntax", "syntax",
"test-utils", "test-utils",
"text-edit", "text-edit",
@ -895,9 +872,9 @@ dependencies = [
[[package]] [[package]]
name = "lsp-types" name = "lsp-types"
version = "0.93.0" version = "0.93.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70c74e2173b2b31f8655d33724b4b45ac13f439386f66290f539c22b144c2212" checksum = "a3bcfee315dde785ba887edb540b08765fd7df75a7d948844be6bf5712246734"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"serde", "serde",
@ -1178,7 +1155,6 @@ dependencies = [
name = "proc-macro-srv" name = "proc-macro-srv"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"crossbeam",
"expect-test", "expect-test",
"libloading", "libloading",
"mbe", "mbe",
@ -1254,6 +1230,26 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "protobuf"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ee4a7d8b91800c8f167a6268d1a1026607368e1adc84e98fe044aeb905302f7"
dependencies = [
"once_cell",
"protobuf-support",
"thiserror",
]
[[package]]
name = "protobuf-support"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ca157fe12fc7ee2e315f2f735e27df41b3d97cdd70ea112824dac1ffb08ee1c"
dependencies = [
"thiserror",
]
[[package]] [[package]]
name = "pulldown-cmark" name = "pulldown-cmark"
version = "0.9.2" version = "0.9.2"
@ -1385,6 +1381,7 @@ dependencies = [
"project-model", "project-model",
"rayon", "rayon",
"rustc-hash", "rustc-hash",
"scip",
"serde", "serde",
"serde_json", "serde_json",
"sourcegen", "sourcegen",
@ -1471,6 +1468,15 @@ dependencies = [
"winapi-util", "winapi-util",
] ]
[[package]]
name = "scip"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2bfbb10286f69fad7c78db71004b7839bf957788359fe0c479f029f9849136b"
dependencies = [
"protobuf",
]
[[package]] [[package]]
name = "scoped-tls" name = "scoped-tls"
version = "1.0.0" version = "1.0.0"
@ -1656,6 +1662,26 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
[[package]]
name = "thiserror"
version = "1.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "thread_local" name = "thread_local"
version = "1.1.4" version = "1.1.4"
@ -1896,6 +1922,7 @@ dependencies = [
"indexmap", "indexmap",
"paths", "paths",
"rustc-hash", "rustc-hash",
"stdx",
] ]
[[package]] [[package]]


@ -9,10 +9,11 @@
use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc}; use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};
use cfg::CfgOptions; use cfg::CfgOptions;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::FxHashMap;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::SmolStr; use syntax::SmolStr;
use tt::Subtree; use tt::Subtree;
use vfs::{file_set::FileSet, FileId, VfsPath}; use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath};
/// Files are grouped into source roots. A source root is a directory on the /// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a /// file systems which is watched for changes. Typically it corresponds to a
@ -31,22 +32,30 @@ pub struct SourceRoot {
/// Libraries are considered mostly immutable, this assumption is used to /// Libraries are considered mostly immutable, this assumption is used to
/// optimize salsa's query structure /// optimize salsa's query structure
pub is_library: bool, pub is_library: bool,
pub(crate) file_set: FileSet, file_set: FileSet,
} }
impl SourceRoot { impl SourceRoot {
pub fn new_local(file_set: FileSet) -> SourceRoot { pub fn new_local(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: false, file_set } SourceRoot { is_library: false, file_set }
} }
pub fn new_library(file_set: FileSet) -> SourceRoot { pub fn new_library(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: true, file_set } SourceRoot { is_library: true, file_set }
} }
pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> { pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
self.file_set.path_for_file(file) self.file_set.path_for_file(file)
} }
pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> { pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
self.file_set.file_for_path(path) self.file_set.file_for_path(path)
} }
pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
self.file_set.resolve_path(path)
}
pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ { pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
self.file_set.iter() self.file_set.iter()
} }
@ -72,12 +81,19 @@ impl SourceRoot {
/// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization> /// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
#[derive(Debug, Clone, Default /* Serialize, Deserialize */)] #[derive(Debug, Clone, Default /* Serialize, Deserialize */)]
pub struct CrateGraph { pub struct CrateGraph {
arena: FxHashMap<CrateId, CrateData>, arena: NoHashHashMap<CrateId, CrateData>,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct CrateId(pub u32); pub struct CrateId(pub u32);
impl stdx::hash::NoHashHashable for CrateId {}
impl std::hash::Hash for CrateId {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CrateName(SmolStr); pub struct CrateName(SmolStr);
@ -342,7 +358,7 @@ impl CrateGraph {
// Check if adding a dep from `from` to `to` creates a cycle. To figure // Check if adding a dep from `from` to `to` creates a cycle. To figure
// that out, look for a path in the *opposite* direction, from `to` to // that out, look for a path in the *opposite* direction, from `to` to
// `from`. // `from`.
if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) { if let Some(path) = self.find_path(&mut NoHashHashSet::default(), dep.crate_id, from) {
let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect(); let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
let err = CyclicDependenciesError { path }; let err = CyclicDependenciesError { path };
assert!(err.from().0 == from && err.to().0 == dep.crate_id); assert!(err.from().0 == from && err.to().0 == dep.crate_id);
@ -365,7 +381,7 @@ impl CrateGraph {
/// including the crate itself. /// including the crate itself.
pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> { pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
let mut worklist = vec![of]; let mut worklist = vec![of];
let mut deps = FxHashSet::default(); let mut deps = NoHashHashSet::default();
while let Some(krate) = worklist.pop() { while let Some(krate) = worklist.pop() {
if !deps.insert(krate) { if !deps.insert(krate) {
@ -382,10 +398,10 @@ impl CrateGraph {
/// including the crate itself. /// including the crate itself.
pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> { pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
let mut worklist = vec![of]; let mut worklist = vec![of];
let mut rev_deps = FxHashSet::default(); let mut rev_deps = NoHashHashSet::default();
rev_deps.insert(of); rev_deps.insert(of);
let mut inverted_graph = FxHashMap::<_, Vec<_>>::default(); let mut inverted_graph = NoHashHashMap::<_, Vec<_>>::default();
self.arena.iter().for_each(|(&krate, data)| { self.arena.iter().for_each(|(&krate, data)| {
data.dependencies data.dependencies
.iter() .iter()
@ -409,7 +425,7 @@ impl CrateGraph {
/// come before the crate itself). /// come before the crate itself).
pub fn crates_in_topological_order(&self) -> Vec<CrateId> { pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
let mut res = Vec::new(); let mut res = Vec::new();
let mut visited = FxHashSet::default(); let mut visited = NoHashHashSet::default();
for krate in self.arena.keys().copied() { for krate in self.arena.keys().copied() {
go(self, &mut visited, &mut res, krate); go(self, &mut visited, &mut res, krate);
@ -419,7 +435,7 @@ impl CrateGraph {
fn go( fn go(
graph: &CrateGraph, graph: &CrateGraph,
visited: &mut FxHashSet<CrateId>, visited: &mut NoHashHashSet<CrateId>,
res: &mut Vec<CrateId>, res: &mut Vec<CrateId>,
source: CrateId, source: CrateId,
) { ) {
@ -459,7 +475,7 @@ impl CrateGraph {
fn find_path( fn find_path(
&self, &self,
visited: &mut FxHashSet<CrateId>, visited: &mut NoHashHashSet<CrateId>,
from: CrateId, from: CrateId,
to: CrateId, to: CrateId,
) -> Option<Vec<CrateId>> { ) -> Option<Vec<CrateId>> {
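The `stdx::hash` module that provides `NoHashHashMap`/`NoHashHashSet` is not shown in this commit, but the idea it captures is that keys like `CrateId` and `FileId` are already small, well-distributed integers, so hashing them is wasted work. A rough, purely illustrative sketch of such a map (all names below are assumptions, not the actual `stdx` code):

use std::collections::{HashMap, HashSet};
use std::hash::{BuildHasherDefault, Hasher};

// Illustrative only: a hasher that passes the integer key through unchanged.
#[derive(Default)]
struct NoHashHasher(u64);

impl Hasher for NoHashHasher {
    fn finish(&self) -> u64 {
        self.0
    }
    fn write(&mut self, _bytes: &[u8]) {
        unreachable!("only integer-like keys are supported");
    }
    fn write_u32(&mut self, value: u32) {
        self.0 = u64::from(value);
    }
    fn write_u64(&mut self, value: u64) {
        self.0 = value;
    }
}

type NoHashHashMap<K, V> = HashMap<K, V, BuildHasherDefault<NoHashHasher>>;
type NoHashHashSet<K> = HashSet<K, BuildHasherDefault<NoHashHasher>>;

This also explains the manual `Hash` impl for `CrateId` above: it feeds exactly one `write_u32` call into the hasher, which is presumably what the `NoHashHashable` marker trait promises.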


@ -8,7 +8,7 @@ pub mod fixture;
use std::{panic, sync::Arc}; use std::{panic, sync::Arc};
use rustc_hash::FxHashSet; use stdx::hash::NoHashHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize}; use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
pub use crate::{ pub use crate::{
@ -58,7 +58,7 @@ pub trait FileLoader {
/// Text of the file. /// Text of the file.
fn file_text(&self, file_id: FileId) -> Arc<String>; fn file_text(&self, file_id: FileId) -> Arc<String>;
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>; fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>; fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>>;
} }
/// Database which stores all significant input facts: source code and project /// Database which stores all significant input facts: source code and project
@ -94,10 +94,10 @@ pub trait SourceDatabaseExt: SourceDatabase {
#[salsa::input] #[salsa::input]
fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>; fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>; fn source_root_crates(&self, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>>;
} }
fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> { fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>> {
let graph = db.crate_graph(); let graph = db.crate_graph();
let res = graph let res = graph
.iter() .iter()
@ -120,10 +120,10 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
// FIXME: this *somehow* should be platform agnostic... // FIXME: this *somehow* should be platform agnostic...
let source_root = self.0.file_source_root(path.anchor); let source_root = self.0.file_source_root(path.anchor);
let source_root = self.0.source_root(source_root); let source_root = self.0.source_root(source_root);
source_root.file_set.resolve_path(path) source_root.resolve_path(path)
} }
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
let _p = profile::span("relevant_crates"); let _p = profile::span("relevant_crates");
let source_root = self.0.file_source_root(file_id); let source_root = self.0.file_source_root(file_id);
self.0.source_root_crates(source_root) self.0.source_root_crates(source_root)


@ -125,6 +125,7 @@ pub enum Progress {
DidCheckCrate(String), DidCheckCrate(String),
DidFinish(io::Result<()>), DidFinish(io::Result<()>),
DidCancel, DidCancel,
DidFailToRestart(String),
} }
enum Restart { enum Restart {
@ -193,10 +194,11 @@ impl FlycheckActor {
self.progress(Progress::DidStart); self.progress(Progress::DidStart);
} }
Err(error) => { Err(error) => {
tracing::error!( self.progress(Progress::DidFailToRestart(format!(
command = ?self.check_command(), "Failed to run the following command: {:?} error={}",
%error, "failed to restart flycheck" self.check_command(),
); error
)));
} }
} }
} }


@ -2,7 +2,7 @@
use std::sync::Arc; use std::sync::Arc;
use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, MacroCallId, MacroDefKind}; use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroDefKind};
use smallvec::SmallVec; use smallvec::SmallVec;
use syntax::ast; use syntax::ast;
@ -12,7 +12,10 @@ use crate::{
db::DefDatabase, db::DefDatabase,
intern::Interned, intern::Interned,
item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId}, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId},
nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap}, nameres::{
attr_resolution::ResolvedAttr, diagnostics::DefDiagnostic, proc_macro::ProcMacroKind,
DefMap,
},
type_ref::{TraitRef, TypeBound, TypeRef}, type_ref::{TraitRef, TypeBound, TypeRef},
visibility::RawVisibility, visibility::RawVisibility,
AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId, AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
@ -210,6 +213,13 @@ pub struct TraitData {
impl TraitData { impl TraitData {
pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> { pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
db.trait_data_with_diagnostics(tr).0
}
pub(crate) fn trait_data_with_diagnostics_query(
db: &dyn DefDatabase,
tr: TraitId,
) -> (Arc<TraitData>, Arc<Vec<DefDiagnostic>>) {
let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db); let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
let item_tree = tree_id.item_tree(db); let item_tree = tree_id.item_tree(db);
let tr_def = &item_tree[tree_id.value]; let tr_def = &item_tree[tree_id.value];
@ -229,17 +239,20 @@ impl TraitData {
let mut collector = let mut collector =
AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr)); AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items); collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
let (items, attribute_calls) = collector.finish(); let (items, attribute_calls, diagnostics) = collector.finish();
Arc::new(TraitData { (
name, Arc::new(TraitData {
attribute_calls, name,
items, attribute_calls,
is_auto, items,
is_unsafe, is_auto,
visibility, is_unsafe,
skip_array_during_method_dispatch, visibility,
}) skip_array_during_method_dispatch,
}),
Arc::new(diagnostics),
)
} }
pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ { pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
@ -280,7 +293,14 @@ pub struct ImplData {
impl ImplData { impl ImplData {
pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> { pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
let _p = profile::span("impl_data_query"); db.impl_data_with_diagnostics(id).0
}
pub(crate) fn impl_data_with_diagnostics_query(
db: &dyn DefDatabase,
id: ImplId,
) -> (Arc<ImplData>, Arc<Vec<DefDiagnostic>>) {
let _p = profile::span("impl_data_with_diagnostics_query");
let ItemLoc { container: module_id, id: tree_id } = id.lookup(db); let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
let item_tree = tree_id.item_tree(db); let item_tree = tree_id.item_tree(db);
@ -293,10 +313,13 @@ impl ImplData {
AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id)); AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items); collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);
let (items, attribute_calls) = collector.finish(); let (items, attribute_calls, diagnostics) = collector.finish();
let items = items.into_iter().map(|(_, item)| item).collect(); let items = items.into_iter().map(|(_, item)| item).collect();
Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }) (
Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }),
Arc::new(diagnostics),
)
} }
pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ { pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
@ -437,6 +460,7 @@ struct AssocItemCollector<'a> {
db: &'a dyn DefDatabase, db: &'a dyn DefDatabase,
module_id: ModuleId, module_id: ModuleId,
def_map: Arc<DefMap>, def_map: Arc<DefMap>,
inactive_diagnostics: Vec<DefDiagnostic>,
container: ItemContainerId, container: ItemContainerId,
expander: Expander, expander: Expander,
@ -459,15 +483,21 @@ impl<'a> AssocItemCollector<'a> {
expander: Expander::new(db, file_id, module_id), expander: Expander::new(db, file_id, module_id),
items: Vec::new(), items: Vec::new(),
attr_calls: Vec::new(), attr_calls: Vec::new(),
inactive_diagnostics: Vec::new(),
} }
} }
fn finish( fn finish(
self, self,
) -> (Vec<(Name, AssocItemId)>, Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>) { ) -> (
Vec<(Name, AssocItemId)>,
Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
Vec<DefDiagnostic>,
) {
( (
self.items, self.items,
if self.attr_calls.is_empty() { None } else { Some(Box::new(self.attr_calls)) }, if self.attr_calls.is_empty() { None } else { Some(Box::new(self.attr_calls)) },
self.inactive_diagnostics,
) )
} }
@ -479,6 +509,12 @@ impl<'a> AssocItemCollector<'a> {
'items: for &item in assoc_items { 'items: for &item in assoc_items {
let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into()); let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
if !attrs.is_cfg_enabled(self.expander.cfg_options()) { if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
self.inactive_diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
InFile::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()),
attrs.cfg().unwrap(),
self.expander.cfg_options().clone(),
));
continue; continue;
} }


@ -20,7 +20,7 @@ use crate::{
intern::Interned, intern::Interned,
item_tree::{AttrOwner, ItemTree}, item_tree::{AttrOwner, ItemTree},
lang_item::{LangItemTarget, LangItems}, lang_item::{LangItemTarget, LangItems},
nameres::DefMap, nameres::{diagnostics::DefDiagnostic, DefMap},
visibility::{self, Visibility}, visibility::{self, Visibility},
AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId, ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId,
@ -106,9 +106,16 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
#[salsa::invoke(ImplData::impl_data_query)] #[salsa::invoke(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>; fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
#[salsa::invoke(ImplData::impl_data_with_diagnostics_query)]
fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc<ImplData>, Arc<Vec<DefDiagnostic>>);
#[salsa::invoke(TraitData::trait_data_query)] #[salsa::invoke(TraitData::trait_data_query)]
fn trait_data(&self, e: TraitId) -> Arc<TraitData>; fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
#[salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
fn trait_data_with_diagnostics(&self, tr: TraitId)
-> (Arc<TraitData>, Arc<Vec<DefDiagnostic>>);
#[salsa::invoke(TypeAliasData::type_alias_data_query)] #[salsa::invoke(TypeAliasData::type_alias_data_query)]
fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>; fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;


@ -73,7 +73,7 @@ impl DefDiagnostic {
Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } } Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
} }
pub(super) fn unconfigured_code( pub fn unconfigured_code(
container: LocalModuleId, container: LocalModuleId,
ast: AstId<ast::Item>, ast: AstId<ast::Item>,
cfg: CfgExpr, cfg: CfgExpr,


@ -10,7 +10,7 @@ use base_db::{
SourceDatabase, Upcast, SourceDatabase, Upcast,
}; };
use hir_expand::{db::AstDatabase, InFile}; use hir_expand::{db::AstDatabase, InFile};
use rustc_hash::FxHashSet; use stdx::hash::NoHashHashSet;
use syntax::{algo, ast, AstNode}; use syntax::{algo, ast, AstNode};
use crate::{ use crate::{
@ -76,7 +76,7 @@ impl FileLoader for TestDB {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> { fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path) FileLoaderDelegate(self).resolve_path(path)
} }
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id) FileLoaderDelegate(self).relevant_crates(file_id)
} }
} }


@ -6,7 +6,7 @@
//! //!
//! This usually involves resolving names, collecting generic arguments etc. //! This usually involves resolving names, collecting generic arguments etc.
use std::{ use std::{
cell::{Cell, RefCell}, cell::{Cell, RefCell, RefMut},
iter, iter,
sync::Arc, sync::Arc,
}; };
@ -330,26 +330,26 @@ impl<'a> TyLoweringContext<'a> {
} }
} }
TypeRef::Macro(macro_call) => { TypeRef::Macro(macro_call) => {
let (expander, recursion_start) = { let (mut expander, recursion_start) = {
let mut expander = self.expander.borrow_mut(); match RefMut::filter_map(self.expander.borrow_mut(), Option::as_mut) {
if expander.is_some() { Ok(expander) => (expander, false),
(Some(expander), false) Err(expander) => (
} else { RefMut::map(expander, |it| {
*expander = Some(Expander::new( it.insert(Expander::new(
self.db.upcast(), self.db.upcast(),
macro_call.file_id, macro_call.file_id,
self.resolver.module(), self.resolver.module(),
)); ))
(Some(expander), true) }),
true,
),
} }
}; };
let ty = if let Some(mut expander) = expander { let ty = {
let expander_mut = expander.as_mut().unwrap();
let macro_call = macro_call.to_node(self.db.upcast()); let macro_call = macro_call.to_node(self.db.upcast());
match expander_mut.enter_expand::<ast::Type>(self.db.upcast(), macro_call) { match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
Ok(ExpandResult { value: Some((mark, expanded)), .. }) => { Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
let ctx = let ctx = LowerCtx::new(self.db.upcast(), expander.current_file_id());
LowerCtx::new(self.db.upcast(), expander_mut.current_file_id());
let type_ref = TypeRef::from_ast(&ctx, expanded); let type_ref = TypeRef::from_ast(&ctx, expanded);
drop(expander); drop(expander);
@ -364,8 +364,6 @@ impl<'a> TyLoweringContext<'a> {
} }
_ => None, _ => None,
} }
} else {
None
}; };
if recursion_start { if recursion_start {
*self.expander.borrow_mut() = None; *self.expander.borrow_mut() = None;
@ -479,7 +477,14 @@ impl<'a> TyLoweringContext<'a> {
TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into())) TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
} }
ParamLoweringMode::Variable => { ParamLoweringMode::Variable => {
let idx = generics.param_idx(param_id.into()).expect("matching generics"); let idx = match generics.param_idx(param_id.into()) {
None => {
never!("no matching generics");
return (TyKind::Error.intern(Interner), None);
}
Some(idx) => idx,
};
TyKind::BoundVar(BoundVar::new(self.in_binders, idx)) TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
} }
} }


@ -10,7 +10,7 @@ use base_db::{
}; };
use hir_def::{db::DefDatabase, ModuleId}; use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::db::AstDatabase; use hir_expand::db::AstDatabase;
use rustc_hash::{FxHashMap, FxHashSet}; use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::TextRange; use syntax::TextRange;
use test_utils::extract_annotations; use test_utils::extract_annotations;
@ -80,7 +80,7 @@ impl FileLoader for TestDB {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> { fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path) FileLoaderDelegate(self).resolve_path(path)
} }
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id) FileLoaderDelegate(self).relevant_crates(file_id)
} }
} }
@ -102,7 +102,7 @@ impl TestDB {
self.module_for_file_opt(file_id).unwrap() self.module_for_file_opt(file_id).unwrap()
} }
pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> { pub(crate) fn extract_annotations(&self) -> NoHashHashMap<FileId, Vec<(TextRange, String)>> {
let mut files = Vec::new(); let mut files = Vec::new();
let crate_graph = self.crate_graph(); let crate_graph = self.crate_graph();
for krate in crate_graph.iter() { for krate in crate_graph.iter() {


@ -1526,6 +1526,34 @@ unsafe impl Storage for InlineStorage {
); );
} }
#[test]
fn gat_crash_3() {
// FIXME: This test currently crashes rust analyzer in a debug build but not in a
// release build (i.e. for the user). With the assumption that tests will always be run
// in debug mode, we catch the unwind and expect that it panicked. See the
// [`crate::utils::generics`] function for more information.
cov_mark::check!(ignore_gats);
std::panic::catch_unwind(|| {
check_no_mismatches(
r#"
trait Collection {
type Item;
type Member<T>: Collection<Item = T>;
fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>;
}
struct ConstGen<T, const N: usize> {
data: [T; N],
}
impl<T, const N: usize> Collection for ConstGen<T, N> {
type Item = T;
type Member<U> = ConstGen<U, N>;
}
"#,
);
})
.expect_err("must panic");
}
#[test] #[test]
fn cfgd_out_self_param() { fn cfgd_out_self_param() {
cov_mark::check!(cfgd_out_self_param); cov_mark::check!(cfgd_out_self_param);


@ -176,10 +176,19 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def))); let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) { if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) {
let params = db.generic_params(def); let params = db.generic_params(def);
let parent_params = &parent_generics.as_ref().unwrap().params;
let has_consts = let has_consts =
params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_))); params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
return if has_consts { let parent_has_consts =
// XXX: treat const generic associated types as not existing to avoid crashes (#11769) parent_params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
return if has_consts || parent_has_consts {
// XXX: treat const generic associated types as not existing to avoid crashes
// (#11769)
//
// Note: Also crashes when the parent has const generics (also even if the GAT
// doesn't use them), see `tests::regression::gat_crash_3` for an example.
// Avoids that by disabling GATs when the parent (i.e. `impl` block) has
// const generics (#12193).
// //
// Chalk expects the inner associated type's parameters to come // Chalk expects the inner associated type's parameters to come
// *before*, not after the trait's generics as we've always done it. // *before*, not after the trait's generics as we've always done it.
@ -264,12 +273,8 @@ impl Generics {
fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> { fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> {
if param.parent == self.def { if param.parent == self.def {
let (idx, (_local_id, data)) = self let (idx, (_local_id, data)) =
.params self.params.iter().enumerate().find(|(_, (idx, _))| *idx == param.local_id)?;
.iter()
.enumerate()
.find(|(_, (idx, _))| *idx == param.local_id)
.unwrap();
let parent_len = self.parent_generics().map_or(0, Generics::len); let parent_len = self.parent_generics().map_or(0, Generics::len);
Some((parent_len + idx, data)) Some((parent_len + idx, data))
} else { } else {


@ -511,6 +511,7 @@ impl Module {
.collect() .collect()
} }
/// Fills `acc` with the module's diagnostics.
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) { pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
let _p = profile::span("Module::diagnostics").detail(|| { let _p = profile::span("Module::diagnostics").detail(|| {
format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string())) format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string()))
@ -531,11 +532,21 @@ impl Module {
m.diagnostics(db, acc) m.diagnostics(db, acc)
} }
} }
ModuleDef::Trait(t) => {
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
acc.extend(decl.diagnostics(db))
}
_ => acc.extend(decl.diagnostics(db)), _ => acc.extend(decl.diagnostics(db)),
} }
} }
for impl_def in self.impl_defs(db) { for impl_def in self.impl_defs(db) {
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
for item in impl_def.items(db) { for item in impl_def.items(db) {
let def: DefWithBody = match item { let def: DefWithBody = match item {
AssocItem::Function(it) => it.into(), AssocItem::Function(it) => it.into(),


@ -171,6 +171,25 @@ fn collect_used_generics<'gp>(
ast::Type::RefType(ref_) => generics.extend( ast::Type::RefType(ref_) => generics.extend(
ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))), ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
), ),
ast::Type::ArrayType(ar) => {
if let Some(expr) = ar.expr() {
if let ast::Expr::PathExpr(p) = expr {
if let Some(path) = p.path() {
if let Some(name_ref) = path.as_single_name_ref() {
if let Some(param) = known_generics.iter().find(|gp| {
if let ast::GenericParam::ConstParam(cp) = gp {
cp.name().map_or(false, |n| n.text() == name_ref.text())
} else {
false
}
}) {
generics.push(param);
}
}
}
}
}
}
_ => (), _ => (),
}); });
// stable resort to lifetime, type, const // stable resort to lifetime, type, const
@ -357,4 +376,29 @@ impl<'outer, Outer, const OUTER: usize> () {
"#, "#,
); );
} }
#[test]
fn issue_11197() {
check_assist(
extract_type_alias,
r#"
struct Foo<T, const N: usize>
where
[T; N]: Sized,
{
arr: $0[T; N]$0,
}
"#,
r#"
type $0Type<T, const N: usize> = [T; N];
struct Foo<T, const N: usize>
where
[T; N]: Sized,
{
arr: Type<T, N>,
}
"#,
);
}
} }


@ -311,12 +311,16 @@ fn inline(
} else { } else {
fn_body.clone_for_update() fn_body.clone_for_update()
}; };
if let Some(t) = body.syntax().ancestors().find_map(ast::Impl::cast).and_then(|i| i.self_ty()) { if let Some(imp) = body.syntax().ancestors().find_map(ast::Impl::cast) {
body.syntax() if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) {
.descendants_with_tokens() if let Some(t) = imp.self_ty() {
.filter_map(NodeOrToken::into_token) body.syntax()
.filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) .descendants_with_tokens()
.for_each(|tok| ted::replace(tok, t.syntax())); .filter_map(NodeOrToken::into_token)
.filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
.for_each(|tok| ted::replace(tok, t.syntax()));
}
}
} }
let usages_for_locals = |local| { let usages_for_locals = |local| {
Definition::Local(local) Definition::Local(local)
@ -1221,6 +1225,31 @@ impl A {
fn main() { fn main() {
A(114514); A(114514);
} }
"#,
)
}
#[test]
fn inline_call_with_self_type_but_within_same_impl() {
check_assist(
inline_call,
r#"
struct A(u32);
impl A {
fn f() -> Self { Self(1919810) }
fn main() {
Self::f$0();
}
}
"#,
r#"
struct A(u32);
impl A {
fn f() -> Self { Self(1919810) }
fn main() {
Self(1919810);
}
}
"#, "#,
) )
} }


@ -64,8 +64,11 @@ pub(crate) struct PathCompletionCtx {
pub(super) qualified: Qualified, pub(super) qualified: Qualified,
/// The parent of the path we are completing. /// The parent of the path we are completing.
pub(super) parent: Option<ast::Path>, pub(super) parent: Option<ast::Path>,
#[allow(dead_code)]
/// The path of which we are completing the segment /// The path of which we are completing the segment
pub(super) path: ast::Path, pub(super) path: ast::Path,
/// The path of which we are completing the segment in the original file
pub(crate) original_path: Option<ast::Path>,
pub(super) kind: PathKind, pub(super) kind: PathKind,
/// Whether the path segment has type args or not. /// Whether the path segment has type args or not.
pub(super) has_type_args: bool, pub(super) has_type_args: bool,


@ -588,12 +588,15 @@ impl<'a> CompletionContext<'a> {
}; };
let path = segment.parent_path(); let path = segment.parent_path();
let original_path = find_node_in_file_compensated(sema, original_file, &path);
let mut path_ctx = PathCompletionCtx { let mut path_ctx = PathCompletionCtx {
has_call_parens: false, has_call_parens: false,
has_macro_bang: false, has_macro_bang: false,
qualified: Qualified::No, qualified: Qualified::No,
parent: None, parent: None,
path: path.clone(), path: path.clone(),
original_path,
kind: PathKind::Item { kind: ItemListKind::SourceFile }, kind: PathKind::Item { kind: ItemListKind::SourceFile },
has_type_args: false, has_type_args: false,
use_tree_parent: false, use_tree_parent: false,


@ -323,9 +323,7 @@ fn render_resolution_path(
..CompletionRelevance::default() ..CompletionRelevance::default()
}); });
if let Some(ref_match) = compute_ref_match(completion, &ty) { path_ref_match(completion, path_ctx, &ty, &mut item);
item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
}
}; };
item item
} }
@ -453,6 +451,29 @@ fn compute_ref_match(
None None
} }
fn path_ref_match(
completion: &CompletionContext<'_>,
path_ctx: &PathCompletionCtx,
ty: &hir::Type,
item: &mut Builder,
) {
if let Some(original_path) = &path_ctx.original_path {
// At least one char was typed by the user already, in that case look for the original path
if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) {
if let Some(ref_match) = compute_ref_match(completion, ty) {
item.ref_match(ref_match, original_path.syntax().text_range().start());
}
}
} else {
// completion requested on an empty identifier, there is no path here yet.
// FIXME: This might create inconsistent completions where we show a ref match in macro inputs
// as long as nothing was typed yet
if let Some(ref_match) = compute_ref_match(completion, ty) {
item.ref_match(ref_match, completion.position.offset);
}
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::cmp; use std::cmp;


@ -79,18 +79,18 @@ fn render(
..ctx.completion_relevance() ..ctx.completion_relevance()
}); });
if let Some(ref_match) = compute_ref_match(completion, &ret_type) { match func_kind {
match func_kind { FuncKind::Function(path_ctx) => {
FuncKind::Function(path_ctx) => { super::path_ref_match(completion, path_ctx, &ret_type, &mut item);
item.ref_match(ref_match, path_ctx.path.syntax().text_range().start()); }
} FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => { if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) { if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
item.ref_match(ref_match, original_expr.syntax().text_range().start()); item.ref_match(ref_match, original_expr.syntax().text_range().start());
} }
} }
_ => (),
} }
_ => (),
} }
item.set_documentation(ctx.docs(func)) item.set_documentation(ctx.docs(func))


@ -2,13 +2,12 @@
use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind}; use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind};
use ide_db::SymbolKind; use ide_db::SymbolKind;
use syntax::AstNode;
use crate::{ use crate::{
context::{CompletionContext, PathCompletionCtx, PathKind}, context::{CompletionContext, PathCompletionCtx, PathKind},
item::{Builder, CompletionItem}, item::{Builder, CompletionItem},
render::{ render::{
compute_ref_match, compute_type_match, compute_type_match,
variant::{ variant::{
format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit, format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit,
visible_fields, RenderedLiteral, visible_fields, RenderedLiteral,
@ -125,9 +124,8 @@ fn render(
type_match: compute_type_match(ctx.completion, &ty), type_match: compute_type_match(ctx.completion, &ty),
..ctx.completion_relevance() ..ctx.completion_relevance()
}); });
if let Some(ref_match) = compute_ref_match(completion, &ty) {
item.ref_match(ref_match, path_ctx.path.syntax().text_range().start()); super::path_ref_match(completion, path_ctx, &ty, &mut item);
}
if let Some(import_to_add) = ctx.import_to_add { if let Some(import_to_add) = ctx.import_to_add {
item.add_import(import_to_add); item.add_import(import_to_add);


@ -52,6 +52,7 @@ use hir::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
symbols::FileSymbolKind, symbols::FileSymbolKind,
}; };
use stdx::hash::NoHashHashSet;
use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase}; use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
@ -118,7 +119,7 @@ impl FileLoader for RootDatabase {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> { fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path) FileLoaderDelegate(self).resolve_path(path)
} }
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id) FileLoaderDelegate(self).relevant_crates(file_id)
} }
} }


@ -2,7 +2,7 @@
//! representation. //! representation.
use std::{iter, mem}; use std::{iter, mem};
use rustc_hash::FxHashMap; use stdx::hash::NoHashHashMap;
use syntax::{TextRange, TextSize}; use syntax::{TextRange, TextSize};
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
@ -10,7 +10,7 @@ pub struct LineIndex {
/// Offset the the beginning of each line, zero-based /// Offset the the beginning of each line, zero-based
pub(crate) newlines: Vec<TextSize>, pub(crate) newlines: Vec<TextSize>,
/// List of non-ASCII characters on each line /// List of non-ASCII characters on each line
pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>, pub(crate) utf16_lines: NoHashHashMap<u32, Vec<Utf16Char>>,
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@ -55,7 +55,7 @@ impl Utf16Char {
impl LineIndex { impl LineIndex {
pub fn new(text: &str) -> LineIndex { pub fn new(text: &str) -> LineIndex {
let mut utf16_lines = FxHashMap::default(); let mut utf16_lines = NoHashHashMap::default();
let mut utf16_chars = Vec::new(); let mut utf16_chars = Vec::new();
let mut newlines = vec![0.into()]; let mut newlines = vec![0.into()];


@ -9,7 +9,7 @@ use std::{mem, sync::Arc};
use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility}; use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
use once_cell::unsync::Lazy; use once_cell::unsync::Lazy;
use rustc_hash::FxHashMap; use stdx::hash::NoHashHashMap;
use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
use crate::{ use crate::{
@ -20,7 +20,7 @@ use crate::{
#[derive(Debug, Default, Clone)] #[derive(Debug, Default, Clone)]
pub struct UsageSearchResult { pub struct UsageSearchResult {
pub references: FxHashMap<FileId, Vec<FileReference>>, pub references: NoHashHashMap<FileId, Vec<FileReference>>,
} }
impl UsageSearchResult { impl UsageSearchResult {
@ -45,7 +45,7 @@ impl UsageSearchResult {
impl IntoIterator for UsageSearchResult { impl IntoIterator for UsageSearchResult {
type Item = (FileId, Vec<FileReference>); type Item = (FileId, Vec<FileReference>);
type IntoIter = <FxHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter; type IntoIter = <NoHashHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter { fn into_iter(self) -> Self::IntoIter {
self.references.into_iter() self.references.into_iter()
@ -78,17 +78,17 @@ pub enum ReferenceCategory {
/// e.g. for things like local variables. /// e.g. for things like local variables.
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct SearchScope { pub struct SearchScope {
entries: FxHashMap<FileId, Option<TextRange>>, entries: NoHashHashMap<FileId, Option<TextRange>>,
} }
impl SearchScope { impl SearchScope {
fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope { fn new(entries: NoHashHashMap<FileId, Option<TextRange>>) -> SearchScope {
SearchScope { entries } SearchScope { entries }
} }
/// Build a search scope spanning the entire crate graph of files. /// Build a search scope spanning the entire crate graph of files.
fn crate_graph(db: &RootDatabase) -> SearchScope { fn crate_graph(db: &RootDatabase) -> SearchScope {
let mut entries = FxHashMap::default(); let mut entries = NoHashHashMap::default();
let graph = db.crate_graph(); let graph = db.crate_graph();
for krate in graph.iter() { for krate in graph.iter() {
@ -102,7 +102,7 @@ impl SearchScope {
/// Build a search scope spanning all the reverse dependencies of the given crate. /// Build a search scope spanning all the reverse dependencies of the given crate.
fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope { fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
let mut entries = FxHashMap::default(); let mut entries = NoHashHashMap::default();
for rev_dep in of.transitive_reverse_dependencies(db) { for rev_dep in of.transitive_reverse_dependencies(db) {
let root_file = rev_dep.root_file(db); let root_file = rev_dep.root_file(db);
let source_root_id = db.file_source_root(root_file); let source_root_id = db.file_source_root(root_file);
@ -117,14 +117,12 @@ impl SearchScope {
let root_file = of.root_file(db); let root_file = of.root_file(db);
let source_root_id = db.file_source_root(root_file); let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id); let source_root = db.source_root(source_root_id);
SearchScope { SearchScope { entries: source_root.iter().map(|id| (id, None)).collect() }
entries: source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>(),
}
} }
/// Build a search scope spanning the given module and all its submodules. /// Build a search scope spanning the given module and all its submodules.
fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope { fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
let mut entries = FxHashMap::default(); let mut entries = NoHashHashMap::default();
let (file_id, range) = { let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db); let InFile { file_id, value } = module.definition_source(db);
@ -157,7 +155,7 @@ impl SearchScope {
/// Build an empty search scope. /// Build an empty search scope.
pub fn empty() -> SearchScope { pub fn empty() -> SearchScope {
SearchScope::new(FxHashMap::default()) SearchScope::new(NoHashHashMap::default())
} }
/// Build a empty search scope spanning the given file. /// Build a empty search scope spanning the given file.


@ -6,8 +6,7 @@
use std::{collections::hash_map::Entry, iter, mem}; use std::{collections::hash_map::Entry, iter, mem};
use base_db::{AnchoredPathBuf, FileId}; use base_db::{AnchoredPathBuf, FileId};
use rustc_hash::FxHashMap; use stdx::{hash::NoHashHashMap, never};
use stdx::never;
use syntax::{algo, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize}; use syntax::{algo, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize};
use text_edit::{TextEdit, TextEditBuilder}; use text_edit::{TextEdit, TextEditBuilder};
@ -15,7 +14,7 @@ use crate::SnippetCap;
#[derive(Default, Debug, Clone)] #[derive(Default, Debug, Clone)]
pub struct SourceChange { pub struct SourceChange {
pub source_file_edits: FxHashMap<FileId, TextEdit>, pub source_file_edits: NoHashHashMap<FileId, TextEdit>,
pub file_system_edits: Vec<FileSystemEdit>, pub file_system_edits: Vec<FileSystemEdit>,
pub is_snippet: bool, pub is_snippet: bool,
} }
@ -24,7 +23,7 @@ impl SourceChange {
/// Creates a new SourceChange with the given label /// Creates a new SourceChange with the given label
/// from the edits. /// from the edits.
pub fn from_edits( pub fn from_edits(
source_file_edits: FxHashMap<FileId, TextEdit>, source_file_edits: NoHashHashMap<FileId, TextEdit>,
file_system_edits: Vec<FileSystemEdit>, file_system_edits: Vec<FileSystemEdit>,
) -> Self { ) -> Self {
SourceChange { source_file_edits, file_system_edits, is_snippet: false } SourceChange { source_file_edits, file_system_edits, is_snippet: false }
@ -78,8 +77,8 @@ impl Extend<FileSystemEdit> for SourceChange {
} }
} }
impl From<FxHashMap<FileId, TextEdit>> for SourceChange { impl From<NoHashHashMap<FileId, TextEdit>> for SourceChange {
fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange { fn from(source_file_edits: NoHashHashMap<FileId, TextEdit>) -> SourceChange {
SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false } SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
} }
} }


@ -106,18 +106,17 @@ fn f() {
#[test] #[test]
fn inactive_assoc_item() { fn inactive_assoc_item() {
// FIXME these currently don't work, hence the *
check( check(
r#" r#"
struct Foo; struct Foo;
impl Foo { impl Foo {
#[cfg(any())] pub fn f() {} #[cfg(any())] pub fn f() {}
//*************************** weak: code is inactive due to #[cfg] directives //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
} }
trait Bar { trait Bar {
#[cfg(any())] pub fn f() {} #[cfg(any())] pub fn f() {}
//*************************** weak: code is inactive due to #[cfg] directives //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
} }
"#, "#,
); );


@ -20,6 +20,7 @@ parser = { path = "../parser", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" }
ide-db = { path = "../ide-db", version = "0.0.0" } ide-db = { path = "../ide-db", version = "0.0.0" }
hir = { path = "../hir", version = "0.0.0" } hir = { path = "../hir", version = "0.0.0" }
stdx = { path = "../stdx", version = "0.0.0" }
[dev-dependencies] [dev-dependencies]
test-utils = { path = "../test-utils" } test-utils = { path = "../test-utils" }


@ -86,11 +86,9 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc
use crate::{errors::bail, matching::MatchFailureReason}; use crate::{errors::bail, matching::MatchFailureReason};
use hir::Semantics; use hir::Semantics;
use ide_db::{ use ide_db::base_db::{FileId, FilePosition, FileRange};
base_db::{FileId, FilePosition, FileRange},
FxHashMap,
};
use resolving::ResolvedRule; use resolving::ResolvedRule;
use stdx::hash::NoHashHashMap;
use syntax::{ast, AstNode, SyntaxNode, TextRange}; use syntax::{ast, AstNode, SyntaxNode, TextRange};
use text_edit::TextEdit; use text_edit::TextEdit;
@ -170,9 +168,9 @@ impl<'db> MatchFinder<'db> {
} }
/// Finds matches for all added rules and returns edits for all found matches. /// Finds matches for all added rules and returns edits for all found matches.
pub fn edits(&self) -> FxHashMap<FileId, TextEdit> { pub fn edits(&self) -> NoHashHashMap<FileId, TextEdit> {
use ide_db::base_db::SourceDatabaseExt; use ide_db::base_db::SourceDatabaseExt;
let mut matches_by_file = FxHashMap::default(); let mut matches_by_file = NoHashHashMap::default();
for m in self.matches().matches { for m in self.matches().matches {
matches_by_file matches_by_file
.entry(m.range.file_id) .entry(m.range.file_id)


@ -184,10 +184,10 @@ pub(crate) fn resolve_doc_path_for_def(
Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns), Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
Definition::Macro(it) => it.resolve_doc_path(db, link, ns), Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
Definition::Field(it) => it.resolve_doc_path(db, link, ns), Definition::Field(it) => it.resolve_doc_path(db, link, ns),
Definition::SelfType(it) => it.resolve_doc_path(db, link, ns),
Definition::BuiltinAttr(_) Definition::BuiltinAttr(_)
| Definition::ToolModule(_) | Definition::ToolModule(_)
| Definition::BuiltinType(_) | Definition::BuiltinType(_)
| Definition::SelfType(_)
| Definition::Local(_) | Definition::Local(_)
| Definition::GenericParam(_) | Definition::GenericParam(_)
| Definition::Label(_) | Definition::Label(_)


@ -87,7 +87,7 @@ pub use crate::{
}, },
join_lines::JoinLinesConfig, join_lines::JoinLinesConfig,
markup::Markup, markup::Markup,
moniker::{MonikerKind, MonikerResult, PackageInformation}, moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation},
move_item::Direction, move_item::Direction,
navigation_target::NavigationTarget, navigation_target::NavigationTarget,
prime_caches::ParallelPrimeCachesProgress, prime_caches::ParallelPrimeCachesProgress,
@ -98,7 +98,7 @@ pub use crate::{
static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData}, static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
syntax_highlighting::{ syntax_highlighting::{
tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag}, tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
HlRange, HighlightConfig, HlRange,
}, },
}; };
pub use hir::{Documentation, Semantics}; pub use hir::{Documentation, Semantics};
@ -517,8 +517,12 @@ impl Analysis {
} }
/// Computes syntax highlighting for the given file /// Computes syntax highlighting for the given file
pub fn highlight(&self, file_id: FileId) -> Cancellable<Vec<HlRange>> { pub fn highlight(
self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false)) &self,
highlight_config: HighlightConfig,
file_id: FileId,
) -> Cancellable<Vec<HlRange>> {
self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None))
} }
/// Computes all ranges to highlight for a given item in a file. /// Computes all ranges to highlight for a given item in a file.
@ -533,9 +537,13 @@ impl Analysis {
} }
/// Computes syntax highlighting for the given file range. /// Computes syntax highlighting for the given file range.
pub fn highlight_range(&self, frange: FileRange) -> Cancellable<Vec<HlRange>> { pub fn highlight_range(
&self,
highlight_config: HighlightConfig,
frange: FileRange,
) -> Cancellable<Vec<HlRange>> {
self.with_db(|db| { self.with_db(|db| {
syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false) syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range))
}) })
} }


@ -13,17 +13,39 @@ use syntax::{AstNode, SyntaxKind::*, T};
use crate::{doc_links::token_as_doc_comment, RangeInfo}; use crate::{doc_links::token_as_doc_comment, RangeInfo};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum MonikerDescriptorKind {
Namespace,
Type,
Term,
Method,
TypeParameter,
Parameter,
Macro,
Meta,
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MonikerDescriptor {
pub name: Name,
pub desc: MonikerDescriptorKind,
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MonikerIdentifier { pub struct MonikerIdentifier {
crate_name: String, pub crate_name: String,
path: Vec<Name>, pub description: Vec<MonikerDescriptor>,
} }
impl ToString for MonikerIdentifier { impl ToString for MonikerIdentifier {
fn to_string(&self) -> String { fn to_string(&self) -> String {
match self { match self {
MonikerIdentifier { path, crate_name } => { MonikerIdentifier { description, crate_name } => {
format!("{}::{}", crate_name, path.iter().map(|x| x.to_string()).join("::")) format!(
"{}::{}",
crate_name,
description.iter().map(|x| x.name.to_string()).join("::")
)
} }
} }
} }
@ -42,6 +64,12 @@ pub struct MonikerResult {
pub package_information: PackageInformation, pub package_information: PackageInformation,
} }
impl MonikerResult {
pub fn from_def(db: &RootDatabase, def: Definition, from_crate: Crate) -> Option<Self> {
def_to_moniker(db, def, from_crate)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct PackageInformation { pub struct PackageInformation {
pub name: String, pub name: String,
@ -105,13 +133,23 @@ pub(crate) fn def_to_moniker(
def: Definition, def: Definition,
from_crate: Crate, from_crate: Crate,
) -> Option<MonikerResult> { ) -> Option<MonikerResult> {
if matches!(def, Definition::GenericParam(_) | Definition::SelfType(_) | Definition::Local(_)) { if matches!(
def,
Definition::GenericParam(_)
| Definition::Label(_)
| Definition::DeriveHelper(_)
| Definition::BuiltinAttr(_)
| Definition::ToolModule(_)
) {
return None; return None;
} }
let module = def.module(db)?; let module = def.module(db)?;
let krate = module.krate(); let krate = module.krate();
let mut path = vec![]; let mut description = vec![];
path.extend(module.path_to_root(db).into_iter().filter_map(|x| x.name(db))); description.extend(module.path_to_root(db).into_iter().filter_map(|x| {
Some(MonikerDescriptor { name: x.name(db)?, desc: MonikerDescriptorKind::Namespace })
}));
// Handle associated items within a trait // Handle associated items within a trait
if let Some(assoc) = def.as_assoc_item(db) { if let Some(assoc) = def.as_assoc_item(db) {
@ -120,31 +158,98 @@ pub(crate) fn def_to_moniker(
AssocItemContainer::Trait(trait_) => { AssocItemContainer::Trait(trait_) => {
// Because different traits can have functions with the same name, // Because different traits can have functions with the same name,
// we have to include the trait name as part of the moniker for uniqueness. // we have to include the trait name as part of the moniker for uniqueness.
path.push(trait_.name(db)); description.push(MonikerDescriptor {
name: trait_.name(db),
desc: MonikerDescriptorKind::Type,
});
} }
AssocItemContainer::Impl(impl_) => { AssocItemContainer::Impl(impl_) => {
// Because a struct can implement multiple traits, for implementations // Because a struct can implement multiple traits, for implementations
// we add both the struct name and the trait name to the path // we add both the struct name and the trait name to the path
if let Some(adt) = impl_.self_ty(db).as_adt() { if let Some(adt) = impl_.self_ty(db).as_adt() {
path.push(adt.name(db)); description.push(MonikerDescriptor {
name: adt.name(db),
desc: MonikerDescriptorKind::Type,
});
} }
if let Some(trait_) = impl_.trait_(db) { if let Some(trait_) = impl_.trait_(db) {
path.push(trait_.name(db)); description.push(MonikerDescriptor {
name: trait_.name(db),
desc: MonikerDescriptorKind::Type,
});
} }
} }
} }
} }
if let Definition::Field(it) = def { if let Definition::Field(it) = def {
path.push(it.parent_def(db).name(db)); description.push(MonikerDescriptor {
name: it.parent_def(db).name(db),
desc: MonikerDescriptorKind::Type,
});
} }
path.push(def.name(db)?); let name_desc = match def {
// These are handled by top-level guard (for performance).
Definition::GenericParam(_)
| Definition::Label(_)
| Definition::DeriveHelper(_)
| Definition::BuiltinAttr(_)
| Definition::ToolModule(_) => return None,
Definition::Local(local) => {
if !local.is_param(db) {
return None;
}
MonikerDescriptor { name: local.name(db), desc: MonikerDescriptorKind::Parameter }
}
Definition::Macro(m) => {
MonikerDescriptor { name: m.name(db), desc: MonikerDescriptorKind::Macro }
}
Definition::Function(f) => {
MonikerDescriptor { name: f.name(db), desc: MonikerDescriptorKind::Method }
}
Definition::Variant(v) => {
MonikerDescriptor { name: v.name(db), desc: MonikerDescriptorKind::Type }
}
Definition::Const(c) => {
MonikerDescriptor { name: c.name(db)?, desc: MonikerDescriptorKind::Term }
}
Definition::Trait(trait_) => {
MonikerDescriptor { name: trait_.name(db), desc: MonikerDescriptorKind::Type }
}
Definition::TypeAlias(ta) => {
MonikerDescriptor { name: ta.name(db), desc: MonikerDescriptorKind::TypeParameter }
}
Definition::Module(m) => {
MonikerDescriptor { name: m.name(db)?, desc: MonikerDescriptorKind::Namespace }
}
Definition::BuiltinType(b) => {
MonikerDescriptor { name: b.name(), desc: MonikerDescriptorKind::Type }
}
Definition::SelfType(imp) => MonikerDescriptor {
name: imp.self_ty(db).as_adt()?.name(db),
desc: MonikerDescriptorKind::Type,
},
Definition::Field(it) => {
MonikerDescriptor { name: it.name(db), desc: MonikerDescriptorKind::Term }
}
Definition::Adt(adt) => {
MonikerDescriptor { name: adt.name(db), desc: MonikerDescriptorKind::Type }
}
Definition::Static(s) => {
MonikerDescriptor { name: s.name(db), desc: MonikerDescriptorKind::Meta }
}
};
description.push(name_desc);
Some(MonikerResult { Some(MonikerResult {
identifier: MonikerIdentifier { identifier: MonikerIdentifier {
crate_name: krate.display_name(db)?.crate_name().to_string(), crate_name: krate.display_name(db)?.crate_name().to_string(),
path, description,
}, },
kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import }, kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
package_information: { package_information: {
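
Putting the pieces together: for an associated function `func` declared on `MyTrait` inside `pub mod module` of crate `foo`, `def_to_moniker` now builds roughly the following description (illustrative; it lines up with the `module/MyTrait#func().` expectation in the SCIP tests added later in this commit):

    // MonikerDescriptor { name: "module",  desc: MonikerDescriptorKind::Namespace }
    // MonikerDescriptor { name: "MyTrait", desc: MonikerDescriptorKind::Type }
    // MonikerDescriptor { name: "func",    desc: MonikerDescriptorKind::Method }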

View file

@ -12,8 +12,9 @@ use ide_db::{
salsa::{Database, ParallelDatabase, Snapshot}, salsa::{Database, ParallelDatabase, Snapshot},
Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt, Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
}, },
FxHashSet, FxIndexMap, FxIndexMap,
}; };
use stdx::hash::NoHashHashSet;
use crate::RootDatabase; use crate::RootDatabase;
@ -141,7 +142,7 @@ pub(crate) fn parallel_prime_caches(
} }
} }
fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> { fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> NoHashHashSet<CrateId> {
// We're only interested in the workspace crates and the `ImportMap`s of their direct // We're only interested in the workspace crates and the `ImportMap`s of their direct
// dependencies, though in practice the latter also compute the `DefMap`s. // dependencies, though in practice the latter also compute the `DefMap`s.
// We don't prime transitive dependencies because they're generally not visible in // We don't prime transitive dependencies because they're generally not visible in

View file

@ -14,8 +14,9 @@ use ide_db::{
base_db::FileId, base_db::FileId,
defs::{Definition, NameClass, NameRefClass}, defs::{Definition, NameClass, NameRefClass},
search::{ReferenceCategory, SearchScope, UsageSearchResult}, search::{ReferenceCategory, SearchScope, UsageSearchResult},
FxHashMap, RootDatabase, RootDatabase,
}; };
use stdx::hash::NoHashHashMap;
use syntax::{ use syntax::{
algo::find_node_at_offset, algo::find_node_at_offset,
ast::{self, HasName}, ast::{self, HasName},
@ -29,7 +30,7 @@ use crate::{FilePosition, NavigationTarget, TryToNav};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ReferenceSearchResult { pub struct ReferenceSearchResult {
pub declaration: Option<Declaration>, pub declaration: Option<Declaration>,
pub references: FxHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>, pub references: NoHashHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
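
Several maps and sets keyed by `FileId` or `CrateId` switch from `FxHashMap`/`FxHashSet` to `stdx::hash::NoHashHashMap`/`NoHashHashSet` in this commit. The idea is that these keys are already small, unique integers, so running them through a hash function is wasted work. The `stdx` implementation itself is not part of this excerpt; a self-contained sketch of the technique could look like this (names are illustrative, not the actual `stdx` API):

    use std::collections::HashMap;
    use std::hash::{BuildHasherDefault, Hasher};

    /// A hasher that passes a single integer key through unchanged.
    #[derive(Default)]
    struct NoHashHasher(u64);

    impl Hasher for NoHashHasher {
        fn finish(&self) -> u64 {
            self.0
        }
        fn write(&mut self, _bytes: &[u8]) {
            unreachable!("only integer keys are supported");
        }
        fn write_u32(&mut self, n: u32) {
            self.0 = n as u64; // FileId/CrateId wrap a u32
        }
    }

    type NoHashHashMap<K, V> = HashMap<K, V, BuildHasherDefault<NoHashHasher>>;

    fn main() {
        let mut map: NoHashHashMap<u32, &str> = NoHashHashMap::default();
        map.insert(1, "crates/ide/src/lib.rs");
        assert_eq!(map.get(&1), Some(&"crates/ide/src/lib.rs"));
    }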

View file

@ -14,7 +14,7 @@ mod html;
mod tests; mod tests;
use hir::{Name, Semantics}; use hir::{Name, Semantics};
use ide_db::{FxHashMap, RootDatabase}; use ide_db::{FxHashMap, RootDatabase, SymbolKind};
use syntax::{ use syntax::{
ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T, ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T,
}; };
@ -24,7 +24,7 @@ use crate::{
escape::highlight_escape_string, format::highlight_format_string, highlights::Highlights, escape::highlight_escape_string, format::highlight_format_string, highlights::Highlights,
macro_::MacroHighlighter, tags::Highlight, macro_::MacroHighlighter, tags::Highlight,
}, },
FileId, HlMod, HlTag, FileId, HlMod, HlOperator, HlPunct, HlTag,
}; };
pub(crate) use html::highlight_as_html; pub(crate) use html::highlight_as_html;
@ -36,6 +36,26 @@ pub struct HlRange {
pub binding_hash: Option<u64>, pub binding_hash: Option<u64>,
} }
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct HighlightConfig {
/// Whether to highlight strings
pub strings: bool,
/// Whether to highlight punctuation
pub punctuation: bool,
/// Whether to specialize punctuation highlights
pub specialize_punctuation: bool,
/// Whether to highlight operators
pub operator: bool,
/// Whether to specialize operator highlights
pub specialize_operator: bool,
/// Whether to inject highlights into doc comments
pub inject_doc_comment: bool,
/// Whether to highlight the macro call bang
pub macro_bang: bool,
/// Whether to highlight unresolved things by their syntax
pub syntactic_name_ref_highlighting: bool,
}
// Feature: Semantic Syntax Highlighting // Feature: Semantic Syntax Highlighting
// //
// rust-analyzer highlights the code semantically. // rust-analyzer highlights the code semantically.
@ -155,9 +175,9 @@ pub struct HlRange {
// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[] // image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[]
pub(crate) fn highlight( pub(crate) fn highlight(
db: &RootDatabase, db: &RootDatabase,
config: HighlightConfig,
file_id: FileId, file_id: FileId,
range_to_highlight: Option<TextRange>, range_to_highlight: Option<TextRange>,
syntactic_name_ref_highlighting: bool,
) -> Vec<HlRange> { ) -> Vec<HlRange> {
let _p = profile::span("highlight"); let _p = profile::span("highlight");
let sema = Semantics::new(db); let sema = Semantics::new(db);
@ -183,26 +203,18 @@ pub(crate) fn highlight(
Some(it) => it.krate(), Some(it) => it.krate(),
None => return hl.to_vec(), None => return hl.to_vec(),
}; };
traverse( traverse(&mut hl, &sema, config, file_id, &root, krate, range_to_highlight);
&mut hl,
&sema,
file_id,
&root,
krate,
range_to_highlight,
syntactic_name_ref_highlighting,
);
hl.to_vec() hl.to_vec()
} }
fn traverse( fn traverse(
hl: &mut Highlights, hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
config: HighlightConfig,
file_id: FileId, file_id: FileId,
root: &SyntaxNode, root: &SyntaxNode,
krate: hir::Crate, krate: hir::Crate,
range_to_highlight: TextRange, range_to_highlight: TextRange,
syntactic_name_ref_highlighting: bool,
) { ) {
let is_unlinked = sema.to_module_def(file_id).is_none(); let is_unlinked = sema.to_module_def(file_id).is_none();
let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
@ -323,9 +335,11 @@ fn traverse(
Enter(it) => it, Enter(it) => it,
Leave(NodeOrToken::Token(_)) => continue, Leave(NodeOrToken::Token(_)) => continue,
Leave(NodeOrToken::Node(node)) => { Leave(NodeOrToken::Node(node)) => {
// Doc comment highlighting injection, we do this when leaving the node if config.inject_doc_comment {
// so that we overwrite the highlighting of the doc comment itself. // Doc comment highlighting injection, we do this when leaving the node
inject::doc_comment(hl, sema, file_id, &node); // so that we overwrite the highlighting of the doc comment itself.
inject::doc_comment(hl, sema, config, file_id, &node);
}
continue; continue;
} }
}; };
@ -400,7 +414,8 @@ fn traverse(
let string_to_highlight = ast::String::cast(descended_token.clone()); let string_to_highlight = ast::String::cast(descended_token.clone());
if let Some((string, expanded_string)) = string.zip(string_to_highlight) { if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
if string.is_raw() { if string.is_raw() {
if inject::ra_fixture(hl, sema, &string, &expanded_string).is_some() { if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
{
continue; continue;
} }
} }
@ -421,7 +436,7 @@ fn traverse(
sema, sema,
krate, krate,
&mut bindings_shadow_count, &mut bindings_shadow_count,
syntactic_name_ref_highlighting, config.syntactic_name_ref_highlighting,
name_like, name_like,
), ),
NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)), NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)),
@ -439,6 +454,29 @@ fn traverse(
// something unresolvable. FIXME: There should be a way to prevent that // something unresolvable. FIXME: There should be a way to prevent that
continue; continue;
} }
// apply config filtering
match &mut highlight.tag {
HlTag::StringLiteral if !config.strings => continue,
// If punctuation is disabled, make the macro bang part of the macro call again.
tag @ HlTag::Punctuation(HlPunct::MacroBang) => {
if !config.macro_bang {
*tag = HlTag::Symbol(SymbolKind::Macro);
} else if !config.specialize_punctuation {
*tag = HlTag::Punctuation(HlPunct::Other);
}
}
HlTag::Punctuation(_) if !config.punctuation => continue,
tag @ HlTag::Punctuation(_) if !config.specialize_punctuation => {
*tag = HlTag::Punctuation(HlPunct::Other);
}
HlTag::Operator(_) if !config.operator && highlight.mods.is_empty() => continue,
tag @ HlTag::Operator(_) if !config.specialize_operator => {
*tag = HlTag::Operator(HlOperator::Other);
}
_ => (),
}
if inside_attribute { if inside_attribute {
highlight |= HlMod::Attribute highlight |= HlMod::Attribute
} }
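
The effect of the new filtering step is that the same token can be reported differently depending on the client configuration. Informally, following the match arms above:

    // strings: false                -> string literal ranges are skipped entirely
    // macro_bang: false             -> the `!` of a macro call is tagged as the macro symbol again
    // punctuation: false            -> remaining punctuation ranges are skipped
    // specialize_punctuation: false -> specific punctuation kinds collapse to `HlPunct::Other`
    // operator: false               -> operator ranges without modifiers are skipped
    // specialize_operator: false    -> specific operator kinds collapse to `HlOperator::Other`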

View file

@ -5,7 +5,10 @@ use oorandom::Rand32;
use stdx::format_to; use stdx::format_to;
use syntax::AstNode; use syntax::AstNode;
use crate::{syntax_highlighting::highlight, FileId, RootDatabase}; use crate::{
syntax_highlighting::{highlight, HighlightConfig},
FileId, RootDatabase,
};
pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
let parse = db.parse(file_id); let parse = db.parse(file_id);
@ -20,7 +23,21 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
) )
} }
let hl_ranges = highlight(db, file_id, None, false); let hl_ranges = highlight(
db,
HighlightConfig {
strings: true,
punctuation: true,
specialize_punctuation: true,
specialize_operator: true,
operator: true,
inject_doc_comment: true,
macro_bang: true,
syntactic_name_ref_highlighting: false,
},
file_id,
None,
);
let text = parse.tree().syntax().to_string(); let text = parse.tree().syntax().to_string();
let mut buf = String::new(); let mut buf = String::new();
buf.push_str(STYLE); buf.push_str(STYLE);

View file

@ -15,13 +15,14 @@ use syntax::{
use crate::{ use crate::{
doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def}, doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
syntax_highlighting::{highlights::Highlights, injector::Injector}, syntax_highlighting::{highlights::Highlights, injector::Injector, HighlightConfig},
Analysis, HlMod, HlRange, HlTag, RootDatabase, Analysis, HlMod, HlRange, HlTag, RootDatabase,
}; };
pub(super) fn ra_fixture( pub(super) fn ra_fixture(
hl: &mut Highlights, hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
config: HighlightConfig,
literal: &ast::String, literal: &ast::String,
expanded: &ast::String, expanded: &ast::String,
) -> Option<()> { ) -> Option<()> {
@ -63,7 +64,13 @@ pub(super) fn ra_fixture(
let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text()); let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
for mut hl_range in analysis.highlight(tmp_file_id).unwrap() { for mut hl_range in analysis
.highlight(
HighlightConfig { syntactic_name_ref_highlighting: false, ..config },
tmp_file_id,
)
.unwrap()
{
for range in inj.map_range_up(hl_range.range) { for range in inj.map_range_up(hl_range.range) {
if let Some(range) = literal.map_range_up(range) { if let Some(range) = literal.map_range_up(range) {
hl_range.range = range; hl_range.range = range;
@ -86,6 +93,7 @@ const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
pub(super) fn doc_comment( pub(super) fn doc_comment(
hl: &mut Highlights, hl: &mut Highlights,
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
config: HighlightConfig,
src_file_id: FileId, src_file_id: FileId,
node: &SyntaxNode, node: &SyntaxNode,
) { ) {
@ -206,7 +214,14 @@ pub(super) fn doc_comment(
let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text()); let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
if let Ok(ranges) = analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)) { if let Ok(ranges) = analysis.with_db(|db| {
super::highlight(
db,
HighlightConfig { syntactic_name_ref_highlighting: true, ..config },
tmp_file_id,
None,
)
}) {
for HlRange { range, highlight, binding_hash } in ranges { for HlRange { range, highlight, binding_hash } in ranges {
for range in inj.map_range_up(range) { for range in inj.map_range_up(range) {
hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash }); hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash });

View file

@ -199,7 +199,7 @@ impl fmt::Display for HlTag {
} }
impl HlMod { impl HlMod {
const ALL: &'static [HlMod; HlMod::Unsafe as u8 as usize + 1] = &[ const ALL: &'static [HlMod; 19] = &[
HlMod::Associated, HlMod::Associated,
HlMod::Async, HlMod::Async,
HlMod::Attribute, HlMod::Attribute,
@ -296,7 +296,7 @@ impl Highlight {
Highlight { tag, mods: HlMods::default() } Highlight { tag, mods: HlMods::default() }
} }
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.tag == HlTag::None && self.mods == HlMods::default() self.tag == HlTag::None && self.mods.is_empty()
} }
} }
@ -330,6 +330,10 @@ impl ops::BitOr<HlMod> for Highlight {
} }
impl HlMods { impl HlMods {
pub fn is_empty(&self) -> bool {
self.0 == 0
}
pub fn contains(self, m: HlMod) -> bool { pub fn contains(self, m: HlMod) -> bool {
self.0 & m.mask() == m.mask() self.0 & m.mask() == m.mask()
} }

View file

@ -56,7 +56,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span> <span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
<span class="brace">}</span> <span class="brace">}</span>
<span class="comment documentation">/// This is an impl with a code block.</span> <span class="comment documentation">/// This is an impl of </span><span class="struct documentation injected intra_doc_link">[`Foo`]</span><span class="comment documentation"> with a code block.</span>
<span class="comment documentation">///</span> <span class="comment documentation">///</span>
<span class="comment documentation">/// ```</span> <span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span> <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>

View file

@ -4,7 +4,18 @@ use expect_test::{expect_file, ExpectFile};
use ide_db::SymbolKind; use ide_db::SymbolKind;
use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear}; use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};
use crate::{fixture, FileRange, HlTag, TextRange}; use crate::{fixture, FileRange, HighlightConfig, HlTag, TextRange};
const HL_CONFIG: HighlightConfig = HighlightConfig {
strings: true,
punctuation: true,
specialize_punctuation: true,
specialize_operator: true,
operator: true,
inject_doc_comment: true,
macro_bang: true,
syntactic_name_ref_highlighting: false,
};
#[test] #[test]
fn attributes() { fn attributes() {
@ -613,7 +624,7 @@ struct Foo {
bar: bool, bar: bool,
} }
/// This is an impl with a code block. /// This is an impl of [`Foo`] with a code block.
/// ///
/// ``` /// ```
/// fn foo() { /// fn foo() {
@ -996,7 +1007,10 @@ struct Foo {
// The "x" // The "x"
let highlights = &analysis let highlights = &analysis
.highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) }) .highlight_range(
HL_CONFIG,
FileRange { file_id, range: TextRange::at(45.into(), 1.into()) },
)
.unwrap(); .unwrap();
assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public"); assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public");
@ -1011,7 +1025,7 @@ macro_rules! test {}
}"# }"#
.trim(), .trim(),
); );
let _ = analysis.highlight(file_id).unwrap(); let _ = analysis.highlight(HL_CONFIG, file_id).unwrap();
} }
/// Highlights the code given by the `ra_fixture` argument, renders the /// Highlights the code given by the `ra_fixture` argument, renders the
@ -1035,7 +1049,7 @@ fn benchmark_syntax_highlighting_long_struct() {
let hash = { let hash = {
let _pt = bench("syntax highlighting long struct"); let _pt = bench("syntax highlighting long struct");
analysis analysis
.highlight(file_id) .highlight(HL_CONFIG, file_id)
.unwrap() .unwrap()
.iter() .iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
@ -1061,7 +1075,7 @@ fn syntax_highlighting_not_quadratic() {
let time = Instant::now(); let time = Instant::now();
let hash = analysis let hash = analysis
.highlight(file_id) .highlight(HL_CONFIG, file_id)
.unwrap() .unwrap()
.iter() .iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
@ -1086,7 +1100,7 @@ fn benchmark_syntax_highlighting_parser() {
let hash = { let hash = {
let _pt = bench("syntax highlighting parser"); let _pt = bench("syntax highlighting parser");
analysis analysis
.highlight(file_id) .highlight(HL_CONFIG, file_id)
.unwrap() .unwrap()
.iter() .iter()
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function)) .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))

View file

@ -3,8 +3,9 @@ use std::sync::Arc;
use dot::{Id, LabelText}; use dot::{Id, LabelText};
use ide_db::{ use ide_db::{
base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt}, base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
FxHashSet, RootDatabase, RootDatabase,
}; };
use stdx::hash::NoHashHashSet;
// Feature: View Crate Graph // Feature: View Crate Graph
// //
@ -41,7 +42,7 @@ pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String,
struct DotCrateGraph { struct DotCrateGraph {
graph: Arc<CrateGraph>, graph: Arc<CrateGraph>,
crates_to_render: FxHashSet<CrateId>, crates_to_render: NoHashHashSet<CrateId>,
} }
type Edge<'a> = (CrateId, &'a Dependency); type Edge<'a> = (CrateId, &'a Dependency);

View file

@ -13,6 +13,8 @@ pub(super) const PATTERN_FIRST: TokenSet =
T![.], T![.],
])); ]));
const PAT_TOP_FIRST: TokenSet = PATTERN_FIRST.union(TokenSet::new(&[T![|]]));
pub(crate) fn pattern(p: &mut Parser<'_>) { pub(crate) fn pattern(p: &mut Parser<'_>) {
pattern_r(p, PAT_RECOVERY_SET); pattern_r(p, PAT_RECOVERY_SET);
} }
@ -228,6 +230,7 @@ fn path_or_macro_pat(p: &mut Parser<'_>) -> CompletedMarker {
// let S(_) = (); // let S(_) = ();
// let S(_,) = (); // let S(_,) = ();
// let S(_, .. , x) = (); // let S(_, .. , x) = ();
// let S(| a) = ();
// } // }
fn tuple_pat_fields(p: &mut Parser<'_>) { fn tuple_pat_fields(p: &mut Parser<'_>) {
assert!(p.at(T!['('])); assert!(p.at(T!['(']));
@ -363,6 +366,7 @@ fn ref_pat(p: &mut Parser<'_>) -> CompletedMarker {
// let (a,) = (); // let (a,) = ();
// let (..) = (); // let (..) = ();
// let () = (); // let () = ();
// let (| a | a, | b) = ((),());
// } // }
fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker { fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
assert!(p.at(T!['('])); assert!(p.at(T!['(']));
@ -373,13 +377,13 @@ fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
let mut has_rest = false; let mut has_rest = false;
while !p.at(EOF) && !p.at(T![')']) { while !p.at(EOF) && !p.at(T![')']) {
has_pat = true; has_pat = true;
if !p.at_ts(PATTERN_FIRST) { if !p.at_ts(PAT_TOP_FIRST) {
p.error("expected a pattern"); p.error("expected a pattern");
break; break;
} }
has_rest |= p.at(T![..]); has_rest |= p.at(T![..]);
pattern(p); pattern_top(p);
if !p.at(T![')']) { if !p.at(T![')']) {
has_comma = true; has_comma = true;
p.expect(T![,]); p.expect(T![,]);
@ -393,6 +397,7 @@ fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
// test slice_pat // test slice_pat
// fn main() { // fn main() {
// let [a, b, ..] = []; // let [a, b, ..] = [];
// let [| a, ..] = [];
// } // }
fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker { fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
assert!(p.at(T!['['])); assert!(p.at(T!['[']));
@ -405,12 +410,12 @@ fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) { fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) {
while !p.at(EOF) && !p.at(ket) { while !p.at(EOF) && !p.at(ket) {
if !p.at_ts(PATTERN_FIRST) { if !p.at_ts(PAT_TOP_FIRST) {
p.error("expected a pattern"); p.error("expected a pattern");
break; break;
} }
pattern(p); pattern_top(p);
if !p.at(ket) { if !p.at(ket) {
p.expect(T![,]); p.expect(T![,]);
} }
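
With `PAT_TOP_FIRST` and `pattern_top`, tuple, tuple-struct and slice patterns may now start with a leading `|`, matching what rustc accepts for or-patterns. A small stand-alone program exercising these forms (adapted from the inline tests added above):

    struct S(i32);

    fn main() {
        let (| a, | b) = ((), ());
        let [| x, ..] = [0u8; 3];
        let S(| y) = S(1);
        let _ = (a, b, x, y); // silence unused-variable warnings
    }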

View file

@ -37,6 +37,29 @@ SOURCE_FILE
L_BRACK "[" L_BRACK "["
R_BRACK "]" R_BRACK "]"
SEMICOLON ";" SEMICOLON ";"
WHITESPACE "\n "
LET_STMT
LET_KW "let"
WHITESPACE " "
SLICE_PAT
L_BRACK "["
PIPE "|"
WHITESPACE " "
IDENT_PAT
NAME
IDENT "a"
COMMA ","
WHITESPACE " "
REST_PAT
DOT2 ".."
R_BRACK "]"
WHITESPACE " "
EQ "="
WHITESPACE " "
ARRAY_EXPR
L_BRACK "["
R_BRACK "]"
SEMICOLON ";"
WHITESPACE "\n" WHITESPACE "\n"
R_CURLY "}" R_CURLY "}"
WHITESPACE "\n" WHITESPACE "\n"

View file

@ -1,3 +1,4 @@
fn main() { fn main() {
let [a, b, ..] = []; let [a, b, ..] = [];
let [| a, ..] = [];
} }

View file

@ -100,6 +100,29 @@ SOURCE_FILE
L_PAREN "(" L_PAREN "("
R_PAREN ")" R_PAREN ")"
SEMICOLON ";" SEMICOLON ";"
WHITESPACE "\n "
LET_STMT
LET_KW "let"
WHITESPACE " "
TUPLE_STRUCT_PAT
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
L_PAREN "("
PIPE "|"
WHITESPACE " "
IDENT_PAT
NAME
IDENT "a"
R_PAREN ")"
WHITESPACE " "
EQ "="
WHITESPACE " "
TUPLE_EXPR
L_PAREN "("
R_PAREN ")"
SEMICOLON ";"
WHITESPACE "\n" WHITESPACE "\n"
R_CURLY "}" R_CURLY "}"
WHITESPACE "\n" WHITESPACE "\n"

View file

@ -3,4 +3,5 @@ fn foo() {
let S(_) = (); let S(_) = ();
let S(_,) = (); let S(_,) = ();
let S(_, .. , x) = (); let S(_, .. , x) = ();
let S(| a) = ();
} }

View file

@ -85,6 +85,46 @@ SOURCE_FILE
L_PAREN "(" L_PAREN "("
R_PAREN ")" R_PAREN ")"
SEMICOLON ";" SEMICOLON ";"
WHITESPACE "\n "
LET_STMT
LET_KW "let"
WHITESPACE " "
TUPLE_PAT
L_PAREN "("
PIPE "|"
WHITESPACE " "
OR_PAT
IDENT_PAT
NAME
IDENT "a"
WHITESPACE " "
PIPE "|"
WHITESPACE " "
IDENT_PAT
NAME
IDENT "a"
COMMA ","
WHITESPACE " "
PIPE "|"
WHITESPACE " "
IDENT_PAT
NAME
IDENT "b"
R_PAREN ")"
WHITESPACE " "
EQ "="
WHITESPACE " "
TUPLE_EXPR
L_PAREN "("
TUPLE_EXPR
L_PAREN "("
R_PAREN ")"
COMMA ","
TUPLE_EXPR
L_PAREN "("
R_PAREN ")"
R_PAREN ")"
SEMICOLON ";"
WHITESPACE "\n" WHITESPACE "\n"
R_CURLY "}" R_CURLY "}"
WHITESPACE "\n" WHITESPACE "\n"

View file

@ -3,4 +3,5 @@ fn main() {
let (a,) = (); let (a,) = ();
let (..) = (); let (..) = ();
let () = (); let () = ();
let (| a | a, | b) = ((),());
} }

View file

@ -24,7 +24,6 @@ tt = { path = "../tt", version = "0.0.0" }
mbe = { path = "../mbe", version = "0.0.0" } mbe = { path = "../mbe", version = "0.0.0" }
paths = { path = "../paths", version = "0.0.0" } paths = { path = "../paths", version = "0.0.0" }
proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" } proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
crossbeam = "0.8.1"
[dev-dependencies] [dev-dependencies]
expect-test = "1.4.0" expect-test = "1.4.0"

View file

@ -26,6 +26,7 @@ use std::{
ffi::OsString, ffi::OsString,
fs, fs,
path::{Path, PathBuf}, path::{Path, PathBuf},
thread,
time::SystemTime, time::SystemTime,
}; };
@ -65,18 +66,16 @@ impl ProcMacroSrv {
let macro_body = task.macro_body.to_subtree(); let macro_body = task.macro_body.to_subtree();
let attributes = task.attributes.map(|it| it.to_subtree()); let attributes = task.attributes.map(|it| it.to_subtree());
// FIXME: replace this with std's scoped threads once they stabilize let result = thread::scope(|s| {
// (then remove dependency on crossbeam) let thread = thread::Builder::new()
let result = crossbeam::scope(|s| {
let res = match s
.builder()
.stack_size(EXPANDER_STACK_SIZE) .stack_size(EXPANDER_STACK_SIZE)
.name(task.macro_name.clone()) .name(task.macro_name.clone())
.spawn(|_| { .spawn_scoped(s, || {
expander expander
.expand(&task.macro_name, &macro_body, attributes.as_ref()) .expand(&task.macro_name, &macro_body, attributes.as_ref())
.map(|it| FlatTree::new(&it)) .map(|it| FlatTree::new(&it))
}) { });
let res = match thread {
Ok(handle) => handle.join(), Ok(handle) => handle.join(),
Err(e) => std::panic::resume_unwind(Box::new(e)), Err(e) => std::panic::resume_unwind(Box::new(e)),
}; };
@ -86,10 +85,6 @@ impl ProcMacroSrv {
Err(e) => std::panic::resume_unwind(e), Err(e) => std::panic::resume_unwind(e),
} }
}); });
let result = match result {
Ok(result) => result,
Err(e) => std::panic::resume_unwind(e),
};
prev_env.rollback(); prev_env.rollback();
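
The crossbeam dependency is dropped in favour of `std::thread::scope`, stabilized in Rust 1.63, which also removes the extra layer of `Result` that `crossbeam::scope` returned. A stand-alone sketch of the same pattern (the stack size and thread name are illustrative values, not the ones `ProcMacroSrv` uses):

    use std::thread;

    fn expand_on_dedicated_thread() -> String {
        thread::scope(|s| {
            // Spawn the expansion on a named thread with a larger stack, then join it.
            let handle = thread::Builder::new()
                .stack_size(8 * 1024 * 1024)
                .name("proc-macro-expander".to_owned())
                .spawn_scoped(s, || {
                    // Stand-in for `expander.expand(...)`.
                    "expanded token stream".to_owned()
                });

            match handle {
                Ok(handle) => handle.join().expect("expansion thread panicked"),
                Err(e) => panic!("failed to spawn expansion thread: {e}"),
            }
        })
    }

    fn main() {
        println!("{}", expand_on_dedicated_thread());
    }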

File diff suppressed because it is too large

View file

@ -13,7 +13,7 @@ use cfg::{CfgDiff, CfgOptions};
use paths::{AbsPath, AbsPathBuf}; use paths::{AbsPath, AbsPathBuf};
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version; use semver::Version;
use stdx::always; use stdx::{always, hash::NoHashHashMap};
use crate::{ use crate::{
build_scripts::BuildScriptOutput, build_scripts::BuildScriptOutput,
@ -471,7 +471,7 @@ fn project_json_to_crate_graph(
.map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load)); .map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load));
let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default(); let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
let crates: FxHashMap<CrateId, CrateId> = project let crates: NoHashHashMap<CrateId, CrateId> = project
.crates() .crates()
.filter_map(|(crate_id, krate)| { .filter_map(|(crate_id, krate)| {
let file_path = &krate.root_module; let file_path = &krate.root_module;

View file

@ -22,7 +22,8 @@ anyhow = "1.0.57"
crossbeam-channel = "0.5.5" crossbeam-channel = "0.5.5"
dissimilar = "1.0.4" dissimilar = "1.0.4"
itertools = "0.10.3" itertools = "0.10.3"
lsp-types = { version = "0.93.0", features = ["proposed"] } scip = "0.1.1"
lsp-types = { version = "0.93.1", features = ["proposed"] }
parking_lot = "0.12.1" parking_lot = "0.12.1"
xflags = "0.2.4" xflags = "0.2.4"
oorandom = "11.1.3" oorandom = "11.1.3"
@ -88,5 +89,5 @@ in-rust-tree = [
"proc-macro-srv/sysroot-abi", "proc-macro-srv/sysroot-abi",
"sourcegen/in-rust-tree", "sourcegen/in-rust-tree",
"ide/in-rust-tree", "ide/in-rust-tree",
"syntax/in-rust-tree" "syntax/in-rust-tree",
] ]

View file

@ -93,6 +93,7 @@ fn try_main() -> Result<()> {
flags::RustAnalyzerCmd::Ssr(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Ssr(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
} }
Ok(()) Ok(())
} }

View file

@ -9,6 +9,7 @@ mod analysis_stats;
mod diagnostics; mod diagnostics;
mod ssr; mod ssr;
mod lsif; mod lsif;
mod scip;
mod progress_report; mod progress_report;

View file

@ -112,6 +112,10 @@ xflags::xflags! {
cmd lsif cmd lsif
required path: PathBuf required path: PathBuf
{} {}
cmd scip
required path: PathBuf
{}
} }
} }
@ -140,6 +144,7 @@ pub enum RustAnalyzerCmd {
Search(Search), Search(Search),
ProcMacro(ProcMacro), ProcMacro(ProcMacro),
Lsif(Lsif), Lsif(Lsif),
Scip(Scip),
} }
#[derive(Debug)] #[derive(Debug)]
@ -207,6 +212,11 @@ pub struct Lsif {
pub path: PathBuf, pub path: PathBuf,
} }
#[derive(Debug)]
pub struct Scip {
pub path: PathBuf,
}
impl RustAnalyzer { impl RustAnalyzer {
pub const HELP: &'static str = Self::HELP_; pub const HELP: &'static str = Self::HELP_;

View file

@ -0,0 +1,448 @@
//! SCIP generator
use std::{
collections::{HashMap, HashSet},
time::Instant,
};
use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
use hir::Name;
use ide::{
LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile, TextRange,
TokenId,
};
use ide_db::LineIndexDatabase;
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
use scip::types as scip_types;
use std::env;
use crate::cli::{
flags,
load_cargo::{load_workspace, LoadCargoConfig},
Result,
};
impl flags::Scip {
pub fn run(self) -> Result<()> {
eprintln!("Generating SCIP start...");
let now = Instant::now();
let cargo_config = CargoConfig::default();
let no_progress = &|s| (eprintln!("rust-analyzer: Loading {}", s));
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro: true,
prefill_caches: true,
};
let path = vfs::AbsPathBuf::assert(env::current_dir()?.join(&self.path));
let rootpath = path.normalize();
let manifest = ProjectManifest::discover_single(&path)?;
let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
let (host, vfs, _) = load_workspace(workspace, &load_cargo_config)?;
let db = host.raw_database();
let analysis = host.analysis();
let si = StaticIndex::compute(&analysis);
let mut index = scip_types::Index {
metadata: Some(scip_types::Metadata {
version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(),
tool_info: Some(scip_types::ToolInfo {
name: "rust-analyzer".to_owned(),
version: "0.1".to_owned(),
arguments: vec![],
..Default::default()
})
.into(),
project_root: format!(
"file://{}",
path.normalize()
.as_os_str()
.to_str()
.ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
.to_string()
),
text_document_encoding: scip_types::TextEncoding::UTF8.into(),
..Default::default()
})
.into(),
..Default::default()
};
let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();
for file in si.files {
let mut local_count = 0;
let mut new_local_symbol = || {
let new_symbol = scip::types::Symbol::new_local(local_count);
local_count += 1;
new_symbol
};
let StaticIndexedFile { file_id, tokens, .. } = file;
let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) {
Some(relative_path) => relative_path,
None => continue,
};
let line_index = LineIndex {
index: db.line_index(file_id),
encoding: OffsetEncoding::Utf8,
endings: LineEndings::Unix,
};
let mut doc = scip_types::Document {
relative_path,
language: "rust".to_string(),
..Default::default()
};
tokens.into_iter().for_each(|(range, id)| {
let token = si.tokens.get(id).unwrap();
let mut occurrence = scip_types::Occurrence::default();
occurrence.range = text_range_to_scip_range(&line_index, range);
occurrence.symbol = match tokens_to_symbol.get(&id) {
Some(symbol) => symbol.clone(),
None => {
let symbol = match &token.moniker {
Some(moniker) => moniker_to_symbol(&moniker),
None => new_local_symbol(),
};
let symbol = scip::symbol::format_symbol(symbol);
tokens_to_symbol.insert(id, symbol.clone());
symbol
}
};
if let Some(def) = token.definition {
if def.range == range {
occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32;
}
if !symbols_emitted.contains(&id) {
symbols_emitted.insert(id);
let mut symbol_info = scip_types::SymbolInformation::default();
symbol_info.symbol = occurrence.symbol.clone();
if let Some(hover) = &token.hover {
if !hover.markup.as_str().is_empty() {
symbol_info.documentation = vec![hover.markup.as_str().to_string()];
}
}
doc.symbols.push(symbol_info)
}
}
doc.occurrences.push(occurrence);
});
if doc.occurrences.is_empty() {
continue;
}
index.documents.push(doc);
}
scip::write_message_to_file("index.scip", index)
.map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?;
eprintln!("Generating SCIP finished {:?}", now.elapsed());
Ok(())
}
}
fn get_relative_filepath(
vfs: &vfs::Vfs,
rootpath: &vfs::AbsPathBuf,
file_id: ide::FileId,
) -> Option<String> {
Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string())
}
// SCIP ranges come with a (very worthwhile) size optimization: a range that starts and ends on
// the same line is encoded as just [start_line, start_col, end_col].
//
// This converts a `TextRange` into that optimized SCIP range using the file's line index.
fn text_range_to_scip_range(line_index: &LineIndex, range: TextRange) -> Vec<i32> {
let LineCol { line: start_line, col: start_col } = line_index.index.line_col(range.start());
let LineCol { line: end_line, col: end_col } = line_index.index.line_col(range.end());
if start_line == end_line {
vec![start_line as i32, start_col as i32, end_col as i32]
} else {
vec![start_line as i32, start_col as i32, end_line as i32, end_col as i32]
}
}
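
For example (zero-based `LineCol` values, purely illustrative):

    // start (line 7, col 4), end (line 7, col 9)  ->  [7, 4, 9]
    // start (line 7, col 4), end (line 9, col 2)  ->  [7, 4, 9, 2]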
fn new_descriptor_str(
name: &str,
suffix: scip_types::descriptor::Suffix,
) -> scip_types::Descriptor {
scip_types::Descriptor {
name: name.to_string(),
disambiguator: "".to_string(),
suffix: suffix.into(),
..Default::default()
}
}
fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor {
let mut name = name.to_string();
if name.contains("'") {
name = format!("`{}`", name);
}
new_descriptor_str(name.as_str(), suffix)
}
/// Loosely based on `def_to_moniker`.
///
/// Only called for non-local symbols: definitions whose visibility does not reach beyond a
/// single document are given a fresh local symbol by the caller instead.
fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol {
use scip_types::descriptor::Suffix::*;
let package_name = moniker.package_information.name.clone();
let version = moniker.package_information.version.clone();
let descriptors = moniker
.identifier
.description
.iter()
.map(|desc| {
new_descriptor(
desc.name.clone(),
match desc.desc {
MonikerDescriptorKind::Namespace => Namespace,
MonikerDescriptorKind::Type => Type,
MonikerDescriptorKind::Term => Term,
MonikerDescriptorKind::Method => Method,
MonikerDescriptorKind::TypeParameter => TypeParameter,
MonikerDescriptorKind::Parameter => Parameter,
MonikerDescriptorKind::Macro => Macro,
MonikerDescriptorKind::Meta => Meta,
},
)
})
.collect();
scip_types::Symbol {
scheme: "rust-analyzer".into(),
package: Some(scip_types::Package {
manager: "cargo".to_string(),
name: package_name,
version,
..Default::default()
})
.into(),
descriptors,
..Default::default()
}
}
#[cfg(test)]
mod test {
use super::*;
use hir::Semantics;
use ide::{AnalysisHost, FilePosition};
use ide_db::defs::IdentClass;
use ide_db::{base_db::fixture::ChangeFixture, helpers::pick_best_token};
use scip::symbol::format_symbol;
use syntax::SyntaxKind::*;
use syntax::{AstNode, T};
fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) {
let mut host = AnalysisHost::default();
let change_fixture = ChangeFixture::parse(ra_fixture);
host.raw_database_mut().apply_change(change_fixture.change);
let (file_id, range_or_offset) =
change_fixture.file_position.expect("expected a marker ($0)");
let offset = range_or_offset.expect_offset();
(host, FilePosition { file_id, offset })
}
/// If expected == "", then assert that there are no symbols (this is basically local symbol)
#[track_caller]
fn check_symbol(ra_fixture: &str, expected: &str) {
let (host, position) = position(ra_fixture);
let FilePosition { file_id, offset } = position;
let db = host.raw_database();
let sema = &Semantics::new(db);
let file = sema.parse(file_id).syntax().clone();
let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
IDENT
| INT_NUMBER
| LIFETIME_IDENT
| T![self]
| T![super]
| T![crate]
| T![Self]
| COMMENT => 2,
kind if kind.is_trivia() => 0,
_ => 1,
})
.expect("OK OK");
let navs = sema
.descend_into_macros(original_token.clone())
.into_iter()
.filter_map(|token| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
it.into_iter().flat_map(|def| {
let module = def.module(db).unwrap();
let current_crate = module.krate();
match MonikerResult::from_def(sema.db, def, current_crate) {
Some(moniker_result) => Some(moniker_to_symbol(&moniker_result)),
None => None,
}
})
})
})
.flatten()
.collect::<Vec<_>>();
if expected == "" {
assert_eq!(0, navs.len(), "must have no symbols {:?}", navs);
return;
}
assert_eq!(1, navs.len(), "must have one symbol {:?}", navs);
let res = navs.get(0).unwrap();
let formatted = format_symbol(res.clone());
assert_eq!(formatted, expected);
}
#[test]
fn basic() {
check_symbol(
r#"
//- /lib.rs crate:main deps:foo
use foo::example_mod::func;
fn main() {
func$0();
}
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod example_mod {
pub fn func() {}
}
"#,
"rust-analyzer cargo foo 0.1.0 example_mod/func().",
);
}
#[test]
fn symbol_for_trait() {
check_symbol(
r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
pub trait MyTrait {
pub fn func$0() {}
}
}
"#,
"rust-analyzer cargo foo 0.1.0 module/MyTrait#func().",
);
}
#[test]
fn symbol_for_trait_constant() {
check_symbol(
r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
pub trait MyTrait {
const MY_CONST$0: u8;
}
}
"#,
"rust-analyzer cargo foo 0.1.0 module/MyTrait#MY_CONST.",
);
}
#[test]
fn symbol_for_trait_type() {
check_symbol(
r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
pub trait MyTrait {
type MyType$0;
}
}
"#,
// "foo::module::MyTrait::MyType",
"rust-analyzer cargo foo 0.1.0 module/MyTrait#[MyType]",
);
}
#[test]
fn symbol_for_trait_impl_function() {
check_symbol(
r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
pub trait MyTrait {
pub fn func() {}
}
struct MyStruct {}
impl MyTrait for MyStruct {
pub fn func$0() {}
}
}
"#,
// "foo::module::MyStruct::MyTrait::func",
"rust-analyzer cargo foo 0.1.0 module/MyStruct#MyTrait#func().",
);
}
#[test]
fn symbol_for_field() {
check_symbol(
r#"
//- /lib.rs crate:main deps:foo
use foo::St;
fn main() {
let x = St { a$0: 2 };
}
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub struct St {
pub a: i32,
}
"#,
"rust-analyzer cargo foo 0.1.0 St#a.",
);
}
#[test]
fn local_symbol_for_local() {
check_symbol(
r#"
//- /lib.rs crate:main deps:foo
use foo::module::func;
fn main() {
func();
}
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
pub fn func() {
let x$0 = 2;
}
}
"#,
"",
);
}
}

View file

@ -12,8 +12,8 @@ use std::{ffi::OsString, fmt, iter, path::PathBuf};
use flycheck::FlycheckConfig; use flycheck::FlycheckConfig;
use ide::{ use ide::{
AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig, JoinLinesConfig, HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig,
Snippet, SnippetScope, JoinLinesConfig, Snippet, SnippetScope,
}; };
use ide_db::{ use ide_db::{
imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind}, imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
@ -385,6 +385,34 @@ config_data! {
/// available on a nightly build. /// available on a nightly build.
rustfmt_rangeFormatting_enable: bool = "false", rustfmt_rangeFormatting_enable: bool = "false",
/// Inject additional highlighting into doc comments.
///
/// When enabled, rust-analyzer will highlight Rust source in doc comments as well as intra-doc
/// links.
semanticHighlighting_doc_comment_inject_enable: bool = "true",
/// Use semantic tokens for operators.
///
/// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
/// they are tagged with modifiers.
semanticHighlighting_operator_enable: bool = "true",
/// Use specialized semantic tokens for operators.
///
/// When enabled, rust-analyzer will emit special token types for operator tokens instead
/// of the generic `operator` token type.
semanticHighlighting_operator_specialization_enable: bool = "false",
/// Use semantic tokens for punctuations.
///
/// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
/// they are tagged with modifiers or have a special role.
semanticHighlighting_punctuation_enable: bool = "false",
/// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
/// calls.
semanticHighlighting_punctuation_separate_macro_bang: bool = "false",
/// Use specialized semantic tokens for punctuations.
///
/// When enabled, rust-analyzer will emit special token types for punctuation tokens instead
/// of the generic `punctuation` token type.
semanticHighlighting_punctuation_specialization_enable: bool = "false",
/// Use semantic tokens for strings. /// Use semantic tokens for strings.
/// ///
/// In some editors (e.g. vscode) semantic tokens override other highlighting grammars. /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
@ -1171,8 +1199,19 @@ impl Config {
} }
} }
pub fn highlighting_strings(&self) -> bool { pub fn highlighting_config(&self) -> HighlightConfig {
self.data.semanticHighlighting_strings_enable HighlightConfig {
strings: self.data.semanticHighlighting_strings_enable,
punctuation: self.data.semanticHighlighting_punctuation_enable,
specialize_punctuation: self
.data
.semanticHighlighting_punctuation_specialization_enable,
macro_bang: self.data.semanticHighlighting_punctuation_separate_macro_bang,
operator: self.data.semanticHighlighting_operator_enable,
specialize_operator: self.data.semanticHighlighting_operator_specialization_enable,
inject_doc_comment: self.data.semanticHighlighting_doc_comment_inject_enable,
syntactic_name_ref_highlighting: false,
}
} }
pub fn hover(&self) -> HoverConfig { pub fn hover(&self) -> HoverConfig {
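
With the defaults declared in `config_data!` above (punctuation, punctuation specialization, the separate macro bang token and operator specialization all default to "false"; operator highlighting and doc-comment injection default to "true", and string highlighting is assumed to keep its existing "true" default, which lies outside this hunk), `highlighting_config()` effectively produces:

    HighlightConfig {
        strings: true,
        punctuation: false,
        specialize_punctuation: false,
        macro_bang: false,
        operator: true,
        specialize_operator: false,
        inject_doc_comment: true,
        syntactic_name_ref_highlighting: false,
    }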

View file

@ -4,11 +4,12 @@ pub(crate) mod to_proto;
use std::{mem, sync::Arc}; use std::{mem, sync::Arc};
use ide::FileId; use ide::FileId;
use rustc_hash::{FxHashMap, FxHashSet}; use ide_db::FxHashMap;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use crate::lsp_ext; use crate::lsp_ext;
pub(crate) type CheckFixes = Arc<FxHashMap<usize, FxHashMap<FileId, Vec<Fix>>>>; pub(crate) type CheckFixes = Arc<NoHashHashMap<usize, NoHashHashMap<FileId, Vec<Fix>>>>;
#[derive(Debug, Default, Clone)] #[derive(Debug, Default, Clone)]
pub struct DiagnosticsMapConfig { pub struct DiagnosticsMapConfig {
@ -19,12 +20,12 @@ pub struct DiagnosticsMapConfig {
#[derive(Debug, Default, Clone)] #[derive(Debug, Default, Clone)]
pub(crate) struct DiagnosticCollection { pub(crate) struct DiagnosticCollection {
// FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>> // FIXME: should be NoHashHashMap<FileId, Vec<ra_id::Diagnostic>>
pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>, pub(crate) native: NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>,
// FIXME: should be Vec<flycheck::Diagnostic> // FIXME: should be Vec<flycheck::Diagnostic>
pub(crate) check: FxHashMap<usize, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>, pub(crate) check: NoHashHashMap<usize, NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
pub(crate) check_fixes: CheckFixes, pub(crate) check_fixes: CheckFixes,
changes: FxHashSet<FileId>, changes: NoHashHashSet<FileId>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -105,7 +106,7 @@ impl DiagnosticCollection {
native.chain(check) native.chain(check)
} }
pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> { pub(crate) fn take_changes(&mut self) -> Option<NoHashHashSet<FileId>> {
if self.changes.is_empty() { if self.changes.is_empty() {
return None; return None;
} }

View file

@ -14,6 +14,7 @@ use parking_lot::{Mutex, RwLock};
use proc_macro_api::ProcMacroServer; use proc_macro_api::ProcMacroServer;
use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts}; use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use stdx::hash::NoHashHashMap;
use vfs::AnchoredPathBuf; use vfs::AnchoredPathBuf;
use crate::{ use crate::{
@ -67,7 +68,7 @@ pub(crate) struct GlobalState {
pub(crate) flycheck_sender: Sender<flycheck::Message>, pub(crate) flycheck_sender: Sender<flycheck::Message>,
pub(crate) flycheck_receiver: Receiver<flycheck::Message>, pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>, pub(crate) vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
pub(crate) vfs_config_version: u32, pub(crate) vfs_config_version: u32,
pub(crate) vfs_progress_config_version: u32, pub(crate) vfs_progress_config_version: u32,
pub(crate) vfs_progress_n_total: usize, pub(crate) vfs_progress_n_total: usize,
@ -113,7 +114,7 @@ pub(crate) struct GlobalStateSnapshot {
pub(crate) check_fixes: CheckFixes, pub(crate) check_fixes: CheckFixes,
mem_docs: MemDocs, mem_docs: MemDocs,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>, pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>, vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>, pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
} }
@ -157,7 +158,7 @@ impl GlobalState {
flycheck_sender, flycheck_sender,
flycheck_receiver, flycheck_receiver,
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))), vfs: Arc::new(RwLock::new((vfs::Vfs::default(), NoHashHashMap::default()))),
vfs_config_version: 0, vfs_config_version: 0,
vfs_progress_config_version: 0, vfs_progress_config_version: 0,
vfs_progress_n_total: 0, vfs_progress_n_total: 0,

View file

@ -1504,10 +1504,8 @@ pub(crate) fn handle_semantic_tokens_full(
let text = snap.analysis.file_text(file_id)?; let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?; let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?;
let highlight_strings = snap.config.highlighting_strings(); let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
// Unconditionally cache the tokens // Unconditionally cache the tokens
snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone()); snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@ -1525,10 +1523,8 @@ pub(crate) fn handle_semantic_tokens_full_delta(
let text = snap.analysis.file_text(file_id)?; let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let highlights = snap.analysis.highlight(file_id)?; let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?;
let highlight_strings = snap.config.highlighting_strings(); let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
let mut cache = snap.semantic_tokens_cache.lock(); let mut cache = snap.semantic_tokens_cache.lock();
let cached_tokens = cache.entry(params.text_document.uri).or_default(); let cached_tokens = cache.entry(params.text_document.uri).or_default();
@ -1556,10 +1552,8 @@ pub(crate) fn handle_semantic_tokens_range(
let text = snap.analysis.file_text(frange.file_id)?; let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?; let line_index = snap.file_line_index(frange.file_id)?;
let highlights = snap.analysis.highlight_range(frange)?; let highlights = snap.analysis.highlight_range(snap.config.highlighting_config(), frange)?;
let highlight_strings = snap.config.highlighting_strings(); let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
let semantic_tokens =
to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
Ok(Some(semantic_tokens.into())) Ok(Some(semantic_tokens.into()))
} }

View file

@ -328,8 +328,33 @@ impl GlobalState {
} }
let uri = file_id_to_url(&self.vfs.read().0, file_id); let uri = file_id_to_url(&self.vfs.read().0, file_id);
let diagnostics = let mut diagnostics =
self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>(); self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
// VSCode assumes diagnostic messages to be non-empty strings, so we need to patch
// empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether
// diagnostic messages are actually allowed to be empty or not and patching this
// in the VSCode client does not work as the assertion happens in the protocol
// conversion. So this hack is here to stay, and will be considered a hack
// until the LSP decides to state that empty messages are allowed.
// See https://github.com/rust-lang/rust-analyzer/issues/11404
// See https://github.com/rust-lang/rust-analyzer/issues/13130
let patch_empty = |message: &mut String| {
if message.is_empty() {
*message = " ".to_string();
}
};
for d in &mut diagnostics {
patch_empty(&mut d.message);
if let Some(dri) = &mut d.related_information {
for dri in dri {
patch_empty(&mut dri.message);
}
}
}
let version = from_proto::vfs_path(&uri) let version = from_proto::vfs_path(&uri)
.map(|path| self.mem_docs.get(&path).map(|it| it.version)) .map(|path| self.mem_docs.get(&path).map(|it| it.version))
.unwrap_or_default(); .unwrap_or_default();
@ -529,6 +554,13 @@ impl GlobalState {
} }
flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
flycheck::Progress::DidCancel => (Progress::End, None), flycheck::Progress::DidCancel => (Progress::End, None),
flycheck::Progress::DidFailToRestart(err) => {
self.show_and_log_error(
"cargo check failed".to_string(),
Some(err.to_string()),
);
return;
}
flycheck::Progress::DidFinish(result) => { flycheck::Progress::DidFinish(result) => {
if let Err(err) = result { if let Err(err) = result {
self.show_and_log_error( self.show_and_log_error(

View file

@@ -8,107 +8,130 @@ use lsp_types::{
};

macro_rules! define_semantic_token_types {
-    ($(($ident:ident, $string:literal)),*$(,)?) => {
-        $(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)*
+    (
+        standard {
+            $($standard:ident),*$(,)?
+        }
+        custom {
+            $(($custom:ident, $string:literal)),*$(,)?
+        }
+    ) => {
+        $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
+        $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*

        pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
-            SemanticTokenType::COMMENT,
-            SemanticTokenType::KEYWORD,
-            SemanticTokenType::STRING,
-            SemanticTokenType::NUMBER,
-            SemanticTokenType::REGEXP,
-            SemanticTokenType::OPERATOR,
-            SemanticTokenType::NAMESPACE,
-            SemanticTokenType::TYPE,
-            SemanticTokenType::STRUCT,
-            SemanticTokenType::CLASS,
-            SemanticTokenType::INTERFACE,
-            SemanticTokenType::ENUM,
-            SemanticTokenType::ENUM_MEMBER,
-            SemanticTokenType::TYPE_PARAMETER,
-            SemanticTokenType::FUNCTION,
-            SemanticTokenType::METHOD,
-            SemanticTokenType::PROPERTY,
-            SemanticTokenType::MACRO,
-            SemanticTokenType::VARIABLE,
-            SemanticTokenType::PARAMETER,
-            $($ident),*
+            $(SemanticTokenType::$standard,)*
+            $($custom),*
        ];
    };
}

define_semantic_token_types![
-    (ANGLE, "angle"),
-    (ARITHMETIC, "arithmetic"),
-    (ATTRIBUTE, "attribute"),
-    (ATTRIBUTE_BRACKET, "attributeBracket"),
-    (BITWISE, "bitwise"),
-    (BOOLEAN, "boolean"),
-    (BRACE, "brace"),
-    (BRACKET, "bracket"),
-    (BUILTIN_ATTRIBUTE, "builtinAttribute"),
-    (BUILTIN_TYPE, "builtinType"),
-    (CHAR, "character"),
-    (COLON, "colon"),
-    (COMMA, "comma"),
-    (COMPARISON, "comparison"),
-    (CONST_PARAMETER, "constParameter"),
-    (DERIVE, "derive"),
-    (DERIVE_HELPER, "deriveHelper"),
-    (DOT, "dot"),
-    (ESCAPE_SEQUENCE, "escapeSequence"),
-    (FORMAT_SPECIFIER, "formatSpecifier"),
-    (GENERIC, "generic"),
-    (LABEL, "label"),
-    (LIFETIME, "lifetime"),
-    (LOGICAL, "logical"),
-    (MACRO_BANG, "macroBang"),
-    (OPERATOR, "operator"),
-    (PARENTHESIS, "parenthesis"),
-    (PUNCTUATION, "punctuation"),
-    (SELF_KEYWORD, "selfKeyword"),
-    (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
-    (SEMICOLON, "semicolon"),
-    (TYPE_ALIAS, "typeAlias"),
-    (TOOL_MODULE, "toolModule"),
-    (UNION, "union"),
-    (UNRESOLVED_REFERENCE, "unresolvedReference"),
+    standard {
+        COMMENT,
+        DECORATOR,
+        ENUM_MEMBER,
+        ENUM,
+        FUNCTION,
+        INTERFACE,
+        KEYWORD,
+        MACRO,
+        METHOD,
+        NAMESPACE,
+        NUMBER,
+        OPERATOR,
+        PARAMETER,
+        PROPERTY,
+        STRING,
+        STRUCT,
+        TYPE_PARAMETER,
+        VARIABLE,
+    }
+
+    custom {
+        (ANGLE, "angle"),
+        (ARITHMETIC, "arithmetic"),
+        (ATTRIBUTE, "attribute"),
+        (ATTRIBUTE_BRACKET, "attributeBracket"),
+        (BITWISE, "bitwise"),
+        (BOOLEAN, "boolean"),
+        (BRACE, "brace"),
+        (BRACKET, "bracket"),
+        (BUILTIN_ATTRIBUTE, "builtinAttribute"),
+        (BUILTIN_TYPE, "builtinType"),
+        (CHAR, "character"),
+        (COLON, "colon"),
+        (COMMA, "comma"),
+        (COMPARISON, "comparison"),
+        (CONST_PARAMETER, "constParameter"),
+        (DERIVE, "derive"),
+        (DERIVE_HELPER, "deriveHelper"),
+        (DOT, "dot"),
+        (ESCAPE_SEQUENCE, "escapeSequence"),
+        (FORMAT_SPECIFIER, "formatSpecifier"),
+        (GENERIC, "generic"),
+        (LABEL, "label"),
+        (LIFETIME, "lifetime"),
+        (LOGICAL, "logical"),
+        (MACRO_BANG, "macroBang"),
+        (PARENTHESIS, "parenthesis"),
+        (PUNCTUATION, "punctuation"),
+        (SELF_KEYWORD, "selfKeyword"),
+        (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
+        (SEMICOLON, "semicolon"),
+        (TYPE_ALIAS, "typeAlias"),
+        (TOOL_MODULE, "toolModule"),
+        (UNION, "union"),
+        (UNRESOLVED_REFERENCE, "unresolvedReference"),
+    }
];
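Note: the reworked macro (the modifier macro below follows the same shape) splits the token definitions into `standard` names, which now reuse the constants that lsp-types already provides, and `custom` pairs, which still go through `SemanticTokenType::new`. A minimal, self-contained sketch of that expansion, using a stand-in type instead of the real `lsp_types::SemanticTokenType`:

```rust
// Stand-in for lsp_types::SemanticTokenType, only so this sketch compiles on
// its own; the real constants and `new` come from the lsp-types crate.
#[derive(Debug, Clone, PartialEq)]
pub struct SemanticTokenType(&'static str);

impl SemanticTokenType {
    pub const COMMENT: SemanticTokenType = SemanticTokenType("comment");
    pub const KEYWORD: SemanticTokenType = SemanticTokenType("keyword");
    pub const fn new(name: &'static str) -> Self {
        SemanticTokenType(name)
    }
}

// Same shape as the macro above: `standard` idents re-export the associated
// constants, `custom` pairs mint new token types by string name.
macro_rules! define_semantic_token_types {
    (
        standard {
            $($standard:ident),*$(,)?
        }
        custom {
            $(($custom:ident, $string:literal)),*$(,)?
        }
    ) => {
        $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
        $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*

        pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
            $(SemanticTokenType::$standard,)*
            $($custom),*
        ];
    };
}

define_semantic_token_types![
    standard { COMMENT, KEYWORD }
    custom { (MACRO_BANG, "macroBang") }
];

fn main() {
    // Two standard tokens plus one custom token end up in the legend.
    assert_eq!(SUPPORTED_TYPES.len(), 3);
    assert_eq!(COMMENT, SemanticTokenType::COMMENT);
    assert_eq!(MACRO_BANG, SemanticTokenType::new("macroBang"));
    println!("{:?}", SUPPORTED_TYPES);
}
```

With this shape, adding a client-visible standard token is a one-line change to the `standard` list, while rust-analyzer-specific tokens stay in `custom`.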
macro_rules! define_semantic_token_modifiers {
-    ($(($ident:ident, $string:literal)),*$(,)?) => {
-        $(pub(crate) const $ident: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
+    (
+        standard {
+            $($standard:ident),*$(,)?
+        }
+        custom {
+            $(($custom:ident, $string:literal)),*$(,)?
+        }
+    ) => {
+        $(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
+        $(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*

        pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
-            SemanticTokenModifier::DOCUMENTATION,
-            SemanticTokenModifier::DECLARATION,
-            SemanticTokenModifier::DEFINITION,
-            SemanticTokenModifier::STATIC,
-            SemanticTokenModifier::ABSTRACT,
-            SemanticTokenModifier::DEPRECATED,
-            SemanticTokenModifier::READONLY,
-            SemanticTokenModifier::DEFAULT_LIBRARY,
-            $($ident),*
+            $(SemanticTokenModifier::$standard,)*
+            $($custom),*
        ];
    };
}

define_semantic_token_modifiers![
-    (ASYNC, "async"),
-    (ATTRIBUTE_MODIFIER, "attribute"),
-    (CALLABLE, "callable"),
-    (CONSTANT, "constant"),
-    (CONSUMING, "consuming"),
-    (CONTROL_FLOW, "controlFlow"),
-    (CRATE_ROOT, "crateRoot"),
-    (INJECTED, "injected"),
-    (INTRA_DOC_LINK, "intraDocLink"),
-    (LIBRARY, "library"),
-    (MUTABLE, "mutable"),
-    (PUBLIC, "public"),
-    (REFERENCE, "reference"),
-    (TRAIT_MODIFIER, "trait"),
-    (UNSAFE, "unsafe"),
+    standard {
+        DOCUMENTATION,
+        DECLARATION,
+        STATIC,
+        DEFAULT_LIBRARY,
+    }
+    custom {
+        (ASYNC, "async"),
+        (ATTRIBUTE_MODIFIER, "attribute"),
+        (CALLABLE, "callable"),
+        (CONSTANT, "constant"),
+        (CONSUMING, "consuming"),
+        (CONTROL_FLOW, "controlFlow"),
+        (CRATE_ROOT, "crateRoot"),
+        (INJECTED, "injected"),
+        (INTRA_DOC_LINK, "intraDocLink"),
+        (LIBRARY, "library"),
+        (MUTABLE, "mutable"),
+        (PUBLIC, "public"),
+        (REFERENCE, "reference"),
+        (TRAIT_MODIFIER, "trait"),
+        (UNSAFE, "unsafe"),
+    }
];

#[derive(Default)]

View file

@@ -517,7 +517,6 @@ pub(crate) fn semantic_tokens(
    text: &str,
    line_index: &LineIndex,
    highlights: Vec<HlRange>,
-    highlight_strings: bool,
) -> lsp_types::SemanticTokens {
    let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
    let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);

@@ -526,10 +525,8 @@ pub(crate) fn semantic_tokens(
        if highlight_range.highlight.is_empty() {
            continue;
        }

        let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
-        if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
-            continue;
-        }
        let token_index = semantic_tokens::type_index(ty);
        let modifier_bitset = mods.0;
@@ -561,55 +558,55 @@ fn semantic_token_type_and_modifiers(
    let mut mods = semantic_tokens::ModifierSet::default();
    let type_ = match highlight.tag {
        HlTag::Symbol(symbol) => match symbol {
-            SymbolKind::Attribute => semantic_tokens::ATTRIBUTE,
+            SymbolKind::Attribute => semantic_tokens::DECORATOR,
            SymbolKind::Derive => semantic_tokens::DERIVE,
            SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
-            SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE,
+            SymbolKind::Module => semantic_tokens::NAMESPACE,
            SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
-            SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY,
+            SymbolKind::Field => semantic_tokens::PROPERTY,
-            SymbolKind::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER,
+            SymbolKind::TypeParam => semantic_tokens::TYPE_PARAMETER,
            SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER,
            SymbolKind::LifetimeParam => semantic_tokens::LIFETIME,
            SymbolKind::Label => semantic_tokens::LABEL,
-            SymbolKind::ValueParam => lsp_types::SemanticTokenType::PARAMETER,
+            SymbolKind::ValueParam => semantic_tokens::PARAMETER,
            SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD,
            SymbolKind::SelfType => semantic_tokens::SELF_TYPE_KEYWORD,
-            SymbolKind::Local => lsp_types::SemanticTokenType::VARIABLE,
+            SymbolKind::Local => semantic_tokens::VARIABLE,
            SymbolKind::Function => {
                if highlight.mods.contains(HlMod::Associated) {
-                    lsp_types::SemanticTokenType::METHOD
+                    semantic_tokens::METHOD
                } else {
-                    lsp_types::SemanticTokenType::FUNCTION
+                    semantic_tokens::FUNCTION
                }
            }
            SymbolKind::Const => {
                mods |= semantic_tokens::CONSTANT;
-                mods |= lsp_types::SemanticTokenModifier::STATIC;
-                lsp_types::SemanticTokenType::VARIABLE
+                mods |= semantic_tokens::STATIC;
+                semantic_tokens::VARIABLE
            }
            SymbolKind::Static => {
-                mods |= lsp_types::SemanticTokenModifier::STATIC;
-                lsp_types::SemanticTokenType::VARIABLE
+                mods |= semantic_tokens::STATIC;
+                semantic_tokens::VARIABLE
            }
-            SymbolKind::Struct => lsp_types::SemanticTokenType::STRUCT,
+            SymbolKind::Struct => semantic_tokens::STRUCT,
-            SymbolKind::Enum => lsp_types::SemanticTokenType::ENUM,
+            SymbolKind::Enum => semantic_tokens::ENUM,
-            SymbolKind::Variant => lsp_types::SemanticTokenType::ENUM_MEMBER,
+            SymbolKind::Variant => semantic_tokens::ENUM_MEMBER,
            SymbolKind::Union => semantic_tokens::UNION,
            SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
-            SymbolKind::Trait => lsp_types::SemanticTokenType::INTERFACE,
+            SymbolKind::Trait => semantic_tokens::INTERFACE,
-            SymbolKind::Macro => lsp_types::SemanticTokenType::MACRO,
+            SymbolKind::Macro => semantic_tokens::MACRO,
            SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
            SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,
        },
        HlTag::AttributeBracket => semantic_tokens::ATTRIBUTE_BRACKET,
        HlTag::BoolLiteral => semantic_tokens::BOOLEAN,
        HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
-        HlTag::ByteLiteral | HlTag::NumericLiteral => lsp_types::SemanticTokenType::NUMBER,
+        HlTag::ByteLiteral | HlTag::NumericLiteral => semantic_tokens::NUMBER,
        HlTag::CharLiteral => semantic_tokens::CHAR,
-        HlTag::Comment => lsp_types::SemanticTokenType::COMMENT,
+        HlTag::Comment => semantic_tokens::COMMENT,
        HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
        HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
-        HlTag::Keyword => lsp_types::SemanticTokenType::KEYWORD,
+        HlTag::Keyword => semantic_tokens::KEYWORD,
        HlTag::None => semantic_tokens::GENERIC,
        HlTag::Operator(op) => match op {
            HlOperator::Bitwise => semantic_tokens::BITWISE,

@@ -618,7 +615,7 @@ fn semantic_token_type_and_modifiers(
            HlOperator::Comparison => semantic_tokens::COMPARISON,
            HlOperator::Other => semantic_tokens::OPERATOR,
        },
-        HlTag::StringLiteral => lsp_types::SemanticTokenType::STRING,
+        HlTag::StringLiteral => semantic_tokens::STRING,
        HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
        HlTag::Punctuation(punct) => match punct {
            HlPunct::Bracket => semantic_tokens::BRACKET,

@@ -643,16 +640,16 @@ fn semantic_token_type_and_modifiers(
        HlMod::Consuming => semantic_tokens::CONSUMING,
        HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW,
        HlMod::CrateRoot => semantic_tokens::CRATE_ROOT,
-        HlMod::DefaultLibrary => lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY,
+        HlMod::DefaultLibrary => semantic_tokens::DEFAULT_LIBRARY,
-        HlMod::Definition => lsp_types::SemanticTokenModifier::DECLARATION,
+        HlMod::Definition => semantic_tokens::DECLARATION,
-        HlMod::Documentation => lsp_types::SemanticTokenModifier::DOCUMENTATION,
+        HlMod::Documentation => semantic_tokens::DOCUMENTATION,
        HlMod::Injected => semantic_tokens::INJECTED,
        HlMod::IntraDocLink => semantic_tokens::INTRA_DOC_LINK,
        HlMod::Library => semantic_tokens::LIBRARY,
        HlMod::Mutable => semantic_tokens::MUTABLE,
        HlMod::Public => semantic_tokens::PUBLIC,
        HlMod::Reference => semantic_tokens::REFERENCE,
-        HlMod::Static => lsp_types::SemanticTokenModifier::STATIC,
+        HlMod::Static => semantic_tokens::STATIC,
        HlMod::Trait => semantic_tokens::TRAIT_MODIFIER,
        HlMod::Unsafe => semantic_tokens::UNSAFE,
    };
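Note: `mods |= ...` and `let modifier_bitset = mods.0;` above rely on the modifier set being a plain bitset. A rough stand-alone sketch of that idea (this is not rust-analyzer's actual `ModifierSet`, and the modifier list here is made up for illustration): each supported modifier gets a bit position, and the LSP payload carries the resulting `u32`.

```rust
// Illustrative modifier type and list; the real ones come from the
// define_semantic_token_modifiers! macro above.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Modifier(&'static str);

const DOCUMENTATION: Modifier = Modifier("documentation");
const STATIC: Modifier = Modifier("static");
const CONSTANT: Modifier = Modifier("constant");
const SUPPORTED_MODIFIERS: &[Modifier] = &[DOCUMENTATION, STATIC, CONSTANT];

// Bitset wrapper: bit i set means "the i-th supported modifier applies".
#[derive(Default, Clone, Copy)]
struct ModifierSet(u32);

impl std::ops::BitOrAssign<Modifier> for ModifierSet {
    fn bitor_assign(&mut self, rhs: Modifier) {
        let idx = SUPPORTED_MODIFIERS
            .iter()
            .position(|m| *m == rhs)
            .expect("modifier must be listed in SUPPORTED_MODIFIERS");
        self.0 |= 1 << idx;
    }
}

fn main() {
    let mut mods = ModifierSet::default();
    mods |= CONSTANT;
    mods |= STATIC;
    // STATIC is bit 1, CONSTANT is bit 2, so the bitset is 0b110.
    assert_eq!(mods.0, 0b110);
    println!("modifier bitset: {:#b}", mods.0);
}
```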

crates/stdx/src/hash.rs (new file)
View file

@@ -0,0 +1,80 @@
//! A non-hashing [`Hasher`] implementation.
use std::{
hash::{BuildHasher, Hasher},
marker::PhantomData,
};
pub type NoHashHashMap<K, V> = std::collections::HashMap<K, V, NoHashHasherBuilder<K>>;
pub type NoHashHashSet<K> = std::collections::HashSet<K, NoHashHasherBuilder<K>>;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct NoHashHasherBuilder<T>(PhantomData<T>);
impl<T> Default for NoHashHasherBuilder<T> {
fn default() -> Self {
Self(Default::default())
}
}
pub trait NoHashHashable {}
impl NoHashHashable for usize {}
impl NoHashHashable for u32 {}
pub struct NoHashHasher(u64);
impl<T: NoHashHashable> BuildHasher for NoHashHasherBuilder<T> {
type Hasher = NoHashHasher;
fn build_hasher(&self) -> Self::Hasher {
NoHashHasher(0)
}
}
impl Hasher for NoHashHasher {
fn finish(&self) -> u64 {
self.0
}
fn write(&mut self, _: &[u8]) {
unimplemented!("NoHashHasher should only be used for hashing primitive integers")
}
fn write_u8(&mut self, i: u8) {
self.0 = i as u64;
}
fn write_u16(&mut self, i: u16) {
self.0 = i as u64;
}
fn write_u32(&mut self, i: u32) {
self.0 = i as u64;
}
fn write_u64(&mut self, i: u64) {
self.0 = i as u64;
}
fn write_usize(&mut self, i: usize) {
self.0 = i as u64;
}
fn write_i8(&mut self, i: i8) {
self.0 = i as u64;
}
fn write_i16(&mut self, i: i16) {
self.0 = i as u64;
}
fn write_i32(&mut self, i: i32) {
self.0 = i as u64;
}
fn write_i64(&mut self, i: i64) {
self.0 = i as u64;
}
fn write_isize(&mut self, i: isize) {
self.0 = i as u64;
}
}
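Note: a short usage sketch of the new map (assuming the `stdx` crate above is available as a dependency; `Id` is a made-up stand-in for a newtype like `FileId`). The hasher only accepts keys whose `Hash` impl writes exactly one integer, which is what the manual `Hash` impl for `FileId` further down guarantees.

```rust
use stdx::hash::{NoHashHashMap, NoHashHashable};

// Newtype over u32; the derived Hash impl does a single write_u32, which is
// all NoHashHasher supports.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct Id(u32);

impl NoHashHashable for Id {}

fn main() {
    // The key's value is used directly as its hash, so no hashing work is done.
    let mut map: NoHashHashMap<Id, &str> = NoHashHashMap::default();
    map.insert(Id(1), "lib.rs");
    map.insert(Id(2), "main.rs");
    assert_eq!(map.get(&Id(1)), Some(&"lib.rs"));
}
```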

View file

@@ -7,6 +7,7 @@ use std::{cmp::Ordering, ops, time::Instant};
use std::{io as sio, iter};

mod macros;
+pub mod hash;
pub mod process;
pub mod panic_context;
pub mod non_empty_vec;

View file

@@ -12,6 +12,7 @@ doctest = false

[dependencies]
rustc-hash = "1.1.0"
fst = "0.4.7"
+indexmap = "1.9.1"
paths = { path = "../paths", version = "0.0.0" }
-indexmap = "1.9.1"
+stdx = { path = "../stdx", version = "0.0.0" }

View file

@@ -6,6 +6,7 @@ use std::fmt;

use fst::{IntoStreamer, Streamer};
use rustc_hash::FxHashMap;
+use stdx::hash::NoHashHashMap;

use crate::{AnchoredPath, FileId, Vfs, VfsPath};

@@ -13,7 +14,7 @@ use crate::{AnchoredPath, FileId, Vfs, VfsPath};
#[derive(Default, Clone, Eq, PartialEq)]
pub struct FileSet {
    files: FxHashMap<VfsPath, FileId>,
-    paths: FxHashMap<FileId, VfsPath>,
+    paths: NoHashHashMap<FileId, VfsPath>,
}

impl FileSet {

View file

@@ -59,9 +59,16 @@ pub use paths::{AbsPath, AbsPathBuf};
/// Handle to a file in [`Vfs`]
///
/// Most functions in rust-analyzer use this when they need to refer to a file.
-#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
pub struct FileId(pub u32);

+impl stdx::hash::NoHashHashable for FileId {}
+impl std::hash::Hash for FileId {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.0.hash(state);
+    }
+}

/// Storage for all files read by rust-analyzer.
///
/// For more information see the [crate-level](crate) documentation.

View file

@@ -587,6 +587,52 @@ Enables the use of rustfmt's unstable range formatting command for the
`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
available on a nightly build.
--
[[rust-analyzer.semanticHighlighting.doc.comment.inject.enable]]rust-analyzer.semanticHighlighting.doc.comment.inject.enable (default: `true`)::
+
--
Inject additional highlighting into doc comments.
When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
doc links.
--
[[rust-analyzer.semanticHighlighting.operator.enable]]rust-analyzer.semanticHighlighting.operator.enable (default: `true`)::
+
--
Use semantic tokens for operators.
When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
they are tagged with modifiers.
--
[[rust-analyzer.semanticHighlighting.operator.specialization.enable]]rust-analyzer.semanticHighlighting.operator.specialization.enable (default: `false`)::
+
--
Use specialized semantic tokens for operators.
When enabled, rust-analyzer will emit special token types for operator tokens instead
of the generic `operator` token type.
--
[[rust-analyzer.semanticHighlighting.punctuation.enable]]rust-analyzer.semanticHighlighting.punctuation.enable (default: `false`)::
+
--
Use semantic tokens for punctuations.
When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
they are tagged with modifiers or have a special role.
--
[[rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang]]rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang (default: `false`)::
+
--
When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
calls.
--
[[rust-analyzer.semanticHighlighting.punctuation.specialization.enable]]rust-analyzer.semanticHighlighting.punctuation.specialization.enable (default: `false`)::
+
--
Use specialized semantic tokens for punctuations.
When enabled, rust-analyzer will emit special token types for punctuation tokens instead
of the generic `punctuation` token type.
--
[[rust-analyzer.semanticHighlighting.strings.enable]]rust-analyzer.semanticHighlighting.strings.enable (default: `true`)::
+
--
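Note: each of the options above is a boolean under a dotted key with a documented default. A trivial sketch of that lookup rule (plain Rust, not rust-analyzer's actual config code; the key names are taken from the entries above):

```rust
use std::collections::HashMap;

// Returns the user's explicit setting if present, else the documented default.
fn bool_setting(settings: &HashMap<String, bool>, key: &str, default: bool) -> bool {
    settings.get(key).copied().unwrap_or(default)
}

fn main() {
    let mut settings = HashMap::new();
    settings.insert("semanticHighlighting.punctuation.enable".to_string(), true);

    // Explicitly enabled by the user (documented default is `false`):
    assert!(bool_setting(&settings, "semanticHighlighting.punctuation.enable", false));
    // Not set, so the documented default (`true`) applies:
    assert!(bool_setting(&settings, "semanticHighlighting.strings.enable", true));
}
```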

View file

@@ -861,3 +861,14 @@ For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watc
    "isBackground": true
}
```
==== Live Share
VS Code Live Share has partial support for rust-analyzer.
Live Share _requires_ the official Microsoft build of VS Code; OSS builds will not work correctly.
The host's rust-analyzer instance will be shared with all guests joining the session.
The guests do not have to have the rust-analyzer extension installed for this to work.
If you are joining a Live Share session and _do_ have rust-analyzer installed locally, commands from the command palette will not work correctly since they will attempt to communicate with the local server.

View file

@@ -1084,6 +1084,36 @@
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.doc.comment.inject.enable": {
"markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.",
"default": true,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.operator.enable": {
"markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.",
"default": true,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.operator.specialization.enable": {
"markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.",
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.punctuation.enable": {
"markdownDescription": "Use semantic tokens for punctuations.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.",
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": {
"markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.",
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.punctuation.specialization.enable": {
"markdownDescription": "Use specialized semantic tokens for punctuations.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.",
"default": false,
"type": "boolean"
},
"rust-analyzer.semanticHighlighting.strings.enable": { "rust-analyzer.semanticHighlighting.strings.enable": {
"markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.", "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
"default": true, "default": true,

View file

@@ -5,7 +5,6 @@ import * as Is from "vscode-languageclient/lib/common/utils/is";
import { assert } from "./util";
import { WorkspaceEdit } from "vscode";
import { Workspace } from "./ctx";
-import { updateConfig } from "./config";
import { substituteVariablesInEnv } from "./config";
import { outputChannel, traceOutputChannel } from "./main";
import { randomUUID } from "crypto";

@@ -86,11 +85,6 @@ export async function createClient(
    let initializationOptions = vscode.workspace.getConfiguration("rust-analyzer");

-    // Update outdated user configs
-    await updateConfig(initializationOptions).catch((err) => {
-        void vscode.window.showErrorMessage(`Failed updating old config keys: ${err.message}`);
-    });

    if (workspace.kind === "Detached Files") {
        initializationOptions = {
            detachedFiles: workspace.files.map((file) => file.uri.fsPath),

@@ -105,22 +99,6 @@ export async function createClient(
        traceOutputChannel: traceOutputChannel(),
        outputChannel: outputChannel(),
        middleware: {
-            async handleDiagnostics(uri, diagnostics, next) {
-                // Workaround for https://github.com/microsoft/vscode/issues/155531
-                for (const diagnostic of diagnostics) {
-                    if (!diagnostic.message) {
-                        diagnostic.message = " ";
-                    }
-                    if (diagnostic.relatedInformation) {
-                        for (const relatedInformation of diagnostic.relatedInformation) {
-                            if (!relatedInformation.message) {
-                                relatedInformation.message = " ";
-                            }
-                        }
-                    }
-                }
-                next(uri, diagnostics);
-            },
            async provideHover(
                document: vscode.TextDocument,
                position: vscode.Position,

View file

@@ -3,8 +3,6 @@ import * as vscode from "vscode";

import { Env } from "./client";
import { log } from "./util";

-export type UpdatesChannel = "stable" | "nightly";

export type RunnableEnvCfg =
    | undefined
    | Record<string, string>

@@ -175,100 +173,6 @@ export class Config {
    }
}
export async function updateConfig(config: vscode.WorkspaceConfiguration) {
const renames = [
["assist.allowMergingIntoGlobImports", "imports.merge.glob"],
["assist.exprFillDefault", "assist.expressionFillDefault"],
["assist.importEnforceGranularity", "imports.granularity.enforce"],
["assist.importGranularity", "imports.granularity.group"],
["assist.importMergeBehavior", "imports.granularity.group"],
["assist.importMergeBehaviour", "imports.granularity.group"],
["assist.importGroup", "imports.group.enable"],
["assist.importPrefix", "imports.prefix"],
["primeCaches.enable", "cachePriming.enable"],
["cache.warmup", "cachePriming.enable"],
["cargo.loadOutDirsFromCheck", "cargo.buildScripts.enable"],
["cargo.runBuildScripts", "cargo.buildScripts.enable"],
["cargo.runBuildScriptsCommand", "cargo.buildScripts.overrideCommand"],
["cargo.useRustcWrapperForBuildScripts", "cargo.buildScripts.useRustcWrapper"],
["completion.snippets", "completion.snippets.custom"],
["diagnostics.enableExperimental", "diagnostics.experimental.enable"],
["experimental.procAttrMacros", "procMacro.attributes.enable"],
["highlighting.strings", "semanticHighlighting.strings.enable"],
["highlightRelated.breakPoints", "highlightRelated.breakPoints.enable"],
["highlightRelated.exitPoints", "highlightRelated.exitPoints.enable"],
["highlightRelated.yieldPoints", "highlightRelated.yieldPoints.enable"],
["highlightRelated.references", "highlightRelated.references.enable"],
["hover.documentation", "hover.documentation.enable"],
["hover.linksInHover", "hover.links.enable"],
["hoverActions.linksInHover", "hover.links.enable"],
["hoverActions.debug", "hover.actions.debug.enable"],
["hoverActions.enable", "hover.actions.enable.enable"],
["hoverActions.gotoTypeDef", "hover.actions.gotoTypeDef.enable"],
["hoverActions.implementations", "hover.actions.implementations.enable"],
["hoverActions.references", "hover.actions.references.enable"],
["hoverActions.run", "hover.actions.run.enable"],
["inlayHints.chainingHints", "inlayHints.chainingHints.enable"],
["inlayHints.closureReturnTypeHints", "inlayHints.closureReturnTypeHints.enable"],
["inlayHints.hideNamedConstructorHints", "inlayHints.typeHints.hideNamedConstructor"],
["inlayHints.parameterHints", "inlayHints.parameterHints.enable"],
["inlayHints.reborrowHints", "inlayHints.reborrowHints.enable"],
["inlayHints.typeHints", "inlayHints.typeHints.enable"],
["lruCapacity", "lru.capacity"],
["runnables.cargoExtraArgs", "runnables.extraArgs"],
["runnables.overrideCargo", "runnables.command"],
["rustcSource", "rustc.source"],
["rustfmt.enableRangeFormatting", "rustfmt.rangeFormatting.enable"],
];
for (const [oldKey, newKey] of renames) {
const inspect = config.inspect(oldKey);
if (inspect !== undefined) {
const valMatrix = [
{
val: inspect.globalValue,
langVal: inspect.globalLanguageValue,
target: vscode.ConfigurationTarget.Global,
},
{
val: inspect.workspaceFolderValue,
langVal: inspect.workspaceFolderLanguageValue,
target: vscode.ConfigurationTarget.WorkspaceFolder,
},
{
val: inspect.workspaceValue,
langVal: inspect.workspaceLanguageValue,
target: vscode.ConfigurationTarget.Workspace,
},
];
for (const { val, langVal, target } of valMatrix) {
const patch = (val: unknown) => {
// some of the updates we do only append "enable" or "custom"
// that means on the next run we would find these again, but as objects with
// these properties causing us to destroy the config
// so filter those already updated ones out
return (
val !== undefined &&
!(
typeof val === "object" &&
val !== null &&
(oldKey === "completion.snippets" || !val.hasOwnProperty("custom"))
)
);
};
if (patch(val)) {
await config.update(newKey, val, target, false);
await config.update(oldKey, undefined, target, false);
}
if (patch(langVal)) {
await config.update(newKey, langVal, target, true);
await config.update(oldKey, undefined, target, true);
}
}
}
}
}
export function substituteVariablesInEnv(env: Env): Env {
    const missingDeps = new Set<string>();
    // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier

View file

@@ -33,7 +33,7 @@ export function outputChannel() {
}

export interface RustAnalyzerExtensionApi {
-    client: lc.LanguageClient;
+    client?: lc.LanguageClient;
}

export async function activate(
@@ -48,6 +48,23 @@ export async function activate(
}

async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyzerExtensionApi> {
// We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
// only those are in use.
// (r-a still somewhat works with Live Share, because commands are tunneled to the host)
const folders = (vscode.workspace.workspaceFolders || []).filter(
(folder) => folder.uri.scheme === "file"
);
const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
isRustDocument(document)
);
if (folders.length === 0 && rustDocuments.length === 0) {
// FIXME: Ideally we would choose not to activate at all (and avoid registering
// non-functional editor commands), but VS Code doesn't seem to have a good way of doing
// that
return {};
}
    const config = new Config(context);
    const state = new PersistentState(context.globalState);
    const serverPath = await bootstrap(context, config, state).catch((err) => {
@@ -60,18 +77,11 @@ async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyz
        throw new Error(message);
    });

-    if ((vscode.workspace.workspaceFolders || []).length === 0) {
-        const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
-            isRustDocument(document)
-        );
-        if (rustDocuments.length > 0) {
-            ctx = await Ctx.create(config, context, serverPath, {
-                kind: "Detached Files",
-                files: rustDocuments,
-            });
-        } else {
-            throw new Error("no rust files are opened");
-        }
+    if (folders.length === 0) {
+        ctx = await Ctx.create(config, context, serverPath, {
+            kind: "Detached Files",
+            files: rustDocuments,
+        });
    } else {
        // Note: we try to start the server before we activate type hints so that it
        // registers its `onDidChangeDocument` handler before us.

View file

@@ -15,7 +15,7 @@ pub(crate) fn socket_transport(
    stream: TcpStream,
) -> (Sender<Message>, Receiver<Message>, IoThreads) {
    let (reader_receiver, reader) = make_reader(stream.try_clone().unwrap());
-    let (writer_sender, writer) = make_write(stream.try_clone().unwrap());
+    let (writer_sender, writer) = make_write(stream);
    let io_threads = make_io_threads(reader, writer);
    (writer_sender, reader_receiver, io_threads)
}