Make ConfigData Ser and TOML De

This commit makes the rust-analyzer::config module serializable and deserializable to and from TOML.

Co-Authored-By: Cormac Relf <web@cormacrelf.net>
Authored by Ali Bektas on 2023-10-22 14:52:43 +02:00; committed by Lukas Wirth.
Parent commit: 657b33b0cb
Commit: 67d8d2d4a0
14 changed files with 1398 additions and 951 deletions

Cargo.lock (generated) — 55 lines changed

@ -781,6 +781,7 @@ checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4"
dependencies = [ dependencies = [
"equivalent", "equivalent",
"hashbrown", "hashbrown",
"serde",
] ]
[[package]] [[package]]
@ -1594,6 +1595,7 @@ dependencies = [
"ide", "ide",
"ide-db", "ide-db",
"ide-ssr", "ide-ssr",
"indexmap",
"itertools", "itertools",
"load-cargo", "load-cargo",
"lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1622,6 +1624,7 @@ dependencies = [
"test-fixture", "test-fixture",
"test-utils", "test-utils",
"tikv-jemallocator", "tikv-jemallocator",
"toml",
"toolchain", "toolchain",
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
@ -1775,6 +1778,15 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "serde_spanned"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "sharded-slab" name = "sharded-slab"
version = "0.1.7" version = "0.1.7"
@ -2025,6 +2037,40 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "toml"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
]
[[package]] [[package]]
name = "toolchain" name = "toolchain"
version = "0.0.0" version = "0.0.0"
@ -2401,6 +2447,15 @@ version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
[[package]]
name = "winnow"
version = "0.5.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8434aeec7b290e8da5c3f0d628cb0eac6cabcb31d14bb74f779a08109a5914d6"
dependencies = [
"memchr",
]
[[package]] [[package]]
name = "write-json" name = "write-json"
version = "0.1.4" version = "0.1.4"

View file

@ -19,6 +19,10 @@ use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`, // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been build yet as the build data is missing. // then the crate for the proc-macro hasn't been build yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>; pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
/// Files are grouped into source roots. A source root is a directory on the /// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a /// file systems which is watched for changes. Typically it corresponds to a
/// Rust crate. Source roots *might* be nested: in this case, a file belongs to /// Rust crate. Source roots *might* be nested: in this case, a file belongs to
@ -26,9 +30,6 @@ pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf)
/// source root, and the analyzer does not know the root path of the source root at /// source root, and the analyzer does not know the root path of the source root at
/// all. So, a file from one source root can't refer to a file in another source /// all. So, a file from one source root can't refer to a file in another source
/// root by path. /// root by path.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourceRoot { pub struct SourceRoot {
/// Sysroot or crates.io library. /// Sysroot or crates.io library.

View file

@ -64,7 +64,7 @@ use hir::ChangeWithProcMacros;
use ide_db::{ use ide_db::{
base_db::{ base_db::{
salsa::{self, ParallelDatabase}, salsa::{self, ParallelDatabase},
CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, VfsPath, CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceDatabaseExt, VfsPath,
}, },
prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase, prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
}; };
@ -271,6 +271,10 @@ impl Analysis {
self.with_db(|db| status::status(db, file_id)) self.with_db(|db| status::status(db, file_id))
} }
pub fn source_root(&self, file_id: FileId) -> Cancellable<SourceRootId> {
self.with_db(|db| db.file_source_root(file_id))
}
pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()> pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
where where
F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe, F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
@ -280,7 +284,7 @@ impl Analysis {
/// Gets the text of the source file. /// Gets the text of the source file.
pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> { pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
self.with_db(|db| db.file_text(file_id)) self.with_db(|db| SourceDatabaseExt::file_text(db, file_id))
} }
/// Gets the syntax tree of the file. /// Gets the syntax tree of the file.
@ -290,7 +294,6 @@ impl Analysis {
/// Returns true if this file belongs to an immutable library. /// Returns true if this file belongs to an immutable library.
pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> { pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
use ide_db::base_db::SourceDatabaseExt;
self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library) self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
} }

View file

@ -4,8 +4,9 @@
use std::{fmt, str::FromStr}; use std::{fmt, str::FromStr};
use cfg::CfgOptions; use cfg::CfgOptions;
use serde::Serialize;
#[derive(Clone, Eq, PartialEq, Debug)] #[derive(Clone, Eq, PartialEq, Debug, Serialize)]
pub enum CfgFlag { pub enum CfgFlag {
Atom(String), Atom(String),
KeyValue { key: String, value: String }, KeyValue { key: String, value: String },

View file

@ -52,7 +52,7 @@
use base_db::{CrateDisplayName, CrateName}; use base_db::{CrateDisplayName, CrateName};
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use serde::{de, Deserialize}; use serde::{de, Deserialize, Serialize};
use span::Edition; use span::Edition;
use crate::cfg_flag::CfgFlag; use crate::cfg_flag::CfgFlag;
@ -161,14 +161,14 @@ impl ProjectJson {
} }
} }
#[derive(Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ProjectJsonData { pub struct ProjectJsonData {
sysroot: Option<Utf8PathBuf>, sysroot: Option<Utf8PathBuf>,
sysroot_src: Option<Utf8PathBuf>, sysroot_src: Option<Utf8PathBuf>,
crates: Vec<CrateData>, crates: Vec<CrateData>,
} }
#[derive(Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
struct CrateData { struct CrateData {
display_name: Option<String>, display_name: Option<String>,
root_module: Utf8PathBuf, root_module: Utf8PathBuf,
@ -190,7 +190,7 @@ struct CrateData {
repository: Option<String>, repository: Option<String>,
} }
#[derive(Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename = "edition")] #[serde(rename = "edition")]
enum EditionData { enum EditionData {
#[serde(rename = "2015")] #[serde(rename = "2015")]
@ -218,20 +218,21 @@ impl From<EditionData> for Edition {
/// ///
/// This will differ from `CrateId` when multiple `ProjectJson` /// This will differ from `CrateId` when multiple `ProjectJson`
/// workspaces are loaded. /// workspaces are loaded.
#[derive(Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] #[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)]
#[serde(transparent)] #[serde(transparent)]
pub struct CrateArrayIdx(pub usize); pub struct CrateArrayIdx(pub usize);
#[derive(Deserialize, Debug, Clone, Eq, PartialEq)] #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub(crate) struct Dep { pub(crate) struct Dep {
/// Identifies a crate by position in the crates array. /// Identifies a crate by position in the crates array.
#[serde(rename = "crate")] #[serde(rename = "crate")]
pub(crate) krate: CrateArrayIdx, pub(crate) krate: CrateArrayIdx,
#[serde(serialize_with = "serialize_crate_name")]
#[serde(deserialize_with = "deserialize_crate_name")] #[serde(deserialize_with = "deserialize_crate_name")]
pub(crate) name: CrateName, pub(crate) name: CrateName,
} }
#[derive(Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
struct CrateSource { struct CrateSource {
include_dirs: Vec<Utf8PathBuf>, include_dirs: Vec<Utf8PathBuf>,
exclude_dirs: Vec<Utf8PathBuf>, exclude_dirs: Vec<Utf8PathBuf>,
@ -244,3 +245,10 @@ where
let name = String::deserialize(de)?; let name = String::deserialize(de)?;
CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {err:?}"))) CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {err:?}")))
} }
fn serialize_crate_name<S>(name: &CrateName, se: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
se.serialize_str(name)
}

View file

@ -39,11 +39,13 @@ tracing.workspace = true
tracing-subscriber.workspace = true tracing-subscriber.workspace = true
tracing-tree.workspace = true tracing-tree.workspace = true
triomphe.workspace = true triomphe.workspace = true
toml = "0.8.8"
nohash-hasher.workspace = true nohash-hasher.workspace = true
always-assert = "0.2.0" always-assert = "0.2.0"
walkdir = "2.3.2" walkdir = "2.3.2"
semver.workspace = true semver.workspace = true
memchr = "2.7.1" memchr = "2.7.1"
indexmap = { version = "2.0.0", features = ["serde"] }
cfg.workspace = true cfg.workspace = true
flycheck.workspace = true flycheck.workspace = true

File diff suppressed because it is too large Load diff

View file

@ -154,10 +154,12 @@ pub(crate) fn fetch_native_diagnostics(
.copied() .copied()
.filter_map(|file_id| { .filter_map(|file_id| {
let line_index = snapshot.file_line_index(file_id).ok()?; let line_index = snapshot.file_line_index(file_id).ok()?;
let source_root = snapshot.analysis.source_root(file_id).ok()?;
let diagnostics = snapshot let diagnostics = snapshot
.analysis .analysis
.diagnostics( .diagnostics(
&snapshot.config.diagnostics(), &snapshot.config.diagnostics(Some(source_root)),
ide::AssistResolveStrategy::None, ide::AssistResolveStrategy::None,
file_id, file_id,
) )

View file

@ -187,7 +187,7 @@ impl GlobalState {
}; };
let mut analysis_host = AnalysisHost::new(config.lru_parse_query_capacity()); let mut analysis_host = AnalysisHost::new(config.lru_parse_query_capacity());
if let Some(capacities) = config.lru_query_capacities() { if let Some(capacities) = config.lru_query_capacities_config() {
analysis_host.update_lru_capacities(capacities); analysis_host.update_lru_capacities(capacities);
} }
let (flycheck_sender, flycheck_receiver) = unbounded(); let (flycheck_sender, flycheck_receiver) = unbounded();

View file

@ -355,8 +355,9 @@ pub(crate) fn handle_join_lines(
) -> anyhow::Result<Vec<lsp_types::TextEdit>> { ) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered(); let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered();
let config = snap.config.join_lines();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let source_root = snap.analysis.source_root(file_id)?;
let config = snap.config.join_lines(Some(source_root));
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let mut res = TextEdit::default(); let mut res = TextEdit::default();
@ -923,7 +924,8 @@ pub(crate) fn handle_completion(
let completion_trigger_character = let completion_trigger_character =
params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next()); params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
let completion_config = &snap.config.completion(); let source_root = snap.analysis.source_root(position.file_id)?;
let completion_config = &snap.config.completion(Some(source_root));
let items = match snap.analysis.completions( let items = match snap.analysis.completions(
completion_config, completion_config,
position, position,
@ -964,11 +966,12 @@ pub(crate) fn handle_completion_resolve(
let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?; let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, resolve_data.position.position)?; let offset = from_proto::offset(&line_index, resolve_data.position.position)?;
let source_root = snap.analysis.source_root(file_id)?;
let additional_edits = snap let additional_edits = snap
.analysis .analysis
.resolve_completion_edits( .resolve_completion_edits(
&snap.config.completion(), &snap.config.completion(Some(source_root)),
FilePosition { file_id, offset }, FilePosition { file_id, offset },
resolve_data resolve_data
.imports .imports
@ -1038,16 +1041,17 @@ pub(crate) fn handle_hover(
PositionOrRange::Position(position) => Range::new(position, position), PositionOrRange::Position(position) => Range::new(position, position),
PositionOrRange::Range(range) => range, PositionOrRange::Range(range) => range,
}; };
let file_range = from_proto::file_range(&snap, &params.text_document, range)?; let file_range = from_proto::file_range(&snap, &params.text_document, range)?;
let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
let hover = snap.config.hover();
let info = match snap.analysis.hover(&hover, file_range)? {
None => return Ok(None), None => return Ok(None),
Some(info) => info, Some(info) => info,
}; };
let line_index = snap.file_line_index(file_range.file_id)?; let line_index = snap.file_line_index(file_range.file_id)?;
let range = to_proto::range(&line_index, info.range); let range = to_proto::range(&line_index, info.range);
let markup_kind = snap.config.hover().format; let markup_kind = hover.format;
let hover = lsp_ext::Hover { let hover = lsp_ext::Hover {
hover: lsp_types::Hover { hover: lsp_types::Hover {
contents: HoverContents::Markup(to_proto::markup_content( contents: HoverContents::Markup(to_proto::markup_content(
@ -1191,11 +1195,12 @@ pub(crate) fn handle_code_action(
return Ok(None); return Ok(None);
} }
let line_index = let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
snap.file_line_index(from_proto::file_id(&snap, &params.text_document.uri)?)?; let line_index = snap.file_line_index(file_id)?;
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?; let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let source_root = snap.analysis.source_root(file_id)?;
let mut assists_config = snap.config.assist(); let mut assists_config = snap.config.assist(Some(source_root));
assists_config.allowed = params assists_config.allowed = params
.context .context
.only .only
@ -1212,7 +1217,7 @@ pub(crate) fn handle_code_action(
}; };
let assists = snap.analysis.assists_with_fixes( let assists = snap.analysis.assists_with_fixes(
&assists_config, &assists_config,
&snap.config.diagnostics(), &snap.config.diagnostics(Some(source_root)),
resolve, resolve,
frange, frange,
)?; )?;
@ -1266,8 +1271,9 @@ pub(crate) fn handle_code_action_resolve(
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.code_action_params.range)?; let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
let frange = FileRange { file_id, range }; let frange = FileRange { file_id, range };
let source_root = snap.analysis.source_root(file_id)?;
let mut assists_config = snap.config.assist(); let mut assists_config = snap.config.assist(Some(source_root));
assists_config.allowed = params assists_config.allowed = params
.code_action_params .code_action_params
.context .context
@ -1290,7 +1296,7 @@ pub(crate) fn handle_code_action_resolve(
let assists = snap.analysis.assists_with_fixes( let assists = snap.analysis.assists_with_fixes(
&assists_config, &assists_config,
&snap.config.diagnostics(), &snap.config.diagnostics(Some(source_root)),
AssistResolveStrategy::Single(assist_resolve), AssistResolveStrategy::Single(assist_resolve),
frange, frange,
)?; )?;
@ -1419,8 +1425,12 @@ pub(crate) fn handle_document_highlight(
let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered(); let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?; let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.file_line_index(position.file_id)?; let line_index = snap.file_line_index(position.file_id)?;
let source_root = snap.analysis.source_root(position.file_id)?;
let refs = match snap.analysis.highlight_related(snap.config.highlight_related(), position)? { let refs = match snap
.analysis
.highlight_related(snap.config.highlight_related(Some(source_root)), position)?
{
None => return Ok(None), None => return Ok(None),
Some(refs) => refs, Some(refs) => refs,
}; };
@ -1466,7 +1476,9 @@ pub(crate) fn handle_inlay_hints(
params.range, params.range,
)?; )?;
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let inlay_hints_config = snap.config.inlay_hints(); let source_root = snap.analysis.source_root(file_id)?;
let inlay_hints_config = snap.config.inlay_hints(Some(source_root));
Ok(Some( Ok(Some(
snap.analysis snap.analysis
.inlay_hints(&inlay_hints_config, file_id, Some(range))? .inlay_hints(&inlay_hints_config, file_id, Some(range))?
@ -1501,7 +1513,9 @@ pub(crate) fn handle_inlay_hints_resolve(
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let hint_position = from_proto::offset(&line_index, original_hint.position)?; let hint_position = from_proto::offset(&line_index, original_hint.position)?;
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(); let source_root = snap.analysis.source_root(file_id)?;
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(Some(source_root));
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty(); forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
let resolve_hints = snap.analysis.inlay_hints_resolve( let resolve_hints = snap.analysis.inlay_hints_resolve(
&forced_resolve_inlay_hints_config, &forced_resolve_inlay_hints_config,
@ -1633,8 +1647,9 @@ pub(crate) fn handle_semantic_tokens_full(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?; let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let source_root = snap.analysis.source_root(file_id)?;
let mut highlight_config = snap.config.highlighting_config(); let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting = highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded; snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@ -1645,7 +1660,7 @@ pub(crate) fn handle_semantic_tokens_full(
&line_index, &line_index,
highlights, highlights,
snap.config.semantics_tokens_augments_syntax_tokens(), snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(), snap.config.highlighting_non_standard_tokens(Some(source_root)),
); );
// Unconditionally cache the tokens // Unconditionally cache the tokens
@ -1663,8 +1678,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?; let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
let source_root = snap.analysis.source_root(file_id)?;
let mut highlight_config = snap.config.highlighting_config(); let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting = highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded; snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@ -1675,7 +1691,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
&line_index, &line_index,
highlights, highlights,
snap.config.semantics_tokens_augments_syntax_tokens(), snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(), snap.config.highlighting_non_standard_tokens(Some(source_root)),
); );
let cached_tokens = snap.semantic_tokens_cache.lock().remove(&params.text_document.uri); let cached_tokens = snap.semantic_tokens_cache.lock().remove(&params.text_document.uri);
@ -1706,8 +1722,9 @@ pub(crate) fn handle_semantic_tokens_range(
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?; let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let text = snap.analysis.file_text(frange.file_id)?; let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?; let line_index = snap.file_line_index(frange.file_id)?;
let source_root = snap.analysis.source_root(frange.file_id)?;
let mut highlight_config = snap.config.highlighting_config(); let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting = highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded; snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@ -1718,7 +1735,7 @@ pub(crate) fn handle_semantic_tokens_range(
&line_index, &line_index,
highlights, highlights,
snap.config.semantics_tokens_augments_syntax_tokens(), snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(), snap.config.highlighting_non_standard_tokens(Some(source_root)),
); );
Ok(Some(semantic_tokens.into())) Ok(Some(semantic_tokens.into()))
} }
@ -1931,8 +1948,8 @@ fn goto_type_action_links(
snap: &GlobalStateSnapshot, snap: &GlobalStateSnapshot,
nav_targets: &[HoverGotoTypeData], nav_targets: &[HoverGotoTypeData],
) -> Option<lsp_ext::CommandLinkGroup> { ) -> Option<lsp_ext::CommandLinkGroup> {
if !snap.config.hover_actions().goto_type_def if nav_targets.is_empty()
|| nav_targets.is_empty() || !snap.config.hover_actions().goto_type_def
|| !snap.config.client_commands().goto_location || !snap.config.client_commands().goto_location
{ {
return None; return None;

View file

@ -233,7 +233,7 @@ pub(crate) fn completion_items(
completion_item(&mut res, config, line_index, &tdpp, max_relevance, item); completion_item(&mut res, config, line_index, &tdpp, max_relevance, item);
} }
if let Some(limit) = config.completion().limit { if let Some(limit) = config.completion(None).limit {
res.sort_by(|item1, item2| item1.sort_text.cmp(&item2.sort_text)); res.sort_by(|item1, item2| item1.sort_text.cmp(&item2.sort_text));
res.truncate(limit); res.truncate(limit);
} }
@ -317,7 +317,7 @@ fn completion_item(
set_score(&mut lsp_item, max_relevance, item.relevance); set_score(&mut lsp_item, max_relevance, item.relevance);
if config.completion().enable_imports_on_the_fly && !item.import_to_add.is_empty() { if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
let imports = item let imports = item
.import_to_add .import_to_add
.into_iter() .into_iter()

View file

@ -434,7 +434,7 @@ impl GlobalState {
} }
} }
if self.config.cargo_autoreload() { if self.config.cargo_autoreload_config() {
if let Some((cause, force_crate_graph_reload)) = if let Some((cause, force_crate_graph_reload)) =
self.fetch_workspaces_queue.should_start_op() self.fetch_workspaces_queue.should_start_op()
{ {

View file

@ -76,9 +76,9 @@ impl GlobalState {
if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() { if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() {
self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity()); self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity());
} }
if self.config.lru_query_capacities() != old_config.lru_query_capacities() { if self.config.lru_query_capacities_config() != old_config.lru_query_capacities_config() {
self.analysis_host.update_lru_capacities( self.analysis_host.update_lru_capacities(
&self.config.lru_query_capacities().cloned().unwrap_or_default(), &self.config.lru_query_capacities_config().cloned().unwrap_or_default(),
); );
} }
if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects() if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects()

View file

@ -159,10 +159,17 @@ building from locking the `Cargo.lock` at the expense of duplicating build artif
Set to `true` to use a subdirectory of the existing target directory or Set to `true` to use a subdirectory of the existing target directory or
set to a path relative to the workspace to use that path. set to a path relative to the workspace to use that path.
-- --
[[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest (default: `["core"]`):: [[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest::
+ +
-- --
Default:
----
[
"core"
]
----
Unsets the implicit `#[cfg(test)]` for the specified crates. Unsets the implicit `#[cfg(test)]` for the specified crates.
-- --
[[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`):: [[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`)::
+ +
@ -321,46 +328,46 @@ Enables completions of private items and fields that are defined in the current
Default: Default:
---- ----
{ {
"Arc::new": { "Arc::new": {
"postfix": "arc", "postfix": "arc",
"body": "Arc::new(${receiver})", "body": "Arc::new(${receiver})",
"requires": "std::sync::Arc", "requires": "std::sync::Arc",
"description": "Put the expression into an `Arc`", "description": "Put the expression into an `Arc`",
"scope": "expr" "scope": "expr"
}, },
"Rc::new": { "Rc::new": {
"postfix": "rc", "postfix": "rc",
"body": "Rc::new(${receiver})", "body": "Rc::new(${receiver})",
"requires": "std::rc::Rc", "requires": "std::rc::Rc",
"description": "Put the expression into an `Rc`", "description": "Put the expression into an `Rc`",
"scope": "expr" "scope": "expr"
}, },
"Box::pin": { "Box::pin": {
"postfix": "pinbox", "postfix": "pinbox",
"body": "Box::pin(${receiver})", "body": "Box::pin(${receiver})",
"requires": "std::boxed::Box", "requires": "std::boxed::Box",
"description": "Put the expression into a pinned `Box`", "description": "Put the expression into a pinned `Box`",
"scope": "expr" "scope": "expr"
}, },
"Ok": { "Ok": {
"postfix": "ok", "postfix": "ok",
"body": "Ok(${receiver})", "body": "Ok(${receiver})",
"description": "Wrap the expression in a `Result::Ok`", "description": "Wrap the expression in a `Result::Ok`",
"scope": "expr" "scope": "expr"
}, },
"Err": { "Err": {
"postfix": "err", "postfix": "err",
"body": "Err(${receiver})", "body": "Err(${receiver})",
"description": "Wrap the expression in a `Result::Err`", "description": "Wrap the expression in a `Result::Err`",
"scope": "expr" "scope": "expr"
}, },
"Some": { "Some": {
"postfix": "some", "postfix": "some",
"body": "Some(${receiver})", "body": "Some(${receiver})",
"description": "Wrap the expression in an `Option::Some`", "description": "Wrap the expression in an `Option::Some`",
"scope": "expr" "scope": "expr"
} }
} }
---- ----
Custom completion snippets. Custom completion snippets.