Auto merge of #16639 - alibektas:13529/config_restruct, r=Veykril

internal: redesign rust-analyzer::config

This PR aims to cover the infrastructural requirements for the `rust-analyzer.toml` (#13529) issue. This means that

1. We no longer have a single config base. The once single `ConfigData` has been divided into 4: a tree of `.ratoml` files; a set of configs coming from the client (this is what was previously called `CrateData`, except that now values do not default to anything when they are not defined); a set of configs that reflects the contents of a `ratoml` file defined in the user's config directory (e.g. `~/.config/rust-analyzer/.rust-analyzer.toml`); and finally a tree root that is populated by default values only.
2. Configs have also been divided into 3 different blocks: `global`, `local`, `client`. The current status of a config may change until #13529 gets merged.

Once again many thanks to `@cormacrelf` for doing all the serde work.
This commit is contained in:
bors 2024-04-16 07:52:07 +00:00
commit 1179c3ee83
15 changed files with 1404 additions and 945 deletions

55
Cargo.lock generated
View file

@ -781,6 +781,7 @@ checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4"
dependencies = [
"equivalent",
"hashbrown",
"serde",
]
[[package]]
@ -1594,6 +1595,7 @@ dependencies = [
"ide",
"ide-db",
"ide-ssr",
"indexmap",
"itertools",
"load-cargo",
"lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1622,6 +1624,7 @@ dependencies = [
"test-fixture",
"test-utils",
"tikv-jemallocator",
"toml",
"toolchain",
"tracing",
"tracing-subscriber",
@ -1775,6 +1778,15 @@ dependencies = [
"syn",
]
[[package]]
name = "serde_spanned"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1"
dependencies = [
"serde",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
@ -2026,6 +2038,40 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "toml"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"winnow",
]
[[package]]
name = "toolchain"
version = "0.0.0"
@ -2402,6 +2448,15 @@ version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
[[package]]
name = "winnow"
version = "0.5.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8434aeec7b290e8da5c3f0d628cb0eac6cabcb31d14bb74f779a08109a5914d6"
dependencies = [
"memchr",
]
[[package]]
name = "write-json"
version = "0.1.4"

View file

@ -19,6 +19,10 @@ use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been build yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
// Integer newtype id for a source root; see the `SourceRoot` docs below for
// what a source root is.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
/// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a
/// Rust crate. Source roots *might* be nested: in this case, a file belongs to
@ -26,9 +30,6 @@ pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf)
/// source root, and the analyzer does not know the root path of the source root at
/// all. So, a file from one source root can't refer to a file in another source
/// root by path.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourceRoot {
/// Sysroot or crates.io library.

View file

@ -177,7 +177,9 @@ fn _format(
use ide_db::base_db::{FileLoader, SourceDatabase};
// hack until we get hygiene working (same character amount to preserve formatting as much as possible)
const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE);
const BUILTIN_REPLACE: &str = "builtin__POUND";
let expansion =
expansion.replace("$crate", DOLLAR_CRATE_REPLACE).replace("builtin #", BUILTIN_REPLACE);
let (prefix, suffix) = match kind {
SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"),
SyntaxKind::MACRO_EXPR | SyntaxKind::MACRO_STMTS => ("fn __() {", "}"),
@ -206,7 +208,9 @@ fn _format(
let captured_stdout = String::from_utf8(output.stdout).ok()?;
if output.status.success() && !captured_stdout.trim().is_empty() {
let output = captured_stdout.replace(DOLLAR_CRATE_REPLACE, "$crate");
let output = captured_stdout
.replace(DOLLAR_CRATE_REPLACE, "$crate")
.replace(BUILTIN_REPLACE, "builtin #");
let output = output.trim().strip_prefix(prefix)?;
let output = match kind {
SyntaxKind::MACRO_PAT => {

View file

@ -66,7 +66,7 @@ use hir::ChangeWithProcMacros;
use ide_db::{
base_db::{
salsa::{self, ParallelDatabase},
CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceDatabaseExt, VfsPath,
},
prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
};
@ -273,6 +273,10 @@ impl Analysis {
self.with_db(|db| status::status(db, file_id))
}
pub fn source_root(&self, file_id: FileId) -> Cancellable<SourceRootId> {
self.with_db(|db| db.file_source_root(file_id))
}
pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
where
F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
@ -282,7 +286,7 @@ impl Analysis {
/// Gets the text of the source file.
pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
self.with_db(|db| db.file_text(file_id))
self.with_db(|db| SourceDatabaseExt::file_text(db, file_id))
}
/// Gets the syntax tree of the file.
@ -292,7 +296,6 @@ impl Analysis {
/// Returns true if this file belongs to an immutable library.
pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
use ide_db::base_db::SourceDatabaseExt;
self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
}

View file

@ -4,8 +4,9 @@
use std::{fmt, str::FromStr};
use cfg::CfgOptions;
use serde::Serialize;
#[derive(Clone, Eq, PartialEq, Debug)]
#[derive(Clone, Eq, PartialEq, Debug, Serialize)]
pub enum CfgFlag {
Atom(String),
KeyValue { key: String, value: String },

View file

@ -52,7 +52,7 @@
use base_db::{CrateDisplayName, CrateName};
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
use serde::{de, Deserialize};
use serde::{de, Deserialize, Serialize};
use span::Edition;
use crate::cfg_flag::CfgFlag;
@ -161,14 +161,14 @@ impl ProjectJson {
}
}
#[derive(Deserialize, Debug, Clone)]
/// Serializable mirror of the top level of a `rust-project.json` file.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ProjectJsonData {
// Path to the sysroot, when the json specifies one.
sysroot: Option<Utf8PathBuf>,
// Path to the sysroot's source directory, when the json specifies one.
sysroot_src: Option<Utf8PathBuf>,
// The crates making up the project; referenced by index (see `CrateArrayIdx`).
crates: Vec<CrateData>,
}
#[derive(Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug, Clone)]
struct CrateData {
display_name: Option<String>,
root_module: Utf8PathBuf,
@ -190,7 +190,7 @@ struct CrateData {
repository: Option<String>,
}
#[derive(Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename = "edition")]
enum EditionData {
#[serde(rename = "2015")]
@ -218,20 +218,21 @@ impl From<EditionData> for Edition {
///
/// This will differ from `CrateId` when multiple `ProjectJson`
/// workspaces are loaded.
#[derive(Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)]
#[serde(transparent)]
pub struct CrateArrayIdx(pub usize);
#[derive(Deserialize, Debug, Clone, Eq, PartialEq)]
/// A dependency edge of a crate in the `crates` array of `rust-project.json`.
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub(crate) struct Dep {
/// Identifies a crate by position in the crates array.
#[serde(rename = "crate")]
pub(crate) krate: CrateArrayIdx,
// Name the dependency is imported under; round-tripped through the custom
// `serialize_crate_name`/`deserialize_crate_name` helpers so it is
// (de)serialized as a plain string.
#[serde(serialize_with = "serialize_crate_name")]
#[serde(deserialize_with = "deserialize_crate_name")]
pub(crate) name: CrateName,
}
#[derive(Deserialize, Debug, Clone)]
#[derive(Serialize, Deserialize, Debug, Clone)]
struct CrateSource {
include_dirs: Vec<Utf8PathBuf>,
exclude_dirs: Vec<Utf8PathBuf>,
@ -244,3 +245,10 @@ where
let name = String::deserialize(de)?;
CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {err:?}")))
}
/// Serializes a [`CrateName`] as a plain string.
///
/// Counterpart of `deserialize_crate_name`; referenced by name from the
/// `#[serde(serialize_with = "serialize_crate_name")]` attribute on `Dep::name`.
fn serialize_crate_name<S: serde::Serializer>(
    name: &CrateName,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    // `CrateName` derefs to a string slice, so it can be emitted directly.
    serializer.serialize_str(name)
}

View file

@ -39,11 +39,13 @@ tracing.workspace = true
tracing-subscriber.workspace = true
tracing-tree.workspace = true
triomphe.workspace = true
toml = "0.8.8"
nohash-hasher.workspace = true
always-assert = "0.2.0"
walkdir = "2.3.2"
semver.workspace = true
memchr = "2.7.1"
indexmap = { workspace = true, features = ["serde"] }
cfg.workspace = true
flycheck.workspace = true

File diff suppressed because it is too large Load diff

View file

@ -154,10 +154,12 @@ pub(crate) fn fetch_native_diagnostics(
.copied()
.filter_map(|file_id| {
let line_index = snapshot.file_line_index(file_id).ok()?;
let source_root = snapshot.analysis.source_root(file_id).ok()?;
let diagnostics = snapshot
.analysis
.diagnostics(
&snapshot.config.diagnostics(),
&snapshot.config.diagnostics(Some(source_root)),
ide::AssistResolveStrategy::None,
file_id,
)

View file

@ -189,7 +189,7 @@ impl GlobalState {
};
let mut analysis_host = AnalysisHost::new(config.lru_parse_query_capacity());
if let Some(capacities) = config.lru_query_capacities() {
if let Some(capacities) = config.lru_query_capacities_config() {
analysis_host.update_lru_capacities(capacities);
}
let (flycheck_sender, flycheck_receiver) = unbounded();

View file

@ -369,8 +369,9 @@ pub(crate) fn handle_join_lines(
) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
let _p = tracing::span!(tracing::Level::INFO, "handle_join_lines").entered();
let config = snap.config.join_lines();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let source_root = snap.analysis.source_root(file_id)?;
let config = snap.config.join_lines(Some(source_root));
let line_index = snap.file_line_index(file_id)?;
let mut res = TextEdit::default();
@ -937,7 +938,8 @@ pub(crate) fn handle_completion(
let completion_trigger_character =
params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
let completion_config = &snap.config.completion();
let source_root = snap.analysis.source_root(position.file_id)?;
let completion_config = &snap.config.completion(Some(source_root));
let items = match snap.analysis.completions(
completion_config,
position,
@ -978,11 +980,12 @@ pub(crate) fn handle_completion_resolve(
let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, resolve_data.position.position)?;
let source_root = snap.analysis.source_root(file_id)?;
let additional_edits = snap
.analysis
.resolve_completion_edits(
&snap.config.completion(),
&snap.config.completion(Some(source_root)),
FilePosition { file_id, offset },
resolve_data
.imports
@ -1052,16 +1055,17 @@ pub(crate) fn handle_hover(
PositionOrRange::Position(position) => Range::new(position, position),
PositionOrRange::Range(range) => range,
};
let file_range = from_proto::file_range(&snap, &params.text_document, range)?;
let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
let hover = snap.config.hover();
let info = match snap.analysis.hover(&hover, file_range)? {
None => return Ok(None),
Some(info) => info,
};
let line_index = snap.file_line_index(file_range.file_id)?;
let range = to_proto::range(&line_index, info.range);
let markup_kind = snap.config.hover().format;
let markup_kind = hover.format;
let hover = lsp_ext::Hover {
hover: lsp_types::Hover {
contents: HoverContents::Markup(to_proto::markup_content(
@ -1205,11 +1209,12 @@ pub(crate) fn handle_code_action(
return Ok(None);
}
let line_index =
snap.file_line_index(from_proto::file_id(&snap, &params.text_document.uri)?)?;
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let source_root = snap.analysis.source_root(file_id)?;
let mut assists_config = snap.config.assist();
let mut assists_config = snap.config.assist(Some(source_root));
assists_config.allowed = params
.context
.only
@ -1226,7 +1231,7 @@ pub(crate) fn handle_code_action(
};
let assists = snap.analysis.assists_with_fixes(
&assists_config,
&snap.config.diagnostics(),
&snap.config.diagnostics(Some(source_root)),
resolve,
frange,
)?;
@ -1280,8 +1285,9 @@ pub(crate) fn handle_code_action_resolve(
let line_index = snap.file_line_index(file_id)?;
let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
let frange = FileRange { file_id, range };
let source_root = snap.analysis.source_root(file_id)?;
let mut assists_config = snap.config.assist();
let mut assists_config = snap.config.assist(Some(source_root));
assists_config.allowed = params
.code_action_params
.context
@ -1304,7 +1310,7 @@ pub(crate) fn handle_code_action_resolve(
let assists = snap.analysis.assists_with_fixes(
&assists_config,
&snap.config.diagnostics(),
&snap.config.diagnostics(Some(source_root)),
AssistResolveStrategy::Single(assist_resolve),
frange,
)?;
@ -1433,8 +1439,12 @@ pub(crate) fn handle_document_highlight(
let _p = tracing::span!(tracing::Level::INFO, "handle_document_highlight").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.file_line_index(position.file_id)?;
let source_root = snap.analysis.source_root(position.file_id)?;
let refs = match snap.analysis.highlight_related(snap.config.highlight_related(), position)? {
let refs = match snap
.analysis
.highlight_related(snap.config.highlight_related(Some(source_root)), position)?
{
None => return Ok(None),
Some(refs) => refs,
};
@ -1480,7 +1490,9 @@ pub(crate) fn handle_inlay_hints(
params.range,
)?;
let line_index = snap.file_line_index(file_id)?;
let inlay_hints_config = snap.config.inlay_hints();
let source_root = snap.analysis.source_root(file_id)?;
let inlay_hints_config = snap.config.inlay_hints(Some(source_root));
Ok(Some(
snap.analysis
.inlay_hints(&inlay_hints_config, file_id, Some(range))?
@ -1512,7 +1524,9 @@ pub(crate) fn handle_inlay_hints_resolve(
let line_index = snap.file_line_index(file_id)?;
let hint_position = from_proto::offset(&line_index, original_hint.position)?;
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints();
let source_root = snap.analysis.source_root(file_id)?;
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(Some(source_root));
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
let resolve_hints = snap.analysis.inlay_hints_resolve(
&forced_resolve_inlay_hints_config,
@ -1644,8 +1658,9 @@ pub(crate) fn handle_semantic_tokens_full(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let source_root = snap.analysis.source_root(file_id)?;
let mut highlight_config = snap.config.highlighting_config();
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@ -1656,7 +1671,7 @@ pub(crate) fn handle_semantic_tokens_full(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(),
snap.config.highlighting_non_standard_tokens(Some(source_root)),
);
// Unconditionally cache the tokens
@ -1674,8 +1689,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
let source_root = snap.analysis.source_root(file_id)?;
let mut highlight_config = snap.config.highlighting_config();
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@ -1686,7 +1702,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(),
snap.config.highlighting_non_standard_tokens(Some(source_root)),
);
let cached_tokens = snap.semantic_tokens_cache.lock().remove(&params.text_document.uri);
@ -1717,8 +1733,9 @@ pub(crate) fn handle_semantic_tokens_range(
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?;
let source_root = snap.analysis.source_root(frange.file_id)?;
let mut highlight_config = snap.config.highlighting_config();
let mut highlight_config = snap.config.highlighting_config(Some(source_root));
// Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
highlight_config.syntactic_name_ref_highlighting =
snap.workspaces.is_empty() || !snap.proc_macros_loaded;
@ -1729,7 +1746,7 @@ pub(crate) fn handle_semantic_tokens_range(
&line_index,
highlights,
snap.config.semantics_tokens_augments_syntax_tokens(),
snap.config.highlighting_non_standard_tokens(),
snap.config.highlighting_non_standard_tokens(Some(source_root)),
);
Ok(Some(semantic_tokens.into()))
}
@ -1942,8 +1959,8 @@ fn goto_type_action_links(
snap: &GlobalStateSnapshot,
nav_targets: &[HoverGotoTypeData],
) -> Option<lsp_ext::CommandLinkGroup> {
if !snap.config.hover_actions().goto_type_def
|| nav_targets.is_empty()
if nav_targets.is_empty()
|| !snap.config.hover_actions().goto_type_def
|| !snap.config.client_commands().goto_location
{
return None;

View file

@ -233,7 +233,7 @@ pub(crate) fn completion_items(
completion_item(&mut res, config, line_index, &tdpp, max_relevance, item);
}
if let Some(limit) = config.completion().limit {
if let Some(limit) = config.completion(None).limit {
res.sort_by(|item1, item2| item1.sort_text.cmp(&item2.sort_text));
res.truncate(limit);
}
@ -317,7 +317,7 @@ fn completion_item(
set_score(&mut lsp_item, max_relevance, item.relevance);
if config.completion().enable_imports_on_the_fly && !item.import_to_add.is_empty() {
if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
let imports = item
.import_to_add
.into_iter()

View file

@ -428,7 +428,7 @@ impl GlobalState {
}
}
if self.config.cargo_autoreload() {
if self.config.cargo_autoreload_config() {
if let Some((cause, force_crate_graph_reload)) =
self.fetch_workspaces_queue.should_start_op()
{

View file

@ -76,9 +76,9 @@ impl GlobalState {
if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() {
self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity());
}
if self.config.lru_query_capacities() != old_config.lru_query_capacities() {
if self.config.lru_query_capacities_config() != old_config.lru_query_capacities_config() {
self.analysis_host.update_lru_capacities(
&self.config.lru_query_capacities().cloned().unwrap_or_default(),
&self.config.lru_query_capacities_config().cloned().unwrap_or_default(),
);
}
if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects()

View file

@ -159,10 +159,17 @@ building from locking the `Cargo.lock` at the expense of duplicating build artif
Set to `true` to use a subdirectory of the existing target directory or
set to a path relative to the workspace to use that path.
--
[[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest (default: `["core"]`)::
[[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest::
+
--
Default:
----
[
"core"
]
----
Unsets the implicit `#[cfg(test)]` for the specified crates.
--
[[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`)::
+
@ -321,46 +328,46 @@ Enables completions of private items and fields that are defined in the current
Default:
----
{
"Arc::new": {
"postfix": "arc",
"body": "Arc::new(${receiver})",
"requires": "std::sync::Arc",
"description": "Put the expression into an `Arc`",
"scope": "expr"
},
"Rc::new": {
"postfix": "rc",
"body": "Rc::new(${receiver})",
"requires": "std::rc::Rc",
"description": "Put the expression into an `Rc`",
"scope": "expr"
},
"Box::pin": {
"postfix": "pinbox",
"body": "Box::pin(${receiver})",
"requires": "std::boxed::Box",
"description": "Put the expression into a pinned `Box`",
"scope": "expr"
},
"Ok": {
"postfix": "ok",
"body": "Ok(${receiver})",
"description": "Wrap the expression in a `Result::Ok`",
"scope": "expr"
},
"Err": {
"postfix": "err",
"body": "Err(${receiver})",
"description": "Wrap the expression in a `Result::Err`",
"scope": "expr"
},
"Some": {
"postfix": "some",
"body": "Some(${receiver})",
"description": "Wrap the expression in an `Option::Some`",
"scope": "expr"
}
}
"Arc::new": {
"postfix": "arc",
"body": "Arc::new(${receiver})",
"requires": "std::sync::Arc",
"description": "Put the expression into an `Arc`",
"scope": "expr"
},
"Rc::new": {
"postfix": "rc",
"body": "Rc::new(${receiver})",
"requires": "std::rc::Rc",
"description": "Put the expression into an `Rc`",
"scope": "expr"
},
"Box::pin": {
"postfix": "pinbox",
"body": "Box::pin(${receiver})",
"requires": "std::boxed::Box",
"description": "Put the expression into a pinned `Box`",
"scope": "expr"
},
"Ok": {
"postfix": "ok",
"body": "Ok(${receiver})",
"description": "Wrap the expression in a `Result::Ok`",
"scope": "expr"
},
"Err": {
"postfix": "err",
"body": "Err(${receiver})",
"description": "Wrap the expression in a `Result::Err`",
"scope": "expr"
},
"Some": {
"postfix": "some",
"body": "Some(${receiver})",
"description": "Wrap the expression in an `Option::Some`",
"scope": "expr"
}
}
----
Custom completion snippets.