More maintainable config

Rather than eagerly converting JSON, we keep it as-is, losslessly, and only
change the shape of the user-submitted data at the last moment.

This also allows us to remove a bunch of incorrect `Default` impls.
Aleksey Kladov 2021-01-06 13:54:28 +03:00
parent c310446659
commit f7a15b5cd1
16 changed files with 422 additions and 442 deletions
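
The heart of the change, reduced to a self-contained sketch (the `Config`, `ConfigData` and `HoverConfig` names follow the commit; the fields, snake_case naming, and method bodies are simplified stand-ins): `Config::update` now only stores the deserialized data, and typed views such as `HoverConfig` are derived on demand, so none of them need a hand-written `Default` any more.

```rust
// The raw, user-submitted configuration is stored losslessly...
#[derive(Default)]
struct ConfigData {
    hover_actions_enable: bool,
    hover_actions_run: bool,
}

// ...and the typed view consumed by the rest of the code is derived on demand.
struct HoverConfig {
    run: bool,
}

struct Config {
    data: ConfigData,
}

impl Config {
    // `update` only stores the data: no eager conversion, no hand-written defaults.
    fn update(&mut self, data: ConfigData) {
        self.data = data;
    }

    // The shape changes "at the last moment", when a subsystem asks for its config.
    fn hover(&self) -> HoverConfig {
        HoverConfig { run: self.data.hover_actions_enable && self.data.hover_actions_run }
    }
}

fn main() {
    let mut config = Config { data: ConfigData::default() };
    config.update(ConfigData { hover_actions_enable: true, hover_actions_run: true });
    assert!(config.hover().run);
}
```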

View file

@ -17,7 +17,7 @@ use crate::{
doc_links::{remove_links, rewrite_links},
markdown_remove::remove_markdown,
markup::Markup,
runnables::runnable,
runnables::{runnable, runnable_fn},
FileId, FilePosition, NavigationTarget, RangeInfo, Runnable,
};
@ -31,19 +31,6 @@ pub struct HoverConfig {
pub markdown: bool,
}
impl Default for HoverConfig {
fn default() -> Self {
Self {
implementations: true,
run: true,
debug: true,
goto_type_def: true,
links_in_hover: true,
markdown: true,
}
}
}
impl HoverConfig {
pub const NO_ACTIONS: Self = Self {
implementations: false,
@ -204,22 +191,20 @@ fn runnable_action(
match def {
Definition::ModuleDef(it) => match it {
ModuleDef::Module(it) => match it.definition_source(sema.db).value {
ModuleSource::Module(it) => runnable(&sema, it.syntax().clone(), file_id)
.map(|it| HoverAction::Runnable(it)),
ModuleSource::Module(it) => {
runnable(&sema, it.syntax().clone()).map(|it| HoverAction::Runnable(it))
}
_ => None,
},
ModuleDef::Function(it) => {
#[allow(deprecated)]
let src = it.source(sema.db)?;
ModuleDef::Function(func) => {
let src = func.source(sema.db)?;
if src.file_id != file_id.into() {
mark::hit!(hover_macro_generated_struct_fn_doc_comment);
mark::hit!(hover_macro_generated_struct_fn_doc_attr);
return None;
}
runnable(&sema, src.value.syntax().clone(), file_id)
.map(|it| HoverAction::Runnable(it))
runnable_fn(&sema, func).map(HoverAction::Runnable)
}
_ => None,
},

View file

@ -18,12 +18,6 @@ pub struct InlayHintsConfig {
pub max_length: Option<usize>,
}
impl Default for InlayHintsConfig {
fn default() -> Self {
Self { type_hints: true, parameter_hints: true, chaining_hints: true, max_length: None }
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InlayKind {
TypeHint,
@ -433,8 +427,15 @@ mod tests {
use crate::{fixture, inlay_hints::InlayHintsConfig};
const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
type_hints: true,
parameter_hints: true,
chaining_hints: true,
max_length: None,
};
fn check(ra_fixture: &str) {
check_with_config(InlayHintsConfig::default(), ra_fixture);
check_with_config(TEST_CONFIG, ra_fixture);
}
fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {
@ -748,7 +749,7 @@ fn main() {
#[test]
fn hint_truncation() {
check_with_config(
InlayHintsConfig { max_length: Some(8), ..Default::default() },
InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
r#"
struct Smol<T>(T);
@ -831,7 +832,7 @@ fn main() {
#[test]
fn omitted_parameters_hints_heuristics() {
check_with_config(
InlayHintsConfig { max_length: Some(8), ..Default::default() },
InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
r#"
fn map(f: i32) {}
fn filter(predicate: i32) {}
@ -924,7 +925,7 @@ fn main() {
#[test]
fn unit_structs_have_no_type_hints() {
check_with_config(
InlayHintsConfig { max_length: Some(8), ..Default::default() },
InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
r#"
enum Result<T, E> { Ok(T), Err(E) }
use Result::*;
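
With `Default for InlayHintsConfig` removed, the tests above pin an explicit baseline in a `const` and tweak individual fields with struct-update syntax. A minimal sketch of that idiom, reusing the field names from the hunk:

```rust
struct InlayHintsConfig {
    type_hints: bool,
    parameter_hints: bool,
    chaining_hints: bool,
    max_length: Option<usize>,
}

// A test-local baseline replaces the deleted `Default` impl.
const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
    type_hints: true,
    parameter_hints: true,
    chaining_hints: true,
    max_length: None,
};

fn main() {
    // Struct-update syntax overrides one field and keeps the rest of the baseline.
    let truncated = InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG };
    assert_eq!(truncated.max_length, Some(8));
    assert!(truncated.type_hints && truncated.parameter_hints && truncated.chaining_hints);
}
```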

View file

@ -2,11 +2,11 @@ use std::fmt;
use assists::utils::test_related_attribute;
use cfg::CfgExpr;
use hir::{AsAssocItem, HasAttrs, InFile, Semantics};
use hir::{AsAssocItem, HasAttrs, HasSource, Semantics};
use ide_db::RootDatabase;
use itertools::Itertools;
use syntax::{
ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner},
ast::{self, AstNode, AttrsOwner, ModuleItemOwner},
match_ast, SyntaxNode,
};
@ -96,17 +96,16 @@ impl Runnable {
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
let sema = Semantics::new(db);
let source_file = sema.parse(file_id);
source_file.syntax().descendants().filter_map(|i| runnable(&sema, i, file_id)).collect()
source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect()
}
pub(crate) fn runnable(
sema: &Semantics<RootDatabase>,
item: SyntaxNode,
file_id: FileId,
) -> Option<Runnable> {
pub(crate) fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode) -> Option<Runnable> {
let runnable_item = match_ast! {
match (item.clone()) {
ast::Fn(it) => runnable_fn(sema, it, file_id),
ast::Fn(func) => {
let def = sema.to_def(&func)?;
runnable_fn(sema, def)
},
ast::Module(it) => runnable_mod(sema, it),
_ => None,
}
@ -114,23 +113,23 @@ pub(crate) fn runnable(
runnable_item.or_else(|| runnable_doctest(sema, item))
}
fn runnable_fn(sema: &Semantics<RootDatabase>, func: ast::Fn, file_id: FileId) -> Option<Runnable> {
let def = sema.to_def(&func)?;
let name_string = func.name()?.text().to_string();
pub(crate) fn runnable_fn(sema: &Semantics<RootDatabase>, def: hir::Function) -> Option<Runnable> {
let func = def.source(sema.db)?;
let name_string = def.name(sema.db).to_string();
let kind = if name_string == "main" {
RunnableKind::Bin
} else {
let canonical_path = sema.to_def(&func).and_then(|def| {
let canonical_path = {
let def: hir::ModuleDef = def.into();
def.canonical_path(sema.db)
});
};
let test_id = canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name_string));
if test_related_attribute(&func).is_some() {
let attr = TestAttr::from_fn(&func);
if test_related_attribute(&func.value).is_some() {
let attr = TestAttr::from_fn(&func.value);
RunnableKind::Test { test_id, attr }
} else if func.has_atom_attr("bench") {
} else if func.value.has_atom_attr("bench") {
RunnableKind::Bench { test_id }
} else {
return None;
@ -139,7 +138,7 @@ fn runnable_fn(sema: &Semantics<RootDatabase>, func: ast::Fn, file_id: FileId) -
let nav = NavigationTarget::from_named(
sema.db,
InFile::new(file_id.into(), &func),
func.as_ref().map(|it| it as &dyn ast::NameOwner),
SymbolKind::Function,
);
let cfg = def.attrs(sema.db).cfg();
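
The new `runnable_fn` signature above trades syntax-plus-`FileId` inputs for a semantic `hir::Function`, which already knows its source file via `HasSource`. A toy sketch of that shape; `FileId`, `InFile` and `Function` here are simplified placeholders, not the real `hir` types:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct FileId(u32);

// Mirrors `hir::InFile`: a value paired with the file it comes from.
struct InFile<T> {
    file_id: FileId,
    value: T,
}

struct Function {
    name: String,
    source: InFile<String>, // the real type is roughly `InFile<ast::Fn>`
}

impl Function {
    // Like `hir::Function::source`: the definition knows where its syntax lives,
    // so callers no longer thread a `FileId` alongside a syntax node.
    fn source(&self) -> &InFile<String> {
        &self.source
    }
}

fn runnable_fn(def: &Function) -> Option<(FileId, String)> {
    let src = def.source();
    Some((src.file_id, format!("run {}: {}", def.name, src.value)))
}

fn main() {
    let func = Function {
        name: "main".to_string(),
        source: InFile { file_id: FileId(0), value: "fn main() {}".to_string() },
    };
    assert_eq!(runnable_fn(&func).map(|(file, _)| file), Some(FileId(0)));
}
```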

View file

@ -110,13 +110,13 @@ impl ProjectJson {
}
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug, Clone)]
pub struct ProjectJsonData {
sysroot_src: Option<PathBuf>,
crates: Vec<CrateData>,
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug, Clone)]
struct CrateData {
display_name: Option<String>,
root_module: PathBuf,
@ -132,7 +132,7 @@ struct CrateData {
source: Option<CrateSource>,
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug, Clone)]
#[serde(rename = "edition")]
enum EditionData {
#[serde(rename = "2015")]
@ -153,7 +153,7 @@ impl From<EditionData> for Edition {
}
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug, Clone)]
struct DepData {
/// Identifies a crate by position in the crates array.
#[serde(rename = "crate")]
@ -162,7 +162,7 @@ struct DepData {
name: CrateName,
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug, Clone)]
struct CrateSource {
include_dirs: Vec<PathBuf>,
exclude_dirs: Vec<PathBuf>,
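
The extra `Debug, Clone` derives exist because the raw project description is now held losslessly inside `ConfigData`, which itself derives `Debug` and `Clone`; every type reachable from it has to follow suit. A tiny illustration with simplified fields:

```rust
#[derive(Debug, Clone)]
struct ProjectJsonData {
    crates: Vec<String>, // simplified; the real field is `Vec<CrateData>`
}

#[derive(Debug, Clone)]
struct ConfigData {
    linked_projects: Vec<ProjectJsonData>,
}

fn main() {
    let data = ConfigData {
        linked_projects: vec![ProjectJsonData { crates: vec!["foo".to_string()] }],
    };
    // Cloning and debug-printing the whole configuration now works end to end.
    let copy = data.clone();
    println!("{:?}", copy);
}
```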

View file

@ -8,11 +8,7 @@ use std::{convert::TryFrom, env, fs, path::PathBuf, process};
use lsp_server::Connection;
use project_model::ProjectManifest;
use rust_analyzer::{
cli,
config::{Config, LinkedProject},
from_json, Result,
};
use rust_analyzer::{cli, config::Config, from_json, Result};
use vfs::AbsPathBuf;
#[cfg(all(feature = "mimalloc"))]
@ -138,13 +134,12 @@ fn run_server() -> Result<()> {
}
};
let mut config = Config::new(root_path);
let mut config = Config::new(root_path, initialize_params.capabilities);
if let Some(json) = initialize_params.initialization_options {
config.update(json);
}
config.update_caps(&initialize_params.capabilities);
if config.linked_projects.is_empty() {
if config.linked_projects().is_empty() {
let workspace_roots = initialize_params
.workspace_folders
.map(|workspaces| {
@ -163,7 +158,7 @@ fn run_server() -> Result<()> {
log::error!("failed to find any projects in {:?}", workspace_roots);
}
config.linked_projects = discovered.into_iter().map(LinkedProject::from).collect();
config.discovered_projects = Some(discovered);
}
config

View file

@ -84,14 +84,15 @@ impl CargoTargetSpec {
}
}
if snap.config.cargo.all_features {
let cargo_config = snap.config.cargo();
if cargo_config.all_features {
args.push("--all-features".to_string());
} else {
let mut features = Vec::new();
if let Some(cfg) = cfg.as_ref() {
required_features(cfg, &mut features);
}
for feature in &snap.config.cargo.features {
for feature in cargo_config.features {
features.push(feature.clone());
}
features.dedup();
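
Because `Config::cargo()` now builds and returns an owned `CargoConfig`, the call site above binds it once up front instead of reading a public `snap.config.cargo` field for each access. A reduced sketch of that call-site shape, with stand-in types:

```rust
struct CargoConfig {
    all_features: bool,
    features: Vec<String>,
}

struct Config {
    all_features: bool,
    features: Vec<String>,
}

impl Config {
    // The accessor assembles a fresh owned value on every call...
    fn cargo(&self) -> CargoConfig {
        CargoConfig { all_features: self.all_features, features: self.features.clone() }
    }
}

fn push_feature_args(config: &Config, args: &mut Vec<String>) {
    // ...so the caller binds it once rather than re-deriving it per field.
    let cargo_config = config.cargo();
    if cargo_config.all_features {
        args.push("--all-features".to_string());
    } else {
        for feature in cargo_config.features {
            args.push(feature);
        }
    }
}

fn main() {
    let config = Config { all_features: false, features: vec!["foo".to_string()] };
    let mut args = Vec::new();
    push_feature_args(&config, &mut args);
    assert_eq!(args, vec!["foo".to_string()]);
}
```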

View file

@ -148,13 +148,19 @@ config_data! {
/// of projects.\n\nElements must be paths pointing to `Cargo.toml`,
/// `rust-project.json`, or JSON objects in `rust-project.json` format.
linkedProjects: Vec<ManifestOrProjectJson> = "[]",
/// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
lruCapacity: Option<usize> = "null",
/// Whether to show `can't find Cargo.toml` error message.
notifications_cargoTomlNotFound: bool = "true",
/// Enable Proc macro support, `#rust-analyzer.cargo.loadOutDirsFromCheck#` must be
/// enabled.
procMacro_enable: bool = "false",
/// Internal config, path to proc-macro server executable (typically,
/// this is rust-analyzer itself, but we override this in tests).
procMacro_server: Option<PathBuf> = "null",
/// Command to be executed instead of 'cargo' for runnables.
runnables_overrideCargo: Option<String> = "null",
@ -163,7 +169,7 @@ config_data! {
runnables_cargoExtraArgs: Vec<String> = "[]",
/// Path to the rust compiler sources, for usage in rustc_private projects.
rustcSource : Option<String> = "null",
rustcSource : Option<PathBuf> = "null",
/// Additional arguments to `rustfmt`.
rustfmt_extraArgs: Vec<String> = "[]",
@ -173,34 +179,17 @@ config_data! {
}
}
impl Default for ConfigData {
fn default() -> Self {
ConfigData::from_json(serde_json::Value::Null)
}
}
#[derive(Debug, Clone)]
pub struct Config {
pub caps: lsp_types::ClientCapabilities,
pub publish_diagnostics: bool,
pub diagnostics: DiagnosticsConfig,
pub diagnostics_map: DiagnosticsMapConfig,
pub lru_capacity: Option<usize>,
pub proc_macro_srv: Option<(PathBuf, Vec<OsString>)>,
pub files: FilesConfig,
pub notifications: NotificationsConfig,
pub cargo_autoreload: bool,
pub cargo: CargoConfig,
pub rustfmt: RustfmtConfig,
pub flycheck: Option<FlycheckConfig>,
pub runnables: RunnablesConfig,
pub inlay_hints: InlayHintsConfig,
pub completion: CompletionConfig,
pub assist: AssistConfig,
pub call_info_full: bool,
pub lens: LensConfig,
pub hover: HoverConfig,
pub semantic_tokens_refresh: bool,
pub code_lens_refresh: bool,
pub linked_projects: Vec<LinkedProject>,
caps: lsp_types::ClientCapabilities,
data: ConfigData,
pub discovered_projects: Option<Vec<ProjectManifest>>,
pub root_path: AbsPathBuf,
}
@ -230,12 +219,6 @@ pub struct LensConfig {
pub method_refs: bool,
}
impl Default for LensConfig {
fn default() -> Self {
Self { run: true, debug: true, implementations: true, method_refs: false }
}
}
impl LensConfig {
pub fn any(&self) -> bool {
self.implementations || self.runnable() || self.references()
@ -278,7 +261,7 @@ pub enum RustfmtConfig {
}
/// Configuration for runnable items, such as `main` function or tests.
#[derive(Debug, Clone, Default)]
#[derive(Debug, Clone)]
pub struct RunnablesConfig {
/// Custom command to be executed instead of `cargo` for runnables.
pub override_cargo: Option<String>,
@ -287,250 +270,15 @@ pub struct RunnablesConfig {
}
impl Config {
pub fn new(root_path: AbsPathBuf) -> Self {
// Defaults here don't matter, we'll immediately re-write them with
// ConfigData.
let mut res = Config {
caps: lsp_types::ClientCapabilities::default(),
publish_diagnostics: false,
diagnostics: DiagnosticsConfig::default(),
diagnostics_map: DiagnosticsMapConfig::default(),
lru_capacity: None,
proc_macro_srv: None,
files: FilesConfig { watcher: FilesWatcher::Notify, exclude: Vec::new() },
notifications: NotificationsConfig { cargo_toml_not_found: false },
cargo_autoreload: false,
cargo: CargoConfig::default(),
rustfmt: RustfmtConfig::Rustfmt { extra_args: Vec::new() },
flycheck: Some(FlycheckConfig::CargoCommand {
command: String::new(),
target_triple: None,
no_default_features: false,
all_targets: false,
all_features: false,
extra_args: Vec::new(),
features: Vec::new(),
}),
runnables: RunnablesConfig::default(),
inlay_hints: InlayHintsConfig {
type_hints: false,
parameter_hints: false,
chaining_hints: false,
max_length: None,
},
completion: CompletionConfig::default(),
assist: AssistConfig::default(),
call_info_full: false,
lens: LensConfig::default(),
hover: HoverConfig::default(),
semantic_tokens_refresh: false,
code_lens_refresh: false,
linked_projects: Vec::new(),
root_path,
};
res.do_update(serde_json::json!({}));
res
pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self {
Config { caps, data: ConfigData::default(), discovered_projects: None, root_path }
}
pub fn update(&mut self, json: serde_json::Value) {
log::info!("updating config from JSON: {:#}", json);
if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
return;
}
self.do_update(json);
log::info!("updated config: {:#?}", self);
}
fn do_update(&mut self, json: serde_json::Value) {
let data = ConfigData::from_json(json);
self.publish_diagnostics = data.diagnostics_enable;
self.diagnostics = DiagnosticsConfig {
disable_experimental: !data.diagnostics_enableExperimental,
disabled: data.diagnostics_disabled,
};
self.diagnostics_map = DiagnosticsMapConfig {
warnings_as_info: data.diagnostics_warningsAsInfo,
warnings_as_hint: data.diagnostics_warningsAsHint,
};
self.lru_capacity = data.lruCapacity;
self.files.watcher = match data.files_watcher.as_str() {
"notify" => FilesWatcher::Notify,
"client" | _ => FilesWatcher::Client,
};
self.notifications =
NotificationsConfig { cargo_toml_not_found: data.notifications_cargoTomlNotFound };
self.cargo_autoreload = data.cargo_autoreload;
let rustc_source = if let Some(rustc_source) = data.rustcSource {
let rustpath: PathBuf = rustc_source.into();
AbsPathBuf::try_from(rustpath)
.map_err(|_| {
log::error!("rustc source directory must be an absolute path");
})
.ok()
} else {
None
};
self.cargo = CargoConfig {
no_default_features: data.cargo_noDefaultFeatures,
all_features: data.cargo_allFeatures,
features: data.cargo_features.clone(),
load_out_dirs_from_check: data.cargo_loadOutDirsFromCheck,
target: data.cargo_target.clone(),
rustc_source: rustc_source,
no_sysroot: data.cargo_noSysroot,
};
self.runnables = RunnablesConfig {
override_cargo: data.runnables_overrideCargo,
cargo_extra_args: data.runnables_cargoExtraArgs,
};
self.proc_macro_srv = if data.procMacro_enable {
std::env::current_exe().ok().map(|path| (path, vec!["proc-macro".into()]))
} else {
None
};
self.rustfmt = match data.rustfmt_overrideCommand {
Some(mut args) if !args.is_empty() => {
let command = args.remove(0);
RustfmtConfig::CustomCommand { command, args }
}
Some(_) | None => RustfmtConfig::Rustfmt { extra_args: data.rustfmt_extraArgs },
};
self.flycheck = if data.checkOnSave_enable {
let flycheck_config = match data.checkOnSave_overrideCommand {
Some(mut args) if !args.is_empty() => {
let command = args.remove(0);
FlycheckConfig::CustomCommand { command, args }
}
Some(_) | None => FlycheckConfig::CargoCommand {
command: data.checkOnSave_command,
target_triple: data.checkOnSave_target.or(data.cargo_target),
all_targets: data.checkOnSave_allTargets,
no_default_features: data
.checkOnSave_noDefaultFeatures
.unwrap_or(data.cargo_noDefaultFeatures),
all_features: data.checkOnSave_allFeatures.unwrap_or(data.cargo_allFeatures),
features: data.checkOnSave_features.unwrap_or(data.cargo_features),
extra_args: data.checkOnSave_extraArgs,
},
};
Some(flycheck_config)
} else {
None
};
self.inlay_hints = InlayHintsConfig {
type_hints: data.inlayHints_typeHints,
parameter_hints: data.inlayHints_parameterHints,
chaining_hints: data.inlayHints_chainingHints,
max_length: data.inlayHints_maxLength,
};
self.assist.insert_use.merge = match data.assist_importMergeBehaviour {
MergeBehaviorDef::None => None,
MergeBehaviorDef::Full => Some(MergeBehavior::Full),
MergeBehaviorDef::Last => Some(MergeBehavior::Last),
};
self.assist.insert_use.prefix_kind = match data.assist_importPrefix {
ImportPrefixDef::Plain => PrefixKind::Plain,
ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
ImportPrefixDef::BySelf => PrefixKind::BySelf,
};
self.completion.enable_postfix_completions = data.completion_postfix_enable;
self.completion.enable_autoimport_completions = data.completion_autoimport_enable;
self.completion.add_call_parenthesis = data.completion_addCallParenthesis;
self.completion.add_call_argument_snippets = data.completion_addCallArgumentSnippets;
self.completion.merge = self.assist.insert_use.merge;
self.call_info_full = data.callInfo_full;
self.lens = LensConfig {
run: data.lens_enable && data.lens_run,
debug: data.lens_enable && data.lens_debug,
implementations: data.lens_enable && data.lens_implementations,
method_refs: data.lens_enable && data.lens_methodReferences,
};
if !data.linkedProjects.is_empty() {
self.linked_projects.clear();
for linked_project in data.linkedProjects {
let linked_project = match linked_project {
ManifestOrProjectJson::Manifest(it) => {
let path = self.root_path.join(it);
match ProjectManifest::from_manifest_file(path) {
Ok(it) => it.into(),
Err(e) => {
log::error!("failed to load linked project: {}", e);
continue;
}
}
}
ManifestOrProjectJson::ProjectJson(it) => {
ProjectJson::new(&self.root_path, it).into()
}
};
self.linked_projects.push(linked_project);
}
}
self.hover = HoverConfig {
implementations: data.hoverActions_enable && data.hoverActions_implementations,
run: data.hoverActions_enable && data.hoverActions_run,
debug: data.hoverActions_enable && data.hoverActions_debug,
goto_type_def: data.hoverActions_enable && data.hoverActions_gotoTypeDef,
links_in_hover: data.hoverActions_linksInHover,
markdown: true,
};
}
pub fn update_caps(&mut self, caps: &ClientCapabilities) {
self.caps = caps.clone();
if let Some(doc_caps) = caps.text_document.as_ref() {
if let Some(value) = doc_caps.hover.as_ref().and_then(|it| it.content_format.as_ref()) {
self.hover.markdown = value.contains(&MarkupKind::Markdown)
}
self.completion.allow_snippets(false);
self.completion.active_resolve_capabilities =
enabled_completions_resolve_capabilities(caps).unwrap_or_default();
if let Some(completion) = &doc_caps.completion {
if let Some(completion_item) = &completion.completion_item {
if let Some(value) = completion_item.snippet_support {
self.completion.allow_snippets(value);
}
}
}
}
self.assist.allow_snippets(false);
if let Some(experimental) = &caps.experimental {
let get_bool =
|index: &str| experimental.get(index).and_then(|it| it.as_bool()) == Some(true);
let snippet_text_edit = get_bool("snippetTextEdit");
self.assist.allow_snippets(snippet_text_edit);
}
if let Some(workspace_caps) = caps.workspace.as_ref() {
if let Some(refresh_support) =
workspace_caps.semantic_tokens.as_ref().and_then(|it| it.refresh_support)
{
self.semantic_tokens_refresh = refresh_support;
}
if let Some(refresh_support) =
workspace_caps.code_lens.as_ref().and_then(|it| it.refresh_support)
{
self.code_lens_refresh = refresh_support;
}
}
self.data = ConfigData::from_json(json);
}
pub fn json_schema() -> serde_json::Value {
@ -550,6 +298,38 @@ macro_rules! try_or {
}
impl Config {
pub fn linked_projects(&self) -> Vec<LinkedProject> {
if self.data.linkedProjects.is_empty() {
self.discovered_projects
.as_ref()
.into_iter()
.flatten()
.cloned()
.map(LinkedProject::from)
.collect()
} else {
self.data
.linkedProjects
.iter()
.filter_map(|linked_project| {
let res = match linked_project {
ManifestOrProjectJson::Manifest(it) => {
let path = self.root_path.join(it);
ProjectManifest::from_manifest_file(path)
.map_err(|e| log::error!("failed to load linked project: {}", e))
.ok()?
.into()
}
ManifestOrProjectJson::ProjectJson(it) => {
ProjectJson::new(&self.root_path, it.clone()).into()
}
};
Some(res)
})
.collect()
}
}
pub fn location_link(&self) -> bool {
try_or!(self.caps.text_document.as_ref()?.definition?.link_support?, false)
}
@ -625,16 +405,217 @@ impl Config {
pub fn status_notification(&self) -> bool {
self.experimental("statusNotification")
}
pub fn publish_diagnostics(&self) -> bool {
self.data.diagnostics_enable
}
pub fn diagnostics(&self) -> DiagnosticsConfig {
DiagnosticsConfig {
disable_experimental: !self.data.diagnostics_enableExperimental,
disabled: self.data.diagnostics_disabled.clone(),
}
}
pub fn diagnostics_map(&self) -> DiagnosticsMapConfig {
DiagnosticsMapConfig {
warnings_as_info: self.data.diagnostics_warningsAsInfo.clone(),
warnings_as_hint: self.data.diagnostics_warningsAsHint.clone(),
}
}
pub fn lru_capacity(&self) -> Option<usize> {
self.data.lruCapacity
}
pub fn proc_macro_srv(&self) -> Option<(PathBuf, Vec<OsString>)> {
if !self.data.procMacro_enable {
return None;
}
let path = self.data.procMacro_server.clone().or_else(|| std::env::current_exe().ok())?;
Some((path, vec!["proc-macro".into()]))
}
pub fn files(&self) -> FilesConfig {
FilesConfig {
watcher: match self.data.files_watcher.as_str() {
"notify" => FilesWatcher::Notify,
"client" | _ => FilesWatcher::Client,
},
exclude: Vec::new(),
}
}
pub fn notifications(&self) -> NotificationsConfig {
NotificationsConfig { cargo_toml_not_found: self.data.notifications_cargoTomlNotFound }
}
pub fn cargo_autoreload(&self) -> bool {
self.data.cargo_autoreload
}
pub fn cargo(&self) -> CargoConfig {
let rustc_source = self.data.rustcSource.clone().and_then(|it| {
AbsPathBuf::try_from(it)
.map_err(|_| log::error!("rustc source directory must be an absolute path"))
.ok()
});
CargoConfig {
no_default_features: self.data.cargo_noDefaultFeatures,
all_features: self.data.cargo_allFeatures,
features: self.data.cargo_features.clone(),
load_out_dirs_from_check: self.data.cargo_loadOutDirsFromCheck,
target: self.data.cargo_target.clone(),
rustc_source,
no_sysroot: self.data.cargo_noSysroot,
}
}
pub fn rustfmt(&self) -> RustfmtConfig {
match &self.data.rustfmt_overrideCommand {
Some(args) if !args.is_empty() => {
let mut args = args.clone();
let command = args.remove(0);
RustfmtConfig::CustomCommand { command, args }
}
Some(_) | None => {
RustfmtConfig::Rustfmt { extra_args: self.data.rustfmt_extraArgs.clone() }
}
}
}
pub fn flycheck(&self) -> Option<FlycheckConfig> {
if !self.data.checkOnSave_enable {
return None;
}
let flycheck_config = match &self.data.checkOnSave_overrideCommand {
Some(args) if !args.is_empty() => {
let mut args = args.clone();
let command = args.remove(0);
FlycheckConfig::CustomCommand { command, args }
}
Some(_) | None => FlycheckConfig::CargoCommand {
command: self.data.checkOnSave_command.clone(),
target_triple: self
.data
.checkOnSave_target
.clone()
.or(self.data.cargo_target.clone()),
all_targets: self.data.checkOnSave_allTargets,
no_default_features: self
.data
.checkOnSave_noDefaultFeatures
.unwrap_or(self.data.cargo_noDefaultFeatures),
all_features: self
.data
.checkOnSave_allFeatures
.unwrap_or(self.data.cargo_allFeatures),
features: self
.data
.checkOnSave_features
.clone()
.unwrap_or(self.data.cargo_features.clone()),
extra_args: self.data.checkOnSave_extraArgs.clone(),
},
};
Some(flycheck_config)
}
pub fn runnables(&self) -> RunnablesConfig {
RunnablesConfig {
override_cargo: self.data.runnables_overrideCargo.clone(),
cargo_extra_args: self.data.runnables_cargoExtraArgs.clone(),
}
}
pub fn inlay_hints(&self) -> InlayHintsConfig {
InlayHintsConfig {
type_hints: self.data.inlayHints_typeHints,
parameter_hints: self.data.inlayHints_parameterHints,
chaining_hints: self.data.inlayHints_chainingHints,
max_length: self.data.inlayHints_maxLength,
}
}
fn merge_behavior(&self) -> Option<MergeBehavior> {
match self.data.assist_importMergeBehaviour {
MergeBehaviorDef::None => None,
MergeBehaviorDef::Full => Some(MergeBehavior::Full),
MergeBehaviorDef::Last => Some(MergeBehavior::Last),
}
}
pub fn completion(&self) -> CompletionConfig {
let mut res = CompletionConfig::default();
res.enable_postfix_completions = self.data.completion_postfix_enable;
res.enable_autoimport_completions = self.data.completion_autoimport_enable;
res.add_call_parenthesis = self.data.completion_addCallParenthesis;
res.add_call_argument_snippets = self.data.completion_addCallArgumentSnippets;
res.merge = self.merge_behavior();
res.active_resolve_capabilities =
enabled_completions_resolve_capabilities(&self.caps).unwrap_or_default();
res.allow_snippets(try_or!(
self.caps
.text_document
.as_ref()?
.completion
.as_ref()?
.completion_item
.as_ref()?
.snippet_support?,
false
));
res
}
pub fn assist(&self) -> AssistConfig {
let mut res = AssistConfig::default();
res.insert_use.merge = self.merge_behavior();
res.insert_use.prefix_kind = match self.data.assist_importPrefix {
ImportPrefixDef::Plain => PrefixKind::Plain,
ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
ImportPrefixDef::BySelf => PrefixKind::BySelf,
};
res.allow_snippets(self.experimental("snippetTextEdit"));
res
}
pub fn call_info_full(&self) -> bool {
self.data.callInfo_full
}
pub fn lens(&self) -> LensConfig {
LensConfig {
run: self.data.lens_enable && self.data.lens_run,
debug: self.data.lens_enable && self.data.lens_debug,
implementations: self.data.lens_enable && self.data.lens_implementations,
method_refs: self.data.lens_enable && self.data.lens_methodReferences,
}
}
pub fn hover(&self) -> HoverConfig {
HoverConfig {
implementations: self.data.hoverActions_enable
&& self.data.hoverActions_implementations,
run: self.data.hoverActions_enable && self.data.hoverActions_run,
debug: self.data.hoverActions_enable && self.data.hoverActions_debug,
goto_type_def: self.data.hoverActions_enable && self.data.hoverActions_gotoTypeDef,
links_in_hover: self.data.hoverActions_linksInHover,
markdown: try_or!(
self.caps
.text_document
.as_ref()?
.hover
.as_ref()?
.content_format
.as_ref()?
.as_slice(),
&[]
)
.contains(&MarkupKind::Markdown),
}
}
pub fn semantic_tokens_refresh(&self) -> bool {
try_or!(self.caps.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support?, false)
}
pub fn code_lens_refresh(&self) -> bool {
try_or!(self.caps.workspace.as_ref()?.code_lens.as_ref()?.refresh_support?, false)
}
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ManifestOrProjectJson {
Manifest(PathBuf),
ProjectJson(ProjectJsonData),
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum MergeBehaviorDef {
None,
@ -642,7 +623,7 @@ enum MergeBehaviorDef {
Last,
}
#[derive(Deserialize)]
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ImportPrefixDef {
Plain,
@ -658,6 +639,7 @@ macro_rules! _config_data {
)*
}) => {
#[allow(non_snake_case)]
#[derive(Debug, Clone)]
struct $name { $($field: $ty,)* }
impl $name {
fn from_json(mut json: serde_json::Value) -> $name {
@ -763,6 +745,9 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"Option<String>" => set! {
"type": ["null", "string"],
},
"Option<PathBuf>" => set! {
"type": ["null", "string"],
},
"Option<bool>" => set! {
"type": ["null", "boolean"],
},
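
The `config_data!` macro, partially visible above, is what keeps the lossless approach cheap: a single declaration yields the `ConfigData` struct, its `from_json` constructor, and the published JSON schema. A much-reduced sketch of the same idea; the real macro additionally resolves `foo_barBaz`-style field names to nested JSON paths and emits the schema metadata, both omitted here:

```rust
use serde_json::Value;

macro_rules! config_data {
    (struct $name:ident { $($field:ident: $ty:ty = $default:expr,)* }) => {
        #[allow(non_snake_case)]
        #[derive(Debug, Clone)]
        struct $name { $($field: $ty,)* }

        impl $name {
            fn from_json(mut json: Value) -> $name {
                $name {
                    $($field: {
                        // Prefer the user-supplied value; otherwise fall back to
                        // the default, which is written as a JSON literal.
                        json.get_mut(stringify!($field))
                            .map(Value::take)
                            .and_then(|it| serde_json::from_value(it).ok())
                            .unwrap_or_else(|| serde_json::from_str($default).unwrap())
                    },)*
                }
            }
        }
    };
}

config_data! {
    struct ConfigData {
        lruCapacity: Option<usize> = "null",
        cargo_allFeatures: bool = "false",
    }
}

fn main() {
    let data = ConfigData::from_json(serde_json::json!({ "cargo_allFeatures": true }));
    assert!(data.cargo_allFeatures);
    assert_eq!(data.lruCapacity, None);
}
```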

View file

@ -109,7 +109,7 @@ impl GlobalState {
Handle { handle, receiver }
};
let analysis_host = AnalysisHost::new(config.lru_capacity);
let analysis_host = AnalysisHost::new(config.lru_capacity());
let (flycheck_sender, flycheck_receiver) = unbounded();
GlobalState {
sender,

View file

@ -9,9 +9,9 @@ use std::{
};
use ide::{
AssistConfig, CompletionResolveCapability, FileId, FilePosition, FileRange, HoverAction,
HoverGotoTypeData, LineIndex, NavigationTarget, Query, RangeInfo, Runnable, RunnableKind,
SearchScope, SourceChange, SymbolKind, TextEdit,
CompletionResolveCapability, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData,
LineIndex, NavigationTarget, Query, RangeInfo, Runnable, RunnableKind, SearchScope,
SourceChange, SymbolKind, TextEdit,
};
use itertools::Itertools;
use lsp_server::ErrorCode;
@ -548,7 +548,7 @@ pub(crate) fn handle_runnables(
}
// Add `cargo check` and `cargo test` for all targets of the whole package
let config = &snap.config.runnables;
let config = snap.config.runnables();
match cargo_spec {
Some(spec) => {
for &cmd in ["check", "test"].iter() {
@ -579,9 +579,9 @@ pub(crate) fn handle_runnables(
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
workspace_root: None,
override_cargo: config.override_cargo.clone(),
override_cargo: config.override_cargo,
cargo_args: vec!["check".to_string(), "--workspace".to_string()],
cargo_extra_args: config.cargo_extra_args.clone(),
cargo_extra_args: config.cargo_extra_args,
executable_args: Vec::new(),
expect_test: None,
},
@ -620,7 +620,8 @@ pub(crate) fn handle_completion(
return Ok(None);
}
let items = match snap.analysis.completions(&snap.config.completion, position)? {
let completion_config = &snap.config.completion();
let items = match snap.analysis.completions(completion_config, position)? {
None => return Ok(None),
Some(items) => items,
};
@ -633,7 +634,7 @@ pub(crate) fn handle_completion(
let mut new_completion_items =
to_proto::completion_item(&line_index, line_endings, item.clone());
if snap.config.completion.resolve_additional_edits_lazily() {
if completion_config.resolve_additional_edits_lazily() {
for new_item in &mut new_completion_items {
let _ = fill_resolve_data(&mut new_item.data, &item, &text_document_position)
.take();
@ -663,9 +664,8 @@ pub(crate) fn handle_completion_resolve(
}
// FIXME resolve the other capabilities also?
if !snap
.config
.completion
let completion_config = &snap.config.completion();
if !completion_config
.active_resolve_capabilities
.contains(&CompletionResolveCapability::AdditionalTextEdits)
{
@ -690,7 +690,7 @@ pub(crate) fn handle_completion_resolve(
let additional_edits = snap
.analysis
.resolve_completion_edits(
&snap.config.completion,
&completion_config,
FilePosition { file_id, offset },
&resolve_data.full_import_path,
resolve_data.imported_name,
@ -746,7 +746,7 @@ pub(crate) fn handle_signature_help(
Some(it) => it,
None => return Ok(None),
};
let concise = !snap.config.call_info_full;
let concise = !snap.config.call_info_full();
let res =
to_proto::signature_help(call_info, concise, snap.config.signature_help_label_offsets());
Ok(Some(res))
@ -758,11 +758,9 @@ pub(crate) fn handle_hover(
) -> Result<Option<lsp_ext::Hover>> {
let _p = profile::span("handle_hover");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let info = match snap.analysis.hover(
position,
snap.config.hover.links_in_hover,
snap.config.hover.markdown,
)? {
let hover_config = snap.config.hover();
let info =
match snap.analysis.hover(position, hover_config.links_in_hover, hover_config.markdown)? {
None => return Ok(None),
Some(info) => info,
};
@ -851,7 +849,7 @@ pub(crate) fn handle_formatting(
let file_line_index = snap.analysis.file_line_index(file_id)?;
let file_line_endings = snap.file_line_endings(file_id);
let mut rustfmt = match &snap.config.rustfmt {
let mut rustfmt = match snap.config.rustfmt() {
RustfmtConfig::Rustfmt { extra_args } => {
let mut cmd = process::Command::new(toolchain::rustfmt());
cmd.args(extra_args);
@ -947,14 +945,12 @@ pub(crate) fn handle_code_action(
let range = from_proto::text_range(&line_index, params.range);
let frange = FileRange { file_id, range };
let assists_config = AssistConfig {
allowed: params
let mut assists_config = snap.config.assist();
assists_config.allowed = params
.clone()
.context
.only
.map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()),
..snap.config.assist
};
.map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
@ -989,7 +985,7 @@ fn add_quick_fixes(
line_index: &Arc<LineIndex>,
acc: &mut Vec<lsp_ext::CodeAction>,
) -> Result<()> {
let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics, frange.file_id)?;
let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics(), frange.file_id)?;
for fix in diagnostics
.into_iter()
@ -1018,7 +1014,7 @@ fn add_quick_fixes(
}
pub(crate) fn handle_code_action_resolve(
mut snap: GlobalStateSnapshot,
snap: GlobalStateSnapshot,
mut code_action: lsp_ext::CodeAction,
) -> Result<lsp_ext::CodeAction> {
let _p = profile::span("handle_code_action_resolve");
@ -1032,13 +1028,14 @@ pub(crate) fn handle_code_action_resolve(
let range = from_proto::text_range(&line_index, params.code_action_params.range);
let frange = FileRange { file_id, range };
snap.config.assist.allowed = params
let mut assists_config = snap.config.assist();
assists_config.allowed = params
.code_action_params
.context
.only
.map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
let assists = snap.analysis.assists(&snap.config.assist, true, frange)?;
let assists = snap.analysis.assists(&assists_config, true, frange)?;
let (id, index) = split_once(&params.id, ':').unwrap();
let index = index.parse::<usize>().unwrap();
let assist = &assists[index];
@ -1055,7 +1052,8 @@ pub(crate) fn handle_code_lens(
let _p = profile::span("handle_code_lens");
let mut lenses: Vec<CodeLens> = Default::default();
if snap.config.lens.none() {
let lens_config = snap.config.lens();
if lens_config.none() {
// early return before any db query!
return Ok(Some(lenses));
}
@ -1064,7 +1062,7 @@ pub(crate) fn handle_code_lens(
let line_index = snap.analysis.file_line_index(file_id)?;
let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
if snap.config.lens.runnable() {
if lens_config.runnable() {
// Gather runnables
for runnable in snap.analysis.runnables(file_id)? {
if should_skip_target(&runnable, cargo_spec.as_ref()) {
@ -1074,7 +1072,7 @@ pub(crate) fn handle_code_lens(
let action = runnable.action();
let range = to_proto::range(&line_index, runnable.nav.full_range);
let r = to_proto::runnable(&snap, file_id, runnable)?;
if snap.config.lens.run {
if lens_config.run {
let lens = CodeLens {
range,
command: Some(run_single_command(&r, action.run_title)),
@ -1083,7 +1081,7 @@ pub(crate) fn handle_code_lens(
lenses.push(lens);
}
if action.debugee && snap.config.lens.debug {
if action.debugee && lens_config.debug {
let debug_lens =
CodeLens { range, command: Some(debug_single_command(&r)), data: None };
lenses.push(debug_lens);
@ -1091,7 +1089,7 @@ pub(crate) fn handle_code_lens(
}
}
if snap.config.lens.implementations {
if lens_config.implementations {
// Handle impls
lenses.extend(
snap.analysis
@ -1126,7 +1124,7 @@ pub(crate) fn handle_code_lens(
);
}
if snap.config.lens.references() {
if lens_config.references() {
lenses.extend(snap.analysis.find_all_methods(file_id)?.into_iter().map(|it| {
let range = to_proto::range(&line_index, it.range);
let position = to_proto::position(&line_index, it.range.start());
@ -1272,7 +1270,7 @@ pub(crate) fn publish_diagnostics(
let diagnostics: Vec<Diagnostic> = snap
.analysis
.diagnostics(&snap.config.diagnostics, file_id)?
.diagnostics(&snap.config.diagnostics(), file_id)?
.into_iter()
.map(|d| Diagnostic {
range: to_proto::range(&line_index, d.range),
@ -1305,7 +1303,7 @@ pub(crate) fn handle_inlay_hints(
let line_index = snap.analysis.file_line_index(file_id)?;
Ok(snap
.analysis
.inlay_hints(file_id, &snap.config.inlay_hints)?
.inlay_hints(file_id, &snap.config.inlay_hints())?
.into_iter()
.map(|it| to_proto::inlay_hint(&line_index, it))
.collect())
@ -1575,7 +1573,7 @@ fn show_impl_command_link(
snap: &GlobalStateSnapshot,
position: &FilePosition,
) -> Option<lsp_ext::CommandLinkGroup> {
if snap.config.hover.implementations {
if snap.config.hover().implementations {
if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) {
let uri = to_proto::url(snap, position.file_id);
let line_index = snap.analysis.file_line_index(position.file_id).ok()?;
@ -1603,7 +1601,8 @@ fn runnable_action_links(
runnable: Runnable,
) -> Option<lsp_ext::CommandLinkGroup> {
let cargo_spec = CargoTargetSpec::for_file(&snap, file_id).ok()?;
if !snap.config.hover.runnable() || should_skip_target(&runnable, cargo_spec.as_ref()) {
let hover_config = snap.config.hover();
if !hover_config.runnable() || should_skip_target(&runnable, cargo_spec.as_ref()) {
return None;
}
@ -1611,12 +1610,12 @@ fn runnable_action_links(
to_proto::runnable(snap, file_id, runnable).ok().map(|r| {
let mut group = lsp_ext::CommandLinkGroup::default();
if snap.config.hover.run {
if hover_config.run {
let run_command = run_single_command(&r, action.run_title);
group.commands.push(to_command_link(run_command, r.label.clone()));
}
if snap.config.hover.debug {
if hover_config.debug {
let dbg_command = debug_single_command(&r);
group.commands.push(to_command_link(dbg_command, r.label));
}
@ -1629,7 +1628,7 @@ fn goto_type_action_links(
snap: &GlobalStateSnapshot,
nav_targets: &[HoverGotoTypeData],
) -> Option<lsp_ext::CommandLinkGroup> {
if !snap.config.hover.goto_type_def || nav_targets.is_empty() {
if !snap.config.hover().goto_type_def || nav_targets.is_empty() {
return None;
}
@ -1650,7 +1649,7 @@ fn prepare_hover_actions(
file_id: FileId,
actions: &[HoverAction],
) -> Vec<lsp_ext::CommandLinkGroup> {
if snap.config.hover.none() || !snap.config.hover_actions() {
if snap.config.hover().none() || !snap.config.hover_actions() {
return Vec::new();
}
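
Since the accessors return fresh owned values, handlers that previously mutated `snap.config` in place (note the dropped `mut snap` above) now just customize their own copy. A reduced sketch with stand-in types:

```rust
#[derive(Default, Clone)]
struct AssistConfig {
    allowed: Option<Vec<String>>,
}

struct Config;

impl Config {
    // Builds a new `AssistConfig` each time it is asked for.
    fn assist(&self) -> AssistConfig {
        AssistConfig::default()
    }
}

struct GlobalStateSnapshot {
    config: Config,
}

// The handler no longer needs a mutable snapshot; it tweaks its own owned copy.
fn handle_code_action(snap: &GlobalStateSnapshot, only: Option<Vec<String>>) -> AssistConfig {
    let mut assists_config = snap.config.assist();
    assists_config.allowed = only;
    assists_config
}

fn main() {
    let snap = GlobalStateSnapshot { config: Config };
    let cfg = handle_code_action(&snap, Some(vec!["quickfix".to_string()]));
    assert_eq!(cfg.allowed, Some(vec!["quickfix".to_string()]));
}
```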

View file

@ -99,7 +99,8 @@ impl fmt::Debug for Event {
impl GlobalState {
fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
if self.config.linked_projects.is_empty() && self.config.notifications.cargo_toml_not_found
if self.config.linked_projects().is_empty()
&& self.config.notifications().cargo_toml_not_found
{
self.show_message(
lsp_types::MessageType::Error,
@ -296,7 +297,7 @@ impl GlobalState {
flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => {
let diagnostics =
crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
&self.config.diagnostics_map,
&self.config.diagnostics_map(),
&diagnostic,
&workspace_root,
);
@ -365,13 +366,13 @@ impl GlobalState {
self.update_file_notifications_on_threadpool();
// Refresh semantic tokens if the client supports it.
if self.config.semantic_tokens_refresh {
if self.config.semantic_tokens_refresh() {
self.semantic_tokens_cache.lock().clear();
self.send_request::<lsp_types::request::SemanticTokensRefesh>((), |_, _| ());
}
// Refresh code lens if the client supports it.
if self.config.code_lens_refresh {
if self.config.code_lens_refresh() {
self.send_request::<lsp_types::request::CodeLensRefresh>((), |_, _| ());
}
}
@ -658,7 +659,7 @@ impl GlobalState {
.collect::<Vec<_>>();
log::trace!("updating notifications for {:?}", subscriptions);
if self.config.publish_diagnostics {
if self.config.publish_diagnostics() {
let snapshot = self.snapshot();
self.task_pool.handle.spawn(move || {
let diagnostics = subscriptions

View file

@ -19,12 +19,12 @@ impl GlobalState {
pub(crate) fn update_configuration(&mut self, config: Config) {
let _p = profile::span("GlobalState::update_configuration");
let old_config = mem::replace(&mut self.config, config);
if self.config.lru_capacity != old_config.lru_capacity {
self.analysis_host.update_lru_capacity(old_config.lru_capacity);
if self.config.lru_capacity() != old_config.lru_capacity() {
self.analysis_host.update_lru_capacity(self.config.lru_capacity());
}
if self.config.linked_projects != old_config.linked_projects {
if self.config.linked_projects() != old_config.linked_projects() {
self.fetch_workspaces()
} else if self.config.flycheck != old_config.flycheck {
} else if self.config.flycheck() != old_config.flycheck() {
self.reload_flycheck();
}
}
@ -36,7 +36,7 @@ impl GlobalState {
Status::Loading | Status::NeedsReload => return,
Status::Ready | Status::Invalid => (),
}
if self.config.cargo_autoreload {
if self.config.cargo_autoreload() {
self.fetch_workspaces();
} else {
self.transition(Status::NeedsReload);
@ -94,8 +94,8 @@ impl GlobalState {
pub(crate) fn fetch_workspaces(&mut self) {
log::info!("will fetch workspaces");
self.task_pool.handle.spawn({
let linked_projects = self.config.linked_projects.clone();
let cargo_config = self.config.cargo.clone();
let linked_projects = self.config.linked_projects();
let cargo_config = self.config.cargo();
move || {
let workspaces = linked_projects
.iter()
@ -143,7 +143,7 @@ impl GlobalState {
return;
}
if let FilesWatcher::Client = self.config.files.watcher {
if let FilesWatcher::Client = self.config.files().watcher {
let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
watchers: workspaces
.iter()
@ -170,9 +170,9 @@ impl GlobalState {
let project_folders = ProjectFolders::new(&workspaces);
self.proc_macro_client = match &self.config.proc_macro_srv {
self.proc_macro_client = match self.config.proc_macro_srv() {
None => None,
Some((path, args)) => match ProcMacroClient::extern_process(path.into(), args) {
Some((path, args)) => match ProcMacroClient::extern_process(path.clone(), args) {
Ok(it) => Some(it),
Err(err) => {
log::error!(
@ -185,7 +185,7 @@ impl GlobalState {
},
};
let watch = match self.config.files.watcher {
let watch = match self.config.files().watcher {
FilesWatcher::Client => vec![],
FilesWatcher::Notify => project_folders.watch,
};
@ -211,7 +211,7 @@ impl GlobalState {
};
for ws in workspaces.iter() {
crate_graph.extend(ws.to_crate_graph(
self.config.cargo.target.as_deref(),
self.config.cargo().target.as_deref(),
self.proc_macro_client.as_ref(),
&mut load,
));
@ -231,7 +231,7 @@ impl GlobalState {
}
fn reload_flycheck(&mut self) {
let config = match self.config.flycheck.clone() {
let config = match self.config.flycheck() {
Some(it) => it,
None => {
self.flycheck = Vec::new();
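
`update_configuration` above now detects changes by comparing the results of the old and new accessors, which is why derived config types such as `FlycheckConfig` and `LinkedProject` have to be comparable by value. A minimal sketch with simplified fields:

```rust
#[derive(Debug, Clone, PartialEq, Eq)]
struct FlycheckConfig {
    command: String,
}

struct Config {
    check_enable: bool,
    check_command: String,
}

impl Config {
    // The value is derived on demand from the stored data...
    fn flycheck(&self) -> Option<FlycheckConfig> {
        if self.check_enable {
            Some(FlycheckConfig { command: self.check_command.clone() })
        } else {
            None
        }
    }
}

// ...so change detection compares two freshly derived values.
fn needs_flycheck_reload(old: &Config, new: &Config) -> bool {
    new.flycheck() != old.flycheck()
}

fn main() {
    let old = Config { check_enable: true, check_command: "check".to_string() };
    let new = Config { check_enable: true, check_command: "clippy".to_string() };
    assert!(needs_flycheck_reload(&old, &new));
}
```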

View file

@ -818,7 +818,7 @@ pub(crate) fn runnable(
file_id: FileId,
runnable: Runnable,
) -> Result<lsp_ext::Runnable> {
let config = &snap.config.runnables;
let config = snap.config.runnables();
let spec = CargoTargetSpec::for_file(snap, file_id)?;
let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
let target = spec.as_ref().map(|s| s.target.clone());
@ -833,9 +833,9 @@ pub(crate) fn runnable(
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
workspace_root: workspace_root.map(|it| it.into()),
override_cargo: config.override_cargo.clone(),
override_cargo: config.override_cargo,
cargo_args,
cargo_extra_args: config.cargo_extra_args.clone(),
cargo_extra_args: config.cargo_extra_args,
executable_args,
expect_test: None,
},

View file

@ -13,6 +13,7 @@ mod support;
use std::{collections::HashMap, path::PathBuf, time::Instant};
use expect_test::expect;
use lsp_types::{
notification::DidOpenTextDocument,
request::{CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest},
@ -569,9 +570,9 @@ fn main() {
}
"###,
)
.with_config(|config| {
config.cargo.load_out_dirs_from_check = true;
})
.with_config(serde_json::json!({
"cargo": { "loadOutDirsFromCheck": true }
}))
.server()
.wait_until_workspace_is_loaded();
@ -712,12 +713,13 @@ pub fn foo(_input: TokenStream) -> TokenStream {
"###,
)
.with_config(|config| {
let macro_srv_path = PathBuf::from(env!("CARGO_BIN_EXE_rust-analyzer"));
config.cargo.load_out_dirs_from_check = true;
config.proc_macro_srv = Some((macro_srv_path, vec!["proc-macro".into()]));
})
.with_config(serde_json::json!({
"cargo": { "loadOutDirsFromCheck": true },
"procMacro": {
"enable": true,
"server": PathBuf::from(env!("CARGO_BIN_EXE_rust-analyzer")),
}
}))
.root("foo")
.root("bar")
.server()
@ -731,5 +733,5 @@ pub fn foo(_input: TokenStream) -> TokenStream {
work_done_progress_params: Default::default(),
});
let value = res.get("contents").unwrap().get("value").unwrap().to_string();
assert_eq!(value, r#""\n```rust\nfoo::Bar\n```\n\n```rust\nfn bar()\n```""#)
expect![[r#""\n```rust\nfoo::Bar\n```\n\n```rust\nfn bar()\n```""#]].assert_eq(&value);
}
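
The test suite now passes configuration as plain JSON, the same shape a client puts in `initializationOptions`, and lets `Config::update` interpret it. A small sketch of building and inspecting such a value with `serde_json::json!`:

```rust
use serde_json::json;

fn main() {
    // The same nested shape the LSP client would send; keys follow the hunk above.
    let config = json!({
        "cargo": { "loadOutDirsFromCheck": true },
        "procMacro": { "enable": true }
    });

    // In the real tests this value is handed to `Project::with_config`, which
    // forwards it to `Config::update`.
    assert_eq!(config["cargo"]["loadOutDirsFromCheck"], json!(true));
    assert!(config["procMacro"]["enable"].as_bool().unwrap_or(false));
}
```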

View file

@ -12,11 +12,8 @@ use lsp_types::{
notification::Exit, request::Shutdown, TextDocumentIdentifier, Url, WorkDoneProgress,
};
use lsp_types::{ProgressParams, ProgressParamsValue};
use project_model::{CargoConfig, ProjectManifest};
use rust_analyzer::{
config::{Config, FilesConfig, FilesWatcher, LinkedProject},
main_loop,
};
use project_model::ProjectManifest;
use rust_analyzer::{config::Config, main_loop};
use serde::Serialize;
use serde_json::{to_string_pretty, Value};
use test_utils::{find_mismatch, Fixture};
@ -29,12 +26,18 @@ pub(crate) struct Project<'a> {
with_sysroot: bool,
tmp_dir: Option<TestDir>,
roots: Vec<PathBuf>,
config: Option<Box<dyn Fn(&mut Config)>>,
config: serde_json::Value,
}
impl<'a> Project<'a> {
pub(crate) fn with_fixture(fixture: &str) -> Project {
Project { fixture, tmp_dir: None, roots: vec![], with_sysroot: false, config: None }
Project {
fixture,
tmp_dir: None,
roots: vec![],
with_sysroot: false,
config: serde_json::Value::Null,
}
}
pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Project<'a> {
@ -52,8 +55,8 @@ impl<'a> Project<'a> {
self
}
pub(crate) fn with_config(mut self, config: impl Fn(&mut Config) + 'static) -> Project<'a> {
self.config = Some(Box::new(config));
pub(crate) fn with_config(mut self, config: serde_json::Value) -> Project<'a> {
self.config = config;
self
}
@ -77,14 +80,14 @@ impl<'a> Project<'a> {
if roots.is_empty() {
roots.push(tmp_dir_path.clone());
}
let linked_projects = roots
let discovered_projects = roots
.into_iter()
.map(|it| ProjectManifest::discover_single(&it).unwrap())
.map(LinkedProject::from)
.collect::<Vec<_>>();
let mut config = Config {
caps: lsp_types::ClientCapabilities {
let mut config = Config::new(
tmp_dir_path,
lsp_types::ClientCapabilities {
text_document: Some(lsp_types::TextDocumentClientCapabilities {
definition: Some(lsp_types::GotoCapability {
link_support: Some(true),
@ -96,6 +99,10 @@ impl<'a> Project<'a> {
),
..Default::default()
}),
hover: Some(lsp_types::HoverClientCapabilities {
content_format: Some(vec![lsp_types::MarkupKind::Markdown]),
..Default::default()
}),
..Default::default()
}),
window: Some(lsp_types::WindowClientCapabilities {
@ -104,14 +111,9 @@ impl<'a> Project<'a> {
}),
..Default::default()
},
cargo: CargoConfig { no_sysroot: !self.with_sysroot, ..Default::default() },
linked_projects,
files: FilesConfig { watcher: FilesWatcher::Client, exclude: Vec::new() },
..Config::new(tmp_dir_path)
};
if let Some(f) = &self.config {
f(&mut config)
}
);
config.discovered_projects = Some(discovered_projects);
config.update(self.config);
Server::new(tmp_dir, config)
}

View file

@ -94,6 +94,8 @@
Whether to show `can't find Cargo.toml` error message.
[[rust-analyzer.procMacro.enable]]rust-analyzer.procMacro.enable (default: `false`)::
Enable Proc macro support, `#rust-analyzer.cargo.loadOutDirsFromCheck#` must be enabled.
[[rust-analyzer.procMacro.server]]rust-analyzer.procMacro.server (default: `null`)::
Internal config, path to proc-macro server executable (typically, this is rust-analyzer itself, but we override this in tests).
[[rust-analyzer.runnables.overrideCargo]]rust-analyzer.runnables.overrideCargo (default: `null`)::
Command to be executed instead of 'cargo' for runnables.
[[rust-analyzer.runnables.cargoExtraArgs]]rust-analyzer.runnables.cargoExtraArgs (default: `[]`)::

View file

@ -663,6 +663,14 @@
"default": false,
"type": "boolean"
},
"rust-analyzer.procMacro.server": {
"markdownDescription": "Internal config, path to proc-macro server executable (typically, this is rust-analyzer itself, but we override this in tests).",
"default": null,
"type": [
"null",
"string"
]
},
"rust-analyzer.runnables.overrideCargo": {
"markdownDescription": "Command to be executed instead of 'cargo' for runnables.",
"default": null,