Rollup merge of #103579 - lnicola:rust-analyzer-2022-10-26, r=lnicola

⬆️ rust-analyzer

r? ``@ghost``
This commit is contained in:
Yuki Okushi 2022-10-27 08:30:57 +09:00 committed by GitHub
commit ee84d1ad06
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
64 changed files with 2244 additions and 1607 deletions

View file

@ -50,5 +50,7 @@ jobs:
cargo workspaces rename --from test-utils test_utils cargo workspaces rename --from test-utils test_utils
cargo workspaces rename --from text-edit text_edit cargo workspaces rename --from text-edit text_edit
cargo workspaces rename ra_ap_%n cargo workspaces rename ra_ap_%n
# Remove library crates from the workspaces so we don't auto-publish them as well
sed -i 's/ "lib\/\*",//' ./Cargo.toml
find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} + find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} +
cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$PATCH cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$PATCH

View file

@ -21,6 +21,20 @@ pub use cargo_metadata::diagnostic::{
DiagnosticSpanMacroExpansion, DiagnosticSpanMacroExpansion,
}; };
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum InvocationStrategy {
Once,
#[default]
PerWorkspace,
}
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub enum InvocationLocation {
Root(AbsPathBuf),
#[default]
Workspace,
}
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub enum FlycheckConfig { pub enum FlycheckConfig {
CargoCommand { CargoCommand {
@ -37,6 +51,8 @@ pub enum FlycheckConfig {
command: String, command: String,
args: Vec<String>, args: Vec<String>,
extra_env: FxHashMap<String, String>, extra_env: FxHashMap<String, String>,
invocation_strategy: InvocationStrategy,
invocation_location: InvocationLocation,
}, },
} }
@ -136,11 +152,15 @@ enum Restart {
No, No,
} }
/// A [`FlycheckActor`] is a single check instance of a workspace.
struct FlycheckActor { struct FlycheckActor {
/// The workspace id of this flycheck instance.
id: usize, id: usize,
sender: Box<dyn Fn(Message) + Send>, sender: Box<dyn Fn(Message) + Send>,
config: FlycheckConfig, config: FlycheckConfig,
workspace_root: AbsPathBuf, /// Either the workspace root of the workspace we are flychecking,
/// or the project root of the project.
root: AbsPathBuf,
/// CargoHandle exists to wrap around the communication needed to be able to /// CargoHandle exists to wrap around the communication needed to be able to
/// run `cargo check` without blocking. Currently the Rust standard library /// run `cargo check` without blocking. Currently the Rust standard library
/// doesn't provide a way to read sub-process output without blocking, so we /// doesn't provide a way to read sub-process output without blocking, so we
@ -162,11 +182,13 @@ impl FlycheckActor {
workspace_root: AbsPathBuf, workspace_root: AbsPathBuf,
) -> FlycheckActor { ) -> FlycheckActor {
tracing::info!(%id, ?workspace_root, "Spawning flycheck"); tracing::info!(%id, ?workspace_root, "Spawning flycheck");
FlycheckActor { id, sender, config, workspace_root, cargo_handle: None } FlycheckActor { id, sender, config, root: workspace_root, cargo_handle: None }
} }
fn progress(&self, progress: Progress) {
fn report_progress(&self, progress: Progress) {
self.send(Message::Progress { id: self.id, progress }); self.send(Message::Progress { id: self.id, progress });
} }
fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> { fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver); let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
if let Ok(msg) = inbox.try_recv() { if let Ok(msg) = inbox.try_recv() {
@ -178,6 +200,7 @@ impl FlycheckActor {
recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())), recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
} }
} }
fn run(mut self, inbox: Receiver<Restart>) { fn run(mut self, inbox: Receiver<Restart>) {
'event: while let Some(event) = self.next_event(&inbox) { 'event: while let Some(event) = self.next_event(&inbox) {
match event { match event {
@ -203,10 +226,10 @@ impl FlycheckActor {
"did restart flycheck" "did restart flycheck"
); );
self.cargo_handle = Some(cargo_handle); self.cargo_handle = Some(cargo_handle);
self.progress(Progress::DidStart); self.report_progress(Progress::DidStart);
} }
Err(error) => { Err(error) => {
self.progress(Progress::DidFailToRestart(format!( self.report_progress(Progress::DidFailToRestart(format!(
"Failed to run the following command: {:?} error={}", "Failed to run the following command: {:?} error={}",
self.check_command(), self.check_command(),
error error
@ -226,17 +249,17 @@ impl FlycheckActor {
self.check_command() self.check_command()
); );
} }
self.progress(Progress::DidFinish(res)); self.report_progress(Progress::DidFinish(res));
} }
Event::CheckEvent(Some(message)) => match message { Event::CheckEvent(Some(message)) => match message {
CargoMessage::CompilerArtifact(msg) => { CargoMessage::CompilerArtifact(msg) => {
self.progress(Progress::DidCheckCrate(msg.target.name)); self.report_progress(Progress::DidCheckCrate(msg.target.name));
} }
CargoMessage::Diagnostic(msg) => { CargoMessage::Diagnostic(msg) => {
self.send(Message::AddDiagnostic { self.send(Message::AddDiagnostic {
id: self.id, id: self.id,
workspace_root: self.workspace_root.clone(), workspace_root: self.root.clone(),
diagnostic: msg, diagnostic: msg,
}); });
} }
@ -254,12 +277,12 @@ impl FlycheckActor {
"did cancel flycheck" "did cancel flycheck"
); );
cargo_handle.cancel(); cargo_handle.cancel();
self.progress(Progress::DidCancel); self.report_progress(Progress::DidCancel);
} }
} }
fn check_command(&self) -> Command { fn check_command(&self) -> Command {
let mut cmd = match &self.config { let (mut cmd, args) = match &self.config {
FlycheckConfig::CargoCommand { FlycheckConfig::CargoCommand {
command, command,
target_triple, target_triple,
@ -272,9 +295,7 @@ impl FlycheckActor {
} => { } => {
let mut cmd = Command::new(toolchain::cargo()); let mut cmd = Command::new(toolchain::cargo());
cmd.arg(command); cmd.arg(command);
cmd.current_dir(&self.workspace_root); cmd.args(&["--workspace", "--message-format=json"]);
cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
.arg(self.workspace_root.join("Cargo.toml").as_os_str());
if let Some(target) = target_triple { if let Some(target) = target_triple {
cmd.args(&["--target", target.as_str()]); cmd.args(&["--target", target.as_str()]);
@ -293,18 +314,41 @@ impl FlycheckActor {
cmd.arg(features.join(" ")); cmd.arg(features.join(" "));
} }
} }
cmd.args(extra_args);
cmd.envs(extra_env); cmd.envs(extra_env);
cmd (cmd, extra_args)
} }
FlycheckConfig::CustomCommand { command, args, extra_env } => { FlycheckConfig::CustomCommand {
command,
args,
extra_env,
invocation_strategy,
invocation_location,
} => {
let mut cmd = Command::new(command); let mut cmd = Command::new(command);
cmd.args(args);
cmd.envs(extra_env); cmd.envs(extra_env);
cmd
match invocation_location {
InvocationLocation::Workspace => {
match invocation_strategy {
InvocationStrategy::Once => {
cmd.current_dir(&self.root);
}
InvocationStrategy::PerWorkspace => {
// FIXME: cmd.current_dir(&affected_workspace);
cmd.current_dir(&self.root);
}
}
}
InvocationLocation::Root(root) => {
cmd.current_dir(root);
}
}
(cmd, args)
} }
}; };
cmd.current_dir(&self.workspace_root);
cmd.args(args);
cmd cmd
} }

View file

@ -12,11 +12,11 @@ fn test_copy_expand_simple() {
#[derive(Copy)] #[derive(Copy)]
struct Foo; struct Foo;
"#, "#,
expect![[r##" expect![[r#"
#[derive(Copy)] #[derive(Copy)]
struct Foo; struct Foo;
impl < > core::marker::Copy for Foo< > {}"##]], impl < > core::marker::Copy for Foo< > {}"#]],
); );
} }
@ -33,7 +33,7 @@ macro Copy {}
#[derive(Copy)] #[derive(Copy)]
struct Foo; struct Foo;
"#, "#,
expect![[r##" expect![[r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
macro derive {} macro derive {}
#[rustc_builtin_macro] #[rustc_builtin_macro]
@ -41,7 +41,7 @@ macro Copy {}
#[derive(Copy)] #[derive(Copy)]
struct Foo; struct Foo;
impl < > crate ::marker::Copy for Foo< > {}"##]], impl < > crate ::marker::Copy for Foo< > {}"#]],
); );
} }
@ -53,11 +53,11 @@ fn test_copy_expand_with_type_params() {
#[derive(Copy)] #[derive(Copy)]
struct Foo<A, B>; struct Foo<A, B>;
"#, "#,
expect![[r##" expect![[r#"
#[derive(Copy)] #[derive(Copy)]
struct Foo<A, B>; struct Foo<A, B>;
impl <T0: core::marker::Copy, T1: core::marker::Copy> core::marker::Copy for Foo<T0, T1> {}"##]], impl <T0: core::marker::Copy, T1: core::marker::Copy, > core::marker::Copy for Foo<T0, T1, > {}"#]],
); );
} }
@ -70,11 +70,11 @@ fn test_copy_expand_with_lifetimes() {
#[derive(Copy)] #[derive(Copy)]
struct Foo<A, B, 'a, 'b>; struct Foo<A, B, 'a, 'b>;
"#, "#,
expect![[r##" expect![[r#"
#[derive(Copy)] #[derive(Copy)]
struct Foo<A, B, 'a, 'b>; struct Foo<A, B, 'a, 'b>;
impl <T0: core::marker::Copy, T1: core::marker::Copy> core::marker::Copy for Foo<T0, T1> {}"##]], impl <T0: core::marker::Copy, T1: core::marker::Copy, > core::marker::Copy for Foo<T0, T1, > {}"#]],
); );
} }
@ -86,10 +86,26 @@ fn test_clone_expand() {
#[derive(Clone)] #[derive(Clone)]
struct Foo<A, B>; struct Foo<A, B>;
"#, "#,
expect![[r##" expect![[r#"
#[derive(Clone)] #[derive(Clone)]
struct Foo<A, B>; struct Foo<A, B>;
impl <T0: core::clone::Clone, T1: core::clone::Clone> core::clone::Clone for Foo<T0, T1> {}"##]], impl <T0: core::clone::Clone, T1: core::clone::Clone, > core::clone::Clone for Foo<T0, T1, > {}"#]],
);
}
#[test]
fn test_clone_expand_with_const_generics() {
check(
r#"
//- minicore: derive, clone
#[derive(Clone)]
struct Foo<const X: usize, T>(u32);
"#,
expect![[r#"
#[derive(Clone)]
struct Foo<const X: usize, T>(u32);
impl <const T0: usize, T1: core::clone::Clone, > core::clone::Clone for Foo<T0, T1, > {}"#]],
); );
} }

View file

@ -60,7 +60,8 @@ pub fn find_builtin_derive(ident: &name::Name) -> Option<BuiltinDeriveExpander>
struct BasicAdtInfo { struct BasicAdtInfo {
name: tt::Ident, name: tt::Ident,
type_or_const_params: usize, /// `Some(ty)` if it's a const param of type `ty`, `None` if it's a type param.
param_types: Vec<Option<tt::Subtree>>,
} }
fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> { fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
@ -92,50 +93,22 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
let name_token_id = let name_token_id =
token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified); token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
let name_token = tt::Ident { id: name_token_id, text: name.text().into() }; let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
let type_or_const_params = let param_types = params
params.map_or(0, |type_param_list| type_param_list.type_or_const_params().count()); .into_iter()
Ok(BasicAdtInfo { name: name_token, type_or_const_params }) .flat_map(|param_list| param_list.type_or_const_params())
.map(|param| {
if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
.unwrap_or_default();
Some(ty)
} else {
None
} }
fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
let mut result = Vec::<tt::TokenTree>::with_capacity(n * 2);
result.push(
tt::Leaf::Punct(tt::Punct {
char: '<',
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
}) })
.into(), .collect();
); Ok(BasicAdtInfo { name: name_token, param_types })
for i in 0..n {
if i > 0 {
result.push(
tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
})
.into(),
);
}
result.push(
tt::Leaf::Ident(tt::Ident {
id: tt::TokenId::unspecified(),
text: format!("T{}", i).into(),
})
.into(),
);
result.extend(bound.iter().cloned());
}
result.push(
tt::Leaf::Punct(tt::Punct {
char: '>',
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
})
.into(),
);
result
} }
fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult<tt::Subtree> { fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult<tt::Subtree> {
@ -143,14 +116,27 @@ fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResu
Ok(info) => info, Ok(info) => info,
Err(e) => return ExpandResult::only_err(e), Err(e) => return ExpandResult::only_err(e),
}; };
let (params, args): (Vec<_>, Vec<_>) = info
.param_types
.into_iter()
.enumerate()
.map(|(idx, param_ty)| {
let ident = tt::Leaf::Ident(tt::Ident {
id: tt::TokenId::unspecified(),
text: format!("T{idx}").into(),
});
let ident_ = ident.clone();
if let Some(ty) = param_ty {
(quote! { const #ident : #ty , }, quote! { #ident_ , })
} else {
let bound = trait_path.clone();
(quote! { #ident : #bound , }, quote! { #ident_ , })
}
})
.unzip();
let name = info.name; let name = info.name;
let trait_path_clone = trait_path.token_trees.clone();
let bound = (quote! { : ##trait_path_clone }).token_trees;
let type_params = make_type_args(info.type_or_const_params, bound);
let type_args = make_type_args(info.type_or_const_params, Vec::new());
let trait_path = trait_path.token_trees;
let expanded = quote! { let expanded = quote! {
impl ##type_params ##trait_path for #name ##type_args {} impl < ##params > #trait_path for #name < ##args > {}
}; };
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
} }

View file

@ -259,7 +259,6 @@ macro_rules! __known_path {
(core::future::Future) => {}; (core::future::Future) => {};
(core::future::IntoFuture) => {}; (core::future::IntoFuture) => {};
(core::ops::Try) => {}; (core::ops::Try) => {};
(core::ops::FromResidual) => {};
($path:path) => { ($path:path) => {
compile_error!("Please register your known path in the path module") compile_error!("Please register your known path in the path module")
}; };

View file

@ -279,8 +279,6 @@ pub mod known {
RangeToInclusive, RangeToInclusive,
RangeTo, RangeTo,
Range, Range,
Residual,
FromResidual,
Neg, Neg,
Not, Not,
None, None,

View file

@ -190,9 +190,7 @@ pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
pub enum InferenceDiagnostic { pub enum InferenceDiagnostic {
NoSuchField { expr: ExprId }, NoSuchField { expr: ExprId },
BreakOutsideOfLoop { expr: ExprId, is_break: bool }, BreakOutsideOfLoop { expr: ExprId, is_break: bool },
IncorrectTryTarget { expr: ExprId },
MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize }, MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize },
DoesNotImplement { expr: ExprId, trait_: TraitId, ty: Ty },
} }
/// A mismatch between an expected and an inferred type. /// A mismatch between an expected and an inferred type.
@ -907,6 +905,17 @@ impl<'a> InferenceContext<'a> {
self.db.trait_data(trait_).associated_type_by_name(&name![Item]) self.db.trait_data(trait_).associated_type_by_name(&name![Item])
} }
fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
// FIXME resolve via lang_item once try v2 is stable
let path = path![core::ops::Try];
let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
let trait_data = self.db.trait_data(trait_);
trait_data
// FIXME remove once try v2 is stable
.associated_type_by_name(&name![Ok])
.or_else(|| trait_data.associated_type_by_name(&name![Output]))
}
fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> { fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?; let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?;
self.db.trait_data(trait_).associated_type_by_name(&name![Output]) self.db.trait_data(trait_).associated_type_by_name(&name![Output])

View file

@ -19,24 +19,24 @@ use hir_def::{
resolver::resolver_for_expr, resolver::resolver_for_expr,
ConstParamId, FieldId, ItemContainerId, Lookup, ConstParamId, FieldId, ItemContainerId, Lookup,
}; };
use hir_expand::{name, name::Name}; use hir_expand::name::Name;
use stdx::always; use stdx::always;
use syntax::ast::RangeOp; use syntax::ast::RangeOp;
use crate::{ use crate::{
autoderef::{self, Autoderef}, autoderef::{self, Autoderef},
consteval, consteval,
infer::{coerce::CoerceMany, find_continuable, path, BreakableKind}, infer::{coerce::CoerceMany, find_continuable, BreakableKind},
lower::{ lower::{
const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode, const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
}, },
mapping::{from_chalk, ToChalk}, mapping::{from_chalk, ToChalk},
method_resolution::{self, lang_names_for_bin_op, VisibleFromModule}, method_resolution::{self, lang_names_for_bin_op, VisibleFromModule},
primitive::{self, UintTy}, primitive::{self, UintTy},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, static_lifetime, to_chalk_trait_id,
utils::{generics, Generics}, utils::{generics, Generics},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar,
ProjectionTy, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
}; };
use super::{ use super::{
@ -564,29 +564,9 @@ impl<'a> InferenceContext<'a> {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
} }
&Expr::Try { expr } => { Expr::Try { expr } => {
let inner_ty = self.infer_expr_inner(expr, &Expectation::none()); let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
match self.resolve_try_impl_for(inner_ty.clone()) { self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
Some((_, Some((output, residual)))) => {
if let Some((_trait, false)) =
self.implements_from_residual(self.return_ty.clone(), residual)
{
self.push_diagnostic(InferenceDiagnostic::IncorrectTryTarget {
expr: tgt_expr,
});
}
output
}
Some((trait_, None)) => {
self.push_diagnostic(InferenceDiagnostic::DoesNotImplement {
expr,
trait_,
ty: inner_ty,
});
self.err_ty()
}
None => self.err_ty(),
}
} }
Expr::Cast { expr, type_ref } => { Expr::Cast { expr, type_ref } => {
// FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary) // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary)
@ -1550,67 +1530,4 @@ impl<'a> InferenceContext<'a> {
let ctx = self.breakables.pop().expect("breakable stack broken"); let ctx = self.breakables.pop().expect("breakable stack broken");
(ctx.may_break.then(|| ctx.coerce.complete()), res) (ctx.may_break.then(|| ctx.coerce.complete()), res)
} }
/// Check whether `ty` implements `FromResidual<r>`
fn implements_from_residual(&mut self, ty: Ty, r: Ty) -> Option<(hir_def::TraitId, bool)> {
let from_residual_trait = self
.resolver
.resolve_known_trait(self.db.upcast(), &(super::path![core::ops::FromResidual]))?;
let r = GenericArgData::Ty(r).intern(Interner);
let b = TyBuilder::trait_ref(self.db, from_residual_trait);
if b.remaining() != 2 {
return Some((from_residual_trait, false));
}
let trait_ref = b.push(ty).push(r).build();
Some((from_residual_trait, self.table.try_obligation(trait_ref.cast(Interner)).is_some()))
}
fn resolve_try_impl_for(&mut self, ty: Ty) -> Option<(hir_def::TraitId, Option<(Ty, Ty)>)> {
let path = path![core::ops::Try];
let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
let trait_ref = TyBuilder::trait_ref(self.db, trait_).push(ty).build();
let substitution = trait_ref.substitution.clone();
self.push_obligation(trait_ref.clone().cast(Interner));
let trait_data = self.db.trait_data(trait_);
let output = trait_data.associated_type_by_name(&name![Output]);
let residual = trait_data.associated_type_by_name(&name![Residual]);
let output_ty = match output {
Some(output) => {
let output_ty = self.table.new_type_var();
let alias_eq = AliasEq {
alias: AliasTy::Projection(ProjectionTy {
associated_ty_id: to_assoc_type_id(output),
substitution: substitution.clone(),
}),
ty: output_ty.clone(),
};
self.push_obligation(alias_eq.cast(Interner));
output_ty
}
None => self.err_ty(),
};
let residual_ty = match residual {
Some(residual) => {
let residual_ty = self.table.new_type_var();
let alias_eq = AliasEq {
alias: AliasTy::Projection(ProjectionTy {
associated_ty_id: to_assoc_type_id(residual),
substitution,
}),
ty: residual_ty.clone(),
};
self.push_obligation(alias_eq.cast(Interner));
residual_ty
}
None => self.err_ty(),
};
// FIXME: We are doing the work twice here I think?
Some((
trait_,
self.table.try_obligation(trait_ref.cast(Interner)).map(|_| (output_ty, residual_ty)),
))
}
} }

View file

@ -1111,24 +1111,6 @@ pub fn resolve_indexing_op(
} }
None None
} }
/// Returns the receiver type for the try branch trait call.
pub fn resolve_branch_op(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
ty: Canonical<Ty>,
try_trait: TraitId,
) -> Option<ReceiverAdjustments> {
let mut table = InferenceTable::new(db, env.clone());
let ty = table.instantiate_canonical(ty);
let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
for (ty, adj) in deref_chain.into_iter().zip(adj) {
let goal = generic_implements_goal(db, env.clone(), try_trait, &ty);
if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
return Some(adj);
}
}
None
}
macro_rules! check_that { macro_rules! check_that {
($cond:expr) => { ($cond:expr) => {

View file

@ -162,16 +162,98 @@ fn test() {
); );
} }
#[test]
fn infer_try() {
check_types(
r#"
//- /main.rs crate:main deps:core
fn test() {
let r: Result<i32, u64> = Result::Ok(1);
let v = r?;
v;
} //^ i32
//- /core.rs crate:core
pub mod ops {
pub trait Try {
type Ok;
type Error;
}
}
pub mod result {
pub enum Result<O, E> {
Ok(O),
Err(E)
}
impl<O, E> crate::ops::Try for Result<O, E> {
type Ok = O;
type Error = E;
}
}
pub mod prelude {
pub mod rust_2018 {
pub use crate::{result::*, ops::*};
}
}
"#,
);
}
#[test] #[test]
fn infer_try_trait_v2() { fn infer_try_trait_v2() {
check_types( check_types(
r#" r#"
//- minicore: try //- /main.rs crate:main deps:core
fn test() -> core::ops::ControlFlow<u32, f32> { fn test() {
let r: core::ops::ControlFlow<u32, f32> = core::ops::ControlFlow::Continue(1.0); let r: Result<i32, u64> = Result::Ok(1);
let v = r?; let v = r?;
//^ f32 v;
r } //^ i32
//- /core.rs crate:core
mod ops {
mod try_trait {
pub trait Try: FromResidual {
type Output;
type Residual;
}
pub trait FromResidual<R = <Self as Try>::Residual> {}
}
pub use self::try_trait::FromResidual;
pub use self::try_trait::Try;
}
mod convert {
pub trait From<T> {}
impl<T> From<T> for T {}
}
pub mod result {
use crate::convert::From;
use crate::ops::{Try, FromResidual};
pub enum Infallible {}
pub enum Result<O, E> {
Ok(O),
Err(E)
}
impl<O, E> Try for Result<O, E> {
type Output = O;
type Error = Result<Infallible, E>;
}
impl<T, E, F: From<E>> FromResidual<Result<Infallible, E>> for Result<T, F> {}
}
pub mod prelude {
pub mod rust_2018 {
pub use crate::result::*;
}
} }
"#, "#,
); );

View file

@ -6,7 +6,7 @@
use base_db::CrateId; use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions}; use cfg::{CfgExpr, CfgOptions};
use either::Either; use either::Either;
use hir_def::{path::ModPath, TraitId}; use hir_def::path::ModPath;
use hir_expand::{name::Name, HirFileId, InFile}; use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange}; use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange};
@ -33,7 +33,6 @@ diagnostics![
BreakOutsideOfLoop, BreakOutsideOfLoop,
InactiveCode, InactiveCode,
IncorrectCase, IncorrectCase,
IncorrectTryExpr,
InvalidDeriveTarget, InvalidDeriveTarget,
MacroError, MacroError,
MalformedDerive, MalformedDerive,
@ -41,7 +40,6 @@ diagnostics![
MissingFields, MissingFields,
MissingMatchArms, MissingMatchArms,
MissingUnsafe, MissingUnsafe,
NotImplemented,
NoSuchField, NoSuchField,
ReplaceFilterMapNextWithFindMap, ReplaceFilterMapNextWithFindMap,
TypeMismatch, TypeMismatch,
@ -155,16 +153,6 @@ pub struct MismatchedArgCount {
pub expected: usize, pub expected: usize,
pub found: usize, pub found: usize,
} }
#[derive(Debug)]
pub struct IncorrectTryExpr {
pub expr: InFile<AstPtr<ast::Expr>>,
}
#[derive(Debug)]
pub struct NotImplemented {
pub expr: InFile<AstPtr<ast::Expr>>,
pub trait_: TraitId,
pub ty: Type,
}
#[derive(Debug)] #[derive(Debug)]
pub struct MissingMatchArms { pub struct MissingMatchArms {

View file

@ -81,12 +81,11 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{ pub use crate::{
attrs::{HasAttrs, Namespace}, attrs::{HasAttrs, Namespace},
diagnostics::{ diagnostics::{
AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, IncorrectTryExpr, AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget,
InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms,
MissingMatchArms, MissingUnsafe, NoSuchField, NotImplemented, MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch,
ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall,
UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, UnresolvedModule, UnresolvedModule, UnresolvedProcMacro,
UnresolvedProcMacro,
}, },
has_source::HasSource, has_source::HasSource,
semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
@ -1283,45 +1282,30 @@ impl DefWithBody {
let infer = db.infer(self.into()); let infer = db.infer(self.into());
let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1); let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
for d in &infer.diagnostics { for d in &infer.diagnostics {
match *d { match d {
hir_ty::InferenceDiagnostic::NoSuchField { expr } => { hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
let field = source_map.field_syntax(expr); let field = source_map.field_syntax(*expr);
acc.push(NoSuchField { field }.into()) acc.push(NoSuchField { field }.into())
} }
hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => { &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => {
let expr = source_map let expr = source_map
.expr_syntax(expr) .expr_syntax(expr)
.expect("break outside of loop in synthetic syntax"); .expect("break outside of loop in synthetic syntax");
acc.push(BreakOutsideOfLoop { expr, is_break }.into()) acc.push(BreakOutsideOfLoop { expr, is_break }.into())
} }
hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => { hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
match source_map.expr_syntax(call_expr) { match source_map.expr_syntax(*call_expr) {
Ok(source_ptr) => acc.push( Ok(source_ptr) => acc.push(
MismatchedArgCount { MismatchedArgCount {
call_expr: source_ptr, call_expr: source_ptr,
expected: expected, expected: *expected,
found: found, found: *found,
} }
.into(), .into(),
), ),
Err(SyntheticSyntax) => (), Err(SyntheticSyntax) => (),
} }
} }
hir_ty::InferenceDiagnostic::IncorrectTryTarget { expr } => {
let expr = source_map.expr_syntax(expr).expect("try in synthetic syntax");
acc.push(IncorrectTryExpr { expr }.into())
}
hir_ty::InferenceDiagnostic::DoesNotImplement { expr, trait_, ref ty } => {
let expr = source_map.expr_syntax(expr).expect("try in synthetic syntax");
acc.push(
NotImplemented {
expr,
trait_,
ty: Type::new(db, DefWithBodyId::from(self), ty.clone()),
}
.into(),
)
}
} }
} }
for (expr, mismatch) in infer.expr_type_mismatches() { for (expr, mismatch) in infer.expr_type_mismatches() {

View file

@ -77,7 +77,7 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
target_data_for_generate_constant(ctx, current_module, constant_module).unwrap_or_else( target_data_for_generate_constant(ctx, current_module, constant_module).unwrap_or_else(
|| { || {
let indent = IndentLevel::from_node(statement.syntax()); let indent = IndentLevel::from_node(statement.syntax());
(statement.syntax().text_range().start(), indent, None, format!("\n{}", indent)) (statement.syntax().text_range().start(), indent, None, format!("\n{indent}"))
}, },
); );
@ -90,7 +90,7 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
if let Some(file_id) = file_id { if let Some(file_id) = file_id {
builder.edit_file(file_id); builder.edit_file(file_id);
} }
builder.insert(offset, format!("{}{}", text, post_string)); builder.insert(offset, format!("{text}{post_string}"));
}, },
) )
} }
@ -103,13 +103,13 @@ fn get_text_for_generate_constant(
) -> Option<String> { ) -> Option<String> {
let constant_token = not_exist_name_ref.pop()?; let constant_token = not_exist_name_ref.pop()?;
let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " }; let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
let mut text = format!("{}const {}: {} = $0;", vis, constant_token, type_name); let mut text = format!("{vis}const {constant_token}: {type_name} = $0;");
while let Some(name_ref) = not_exist_name_ref.pop() { while let Some(name_ref) = not_exist_name_ref.pop() {
let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " }; let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
text = text.replace("\n", "\n "); text = text.replace("\n", "\n ");
text = format!("{}mod {} {{{}\n}}", vis, name_ref.to_string(), text); text = format!("{vis}mod {name_ref} {{{text}\n}}");
} }
Some(text.replace("\n", &format!("\n{}", indent))) Some(text.replace("\n", &format!("\n{indent}")))
} }
fn target_data_for_generate_constant( fn target_data_for_generate_constant(
@ -134,7 +134,7 @@ fn target_data_for_generate_constant(
.find(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains("\n")) .find(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains("\n"))
.is_some(); .is_some();
let post_string = let post_string =
if siblings_has_newline { format!("{}", indent) } else { format!("\n{}", indent) }; if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") };
Some((offset, indent + 1, Some(file_id), post_string)) Some((offset, indent + 1, Some(file_id), post_string))
} }
_ => Some((TextSize::from(0), 0.into(), Some(file_id), "\n".into())), _ => Some((TextSize::from(0), 0.into(), Some(file_id), "\n".into())),

View file

@ -55,12 +55,11 @@ pub(crate) fn generate_default_from_enum_variant(
let buf = format!( let buf = format!(
r#" r#"
impl Default for {0} {{ impl Default for {enum_name} {{
fn default() -> Self {{ fn default() -> Self {{
Self::{1} Self::{variant_name}
}} }}
}}"#, }}"#,
enum_name, variant_name
); );
edit.insert(start_offset, buf); edit.insert(start_offset, buf);
}, },

View file

@ -1,8 +1,7 @@
use ide_db::famous_defs::FamousDefs; use ide_db::famous_defs::FamousDefs;
use itertools::Itertools;
use stdx::format_to; use stdx::format_to;
use syntax::{ use syntax::{
ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl}, ast::{self, make, HasGenericParams, HasName, Impl},
AstNode, AstNode,
}; };
@ -77,45 +76,47 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
) )
} }
// FIXME: based on from utils::generate_impl_text_inner
fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String { fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String {
let generic_params = impl_.generic_param_list(); let impl_ty = impl_.self_ty().unwrap();
let generic_params = impl_.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
// remove defaults since they can't be specified in impls
match param {
ast::TypeOrConstParam::Type(param) => {
let param = param.clone_for_update();
param.remove_default();
Some(ast::GenericParam::TypeParam(param))
}
ast::TypeOrConstParam::Const(param) => {
let param = param.clone_for_update();
param.remove_default();
Some(ast::GenericParam::ConstParam(param))
}
}
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
});
let mut buf = String::with_capacity(code.len()); let mut buf = String::with_capacity(code.len());
buf.push_str("\n\n"); buf.push_str("\n\n");
// `impl{generic_params} {trait_text} for {impl_.self_ty()}`
buf.push_str("impl"); buf.push_str("impl");
if let Some(generic_params) = &generic_params { if let Some(generic_params) = &generic_params {
let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax())); format_to!(buf, "{generic_params}")
let toc_params = generic_params.type_or_const_params().map(|toc_param| match toc_param {
ast::TypeOrConstParam::Type(type_param) => {
let mut buf = String::new();
if let Some(it) = type_param.name() {
format_to!(buf, "{}", it.syntax());
} }
if let Some(it) = type_param.colon_token() { format_to!(buf, " {trait_text} for {impl_ty}");
format_to!(buf, "{} ", it);
}
if let Some(it) = type_param.type_bound_list() {
format_to!(buf, "{}", it.syntax());
}
buf
}
ast::TypeOrConstParam::Const(const_param) => const_param.syntax().to_string(),
});
let generics = lifetimes.chain(toc_params).format(", ");
format_to!(buf, "<{}>", generics);
}
buf.push(' ');
buf.push_str(trait_text);
buf.push_str(" for ");
buf.push_str(&impl_.self_ty().unwrap().syntax().text().to_string());
match impl_.where_clause() { match impl_.where_clause() {
Some(where_clause) => { Some(where_clause) => {
format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code); format_to!(buf, "\n{where_clause}\n{{\n{code}\n}}");
} }
None => { None => {
format_to!(buf, " {{\n{}\n}}", code); format_to!(buf, " {{\n{code}\n}}");
} }
} }

View file

@ -51,14 +51,14 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
Some(field) => { Some(field) => {
let field_name = field.name()?; let field_name = field.name()?;
let field_ty = field.ty()?; let field_ty = field.ty()?;
(format!("{}", field_name), field_ty, field.syntax().text_range()) (field_name.to_string(), field_ty, field.syntax().text_range())
} }
None => { None => {
let field = ctx.find_node_at_offset::<ast::TupleField>()?; let field = ctx.find_node_at_offset::<ast::TupleField>()?;
let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?; let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
let field_list_index = field_list.fields().position(|it| it == field)?; let field_list_index = field_list.fields().position(|it| it == field)?;
let field_ty = field.ty()?; let field_ty = field.ty()?;
(format!("{}", field_list_index), field_ty, field.syntax().text_range()) (field_list_index.to_string(), field_ty, field.syntax().text_range())
} }
}; };
@ -77,7 +77,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
for method in methods { for method in methods {
let adt = ast::Adt::Struct(strukt.clone()); let adt = ast::Adt::Struct(strukt.clone());
let name = method.name(ctx.db()).to_string(); let name = method.name(ctx.db()).to_string();
let impl_def = find_struct_impl(ctx, &adt, &name).flatten(); let impl_def = find_struct_impl(ctx, &adt, &[name]).flatten();
acc.add_group( acc.add_group(
&GroupLabel("Generate delegate methods…".to_owned()), &GroupLabel("Generate delegate methods…".to_owned()),
AssistId("generate_delegate_methods", AssistKind::Generate), AssistId("generate_delegate_methods", AssistKind::Generate),
@ -151,7 +151,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
Some(cap) => { Some(cap) => {
let offset = strukt.syntax().text_range().end(); let offset = strukt.syntax().text_range().end();
let snippet = render_snippet(cap, impl_def.syntax(), cursor); let snippet = render_snippet(cap, impl_def.syntax(), cursor);
let snippet = format!("\n\n{}", snippet); let snippet = format!("\n\n{snippet}");
builder.insert_snippet(cap, offset, snippet); builder.insert_snippet(cap, offset, snippet);
} }
None => { None => {

View file

@ -66,7 +66,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let target = field.syntax().text_range(); let target = field.syntax().text_range();
acc.add( acc.add(
AssistId("generate_deref", AssistKind::Generate), AssistId("generate_deref", AssistKind::Generate),
format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field_name), format!("Generate `{deref_type_to_generate:?}` impl using `{field_name}`"),
target, target,
|edit| { |edit| {
generate_edit( generate_edit(
@ -106,7 +106,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
let target = field.syntax().text_range(); let target = field.syntax().text_range();
acc.add( acc.add(
AssistId("generate_deref", AssistKind::Generate), AssistId("generate_deref", AssistKind::Generate),
format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field.syntax()), format!("Generate `{deref_type_to_generate:?}` impl using `{field}`"),
target, target,
|edit| { |edit| {
generate_edit( generate_edit(
@ -132,18 +132,16 @@ fn generate_edit(
let start_offset = strukt.syntax().text_range().end(); let start_offset = strukt.syntax().text_range().end();
let impl_code = match deref_type { let impl_code = match deref_type {
DerefType::Deref => format!( DerefType::Deref => format!(
r#" type Target = {0}; r#" type Target = {field_type_syntax};
fn deref(&self) -> &Self::Target {{ fn deref(&self) -> &Self::Target {{
&self.{1} &self.{field_name}
}}"#, }}"#,
field_type_syntax, field_name
), ),
DerefType::DerefMut => format!( DerefType::DerefMut => format!(
r#" fn deref_mut(&mut self) -> &mut Self::Target {{ r#" fn deref_mut(&mut self) -> &mut Self::Target {{
&mut self.{} &mut self.{field_name}
}}"#, }}"#,
field_name
), ),
}; };
let strukt_adt = ast::Adt::Struct(strukt); let strukt_adt = ast::Adt::Struct(strukt);

View file

@ -139,40 +139,44 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<St
let mut example = String::new(); let mut example = String::new();
let use_path = build_path(ast_func, ctx)?;
let is_unsafe = ast_func.unsafe_token().is_some(); let is_unsafe = ast_func.unsafe_token().is_some();
let param_list = ast_func.param_list()?; let param_list = ast_func.param_list()?;
let ref_mut_params = ref_mut_params(&param_list); let ref_mut_params = ref_mut_params(&param_list);
let self_name = self_name(ast_func); let self_name = self_name(ast_func);
format_to!(example, "use {};\n\n", build_path(ast_func, ctx)?); format_to!(example, "use {use_path};\n\n");
if let Some(self_name) = &self_name { if let Some(self_name) = &self_name {
if let Some(mtbl) = is_ref_mut_self(ast_func) { if let Some(mut_) = is_ref_mut_self(ast_func) {
let mtbl = if mtbl == true { " mut" } else { "" }; let mut_ = if mut_ == true { "mut " } else { "" };
format_to!(example, "let{} {} = ;\n", mtbl, self_name); format_to!(example, "let {mut_}{self_name} = ;\n");
} }
} }
for param_name in &ref_mut_params { for param_name in &ref_mut_params {
format_to!(example, "let mut {} = ;\n", param_name); format_to!(example, "let mut {param_name} = ;\n");
} }
// Call the function, check result // Call the function, check result
let function_call = function_call(ast_func, &param_list, self_name.as_deref(), is_unsafe)?; let function_call = function_call(ast_func, &param_list, self_name.as_deref(), is_unsafe)?;
if returns_a_value(ast_func, ctx) { if returns_a_value(ast_func, ctx) {
if count_parameters(&param_list) < 3 { if count_parameters(&param_list) < 3 {
format_to!(example, "assert_eq!({}, );\n", function_call); format_to!(example, "assert_eq!({function_call}, );\n");
} else { } else {
format_to!(example, "let result = {};\n", function_call); format_to!(example, "let result = {function_call};\n");
example.push_str("assert_eq!(result, );\n"); example.push_str("assert_eq!(result, );\n");
} }
} else { } else {
format_to!(example, "{};\n", function_call); format_to!(example, "{function_call};\n");
} }
// Check the mutated values // Check the mutated values
if let Some(self_name) = &self_name {
if is_ref_mut_self(ast_func) == Some(true) { if is_ref_mut_self(ast_func) == Some(true) {
format_to!(example, "assert_eq!({}, );", self_name?); format_to!(example, "assert_eq!({self_name}, );");
}
} }
for param_name in &ref_mut_params { for param_name in &ref_mut_params {
format_to!(example, "assert_eq!({}, );", param_name); format_to!(example, "assert_eq!({param_name}, );");
} }
Some(example) Some(example)
} }
@ -189,7 +193,8 @@ fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<S
let intro_for_new = || { let intro_for_new = || {
let is_new = name == "new"; let is_new = name == "new";
if is_new && ret_ty == self_ty { if is_new && ret_ty == self_ty {
Some(format!("Creates a new [`{}`].", linkable_self_ty?)) let self_ty = linkable_self_ty?;
Some(format!("Creates a new [`{self_ty}`]."))
} else { } else {
None None
} }
@ -214,7 +219,9 @@ fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<S
} else { } else {
"" ""
}; };
Some(format!("Returns{reference} the {what} of this [`{}`].", linkable_self_ty?))
let self_ty = linkable_self_ty?;
Some(format!("Returns{reference} the {what} of this [`{self_ty}`]."))
} }
_ => None, _ => None,
}; };
@ -228,7 +235,9 @@ fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<S
if what == "len" { if what == "len" {
what = "length".into() what = "length".into()
}; };
Some(format!("Sets the {what} of this [`{}`].", linkable_self_ty?))
let self_ty = linkable_self_ty?;
Some(format!("Sets the {what} of this [`{self_ty}`]."))
}; };
if let Some(intro) = intro_for_new() { if let Some(intro) = intro_for_new() {
@ -404,7 +413,7 @@ fn arguments_from_params(param_list: &ast::ParamList) -> String {
// instance `TuplePat`) could be managed later. // instance `TuplePat`) could be managed later.
Some(ast::Pat::IdentPat(ident_pat)) => match ident_pat.name() { Some(ast::Pat::IdentPat(ident_pat)) => match ident_pat.name() {
Some(name) => match is_a_ref_mut_param(&param) { Some(name) => match is_a_ref_mut_param(&param) {
true => format!("&mut {}", name), true => format!("&mut {name}"),
false => name.to_string(), false => name.to_string(),
}, },
None => "_".to_string(), None => "_".to_string(),
@ -424,14 +433,15 @@ fn function_call(
let name = ast_func.name()?; let name = ast_func.name()?;
let arguments = arguments_from_params(param_list); let arguments = arguments_from_params(param_list);
let function_call = if param_list.self_param().is_some() { let function_call = if param_list.self_param().is_some() {
format!("{}.{}({})", self_name?, name, arguments) let self_ = self_name?;
format!("{self_}.{name}({arguments})")
} else if let Some(implementation) = self_partial_type(ast_func) { } else if let Some(implementation) = self_partial_type(ast_func) {
format!("{}::{}({})", implementation, name, arguments) format!("{implementation}::{name}({arguments})")
} else { } else {
format!("{}({})", name, arguments) format!("{name}({arguments})")
}; };
match is_unsafe { match is_unsafe {
true => Some(format!("unsafe {{ {} }}", function_call)), true => Some(format!("unsafe {{ {function_call} }}")),
false => Some(function_call), false => Some(function_call),
} }
} }
@ -469,8 +479,8 @@ fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
.unwrap_or_else(|| "*".into()); .unwrap_or_else(|| "*".into());
let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into(); let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into();
match module_def.canonical_path(ctx.db()) { match module_def.canonical_path(ctx.db()) {
Some(path) => Some(format!("{}::{}::{}", crate_name, path, leaf)), Some(path) => Some(format!("{crate_name}::{path}::{leaf}")),
None => Some(format!("{}::{}", crate_name, leaf)), None => Some(format!("{crate_name}::{leaf}")),
} }
} }

View file

@ -52,7 +52,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>
let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text())); let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn // Return early if we've found an existing new fn
let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?; let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
let target = variant.syntax().text_range(); let target = variant.syntax().text_range();
acc.add_group( acc.add_group(
@ -61,21 +61,15 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_>
"Generate an `is_` method for this enum variant", "Generate an `is_` method for this enum variant",
target, target,
|builder| { |builder| {
let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v)); let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} "));
let method = format!( let method = format!(
" /// Returns `true` if the {} is [`{variant}`]. " /// Returns `true` if the {enum_lowercase_name} is [`{variant_name}`].
/// ///
/// [`{variant}`]: {}::{variant} /// [`{variant_name}`]: {enum_name}::{variant_name}
#[must_use] #[must_use]
{}fn {}(&self) -> bool {{ {vis}fn {fn_name}(&self) -> bool {{
matches!(self, Self::{variant}{}) matches!(self, Self::{variant_name}{pattern_suffix})
}}", }}",
enum_lowercase_name,
enum_name,
vis,
fn_name,
pattern_suffix,
variant = variant_name
); );
add_method_to_adt(builder, &parent_enum, impl_def, &method); add_method_to_adt(builder, &parent_enum, impl_def, &method);

View file

@ -116,6 +116,14 @@ fn generate_enum_projection_method(
assist_description: &str, assist_description: &str,
props: ProjectionProps, props: ProjectionProps,
) -> Option<()> { ) -> Option<()> {
let ProjectionProps {
fn_name_prefix,
self_param,
return_prefix,
return_suffix,
happy_case,
sad_case,
} = props;
let variant = ctx.find_node_at_offset::<ast::Variant>()?; let variant = ctx.find_node_at_offset::<ast::Variant>()?;
let variant_name = variant.name()?; let variant_name = variant.name()?;
let parent_enum = ast::Adt::Enum(variant.parent_enum()); let parent_enum = ast::Adt::Enum(variant.parent_enum());
@ -125,7 +133,7 @@ fn generate_enum_projection_method(
let (field,) = record.fields().collect_tuple()?; let (field,) = record.fields().collect_tuple()?;
let name = field.name()?.to_string(); let name = field.name()?.to_string();
let ty = field.ty()?; let ty = field.ty()?;
let pattern_suffix = format!(" {{ {} }}", name); let pattern_suffix = format!(" {{ {name} }}");
(pattern_suffix, ty, name) (pattern_suffix, ty, name)
} }
ast::StructKind::Tuple(tuple) => { ast::StructKind::Tuple(tuple) => {
@ -136,11 +144,10 @@ fn generate_enum_projection_method(
ast::StructKind::Unit => return None, ast::StructKind::Unit => return None,
}; };
let fn_name = let fn_name = format!("{}_{}", fn_name_prefix, &to_lower_snake_case(&variant_name.text()));
format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn // Return early if we've found an existing new fn
let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?; let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
let target = variant.syntax().text_range(); let target = variant.syntax().text_range();
acc.add_group( acc.add_group(
@ -149,27 +156,15 @@ fn generate_enum_projection_method(
assist_description, assist_description,
target, target,
|builder| { |builder| {
let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v)); let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} "));
let method = format!( let method = format!(
" {0}fn {1}({2}) -> {3}{4}{5} {{ " {vis}fn {fn_name}({self_param}) -> {return_prefix}{field_type}{return_suffix} {{
if let Self::{6}{7} = self {{ if let Self::{variant_name}{pattern_suffix} = self {{
{8}({9}) {happy_case}({bound_name})
}} else {{ }} else {{
{10} {sad_case}
}} }}
}}", }}");
vis,
fn_name,
props.self_param,
props.return_prefix,
field_type.syntax(),
props.return_suffix,
variant_name,
pattern_suffix,
props.happy_case,
bound_name,
props.sad_case,
);
add_method_to_adt(builder, &parent_enum, impl_def, &method); add_method_to_adt(builder, &parent_enum, impl_def, &method);
}, },

View file

@ -56,23 +56,18 @@ pub(crate) fn generate_from_impl_for_enum(
target, target,
|edit| { |edit| {
let start_offset = variant.parent_enum().syntax().text_range().end(); let start_offset = variant.parent_enum().syntax().text_range().end();
let from_trait = format!("From<{}>", field_type.syntax()); let from_trait = format!("From<{field_type}>");
let impl_code = if let Some(name) = field_name { let impl_code = if let Some(name) = field_name {
format!( format!(
r#" fn from({0}: {1}) -> Self {{ r#" fn from({name}: {field_type}) -> Self {{
Self::{2} {{ {0} }} Self::{variant_name} {{ {name} }}
}}"#, }}"#
name.text(),
field_type.syntax(),
variant_name,
) )
} else { } else {
format!( format!(
r#" fn from(v: {}) -> Self {{ r#" fn from(v: {field_type}) -> Self {{
Self::{}(v) Self::{variant_name}(v)
}}"#, }}"#
field_type.syntax(),
variant_name,
) )
}; };
let from_impl = generate_trait_impl_text(&enum_, &from_trait, &impl_code); let from_impl = generate_trait_impl_text(&enum_, &from_trait, &impl_code);

View file

@ -179,7 +179,7 @@ fn add_func_to_accumulator(
let function_template = function_builder.render(adt_name.is_some()); let function_template = function_builder.render(adt_name.is_some());
let mut func = function_template.to_string(ctx.config.snippet_cap); let mut func = function_template.to_string(ctx.config.snippet_cap);
if let Some(name) = adt_name { if let Some(name) = adt_name {
func = format!("\n{}impl {} {{\n{}\n{}}}", indent, name, func, indent); func = format!("\n{indent}impl {name} {{\n{func}\n{indent}}}");
} }
builder.edit_file(file); builder.edit_file(file);
match ctx.config.snippet_cap { match ctx.config.snippet_cap {
@ -198,7 +198,7 @@ fn get_adt_source(
let file = ctx.sema.parse(range.file_id); let file = ctx.sema.parse(range.file_id);
let adt_source = let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?; ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
find_struct_impl(ctx, &adt_source, fn_name).map(|impl_| (impl_, range.file_id)) find_struct_impl(ctx, &adt_source, &[fn_name.to_string()]).map(|impl_| (impl_, range.file_id))
} }
struct FunctionTemplate { struct FunctionTemplate {
@ -212,23 +212,26 @@ struct FunctionTemplate {
impl FunctionTemplate { impl FunctionTemplate {
fn to_string(&self, cap: Option<SnippetCap>) -> String { fn to_string(&self, cap: Option<SnippetCap>) -> String {
let Self { leading_ws, fn_def, ret_type, should_focus_return_type, trailing_ws, tail_expr } =
self;
let f = match cap { let f = match cap {
Some(cap) => { Some(cap) => {
let cursor = if self.should_focus_return_type { let cursor = if *should_focus_return_type {
// Focus the return type if there is one // Focus the return type if there is one
match self.ret_type { match ret_type {
Some(ref ret_type) => ret_type.syntax(), Some(ret_type) => ret_type.syntax(),
None => self.tail_expr.syntax(), None => tail_expr.syntax(),
} }
} else { } else {
self.tail_expr.syntax() tail_expr.syntax()
}; };
render_snippet(cap, self.fn_def.syntax(), Cursor::Replace(cursor)) render_snippet(cap, fn_def.syntax(), Cursor::Replace(cursor))
} }
None => self.fn_def.to_string(), None => fn_def.to_string(),
}; };
format!("{}{}{}", self.leading_ws, f, self.trailing_ws) format!("{leading_ws}{f}{trailing_ws}")
} }
} }
@ -330,9 +333,9 @@ impl FunctionBuilder {
let mut indent = IndentLevel::from_node(&it); let mut indent = IndentLevel::from_node(&it);
if is_method { if is_method {
indent = indent + 1; indent = indent + 1;
leading_ws = format!("{}", indent); leading_ws = format!("{indent}");
} else { } else {
leading_ws = format!("\n\n{}", indent); leading_ws = format!("\n\n{indent}");
} }
fn_def = fn_def.indent(indent); fn_def = fn_def.indent(indent);
@ -340,9 +343,10 @@ impl FunctionBuilder {
} }
GeneratedFunctionTarget::InEmptyItemList(it) => { GeneratedFunctionTarget::InEmptyItemList(it) => {
let indent = IndentLevel::from_node(&it); let indent = IndentLevel::from_node(&it);
leading_ws = format!("\n{}", indent + 1); let leading_indent = indent + 1;
fn_def = fn_def.indent(indent + 1); leading_ws = format!("\n{leading_indent}");
trailing_ws = format!("\n{}", indent); fn_def = fn_def.indent(leading_indent);
trailing_ws = format!("\n{indent}");
} }
}; };

View file

@ -1,6 +1,9 @@
use ide_db::famous_defs::FamousDefs; use ide_db::famous_defs::FamousDefs;
use stdx::{format_to, to_lower_snake_case}; use stdx::{format_to, to_lower_snake_case};
use syntax::ast::{self, AstNode, HasName, HasVisibility}; use syntax::{
ast::{self, AstNode, HasName, HasVisibility},
TextRange,
};
use crate::{ use crate::{
utils::{convert_reference_type, find_impl_block_end, find_struct_impl, generate_impl_text}, utils::{convert_reference_type, find_impl_block_end, find_struct_impl, generate_impl_text},
@ -72,61 +75,193 @@ pub(crate) fn generate_getter_mut(acc: &mut Assists, ctx: &AssistContext<'_>) ->
generate_getter_impl(acc, ctx, true) generate_getter_impl(acc, ctx, true)
} }
#[derive(Clone, Debug)]
struct RecordFieldInfo {
field_name: syntax::ast::Name,
field_ty: syntax::ast::Type,
fn_name: String,
target: TextRange,
}
struct GetterInfo {
impl_def: Option<ast::Impl>,
strukt: ast::Struct,
mutable: bool,
}
pub(crate) fn generate_getter_impl( pub(crate) fn generate_getter_impl(
acc: &mut Assists, acc: &mut Assists,
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
mutable: bool, mutable: bool,
) -> Option<()> { ) -> Option<()> {
// This if condition denotes two modes this assist can work in:
// - First is acting upon selection of record fields
// - Next is acting upon a single record field
//
// This is the only part where implementation diverges a bit,
// subsequent code is generic for both of these modes
let (strukt, info_of_record_fields, fn_names) = if !ctx.has_empty_selection() {
// Selection Mode
let node = ctx.covering_element();
let node = match node {
syntax::NodeOrToken::Node(n) => n,
syntax::NodeOrToken::Token(t) => t.parent()?,
};
let parent_struct = node.ancestors().find_map(ast::Struct::cast)?;
let (info_of_record_fields, field_names) =
extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), mutable)?;
(parent_struct, info_of_record_fields, field_names)
} else {
// Single Record Field mode
let strukt = ctx.find_node_at_offset::<ast::Struct>()?; let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
let field = ctx.find_node_at_offset::<ast::RecordField>()?; let field = ctx.find_node_at_offset::<ast::RecordField>()?;
let field_name = field.name()?; let record_field_info = parse_record_field(field, mutable)?;
let field_ty = field.ty()?;
// Return early if we've found an existing fn let fn_name = record_field_info.fn_name.clone();
let mut fn_name = to_lower_snake_case(&field_name.to_string());
if mutable { (strukt, vec![record_field_info], vec![fn_name])
format_to!(fn_name, "_mut"); };
// No record fields to do work on :(
if info_of_record_fields.len() == 0 {
return None;
} }
let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;
let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?;
let (id, label) = if mutable { let (id, label) = if mutable {
("generate_getter_mut", "Generate a mut getter method") ("generate_getter_mut", "Generate a mut getter method")
} else { } else {
("generate_getter", "Generate a getter method") ("generate_getter", "Generate a getter method")
}; };
let target = field.syntax().text_range();
// Computing collective text range of all record fields in selected region
let target: TextRange = info_of_record_fields
.iter()
.map(|record_field_info| record_field_info.target)
.reduce(|acc, target| acc.cover(target))?;
let getter_info = GetterInfo { impl_def, strukt, mutable };
acc.add_group( acc.add_group(
&GroupLabel("Generate getter/setter".to_owned()), &GroupLabel("Generate getter/setter".to_owned()),
AssistId(id, AssistKind::Generate), AssistId(id, AssistKind::Generate),
label, label,
target, target,
|builder| { |builder| {
let record_fields_count = info_of_record_fields.len();
let mut buf = String::with_capacity(512); let mut buf = String::with_capacity(512);
if impl_def.is_some() { // Check if an impl exists
if let Some(impl_def) = &getter_info.impl_def {
// Check if impl is empty
if let Some(assoc_item_list) = impl_def.assoc_item_list() {
if assoc_item_list.assoc_items().next().is_some() {
// If not empty then only insert a new line
buf.push('\n'); buf.push('\n');
} }
}
}
let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); for (i, record_field_info) in info_of_record_fields.iter().enumerate() {
let (ty, body) = if mutable { // this buf inserts a newline at the end of a getter
(format!("&mut {}", field_ty), format!("&mut self.{}", field_name)) // automatically, if one wants to add one more newline
// for separating it from other assoc items, that needs
// to be handled spearately
let mut getter_buf =
generate_getter_from_info(ctx, &getter_info, &record_field_info);
// Insert `$0` only for last getter we generate
if i == record_fields_count - 1 {
getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
}
// For first element we do not merge with '\n', as
// that can be inserted by impl_def check defined
// above, for other cases which are:
//
// - impl exists but it empty, here we would ideally
// not want to keep newline between impl <struct> {
// and fn <fn-name>() { line
//
// - next if impl itself does not exist, in this
// case we ourselves generate a new impl and that
// again ends up with the same reasoning as above
// for not keeping newline
if i == 0 {
buf = buf + &getter_buf;
} else {
buf = buf + "\n" + &getter_buf;
}
// We don't insert a new line at the end of
// last getter as it will end up in the end
// of an impl where we would not like to keep
// getter and end of impl ( i.e. `}` ) with an
// extra line for no reason
if i < record_fields_count - 1 {
buf = buf + "\n";
}
}
let start_offset = getter_info
.impl_def
.as_ref()
.and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf))
.unwrap_or_else(|| {
buf = generate_impl_text(&ast::Adt::Struct(getter_info.strukt.clone()), &buf);
getter_info.strukt.syntax().text_range().end()
});
match ctx.config.snippet_cap {
Some(cap) => builder.insert_snippet(cap, start_offset, buf),
None => builder.insert(start_offset, buf),
}
},
)
}
fn generate_getter_from_info(
ctx: &AssistContext<'_>,
info: &GetterInfo,
record_field_info: &RecordFieldInfo,
) -> String {
let mut buf = String::with_capacity(512);
let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
let (ty, body) = if info.mutable {
(
format!("&mut {}", record_field_info.field_ty),
format!("&mut self.{}", record_field_info.field_name),
)
} else { } else {
(|| { (|| {
let krate = ctx.sema.scope(field_ty.syntax())?.krate(); let krate = ctx.sema.scope(record_field_info.field_ty.syntax())?.krate();
let famous_defs = &FamousDefs(&ctx.sema, krate); let famous_defs = &FamousDefs(&ctx.sema, krate);
ctx.sema ctx.sema
.resolve_type(&field_ty) .resolve_type(&record_field_info.field_ty)
.and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs)) .and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs))
.map(|conversion| { .map(|conversion| {
cov_mark::hit!(convert_reference_type); cov_mark::hit!(convert_reference_type);
( (
conversion.convert_type(ctx.db()), conversion.convert_type(ctx.db()),
conversion.getter(field_name.to_string()), conversion.getter(record_field_info.field_name.to_string()),
) )
}) })
})() })()
.unwrap_or_else(|| (format!("&{}", field_ty), format!("&self.{}", field_name))) .unwrap_or_else(|| {
(
format!("&{}", record_field_info.field_ty),
format!("&self.{}", record_field_info.field_name),
)
})
}; };
format_to!( format_to!(
@ -135,27 +270,62 @@ pub(crate) fn generate_getter_impl(
{} {}
}}", }}",
vis, vis,
fn_name, record_field_info.fn_name,
mutable.then(|| "mut ").unwrap_or_default(), info.mutable.then(|| "mut ").unwrap_or_default(),
ty, ty,
body, body,
); );
let start_offset = impl_def buf
.and_then(|impl_def| find_impl_block_end(impl_def, &mut buf)) }
.unwrap_or_else(|| {
buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
strukt.syntax().text_range().end()
});
match ctx.config.snippet_cap { fn extract_and_parse_record_fields(
Some(cap) => { node: &ast::Struct,
builder.insert_snippet(cap, start_offset, buf.replacen("fn ", "fn $0", 1)) selection_range: TextRange,
mutable: bool,
) -> Option<(Vec<RecordFieldInfo>, Vec<String>)> {
let mut field_names: Vec<String> = vec![];
let field_list = node.field_list()?;
match field_list {
ast::FieldList::RecordFieldList(ele) => {
let info_of_record_fields_in_selection = ele
.fields()
.filter_map(|record_field| {
if selection_range.contains_range(record_field.syntax().text_range()) {
let record_field_info = parse_record_field(record_field, mutable)?;
field_names.push(record_field_info.fn_name.clone());
return Some(record_field_info);
} }
None => builder.insert(start_offset, buf),
None
})
.collect::<Vec<RecordFieldInfo>>();
if info_of_record_fields_in_selection.len() == 0 {
return None;
} }
},
) Some((info_of_record_fields_in_selection, field_names))
}
ast::FieldList::TupleFieldList(_) => {
return None;
}
}
}
fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<RecordFieldInfo> {
let field_name = record_field.name()?;
let field_ty = record_field.ty()?;
let mut fn_name = to_lower_snake_case(&field_name.to_string());
if mutable {
format_to!(fn_name, "_mut");
}
let target = record_field.syntax().text_range();
Some(RecordFieldInfo { field_name, field_ty, fn_name, target })
} }
#[cfg(test)] #[cfg(test)]
@ -485,6 +655,55 @@ impl Context {
fn $0data(&self) -> Result<&bool, &i32> { fn $0data(&self) -> Result<&bool, &i32> {
self.data.as_ref() self.data.as_ref()
} }
}
"#,
);
}
#[test]
fn test_generate_multiple_getters_from_selection() {
check_assist(
generate_getter,
r#"
struct Context {
$0data: Data,
count: usize,$0
}
"#,
r#"
struct Context {
data: Data,
count: usize,
}
impl Context {
fn data(&self) -> &Data {
&self.data
}
fn $0count(&self) -> &usize {
&self.count
}
}
"#,
);
}
#[test]
fn test_generate_multiple_getters_from_selection_one_already_exists() {
// As impl for one of the fields already exist, skip it
check_assist_not_applicable(
generate_getter,
r#"
struct Context {
$0data: Data,
count: usize,$0
}
impl Context {
fn data(&self) -> &Data {
&self.data
}
} }
"#, "#,
); );

View file

@ -28,7 +28,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
acc.add( acc.add(
AssistId("generate_impl", AssistKind::Generate), AssistId("generate_impl", AssistKind::Generate),
format!("Generate impl for `{}`", name), format!("Generate impl for `{name}`"),
target, target,
|edit| { |edit| {
let start_offset = nominal.syntax().text_range().end(); let start_offset = nominal.syntax().text_range().end();

View file

@ -39,7 +39,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
}; };
// Return early if we've found an existing new fn // Return early if we've found an existing new fn
let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?; let impl_def =
find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[String::from("new")])?;
let current_module = ctx.sema.scope(strukt.syntax())?.module(); let current_module = ctx.sema.scope(strukt.syntax())?.module();
@ -51,11 +52,13 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
buf.push('\n'); buf.push('\n');
} }
let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} "));
let trivial_constructors = field_list let trivial_constructors = field_list
.fields() .fields()
.map(|f| { .map(|f| {
let name = f.name()?;
let ty = ctx.sema.resolve_type(&f.ty()?)?; let ty = ctx.sema.resolve_type(&f.ty()?)?;
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?)); let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
@ -72,7 +75,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
&ty, &ty,
)?; )?;
Some(format!("{}: {}", f.name()?.syntax(), expr)) Some(format!("{name}: {expr}"))
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@ -81,7 +84,10 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
.enumerate() .enumerate()
.filter_map(|(i, f)| { .filter_map(|(i, f)| {
if trivial_constructors[i].is_none() { if trivial_constructors[i].is_none() {
Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax())) let name = f.name()?;
let ty = f.ty()?;
Some(format!("{name}: {ty}"))
} else { } else {
None None
} }
@ -101,7 +107,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
}) })
.format(", "); .format(", ");
format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields); format_to!(buf, " {vis}fn new({params}) -> Self {{ Self {{ {fields} }} }}");
let start_offset = impl_def let start_offset = impl_def
.and_then(|impl_def| find_impl_block_start(impl_def, &mut buf)) .and_then(|impl_def| find_impl_block_start(impl_def, &mut buf))

View file

@ -36,11 +36,8 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
// Return early if we've found an existing fn // Return early if we've found an existing fn
let fn_name = to_lower_snake_case(&field_name.to_string()); let fn_name = to_lower_snake_case(&field_name.to_string());
let impl_def = find_struct_impl( let impl_def =
ctx, find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[format!("set_{fn_name}")])?;
&ast::Adt::Struct(strukt.clone()),
format!("set_{}", fn_name).as_str(),
)?;
let target = field.syntax().text_range(); let target = field.syntax().text_range();
acc.add_group( acc.add_group(
@ -55,18 +52,12 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
buf.push('\n'); buf.push('\n');
} }
let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} "));
format_to!( format_to!(
buf, buf,
" {}fn set_{}(&mut self, {}: {}) {{ " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{
self.{} = {}; self.{fn_name} = {fn_name};
}}", }}"
vis,
fn_name,
fn_name,
field_ty,
fn_name,
fn_name,
); );
let start_offset = impl_def let start_offset = impl_def

View file

@ -331,10 +331,14 @@ fn calc_depth(pat: &ast::Pat, depth: usize) -> usize {
// FIXME: change the new fn checking to a more semantic approach when that's more // FIXME: change the new fn checking to a more semantic approach when that's more
// viable (e.g. we process proc macros, etc) // viable (e.g. we process proc macros, etc)
// FIXME: this partially overlaps with `find_impl_block_*` // FIXME: this partially overlaps with `find_impl_block_*`
/// `find_struct_impl` looks for impl of a struct, but this also has additional feature
/// where it takes a list of function names and check if they exist inside impl_, if
/// even one match is found, it returns None
pub(crate) fn find_struct_impl( pub(crate) fn find_struct_impl(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
adt: &ast::Adt, adt: &ast::Adt,
name: &str, names: &[String],
) -> Option<Option<ast::Impl>> { ) -> Option<Option<ast::Impl>> {
let db = ctx.db(); let db = ctx.db();
let module = adt.syntax().parent()?; let module = adt.syntax().parent()?;
@ -362,7 +366,7 @@ pub(crate) fn find_struct_impl(
}); });
if let Some(ref impl_blk) = block { if let Some(ref impl_blk) = block {
if has_fn(impl_blk, name) { if has_any_fn(impl_blk, names) {
return None; return None;
} }
} }
@ -370,12 +374,12 @@ pub(crate) fn find_struct_impl(
Some(block) Some(block)
} }
fn has_fn(imp: &ast::Impl, rhs_name: &str) -> bool { fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool {
if let Some(il) = imp.assoc_item_list() { if let Some(il) = imp.assoc_item_list() {
for item in il.assoc_items() { for item in il.assoc_items() {
if let ast::AssocItem::Fn(f) = item { if let ast::AssocItem::Fn(f) = item {
if let Some(name) = f.name() { if let Some(name) = f.name() {
if name.text().eq_ignore_ascii_case(rhs_name) { if names.iter().any(|n| n.eq_ignore_ascii_case(&name.text())) {
return true; return true;
} }
} }

View file

@ -1,37 +0,0 @@
use hir::InFile;
use crate::{Diagnostic, DiagnosticsContext};
// Diagnostic: incorrect-try-target
//
// This diagnostic is triggered if a question mark operator was used in a context where it is not applicable.
pub(crate) fn incorrect_try_expr(
ctx: &DiagnosticsContext<'_>,
d: &hir::IncorrectTryExpr,
) -> Diagnostic {
Diagnostic::new(
"incorrect-try-target",
format!("the return type of the containing function does not implement `FromResidual`"),
ctx.sema
.diagnostics_display_range(InFile::new(d.expr.file_id, d.expr.value.clone().into()))
.range,
)
}
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
#[test]
fn try_ops_diag() {
check_diagnostics(
r#"
//- minicore: try
fn test() {
core::ops::ControlFlow::<u32, f32>::Continue(1.0)?;
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the return type of the containing function does not implement `FromResidual`
}
"#,
);
}
}

View file

@ -1,35 +0,0 @@
use hir::{db::DefDatabase, HirDisplay};
use crate::{Diagnostic, DiagnosticsContext};
// Diagnostic: not-implemented
//
// This diagnostic is triggered if a type doesn't implement a necessary trait.
pub(crate) fn not_implemented(ctx: &DiagnosticsContext<'_>, d: &hir::NotImplemented) -> Diagnostic {
Diagnostic::new(
"not-implemented",
format!(
"the trait `{}` is not implemented for `{}`",
ctx.sema.db.trait_data(d.trait_).name,
d.ty.display(ctx.sema.db)
),
ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
)
}
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
#[test]
fn missing_try_impl() {
check_diagnostics(
r#"
//- minicore: try
fn main() {
()?;
} //^^ error: the trait `Try` is not implemented for `()`
"#,
)
}
}

View file

@ -29,7 +29,6 @@ mod handlers {
pub(crate) mod break_outside_of_loop; pub(crate) mod break_outside_of_loop;
pub(crate) mod inactive_code; pub(crate) mod inactive_code;
pub(crate) mod incorrect_case; pub(crate) mod incorrect_case;
pub(crate) mod incorrect_try_expr;
pub(crate) mod invalid_derive_target; pub(crate) mod invalid_derive_target;
pub(crate) mod macro_error; pub(crate) mod macro_error;
pub(crate) mod malformed_derive; pub(crate) mod malformed_derive;
@ -37,7 +36,6 @@ mod handlers {
pub(crate) mod missing_fields; pub(crate) mod missing_fields;
pub(crate) mod missing_match_arms; pub(crate) mod missing_match_arms;
pub(crate) mod missing_unsafe; pub(crate) mod missing_unsafe;
pub(crate) mod not_implemented;
pub(crate) mod no_such_field; pub(crate) mod no_such_field;
pub(crate) mod replace_filter_map_next_with_find_map; pub(crate) mod replace_filter_map_next_with_find_map;
pub(crate) mod type_mismatch; pub(crate) mod type_mismatch;
@ -227,14 +225,12 @@ pub fn diagnostics(
let d = match diag { let d = match diag {
AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d), AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d),
AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d), AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d),
AnyDiagnostic::IncorrectTryExpr(d) => handlers::incorrect_try_expr::incorrect_try_expr(&ctx, &d),
AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d), AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d),
AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d), AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d),
AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d), AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d),
AnyDiagnostic::MissingFields(d) => handlers::missing_fields::missing_fields(&ctx, &d), AnyDiagnostic::MissingFields(d) => handlers::missing_fields::missing_fields(&ctx, &d),
AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d), AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d),
AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d),
AnyDiagnostic::NotImplemented(d) => handlers::not_implemented::not_implemented(&ctx, &d),
AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d),
AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d),
AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d),

View file

@ -4913,22 +4913,6 @@ fn foo() -> NotResult<(), Short> {
``` ```
"#]], "#]],
); );
check_hover_range(
r#"
//- minicore: try
use core::ops::ControlFlow;
fn foo() -> ControlFlow<()> {
$0ControlFlow::Break(())?$0;
ControlFlow::Continue(())
}
"#,
expect![[r#"
```text
Try Target Type: ControlFlow<(), {unknown}>
Propagated as: ControlFlow<(), ()>
```
"#]],
);
} }
#[test] #[test]
@ -4945,7 +4929,7 @@ fn foo() -> Option<()> {
"#, "#,
expect![[r#" expect![[r#"
```rust ```rust
i32 <Option<i32> as Try>::Output
```"#]], ```"#]],
); );
} }

View file

@ -482,8 +482,18 @@ impl Analysis {
} }
/// Returns crates this file belongs too. /// Returns crates this file belongs too.
pub fn crate_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> { pub fn crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
self.with_db(|db| parent_module::crate_for(db, file_id)) self.with_db(|db| parent_module::crates_for(db, file_id))
}
/// Returns crates this file belongs too.
pub fn transitive_rev_deps(&self, crate_id: CrateId) -> Cancellable<Vec<CrateId>> {
self.with_db(|db| db.crate_graph().transitive_rev_deps(crate_id).collect())
}
/// Returns crates this file *might* belong too.
pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
} }
/// Returns the edition of the given crate. /// Returns the edition of the given crate.

View file

@ -1,6 +1,6 @@
use hir::Semantics; use hir::{db::DefDatabase, Semantics};
use ide_db::{ use ide_db::{
base_db::{CrateId, FileId, FilePosition}, base_db::{CrateId, FileId, FileLoader, FilePosition},
RootDatabase, RootDatabase,
}; };
use itertools::Itertools; use itertools::Itertools;
@ -55,9 +55,13 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
} }
/// Returns `Vec` for the same reason as `parent_module` /// Returns `Vec` for the same reason as `parent_module`
pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
let sema = Semantics::new(db); db.relevant_crates(file_id)
sema.to_module_defs(file_id).map(|module| module.krate().into()).unique().collect() .iter()
.copied()
.filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())
.sorted()
.collect()
} }
#[cfg(test)] #[cfg(test)]
@ -147,7 +151,7 @@ $0
mod foo; mod foo;
"#, "#,
); );
assert_eq!(analysis.crate_for(file_id).unwrap().len(), 1); assert_eq!(analysis.crates_for(file_id).unwrap().len(), 1);
} }
#[test] #[test]
@ -162,6 +166,6 @@ mod baz;
mod baz; mod baz;
"#, "#,
); );
assert_eq!(analysis.crate_for(file_id).unwrap().len(), 2); assert_eq!(analysis.crates_for(file_id).unwrap().len(), 2);
} }
} }

View file

@ -210,9 +210,7 @@ fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Opt
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() { if let Some(&[x]) = def.as_deref() {
return Some(x); return Some(x);
} else { }
continue;
};
} }
None None
} }

View file

@ -45,7 +45,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
if let Some(file_id) = file_id { if let Some(file_id) = file_id {
format_to!(buf, "\nFile info:\n"); format_to!(buf, "\nFile info:\n");
let crates = crate::parent_module::crate_for(db, file_id); let crates = crate::parent_module::crates_for(db, file_id);
if crates.is_empty() { if crates.is_empty() {
format_to!(buf, "Does not belong to any crate"); format_to!(buf, "Does not belong to any crate");
} }

View file

@ -17,7 +17,6 @@ semver = "1.0.14"
serde = { version = "1.0.137", features = ["derive"] } serde = { version = "1.0.137", features = ["derive"] }
serde_json = "1.0.86" serde_json = "1.0.86"
anyhow = "1.0.62" anyhow = "1.0.62"
expect-test = "1.4.0"
la-arena = { version = "0.3.0", path = "../../lib/la-arena" } la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
cfg = { path = "../cfg", version = "0.0.0" } cfg = { path = "../cfg", version = "0.0.0" }
@ -26,3 +25,6 @@ toolchain = { path = "../toolchain", version = "0.0.0" }
paths = { path = "../paths", version = "0.0.0" } paths = { path = "../paths", version = "0.0.0" }
stdx = { path = "../stdx", version = "0.0.0" } stdx = { path = "../stdx", version = "0.0.0" }
profile = { path = "../profile", version = "0.0.0" } profile = { path = "../profile", version = "0.0.0" }
[dev-dependencies]
expect-test = "1.4.0"

View file

@ -6,7 +6,12 @@
//! This module implements this second part. We use "build script" terminology //! This module implements this second part. We use "build script" terminology
//! here, but it covers procedural macros as well. //! here, but it covers procedural macros as well.
use std::{cell::RefCell, io, path::PathBuf, process::Command}; use std::{
cell::RefCell,
io, mem,
path::{self, PathBuf},
process::Command,
};
use cargo_metadata::{camino::Utf8Path, Message}; use cargo_metadata::{camino::Utf8Path, Message};
use la_arena::ArenaMap; use la_arena::ArenaMap;
@ -15,11 +20,14 @@ use rustc_hash::FxHashMap;
use semver::Version; use semver::Version;
use serde::Deserialize; use serde::Deserialize;
use crate::{cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, Package}; use crate::{
cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation,
InvocationStrategy, Package,
};
#[derive(Debug, Default, Clone, PartialEq, Eq)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct WorkspaceBuildScripts { pub struct WorkspaceBuildScripts {
outputs: ArenaMap<Package, Option<BuildScriptOutput>>, outputs: ArenaMap<Package, BuildScriptOutput>,
error: Option<String>, error: Option<String>,
} }
@ -38,17 +46,26 @@ pub(crate) struct BuildScriptOutput {
pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>, pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
} }
impl WorkspaceBuildScripts { impl BuildScriptOutput {
fn build_command(config: &CargoConfig) -> Command { fn is_unchanged(&self) -> bool {
if let Some([program, args @ ..]) = config.run_build_script_command.as_deref() { self.cfgs.is_empty()
let mut cmd = Command::new(program); && self.envs.is_empty()
cmd.args(args); && self.out_dir.is_none()
cmd.envs(&config.extra_env); && self.proc_macro_dylib_path.is_none()
return cmd; }
} }
impl WorkspaceBuildScripts {
fn build_command(config: &CargoConfig) -> io::Result<Command> {
let mut cmd = match config.run_build_script_command.as_deref() {
Some([program, args @ ..]) => {
let mut cmd = Command::new(program);
cmd.args(args);
cmd
}
_ => {
let mut cmd = Command::new(toolchain::cargo()); let mut cmd = Command::new(toolchain::cargo());
cmd.envs(&config.extra_env);
cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]); cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);
// --all-targets includes tests, benches and examples in addition to the // --all-targets includes tests, benches and examples in addition to the
@ -77,37 +94,9 @@ impl WorkspaceBuildScripts {
cmd cmd
} }
};
pub(crate) fn run( cmd.envs(&config.extra_env);
config: &CargoConfig,
workspace: &CargoWorkspace,
progress: &dyn Fn(String),
toolchain: &Option<Version>,
) -> io::Result<WorkspaceBuildScripts> {
const RUST_1_62: Version = Version::new(1, 62, 0);
match Self::run_(Self::build_command(config), config, workspace, progress) {
Ok(WorkspaceBuildScripts { error: Some(error), .. })
if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) =>
{
// building build scripts failed, attempt to build with --keep-going so
// that we potentially get more build data
let mut cmd = Self::build_command(config);
cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1");
let mut res = Self::run_(cmd, config, workspace, progress)?;
res.error = Some(error);
Ok(res)
}
res => res,
}
}
fn run_(
mut cmd: Command,
config: &CargoConfig,
workspace: &CargoWorkspace,
progress: &dyn Fn(String),
) -> io::Result<WorkspaceBuildScripts> {
if config.wrap_rustc_in_build_scripts { if config.wrap_rustc_in_build_scripts {
// Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
// that to compile only proc macros and build scripts during the initial // that to compile only proc macros and build scripts during the initial
@ -117,8 +106,126 @@ impl WorkspaceBuildScripts {
cmd.env("RA_RUSTC_WRAPPER", "1"); cmd.env("RA_RUSTC_WRAPPER", "1");
} }
cmd.current_dir(workspace.workspace_root()); Ok(cmd)
}
/// Runs the build scripts for the given workspace
pub(crate) fn run_for_workspace(
config: &CargoConfig,
workspace: &CargoWorkspace,
progress: &dyn Fn(String),
toolchain: &Option<Version>,
) -> io::Result<WorkspaceBuildScripts> {
const RUST_1_62: Version = Version::new(1, 62, 0);
let current_dir = match &config.invocation_location {
InvocationLocation::Root(root) if config.run_build_script_command.is_some() => {
root.as_path()
}
_ => &workspace.workspace_root(),
}
.as_ref();
match Self::run_per_ws(Self::build_command(config)?, workspace, current_dir, progress) {
Ok(WorkspaceBuildScripts { error: Some(error), .. })
if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) =>
{
// building build scripts failed, attempt to build with --keep-going so
// that we potentially get more build data
let mut cmd = Self::build_command(config)?;
cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1");
let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?;
res.error = Some(error);
Ok(res)
}
res => res,
}
}
/// Runs the build scripts by invoking the configured command *once*.
/// This populates the outputs for all passed in workspaces.
pub(crate) fn run_once(
config: &CargoConfig,
workspaces: &[&CargoWorkspace],
progress: &dyn Fn(String),
) -> io::Result<Vec<WorkspaceBuildScripts>> {
assert_eq!(config.invocation_strategy, InvocationStrategy::Once);
let current_dir = match &config.invocation_location {
InvocationLocation::Root(root) => root,
InvocationLocation::Workspace => {
return Err(io::Error::new(
io::ErrorKind::Other,
"Cannot run build scripts from workspace with invocation strategy `once`",
))
}
};
let cmd = Self::build_command(config)?;
// NB: Cargo.toml could have been modified between `cargo metadata` and
// `cargo check`. We shouldn't assume that package ids we see here are
// exactly those from `config`.
let mut by_id = FxHashMap::default();
// some workspaces might depend on the same crates, so we need to duplicate the outputs
// to those collisions
let mut collisions = Vec::new();
let mut res: Vec<_> = workspaces
.iter()
.enumerate()
.map(|(idx, workspace)| {
let mut res = WorkspaceBuildScripts::default();
for package in workspace.packages() {
res.outputs.insert(package, BuildScriptOutput::default());
if by_id.contains_key(&workspace[package].id) {
collisions.push((&workspace[package].id, idx, package));
} else {
by_id.insert(workspace[package].id.clone(), (package, idx));
}
}
res
})
.collect();
let errors = Self::run_command(
cmd,
current_dir.as_path().as_ref(),
|package, cb| {
if let Some(&(package, workspace)) = by_id.get(package) {
cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
}
},
progress,
)?;
res.iter_mut().for_each(|it| it.error = errors.clone());
collisions.into_iter().for_each(|(id, workspace, package)| {
if let Some(&(p, w)) = by_id.get(id) {
res[workspace].outputs[package] = res[w].outputs[p].clone();
}
});
if tracing::enabled!(tracing::Level::INFO) {
for (idx, workspace) in workspaces.iter().enumerate() {
for package in workspace.packages() {
let package_build_data = &mut res[idx].outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
"{}: {:?}",
workspace[package].manifest.parent().display(),
package_build_data,
);
}
}
}
}
Ok(res)
}
fn run_per_ws(
cmd: Command,
workspace: &CargoWorkspace,
current_dir: &path::Path,
progress: &dyn Fn(String),
) -> io::Result<WorkspaceBuildScripts> {
let mut res = WorkspaceBuildScripts::default(); let mut res = WorkspaceBuildScripts::default();
let outputs = &mut res.outputs; let outputs = &mut res.outputs;
// NB: Cargo.toml could have been modified between `cargo metadata` and // NB: Cargo.toml could have been modified between `cargo metadata` and
@ -126,10 +233,46 @@ impl WorkspaceBuildScripts {
// exactly those from `config`. // exactly those from `config`.
let mut by_id: FxHashMap<String, Package> = FxHashMap::default(); let mut by_id: FxHashMap<String, Package> = FxHashMap::default();
for package in workspace.packages() { for package in workspace.packages() {
outputs.insert(package, None); outputs.insert(package, BuildScriptOutput::default());
by_id.insert(workspace[package].id.clone(), package); by_id.insert(workspace[package].id.clone(), package);
} }
res.error = Self::run_command(
cmd,
current_dir,
|package, cb| {
if let Some(&package) = by_id.get(package) {
cb(&workspace[package].name, &mut outputs[package]);
}
},
progress,
)?;
if tracing::enabled!(tracing::Level::INFO) {
for package in workspace.packages() {
let package_build_data = &mut outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
"{}: {:?}",
workspace[package].manifest.parent().display(),
package_build_data,
);
}
}
}
Ok(res)
}
fn run_command(
mut cmd: Command,
current_dir: &path::Path,
// ideally this would be something like:
// with_output_for: impl FnMut(&str, dyn FnOnce(&mut BuildScriptOutput)),
// but owned trait objects aren't a thing
mut with_output_for: impl FnMut(&str, &mut dyn FnMut(&str, &mut BuildScriptOutput)),
progress: &dyn Fn(String),
) -> io::Result<Option<String>> {
let errors = RefCell::new(String::new()); let errors = RefCell::new(String::new());
let push_err = |err: &str| { let push_err = |err: &str| {
let mut e = errors.borrow_mut(); let mut e = errors.borrow_mut();
@ -137,7 +280,8 @@ impl WorkspaceBuildScripts {
e.push('\n'); e.push('\n');
}; };
tracing::info!("Running build scripts: {:?}", cmd); tracing::info!("Running build scripts in {}: {:?}", current_dir.display(), cmd);
cmd.current_dir(current_dir);
let output = stdx::process::spawn_with_streaming_output( let output = stdx::process::spawn_with_streaming_output(
cmd, cmd,
&mut |line| { &mut |line| {
@ -149,16 +293,12 @@ impl WorkspaceBuildScripts {
.unwrap_or_else(|_| Message::TextLine(line.to_string())); .unwrap_or_else(|_| Message::TextLine(line.to_string()));
match message { match message {
Message::BuildScriptExecuted(message) => { Message::BuildScriptExecuted(mut message) => {
let package = match by_id.get(&message.package_id.repr) { with_output_for(&message.package_id.repr, &mut |name, data| {
Some(&it) => it, progress(format!("running build-script: {}", name));
None => return,
};
progress(format!("running build-script: {}", workspace[package].name));
let cfgs = { let cfgs = {
let mut acc = Vec::new(); let mut acc = Vec::new();
for cfg in message.cfgs { for cfg in &message.cfgs {
match cfg.parse::<CfgFlag>() { match cfg.parse::<CfgFlag>() {
Ok(it) => acc.push(it), Ok(it) => acc.push(it),
Err(err) => { Err(err) => {
@ -172,38 +312,39 @@ impl WorkspaceBuildScripts {
} }
acc acc
}; };
if !message.env.is_empty() {
data.envs = mem::take(&mut message.env);
}
// cargo_metadata crate returns default (empty) path for // cargo_metadata crate returns default (empty) path for
// older cargos, which is not absolute, so work around that. // older cargos, which is not absolute, so work around that.
let out_dir = message.out_dir.into_os_string(); let out_dir = mem::take(&mut message.out_dir).into_os_string();
if !out_dir.is_empty() { if !out_dir.is_empty() {
let data = outputs[package].get_or_insert_with(Default::default); let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir));
data.out_dir = Some(AbsPathBuf::assert(PathBuf::from(out_dir))); // inject_cargo_env(package, package_build_data);
// NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
if let Some(out_dir) =
out_dir.as_os_str().to_str().map(|s| s.to_owned())
{
data.envs.push(("OUT_DIR".to_string(), out_dir));
}
data.out_dir = Some(out_dir);
data.cfgs = cfgs; data.cfgs = cfgs;
} }
if !message.env.is_empty() { });
outputs[package].get_or_insert_with(Default::default).envs =
message.env;
}
} }
Message::CompilerArtifact(message) => { Message::CompilerArtifact(message) => {
let package = match by_id.get(&message.package_id.repr) { with_output_for(&message.package_id.repr, &mut |name, data| {
Some(it) => *it, progress(format!("building proc-macros: {}", name));
None => return,
};
progress(format!("building proc-macros: {}", message.target.name));
if message.target.kind.iter().any(|k| k == "proc-macro") { if message.target.kind.iter().any(|k| k == "proc-macro") {
// Skip rmeta file // Skip rmeta file
if let Some(filename) = if let Some(filename) =
message.filenames.iter().find(|name| is_dylib(name)) message.filenames.iter().find(|name| is_dylib(name))
{ {
let filename = AbsPathBuf::assert(PathBuf::from(&filename)); let filename = AbsPathBuf::assert(PathBuf::from(&filename));
outputs[package] data.proc_macro_dylib_path = Some(filename);
.get_or_insert_with(Default::default)
.proc_macro_dylib_path = Some(filename);
} }
} }
});
} }
Message::CompilerMessage(message) => { Message::CompilerMessage(message) => {
progress(message.target.name); progress(message.target.name);
@ -222,32 +363,13 @@ impl WorkspaceBuildScripts {
}, },
)?; )?;
for package in workspace.packages() { let errors = if !output.status.success() {
if let Some(package_build_data) = &mut outputs[package] { let errors = errors.into_inner();
tracing::info!( Some(if errors.is_empty() { "cargo check failed".to_string() } else { errors })
"{}: {:?}", } else {
workspace[package].manifest.parent().display(), None
package_build_data, };
); Ok(errors)
// inject_cargo_env(package, package_build_data);
if let Some(out_dir) = &package_build_data.out_dir {
// NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
if let Some(out_dir) = out_dir.as_os_str().to_str().map(|s| s.to_owned()) {
package_build_data.envs.push(("OUT_DIR".to_string(), out_dir));
}
}
}
}
let mut errors = errors.into_inner();
if !output.status.success() {
if errors.is_empty() {
errors = "cargo check failed".to_string();
}
res.error = Some(errors);
}
Ok(res)
} }
pub fn error(&self) -> Option<&str> { pub fn error(&self) -> Option<&str> {
@ -255,11 +377,11 @@ impl WorkspaceBuildScripts {
} }
pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> { pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> {
self.outputs.get(idx)?.as_ref() self.outputs.get(idx)
} }
} }
// FIXME: File a better way to know if it is a dylib. // FIXME: Find a better way to know if it is a dylib.
fn is_dylib(path: &Utf8Path) -> bool { fn is_dylib(path: &Utf8Path) -> bool {
match path.extension().map(|e| e.to_string().to_lowercase()) { match path.extension().map(|e| e.to_string().to_lowercase()) {
None => false, None => false,

View file

@ -14,8 +14,8 @@ use rustc_hash::FxHashMap;
use serde::Deserialize; use serde::Deserialize;
use serde_json::from_value; use serde_json::from_value;
use crate::CfgOverrides; use crate::{utf8_stdout, InvocationLocation, ManifestPath};
use crate::{utf8_stdout, ManifestPath}; use crate::{CfgOverrides, InvocationStrategy};
/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
/// workspace. It pretty closely mirrors `cargo metadata` output. /// workspace. It pretty closely mirrors `cargo metadata` output.
@ -106,6 +106,8 @@ pub struct CargoConfig {
pub run_build_script_command: Option<Vec<String>>, pub run_build_script_command: Option<Vec<String>>,
/// Extra env vars to set when invoking the cargo command /// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, String>, pub extra_env: FxHashMap<String, String>,
pub invocation_strategy: InvocationStrategy,
pub invocation_location: InvocationLocation,
} }
impl CargoConfig { impl CargoConfig {
@ -283,8 +285,6 @@ impl CargoWorkspace {
} }
CargoFeatures::Selected { features, no_default_features } => { CargoFeatures::Selected { features, no_default_features } => {
if *no_default_features { if *no_default_features {
// FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
// https://github.com/oli-obk/cargo_metadata/issues/79
meta.features(CargoOpt::NoDefaultFeatures); meta.features(CargoOpt::NoDefaultFeatures);
} }
if !features.is_empty() { if !features.is_empty() {
@ -329,18 +329,21 @@ impl CargoWorkspace {
let ws_members = &meta.workspace_members; let ws_members = &meta.workspace_members;
meta.packages.sort_by(|a, b| a.id.cmp(&b.id)); meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
for meta_pkg in &meta.packages { for meta_pkg in meta.packages {
let cargo_metadata::Package { let cargo_metadata::Package {
id,
edition,
name, name,
manifest_path,
version, version,
metadata, id,
source,
targets: meta_targets,
features,
manifest_path,
repository, repository,
edition,
metadata,
.. ..
} = meta_pkg; } = meta_pkg;
let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default(); let meta = from_value::<PackageMetadata>(metadata).unwrap_or_default();
let edition = match edition { let edition = match edition {
cargo_metadata::Edition::E2015 => Edition::Edition2015, cargo_metadata::Edition::E2015 => Edition::Edition2015,
cargo_metadata::Edition::E2018 => Edition::Edition2018, cargo_metadata::Edition::E2018 => Edition::Edition2018,
@ -352,35 +355,36 @@ impl CargoWorkspace {
}; };
// We treat packages without source as "local" packages. That includes all members of // We treat packages without source as "local" packages. That includes all members of
// the current workspace, as well as any path dependency outside the workspace. // the current workspace, as well as any path dependency outside the workspace.
let is_local = meta_pkg.source.is_none(); let is_local = source.is_none();
let is_member = ws_members.contains(id); let is_member = ws_members.contains(&id);
let pkg = packages.alloc(PackageData { let pkg = packages.alloc(PackageData {
id: id.repr.clone(), id: id.repr.clone(),
name: name.clone(), name,
version: version.clone(), version,
manifest: AbsPathBuf::assert(PathBuf::from(&manifest_path)).try_into().unwrap(), manifest: AbsPathBuf::assert(manifest_path.into()).try_into().unwrap(),
targets: Vec::new(), targets: Vec::new(),
is_local, is_local,
is_member, is_member,
edition, edition,
repository: repository.clone(), repository,
dependencies: Vec::new(), dependencies: Vec::new(),
features: meta_pkg.features.clone().into_iter().collect(), features: features.into_iter().collect(),
active_features: Vec::new(), active_features: Vec::new(),
metadata: meta.rust_analyzer.unwrap_or_default(), metadata: meta.rust_analyzer.unwrap_or_default(),
}); });
let pkg_data = &mut packages[pkg]; let pkg_data = &mut packages[pkg];
pkg_by_id.insert(id, pkg); pkg_by_id.insert(id, pkg);
for meta_tgt in &meta_pkg.targets { for meta_tgt in meta_targets {
let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"]; let cargo_metadata::Target { name, kind, required_features, src_path, .. } =
meta_tgt;
let tgt = targets.alloc(TargetData { let tgt = targets.alloc(TargetData {
package: pkg, package: pkg,
name: meta_tgt.name.clone(), name,
root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)), root: AbsPathBuf::assert(src_path.into()),
kind: TargetKind::new(meta_tgt.kind.as_slice()), kind: TargetKind::new(&kind),
is_proc_macro, is_proc_macro: &*kind == ["proc-macro"],
required_features: meta_tgt.required_features.clone(), required_features,
}); });
pkg_data.targets.push(tgt); pkg_data.targets.push(tgt);
} }

View file

@ -67,7 +67,7 @@ impl ProjectManifest {
if path.file_name().unwrap_or_default() == "Cargo.toml" { if path.file_name().unwrap_or_default() == "Cargo.toml" {
return Ok(ProjectManifest::CargoToml(path)); return Ok(ProjectManifest::CargoToml(path));
} }
bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()) bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display());
} }
pub fn discover_single(path: &AbsPath) -> Result<ProjectManifest> { pub fn discover_single(path: &AbsPath) -> Result<ProjectManifest> {
@ -78,7 +78,7 @@ impl ProjectManifest {
}; };
if !candidates.is_empty() { if !candidates.is_empty() {
bail!("more than one project") bail!("more than one project");
} }
Ok(res) Ok(res)
} }
@ -157,3 +157,17 @@ fn utf8_stdout(mut cmd: Command) -> Result<String> {
let stdout = String::from_utf8(output.stdout)?; let stdout = String::from_utf8(output.stdout)?;
Ok(stdout.trim().to_string()) Ok(stdout.trim().to_string())
} }
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum InvocationStrategy {
Once,
#[default]
PerWorkspace,
}
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub enum InvocationLocation {
Root(AbsPathBuf),
#[default]
Workspace,
}

View file

@ -64,14 +64,15 @@ impl Sysroot {
self.by_name("proc_macro") self.by_name("proc_macro")
} }
pub fn crates<'a>(&'a self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + 'a { pub fn crates(&self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + '_ {
self.crates.iter().map(|(id, _data)| id) self.crates.iter().map(|(id, _data)| id)
} }
} }
impl Sysroot { impl Sysroot {
/// Attempts to discover the toolchain's sysroot from the given `dir`.
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> { pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> {
tracing::debug!("Discovering sysroot for {}", dir.display()); tracing::debug!("discovering sysroot for {}", dir.display());
let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
let sysroot_src_dir = let sysroot_src_dir =
discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?; discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?;
@ -83,11 +84,10 @@ impl Sysroot {
cargo_toml: &ManifestPath, cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, String>, extra_env: &FxHashMap<String, String>,
) -> Option<ManifestPath> { ) -> Option<ManifestPath> {
tracing::debug!("Discovering rustc source for {}", cargo_toml.display()); tracing::debug!("discovering rustc source for {}", cargo_toml.display());
let current_dir = cargo_toml.parent(); let current_dir = cargo_toml.parent();
discover_sysroot_dir(current_dir, extra_env) let sysroot_dir = discover_sysroot_dir(current_dir, extra_env).ok()?;
.ok() get_rustc_src(&sysroot_dir)
.and_then(|sysroot_dir| get_rustc_src(&sysroot_dir))
} }
pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> { pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> {
@ -189,6 +189,7 @@ fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
get_rust_src(sysroot_path) get_rust_src(sysroot_path)
} }
fn discover_sysroot_src_dir_or_add_component( fn discover_sysroot_src_dir_or_add_component(
sysroot_path: &AbsPathBuf, sysroot_path: &AbsPathBuf,
current_dir: &AbsPath, current_dir: &AbsPath,
@ -199,6 +200,7 @@ fn discover_sysroot_src_dir_or_add_component(
let mut rustup = Command::new(toolchain::rustup()); let mut rustup = Command::new(toolchain::rustup());
rustup.envs(extra_env); rustup.envs(extra_env);
rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
tracing::info!("adding rust-src component by {:?}", rustup);
utf8_stdout(rustup).ok()?; utf8_stdout(rustup).ok()?;
get_rust_src(sysroot_path) get_rust_src(sysroot_path)
}) })
@ -217,7 +219,7 @@ try installing the Rust source the same way you installed rustc",
fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> { fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml"); let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml");
let rustc_src = ManifestPath::try_from(rustc_src).ok()?; let rustc_src = ManifestPath::try_from(rustc_src).ok()?;
tracing::debug!("Checking for rustc source code: {}", rustc_src.display()); tracing::debug!("checking for rustc source code: {}", rustc_src.display());
if fs::metadata(&rustc_src).is_ok() { if fs::metadata(&rustc_src).is_ok() {
Some(rustc_src) Some(rustc_src)
} else { } else {
@ -227,7 +229,7 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> { fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
let rust_src = sysroot_path.join("lib/rustlib/src/rust/library"); let rust_src = sysroot_path.join("lib/rustlib/src/rust/library");
tracing::debug!("Checking sysroot: {}", rust_src.display()); tracing::debug!("checking sysroot library: {}", rust_src.display());
if fs::metadata(&rust_src).is_ok() { if fs::metadata(&rust_src).is_ok() {
Some(rust_src) Some(rust_src)
} else { } else {

View file

@ -2,7 +2,7 @@
//! metadata` or `rust-project.json`) into representation stored in the salsa //! metadata` or `rust-project.json`) into representation stored in the salsa
//! database -- `CrateGraph`. //! database -- `CrateGraph`.
use std::{collections::VecDeque, fmt, fs, process::Command}; use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc};
use anyhow::{format_err, Context, Result}; use anyhow::{format_err, Context, Result};
use base_db::{ use base_db::{
@ -21,8 +21,8 @@ use crate::{
cfg_flag::CfgFlag, cfg_flag::CfgFlag,
rustc_cfg, rustc_cfg,
sysroot::SysrootCrate, sysroot::SysrootCrate,
utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, Package, ProjectJson, ProjectManifest, utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package,
Sysroot, TargetKind, WorkspaceBuildScripts, ProjectJson, ProjectManifest, Sysroot, TargetKind, WorkspaceBuildScripts,
}; };
/// A set of cfg-overrides per crate. /// A set of cfg-overrides per crate.
@ -209,6 +209,9 @@ impl ProjectWorkspace {
), ),
None => None, None => None,
}; };
if let Some(sysroot) = &sysroot {
tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
}
let rustc_dir = match &config.rustc_source { let rustc_dir = match &config.rustc_source {
Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(), Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
@ -217,6 +220,9 @@ impl ProjectWorkspace {
} }
None => None, None => None,
}; };
if let Some(rustc_dir) = &rustc_dir {
tracing::info!(rustc_dir = %rustc_dir.display(), "Using rustc source");
}
let rustc = match rustc_dir { let rustc = match rustc_dir {
Some(rustc_dir) => Some({ Some(rustc_dir) => Some({
@ -277,6 +283,9 @@ impl ProjectWorkspace {
} }
(None, None) => None, (None, None) => None,
}; };
if let Some(sysroot) = &sysroot {
tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
}
let rustc_cfg = rustc_cfg::get(None, target, extra_env); let rustc_cfg = rustc_cfg::get(None, target, extra_env);
Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }) Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
@ -294,6 +303,7 @@ impl ProjectWorkspace {
Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
} }
/// Runs the build scripts for this [`ProjectWorkspace`].
pub fn run_build_scripts( pub fn run_build_scripts(
&self, &self,
config: &CargoConfig, config: &CargoConfig,
@ -301,8 +311,12 @@ impl ProjectWorkspace {
) -> Result<WorkspaceBuildScripts> { ) -> Result<WorkspaceBuildScripts> {
match self { match self {
ProjectWorkspace::Cargo { cargo, toolchain, .. } => { ProjectWorkspace::Cargo { cargo, toolchain, .. } => {
WorkspaceBuildScripts::run(config, cargo, progress, toolchain).with_context(|| { WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain)
format!("Failed to run build scripts for {}", &cargo.workspace_root().display()) .with_context(|| {
format!(
"Failed to run build scripts for {}",
&cargo.workspace_root().display()
)
}) })
} }
ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => { ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
@ -311,6 +325,49 @@ impl ProjectWorkspace {
} }
} }
/// Runs the build scripts for the given [`ProjectWorkspace`]s. Depending on the invocation
/// strategy this may run a single build process for all project workspaces.
pub fn run_all_build_scripts(
workspaces: &[ProjectWorkspace],
config: &CargoConfig,
progress: &dyn Fn(String),
) -> Vec<Result<WorkspaceBuildScripts>> {
if matches!(config.invocation_strategy, InvocationStrategy::PerWorkspace)
|| config.run_build_script_command.is_none()
{
return workspaces.iter().map(|it| it.run_build_scripts(config, progress)).collect();
}
let cargo_ws: Vec<_> = workspaces
.iter()
.filter_map(|it| match it {
ProjectWorkspace::Cargo { cargo, .. } => Some(cargo),
_ => None,
})
.collect();
let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) {
Ok(it) => Ok(it.into_iter()),
// io::Error is not Clone?
Err(e) => Err(Arc::new(e)),
};
workspaces
.iter()
.map(|it| match it {
ProjectWorkspace::Cargo { cargo, .. } => match outputs {
Ok(outputs) => Ok(outputs.next().unwrap()),
Err(e) => Err(e.clone()).with_context(|| {
format!(
"Failed to run build scripts for {}",
&cargo.workspace_root().display()
)
}),
},
_ => Ok(WorkspaceBuildScripts::default()),
})
.collect()
}
pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) { pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) {
match self { match self {
ProjectWorkspace::Cargo { build_scripts, .. } => *build_scripts = bs, ProjectWorkspace::Cargo { build_scripts, .. } => *build_scripts = bs,

View file

@ -118,7 +118,7 @@ impl CargoTargetSpec {
global_state_snapshot: &GlobalStateSnapshot, global_state_snapshot: &GlobalStateSnapshot,
file_id: FileId, file_id: FileId,
) -> Result<Option<CargoTargetSpec>> { ) -> Result<Option<CargoTargetSpec>> {
let crate_id = match &*global_state_snapshot.analysis.crate_for(file_id)? { let crate_id = match &*global_state_snapshot.analysis.crates_for(file_id)? {
&[crate_id, ..] => crate_id, &[crate_id, ..] => crate_id,
_ => return Ok(None), _ => return Ok(None),
}; };

View file

@ -8,8 +8,8 @@ use std::{
use crate::line_index::{LineEndings, LineIndex, OffsetEncoding}; use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
use hir::Name; use hir::Name;
use ide::{ use ide::{
LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile, TextRange, LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
TokenId, TokenStaticData,
}; };
use ide_db::LineIndexDatabase; use ide_db::LineIndexDatabase;
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
@ -75,7 +75,7 @@ impl flags::Scip {
let mut symbols_emitted: HashSet<TokenId> = HashSet::default(); let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new(); let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();
for file in si.files { for StaticIndexedFile { file_id, tokens, .. } in si.files {
let mut local_count = 0; let mut local_count = 0;
let mut new_local_symbol = || { let mut new_local_symbol = || {
let new_symbol = scip::types::Symbol::new_local(local_count); let new_symbol = scip::types::Symbol::new_local(local_count);
@ -84,7 +84,6 @@ impl flags::Scip {
new_symbol new_symbol
}; };
let StaticIndexedFile { file_id, tokens, .. } = file;
let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) { let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) {
Some(relative_path) => relative_path, Some(relative_path) => relative_path,
None => continue, None => continue,
@ -107,28 +106,20 @@ impl flags::Scip {
let mut occurrence = scip_types::Occurrence::default(); let mut occurrence = scip_types::Occurrence::default();
occurrence.range = text_range_to_scip_range(&line_index, range); occurrence.range = text_range_to_scip_range(&line_index, range);
occurrence.symbol = match tokens_to_symbol.get(&id) { occurrence.symbol = tokens_to_symbol
Some(symbol) => symbol.clone(), .entry(id)
None => { .or_insert_with(|| {
let symbol = match &token.moniker { let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol);
Some(moniker) => moniker_to_symbol(&moniker), scip::symbol::format_symbol(symbol)
None => new_local_symbol(), })
}; .clone();
let symbol = scip::symbol::format_symbol(symbol);
tokens_to_symbol.insert(id, symbol.clone());
symbol
}
};
if let Some(def) = token.definition { if let Some(def) = token.definition {
if def.range == range { if def.range == range {
occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32; occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32;
} }
if !symbols_emitted.contains(&id) { if symbols_emitted.insert(id) {
symbols_emitted.insert(id);
let mut symbol_info = scip_types::SymbolInformation::default(); let mut symbol_info = scip_types::SymbolInformation::default();
symbol_info.symbol = occurrence.symbol.clone(); symbol_info.symbol = occurrence.symbol.clone();
if let Some(hover) = &token.hover { if let Some(hover) = &token.hover {
@ -207,9 +198,11 @@ fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_ty
/// ///
/// Only returns a Symbol when it's a non-local symbol. /// Only returns a Symbol when it's a non-local symbol.
/// So if the visibility isn't outside of a document, then it will return None /// So if the visibility isn't outside of a document, then it will return None
fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol { fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> {
use scip_types::descriptor::Suffix::*; use scip_types::descriptor::Suffix::*;
let moniker = token.moniker.as_ref()?;
let package_name = moniker.package_information.name.clone(); let package_name = moniker.package_information.name.clone();
let version = moniker.package_information.version.clone(); let version = moniker.package_information.version.clone();
let descriptors = moniker let descriptors = moniker
@ -233,7 +226,7 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol {
}) })
.collect(); .collect();
scip_types::Symbol { Some(scip_types::Symbol {
scheme: "rust-analyzer".into(), scheme: "rust-analyzer".into(),
package: Some(scip_types::Package { package: Some(scip_types::Package {
manager: "cargo".to_string(), manager: "cargo".to_string(),
@ -244,19 +237,15 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol {
.into(), .into(),
descriptors, descriptors,
..Default::default() ..Default::default()
} })
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
use hir::Semantics; use ide::{AnalysisHost, FilePosition, StaticIndex, TextSize};
use ide::{AnalysisHost, FilePosition}; use ide_db::base_db::fixture::ChangeFixture;
use ide_db::defs::IdentClass;
use ide_db::{base_db::fixture::ChangeFixture, helpers::pick_best_token};
use scip::symbol::format_symbol; use scip::symbol::format_symbol;
use syntax::SyntaxKind::*;
use syntax::{AstNode, T};
fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) { fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) {
let mut host = AnalysisHost::default(); let mut host = AnalysisHost::default();
@ -273,53 +262,33 @@ mod test {
fn check_symbol(ra_fixture: &str, expected: &str) { fn check_symbol(ra_fixture: &str, expected: &str) {
let (host, position) = position(ra_fixture); let (host, position) = position(ra_fixture);
let analysis = host.analysis();
let si = StaticIndex::compute(&analysis);
let FilePosition { file_id, offset } = position; let FilePosition { file_id, offset } = position;
let db = host.raw_database(); let mut found_symbol = None;
let sema = &Semantics::new(db); for file in &si.files {
let file = sema.parse(file_id).syntax().clone(); if file.file_id != file_id {
let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { continue;
IDENT }
| INT_NUMBER for &(range, id) in &file.tokens {
| LIFETIME_IDENT if range.contains(offset - TextSize::from(1)) {
| T![self] let token = si.tokens.get(id).unwrap();
| T![super] found_symbol = token_to_symbol(token);
| T![crate] break;
| T![Self] }
| COMMENT => 2, }
kind if kind.is_trivia() => 0,
_ => 1,
})
.expect("OK OK");
let navs = sema
.descend_into_macros(original_token.clone())
.into_iter()
.filter_map(|token| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
it.into_iter().flat_map(|def| {
let module = def.module(db).unwrap();
let current_crate = module.krate();
match MonikerResult::from_def(sema.db, def, current_crate) {
Some(moniker_result) => Some(moniker_to_symbol(&moniker_result)),
None => None,
} }
})
})
})
.flatten()
.collect::<Vec<_>>();
if expected == "" { if expected == "" {
assert_eq!(0, navs.len(), "must have no symbols {:?}", navs); assert!(found_symbol.is_none(), "must have no symbols {:?}", found_symbol);
return; return;
} }
assert_eq!(1, navs.len(), "must have one symbol {:?}", navs); assert!(found_symbol.is_some(), "must have one symbol {:?}", found_symbol);
let res = found_symbol.unwrap();
let res = navs.get(0).unwrap(); let formatted = format_symbol(res);
let formatted = format_symbol(res.clone());
assert_eq!(formatted, expected); assert_eq!(formatted, expected);
} }

View file

@ -69,6 +69,19 @@ config_data! {
cargo_autoreload: bool = "true", cargo_autoreload: bool = "true",
/// Run build scripts (`build.rs`) for more precise code analysis. /// Run build scripts (`build.rs`) for more precise code analysis.
cargo_buildScripts_enable: bool = "true", cargo_buildScripts_enable: bool = "true",
/// Specifies the working directory for running build scripts.
/// - "workspace": run build scripts for a workspace in the workspace's root directory.
/// This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.
/// - "root": run build scripts in the project's root directory.
/// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
/// is set.
cargo_buildScripts_invocationLocation: InvocationLocation = "\"workspace\"",
/// Specifies the invocation strategy to use when running the build scripts command.
/// If `per_workspace` is set, the command will be executed for each workspace.
/// If `once` is set, the command will be executed once.
/// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
/// is set.
cargo_buildScripts_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
/// Override the command rust-analyzer uses to run build scripts and /// Override the command rust-analyzer uses to run build scripts and
/// build procedural macros. The command is required to output json /// build procedural macros. The command is required to output json
/// and should therefore include `--message-format=json` or a similar /// and should therefore include `--message-format=json` or a similar
@ -122,6 +135,20 @@ config_data! {
/// ///
/// Set to `"all"` to pass `--all-features` to Cargo. /// Set to `"all"` to pass `--all-features` to Cargo.
checkOnSave_features: Option<CargoFeaturesDef> = "null", checkOnSave_features: Option<CargoFeaturesDef> = "null",
/// Specifies the working directory for running checks.
/// - "workspace": run checks for workspaces in the corresponding workspaces' root directories.
// FIXME: Ideally we would support this in some way
/// This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.
/// - "root": run checks in the project's root directory.
/// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
/// is set.
checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"",
/// Specifies the invocation strategy to use when running the checkOnSave command.
/// If `per_workspace` is set, the command will be executed for each workspace.
/// If `once` is set, the command will be executed once.
/// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
/// is set.
checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
/// Whether to pass `--no-default-features` to Cargo. Defaults to /// Whether to pass `--no-default-features` to Cargo. Defaults to
/// `#rust-analyzer.cargo.noDefaultFeatures#`. /// `#rust-analyzer.cargo.noDefaultFeatures#`.
checkOnSave_noDefaultFeatures: Option<bool> = "null", checkOnSave_noDefaultFeatures: Option<bool> = "null",
@ -1056,6 +1083,16 @@ impl Config {
rustc_source, rustc_source,
unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()), unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper, wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
invocation_strategy: match self.data.cargo_buildScripts_invocationStrategy {
InvocationStrategy::Once => project_model::InvocationStrategy::Once,
InvocationStrategy::PerWorkspace => project_model::InvocationStrategy::PerWorkspace,
},
invocation_location: match self.data.cargo_buildScripts_invocationLocation {
InvocationLocation::Root => {
project_model::InvocationLocation::Root(self.root_path.clone())
}
InvocationLocation::Workspace => project_model::InvocationLocation::Workspace,
},
run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(), run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
extra_env: self.data.cargo_extraEnv.clone(), extra_env: self.data.cargo_extraEnv.clone(),
} }
@ -1087,6 +1124,18 @@ impl Config {
command, command,
args, args,
extra_env: self.check_on_save_extra_env(), extra_env: self.check_on_save_extra_env(),
invocation_strategy: match self.data.checkOnSave_invocationStrategy {
InvocationStrategy::Once => flycheck::InvocationStrategy::Once,
InvocationStrategy::PerWorkspace => {
flycheck::InvocationStrategy::PerWorkspace
}
},
invocation_location: match self.data.checkOnSave_invocationLocation {
InvocationLocation::Root => {
flycheck::InvocationLocation::Root(self.root_path.clone())
}
InvocationLocation::Workspace => flycheck::InvocationLocation::Workspace,
},
} }
} }
Some(_) | None => FlycheckConfig::CargoCommand { Some(_) | None => FlycheckConfig::CargoCommand {
@ -1587,6 +1636,20 @@ enum CargoFeaturesDef {
Selected(Vec<String>), Selected(Vec<String>),
} }
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum InvocationStrategy {
Once,
PerWorkspace,
}
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum InvocationLocation {
Root,
Workspace,
}
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
#[serde(untagged)] #[serde(untagged)]
enum LifetimeElisionDef { enum LifetimeElisionDef {
@ -2001,6 +2064,22 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"Render annotations above the whole item, including documentation comments and attributes." "Render annotations above the whole item, including documentation comments and attributes."
], ],
}, },
"InvocationStrategy" => set! {
"type": "string",
"enum": ["per_workspace", "once"],
"enumDescriptions": [
"The command will be executed for each workspace.",
"The command will be executed once."
],
},
"InvocationLocation" => set! {
"type": "string",
"enum": ["workspace", "root"],
"enumDescriptions": [
"The command will be executed in the corresponding workspace root.",
"The command will be executed in the project root."
],
},
_ => panic!("missing entry for {}: {}", ty, default), _ => panic!("missing entry for {}: {}", ty, default),
} }

View file

@ -52,7 +52,7 @@ impl<'a> RequestDispatcher<'a> {
let _pctx = stdx::panic_context::enter(panic_context); let _pctx = stdx::panic_context::enter(panic_context);
f(self.global_state, params) f(self.global_state, params)
}; };
if let Ok(response) = result_to_response::<R>(req.id.clone(), result) { if let Ok(response) = result_to_response::<R>(req.id, result) {
self.global_state.respond(response); self.global_state.respond(response);
} }
@ -80,7 +80,7 @@ impl<'a> RequestDispatcher<'a> {
f(global_state_snapshot, params) f(global_state_snapshot, params)
}); });
if let Ok(response) = thread_result_to_response::<R>(req.id.clone(), result) { if let Ok(response) = thread_result_to_response::<R>(req.id, result) {
self.global_state.respond(response); self.global_state.respond(response);
} }

View file

@ -64,7 +64,7 @@ pub(crate) struct GlobalState {
pub(crate) source_root_config: SourceRootConfig, pub(crate) source_root_config: SourceRootConfig,
pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>, pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,
pub(crate) flycheck: Vec<FlycheckHandle>, pub(crate) flycheck: Arc<[FlycheckHandle]>,
pub(crate) flycheck_sender: Sender<flycheck::Message>, pub(crate) flycheck_sender: Sender<flycheck::Message>,
pub(crate) flycheck_receiver: Receiver<flycheck::Message>, pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
@ -117,6 +117,7 @@ pub(crate) struct GlobalStateSnapshot {
vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>, vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>, pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
pub(crate) proc_macros_loaded: bool, pub(crate) proc_macros_loaded: bool,
pub(crate) flycheck: Arc<[FlycheckHandle]>,
} }
impl std::panic::UnwindSafe for GlobalStateSnapshot {} impl std::panic::UnwindSafe for GlobalStateSnapshot {}
@ -155,7 +156,7 @@ impl GlobalState {
source_root_config: SourceRootConfig::default(), source_root_config: SourceRootConfig::default(),
proc_macro_clients: vec![], proc_macro_clients: vec![],
flycheck: Vec::new(), flycheck: Arc::new([]),
flycheck_sender, flycheck_sender,
flycheck_receiver, flycheck_receiver,
@ -295,6 +296,7 @@ impl GlobalState {
mem_docs: self.mem_docs.clone(), mem_docs: self.mem_docs.clone(),
semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache), semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
proc_macros_loaded: !self.fetch_build_data_queue.last_op_result().0.is_empty(), proc_macros_loaded: !self.fetch_build_data_queue.last_op_result().0.is_empty(),
flycheck: self.flycheck.clone(),
} }
} }
@ -398,6 +400,10 @@ impl GlobalStateSnapshot {
url_from_abs_path(path) url_from_abs_path(path)
} }
pub(crate) fn file_id_to_file_path(&self, file_id: FileId) -> vfs::VfsPath {
self.vfs.read().0.file_path(file_id)
}
pub(crate) fn cargo_target_for_crate_root( pub(crate) fn cargo_target_for_crate_root(
&self, &self,
crate_id: CrateId, crate_id: CrateId,

View file

@ -658,7 +658,7 @@ pub(crate) fn handle_parent_module(
// check if invoked at the crate root // check if invoked at the crate root
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?; let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let crate_id = match snap.analysis.crate_for(file_id)?.first() { let crate_id = match snap.analysis.crates_for(file_id)?.first() {
Some(&crate_id) => crate_id, Some(&crate_id) => crate_id,
None => return Ok(None), None => return Ok(None),
}; };
@ -1782,7 +1782,15 @@ fn run_rustfmt(
) -> Result<Option<Vec<lsp_types::TextEdit>>> { ) -> Result<Option<Vec<lsp_types::TextEdit>>> {
let file_id = from_proto::file_id(snap, &text_document.uri)?; let file_id = from_proto::file_id(snap, &text_document.uri)?;
let file = snap.analysis.file_text(file_id)?; let file = snap.analysis.file_text(file_id)?;
let crate_ids = snap.analysis.crate_for(file_id)?;
// find the edition of the package the file belongs to
// (if it belongs to multiple we'll just pick the first one and pray)
let edition = snap
.analysis
.relevant_crates_for(file_id)?
.into_iter()
.find_map(|crate_id| snap.cargo_target_for_crate_root(crate_id))
.map(|(ws, target)| ws[ws[target].package].edition);
let line_index = snap.file_line_index(file_id)?; let line_index = snap.file_line_index(file_id)?;
@ -1808,9 +1816,7 @@ fn run_rustfmt(
); );
} }
} }
if let Some(&crate_id) = crate_ids.first() { if let Some(edition) = edition {
// Assume all crates are in the same edition
let edition = snap.analysis.crate_edition(crate_id)?;
cmd.arg("--edition"); cmd.arg("--edition");
cmd.arg(edition.to_string()); cmd.arg(edition.to_string());
} }

View file

@ -87,6 +87,7 @@ impl GlobalState {
state: Progress, state: Progress,
message: Option<String>, message: Option<String>,
fraction: Option<f64>, fraction: Option<f64>,
cancel_token: Option<String>,
) { ) {
if !self.config.work_done_progress() { if !self.config.work_done_progress() {
return; return;
@ -95,7 +96,10 @@ impl GlobalState {
assert!((0.0..=1.0).contains(&f)); assert!((0.0..=1.0).contains(&f));
(f * 100.0) as u32 (f * 100.0) as u32
}); });
let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", title)); let cancellable = Some(cancel_token.is_some());
let token = lsp_types::ProgressToken::String(
cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{}", title)),
);
let work_done_progress = match state { let work_done_progress = match state {
Progress::Begin => { Progress::Begin => {
self.send_request::<lsp_types::request::WorkDoneProgressCreate>( self.send_request::<lsp_types::request::WorkDoneProgressCreate>(
@ -105,14 +109,14 @@ impl GlobalState {
lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin { lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin {
title: title.into(), title: title.into(),
cancellable: None, cancellable,
message, message,
percentage, percentage,
}) })
} }
Progress::Report => { Progress::Report => {
lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport {
cancellable: None, cancellable,
message, message,
percentage, percentage,
}) })

View file

@ -10,7 +10,7 @@ use std::{
use always_assert::always; use always_assert::always;
use crossbeam_channel::{select, Receiver}; use crossbeam_channel::{select, Receiver};
use flycheck::FlycheckHandle; use flycheck::FlycheckHandle;
use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; use ide_db::base_db::{SourceDatabaseExt, VfsPath};
use itertools::Itertools; use itertools::Itertools;
use lsp_server::{Connection, Notification, Request}; use lsp_server::{Connection, Notification, Request};
use lsp_types::notification::Notification as _; use lsp_types::notification::Notification as _;
@ -191,7 +191,7 @@ impl GlobalState {
// NOTE: don't count blocking select! call as a loop-turn time // NOTE: don't count blocking select! call as a loop-turn time
let _p = profile::span("GlobalState::handle_event"); let _p = profile::span("GlobalState::handle_event");
tracing::debug!("handle_event({:?})", event); tracing::debug!("{:?} handle_event({:?})", loop_start, event);
let task_queue_len = self.task_pool.handle.len(); let task_queue_len = self.task_pool.handle.len();
if task_queue_len > 0 { if task_queue_len > 0 {
tracing::info!("task queue len: {}", task_queue_len); tracing::info!("task queue len: {}", task_queue_len);
@ -257,7 +257,7 @@ impl GlobalState {
} }
}; };
self.report_progress("Indexing", state, message, Some(fraction)); self.report_progress("Indexing", state, message, Some(fraction), None);
} }
} }
Event::Vfs(message) => { Event::Vfs(message) => {
@ -465,7 +465,7 @@ impl GlobalState {
} }
}; };
self.report_progress("Fetching", state, msg, None); self.report_progress("Fetching", state, msg, None, None);
} }
Task::FetchBuildData(progress) => { Task::FetchBuildData(progress) => {
let (state, msg) = match progress { let (state, msg) = match progress {
@ -481,7 +481,7 @@ impl GlobalState {
}; };
if let Some(state) = state { if let Some(state) = state {
self.report_progress("Loading", state, msg, None); self.report_progress("Loading", state, msg, None, None);
} }
} }
} }
@ -518,6 +518,7 @@ impl GlobalState {
state, state,
Some(format!("{}/{}", n_done, n_total)), Some(format!("{}/{}", n_done, n_total)),
Some(Progress::fraction(n_done, n_total)), Some(Progress::fraction(n_done, n_total)),
None,
) )
} }
} }
@ -542,7 +543,10 @@ impl GlobalState {
diag.fix, diag.fix,
), ),
Err(err) => { Err(err) => {
tracing::error!("File with cargo diagnostic not found in VFS: {}", err); tracing::error!(
"flycheck {id}: File with cargo diagnostic not found in VFS: {}",
err
);
} }
}; };
} }
@ -584,7 +588,13 @@ impl GlobalState {
} else { } else {
format!("cargo check (#{})", id + 1) format!("cargo check (#{})", id + 1)
}; };
self.report_progress(&title, state, message, None); self.report_progress(
&title,
state,
message,
None,
Some(format!("rust-analyzer/checkOnSave/{}", id)),
);
} }
} }
} }
@ -698,7 +708,16 @@ impl GlobalState {
this.cancel(id); this.cancel(id);
Ok(()) Ok(())
})? })?
.on::<lsp_types::notification::WorkDoneProgressCancel>(|_this, _params| { .on::<lsp_types::notification::WorkDoneProgressCancel>(|this, params| {
if let lsp_types::NumberOrString::String(s) = &params.token {
if let Some(id) = s.strip_prefix("rust-analyzer/checkOnSave/") {
if let Ok(id) = u32::from_str_radix(id, 10) {
if let Some(flycheck) = this.flycheck.get(id as usize) {
flycheck.cancel();
}
}
}
}
// Just ignore this. It is OK to continue sending progress // Just ignore this. It is OK to continue sending progress
// notifications for this token, as the client can't know when // notifications for this token, as the client can't know when
// we accepted notification. // we accepted notification.
@ -711,7 +730,7 @@ impl GlobalState {
.insert(path.clone(), DocumentData::new(params.text_document.version)) .insert(path.clone(), DocumentData::new(params.text_document.version))
.is_err(); .is_err();
if already_exists { if already_exists {
tracing::error!("duplicate DidOpenTextDocument: {}", path) tracing::error!("duplicate DidOpenTextDocument: {}", path);
} }
this.vfs this.vfs
.write() .write()
@ -758,23 +777,28 @@ impl GlobalState {
Ok(()) Ok(())
})? })?
.on::<lsp_types::notification::DidSaveTextDocument>(|this, params| { .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
let mut updated = false;
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) { if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
let (vfs, _) = &*this.vfs.read(); // Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
this.fetch_workspaces_queue
.request_op(format!("DidSaveTextDocument {}", abs_path.display()));
}
}
let file_id = this.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
let world = this.snapshot();
let mut updated = false;
let task = move || -> std::result::Result<(), ide::Cancelled> {
// Trigger flychecks for all workspaces that depend on the saved file // Trigger flychecks for all workspaces that depend on the saved file
if let Some(file_id) = vfs.file_id(&vfs_path) {
let analysis = this.analysis_host.analysis();
// Crates containing or depending on the saved file // Crates containing or depending on the saved file
let crate_ids: Vec<_> = analysis let crate_ids: Vec<_> = world
.crate_for(file_id)? .analysis
.crates_for(file_id)?
.into_iter() .into_iter()
.flat_map(|id| { .flat_map(|id| world.analysis.transitive_rev_deps(id))
this.analysis_host .flatten()
.raw_database()
.crate_graph()
.transitive_rev_deps(id)
})
.sorted() .sorted()
.unique() .unique()
.collect(); .collect();
@ -782,10 +806,14 @@ impl GlobalState {
let crate_root_paths: Vec<_> = crate_ids let crate_root_paths: Vec<_> = crate_ids
.iter() .iter()
.filter_map(|&crate_id| { .filter_map(|&crate_id| {
analysis world
.analysis
.crate_root(crate_id) .crate_root(crate_id)
.map(|file_id| { .map(|file_id| {
vfs.file_path(file_id).as_path().map(ToOwned::to_owned) world
.file_id_to_file_path(file_id)
.as_path()
.map(ToOwned::to_owned)
}) })
.transpose() .transpose()
}) })
@ -795,7 +823,7 @@ impl GlobalState {
// Find all workspaces that have at least one target containing the saved file // Find all workspaces that have at least one target containing the saved file
let workspace_ids = let workspace_ids =
this.workspaces.iter().enumerate().filter(|(_, ws)| match ws { world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
project_model::ProjectWorkspace::Cargo { cargo, .. } => { project_model::ProjectWorkspace::Cargo { cargo, .. } => {
cargo.packages().any(|pkg| { cargo.packages().any(|pkg| {
cargo[pkg].targets.iter().any(|&it| { cargo[pkg].targets.iter().any(|&it| {
@ -803,14 +831,16 @@ impl GlobalState {
}) })
}) })
} }
project_model::ProjectWorkspace::Json { project, .. } => project project_model::ProjectWorkspace::Json { project, .. } => {
.crates() project.crates().any(|(c, _)| {
.any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)), crate_ids.iter().any(|&crate_id| crate_id == c)
})
}
project_model::ProjectWorkspace::DetachedFiles { .. } => false, project_model::ProjectWorkspace::DetachedFiles { .. } => false,
}); });
// Find and trigger corresponding flychecks // Find and trigger corresponding flychecks
for flycheck in &this.flycheck { for flycheck in world.flycheck.iter() {
for (id, _) in workspace_ids.clone() { for (id, _) in workspace_ids.clone() {
if id == flycheck.id() { if id == flycheck.id() {
updated = true; updated = true;
@ -819,23 +849,27 @@ impl GlobalState {
} }
} }
} }
// No specific flycheck was triggered, so let's trigger all of them.
if !updated {
for flycheck in world.flycheck.iter() {
flycheck.restart();
} }
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
this.fetch_workspaces_queue
.request_op(format!("DidSaveTextDocument {}", abs_path.display()));
} }
Ok(())
};
this.task_pool.handle.spawn_with_sender(move |_| {
if let Err(e) = std::panic::catch_unwind(task) {
tracing::error!("DidSaveTextDocument flycheck task panicked: {e:?}")
}
});
return Ok(());
} }
} }
// No specific flycheck was triggered, so let's trigger all of them. // No specific flycheck was triggered, so let's trigger all of them.
if !updated { for flycheck in this.flycheck.iter() {
for flycheck in &this.flycheck {
flycheck.restart(); flycheck.restart();
} }
}
Ok(()) Ok(())
})? })?
.on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| { .on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| {

View file

@ -175,10 +175,8 @@ impl GlobalState {
sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap() sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
} }
}; };
let mut res = Vec::new(); let res = ProjectWorkspace::run_all_build_scripts(&workspaces, &config, &progress);
for ws in workspaces.iter() {
res.push(ws.run_build_scripts(&config, &progress));
}
sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap(); sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
}); });
} }
@ -468,15 +466,27 @@ impl GlobalState {
let config = match self.config.flycheck() { let config = match self.config.flycheck() {
Some(it) => it, Some(it) => it,
None => { None => {
self.flycheck = Vec::new(); self.flycheck = Arc::new([]);
self.diagnostics.clear_check_all(); self.diagnostics.clear_check_all();
return; return;
} }
}; };
let sender = self.flycheck_sender.clone(); let sender = self.flycheck_sender.clone();
self.flycheck = self let invocation_strategy = match config {
.workspaces FlycheckConfig::CargoCommand { .. } => flycheck::InvocationStrategy::PerWorkspace,
FlycheckConfig::CustomCommand { invocation_strategy, .. } => invocation_strategy,
};
self.flycheck = match invocation_strategy {
flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn(
0,
Box::new(move |msg| sender.send(msg).unwrap()),
config.clone(),
self.config.root_path().clone(),
)],
flycheck::InvocationStrategy::PerWorkspace => {
self.workspaces
.iter() .iter()
.enumerate() .enumerate()
.filter_map(|(id, w)| match w { .filter_map(|(id, w)| match w {
@ -500,7 +510,10 @@ impl GlobalState {
root.to_path_buf(), root.to_path_buf(),
) )
}) })
.collect(); .collect()
}
}
.into();
} }
} }

View file

@ -645,7 +645,7 @@ impl ast::RecordPatFieldList {
} }
fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken { fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
let comma = match syntax match syntax
.siblings_with_tokens(Direction::Next) .siblings_with_tokens(Direction::Next)
.filter_map(|it| it.into_token()) .filter_map(|it| it.into_token())
.find(|it| it.kind() == T![,]) .find(|it| it.kind() == T![,])
@ -656,8 +656,7 @@ fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
ted::insert(Position::after(syntax), &comma); ted::insert(Position::after(syntax), &comma);
comma comma
} }
}; }
comma
} }
impl ast::StmtList { impl ast::StmtList {

View file

@ -92,7 +92,7 @@ impl<T> Parse<T> {
SyntaxNode::new_root(self.green.clone()) SyntaxNode::new_root(self.green.clone())
} }
pub fn errors(&self) -> &[SyntaxError] { pub fn errors(&self) -> &[SyntaxError] {
&*self.errors &self.errors
} }
} }

View file

@ -27,7 +27,6 @@
//! generator: pin //! generator: pin
//! hash: //! hash:
//! index: sized //! index: sized
//! infallible:
//! iterator: option //! iterator: option
//! iterators: iterator, fn //! iterators: iterator, fn
//! option: //! option:
@ -37,7 +36,7 @@
//! result: //! result:
//! sized: //! sized:
//! slice: //! slice:
//! try: infallible //! try:
//! unsize: sized //! unsize: sized
pub mod marker { pub mod marker {
@ -151,9 +150,6 @@ pub mod convert {
fn as_ref(&self) -> &T; fn as_ref(&self) -> &T;
} }
// endregion:as_ref // endregion:as_ref
// region:infallible
pub enum Infallible {}
// endregion:infallible
} }
pub mod ops { pub mod ops {
@ -330,7 +326,7 @@ pub mod ops {
Continue(C), Continue(C),
Break(B), Break(B),
} }
pub trait FromResidual<R = <Self as Try>::Residual> { pub trait FromResidual<R = Self::Residual> {
#[lang = "from_residual"] #[lang = "from_residual"]
fn from_residual(residual: R) -> Self; fn from_residual(residual: R) -> Self;
} }
@ -346,13 +342,13 @@ pub mod ops {
impl<B, C> Try for ControlFlow<B, C> { impl<B, C> Try for ControlFlow<B, C> {
type Output = C; type Output = C;
type Residual = ControlFlow<B, crate::convert::Infallible>; type Residual = ControlFlow<B, convert::Infallible>;
fn from_output(output: Self::Output) -> Self {} fn from_output(output: Self::Output) -> Self {}
fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {} fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {}
} }
impl<B, C> FromResidual for ControlFlow<B, C> { impl<B, C> FromResidual for ControlFlow<B, C> {
fn from_residual(residual: ControlFlow<B, crate::convert::Infallible>) -> Self {} fn from_residual(residual: ControlFlow<B, convert::Infallible>) -> Self {}
} }
} }
pub use self::try_::{ControlFlow, FromResidual, Try}; pub use self::try_::{ControlFlow, FromResidual, Try};
@ -473,33 +469,6 @@ pub mod option {
} }
} }
} }
// region:try
impl<T> crate::ops::Try for Option<T> {
type Output = T;
type Residual = Option<crate::convert::Infallible>;
#[inline]
fn from_output(output: Self::Output) -> Self {
Some(output)
}
#[inline]
fn branch(self) -> crate::ops::ControlFlow<Self::Residual, Self::Output> {
match self {
Some(v) => crate::ops::ControlFlow::Continue(v),
None => crate::ops::ControlFlow::Break(None),
}
}
}
impl<T> crate::ops::FromResidual for Option<T> {
#[inline]
fn from_residual(residual: Option<crate::convert::Infallible>) -> Self {
match residual {
None => None,
}
}
}
// endregion:try
} }
// endregion:option // endregion:option

View file

@ -24,6 +24,25 @@ Automatically refresh project info via `cargo metadata` on
-- --
Run build scripts (`build.rs`) for more precise code analysis. Run build scripts (`build.rs`) for more precise code analysis.
-- --
[[rust-analyzer.cargo.buildScripts.invocationLocation]]rust-analyzer.cargo.buildScripts.invocationLocation (default: `"workspace"`)::
+
--
Specifies the working directory for running build scripts.
- "workspace": run build scripts for a workspace in the workspace's root directory.
This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.
- "root": run build scripts in the project's root directory.
This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
is set.
--
[[rust-analyzer.cargo.buildScripts.invocationStrategy]]rust-analyzer.cargo.buildScripts.invocationStrategy (default: `"per_workspace"`)::
+
--
Specifies the invocation strategy to use when running the build scripts command.
If `per_workspace` is set, the command will be executed for each workspace.
If `once` is set, the command will be executed once.
This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
is set.
--
[[rust-analyzer.cargo.buildScripts.overrideCommand]]rust-analyzer.cargo.buildScripts.overrideCommand (default: `null`):: [[rust-analyzer.cargo.buildScripts.overrideCommand]]rust-analyzer.cargo.buildScripts.overrideCommand (default: `null`)::
+ +
-- --
@ -118,6 +137,25 @@ List of features to activate. Defaults to
Set to `"all"` to pass `--all-features` to Cargo. Set to `"all"` to pass `--all-features` to Cargo.
-- --
[[rust-analyzer.checkOnSave.invocationLocation]]rust-analyzer.checkOnSave.invocationLocation (default: `"workspace"`)::
+
--
Specifies the working directory for running checks.
- "workspace": run checks for workspaces in the corresponding workspaces' root directories.
This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.
- "root": run checks in the project's root directory.
This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
is set.
--
[[rust-analyzer.checkOnSave.invocationStrategy]]rust-analyzer.checkOnSave.invocationStrategy (default: `"per_workspace"`)::
+
--
Specifies the invocation strategy to use when running the checkOnSave command.
If `per_workspace` is set, the command will be executed for each workspace.
If `once` is set, the command will be executed once.
This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
is set.
--
[[rust-analyzer.checkOnSave.noDefaultFeatures]]rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`):: [[rust-analyzer.checkOnSave.noDefaultFeatures]]rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`)::
+ +
-- --

View file

@ -11,7 +11,7 @@
"dependencies": { "dependencies": {
"d3": "^7.6.1", "d3": "^7.6.1",
"d3-graphviz": "^4.1.1", "d3-graphviz": "^4.1.1",
"vscode-languageclient": "^8.0.0-next.14" "vscode-languageclient": "^8.0.2"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "~16.11.7", "@types/node": "~16.11.7",
@ -3791,39 +3791,39 @@
} }
}, },
"node_modules/vscode-jsonrpc": { "node_modules/vscode-jsonrpc": {
"version": "8.0.0-next.7", "version": "8.0.2",
"resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.0-next.7.tgz", "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.2.tgz",
"integrity": "sha512-JX/F31LEsims0dAlOTKFE4E+AJMiJvdRSRViifFJSqSN7EzeYyWlfuDchF7g91oRNPZOIWfibTkDf3/UMsQGzQ==", "integrity": "sha512-RY7HwI/ydoC1Wwg4gJ3y6LpU9FJRZAUnTYMXthqhFXXu77ErDd/xkREpGuk4MyYkk4a+XDWAMqe0S3KkelYQEQ==",
"engines": { "engines": {
"node": ">=14.0.0" "node": ">=14.0.0"
} }
}, },
"node_modules/vscode-languageclient": { "node_modules/vscode-languageclient": {
"version": "8.0.0-next.14", "version": "8.0.2",
"resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.0-next.14.tgz", "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.2.tgz",
"integrity": "sha512-NqjkOuDTMu8uo+PhoMsV72VO9Gd3wBi/ZpOrkRUOrWKQo7yUdiIw183g8wjH8BImgbK9ZP51HM7TI0ZhCnI1Mw==", "integrity": "sha512-lHlthJtphG9gibGb/y72CKqQUxwPsMXijJVpHEC2bvbFqxmkj9LwQ3aGU9dwjBLqsX1S4KjShYppLvg1UJDF/Q==",
"dependencies": { "dependencies": {
"minimatch": "^3.0.4", "minimatch": "^3.0.4",
"semver": "^7.3.5", "semver": "^7.3.5",
"vscode-languageserver-protocol": "3.17.0-next.16" "vscode-languageserver-protocol": "3.17.2"
}, },
"engines": { "engines": {
"vscode": "^1.66.0" "vscode": "^1.67.0"
} }
}, },
"node_modules/vscode-languageserver-protocol": { "node_modules/vscode-languageserver-protocol": {
"version": "3.17.0-next.16", "version": "3.17.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.0-next.16.tgz", "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.2.tgz",
"integrity": "sha512-tx4DnXw9u3N7vw+bx6n2NKp6FoxoNwiP/biH83AS30I2AnTGyLd7afSeH6Oewn2E8jvB7K15bs12sMppkKOVeQ==", "integrity": "sha512-8kYisQ3z/SQ2kyjlNeQxbkkTNmVFoQCqkmGrzLH6A9ecPlgTbp3wDTnUNqaUxYr4vlAcloxx8zwy7G5WdguYNg==",
"dependencies": { "dependencies": {
"vscode-jsonrpc": "8.0.0-next.7", "vscode-jsonrpc": "8.0.2",
"vscode-languageserver-types": "3.17.0-next.9" "vscode-languageserver-types": "3.17.2"
} }
}, },
"node_modules/vscode-languageserver-types": { "node_modules/vscode-languageserver-types": {
"version": "3.17.0-next.9", "version": "3.17.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.0-next.9.tgz", "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz",
"integrity": "sha512-9/PeDNPYduaoXRUzYpqmu4ZV9L01HGo0wH9FUt+sSHR7IXwA7xoXBfNUlv8gB9H0D2WwEmMomSy1NmhjKQyn3A==" "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA=="
}, },
"node_modules/which": { "node_modules/which": {
"version": "2.0.2", "version": "2.0.2",
@ -6634,33 +6634,33 @@
} }
}, },
"vscode-jsonrpc": { "vscode-jsonrpc": {
"version": "8.0.0-next.7", "version": "8.0.2",
"resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.0-next.7.tgz", "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.2.tgz",
"integrity": "sha512-JX/F31LEsims0dAlOTKFE4E+AJMiJvdRSRViifFJSqSN7EzeYyWlfuDchF7g91oRNPZOIWfibTkDf3/UMsQGzQ==" "integrity": "sha512-RY7HwI/ydoC1Wwg4gJ3y6LpU9FJRZAUnTYMXthqhFXXu77ErDd/xkREpGuk4MyYkk4a+XDWAMqe0S3KkelYQEQ=="
}, },
"vscode-languageclient": { "vscode-languageclient": {
"version": "8.0.0-next.14", "version": "8.0.2",
"resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.0-next.14.tgz", "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.2.tgz",
"integrity": "sha512-NqjkOuDTMu8uo+PhoMsV72VO9Gd3wBi/ZpOrkRUOrWKQo7yUdiIw183g8wjH8BImgbK9ZP51HM7TI0ZhCnI1Mw==", "integrity": "sha512-lHlthJtphG9gibGb/y72CKqQUxwPsMXijJVpHEC2bvbFqxmkj9LwQ3aGU9dwjBLqsX1S4KjShYppLvg1UJDF/Q==",
"requires": { "requires": {
"minimatch": "^3.0.4", "minimatch": "^3.0.4",
"semver": "^7.3.5", "semver": "^7.3.5",
"vscode-languageserver-protocol": "3.17.0-next.16" "vscode-languageserver-protocol": "3.17.2"
} }
}, },
"vscode-languageserver-protocol": { "vscode-languageserver-protocol": {
"version": "3.17.0-next.16", "version": "3.17.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.0-next.16.tgz", "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.2.tgz",
"integrity": "sha512-tx4DnXw9u3N7vw+bx6n2NKp6FoxoNwiP/biH83AS30I2AnTGyLd7afSeH6Oewn2E8jvB7K15bs12sMppkKOVeQ==", "integrity": "sha512-8kYisQ3z/SQ2kyjlNeQxbkkTNmVFoQCqkmGrzLH6A9ecPlgTbp3wDTnUNqaUxYr4vlAcloxx8zwy7G5WdguYNg==",
"requires": { "requires": {
"vscode-jsonrpc": "8.0.0-next.7", "vscode-jsonrpc": "8.0.2",
"vscode-languageserver-types": "3.17.0-next.9" "vscode-languageserver-types": "3.17.2"
} }
}, },
"vscode-languageserver-types": { "vscode-languageserver-types": {
"version": "3.17.0-next.9", "version": "3.17.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.0-next.9.tgz", "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz",
"integrity": "sha512-9/PeDNPYduaoXRUzYpqmu4ZV9L01HGo0wH9FUt+sSHR7IXwA7xoXBfNUlv8gB9H0D2WwEmMomSy1NmhjKQyn3A==" "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA=="
}, },
"which": { "which": {
"version": "2.0.2", "version": "2.0.2",

View file

@ -37,7 +37,7 @@
"dependencies": { "dependencies": {
"d3": "^7.6.1", "d3": "^7.6.1",
"d3-graphviz": "^4.1.1", "d3-graphviz": "^4.1.1",
"vscode-languageclient": "^8.0.0-next.14" "vscode-languageclient": "^8.0.2"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "~16.11.7", "@types/node": "~16.11.7",
@ -60,6 +60,7 @@
"onCommand:rust-analyzer.analyzerStatus", "onCommand:rust-analyzer.analyzerStatus",
"onCommand:rust-analyzer.memoryUsage", "onCommand:rust-analyzer.memoryUsage",
"onCommand:rust-analyzer.reloadWorkspace", "onCommand:rust-analyzer.reloadWorkspace",
"onCommand:rust-analyzer.startServer",
"workspaceContains:*/Cargo.toml", "workspaceContains:*/Cargo.toml",
"workspaceContains:*/rust-project.json" "workspaceContains:*/rust-project.json"
], ],
@ -191,6 +192,16 @@
"title": "Restart server", "title": "Restart server",
"category": "rust-analyzer" "category": "rust-analyzer"
}, },
{
"command": "rust-analyzer.startServer",
"title": "Start server",
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.stopServer",
"title": "Stop server",
"category": "rust-analyzer"
},
{ {
"command": "rust-analyzer.onEnter", "command": "rust-analyzer.onEnter",
"title": "Enhanced enter key", "title": "Enhanced enter key",
@ -421,6 +432,32 @@
"default": true, "default": true,
"type": "boolean" "type": "boolean"
}, },
"rust-analyzer.cargo.buildScripts.invocationLocation": {
"markdownDescription": "Specifies the working directory for running build scripts.\n- \"workspace\": run build scripts for a workspace in the workspace's root directory.\n This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.\n- \"root\": run build scripts in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
"default": "workspace",
"type": "string",
"enum": [
"workspace",
"root"
],
"enumDescriptions": [
"The command will be executed in the corresponding workspace root.",
"The command will be executed in the project root."
]
},
"rust-analyzer.cargo.buildScripts.invocationStrategy": {
"markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
"default": "per_workspace",
"type": "string",
"enum": [
"per_workspace",
"once"
],
"enumDescriptions": [
"The command will be executed for each workspace.",
"The command will be executed once."
]
},
"rust-analyzer.cargo.buildScripts.overrideCommand": { "rust-analyzer.cargo.buildScripts.overrideCommand": {
"markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.", "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.",
"default": null, "default": null,
@ -546,6 +583,32 @@
} }
] ]
}, },
"rust-analyzer.checkOnSave.invocationLocation": {
"markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
"default": "workspace",
"type": "string",
"enum": [
"workspace",
"root"
],
"enumDescriptions": [
"The command will be executed in the corresponding workspace root.",
"The command will be executed in the project root."
]
},
"rust-analyzer.checkOnSave.invocationStrategy": {
"markdownDescription": "Specifies the invocation strategy to use when running the checkOnSave command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
"default": "per_workspace",
"type": "string",
"enum": [
"per_workspace",
"once"
],
"enumDescriptions": [
"The command will be executed for each workspace.",
"The command will be executed once."
]
},
"rust-analyzer.checkOnSave.noDefaultFeatures": { "rust-analyzer.checkOnSave.noDefaultFeatures": {
"markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.", "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.",
"default": null, "default": null,

View file

@ -35,8 +35,10 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv
}); });
constructor(ctx: Ctx) { constructor(ctx: Ctx) {
ctx.pushCleanup(vscode.languages.registerHoverProvider({ scheme: "rust-analyzer" }, this)); ctx.pushExtCleanup(
ctx.pushCleanup(vscode.languages.registerDefinitionProvider({ language: "rust" }, this)); vscode.languages.registerHoverProvider({ scheme: "rust-analyzer" }, this)
);
ctx.pushExtCleanup(vscode.languages.registerDefinitionProvider({ language: "rust" }, this));
vscode.workspace.onDidCloseTextDocument( vscode.workspace.onDidCloseTextDocument(
this.onDidCloseTextDocument, this.onDidCloseTextDocument,
this, this,
@ -52,8 +54,6 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv
this, this,
ctx.subscriptions ctx.subscriptions
); );
ctx.pushCleanup(this);
} }
dispose() { dispose() {
this.setRustEditor(undefined); this.setRustEditor(undefined);

View file

@ -0,0 +1,148 @@
import * as vscode from "vscode";
import * as os from "os";
import { Config } from "./config";
import { log, isValidExecutable } from "./util";
import { PersistentState } from "./persistent_state";
import { exec } from "child_process";
export async function bootstrap(
context: vscode.ExtensionContext,
config: Config,
state: PersistentState
): Promise<string> {
const path = await getServer(context, config, state);
if (!path) {
throw new Error(
"Rust Analyzer Language Server is not available. " +
"Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation)."
);
}
log.info("Using server binary at", path);
if (!isValidExecutable(path)) {
if (config.serverPath) {
throw new Error(`Failed to execute ${path} --version. \`config.server.path\` or \`config.serverPath\` has been set explicitly.\
Consider removing this config or making a valid server binary available at that path.`);
} else {
throw new Error(`Failed to execute ${path} --version`);
}
}
return path;
}
/**
 * Rewrites the ELF interpreter of the server binary so it can run on NixOS,
 * where the conventional dynamic-linker path does not exist.
 *
 * The binary at `dest` is renamed to `<dest>-orig`, piped as a Nix
 * expression argument into `nix-build` (which runs `patchelf` inside a
 * derivation), and the build result is output back at `dest`. The temporary
 * `-orig` file is always deleted, even if patching fails. Progress is shown
 * as a VS Code notification.
 */
async function patchelf(dest: vscode.Uri): Promise<void> {
    await vscode.window.withProgress(
        {
            location: vscode.ProgressLocation.Notification,
            title: "Patching rust-analyzer for NixOS",
        },
        async (progress, _) => {
            // Nix expression read by `nix-build` from stdin: copies the
            // source binary, makes it executable, and patches its
            // interpreter to the one supplied by the active Nix toolchain.
            const expression = `
            {srcStr, pkgs ? import <nixpkgs> {}}:
            pkgs.stdenv.mkDerivation {
                name = "rust-analyzer";
                src = /. + srcStr;
                phases = [ "installPhase" "fixupPhase" ];
                installPhase = "cp $src $out";
                fixupPhase = ''
                    chmod 755 $out
                    patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out
                '';
            }
            `;
            // Move the unpatched binary aside so nix-build can write the
            // patched result (a symlink to the store path) back at `dest`.
            const origFile = vscode.Uri.file(dest.fsPath + "-orig");
            await vscode.workspace.fs.rename(dest, origFile, { overwrite: true });
            try {
                progress.report({ message: "Patching executable", increment: 20 });
                await new Promise((resolve, reject) => {
                    const handle = exec(
                        `nix-build -E - --argstr srcStr '${origFile.fsPath}' -o '${dest.fsPath}'`,
                        (err, stdout, stderr) => {
                            if (err != null) {
                                // Surface nix-build's stderr as the failure reason.
                                reject(Error(stderr));
                            } else {
                                resolve(stdout);
                            }
                        }
                    );
                    // Feed the expression via stdin (`-E -` above).
                    handle.stdin?.write(expression);
                    handle.stdin?.end();
                });
            } finally {
                // Clean up the renamed original regardless of success.
                await vscode.workspace.fs.delete(origFile);
            }
        }
    );
}
/**
 * Resolves the path of the rust-analyzer server binary to use.
 *
 * Resolution order:
 *  1. An explicit override (`__RA_LSP_SERVER_DEBUG` env var or the
 *     `serverPath` setting), with a leading `~/` expanded to the home dir.
 *  2. Plain `"rust-analyzer"` (i.e. whatever is on PATH) when no release tag
 *     is configured.
 *  3. The binary bundled with the extension — on NixOS, a patched copy kept
 *     in global storage and refreshed whenever the extension version changes.
 *
 * Shows an error message and returns `undefined` when no bundled binary
 * exists for this platform.
 */
async function getServer(
    context: vscode.ExtensionContext,
    config: Config,
    state: PersistentState
): Promise<string | undefined> {
    const explicitPath = serverPath(config);
    if (explicitPath) {
        return explicitPath.startsWith("~/")
            ? os.homedir() + explicitPath.slice("~".length)
            : explicitPath;
    }

    // A null release tag means "developer build": rely on PATH.
    if (config.package.releaseTag === null) return "rust-analyzer";

    const fileExists = (uri: vscode.Uri) =>
        vscode.workspace.fs.stat(uri).then(
            () => true,
            () => false
        );

    const ext = process.platform === "win32" ? ".exe" : "";
    const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`);

    if (await fileExists(bundled)) {
        let server = bundled;
        if (await isNixOs()) {
            // NixOS cannot run the stock binary directly; maintain a
            // patchelf'ed copy in global storage instead.
            await vscode.workspace.fs.createDirectory(config.globalStorageUri);
            const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`);
            let exists = await fileExists(dest);
            // Drop a stale copy left over from a previous extension version.
            if (exists && config.package.version !== state.serverVersion) {
                await vscode.workspace.fs.delete(dest);
                exists = false;
            }
            if (!exists) {
                await vscode.workspace.fs.copy(bundled, dest);
                await patchelf(dest);
            }
            server = dest;
        }
        await state.updateServerVersion(config.package.version);
        return server.fsPath;
    }

    await state.updateServerVersion(undefined);
    await vscode.window.showErrorMessage(
        "Unfortunately we don't ship binaries for your platform yet. " +
            "You need to manually clone the rust-analyzer repository and " +
            "run `cargo xtask install --server` to build the language server from sources. " +
            "If you feel that your platform should be supported, please create an issue " +
            "about that [here](https://github.com/rust-lang/rust-analyzer/issues) and we " +
            "will consider it."
    );
    return undefined;
}
/**
 * The explicit server-path override, if any: the `__RA_LSP_SERVER_DEBUG`
 * environment variable takes precedence over the user's `serverPath` setting.
 */
function serverPath(config: Config): string | null {
    const debugOverride = process.env.__RA_LSP_SERVER_DEBUG;
    if (debugOverride !== undefined && debugOverride !== null) {
        return debugOverride;
    }
    return config.serverPath;
}
/**
 * Best-effort detection of NixOS by reading `/etc/os-release` and checking
 * its `ID=` line. Any failure to read the file (non-Linux platforms, missing
 * file, permissions) is treated as "not NixOS".
 */
async function isNixOs(): Promise<boolean> {
    try {
        const bytes = await vscode.workspace.fs.readFile(vscode.Uri.file("/etc/os-release"));
        const osRelease = bytes.toString();
        // `find` yields the first ID= line, or undefined if none; fall back
        // to a generic linux id so the nixos check below simply fails.
        const idLine = osRelease.split("\n").find((line) => line.startsWith("ID=")) ?? "ID=linux";
        return idLine.includes("nixos");
    } catch {
        return false;
    }
}

View file

@ -4,9 +4,7 @@ import * as ra from "../src/lsp_ext";
import * as Is from "vscode-languageclient/lib/common/utils/is"; import * as Is from "vscode-languageclient/lib/common/utils/is";
import { assert } from "./util"; import { assert } from "./util";
import { WorkspaceEdit } from "vscode"; import { WorkspaceEdit } from "vscode";
import { Workspace } from "./ctx"; import { substituteVSCodeVariables } from "./config";
import { substituteVariablesInEnv } from "./config";
import { outputChannel, traceOutputChannel } from "./main";
import { randomUUID } from "crypto"; import { randomUUID } from "crypto";
export interface Env { export interface Env {
@ -65,40 +63,42 @@ function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownStri
} }
export async function createClient( export async function createClient(
serverPath: string, traceOutputChannel: vscode.OutputChannel,
workspace: Workspace, outputChannel: vscode.OutputChannel,
extraEnv: Env initializationOptions: vscode.WorkspaceConfiguration,
serverOptions: lc.ServerOptions
): Promise<lc.LanguageClient> { ): Promise<lc.LanguageClient> {
// '.' Is the fallback if no folder is open
// TODO?: Workspace folders support Uri's (eg: file://test.txt).
// It might be a good idea to test if the uri points to a file.
const newEnv = substituteVariablesInEnv(Object.assign({}, process.env, extraEnv));
const run: lc.Executable = {
command: serverPath,
options: { env: newEnv },
};
const serverOptions: lc.ServerOptions = {
run,
debug: run,
};
let initializationOptions = vscode.workspace.getConfiguration("rust-analyzer");
if (workspace.kind === "Detached Files") {
initializationOptions = {
detachedFiles: workspace.files.map((file) => file.uri.fsPath),
...initializationOptions,
};
}
const clientOptions: lc.LanguageClientOptions = { const clientOptions: lc.LanguageClientOptions = {
documentSelector: [{ scheme: "file", language: "rust" }], documentSelector: [{ scheme: "file", language: "rust" }],
initializationOptions, initializationOptions,
diagnosticCollectionName: "rustc", diagnosticCollectionName: "rustc",
traceOutputChannel: traceOutputChannel(), traceOutputChannel,
outputChannel: outputChannel(), outputChannel,
middleware: { middleware: {
workspace: {
// HACK: This is a workaround, when the client has been disposed, VSCode
// continues to emit events to the client and the default one for this event
// attempt to restart the client for no reason
async didChangeWatchedFile(event, next) {
if (client.isRunning()) {
await next(event);
}
},
async configuration(
params: lc.ConfigurationParams,
token: vscode.CancellationToken,
next: lc.ConfigurationRequest.HandlerSignature
) {
const resp = await next(params, token);
if (resp && Array.isArray(resp)) {
return resp.map((val) => {
return substituteVSCodeVariables(val);
});
} else {
return resp;
}
},
},
async provideHover( async provideHover(
document: vscode.TextDocument, document: vscode.TextDocument,
position: vscode.Position, position: vscode.Position,
@ -255,6 +255,9 @@ export async function createClient(
} }
class ExperimentalFeatures implements lc.StaticFeature { class ExperimentalFeatures implements lc.StaticFeature {
getState(): lc.FeatureState {
return { kind: "static" };
}
fillClientCapabilities(capabilities: lc.ClientCapabilities): void { fillClientCapabilities(capabilities: lc.ClientCapabilities): void {
const caps: any = capabilities.experimental ?? {}; const caps: any = capabilities.experimental ?? {};
caps.snippetTextEdit = true; caps.snippetTextEdit = true;

View file

@ -21,16 +21,16 @@ export function analyzerStatus(ctx: Ctx): Cmd {
readonly uri = vscode.Uri.parse("rust-analyzer-status://status"); readonly uri = vscode.Uri.parse("rust-analyzer-status://status");
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>(); readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
provideTextDocumentContent(_uri: vscode.Uri): vscode.ProviderResult<string> { async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> {
if (!vscode.window.activeTextEditor) return ""; if (!vscode.window.activeTextEditor) return "";
const client = await ctx.getClient();
const params: ra.AnalyzerStatusParams = {}; const params: ra.AnalyzerStatusParams = {};
const doc = ctx.activeRustEditor?.document; const doc = ctx.activeRustEditor?.document;
if (doc != null) { if (doc != null) {
params.textDocument = params.textDocument = client.code2ProtocolConverter.asTextDocumentIdentifier(doc);
ctx.client.code2ProtocolConverter.asTextDocumentIdentifier(doc);
} }
return ctx.client.sendRequest(ra.analyzerStatus, params); return await client.sendRequest(ra.analyzerStatus, params);
} }
get onDidChange(): vscode.Event<vscode.Uri> { get onDidChange(): vscode.Event<vscode.Uri> {
@ -38,7 +38,7 @@ export function analyzerStatus(ctx: Ctx): Cmd {
} }
})(); })();
ctx.pushCleanup( ctx.pushExtCleanup(
vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-status", tdcp) vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-status", tdcp)
); );
@ -60,8 +60,13 @@ export function memoryUsage(ctx: Ctx): Cmd {
provideTextDocumentContent(_uri: vscode.Uri): vscode.ProviderResult<string> { provideTextDocumentContent(_uri: vscode.Uri): vscode.ProviderResult<string> {
if (!vscode.window.activeTextEditor) return ""; if (!vscode.window.activeTextEditor) return "";
return ctx.client.sendRequest(ra.memoryUsage).then((mem: any) => { return ctx
return "Per-query memory usage:\n" + mem + "\n(note: database has been cleared)"; .getClient()
.then((it) => it.sendRequest(ra.memoryUsage))
.then((mem: any) => {
return (
"Per-query memory usage:\n" + mem + "\n(note: database has been cleared)"
);
}); });
} }
@ -70,7 +75,7 @@ export function memoryUsage(ctx: Ctx): Cmd {
} }
})(); })();
ctx.pushCleanup( ctx.pushExtCleanup(
vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-memory", tdcp) vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-memory", tdcp)
); );
@ -83,23 +88,19 @@ export function memoryUsage(ctx: Ctx): Cmd {
export function shuffleCrateGraph(ctx: Ctx): Cmd { export function shuffleCrateGraph(ctx: Ctx): Cmd {
return async () => { return async () => {
const client = ctx.client; return ctx.getClient().then((it) => it.sendRequest(ra.shuffleCrateGraph));
if (!client) return;
await client.sendRequest(ra.shuffleCrateGraph);
}; };
} }
export function matchingBrace(ctx: Ctx): Cmd { export function matchingBrace(ctx: Ctx): Cmd {
return async () => { return async () => {
const editor = ctx.activeRustEditor; const editor = ctx.activeRustEditor;
const client = ctx.client; if (!editor) return;
if (!editor || !client) return;
const client = await ctx.getClient();
const response = await client.sendRequest(ra.matchingBrace, { const response = await client.sendRequest(ra.matchingBrace, {
textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
editor.document
),
positions: editor.selections.map((s) => positions: editor.selections.map((s) =>
client.code2ProtocolConverter.asPosition(s.active) client.code2ProtocolConverter.asPosition(s.active)
), ),
@ -116,14 +117,13 @@ export function matchingBrace(ctx: Ctx): Cmd {
export function joinLines(ctx: Ctx): Cmd { export function joinLines(ctx: Ctx): Cmd {
return async () => { return async () => {
const editor = ctx.activeRustEditor; const editor = ctx.activeRustEditor;
const client = ctx.client; if (!editor) return;
if (!editor || !client) return;
const client = await ctx.getClient();
const items: lc.TextEdit[] = await client.sendRequest(ra.joinLines, { const items: lc.TextEdit[] = await client.sendRequest(ra.joinLines, {
ranges: editor.selections.map((it) => client.code2ProtocolConverter.asRange(it)), ranges: editor.selections.map((it) => client.code2ProtocolConverter.asRange(it)),
textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
editor.document
),
}); });
const textEdits = await client.protocol2CodeConverter.asTextEdits(items); const textEdits = await client.protocol2CodeConverter.asTextEdits(items);
await editor.edit((builder) => { await editor.edit((builder) => {
@ -145,14 +145,12 @@ export function moveItemDown(ctx: Ctx): Cmd {
export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd { export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd {
return async () => { return async () => {
const editor = ctx.activeRustEditor; const editor = ctx.activeRustEditor;
const client = ctx.client; if (!editor) return;
if (!editor || !client) return; const client = await ctx.getClient();
const lcEdits = await client.sendRequest(ra.moveItem, { const lcEdits = await client.sendRequest(ra.moveItem, {
range: client.code2ProtocolConverter.asRange(editor.selection), range: client.code2ProtocolConverter.asRange(editor.selection),
textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
editor.document
),
direction, direction,
}); });
@ -166,13 +164,13 @@ export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd {
export function onEnter(ctx: Ctx): Cmd { export function onEnter(ctx: Ctx): Cmd {
async function handleKeypress() { async function handleKeypress() {
const editor = ctx.activeRustEditor; const editor = ctx.activeRustEditor;
const client = ctx.client;
if (!editor || !client) return false; if (!editor) return false;
const client = await ctx.getClient();
const lcEdits = await client const lcEdits = await client
.sendRequest(ra.onEnter, { .sendRequest(ra.onEnter, {
textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
editor.document editor.document
), ),
position: client.code2ProtocolConverter.asPosition(editor.selection.active), position: client.code2ProtocolConverter.asPosition(editor.selection.active),
@ -198,14 +196,13 @@ export function onEnter(ctx: Ctx): Cmd {
export function parentModule(ctx: Ctx): Cmd { export function parentModule(ctx: Ctx): Cmd {
return async () => { return async () => {
const editor = vscode.window.activeTextEditor; const editor = vscode.window.activeTextEditor;
const client = ctx.client; if (!editor) return;
if (!editor || !client) return;
if (!(isRustDocument(editor.document) || isCargoTomlDocument(editor.document))) return; if (!(isRustDocument(editor.document) || isCargoTomlDocument(editor.document))) return;
const client = await ctx.getClient();
const locations = await client.sendRequest(ra.parentModule, { const locations = await client.sendRequest(ra.parentModule, {
textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
editor.document
),
position: client.code2ProtocolConverter.asPosition(editor.selection.active), position: client.code2ProtocolConverter.asPosition(editor.selection.active),
}); });
if (!locations) return; if (!locations) return;
@ -236,13 +233,11 @@ export function parentModule(ctx: Ctx): Cmd {
export function openCargoToml(ctx: Ctx): Cmd { export function openCargoToml(ctx: Ctx): Cmd {
return async () => { return async () => {
const editor = ctx.activeRustEditor; const editor = ctx.activeRustEditor;
const client = ctx.client; if (!editor) return;
if (!editor || !client) return;
const client = await ctx.getClient();
const response = await client.sendRequest(ra.openCargoToml, { const response = await client.sendRequest(ra.openCargoToml, {
textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
editor.document
),
}); });
if (!response) return; if (!response) return;
@ -259,12 +254,13 @@ export function openCargoToml(ctx: Ctx): Cmd {
export function ssr(ctx: Ctx): Cmd { export function ssr(ctx: Ctx): Cmd {
return async () => { return async () => {
const editor = vscode.window.activeTextEditor; const editor = vscode.window.activeTextEditor;
const client = ctx.client; if (!editor) return;
if (!editor || !client) return;
const client = await ctx.getClient();
const position = editor.selection.active; const position = editor.selection.active;
const selections = editor.selections; const selections = editor.selections;
const textDocument = ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( const textDocument = client.code2ProtocolConverter.asTextDocumentIdentifier(
editor.document editor.document
); );
@ -314,6 +310,10 @@ export function ssr(ctx: Ctx): Cmd {
export function serverVersion(ctx: Ctx): Cmd { export function serverVersion(ctx: Ctx): Cmd {
return async () => { return async () => {
if (!ctx.serverPath) {
void vscode.window.showWarningMessage(`rust-analyzer server is not running`);
return;
}
const { stdout } = spawnSync(ctx.serverPath, ["--version"], { encoding: "utf8" }); const { stdout } = spawnSync(ctx.serverPath, ["--version"], { encoding: "utf8" });
const versionString = stdout.slice(`rust-analyzer `.length).trim(); const versionString = stdout.slice(`rust-analyzer `.length).trim();
@ -354,21 +354,22 @@ export function syntaxTree(ctx: Ctx): Cmd {
} }
} }
provideTextDocumentContent( async provideTextDocumentContent(
uri: vscode.Uri, uri: vscode.Uri,
ct: vscode.CancellationToken ct: vscode.CancellationToken
): vscode.ProviderResult<string> { ): Promise<string> {
const rustEditor = ctx.activeRustEditor; const rustEditor = ctx.activeRustEditor;
if (!rustEditor) return ""; if (!rustEditor) return "";
const client = await ctx.getClient();
// When the range based query is enabled we take the range of the selection // When the range based query is enabled we take the range of the selection
const range = const range =
uri.query === "range=true" && !rustEditor.selection.isEmpty uri.query === "range=true" && !rustEditor.selection.isEmpty
? ctx.client.code2ProtocolConverter.asRange(rustEditor.selection) ? client.code2ProtocolConverter.asRange(rustEditor.selection)
: null; : null;
const params = { textDocument: { uri: rustEditor.document.uri.toString() }, range }; const params = { textDocument: { uri: rustEditor.document.uri.toString() }, range };
return ctx.client.sendRequest(ra.syntaxTree, params, ct); return client.sendRequest(ra.syntaxTree, params, ct);
} }
get onDidChange(): vscode.Event<vscode.Uri> { get onDidChange(): vscode.Event<vscode.Uri> {
@ -376,12 +377,11 @@ export function syntaxTree(ctx: Ctx): Cmd {
} }
})(); })();
void new AstInspector(ctx); ctx.pushExtCleanup(new AstInspector(ctx));
ctx.pushExtCleanup(
ctx.pushCleanup(
vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-syntax-tree", tdcp) vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-syntax-tree", tdcp)
); );
ctx.pushCleanup( ctx.pushExtCleanup(
vscode.languages.setLanguageConfiguration("ra_syntax_tree", { vscode.languages.setLanguageConfiguration("ra_syntax_tree", {
brackets: [["[", ")"]], brackets: [["[", ")"]],
}) })
@ -437,14 +437,14 @@ export function viewHir(ctx: Ctx): Cmd {
} }
} }
provideTextDocumentContent( async provideTextDocumentContent(
_uri: vscode.Uri, _uri: vscode.Uri,
ct: vscode.CancellationToken ct: vscode.CancellationToken
): vscode.ProviderResult<string> { ): Promise<string> {
const rustEditor = ctx.activeRustEditor; const rustEditor = ctx.activeRustEditor;
const client = ctx.client; if (!rustEditor) return "";
if (!rustEditor || !client) return "";
const client = await ctx.getClient();
const params = { const params = {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
rustEditor.document rustEditor.document
@ -459,7 +459,7 @@ export function viewHir(ctx: Ctx): Cmd {
} }
})(); })();
ctx.pushCleanup( ctx.pushExtCleanup(
vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-hir", tdcp) vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-hir", tdcp)
); );
@ -503,13 +503,13 @@ export function viewFileText(ctx: Ctx): Cmd {
} }
} }
provideTextDocumentContent( async provideTextDocumentContent(
_uri: vscode.Uri, _uri: vscode.Uri,
ct: vscode.CancellationToken ct: vscode.CancellationToken
): vscode.ProviderResult<string> { ): Promise<string> {
const rustEditor = ctx.activeRustEditor; const rustEditor = ctx.activeRustEditor;
const client = ctx.client; if (!rustEditor) return "";
if (!rustEditor || !client) return ""; const client = await ctx.getClient();
const params = client.code2ProtocolConverter.asTextDocumentIdentifier( const params = client.code2ProtocolConverter.asTextDocumentIdentifier(
rustEditor.document rustEditor.document
@ -522,7 +522,7 @@ export function viewFileText(ctx: Ctx): Cmd {
} }
})(); })();
ctx.pushCleanup( ctx.pushExtCleanup(
vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-file-text", tdcp) vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-file-text", tdcp)
); );
@ -566,13 +566,13 @@ export function viewItemTree(ctx: Ctx): Cmd {
} }
} }
provideTextDocumentContent( async provideTextDocumentContent(
_uri: vscode.Uri, _uri: vscode.Uri,
ct: vscode.CancellationToken ct: vscode.CancellationToken
): vscode.ProviderResult<string> { ): Promise<string> {
const rustEditor = ctx.activeRustEditor; const rustEditor = ctx.activeRustEditor;
const client = ctx.client; if (!rustEditor) return "";
if (!rustEditor || !client) return ""; const client = await ctx.getClient();
const params = { const params = {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
@ -587,7 +587,7 @@ export function viewItemTree(ctx: Ctx): Cmd {
} }
})(); })();
ctx.pushCleanup( ctx.pushExtCleanup(
vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-item-tree", tdcp) vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-item-tree", tdcp)
); );
@ -618,8 +618,8 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd {
const params = { const params = {
full: full, full: full,
}; };
const client = await ctx.getClient();
const dot = await ctx.client.sendRequest(ra.viewCrateGraph, params); const dot = await client.sendRequest(ra.viewCrateGraph, params);
const uri = panel.webview.asWebviewUri(nodeModulesPath); const uri = panel.webview.asWebviewUri(nodeModulesPath);
const html = ` const html = `
@ -690,13 +690,13 @@ export function expandMacro(ctx: Ctx): Cmd {
eventEmitter = new vscode.EventEmitter<vscode.Uri>(); eventEmitter = new vscode.EventEmitter<vscode.Uri>();
async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> { async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> {
const editor = vscode.window.activeTextEditor; const editor = vscode.window.activeTextEditor;
const client = ctx.client; if (!editor) return "";
if (!editor || !client) return ""; const client = await ctx.getClient();
const position = editor.selection.active; const position = editor.selection.active;
const expanded = await client.sendRequest(ra.expandMacro, { const expanded = await client.sendRequest(ra.expandMacro, {
textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
editor.document editor.document
), ),
position, position,
@ -712,7 +712,7 @@ export function expandMacro(ctx: Ctx): Cmd {
} }
})(); })();
ctx.pushCleanup( ctx.pushExtCleanup(
vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-expand-macro", tdcp) vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-expand-macro", tdcp)
); );
@ -724,11 +724,11 @@ export function expandMacro(ctx: Ctx): Cmd {
} }
export function reloadWorkspace(ctx: Ctx): Cmd { export function reloadWorkspace(ctx: Ctx): Cmd {
return async () => ctx.client.sendRequest(ra.reloadWorkspace); return async () => (await ctx.getClient()).sendRequest(ra.reloadWorkspace);
} }
async function showReferencesImpl( async function showReferencesImpl(
client: LanguageClient, client: LanguageClient | undefined,
uri: string, uri: string,
position: lc.Position, position: lc.Position,
locations: lc.Location[] locations: lc.Location[]
@ -745,7 +745,7 @@ async function showReferencesImpl(
export function showReferences(ctx: Ctx): Cmd { export function showReferences(ctx: Ctx): Cmd {
return async (uri: string, position: lc.Position, locations: lc.Location[]) => { return async (uri: string, position: lc.Position, locations: lc.Location[]) => {
await showReferencesImpl(ctx.client, uri, position, locations); await showReferencesImpl(await ctx.getClient(), uri, position, locations);
}; };
} }
@ -762,25 +762,23 @@ export function applyActionGroup(_ctx: Ctx): Cmd {
export function gotoLocation(ctx: Ctx): Cmd { export function gotoLocation(ctx: Ctx): Cmd {
return async (locationLink: lc.LocationLink) => { return async (locationLink: lc.LocationLink) => {
const client = ctx.client; const client = await ctx.getClient();
if (client) {
const uri = client.protocol2CodeConverter.asUri(locationLink.targetUri); const uri = client.protocol2CodeConverter.asUri(locationLink.targetUri);
let range = client.protocol2CodeConverter.asRange(locationLink.targetSelectionRange); let range = client.protocol2CodeConverter.asRange(locationLink.targetSelectionRange);
// collapse the range to a cursor position // collapse the range to a cursor position
range = range.with({ end: range.start }); range = range.with({ end: range.start });
await vscode.window.showTextDocument(uri, { selection: range }); await vscode.window.showTextDocument(uri, { selection: range });
}
}; };
} }
export function openDocs(ctx: Ctx): Cmd { export function openDocs(ctx: Ctx): Cmd {
return async () => { return async () => {
const client = ctx.client;
const editor = vscode.window.activeTextEditor; const editor = vscode.window.activeTextEditor;
if (!editor || !client) { if (!editor) {
return; return;
} }
const client = await ctx.getClient();
const position = editor.selection.active; const position = editor.selection.active;
const textDocument = { uri: editor.document.uri.toString() }; const textDocument = { uri: editor.document.uri.toString() };
@ -795,20 +793,21 @@ export function openDocs(ctx: Ctx): Cmd {
export function cancelFlycheck(ctx: Ctx): Cmd { export function cancelFlycheck(ctx: Ctx): Cmd {
return async () => { return async () => {
await ctx.client.sendRequest(ra.cancelFlycheck); const client = await ctx.getClient();
await client.sendRequest(ra.cancelFlycheck);
}; };
} }
export function resolveCodeAction(ctx: Ctx): Cmd { export function resolveCodeAction(ctx: Ctx): Cmd {
const client = ctx.client;
return async (params: lc.CodeAction) => { return async (params: lc.CodeAction) => {
const client = await ctx.getClient();
params.command = undefined; params.command = undefined;
const item = await client.sendRequest(lc.CodeActionResolveRequest.type, params); const item = await client?.sendRequest(lc.CodeActionResolveRequest.type, params);
if (!item.edit) { if (!item?.edit) {
return; return;
} }
const itemEdit = item.edit; const itemEdit = item.edit;
const edit = await client.protocol2CodeConverter.asWorkspaceEdit(itemEdit); const edit = await client?.protocol2CodeConverter.asWorkspaceEdit(itemEdit);
// filter out all text edits and recreate the WorkspaceEdit without them so we can apply // filter out all text edits and recreate the WorkspaceEdit without them so we can apply
// snippet edits on our own // snippet edits on our own
const lcFileSystemEdit = { const lcFileSystemEdit = {
@ -847,11 +846,10 @@ export function run(ctx: Ctx): Cmd {
} }
export function peekTests(ctx: Ctx): Cmd { export function peekTests(ctx: Ctx): Cmd {
const client = ctx.client;
return async () => { return async () => {
const editor = ctx.activeRustEditor; const editor = ctx.activeRustEditor;
if (!editor || !client) return; if (!editor) return;
const client = await ctx.getClient();
await vscode.window.withProgress( await vscode.window.withProgress(
{ {
@ -937,10 +935,10 @@ export function newDebugConfig(ctx: Ctx): Cmd {
}; };
} }
export function linkToCommand(ctx: Ctx): Cmd { export function linkToCommand(_: Ctx): Cmd {
return async (commandId: string) => { return async (commandId: string) => {
const link = LINKED_COMMANDS.get(commandId); const link = LINKED_COMMANDS.get(commandId);
if (ctx.client && link) { if (link) {
const { command, arguments: args = [] } = link; const { command, arguments: args = [] } = link;
await vscode.commands.executeCommand(command, ...args); await vscode.commands.executeCommand(command, ...args);
} }

View file

@ -1,4 +1,5 @@
import path = require("path"); import * as path from "path";
import * as os from "os";
import * as vscode from "vscode"; import * as vscode from "vscode";
import { Env } from "./client"; import { Env } from "./client";
import { log } from "./util"; import { log } from "./util";
@ -10,23 +11,17 @@ export type RunnableEnvCfg =
export class Config { export class Config {
readonly extensionId = "rust-lang.rust-analyzer"; readonly extensionId = "rust-lang.rust-analyzer";
configureLang: vscode.Disposable | undefined;
readonly rootSection = "rust-analyzer"; readonly rootSection = "rust-analyzer";
private readonly requiresWorkspaceReloadOpts = [
"serverPath",
"server",
// FIXME: This shouldn't be here, changing this setting should reload
// `continueCommentsOnNewline` behavior without restart
"typing",
].map((opt) => `${this.rootSection}.${opt}`);
private readonly requiresReloadOpts = [ private readonly requiresReloadOpts = [
"cargo", "cargo",
"procMacro", "procMacro",
"serverPath",
"server",
"files", "files",
"lens", // works as lens.* "lens", // works as lens.*
] ].map((opt) => `${this.rootSection}.${opt}`);
.map((opt) => `${this.rootSection}.${opt}`)
.concat(this.requiresWorkspaceReloadOpts);
readonly package: { readonly package: {
version: string; version: string;
@ -44,6 +39,11 @@ export class Config {
ctx.subscriptions ctx.subscriptions
); );
this.refreshLogging(); this.refreshLogging();
this.configureLanguage();
}
dispose() {
this.configureLang?.dispose();
} }
private refreshLogging() { private refreshLogging() {
@ -57,34 +57,87 @@ export class Config {
private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) { private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) {
this.refreshLogging(); this.refreshLogging();
this.configureLanguage();
const requiresReloadOpt = this.requiresReloadOpts.find((opt) => const requiresReloadOpt = this.requiresReloadOpts.find((opt) =>
event.affectsConfiguration(opt) event.affectsConfiguration(opt)
); );
if (!requiresReloadOpt) return; if (!requiresReloadOpt) return;
const requiresWorkspaceReloadOpt = this.requiresWorkspaceReloadOpts.find((opt) => if (this.restartServerOnConfigChange) {
event.affectsConfiguration(opt)
);
if (!requiresWorkspaceReloadOpt && this.restartServerOnConfigChange) {
await vscode.commands.executeCommand("rust-analyzer.reload"); await vscode.commands.executeCommand("rust-analyzer.reload");
return; return;
} }
const message = requiresWorkspaceReloadOpt const message = `Changing "${requiresReloadOpt}" requires a server restart`;
? `Changing "${requiresWorkspaceReloadOpt}" requires a window reload` const userResponse = await vscode.window.showInformationMessage(message, "Restart now");
: `Changing "${requiresReloadOpt}" requires a reload`;
const userResponse = await vscode.window.showInformationMessage(message, "Reload now");
if (userResponse === "Reload now") { if (userResponse) {
const command = requiresWorkspaceReloadOpt const command = "rust-analyzer.reload";
? "workbench.action.reloadWindow"
: "rust-analyzer.reload";
if (userResponse === "Reload now") {
await vscode.commands.executeCommand(command); await vscode.commands.executeCommand(command);
} }
} }
/**
* Sets up additional language configuration that's impossible to do via a
* separate language-configuration.json file. See [1] for more information.
*
* [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076
*/
private configureLanguage() {
if (this.typingContinueCommentsOnNewline && !this.configureLang) {
const indentAction = vscode.IndentAction.None;
this.configureLang = vscode.languages.setLanguageConfiguration("rust", {
onEnterRules: [
{
// Doc single-line comment
// e.g. ///|
beforeText: /^\s*\/{3}.*$/,
action: { indentAction, appendText: "/// " },
},
{
// Parent doc single-line comment
// e.g. //!|
beforeText: /^\s*\/{2}\!.*$/,
action: { indentAction, appendText: "//! " },
},
{
// Begins an auto-closed multi-line comment (standard or parent doc)
// e.g. /** | */ or /*! | */
beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
afterText: /^\s*\*\/$/,
action: {
indentAction: vscode.IndentAction.IndentOutdent,
appendText: " * ",
},
},
{
// Begins a multi-line comment (standard or parent doc)
// e.g. /** ...| or /*! ...|
beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
action: { indentAction, appendText: " * " },
},
{
// Continues a multi-line comment
// e.g. * ...|
beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/,
action: { indentAction, appendText: "* " },
},
{
// Dedents after closing a multi-line comment
// e.g. */|
beforeText: /^(\ \ )*\ \*\/\s*$/,
action: { indentAction, removeText: 1 },
},
],
});
}
if (!this.typingContinueCommentsOnNewline && this.configureLang) {
this.configureLang.dispose();
this.configureLang = undefined;
}
} }
// We don't do runtime config validation here for simplicity. More on stackoverflow: // We don't do runtime config validation here for simplicity. More on stackoverflow:
@ -187,6 +240,37 @@ export class Config {
} }
} }
const VarRegex = new RegExp(/\$\{(.+?)\}/g);
export function substituteVSCodeVariableInString(val: string): string {
return val.replace(VarRegex, (substring: string, varName) => {
if (typeof varName === "string") {
return computeVscodeVar(varName) || substring;
} else {
return substring;
}
});
}
export function substituteVSCodeVariables(resp: any): any {
if (typeof resp === "string") {
return substituteVSCodeVariableInString(resp);
} else if (resp && Array.isArray(resp)) {
return resp.map((val) => {
return substituteVSCodeVariables(val);
});
} else if (resp && typeof resp === "object") {
const res: { [key: string]: any } = {};
for (const key in resp) {
const val = resp[key];
res[key] = substituteVSCodeVariables(val);
}
return res;
} else if (typeof resp === "function") {
return null;
}
return resp;
}
export function substituteVariablesInEnv(env: Env): Env { export function substituteVariablesInEnv(env: Env): Env {
const missingDeps = new Set<string>(); const missingDeps = new Set<string>();
// vscode uses `env:ENV_NAME` for env vars resolution, and it's easier // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier
@ -233,7 +317,7 @@ export function substituteVariablesInEnv(env: Env): Env {
} }
} else { } else {
envWithDeps[dep] = { envWithDeps[dep] = {
value: computeVscodeVar(dep), value: computeVscodeVar(dep) || "${" + dep + "}",
deps: [], deps: [],
}; };
} }
@ -264,10 +348,8 @@ export function substituteVariablesInEnv(env: Env): Env {
return resolvedEnv; return resolvedEnv;
} }
function computeVscodeVar(varName: string): string { function computeVscodeVar(varName: string): string | null {
// https://code.visualstudio.com/docs/editor/variables-reference const workspaceFolder = () => {
const supportedVariables: { [k: string]: () => string } = {
workspaceFolder: () => {
const folders = vscode.workspace.workspaceFolders ?? []; const folders = vscode.workspace.workspaceFolders ?? [];
if (folders.length === 1) { if (folders.length === 1) {
// TODO: support for remote workspaces? // TODO: support for remote workspaces?
@ -283,18 +365,17 @@ function computeVscodeVar(varName: string): string {
// no workspace opened // no workspace opened
return ""; return "";
} }
}, };
// https://code.visualstudio.com/docs/editor/variables-reference
const supportedVariables: { [k: string]: () => string } = {
workspaceFolder,
workspaceFolderBasename: () => { workspaceFolderBasename: () => {
const workspaceFolder = computeVscodeVar("workspaceFolder"); return path.basename(workspaceFolder());
if (workspaceFolder) {
return path.basename(workspaceFolder);
} else {
return "";
}
}, },
cwd: () => process.cwd(), cwd: () => process.cwd(),
userHome: () => os.homedir(),
// see // see
// https://github.com/microsoft/vscode/blob/08ac1bb67ca2459496b272d8f4a908757f24f56f/src/vs/workbench/api/common/extHostVariableResolverService.ts#L81 // https://github.com/microsoft/vscode/blob/08ac1bb67ca2459496b272d8f4a908757f24f56f/src/vs/workbench/api/common/extHostVariableResolverService.ts#L81
@ -308,7 +389,7 @@ function computeVscodeVar(varName: string): string {
if (varName in supportedVariables) { if (varName in supportedVariables) {
return supportedVariables[varName](); return supportedVariables[varName]();
} else { } else {
// can't resolve, keep the expression as is // return "${" + varName + "}";
return "${" + varName + "}"; return null;
} }
} }

View file

@ -2,10 +2,12 @@ import * as vscode from "vscode";
import * as lc from "vscode-languageclient/node"; import * as lc from "vscode-languageclient/node";
import * as ra from "./lsp_ext"; import * as ra from "./lsp_ext";
import { Config } from "./config"; import { Config, substituteVariablesInEnv, substituteVSCodeVariables } from "./config";
import { createClient } from "./client"; import { createClient } from "./client";
import { isRustEditor, RustEditor } from "./util"; import { isRustEditor, log, RustEditor } from "./util";
import { ServerStatusParams } from "./lsp_ext"; import { ServerStatusParams } from "./lsp_ext";
import { PersistentState } from "./persistent_state";
import { bootstrap } from "./bootstrap";
export type Workspace = export type Workspace =
| { | {
@ -16,36 +18,154 @@ export type Workspace =
files: vscode.TextDocument[]; files: vscode.TextDocument[];
}; };
export type CommandFactory = {
enabled: (ctx: Ctx) => Cmd;
disabled?: (ctx: Ctx) => Cmd;
};
export class Ctx { export class Ctx {
private constructor( readonly statusBar: vscode.StatusBarItem;
readonly config: Config, readonly config: Config;
private readonly extCtx: vscode.ExtensionContext,
readonly client: lc.LanguageClient,
readonly serverPath: string,
readonly statusBar: vscode.StatusBarItem
) {}
static async create( private client: lc.LanguageClient | undefined;
config: Config, private _serverPath: string | undefined;
extCtx: vscode.ExtensionContext, private traceOutputChannel: vscode.OutputChannel | undefined;
serverPath: string, private outputChannel: vscode.OutputChannel | undefined;
workspace: Workspace private clientSubscriptions: Disposable[];
): Promise<Ctx> { private state: PersistentState;
const client = await createClient(serverPath, workspace, config.serverExtraEnv); private commandFactories: Record<string, CommandFactory>;
private commandDisposables: Disposable[];
const statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left); workspace: Workspace;
extCtx.subscriptions.push(statusBar);
statusBar.text = "rust-analyzer";
statusBar.tooltip = "ready";
statusBar.command = "rust-analyzer.analyzerStatus";
statusBar.show();
const res = new Ctx(config, extCtx, client, serverPath, statusBar); constructor(
readonly extCtx: vscode.ExtensionContext,
workspace: Workspace,
commandFactories: Record<string, CommandFactory>
) {
extCtx.subscriptions.push(this);
this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
this.statusBar.text = "rust-analyzer";
this.statusBar.tooltip = "ready";
this.statusBar.command = "rust-analyzer.analyzerStatus";
this.statusBar.show();
this.workspace = workspace;
this.clientSubscriptions = [];
this.commandDisposables = [];
this.commandFactories = commandFactories;
res.pushCleanup(client.start()); this.state = new PersistentState(extCtx.globalState);
await client.onReady(); this.config = new Config(extCtx);
client.onNotification(ra.serverStatus, (params) => res.setServerStatus(params));
return res; this.updateCommands();
}
dispose() {
this.config.dispose();
this.statusBar.dispose();
void this.disposeClient();
this.commandDisposables.forEach((disposable) => disposable.dispose());
}
clientFetcher() {
const self = this;
return {
get client(): lc.LanguageClient | undefined {
return self.client;
},
};
}
async getClient() {
if (!this.traceOutputChannel) {
this.traceOutputChannel = vscode.window.createOutputChannel(
"Rust Analyzer Language Server Trace"
);
this.pushExtCleanup(this.traceOutputChannel);
}
if (!this.outputChannel) {
this.outputChannel = vscode.window.createOutputChannel("Rust Analyzer Language Server");
this.pushExtCleanup(this.outputChannel);
}
if (!this.client) {
this._serverPath = await bootstrap(this.extCtx, this.config, this.state).catch(
(err) => {
let message = "bootstrap error. ";
message +=
'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). ';
message +=
'To enable verbose logs use { "rust-analyzer.trace.extension": true }';
log.error("Bootstrap error", err);
throw new Error(message);
}
);
const newEnv = substituteVariablesInEnv(
Object.assign({}, process.env, this.config.serverExtraEnv)
);
const run: lc.Executable = {
command: this._serverPath,
options: { env: newEnv },
};
const serverOptions = {
run,
debug: run,
};
let rawInitializationOptions = vscode.workspace.getConfiguration("rust-analyzer");
if (this.workspace.kind === "Detached Files") {
rawInitializationOptions = {
detachedFiles: this.workspace.files.map((file) => file.uri.fsPath),
...rawInitializationOptions,
};
}
const initializationOptions = substituteVSCodeVariables(rawInitializationOptions);
this.client = await createClient(
this.traceOutputChannel,
this.outputChannel,
initializationOptions,
serverOptions
);
this.pushClientCleanup(
this.client.onNotification(ra.serverStatus, (params) =>
this.setServerStatus(params)
)
);
}
return this.client;
}
async activate() {
log.info("Activating language client");
const client = await this.getClient();
await client.start();
this.updateCommands();
return client;
}
async deactivate() {
log.info("Deactivating language client");
await this.client?.stop();
this.updateCommands();
}
async stop() {
log.info("Stopping language client");
await this.disposeClient();
this.updateCommands();
}
private async disposeClient() {
this.clientSubscriptions?.forEach((disposable) => disposable.dispose());
this.clientSubscriptions = [];
await this.client?.dispose();
this._serverPath = undefined;
this.client = undefined;
} }
get activeRustEditor(): RustEditor | undefined { get activeRustEditor(): RustEditor | undefined {
@ -53,29 +173,37 @@ export class Ctx {
return editor && isRustEditor(editor) ? editor : undefined; return editor && isRustEditor(editor) ? editor : undefined;
} }
get visibleRustEditors(): RustEditor[] {
return vscode.window.visibleTextEditors.filter(isRustEditor);
}
registerCommand(name: string, factory: (ctx: Ctx) => Cmd) {
const fullName = `rust-analyzer.${name}`;
const cmd = factory(this);
const d = vscode.commands.registerCommand(fullName, cmd);
this.pushCleanup(d);
}
get extensionPath(): string { get extensionPath(): string {
return this.extCtx.extensionPath; return this.extCtx.extensionPath;
} }
get globalState(): vscode.Memento {
return this.extCtx.globalState;
}
get subscriptions(): Disposable[] { get subscriptions(): Disposable[] {
return this.extCtx.subscriptions; return this.extCtx.subscriptions;
} }
get serverPath(): string | undefined {
return this._serverPath;
}
private updateCommands() {
this.commandDisposables.forEach((disposable) => disposable.dispose());
this.commandDisposables = [];
const fetchFactory = (factory: CommandFactory, fullName: string) => {
return this.client && this.client.isRunning()
? factory.enabled
: factory.disabled ||
((_) => () =>
vscode.window.showErrorMessage(
`command ${fullName} failed: rust-analyzer server is not running`
));
};
for (const [name, factory] of Object.entries(this.commandFactories)) {
const fullName = `rust-analyzer.${name}`;
const callback = fetchFactory(factory, fullName)(this);
this.commandDisposables.push(vscode.commands.registerCommand(fullName, callback));
}
}
setServerStatus(status: ServerStatusParams) { setServerStatus(status: ServerStatusParams) {
let icon = ""; let icon = "";
const statusBar = this.statusBar; const statusBar = this.statusBar;
@ -111,9 +239,13 @@ export class Ctx {
statusBar.text = `${icon}rust-analyzer`; statusBar.text = `${icon}rust-analyzer`;
} }
pushCleanup(d: Disposable) { pushExtCleanup(d: Disposable) {
this.extCtx.subscriptions.push(d); this.extCtx.subscriptions.push(d);
} }
private pushClientCleanup(d: Disposable) {
this.clientSubscriptions.push(d);
}
} }
export interface Disposable { export interface Disposable {

View file

@ -1,53 +1,37 @@
import * as vscode from "vscode"; import * as vscode from "vscode";
import * as lc from "vscode-languageclient/node"; import * as lc from "vscode-languageclient/node";
import * as os from "os";
import * as commands from "./commands"; import * as commands from "./commands";
import { Ctx } from "./ctx"; import { CommandFactory, Ctx, Workspace } from "./ctx";
import { Config } from "./config"; import { isRustDocument } from "./util";
import { log, isValidExecutable, isRustDocument } from "./util";
import { PersistentState } from "./persistent_state";
import { activateTaskProvider } from "./tasks"; import { activateTaskProvider } from "./tasks";
import { setContextValue } from "./util"; import { setContextValue } from "./util";
import { exec } from "child_process";
let ctx: Ctx | undefined;
const RUST_PROJECT_CONTEXT_NAME = "inRustProject"; const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
let TRACE_OUTPUT_CHANNEL: vscode.OutputChannel | null = null; export interface RustAnalyzerExtensionApi {
export function traceOutputChannel() { // FIXME: this should be non-optional
if (!TRACE_OUTPUT_CHANNEL) { readonly client?: lc.LanguageClient;
TRACE_OUTPUT_CHANNEL = vscode.window.createOutputChannel(
"Rust Analyzer Language Server Trace"
);
}
return TRACE_OUTPUT_CHANNEL;
}
let OUTPUT_CHANNEL: vscode.OutputChannel | null = null;
export function outputChannel() {
if (!OUTPUT_CHANNEL) {
OUTPUT_CHANNEL = vscode.window.createOutputChannel("Rust Analyzer Language Server");
}
return OUTPUT_CHANNEL;
} }
export interface RustAnalyzerExtensionApi { export async function deactivate() {
client?: lc.LanguageClient; await setContextValue(RUST_PROJECT_CONTEXT_NAME, undefined);
} }
export async function activate( export async function activate(
context: vscode.ExtensionContext context: vscode.ExtensionContext
): Promise<RustAnalyzerExtensionApi> { ): Promise<RustAnalyzerExtensionApi> {
// VS Code doesn't show a notification when an extension fails to activate if (vscode.extensions.getExtension("rust-lang.rust")) {
// so we do it ourselves. vscode.window
return await tryActivate(context).catch((err) => { .showWarningMessage(
void vscode.window.showErrorMessage(`Cannot activate rust-analyzer: ${err.message}`); `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` +
throw err; "plugins enabled. These are known to conflict and cause various functions of " +
}); "both plugins to not work correctly. You should disable one of them.",
"Got it"
)
.then(() => {}, console.error);
} }
async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyzerExtensionApi> {
// We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if // We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
// only those are in use. // only those are in use.
// (r-a still somewhat works with Live Share, because commands are tunneled to the host) // (r-a still somewhat works with Live Share, because commands are tunneled to the host)
@ -65,351 +49,118 @@ async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyz
return {}; return {};
} }
const config = new Config(context); const workspace: Workspace =
const state = new PersistentState(context.globalState); folders.length === 0
const serverPath = await bootstrap(context, config, state).catch((err) => { ? {
let message = "bootstrap error. ";
message += 'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). ';
message += 'To enable verbose logs use { "rust-analyzer.trace.extension": true }';
log.error("Bootstrap error", err);
throw new Error(message);
});
if (folders.length === 0) {
ctx = await Ctx.create(config, context, serverPath, {
kind: "Detached Files", kind: "Detached Files",
files: rustDocuments, files: rustDocuments,
});
} else {
// Note: we try to start the server before we activate type hints so that it
// registers its `onDidChangeDocument` handler before us.
//
// This a horribly, horribly wrong way to deal with this problem.
ctx = await Ctx.create(config, context, serverPath, { kind: "Workspace Folder" });
ctx.pushCleanup(activateTaskProvider(ctx.config));
} }
await initCommonContext(context, ctx); : { kind: "Workspace Folder" };
warnAboutExtensionConflicts(); const ctx = new Ctx(context, workspace, createCommands());
// VS Code doesn't show a notification when an extension fails to activate
// so we do it ourselves.
const api = await activateServer(ctx).catch((err) => {
void vscode.window.showErrorMessage(
`Cannot activate rust-analyzer extension: ${err.message}`
);
throw err;
});
await setContextValue(RUST_PROJECT_CONTEXT_NAME, true);
return api;
}
if (config.typingContinueCommentsOnNewline) { async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> {
ctx.pushCleanup(configureLanguage()); if (ctx.workspace.kind === "Workspace Folder") {
ctx.pushExtCleanup(activateTaskProvider(ctx.config));
} }
vscode.workspace.onDidChangeConfiguration( vscode.workspace.onDidChangeConfiguration(
(_) => async (_) => {
ctx?.client await ctx
?.sendNotification("workspace/didChangeConfiguration", { settings: "" }) .clientFetcher()
.catch(log.error), .client?.sendNotification("workspace/didChangeConfiguration", { settings: "" });
},
null, null,
ctx.subscriptions ctx.subscriptions
); );
await ctx.activate();
return ctx.clientFetcher();
}
function createCommands(): Record<string, CommandFactory> {
return { return {
client: ctx.client, onEnter: {
enabled: commands.onEnter,
disabled: (_) => () => vscode.commands.executeCommand("default:type", { text: "\n" }),
},
reload: {
enabled: (ctx) => async () => {
void vscode.window.showInformationMessage("Reloading rust-analyzer...");
// FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
await ctx.stop();
await ctx.activate();
},
disabled: (ctx) => async () => {
void vscode.window.showInformationMessage("Reloading rust-analyzer...");
await ctx.activate();
},
},
startServer: {
enabled: (ctx) => async () => {
await ctx.activate();
},
disabled: (ctx) => async () => {
await ctx.activate();
},
},
stopServer: {
enabled: (ctx) => async () => {
// FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
await ctx.stop();
ctx.setServerStatus({
health: "ok",
quiescent: true,
message: "server is not running",
});
},
},
analyzerStatus: { enabled: commands.analyzerStatus },
memoryUsage: { enabled: commands.memoryUsage },
shuffleCrateGraph: { enabled: commands.shuffleCrateGraph },
reloadWorkspace: { enabled: commands.reloadWorkspace },
matchingBrace: { enabled: commands.matchingBrace },
joinLines: { enabled: commands.joinLines },
parentModule: { enabled: commands.parentModule },
syntaxTree: { enabled: commands.syntaxTree },
viewHir: { enabled: commands.viewHir },
viewFileText: { enabled: commands.viewFileText },
viewItemTree: { enabled: commands.viewItemTree },
viewCrateGraph: { enabled: commands.viewCrateGraph },
viewFullCrateGraph: { enabled: commands.viewFullCrateGraph },
expandMacro: { enabled: commands.expandMacro },
run: { enabled: commands.run },
copyRunCommandLine: { enabled: commands.copyRunCommandLine },
debug: { enabled: commands.debug },
newDebugConfig: { enabled: commands.newDebugConfig },
openDocs: { enabled: commands.openDocs },
openCargoToml: { enabled: commands.openCargoToml },
peekTests: { enabled: commands.peekTests },
moveItemUp: { enabled: commands.moveItemUp },
moveItemDown: { enabled: commands.moveItemDown },
cancelFlycheck: { enabled: commands.cancelFlycheck },
ssr: { enabled: commands.ssr },
serverVersion: { enabled: commands.serverVersion },
// Internal commands which are invoked by the server.
applyActionGroup: { enabled: commands.applyActionGroup },
applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand },
debugSingle: { enabled: commands.debugSingle },
gotoLocation: { enabled: commands.gotoLocation },
linkToCommand: { enabled: commands.linkToCommand },
resolveCodeAction: { enabled: commands.resolveCodeAction },
runSingle: { enabled: commands.runSingle },
showReferences: { enabled: commands.showReferences },
}; };
} }
async function initCommonContext(context: vscode.ExtensionContext, ctx: Ctx) {
// Register a "dumb" onEnter command for the case where server fails to
// start.
//
// FIXME: refactor command registration code such that commands are
// **always** registered, even if the server does not start. Use API like
// this perhaps?
//
// ```TypeScript
// registerCommand(
// factory: (Ctx) => ((Ctx) => any),
// fallback: () => any = () => vscode.window.showErrorMessage(
// "rust-analyzer is not available"
// ),
// )
const defaultOnEnter = vscode.commands.registerCommand("rust-analyzer.onEnter", () =>
vscode.commands.executeCommand("default:type", { text: "\n" })
);
context.subscriptions.push(defaultOnEnter);
await setContextValue(RUST_PROJECT_CONTEXT_NAME, true);
// Commands which invokes manually via command palette, shortcut, etc.
// Reloading is inspired by @DanTup maneuver: https://github.com/microsoft/vscode/issues/45774#issuecomment-373423895
ctx.registerCommand("reload", (_) => async () => {
void vscode.window.showInformationMessage("Reloading rust-analyzer...");
await doDeactivate();
while (context.subscriptions.length > 0) {
try {
context.subscriptions.pop()!.dispose();
} catch (err) {
log.error("Dispose error:", err);
}
}
await activate(context).catch(log.error);
});
ctx.registerCommand("analyzerStatus", commands.analyzerStatus);
ctx.registerCommand("memoryUsage", commands.memoryUsage);
ctx.registerCommand("shuffleCrateGraph", commands.shuffleCrateGraph);
ctx.registerCommand("reloadWorkspace", commands.reloadWorkspace);
ctx.registerCommand("matchingBrace", commands.matchingBrace);
ctx.registerCommand("joinLines", commands.joinLines);
ctx.registerCommand("parentModule", commands.parentModule);
ctx.registerCommand("syntaxTree", commands.syntaxTree);
ctx.registerCommand("viewHir", commands.viewHir);
ctx.registerCommand("viewFileText", commands.viewFileText);
ctx.registerCommand("viewItemTree", commands.viewItemTree);
ctx.registerCommand("viewCrateGraph", commands.viewCrateGraph);
ctx.registerCommand("viewFullCrateGraph", commands.viewFullCrateGraph);
ctx.registerCommand("expandMacro", commands.expandMacro);
ctx.registerCommand("run", commands.run);
ctx.registerCommand("copyRunCommandLine", commands.copyRunCommandLine);
ctx.registerCommand("debug", commands.debug);
ctx.registerCommand("newDebugConfig", commands.newDebugConfig);
ctx.registerCommand("openDocs", commands.openDocs);
ctx.registerCommand("openCargoToml", commands.openCargoToml);
ctx.registerCommand("peekTests", commands.peekTests);
ctx.registerCommand("moveItemUp", commands.moveItemUp);
ctx.registerCommand("moveItemDown", commands.moveItemDown);
ctx.registerCommand("cancelFlycheck", commands.cancelFlycheck);
defaultOnEnter.dispose();
ctx.registerCommand("onEnter", commands.onEnter);
ctx.registerCommand("ssr", commands.ssr);
ctx.registerCommand("serverVersion", commands.serverVersion);
// Internal commands which are invoked by the server.
ctx.registerCommand("runSingle", commands.runSingle);
ctx.registerCommand("debugSingle", commands.debugSingle);
ctx.registerCommand("showReferences", commands.showReferences);
ctx.registerCommand("applySnippetWorkspaceEdit", commands.applySnippetWorkspaceEditCommand);
ctx.registerCommand("resolveCodeAction", commands.resolveCodeAction);
ctx.registerCommand("applyActionGroup", commands.applyActionGroup);
ctx.registerCommand("gotoLocation", commands.gotoLocation);
ctx.registerCommand("linkToCommand", commands.linkToCommand);
}
export async function deactivate() {
TRACE_OUTPUT_CHANNEL?.dispose();
TRACE_OUTPUT_CHANNEL = null;
OUTPUT_CHANNEL?.dispose();
OUTPUT_CHANNEL = null;
await doDeactivate();
}
async function doDeactivate() {
await setContextValue(RUST_PROJECT_CONTEXT_NAME, undefined);
await ctx?.client.stop();
ctx = undefined;
}
async function bootstrap(
context: vscode.ExtensionContext,
config: Config,
state: PersistentState
): Promise<string> {
const path = await getServer(context, config, state);
if (!path) {
throw new Error(
"Rust Analyzer Language Server is not available. " +
"Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation)."
);
}
log.info("Using server binary at", path);
if (!isValidExecutable(path)) {
if (config.serverPath) {
throw new Error(`Failed to execute ${path} --version. \`config.server.path\` or \`config.serverPath\` has been set explicitly.\
Consider removing this config or making a valid server binary available at that path.`);
} else {
throw new Error(`Failed to execute ${path} --version`);
}
}
return path;
}
async function patchelf(dest: vscode.Uri): Promise<void> {
await vscode.window.withProgress(
{
location: vscode.ProgressLocation.Notification,
title: "Patching rust-analyzer for NixOS",
},
async (progress, _) => {
const expression = `
{srcStr, pkgs ? import <nixpkgs> {}}:
pkgs.stdenv.mkDerivation {
name = "rust-analyzer";
src = /. + srcStr;
phases = [ "installPhase" "fixupPhase" ];
installPhase = "cp $src $out";
fixupPhase = ''
chmod 755 $out
patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out
'';
}
`;
const origFile = vscode.Uri.file(dest.fsPath + "-orig");
await vscode.workspace.fs.rename(dest, origFile, { overwrite: true });
try {
progress.report({ message: "Patching executable", increment: 20 });
await new Promise((resolve, reject) => {
const handle = exec(
`nix-build -E - --argstr srcStr '${origFile.fsPath}' -o '${dest.fsPath}'`,
(err, stdout, stderr) => {
if (err != null) {
reject(Error(stderr));
} else {
resolve(stdout);
}
}
);
handle.stdin?.write(expression);
handle.stdin?.end();
});
} finally {
await vscode.workspace.fs.delete(origFile);
}
}
);
}
async function getServer(
context: vscode.ExtensionContext,
config: Config,
state: PersistentState
): Promise<string | undefined> {
const explicitPath = serverPath(config);
if (explicitPath) {
if (explicitPath.startsWith("~/")) {
return os.homedir() + explicitPath.slice("~".length);
}
return explicitPath;
}
if (config.package.releaseTag === null) return "rust-analyzer";
const ext = process.platform === "win32" ? ".exe" : "";
const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`);
const bundledExists = await vscode.workspace.fs.stat(bundled).then(
() => true,
() => false
);
if (bundledExists) {
let server = bundled;
if (await isNixOs()) {
await vscode.workspace.fs.createDirectory(config.globalStorageUri).then();
const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`);
let exists = await vscode.workspace.fs.stat(dest).then(
() => true,
() => false
);
if (exists && config.package.version !== state.serverVersion) {
await vscode.workspace.fs.delete(dest);
exists = false;
}
if (!exists) {
await vscode.workspace.fs.copy(bundled, dest);
await patchelf(dest);
}
server = dest;
}
await state.updateServerVersion(config.package.version);
return server.fsPath;
}
await state.updateServerVersion(undefined);
await vscode.window.showErrorMessage(
"Unfortunately we don't ship binaries for your platform yet. " +
"You need to manually clone the rust-analyzer repository and " +
"run `cargo xtask install --server` to build the language server from sources. " +
"If you feel that your platform should be supported, please create an issue " +
"about that [here](https://github.com/rust-lang/rust-analyzer/issues) and we " +
"will consider it."
);
return undefined;
}
function serverPath(config: Config): string | null {
return process.env.__RA_LSP_SERVER_DEBUG ?? config.serverPath;
}
async function isNixOs(): Promise<boolean> {
try {
const contents = (
await vscode.workspace.fs.readFile(vscode.Uri.file("/etc/os-release"))
).toString();
const idString = contents.split("\n").find((a) => a.startsWith("ID=")) || "ID=linux";
return idString.indexOf("nixos") !== -1;
} catch {
return false;
}
}
function warnAboutExtensionConflicts() {
if (vscode.extensions.getExtension("rust-lang.rust")) {
vscode.window
.showWarningMessage(
`You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` +
"plugins enabled. These are known to conflict and cause various functions of " +
"both plugins to not work correctly. You should disable one of them.",
"Got it"
)
.then(() => {}, console.error);
}
}
/**
* Sets up additional language configuration that's impossible to do via a
* separate language-configuration.json file. See [1] for more information.
*
* [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076
*/
function configureLanguage(): vscode.Disposable {
const indentAction = vscode.IndentAction.None;
return vscode.languages.setLanguageConfiguration("rust", {
onEnterRules: [
{
// Doc single-line comment
// e.g. ///|
beforeText: /^\s*\/{3}.*$/,
action: { indentAction, appendText: "/// " },
},
{
// Parent doc single-line comment
// e.g. //!|
beforeText: /^\s*\/{2}\!.*$/,
action: { indentAction, appendText: "//! " },
},
{
// Begins an auto-closed multi-line comment (standard or parent doc)
// e.g. /** | */ or /*! | */
beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
afterText: /^\s*\*\/$/,
action: { indentAction: vscode.IndentAction.IndentOutdent, appendText: " * " },
},
{
// Begins a multi-line comment (standard or parent doc)
// e.g. /** ...| or /*! ...|
beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
action: { indentAction, appendText: " * " },
},
{
// Continues a multi-line comment
// e.g. * ...|
beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/,
action: { indentAction, appendText: "* " },
},
{
// Dedents after closing a multi-line comment
// e.g. */|
beforeText: /^(\ \ )*\ \*\/\s*$/,
action: { indentAction, removeText: 1 },
},
],
});
}

View file

@ -18,9 +18,9 @@ export async function selectRunnable(
showButtons: boolean = true showButtons: boolean = true
): Promise<RunnableQuickPick | undefined> { ): Promise<RunnableQuickPick | undefined> {
const editor = ctx.activeRustEditor; const editor = ctx.activeRustEditor;
const client = ctx.client; if (!editor) return;
if (!editor || !client) return;
const client = await ctx.getClient();
const textDocument: lc.TextDocumentIdentifier = { const textDocument: lc.TextDocumentIdentifier = {
uri: editor.document.uri.toString(), uri: editor.document.uri.toString(),
}; };