mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-28 14:03:35 +00:00

Merge commit 'c60b1f641' into HEAD

commit e070dc5129
40 changed files with 827 additions and 407 deletions

Cargo.lock (generated), 4 changes
@@ -872,9 +872,9 @@ dependencies = [
 
 [[package]]
 name = "lsp-types"
-version = "0.93.1"
+version = "0.93.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3bcfee315dde785ba887edb540b08765fd7df75a7d948844be6bf5712246734"
+checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51"
 dependencies = [
  "bitflags",
  "serde",
@@ -295,7 +295,9 @@ impl FlycheckActor {
             } => {
                 let mut cmd = Command::new(toolchain::cargo());
                 cmd.arg(command);
-                cmd.args(&["--workspace", "--message-format=json"]);
+                cmd.current_dir(&self.root);
+                cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
+                    .arg(self.root.join("Cargo.toml").as_os_str());
 
                 if let Some(target) = target_triple {
                     cmd.args(&["--target", target.as_str()]);
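
Note: the hunk above changes how flycheck invokes cargo: besides --workspace it now sets the
working directory and passes --manifest-path explicitly. A minimal standalone sketch of the
resulting invocation (the project path and "check" command below are illustrative, not taken
from the diff):

    use std::path::Path;
    use std::process::Command;

    fn check_command(workspace_root: &Path, check_cmd: &str, target: Option<&str>) -> Command {
        let mut cmd = Command::new("cargo");
        cmd.arg(check_cmd);
        // Run from the workspace root and name the manifest explicitly, as the new code does.
        cmd.current_dir(workspace_root);
        cmd.args(["--workspace", "--message-format=json", "--manifest-path"])
            .arg(workspace_root.join("Cargo.toml"));
        if let Some(target) = target {
            cmd.args(["--target", target]);
        }
        cmd
    }

    fn main() {
        let cmd = check_command(Path::new("/path/to/project"), "check", None);
        println!("{cmd:?}");
    }
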
@@ -662,8 +662,12 @@ fn desugar_future_path(orig: TypeRef) -> Path {
     let mut generic_args: Vec<_> =
         std::iter::repeat(None).take(path.segments().len() - 1).collect();
     let mut last = GenericArgs::empty();
-    let binding =
-        AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() };
+    let binding = AssociatedTypeBinding {
+        name: name![Output],
+        args: None,
+        type_ref: Some(orig),
+        bounds: Vec::new(),
+    };
     last.bindings.push(binding);
     generic_args.push(Some(Interned::new(last)));
 
@@ -68,6 +68,9 @@ pub struct GenericArgs {
 pub struct AssociatedTypeBinding {
     /// The name of the associated type.
     pub name: Name,
+    /// The generic arguments to the associated type. e.g. For `Trait<Assoc<'a, T> = &'a T>`, this
+    /// would be `['a, T]`.
+    pub args: Option<Interned<GenericArgs>>,
     /// The type bound to this associated type (in `Item = T`, this would be the
     /// `T`). This can be `None` if there are bounds instead.
     pub type_ref: Option<TypeRef>,
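
Note: the new `args` field records generic arguments written on the associated type itself,
which only appear with generic associated types (GATs). A self-contained illustration of the
surface syntax involved (plain Rust, not rust-analyzer internals; the `Family`/`Member` names
are made up):

    trait Family {
        type Member<'a, T>
        where
            T: 'a;
    }

    struct RefFamily;

    impl Family for RefFamily {
        type Member<'a, T> = &'a T where T: 'a;
    }

    // In the bound `F: Family<Member<'a, T> = &'a T>`, the binding names the associated type
    // `Member`, its own generic arguments `'a, T` (the new `args` field), and the bound type.
    fn first<'a, T: 'a, F>(slice: &'a [T]) -> Option<F::Member<'a, T>>
    where
        F: Family<Member<'a, T> = &'a T>,
    {
        slice.first()
    }

    fn main() {
        let v = vec![1, 2, 3];
        assert_eq!(first::<_, RefFamily>(&v), Some(&1));
    }
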
@@ -163,6 +163,10 @@ pub(super) fn lower_generic_args(
             ast::GenericArg::AssocTypeArg(assoc_type_arg) => {
                 if let Some(name_ref) = assoc_type_arg.name_ref() {
                     let name = name_ref.as_name();
+                    let args = assoc_type_arg
+                        .generic_arg_list()
+                        .and_then(|args| lower_generic_args(lower_ctx, args))
+                        .map(Interned::new);
                     let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it));
                     let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
                         l.bounds()
@@ -171,7 +175,7 @@ pub(super) fn lower_generic_args(
                     } else {
                         Vec::new()
                     };
-                    bindings.push(AssociatedTypeBinding { name, type_ref, bounds });
+                    bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds });
                 }
             }
             ast::GenericArg::LifetimeArg(lifetime_arg) => {
@@ -214,6 +218,7 @@ fn lower_generic_args_from_fn_path(
         let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
         bindings.push(AssociatedTypeBinding {
             name: name![Output],
+            args: None,
             type_ref: Some(type_ref),
             bounds: Vec::new(),
         });
@@ -222,6 +227,7 @@ fn lower_generic_args_from_fn_path(
         let type_ref = TypeRef::Tuple(Vec::new());
         bindings.push(AssociatedTypeBinding {
             name: name![Output],
+            args: None,
             type_ref: Some(type_ref),
             bounds: Vec::new(),
         });
@@ -11,9 +11,9 @@ use syntax::SmolStr;
 
 use crate::{
     db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
-    from_placeholder_idx, to_chalk_trait_id, AdtId, AliasEq, AliasTy, Binders, CallableDefId,
-    CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy, QuantifiedWhereClause,
-    Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
+    from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
+    CallableDefId, CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
+    QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
 };
 
 pub trait TyExt {
@@ -338,10 +338,13 @@ pub trait ProjectionTyExt {
 
 impl ProjectionTyExt for ProjectionTy {
     fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
-        TraitRef {
-            trait_id: to_chalk_trait_id(self.trait_(db)),
-            substitution: self.substitution.clone(),
-        }
+        // FIXME: something like `Split` trait from chalk-solve might be nice.
+        let generics = generics(db.upcast(), from_assoc_type_id(self.associated_ty_id).into());
+        let substitution = Substitution::from_iter(
+            Interner,
+            self.substitution.iter(Interner).skip(generics.len_self()),
+        );
+        TraitRef { trait_id: to_chalk_trait_id(self.trait_(db)), substitution }
     }
 
     fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
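
Note: after this change a projection's substitution is assumed to store the associated type's
own arguments first, followed by the trait's arguments (self type included), which is why
`trait_ref` skips the first `len_self()` entries. A toy model of that layout using plain slices
instead of chalk types (names are illustrative):

    // `<T as Foo<i32>>::Assoc<usize>`: `usize` belongs to `Assoc` itself, while `T` (Self) and
    // `i32` belong to the `Foo` trait reference.
    fn split_projection_args<'a>(
        all_args: &'a [&'a str],
        assoc_own_len: usize,
    ) -> (&'a [&'a str], &'a [&'a str]) {
        // First `assoc_own_len` entries are the associated type's own arguments;
        // the rest form the trait reference's substitution.
        all_args.split_at(assoc_own_len)
    }

    fn main() {
        let args = ["usize", "T", "i32"];
        let (assoc_args, trait_args) = split_projection_args(&args, 1);
        assert_eq!(assoc_args, &["usize"][..]);
        assert_eq!(trait_args, &["T", "i32"][..]);
    }
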
@@ -289,16 +289,18 @@ impl HirDisplay for ProjectionTy {
             return write!(f, "{}", TYPE_HINT_TRUNCATION);
         }
 
-        let trait_ = f.db.trait_data(self.trait_(f.db));
+        let trait_ref = self.trait_ref(f.db);
         write!(f, "<")?;
-        self.self_type_parameter(f.db).hir_fmt(f)?;
-        write!(f, " as {}", trait_.name)?;
-        if self.substitution.len(Interner) > 1 {
+        fmt_trait_ref(&trait_ref, f, true)?;
+        write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
+        let proj_params_count =
+            self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
+        let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count];
+        if !proj_params.is_empty() {
             write!(f, "<")?;
-            f.write_joined(&self.substitution.as_slice(Interner)[1..], ", ")?;
+            f.write_joined(proj_params, ", ")?;
             write!(f, ">")?;
         }
-        write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
         Ok(())
     }
 }
@@ -641,9 +643,12 @@ impl HirDisplay for Ty {
                 // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
                 if f.display_target.is_test() {
                     write!(f, "{}::{}", trait_.name, type_alias_data.name)?;
+                    // Note that the generic args for the associated type come before those for the
+                    // trait (including the self type).
+                    // FIXME: reconsider the generic args order upon formatting?
                     if parameters.len(Interner) > 0 {
                         write!(f, "<")?;
-                        f.write_joined(&*parameters.as_slice(Interner), ", ")?;
+                        f.write_joined(parameters.as_slice(Interner), ", ")?;
                         write!(f, ">")?;
                     }
                 } else {
@@ -972,9 +977,20 @@ fn write_bounds_like_dyn_trait(
                     angle_open = true;
                 }
                 if let AliasTy::Projection(proj) = alias {
-                    let type_alias =
-                        f.db.type_alias_data(from_assoc_type_id(proj.associated_ty_id));
-                    write!(f, "{} = ", type_alias.name)?;
+                    let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id);
+                    let type_alias = f.db.type_alias_data(assoc_ty_id);
+                    write!(f, "{}", type_alias.name)?;
+
+                    let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
+                    if proj_arg_count > 0 {
+                        write!(f, "<")?;
+                        f.write_joined(
+                            &proj.substitution.as_slice(Interner)[..proj_arg_count],
+                            ", ",
+                        )?;
+                        write!(f, ">")?;
+                    }
+                    write!(f, " = ")?;
                 }
                 ty.hir_fmt(f)?;
             }
@@ -157,7 +157,7 @@ impl<'a> InferenceContext<'a> {
             remaining_segments_for_ty,
             true,
         );
-        if let TyKind::Error = ty.kind(Interner) {
+        if ty.is_unknown() {
             return None;
         }
 
@@ -340,8 +340,8 @@ impl<'a> InferenceTable<'a> {
         self.resolve_with_fallback(t, &|_, _, d, _| d)
     }
 
-    /// Unify two types and register new trait goals that arise from that.
-    pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+    /// Unify two relatable values (e.g. `Ty`) and register new trait goals that arise from that.
+    pub(crate) fn unify<T: ?Sized + Zip<Interner>>(&mut self, ty1: &T, ty2: &T) -> bool {
         let result = match self.try_unify(ty1, ty2) {
             Ok(r) => r,
             Err(_) => return false,
@@ -350,9 +350,13 @@ impl<'a> InferenceTable<'a> {
         true
     }
 
-    /// Unify two types and return new trait goals arising from it, so the
+    /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
     /// caller needs to deal with them.
-    pub(crate) fn try_unify<T: Zip<Interner>>(&mut self, t1: &T, t2: &T) -> InferResult<()> {
+    pub(crate) fn try_unify<T: ?Sized + Zip<Interner>>(
+        &mut self,
+        t1: &T,
+        t2: &T,
+    ) -> InferResult<()> {
         match self.var_unification_table.relate(
             Interner,
             &self.db,
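
Note: relaxing `unify`/`try_unify` to any `T: ?Sized + Zip<Interner>` lets callers unify values
other than single types, e.g. whole trait references, which the reworked `find_matching_impl`
below relies on. A rough standalone analogy with made-up types (this is not the chalk `Zip` API):

    #[derive(Debug, PartialEq)]
    enum Ty {
        Var(u32),
        I32,
    }

    #[derive(Debug, PartialEq)]
    struct TraitRef {
        trait_name: &'static str,
        args: Vec<Ty>,
    }

    trait Relatable {
        fn zip_eq(&self, other: &Self) -> bool;
    }

    impl Relatable for Ty {
        fn zip_eq(&self, other: &Self) -> bool {
            // An inference variable unifies with anything; otherwise require equality.
            matches!(self, Ty::Var(_)) || matches!(other, Ty::Var(_)) || self == other
        }
    }

    impl Relatable for TraitRef {
        fn zip_eq(&self, other: &Self) -> bool {
            self.trait_name == other.trait_name
                && self.args.len() == other.args.len()
                && self.args.iter().zip(&other.args).all(|(a, b)| a.zip_eq(b))
        }
    }

    // Generic over any relatable value, like the new `unify<T: ?Sized + Zip<Interner>>`.
    fn unify<T: ?Sized + Relatable>(a: &T, b: &T) -> bool {
        a.zip_eq(b)
    }

    fn main() {
        let expected = TraitRef { trait_name: "Add", args: vec![Ty::I32, Ty::I32] };
        let from_impl = TraitRef { trait_name: "Add", args: vec![Ty::Var(0), Ty::I32] };
        assert!(unify(&from_impl, &expected));
    }
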
@@ -81,7 +81,20 @@ pub type PlaceholderIndex = chalk_ir::PlaceholderIndex;
 pub type VariableKind = chalk_ir::VariableKind<Interner>;
 pub type VariableKinds = chalk_ir::VariableKinds<Interner>;
 pub type CanonicalVarKinds = chalk_ir::CanonicalVarKinds<Interner>;
+/// Represents generic parameters and an item bound by them. When the item has parent, the binders
+/// also contain the generic parameters for its parent. See chalk's documentation for details.
+///
+/// One thing to keep in mind when working with `Binders` (and `Substitution`s, which represent
+/// generic arguments) in rust-analyzer is that the ordering within *is* significant - the generic
+/// parameters/arguments for an item MUST come before those for its parent. This is to facilitate
+/// the integration with chalk-solve, which mildly puts constraints as such. See #13335 for its
+/// motivation in detail.
 pub type Binders<T> = chalk_ir::Binders<T>;
+/// Interned list of generic arguments for an item. When an item has parent, the `Substitution` for
+/// it contains generic arguments for both its parent and itself. See chalk's documentation for
+/// details.
+///
+/// See `Binders` for the constraint on the ordering.
 pub type Substitution = chalk_ir::Substitution<Interner>;
 pub type GenericArg = chalk_ir::GenericArg<Interner>;
 pub type GenericArgData = chalk_ir::GenericArgData<Interner>;
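
Note: a tiny illustration of the ordering rule stated in the new doc comments, using plain
strings instead of a real `Substitution` (the item and parent names are hypothetical):

    fn main() {
        // For `impl<T> Foo<T> { fn method<U>(..) }` instantiated as `Foo<i32>::method::<bool>`,
        // the method's substitution lists its own argument first, then its parent's.
        let method_subst = ["bool" /* U, the method's own */, "i32" /* T, from the parent impl */];
        let parent_params = 1;
        let parent_from = method_subst.len() - parent_params;
        assert_eq!(&method_subst[parent_from..], &["i32"][..]);
    }
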
@@ -124,14 +137,6 @@ pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
 pub type Guidance = chalk_solve::Guidance<Interner>;
 pub type WhereClause = chalk_ir::WhereClause<Interner>;
 
-// FIXME: get rid of this
-pub fn subst_prefix(s: &Substitution, n: usize) -> Substitution {
-    Substitution::from_iter(
-        Interner,
-        s.as_slice(Interner)[..std::cmp::min(s.len(Interner), n)].iter().cloned(),
-    )
-}
-
 /// Return an index of a parameter in the generic type parameter list by it's id.
 pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
     generics(db.upcast(), id.parent).param_idx(id)
@@ -382,7 +387,6 @@ pub(crate) fn fold_tys_and_consts<T: HasInterner<Interner = Interner> + TypeFold
 pub fn replace_errors_with_variables<T>(t: &T) -> Canonical<T>
 where
     T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + Clone,
-    T: HasInterner<Interner = Interner>,
 {
     use chalk_ir::{
         fold::{FallibleTypeFolder, TypeSuperFoldable},
@@ -447,12 +447,31 @@ impl<'a> TyLoweringContext<'a> {
                     .db
                     .trait_data(trait_ref.hir_trait_id())
                     .associated_type_by_name(segment.name);
 
                 match found {
                     Some(associated_ty) => {
-                        // FIXME handle type parameters on the segment
+                        // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
+                        // generic params. It's inefficient to splice the `Substitution`s, so we may want
+                        // that method to optionally take parent `Substitution` as we already know them at
+                        // this point (`trait_ref.substitution`).
+                        let substitution = self.substs_from_path_segment(
+                            segment,
+                            Some(associated_ty.into()),
+                            false,
+                            None,
+                        );
+                        let len_self =
+                            generics(self.db.upcast(), associated_ty.into()).len_self();
+                        let substitution = Substitution::from_iter(
+                            Interner,
+                            substitution
+                                .iter(Interner)
+                                .take(len_self)
+                                .chain(trait_ref.substitution.iter(Interner)),
+                        );
                         TyKind::Alias(AliasTy::Projection(ProjectionTy {
                             associated_ty_id: to_assoc_type_id(associated_ty),
-                            substitution: trait_ref.substitution,
+                            substitution,
                         }))
                         .intern(Interner)
                     }
@@ -590,36 +609,48 @@ impl<'a> TyLoweringContext<'a> {
             res,
             Some(segment.name.clone()),
             move |name, t, associated_ty| {
-                if name == segment.name {
-                    let substs = match self.type_param_mode {
-                        ParamLoweringMode::Placeholder => {
-                            // if we're lowering to placeholders, we have to put
-                            // them in now
-                            let generics = generics(
-                                self.db.upcast(),
-                                self.resolver
-                                    .generic_def()
-                                    .expect("there should be generics if there's a generic param"),
-                            );
-                            let s = generics.placeholder_subst(self.db);
-                            s.apply(t.substitution.clone(), Interner)
-                        }
-                        ParamLoweringMode::Variable => t.substitution.clone(),
-                    };
-                    // We need to shift in the bound vars, since
-                    // associated_type_shorthand_candidates does not do that
-                    let substs = substs.shifted_in_from(Interner, self.in_binders);
-                    // FIXME handle type parameters on the segment
-                    Some(
-                        TyKind::Alias(AliasTy::Projection(ProjectionTy {
-                            associated_ty_id: to_assoc_type_id(associated_ty),
-                            substitution: substs,
-                        }))
-                        .intern(Interner),
-                    )
-                } else {
-                    None
+                if name != segment.name {
+                    return None;
                 }
 
+                // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
+                // generic params. It's inefficient to splice the `Substitution`s, so we may want
+                // that method to optionally take parent `Substitution` as we already know them at
+                // this point (`t.substitution`).
+                let substs = self.substs_from_path_segment(
+                    segment.clone(),
+                    Some(associated_ty.into()),
+                    false,
+                    None,
+                );
+
+                let len_self = generics(self.db.upcast(), associated_ty.into()).len_self();
+
+                let substs = Substitution::from_iter(
+                    Interner,
+                    substs.iter(Interner).take(len_self).chain(t.substitution.iter(Interner)),
+                );
+
+                let substs = match self.type_param_mode {
+                    ParamLoweringMode::Placeholder => {
+                        // if we're lowering to placeholders, we have to put
+                        // them in now
+                        let generics = generics(self.db.upcast(), def);
+                        let s = generics.placeholder_subst(self.db);
+                        s.apply(substs, Interner)
+                    }
+                    ParamLoweringMode::Variable => substs,
+                };
+                // We need to shift in the bound vars, since
+                // associated_type_shorthand_candidates does not do that
+                let substs = substs.shifted_in_from(Interner, self.in_binders);
+                Some(
+                    TyKind::Alias(AliasTy::Projection(ProjectionTy {
+                        associated_ty_id: to_assoc_type_id(associated_ty),
+                        substitution: substs,
+                    }))
+                    .intern(Interner),
+                )
             },
         );
 
@@ -777,7 +808,15 @@ impl<'a> TyLoweringContext<'a> {
         // handle defaults. In expression or pattern path segments without
         // explicitly specified type arguments, missing type arguments are inferred
         // (i.e. defaults aren't used).
-        if !infer_args || had_explicit_args {
+        // Generic parameters for associated types are not supposed to have defaults, so we just
+        // ignore them.
+        let is_assoc_ty = if let GenericDefId::TypeAliasId(id) = def {
+            let container = id.lookup(self.db.upcast()).container;
+            matches!(container, ItemContainerId::TraitId(_))
+        } else {
+            false
+        };
+        if !is_assoc_ty && (!infer_args || had_explicit_args) {
             let defaults = self.db.generic_defaults(def);
             assert_eq!(total_len, defaults.len());
             let parent_from = item_len - substs.len();
@@ -966,9 +1005,28 @@ impl<'a> TyLoweringContext<'a> {
             None => return SmallVec::new(),
             Some(t) => t,
         };
+        // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
+        // generic params. It's inefficient to splice the `Substitution`s, so we may want
+        // that method to optionally take parent `Substitution` as we already know them at
+        // this point (`super_trait_ref.substitution`).
+        let substitution = self.substs_from_path_segment(
+            // FIXME: This is hack. We shouldn't really build `PathSegment` directly.
+            PathSegment { name: &binding.name, args_and_bindings: binding.args.as_deref() },
+            Some(associated_ty.into()),
+            false, // this is not relevant
+            Some(super_trait_ref.self_type_parameter(Interner)),
+        );
+        let self_params = generics(self.db.upcast(), associated_ty.into()).len_self();
+        let substitution = Substitution::from_iter(
+            Interner,
+            substitution
+                .iter(Interner)
+                .take(self_params)
+                .chain(super_trait_ref.substitution.iter(Interner)),
+        );
         let projection_ty = ProjectionTy {
             associated_ty_id: to_assoc_type_id(associated_ty),
-            substitution: super_trait_ref.substitution,
+            substitution,
         };
         let mut preds: SmallVec<[_; 1]> = SmallVec::with_capacity(
             binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
@@ -22,10 +22,10 @@ use crate::{
     from_foreign_def_id,
     infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
     primitive::{FloatTy, IntTy, UintTy},
-    static_lifetime,
+    static_lifetime, to_chalk_trait_id,
     utils::all_super_traits,
     AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
-    Scalar, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+    Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
 };
 
 /// This is used as a key for indexing impls.
@@ -624,52 +624,76 @@ pub(crate) fn iterate_method_candidates<T>(
     slot
 }
 
+/// Looks up the impl method that actually runs for the trait method `func`.
+///
+/// Returns `func` if it's not a method defined in a trait or the lookup failed.
 pub fn lookup_impl_method(
-    self_ty: &Ty,
     db: &dyn HirDatabase,
     env: Arc<TraitEnvironment>,
-    trait_: TraitId,
+    func: FunctionId,
+    fn_subst: Substitution,
+) -> FunctionId {
+    let trait_id = match func.lookup(db.upcast()).container {
+        ItemContainerId::TraitId(id) => id,
+        _ => return func,
+    };
+    let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
+    let fn_params = fn_subst.len(Interner) - trait_params;
+    let trait_ref = TraitRef {
+        trait_id: to_chalk_trait_id(trait_id),
+        substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)),
+    };
+
+    let name = &db.function_data(func).name;
+    lookup_impl_method_for_trait_ref(trait_ref, db, env, name).unwrap_or(func)
+}
+
+fn lookup_impl_method_for_trait_ref(
+    trait_ref: TraitRef,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
     name: &Name,
 ) -> Option<FunctionId> {
-    let self_ty_fp = TyFingerprint::for_trait_impl(self_ty)?;
-    let trait_impls = db.trait_impls_in_deps(env.krate);
-    let impls = trait_impls.for_trait_and_self_ty(trait_, self_ty_fp);
-    let mut table = InferenceTable::new(db, env.clone());
-    find_matching_impl(impls, &mut table, &self_ty).and_then(|data| {
-        data.items.iter().find_map(|it| match it {
-            AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
-            _ => None,
-        })
+    let self_ty = trait_ref.self_type_parameter(Interner);
+    let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
+    let impls = db.trait_impls_in_deps(env.krate);
+    let impls = impls.for_trait_and_self_ty(trait_ref.hir_trait_id(), self_ty_fp);
+    let table = InferenceTable::new(db, env);
+    let impl_data = find_matching_impl(impls, table, trait_ref)?;
+    impl_data.items.iter().find_map(|it| match it {
+        AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
+        _ => None,
     })
 }
 
 fn find_matching_impl(
     mut impls: impl Iterator<Item = ImplId>,
-    table: &mut InferenceTable<'_>,
-    self_ty: &Ty,
+    mut table: InferenceTable<'_>,
+    actual_trait_ref: TraitRef,
 ) -> Option<Arc<ImplData>> {
     let db = table.db;
     loop {
         let impl_ = impls.next()?;
         let r = table.run_in_snapshot(|table| {
             let impl_data = db.impl_data(impl_);
-            let substs =
+            let impl_substs =
                 TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build();
-            let impl_ty = db.impl_self_ty(impl_).substitute(Interner, &substs);
+            let trait_ref = db
+                .impl_trait(impl_)
+                .expect("non-trait method in find_matching_impl")
+                .substitute(Interner, &impl_substs);
 
-            table
-                .unify(self_ty, &impl_ty)
-                .then(|| {
-                    let wh_goals =
-                        crate::chalk_db::convert_where_clauses(db, impl_.into(), &substs)
-                            .into_iter()
-                            .map(|b| b.cast(Interner));
+            if !table.unify(&trait_ref, &actual_trait_ref) {
+                return None;
+            }
 
-                    let goal = crate::Goal::all(Interner, wh_goals);
-                    table.try_obligation(goal).map(|_| impl_data)
-                })
-                .flatten()
+            let wcs = crate::chalk_db::convert_where_clauses(db, impl_.into(), &impl_substs)
+                .into_iter()
+                .map(|b| b.cast(Interner));
+            let goal = crate::Goal::all(Interner, wcs);
+            table.try_obligation(goal).map(|_| impl_data)
         });
         if r.is_some() {
            break r;
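
Note: `lookup_impl_method` now takes the trait method plus its full substitution and rebuilds
the trait reference itself, so impl selection can distinguish impls that differ only in their
trait arguments rather than in the self type. The standalone program below shows the
language-level situation this resolves (it mirrors the `path_call_multiple_trait_impl` test
added later in this commit):

    trait Trait<T> {
        fn f(_: T) -> &'static str;
    }

    impl Trait<i32> for usize {
        fn f(_: i32) -> &'static str {
            "impl Trait<i32> for usize"
        }
    }

    impl Trait<i64> for usize {
        fn f(_: i64) -> &'static str {
            "impl Trait<i64> for usize"
        }
    }

    fn main() {
        // Both impls share the self type `usize`; only the full trait ref
        // (`usize: Trait<i32>` vs `usize: Trait<i64>`) picks the right method.
        assert_eq!(<usize as Trait<i32>>::f(0i32), "impl Trait<i32> for usize");
        assert_eq!(<usize as Trait<i64>>::f(0i64), "impl Trait<i64> for usize");
    }
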
@@ -1214,7 +1238,7 @@ fn is_valid_fn_candidate(
             let expected_receiver =
                 sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst);
 
-            check_that!(table.unify(&receiver_ty, &expected_receiver));
+            check_that!(table.unify(receiver_ty, &expected_receiver));
         }
 
         if let ItemContainerId::ImplId(impl_id) = container {
@@ -196,3 +196,34 @@ fn test(
 "#,
     );
 }
+
+#[test]
+fn projection_type_correct_arguments_order() {
+    check_types_source_code(
+        r#"
+trait Foo<T> {
+    type Assoc<U>;
+}
+fn f<T: Foo<i32>>(a: T::Assoc<usize>) {
+    a;
+  //^ <T as Foo<i32>>::Assoc<usize>
+}
+"#,
+    );
+}
+
+#[test]
+fn generic_associated_type_binding_in_impl_trait() {
+    check_types_source_code(
+        r#"
+//- minicore: sized
+trait Foo<T> {
+    type Assoc<U>;
+}
+fn f(a: impl Foo<i8, Assoc<i16> = i32>) {
+    a;
+  //^ impl Foo<i8, Assoc<i16> = i32>
+}
+"#,
+    );
+}
@@ -3963,3 +3963,124 @@ fn g(t: &(dyn T + Send)) {
 "#,
     );
 }
+
+#[test]
+fn gats_in_path() {
+    check_types(
+        r#"
+//- minicore: deref
+use core::ops::Deref;
+trait PointerFamily {
+    type Pointer<T>: Deref<Target = T>;
+}
+
+fn f<P: PointerFamily>(p: P::Pointer<i32>) {
+    let a = *p;
+      //^ i32
+}
+fn g<P: PointerFamily>(p: <P as PointerFamily>::Pointer<i32>) {
+    let a = *p;
+      //^ i32
+}
+"#,
+    );
+}
+
+#[test]
+fn gats_with_impl_trait() {
+    // FIXME: the last function (`fn i()`) is not valid Rust as of this writing because you cannot
+    // specify the same associated type multiple times even if their arguments are different (c.f.
+    // `fn h()`, which is valid). Reconsider how to treat these invalid types.
+    check_types(
+        r#"
+//- minicore: deref
+use core::ops::Deref;
+
+trait Trait {
+    type Assoc<T>: Deref<Target = T>;
+    fn get<U>(&self) -> Self::Assoc<U>;
+}
+
+fn f<T>(v: impl Trait) {
+    let a = v.get::<i32>().deref();
+      //^ &i32
+    let a = v.get::<T>().deref();
+      //^ &T
+}
+fn g<'a, T: 'a>(v: impl Trait<Assoc<T> = &'a T>) {
+    let a = v.get::<T>();
+      //^ &T
+    let a = v.get::<()>();
+      //^ Trait::Assoc<(), impl Trait<Assoc<T> = &T>>
+}
+fn h<'a>(v: impl Trait<Assoc<i32> = &'a i32> + Trait<Assoc<i64> = &'a i64>) {
+    let a = v.get::<i32>();
+      //^ &i32
+    let a = v.get::<i64>();
+      //^ &i64
+}
+fn i<'a>(v: impl Trait<Assoc<i32> = &'a i32, Assoc<i64> = &'a i64>) {
+    let a = v.get::<i32>();
+      //^ &i32
+    let a = v.get::<i64>();
+      //^ &i64
+}
+"#,
+    );
+}
+
+#[test]
+fn gats_with_dyn() {
+    // This test is here to keep track of how we infer things despite traits with GATs being not
+    // object-safe currently.
+    // FIXME: reconsider how to treat these invalid types.
+    check_infer_with_mismatches(
+        r#"
+//- minicore: deref
+use core::ops::Deref;
+
+trait Trait {
+    type Assoc<T>: Deref<Target = T>;
+    fn get<U>(&self) -> Self::Assoc<U>;
+}
+
+fn f<'a>(v: &dyn Trait<Assoc<i32> = &'a i32>) {
+    v.get::<i32>().deref();
+}
+"#,
+        expect![[r#"
+            90..94 'self': &Self
+            127..128 'v': &(dyn Trait<Assoc<i32> = &i32>)
+            164..195 '{ ...f(); }': ()
+            170..171 'v': &(dyn Trait<Assoc<i32> = &i32>)
+            170..184 'v.get::<i32>()': &i32
+            170..192 'v.get:...eref()': &i32
+        "#]],
+    );
+}
+
+#[test]
+fn gats_in_associated_type_binding() {
+    check_types(
+        r#"
+trait Trait {
+    type Assoc<T>;
+    fn get<U>(&self) -> Self::Assoc<U>;
+}
+
+fn f<T>(t: T)
+where
+    T: Trait<Assoc<i32> = u32>,
+    T: Trait<Assoc<isize> = usize>,
+{
+    let a = t.get::<i32>();
+      //^ u32
+    let a = t.get::<isize>();
+      //^ usize
+    let a = t.get::<()>();
+      //^ Trait::Assoc<(), T>
+}
+
+"#,
+    );
+}
@@ -5,7 +5,7 @@ use itertools::Itertools;
 
 use crate::{
     chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk,
-    CallableDefId, Interner,
+    CallableDefId, Interner, ProjectionTyExt,
 };
 use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId};
 
@@ -63,17 +63,31 @@ impl DebugContext<'_> {
             ItemContainerId::TraitId(t) => t,
             _ => panic!("associated type not in trait"),
         };
-        let trait_data = self.0.trait_data(trait_);
-        let params = projection_ty.substitution.as_slice(Interner);
-        write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
-        if params.len() > 1 {
+        let trait_name = &self.0.trait_data(trait_).name;
+        let trait_ref = projection_ty.trait_ref(self.0);
+        let trait_params = trait_ref.substitution.as_slice(Interner);
+        let self_ty = trait_ref.self_type_parameter(Interner);
+        write!(fmt, "<{:?} as {}", self_ty, trait_name)?;
+        if trait_params.len() > 1 {
             write!(
                 fmt,
                 "<{}>",
-                &params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
+                trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
             )?;
         }
-        write!(fmt, ">::{}", type_alias_data.name)
+        write!(fmt, ">::{}", type_alias_data.name)?;
+
+        let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len();
+        let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count];
+        if !proj_params.is_empty() {
+            write!(
+                fmt,
+                "<{}>",
+                proj_params.iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
+            )?;
+        }
+
+        Ok(())
     }
 
     pub(crate) fn debug_fn_def_id(
@@ -270,7 +270,7 @@ impl SourceAnalyzer {
         let expr_id = self.expr_id(db, &call.clone().into())?;
         let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
 
-        Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, &substs))
+        Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
     }
 
     pub(crate) fn resolve_await_to_poll(
@@ -311,7 +311,7 @@ impl SourceAnalyzer {
         // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself
         // doesn't have any generic parameters, so we skip building another subst for `poll()`.
         let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build();
-        Some(self.resolve_impl_method_or_trait_def(db, poll_fn, &substs))
+        Some(self.resolve_impl_method_or_trait_def(db, poll_fn, substs))
     }
 
     pub(crate) fn resolve_prefix_expr(
@@ -331,7 +331,7 @@ impl SourceAnalyzer {
         // don't have any generic parameters, so we skip building another subst for the methods.
         let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
 
-        Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
+        Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
     }
 
     pub(crate) fn resolve_index_expr(
@@ -351,7 +351,7 @@ impl SourceAnalyzer {
             .push(base_ty.clone())
             .push(index_ty.clone())
             .build();
-        Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
+        Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
     }
 
     pub(crate) fn resolve_bin_expr(
@@ -372,7 +372,7 @@ impl SourceAnalyzer {
             .push(rhs.clone())
             .build();
 
-        Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
+        Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
     }
 
     pub(crate) fn resolve_try_expr(
@@ -392,7 +392,7 @@ impl SourceAnalyzer {
         // doesn't have any generic parameters, so we skip building another subst for `branch()`.
         let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
 
-        Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
+        Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
     }
 
     pub(crate) fn resolve_field(
@@ -487,9 +487,9 @@ impl SourceAnalyzer {
 
         let mut prefer_value_ns = false;
         let resolved = (|| {
+            let infer = self.infer.as_deref()?;
             if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) {
                 let expr_id = self.expr_id(db, &path_expr.into())?;
-                let infer = self.infer.as_ref()?;
                 if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) {
                     let assoc = match assoc {
                         AssocItemId::FunctionId(f_in_trait) => {
@@ -497,9 +497,12 @@ impl SourceAnalyzer {
                             None => assoc,
                             Some(func_ty) => {
                                 if let TyKind::FnDef(_fn_def, subs) = func_ty.kind(Interner) {
-                                    self.resolve_impl_method(db, f_in_trait, subs)
-                                        .map(AssocItemId::FunctionId)
-                                        .unwrap_or(assoc)
+                                    self.resolve_impl_method_or_trait_def(
+                                        db,
+                                        f_in_trait,
+                                        subs.clone(),
+                                    )
+                                    .into()
                                 } else {
                                     assoc
                                 }
@@ -520,18 +523,18 @@ impl SourceAnalyzer {
                 prefer_value_ns = true;
             } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
                 let pat_id = self.pat_id(&path_pat.into())?;
-                if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
+                if let Some(assoc) = infer.assoc_resolutions_for_pat(pat_id) {
                     return Some(PathResolution::Def(AssocItem::from(assoc).into()));
                 }
                 if let Some(VariantId::EnumVariantId(variant)) =
-                    self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
+                    infer.variant_resolution_for_pat(pat_id)
                 {
                     return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
                 }
             } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) {
                 let expr_id = self.expr_id(db, &rec_lit.into())?;
                 if let Some(VariantId::EnumVariantId(variant)) =
-                    self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
+                    infer.variant_resolution_for_expr(expr_id)
                 {
                     return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
                 }
@@ -541,8 +544,7 @@ impl SourceAnalyzer {
                 || parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
             if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
                 let pat_id = self.pat_id(&pat)?;
-                let variant_res_for_pat =
-                    self.infer.as_ref()?.variant_resolution_for_pat(pat_id);
+                let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id);
                 if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
                     return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
                 }
@@ -780,37 +782,22 @@ impl SourceAnalyzer {
         false
     }
 
-    fn resolve_impl_method(
-        &self,
-        db: &dyn HirDatabase,
-        func: FunctionId,
-        substs: &Substitution,
-    ) -> Option<FunctionId> {
-        let impled_trait = match func.lookup(db.upcast()).container {
-            ItemContainerId::TraitId(trait_id) => trait_id,
-            _ => return None,
-        };
-        if substs.is_empty(Interner) {
-            return None;
-        }
-        let self_ty = substs.at(Interner, 0).ty(Interner)?;
-        let krate = self.resolver.krate();
-        let trait_env = self.resolver.body_owner()?.as_generic_def_id().map_or_else(
-            || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
-            |d| db.trait_environment(d),
-        );
-
-        let fun_data = db.function_data(func);
-        method_resolution::lookup_impl_method(self_ty, db, trait_env, impled_trait, &fun_data.name)
-    }
-
     fn resolve_impl_method_or_trait_def(
         &self,
         db: &dyn HirDatabase,
         func: FunctionId,
-        substs: &Substitution,
+        substs: Substitution,
     ) -> FunctionId {
-        self.resolve_impl_method(db, func, substs).unwrap_or(func)
+        let krate = self.resolver.krate();
+        let owner = match self.resolver.body_owner() {
+            Some(it) => it,
+            None => return func,
+        };
+        let env = owner.as_generic_def_id().map_or_else(
+            || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
+            |d| db.trait_environment(d),
+        );
+        method_resolution::lookup_impl_method(db, env, func, substs)
     }
 
     fn lang_trait_fn(
@@ -1834,4 +1834,86 @@ fn f() {
 "#,
     );
 }
+
+#[test]
+fn goto_bin_op_multiple_impl() {
+    check(
+        r#"
+//- minicore: add
+struct S;
+impl core::ops::Add for S {
+    fn add(
+     //^^^
+    ) {}
+}
+impl core::ops::Add<usize> for S {
+    fn add(
+    ) {}
+}
+
+fn f() {
+    S +$0 S
+}
+"#,
+    );
+
+    check(
+        r#"
+//- minicore: add
+struct S;
+impl core::ops::Add for S {
+    fn add(
+    ) {}
+}
+impl core::ops::Add<usize> for S {
+    fn add(
+     //^^^
+    ) {}
+}
+
+fn f() {
+    S +$0 0usize
+}
+"#,
+    );
+}
+
+#[test]
+fn path_call_multiple_trait_impl() {
+    check(
+        r#"
+trait Trait<T> {
+    fn f(_: T);
+}
+impl Trait<i32> for usize {
+    fn f(_: i32) {}
+     //^
+}
+impl Trait<i64> for usize {
+    fn f(_: i64) {}
+}
+fn main() {
+    usize::f$0(0i32);
+}
+"#,
+    );
+
+    check(
+        r#"
+trait Trait<T> {
+    fn f(_: T);
+}
+impl Trait<i32> for usize {
+    fn f(_: i32) {}
+}
+impl Trait<i64> for usize {
+    fn f(_: i64) {}
+     //^
+}
+fn main() {
+    usize::f$0(0i64);
+}
+"#,
+    )
+}
 }
@@ -23,7 +23,7 @@ crossbeam-channel = "0.5.5"
 dissimilar = "1.0.4"
 itertools = "0.10.5"
 scip = "0.1.1"
-lsp-types = { version = "0.93.1", features = ["proposed"] }
+lsp-types = { version = "=0.93.2", features = ["proposed"] }
 parking_lot = "0.12.1"
 xflags = "0.3.0"
 oorandom = "11.1.3"
@@ -11,7 +11,7 @@ use std::{env, fs, path::Path, process};
 
 use lsp_server::Connection;
 use project_model::ProjectManifest;
-use rust_analyzer::{cli::flags, config::Config, from_json, lsp_ext::supports_utf8, Result};
+use rust_analyzer::{cli::flags, config::Config, from_json, Result};
 use vfs::AbsPathBuf;
 
 #[cfg(all(feature = "mimalloc"))]
@@ -191,11 +191,7 @@ fn run_server() -> Result<()> {
             name: String::from("rust-analyzer"),
             version: Some(rust_analyzer::version().to_string()),
         }),
-        offset_encoding: if supports_utf8(config.caps()) {
-            Some("utf-8".to_string())
-        } else {
-            None
-        },
+        offset_encoding: None,
     };
 
     let initialize_result = serde_json::to_value(initialize_result).unwrap();
@@ -6,19 +6,25 @@ use lsp_types::{
     FileOperationFilter, FileOperationPattern, FileOperationPatternKind,
     FileOperationRegistrationOptions, FoldingRangeProviderCapability, HoverProviderCapability,
     ImplementationProviderCapability, InlayHintOptions, InlayHintServerCapabilities, OneOf,
-    RenameOptions, SaveOptions, SelectionRangeProviderCapability, SemanticTokensFullOptions,
-    SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions,
-    TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
-    TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+    PositionEncodingKind, RenameOptions, SaveOptions, SelectionRangeProviderCapability,
+    SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities,
+    SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
+    TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
     WorkspaceFileOperationsServerCapabilities, WorkspaceServerCapabilities,
 };
 use serde_json::json;
 
 use crate::config::{Config, RustfmtConfig};
+use crate::lsp_ext::supports_utf8;
 use crate::semantic_tokens;
 
 pub fn server_capabilities(config: &Config) -> ServerCapabilities {
     ServerCapabilities {
+        position_encoding: if supports_utf8(config.caps()) {
+            Some(PositionEncodingKind::UTF8)
+        } else {
+            None
+        },
         text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
             open_close: Some(true),
             change: Some(TextDocumentSyncKind::INCREMENTAL),
@@ -20,7 +20,7 @@ use crate::cli::{
     load_cargo::{load_workspace, LoadCargoConfig},
     Result,
 };
-use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
+use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
 use crate::to_proto;
 use crate::version::version;
 
@@ -126,7 +126,7 @@ impl LsifManager<'_> {
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
             index: line_index,
-            encoding: OffsetEncoding::Utf16,
+            encoding: PositionEncoding::Utf16,
             endings: LineEndings::Unix,
         };
         let range_id = self.add_vertex(lsif::Vertex::Range {
@@ -248,7 +248,7 @@ impl LsifManager<'_> {
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
             index: line_index,
-            encoding: OffsetEncoding::Utf16,
+            encoding: PositionEncoding::Utf16,
             endings: LineEndings::Unix,
         };
         let result = folds
@@ -5,7 +5,7 @@ use std::{
     time::Instant,
 };
 
-use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
+use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
 use hir::Name;
 use ide::{
     LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
@@ -91,7 +91,7 @@ impl flags::Scip {
 
             let line_index = LineIndex {
                 index: db.line_index(file_id),
-                encoding: OffsetEncoding::Utf8,
+                encoding: PositionEncoding::Utf8,
                 endings: LineEndings::Unix,
             };
 
@@ -32,7 +32,7 @@ use vfs::AbsPathBuf;
 use crate::{
     caps::completion_item_edit_resolve,
     diagnostics::DiagnosticsMapConfig,
-    line_index::OffsetEncoding,
+    line_index::PositionEncoding,
     lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
 };
 
@@ -948,11 +948,11 @@ impl Config {
             .is_some()
     }
 
-    pub fn offset_encoding(&self) -> OffsetEncoding {
+    pub fn position_encoding(&self) -> PositionEncoding {
         if supports_utf8(&self.caps) {
-            OffsetEncoding::Utf8
+            PositionEncoding::Utf8
         } else {
-            OffsetEncoding::Utf16
+            PositionEncoding::Utf16
         }
     }
 
@@ -8,7 +8,7 @@ use stdx::format_to;
 use vfs::{AbsPath, AbsPathBuf};
 
 use crate::{
-    global_state::GlobalStateSnapshot, line_index::OffsetEncoding, lsp_ext,
+    global_state::GlobalStateSnapshot, line_index::PositionEncoding, lsp_ext,
     to_proto::url_from_abs_path,
 };
 
@@ -66,17 +66,17 @@ fn location(
     let uri = url_from_abs_path(&file_name);
 
     let range = {
-        let offset_encoding = snap.config.offset_encoding();
+        let position_encoding = snap.config.position_encoding();
         lsp_types::Range::new(
-            position(&offset_encoding, span, span.line_start, span.column_start),
-            position(&offset_encoding, span, span.line_end, span.column_end),
+            position(&position_encoding, span, span.line_start, span.column_start),
+            position(&position_encoding, span, span.line_end, span.column_end),
        )
     };
     lsp_types::Location::new(uri, range)
 }
 
 fn position(
-    offset_encoding: &OffsetEncoding,
+    position_encoding: &PositionEncoding,
     span: &DiagnosticSpan,
     line_offset: usize,
     column_offset: usize,
@ -93,9 +93,9 @@ fn position(
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
let mut char_offset = 0;
|
let mut char_offset = 0;
|
||||||
let len_func = match offset_encoding {
|
let len_func = match position_encoding {
|
||||||
OffsetEncoding::Utf8 => char::len_utf8,
|
PositionEncoding::Utf8 => char::len_utf8,
|
||||||
OffsetEncoding::Utf16 => char::len_utf16,
|
PositionEncoding::Utf16 => char::len_utf16,
|
||||||
};
|
};
|
||||||
for c in line.text.chars() {
|
for c in line.text.chars() {
|
||||||
char_offset += 1;
|
char_offset += 1;
|
||||||
|
|
|
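
For illustration (not part of the diff): the `len_func` selection above is the whole difference between the two encodings when cargo's character columns are mapped. A minimal, self-contained sketch with a made-up line shows how the same prefix counts differently:

```rust
// Standalone sketch: the sample line is invented for illustration.
fn main() {
    let line = "let s = \"é\"; |";
    let marker = line.find('|').unwrap();
    let prefix = &line[..marker];
    // Count the code units before the `|` marker under both encodings.
    let utf8_units: usize = prefix.chars().map(char::len_utf8).sum();
    let utf16_units: usize = prefix.chars().map(char::len_utf16).sum();
    assert_eq!(utf8_units, 14); // 'é' is two bytes in UTF-8,
    assert_eq!(utf16_units, 13); // but a single UTF-16 code unit.
    println!("utf8 column: {utf8_units}, utf16 column: {utf16_units}");
}
```
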
@@ -8,7 +8,7 @@ use vfs::AbsPathBuf;
 use crate::{
     from_json,
     global_state::GlobalStateSnapshot,
-    line_index::{LineIndex, OffsetEncoding},
+    line_index::{LineIndex, PositionEncoding},
     lsp_ext,
     lsp_utils::invalid_params_error,
     Result,
@@ -25,10 +25,10 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {

 pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
     let line_col = match line_index.encoding {
-        OffsetEncoding::Utf8 => {
+        PositionEncoding::Utf8 => {
             LineCol { line: position.line as u32, col: position.character as u32 }
         }
-        OffsetEncoding::Utf16 => {
+        PositionEncoding::Utf16 => {
             let line_col =
                 LineColUtf16 { line: position.line as u32, col: position.character as u32 };
             line_index.index.to_utf8(line_col)
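
For illustration (not part of the diff): a small sketch of the `ide::LineIndex` conversions that `offset` relies on; `line_col`, `to_utf16`, and `to_utf8` are the calls visible above, while the sample text and columns are made up.

```rust
// Standalone sketch built on the ide::LineIndex API used in from_proto above.
fn roundtrip_example() {
    let index = ide::LineIndex::new("let π = 3;\n");
    // '=' sits at UTF-8 column 7 because 'π' occupies two bytes...
    let utf8 = ide::LineCol { line: 0, col: 7 };
    let utf16 = index.to_utf16(utf8);
    assert_eq!(utf16.col, 6); // ...but only one UTF-16 code unit.
    assert_eq!(index.to_utf8(utf16).col, 7);
}
```
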
@@ -383,7 +383,7 @@ impl GlobalStateSnapshot {
     pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
         let endings = self.vfs.read().1[&file_id];
         let index = self.analysis.file_line_index(file_id)?;
-        let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };
+        let res = LineIndex { index, endings, encoding: self.config.position_encoding() };
         Ok(res)
     }

@@ -7,7 +7,7 @@

 use std::sync::Arc;

-pub enum OffsetEncoding {
+pub enum PositionEncoding {
     Utf8,
     Utf16,
 }
@@ -15,7 +15,7 @@ pub enum OffsetEncoding {
 pub(crate) struct LineIndex {
     pub(crate) index: Arc<ide::LineIndex>,
     pub(crate) endings: LineEndings,
-    pub(crate) encoding: OffsetEncoding,
+    pub(crate) encoding: PositionEncoding,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -3,6 +3,7 @@
 use std::{collections::HashMap, path::PathBuf};

 use lsp_types::request::Request;
+use lsp_types::PositionEncodingKind;
 use lsp_types::{
     notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
     PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
@@ -455,7 +456,15 @@ pub(crate) enum CodeLensResolveData {
 }

 pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool {
-    caps.offset_encoding.as_deref().unwrap_or_default().iter().any(|it| it == "utf-8")
+    match &caps.general {
+        Some(general) => general
+            .position_encodings
+            .as_deref()
+            .unwrap_or_default()
+            .iter()
+            .any(|it| it == &PositionEncodingKind::UTF8),
+        _ => false,
+    }
 }

 pub enum MoveItem {}
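
For reference (not part of the diff): a hedged sketch of the client capabilities that make the new `supports_utf8` return true. The field names follow lsp-types 0.93.2; treat the exact shape as an assumption if you are on another version of the crate.

```rust
use lsp_types::{ClientCapabilities, GeneralClientCapabilities, PositionEncodingKind};

// A client advertising UTF-8 support, with UTF-16 as the mandatory fallback.
fn utf8_capable_client() -> ClientCapabilities {
    ClientCapabilities {
        general: Some(GeneralClientCapabilities {
            position_encodings: Some(vec![
                PositionEncodingKind::UTF8,
                PositionEncodingKind::UTF16,
            ]),
            ..Default::default()
        }),
        ..Default::default()
    }
}
```
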
@@ -6,7 +6,7 @@ use lsp_server::Notification;
 use crate::{
     from_proto,
     global_state::GlobalState,
-    line_index::{LineEndings, LineIndex, OffsetEncoding},
+    line_index::{LineEndings, LineIndex, PositionEncoding},
     LspError,
 };

@@ -140,7 +140,7 @@ pub(crate) fn apply_document_changes(
         index: Arc::new(ide::LineIndex::new(old_text)),
         // We don't care about line endings or offset encoding here.
         endings: LineEndings::Unix,
-        encoding: OffsetEncoding::Utf16,
+        encoding: PositionEncoding::Utf16,
     };

     // The changes we got must be applied sequentially, but can cross lines so we
@@ -607,30 +607,34 @@ impl GlobalState {

     /// Handles a request.
     fn on_request(&mut self, req: Request) {
-        if self.shutdown_requested {
-            self.respond(lsp_server::Response::new_err(
-                req.id,
-                lsp_server::ErrorCode::InvalidRequest as i32,
-                "Shutdown already requested.".to_owned(),
-            ));
-            return;
+        let mut dispatcher = RequestDispatcher { req: Some(req), global_state: self };
+        dispatcher.on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
+            s.shutdown_requested = true;
+            Ok(())
+        });
+
+        if let RequestDispatcher { req: Some(req), global_state: this } = &mut dispatcher {
+            if this.shutdown_requested {
+                this.respond(lsp_server::Response::new_err(
+                    req.id.clone(),
+                    lsp_server::ErrorCode::InvalidRequest as i32,
+                    "Shutdown already requested.".to_owned(),
+                ));
+                return;
+            }
+
+            // Avoid flashing a bunch of unresolved references during initial load.
+            if this.workspaces.is_empty() && !this.is_quiescent() {
+                this.respond(lsp_server::Response::new_err(
+                    req.id.clone(),
+                    lsp_server::ErrorCode::ContentModified as i32,
+                    "waiting for cargo metadata or cargo check".to_owned(),
+                ));
+                return;
+            }
         }

-        // Avoid flashing a bunch of unresolved references during initial load.
-        if self.workspaces.is_empty() && !self.is_quiescent() {
-            self.respond(lsp_server::Response::new_err(
-                req.id,
-                lsp_server::ErrorCode::ContentModified as i32,
-                "waiting for cargo metadata or cargo check".to_owned(),
-            ));
-            return;
-        }
-
-        RequestDispatcher { req: Some(req), global_state: self }
-            .on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
-                s.shutdown_requested = true;
-                Ok(())
-            })
+        dispatcher
             .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
             .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
            .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
@@ -21,7 +21,7 @@ use crate::{
     cargo_target_spec::CargoTargetSpec,
     config::{CallInfoConfig, Config},
     global_state::GlobalStateSnapshot,
-    line_index::{LineEndings, LineIndex, OffsetEncoding},
+    line_index::{LineEndings, LineIndex, PositionEncoding},
     lsp_ext,
     lsp_utils::invalid_params_error,
     semantic_tokens, Result,
@@ -30,8 +30,8 @@ use crate::{
 pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
     let line_col = line_index.index.line_col(offset);
     match line_index.encoding {
-        OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
-        OffsetEncoding::Utf16 => {
+        PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
+        PositionEncoding::Utf16 => {
             let line_col = line_index.index.to_utf16(line_col);
             lsp_types::Position::new(line_col.line, line_col.col)
         }
@@ -1394,7 +1394,7 @@ fn main() {
         let line_index = LineIndex {
             index: Arc::new(ide::LineIndex::new(text)),
             endings: LineEndings::Unix,
-            encoding: OffsetEncoding::Utf16,
+            encoding: PositionEncoding::Utf16,
         };
         let converted: Vec<lsp_types::FoldingRange> =
             folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
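
For illustration (not part of the diff): a sketch of a unit test, in the spirit of the folding-range test above, that pins down how `position` depends on the chosen encoding. The sample text is made up and the test itself is not in this commit.

```rust
#[test]
fn position_depends_on_encoding() {
    let text = "µs";
    let end = TextSize::of(text);
    let line_index = LineIndex {
        index: Arc::new(ide::LineIndex::new(text)),
        endings: LineEndings::Unix,
        encoding: PositionEncoding::Utf8,
    };
    assert_eq!(position(&line_index, end).character, 3); // 'µ' is 2 UTF-8 bytes, plus 's'
    let line_index = LineIndex { encoding: PositionEncoding::Utf16, ..line_index };
    assert_eq!(position(&line_index, end).character, 2); // 2 UTF-16 code units
}
```
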
@@ -51,7 +51,7 @@ TypeArg =
   Type

 AssocTypeArg =
-  NameRef GenericParamList? (':' TypeBoundList | ('=' Type | ConstArg))
+  NameRef GenericArgList? (':' TypeBoundList | ('=' Type | ConstArg))

 LifetimeArg =
   Lifetime
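
For context (not part of the diff): the `GenericArgList` now allowed after the associated type name is what lets bounds on generic associated types parse. A made-up example of the accepted surface syntax:

```rust
// Illustrative only: `Item<'text> = &'text str` is an AssocTypeArg whose
// NameRef is followed by a GenericArgList, the shape the updated rule covers.
trait Lending {
    type Item<'a>;
}

fn first_line<'text, P>(parser: &mut P, text: &'text str) -> Option<&'text str>
where
    P: Lending<Item<'text> = &'text str>,
{
    let _ = (parser, text);
    None
}
```
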
@@ -120,7 +120,7 @@ pub struct AssocTypeArg {
 impl ast::HasTypeBounds for AssocTypeArg {}
 impl AssocTypeArg {
     pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
-    pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+    pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
     pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
     pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
     pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) }
@@ -142,16 +142,6 @@ impl ConstArg {
     pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
 }

-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct GenericParamList {
-    pub(crate) syntax: SyntaxNode,
-}
-impl GenericParamList {
-    pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
-    pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
-    pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
-}
-
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct TypeBoundList {
     pub(crate) syntax: SyntaxNode,
@@ -527,6 +517,16 @@ impl Abi {
     pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
 }

+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericParamList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl GenericParamList {
+    pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+    pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
+    pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct WhereClause {
     pub(crate) syntax: SyntaxNode,
@@ -1834,17 +1834,6 @@ impl AstNode for ConstArg {
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
-impl AstNode for GenericParamList {
-    fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
-    fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
-    }
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
 impl AstNode for TypeBoundList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
@@ -2153,6 +2142,17 @@ impl AstNode for Abi {
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl AstNode for GenericParamList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
 impl AstNode for WhereClause {
     fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
@@ -4263,11 +4263,6 @@ impl std::fmt::Display for ConstArg {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
-impl std::fmt::Display for GenericParamList {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        std::fmt::Display::fmt(self.syntax(), f)
-    }
-}
 impl std::fmt::Display for TypeBoundList {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
@@ -4408,6 +4403,11 @@ impl std::fmt::Display for Abi {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
+impl std::fmt::Display for GenericParamList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
 impl std::fmt::Display for WhereClause {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
@@ -479,7 +479,9 @@ It is not cheap enough to enable in prod, and this is a bug which should be fixe
 ### Configurability

 rust-analyzer strives to be as configurable as possible while offering reasonable defaults where no configuration exists yet.
+The rule of thumb is to enable most features by default unless they are buggy or degrade performance too much.
 There will always be features that some people find more annoying than helpful, so giving the users the ability to tweak or disable these is a big part of offering a good user experience.
+Enabling them by default is a matter of discoverability, as many users don't know about some features even though they are presented in the manual.
 Mind the code--architecture gap: at the moment, we are using fewer feature flags than we really should.

 ### Serialization
@@ -1,5 +1,5 @@
 <!---
-lsp_ext.rs hash: 7b710095d773b978
+lsp_ext.rs hash: 62068e53ac202dc8

 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
@@ -19,12 +19,6 @@ Requests, which are likely to always remain specific to `rust-analyzer` are unde

 If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).

-## UTF-8 offsets
-
-rust-analyzer supports clangd's extension for opting into UTF-8 as the coordinate space for offsets (by default, LSP uses UTF-16 offsets).
-
-https://clangd.llvm.org/extensions.html#utf-8-offsets
-
 ## Configuration in `initializationOptions`

 **Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567
@@ -3,7 +3,7 @@ import * as lc from "vscode-languageclient";
 import * as ra from "./lsp_ext";
 import * as path from "path";

-import { Ctx, Cmd } from "./ctx";
+import { Ctx, Cmd, CtxInit } from "./ctx";
 import { applySnippetWorkspaceEdit, applySnippetTextEdits } from "./snippets";
 import { spawnSync } from "child_process";
 import { RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run";
@ -16,14 +16,14 @@ import { LINKED_COMMANDS } from "./client";
|
||||||
export * from "./ast_inspector";
|
export * from "./ast_inspector";
|
||||||
export * from "./run";
|
export * from "./run";
|
||||||
|
|
||||||
export function analyzerStatus(ctx: Ctx): Cmd {
|
export function analyzerStatus(ctx: CtxInit): Cmd {
|
||||||
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
||||||
readonly uri = vscode.Uri.parse("rust-analyzer-status://status");
|
readonly uri = vscode.Uri.parse("rust-analyzer-status://status");
|
||||||
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
||||||
|
|
||||||
async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> {
|
async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> {
|
||||||
if (!vscode.window.activeTextEditor) return "";
|
if (!vscode.window.activeTextEditor) return "";
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const params: ra.AnalyzerStatusParams = {};
|
const params: ra.AnalyzerStatusParams = {};
|
||||||
const doc = ctx.activeRustEditor?.document;
|
const doc = ctx.activeRustEditor?.document;
|
||||||
|
@ -52,7 +52,7 @@ export function analyzerStatus(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function memoryUsage(ctx: Ctx): Cmd {
|
export function memoryUsage(ctx: CtxInit): Cmd {
|
||||||
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
||||||
readonly uri = vscode.Uri.parse("rust-analyzer-memory://memory");
|
readonly uri = vscode.Uri.parse("rust-analyzer-memory://memory");
|
||||||
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
||||||
|
@ -60,14 +60,9 @@ export function memoryUsage(ctx: Ctx): Cmd {
|
||||||
provideTextDocumentContent(_uri: vscode.Uri): vscode.ProviderResult<string> {
|
provideTextDocumentContent(_uri: vscode.Uri): vscode.ProviderResult<string> {
|
||||||
if (!vscode.window.activeTextEditor) return "";
|
if (!vscode.window.activeTextEditor) return "";
|
||||||
|
|
||||||
return ctx
|
return ctx.client.sendRequest(ra.memoryUsage).then((mem: any) => {
|
||||||
.getClient()
|
return "Per-query memory usage:\n" + mem + "\n(note: database has been cleared)";
|
||||||
.then((it) => it.sendRequest(ra.memoryUsage))
|
});
|
||||||
.then((mem: any) => {
|
|
||||||
return (
|
|
||||||
"Per-query memory usage:\n" + mem + "\n(note: database has been cleared)"
|
|
||||||
);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
get onDidChange(): vscode.Event<vscode.Uri> {
|
get onDidChange(): vscode.Event<vscode.Uri> {
|
||||||
|
@ -86,18 +81,18 @@ export function memoryUsage(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function shuffleCrateGraph(ctx: Ctx): Cmd {
|
export function shuffleCrateGraph(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
return ctx.getClient().then((it) => it.sendRequest(ra.shuffleCrateGraph));
|
return ctx.client.sendRequest(ra.shuffleCrateGraph);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function matchingBrace(ctx: Ctx): Cmd {
|
export function matchingBrace(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const editor = ctx.activeRustEditor;
|
const editor = ctx.activeRustEditor;
|
||||||
if (!editor) return;
|
if (!editor) return;
|
||||||
|
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const response = await client.sendRequest(ra.matchingBrace, {
|
const response = await client.sendRequest(ra.matchingBrace, {
|
||||||
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
|
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
|
||||||
|
@ -114,12 +109,12 @@ export function matchingBrace(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function joinLines(ctx: Ctx): Cmd {
|
export function joinLines(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const editor = ctx.activeRustEditor;
|
const editor = ctx.activeRustEditor;
|
||||||
if (!editor) return;
|
if (!editor) return;
|
||||||
|
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const items: lc.TextEdit[] = await client.sendRequest(ra.joinLines, {
|
const items: lc.TextEdit[] = await client.sendRequest(ra.joinLines, {
|
||||||
ranges: editor.selections.map((it) => client.code2ProtocolConverter.asRange(it)),
|
ranges: editor.selections.map((it) => client.code2ProtocolConverter.asRange(it)),
|
||||||
|
@ -134,19 +129,19 @@ export function joinLines(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function moveItemUp(ctx: Ctx): Cmd {
|
export function moveItemUp(ctx: CtxInit): Cmd {
|
||||||
return moveItem(ctx, ra.Direction.Up);
|
return moveItem(ctx, ra.Direction.Up);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function moveItemDown(ctx: Ctx): Cmd {
|
export function moveItemDown(ctx: CtxInit): Cmd {
|
||||||
return moveItem(ctx, ra.Direction.Down);
|
return moveItem(ctx, ra.Direction.Down);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd {
|
export function moveItem(ctx: CtxInit, direction: ra.Direction): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const editor = ctx.activeRustEditor;
|
const editor = ctx.activeRustEditor;
|
||||||
if (!editor) return;
|
if (!editor) return;
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const lcEdits = await client.sendRequest(ra.moveItem, {
|
const lcEdits = await client.sendRequest(ra.moveItem, {
|
||||||
range: client.code2ProtocolConverter.asRange(editor.selection),
|
range: client.code2ProtocolConverter.asRange(editor.selection),
|
||||||
|
@ -161,13 +156,13 @@ export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function onEnter(ctx: Ctx): Cmd {
|
export function onEnter(ctx: CtxInit): Cmd {
|
||||||
async function handleKeypress() {
|
async function handleKeypress() {
|
||||||
const editor = ctx.activeRustEditor;
|
const editor = ctx.activeRustEditor;
|
||||||
|
|
||||||
if (!editor) return false;
|
if (!editor) return false;
|
||||||
|
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
const lcEdits = await client
|
const lcEdits = await client
|
||||||
.sendRequest(ra.onEnter, {
|
.sendRequest(ra.onEnter, {
|
||||||
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
|
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
|
||||||
|
@ -193,13 +188,13 @@ export function onEnter(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function parentModule(ctx: Ctx): Cmd {
|
export function parentModule(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const editor = vscode.window.activeTextEditor;
|
const editor = vscode.window.activeTextEditor;
|
||||||
if (!editor) return;
|
if (!editor) return;
|
||||||
if (!(isRustDocument(editor.document) || isCargoTomlDocument(editor.document))) return;
|
if (!(isRustDocument(editor.document) || isCargoTomlDocument(editor.document))) return;
|
||||||
|
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const locations = await client.sendRequest(ra.parentModule, {
|
const locations = await client.sendRequest(ra.parentModule, {
|
||||||
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
|
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
|
||||||
|
@ -230,12 +225,12 @@ export function parentModule(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function openCargoToml(ctx: Ctx): Cmd {
|
export function openCargoToml(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const editor = ctx.activeRustEditor;
|
const editor = ctx.activeRustEditor;
|
||||||
if (!editor) return;
|
if (!editor) return;
|
||||||
|
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
const response = await client.sendRequest(ra.openCargoToml, {
|
const response = await client.sendRequest(ra.openCargoToml, {
|
||||||
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
|
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
|
||||||
});
|
});
|
||||||
|
@ -251,12 +246,12 @@ export function openCargoToml(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function ssr(ctx: Ctx): Cmd {
|
export function ssr(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const editor = vscode.window.activeTextEditor;
|
const editor = vscode.window.activeTextEditor;
|
||||||
if (!editor) return;
|
if (!editor) return;
|
||||||
|
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const position = editor.selection.active;
|
const position = editor.selection.active;
|
||||||
const selections = editor.selections;
|
const selections = editor.selections;
|
||||||
|
@ -308,7 +303,7 @@ export function ssr(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function serverVersion(ctx: Ctx): Cmd {
|
export function serverVersion(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
if (!ctx.serverPath) {
|
if (!ctx.serverPath) {
|
||||||
void vscode.window.showWarningMessage(`rust-analyzer server is not running`);
|
void vscode.window.showWarningMessage(`rust-analyzer server is not running`);
|
||||||
|
@ -324,7 +319,7 @@ export function serverVersion(ctx: Ctx): Cmd {
|
||||||
// Opens the virtual file that will show the syntax tree
|
// Opens the virtual file that will show the syntax tree
|
||||||
//
|
//
|
||||||
// The contents of the file come from the `TextDocumentContentProvider`
|
// The contents of the file come from the `TextDocumentContentProvider`
|
||||||
export function syntaxTree(ctx: Ctx): Cmd {
|
export function syntaxTree(ctx: CtxInit): Cmd {
|
||||||
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
||||||
readonly uri = vscode.Uri.parse("rust-analyzer-syntax-tree://syntaxtree/tree.rast");
|
readonly uri = vscode.Uri.parse("rust-analyzer-syntax-tree://syntaxtree/tree.rast");
|
||||||
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
||||||
|
@ -360,7 +355,7 @@ export function syntaxTree(ctx: Ctx): Cmd {
|
||||||
): Promise<string> {
|
): Promise<string> {
|
||||||
const rustEditor = ctx.activeRustEditor;
|
const rustEditor = ctx.activeRustEditor;
|
||||||
if (!rustEditor) return "";
|
if (!rustEditor) return "";
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
// When the range based query is enabled we take the range of the selection
|
// When the range based query is enabled we take the range of the selection
|
||||||
const range =
|
const range =
|
||||||
|
@ -407,7 +402,7 @@ export function syntaxTree(ctx: Ctx): Cmd {
|
||||||
// Opens the virtual file that will show the HIR of the function containing the cursor position
|
// Opens the virtual file that will show the HIR of the function containing the cursor position
|
||||||
//
|
//
|
||||||
// The contents of the file come from the `TextDocumentContentProvider`
|
// The contents of the file come from the `TextDocumentContentProvider`
|
||||||
export function viewHir(ctx: Ctx): Cmd {
|
export function viewHir(ctx: CtxInit): Cmd {
|
||||||
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
||||||
readonly uri = vscode.Uri.parse("rust-analyzer-hir://viewHir/hir.rs");
|
readonly uri = vscode.Uri.parse("rust-analyzer-hir://viewHir/hir.rs");
|
||||||
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
||||||
|
@ -444,7 +439,7 @@ export function viewHir(ctx: Ctx): Cmd {
|
||||||
const rustEditor = ctx.activeRustEditor;
|
const rustEditor = ctx.activeRustEditor;
|
||||||
if (!rustEditor) return "";
|
if (!rustEditor) return "";
|
||||||
|
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
const params = {
|
const params = {
|
||||||
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
|
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
|
||||||
rustEditor.document
|
rustEditor.document
|
||||||
|
@ -473,7 +468,7 @@ export function viewHir(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function viewFileText(ctx: Ctx): Cmd {
|
export function viewFileText(ctx: CtxInit): Cmd {
|
||||||
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
||||||
readonly uri = vscode.Uri.parse("rust-analyzer-file-text://viewFileText/file.rs");
|
readonly uri = vscode.Uri.parse("rust-analyzer-file-text://viewFileText/file.rs");
|
||||||
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
||||||
|
@ -509,7 +504,7 @@ export function viewFileText(ctx: Ctx): Cmd {
|
||||||
): Promise<string> {
|
): Promise<string> {
|
||||||
const rustEditor = ctx.activeRustEditor;
|
const rustEditor = ctx.activeRustEditor;
|
||||||
if (!rustEditor) return "";
|
if (!rustEditor) return "";
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const params = client.code2ProtocolConverter.asTextDocumentIdentifier(
|
const params = client.code2ProtocolConverter.asTextDocumentIdentifier(
|
||||||
rustEditor.document
|
rustEditor.document
|
||||||
|
@ -536,7 +531,7 @@ export function viewFileText(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function viewItemTree(ctx: Ctx): Cmd {
|
export function viewItemTree(ctx: CtxInit): Cmd {
|
||||||
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
const tdcp = new (class implements vscode.TextDocumentContentProvider {
|
||||||
readonly uri = vscode.Uri.parse("rust-analyzer-item-tree://viewItemTree/itemtree.rs");
|
readonly uri = vscode.Uri.parse("rust-analyzer-item-tree://viewItemTree/itemtree.rs");
|
||||||
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
|
||||||
|
@ -572,7 +567,7 @@ export function viewItemTree(ctx: Ctx): Cmd {
|
||||||
): Promise<string> {
|
): Promise<string> {
|
||||||
const rustEditor = ctx.activeRustEditor;
|
const rustEditor = ctx.activeRustEditor;
|
||||||
if (!rustEditor) return "";
|
if (!rustEditor) return "";
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const params = {
|
const params = {
|
||||||
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
|
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
|
||||||
|
@ -601,7 +596,7 @@ export function viewItemTree(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function crateGraph(ctx: Ctx, full: boolean): Cmd {
|
function crateGraph(ctx: CtxInit, full: boolean): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const nodeModulesPath = vscode.Uri.file(path.join(ctx.extensionPath, "node_modules"));
|
const nodeModulesPath = vscode.Uri.file(path.join(ctx.extensionPath, "node_modules"));
|
||||||
|
|
||||||
|
@ -618,7 +613,7 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd {
|
||||||
const params = {
|
const params = {
|
||||||
full: full,
|
full: full,
|
||||||
};
|
};
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
const dot = await client.sendRequest(ra.viewCrateGraph, params);
|
const dot = await client.sendRequest(ra.viewCrateGraph, params);
|
||||||
const uri = panel.webview.asWebviewUri(nodeModulesPath);
|
const uri = panel.webview.asWebviewUri(nodeModulesPath);
|
||||||
|
|
||||||
|
@ -664,18 +659,18 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function viewCrateGraph(ctx: Ctx): Cmd {
|
export function viewCrateGraph(ctx: CtxInit): Cmd {
|
||||||
return crateGraph(ctx, false);
|
return crateGraph(ctx, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function viewFullCrateGraph(ctx: Ctx): Cmd {
|
export function viewFullCrateGraph(ctx: CtxInit): Cmd {
|
||||||
return crateGraph(ctx, true);
|
return crateGraph(ctx, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Opens the virtual file that will show the syntax tree
|
// Opens the virtual file that will show the syntax tree
|
||||||
//
|
//
|
||||||
// The contents of the file come from the `TextDocumentContentProvider`
|
// The contents of the file come from the `TextDocumentContentProvider`
|
||||||
export function expandMacro(ctx: Ctx): Cmd {
|
export function expandMacro(ctx: CtxInit): Cmd {
|
||||||
function codeFormat(expanded: ra.ExpandedMacro): string {
|
function codeFormat(expanded: ra.ExpandedMacro): string {
|
||||||
let result = `// Recursive expansion of ${expanded.name}! macro\n`;
|
let result = `// Recursive expansion of ${expanded.name}! macro\n`;
|
||||||
result += "// " + "=".repeat(result.length - 3);
|
result += "// " + "=".repeat(result.length - 3);
|
||||||
|
@ -691,7 +686,7 @@ export function expandMacro(ctx: Ctx): Cmd {
|
||||||
async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> {
|
async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> {
|
||||||
const editor = vscode.window.activeTextEditor;
|
const editor = vscode.window.activeTextEditor;
|
||||||
if (!editor) return "";
|
if (!editor) return "";
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const position = editor.selection.active;
|
const position = editor.selection.active;
|
||||||
|
|
||||||
|
@ -723,8 +718,8 @@ export function expandMacro(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function reloadWorkspace(ctx: Ctx): Cmd {
|
export function reloadWorkspace(ctx: CtxInit): Cmd {
|
||||||
return async () => (await ctx.getClient()).sendRequest(ra.reloadWorkspace);
|
return async () => ctx.client.sendRequest(ra.reloadWorkspace);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function showReferencesImpl(
|
async function showReferencesImpl(
|
||||||
|
@ -743,13 +738,13 @@ async function showReferencesImpl(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export function showReferences(ctx: Ctx): Cmd {
|
export function showReferences(ctx: CtxInit): Cmd {
|
||||||
return async (uri: string, position: lc.Position, locations: lc.Location[]) => {
|
return async (uri: string, position: lc.Position, locations: lc.Location[]) => {
|
||||||
await showReferencesImpl(await ctx.getClient(), uri, position, locations);
|
await showReferencesImpl(ctx.client, uri, position, locations);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function applyActionGroup(_ctx: Ctx): Cmd {
|
export function applyActionGroup(_ctx: CtxInit): Cmd {
|
||||||
return async (actions: { label: string; arguments: lc.CodeAction }[]) => {
|
return async (actions: { label: string; arguments: lc.CodeAction }[]) => {
|
||||||
const selectedAction = await vscode.window.showQuickPick(actions);
|
const selectedAction = await vscode.window.showQuickPick(actions);
|
||||||
if (!selectedAction) return;
|
if (!selectedAction) return;
|
||||||
|
@ -760,9 +755,9 @@ export function applyActionGroup(_ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function gotoLocation(ctx: Ctx): Cmd {
|
export function gotoLocation(ctx: CtxInit): Cmd {
|
||||||
return async (locationLink: lc.LocationLink) => {
|
return async (locationLink: lc.LocationLink) => {
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
const uri = client.protocol2CodeConverter.asUri(locationLink.targetUri);
|
const uri = client.protocol2CodeConverter.asUri(locationLink.targetUri);
|
||||||
let range = client.protocol2CodeConverter.asRange(locationLink.targetSelectionRange);
|
let range = client.protocol2CodeConverter.asRange(locationLink.targetSelectionRange);
|
||||||
// collapse the range to a cursor position
|
// collapse the range to a cursor position
|
||||||
|
@ -772,13 +767,13 @@ export function gotoLocation(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function openDocs(ctx: Ctx): Cmd {
|
export function openDocs(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const editor = vscode.window.activeTextEditor;
|
const editor = vscode.window.activeTextEditor;
|
||||||
if (!editor) {
|
if (!editor) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
const position = editor.selection.active;
|
const position = editor.selection.active;
|
||||||
const textDocument = { uri: editor.document.uri.toString() };
|
const textDocument = { uri: editor.document.uri.toString() };
|
||||||
|
@ -791,16 +786,16 @@ export function openDocs(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function cancelFlycheck(ctx: Ctx): Cmd {
|
export function cancelFlycheck(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
await client.sendRequest(ra.cancelFlycheck);
|
await client.sendRequest(ra.cancelFlycheck);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function resolveCodeAction(ctx: Ctx): Cmd {
|
export function resolveCodeAction(ctx: CtxInit): Cmd {
|
||||||
return async (params: lc.CodeAction) => {
|
return async (params: lc.CodeAction) => {
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
params.command = undefined;
|
params.command = undefined;
|
||||||
const item = await client?.sendRequest(lc.CodeActionResolveRequest.type, params);
|
const item = await client?.sendRequest(lc.CodeActionResolveRequest.type, params);
|
||||||
if (!item?.edit) {
|
if (!item?.edit) {
|
||||||
|
@ -825,13 +820,13 @@ export function resolveCodeAction(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function applySnippetWorkspaceEditCommand(_ctx: Ctx): Cmd {
|
export function applySnippetWorkspaceEditCommand(_ctx: CtxInit): Cmd {
|
||||||
return async (edit: vscode.WorkspaceEdit) => {
|
return async (edit: vscode.WorkspaceEdit) => {
|
||||||
await applySnippetWorkspaceEdit(edit);
|
await applySnippetWorkspaceEdit(edit);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function run(ctx: Ctx): Cmd {
|
export function run(ctx: CtxInit): Cmd {
|
||||||
let prevRunnable: RunnableQuickPick | undefined;
|
let prevRunnable: RunnableQuickPick | undefined;
|
||||||
|
|
||||||
return async () => {
|
return async () => {
|
||||||
|
@ -845,11 +840,11 @@ export function run(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function peekTests(ctx: Ctx): Cmd {
|
export function peekTests(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const editor = ctx.activeRustEditor;
|
const editor = ctx.activeRustEditor;
|
||||||
if (!editor) return;
|
if (!editor) return;
|
||||||
const client = await ctx.getClient();
|
const client = ctx.client;
|
||||||
|
|
||||||
await vscode.window.withProgress(
|
await vscode.window.withProgress(
|
||||||
{
|
{
|
||||||
|
@ -878,7 +873,7 @@ export function peekTests(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function runSingle(ctx: Ctx): Cmd {
|
export function runSingle(ctx: CtxInit): Cmd {
|
||||||
return async (runnable: ra.Runnable) => {
|
return async (runnable: ra.Runnable) => {
|
||||||
const editor = ctx.activeRustEditor;
|
const editor = ctx.activeRustEditor;
|
||||||
if (!editor) return;
|
if (!editor) return;
|
||||||
|
@ -895,7 +890,7 @@ export function runSingle(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function copyRunCommandLine(ctx: Ctx) {
|
export function copyRunCommandLine(ctx: CtxInit) {
|
||||||
let prevRunnable: RunnableQuickPick | undefined;
|
let prevRunnable: RunnableQuickPick | undefined;
|
||||||
return async () => {
|
return async () => {
|
||||||
const item = await selectRunnable(ctx, prevRunnable);
|
const item = await selectRunnable(ctx, prevRunnable);
|
||||||
|
@ -907,7 +902,7 @@ export function copyRunCommandLine(ctx: Ctx) {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function debug(ctx: Ctx): Cmd {
|
export function debug(ctx: CtxInit): Cmd {
|
||||||
let prevDebuggee: RunnableQuickPick | undefined;
|
let prevDebuggee: RunnableQuickPick | undefined;
|
||||||
|
|
||||||
return async () => {
|
return async () => {
|
||||||
|
@ -920,13 +915,13 @@ export function debug(ctx: Ctx): Cmd {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function debugSingle(ctx: Ctx): Cmd {
|
export function debugSingle(ctx: CtxInit): Cmd {
|
||||||
return async (config: ra.Runnable) => {
|
return async (config: ra.Runnable) => {
|
||||||
await startDebugSession(ctx, config);
|
await startDebugSession(ctx, config);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function newDebugConfig(ctx: Ctx): Cmd {
|
export function newDebugConfig(ctx: CtxInit): Cmd {
|
||||||
return async () => {
|
return async () => {
|
||||||
const item = await selectRunnable(ctx, undefined, true, false);
|
const item = await selectRunnable(ctx, undefined, true, false);
|
||||||
if (!item) return;
|
if (!item) return;
|
||||||
|
|
|
@@ -4,12 +4,17 @@ import * as ra from "./lsp_ext";

 import { Config, substituteVariablesInEnv, substituteVSCodeVariables } from "./config";
 import { createClient } from "./client";
-import { isRustEditor, log, RustEditor } from "./util";
+import { isRustDocument, isRustEditor, log, RustEditor } from "./util";
 import { ServerStatusParams } from "./lsp_ext";
 import { PersistentState } from "./persistent_state";
 import { bootstrap } from "./bootstrap";

+// We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
+// only those are in use. We use "Empty" to represent these scenarios
+// (r-a still somewhat works with Live Share, because commands are tunneled to the host)
+
 export type Workspace =
+    | { kind: "Empty" }
     | {
           kind: "Workspace Folder";
       }
@@ -18,16 +23,39 @@ export type Workspace =
           files: vscode.TextDocument[];
       };

+export function fetchWorkspace(): Workspace {
+    const folders = (vscode.workspace.workspaceFolders || []).filter(
+        (folder) => folder.uri.scheme === "file"
+    );
+    const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
+        isRustDocument(document)
+    );
+
+    return folders.length === 0
+        ? rustDocuments.length === 0
+            ? { kind: "Empty" }
+            : {
+                  kind: "Detached Files",
+                  files: rustDocuments,
+              }
+        : { kind: "Workspace Folder" };
+}
+
 export type CommandFactory = {
-    enabled: (ctx: Ctx) => Cmd;
+    enabled: (ctx: CtxInit) => Cmd;
     disabled?: (ctx: Ctx) => Cmd;
 };

+export type CtxInit = Ctx & {
+    readonly client: lc.LanguageClient;
+};
+
 export class Ctx {
     readonly statusBar: vscode.StatusBarItem;
     readonly config: Config;
+    readonly workspace: Workspace;

-    private client: lc.LanguageClient | undefined;
+    private _client: lc.LanguageClient | undefined;
     private _serverPath: string | undefined;
     private traceOutputChannel: vscode.OutputChannel | undefined;
     private outputChannel: vscode.OutputChannel | undefined;
@ -36,18 +64,17 @@ export class Ctx {
|
||||||
private commandFactories: Record<string, CommandFactory>;
|
private commandFactories: Record<string, CommandFactory>;
|
||||||
private commandDisposables: Disposable[];
|
private commandDisposables: Disposable[];
|
||||||
|
|
||||||
workspace: Workspace;
|
get client() {
|
||||||
|
return this._client;
|
||||||
|
}
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
readonly extCtx: vscode.ExtensionContext,
|
readonly extCtx: vscode.ExtensionContext,
|
||||||
workspace: Workspace,
|
commandFactories: Record<string, CommandFactory>,
|
||||||
commandFactories: Record<string, CommandFactory>
|
workspace: Workspace
|
||||||
) {
|
) {
|
||||||
extCtx.subscriptions.push(this);
|
extCtx.subscriptions.push(this);
|
||||||
this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
|
this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
|
||||||
this.statusBar.text = "rust-analyzer";
|
|
||||||
this.statusBar.tooltip = "ready";
|
|
||||||
this.statusBar.command = "rust-analyzer.analyzerStatus";
|
|
||||||
this.statusBar.show();
|
this.statusBar.show();
|
||||||
this.workspace = workspace;
|
this.workspace = workspace;
|
||||||
this.clientSubscriptions = [];
|
this.clientSubscriptions = [];
|
||||||
|
@ -57,7 +84,10 @@ export class Ctx {
|
||||||
this.state = new PersistentState(extCtx.globalState);
|
this.state = new PersistentState(extCtx.globalState);
|
||||||
this.config = new Config(extCtx);
|
this.config = new Config(extCtx);
|
||||||
|
|
||||||
this.updateCommands();
|
this.updateCommands("disable");
|
||||||
|
this.setServerStatus({
|
||||||
|
health: "stopped",
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
dispose() {
|
dispose() {
|
||||||
|
@ -67,16 +97,36 @@ export class Ctx {
|
||||||
this.commandDisposables.forEach((disposable) => disposable.dispose());
|
this.commandDisposables.forEach((disposable) => disposable.dispose());
|
||||||
}
|
}
|
||||||
|
|
||||||
clientFetcher() {
|
async onWorkspaceFolderChanges() {
|
||||||
const self = this;
|
const workspace = fetchWorkspace();
|
||||||
return {
|
if (workspace.kind === "Detached Files" && this.workspace.kind === "Detached Files") {
|
||||||
get client(): lc.LanguageClient | undefined {
|
if (workspace.files !== this.workspace.files) {
|
||||||
return self.client;
|
if (this.client?.isRunning()) {
|
||||||
},
|
// Ideally we wouldn't need to tear down the server here, but currently detached files
|
||||||
};
|
// are only specified at server start
|
||||||
|
await this.stopAndDispose();
|
||||||
|
await this.start();
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (workspace.kind === "Workspace Folder" && this.workspace.kind === "Workspace Folder") {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (workspace.kind === "Empty") {
|
||||||
|
await this.stopAndDispose();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (this.client?.isRunning()) {
|
||||||
|
await this.restart();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
-    async getClient() {
+    private async getOrCreateClient() {
+        if (this.workspace.kind === "Empty") {
+            return;
+        }
+
         if (!this.traceOutputChannel) {
             this.traceOutputChannel = vscode.window.createOutputChannel(
                 "Rust Analyzer Language Server Trace"
@@ -88,7 +138,7 @@ export class Ctx {
             this.pushExtCleanup(this.outputChannel);
         }

-        if (!this.client) {
+        if (!this._client) {
             this._serverPath = await bootstrap(this.extCtx, this.config, this.state).catch(
                 (err) => {
                     let message = "bootstrap error. ";
@@ -125,47 +175,61 @@ export class Ctx {

             const initializationOptions = substituteVSCodeVariables(rawInitializationOptions);

-            this.client = await createClient(
+            this._client = await createClient(
                 this.traceOutputChannel,
                 this.outputChannel,
                 initializationOptions,
                 serverOptions
             );
             this.pushClientCleanup(
-                this.client.onNotification(ra.serverStatus, (params) =>
+                this._client.onNotification(ra.serverStatus, (params) =>
                     this.setServerStatus(params)
                 )
             );
         }
-        return this.client;
+        return this._client;
     }

-    async activate() {
-        log.info("Activating language client");
-        const client = await this.getClient();
+    async start() {
+        log.info("Starting language client");
+        const client = await this.getOrCreateClient();
+        if (!client) {
+            return;
+        }
         await client.start();
         this.updateCommands();
-        return client;
     }

-    async deactivate() {
-        log.info("Deactivating language client");
-        await this.client?.stop();
-        this.updateCommands();
+    async restart() {
+        // FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
+        await this.stopAndDispose();
+        await this.start();
     }

     async stop() {
+        if (!this._client) {
+            return;
+        }
         log.info("Stopping language client");
+        this.updateCommands("disable");
+        await this._client.stop();
+    }
+
+    async stopAndDispose() {
+        if (!this._client) {
+            return;
+        }
+        log.info("Disposing language client");
+        this.updateCommands("disable");
         await this.disposeClient();
-        this.updateCommands();
     }

     private async disposeClient() {
         this.clientSubscriptions?.forEach((disposable) => disposable.dispose());
         this.clientSubscriptions = [];
-        await this.client?.dispose();
+        await this._client?.dispose();
         this._serverPath = undefined;
-        this.client = undefined;
+        this._client = undefined;
     }

     get activeRustEditor(): RustEditor | undefined {
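The lifecycle methods above split responsibilities: stop() halts a running client but keeps it cached, stopAndDispose() also drops it, and restart() is stopAndDispose() followed by start(), which lazily rebuilds the client through getOrCreateClient(). A minimal standalone sketch of that pattern, with a stub Client interface standing in for lc.LanguageClient (illustration only):

// Stub standing in for lc.LanguageClient; only the calls used below.
interface Client {
    start(): Promise<void>;
    stop(): Promise<void>;
    dispose(): Promise<void>;
    isRunning(): boolean;
}

class Lifecycle {
    private _client?: Client;

    constructor(private readonly createClient: () => Promise<Client>) {}

    private async getOrCreateClient(): Promise<Client> {
        if (!this._client) {
            this._client = await this.createClient();
        }
        return this._client;
    }

    async start() {
        const client = await this.getOrCreateClient();
        await client.start();
    }

    // Halts the server but keeps the client object for a later start().
    async stop() {
        if (!this._client) return;
        await this._client.stop();
    }

    // Tears the client down entirely; the next start() builds a fresh one.
    async stopAndDispose() {
        if (!this._client) return;
        await this._client.dispose();
        this._client = undefined;
    }

    async restart() {
        await this.stopAndDispose();
        await this.start();
    }
}
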
@@ -185,32 +249,41 @@ export class Ctx {
         return this._serverPath;
     }

-    private updateCommands() {
+    private updateCommands(forceDisable?: "disable") {
         this.commandDisposables.forEach((disposable) => disposable.dispose());
         this.commandDisposables = [];
-        const fetchFactory = (factory: CommandFactory, fullName: string) => {
-            return this.client && this.client.isRunning()
-                ? factory.enabled
-                : factory.disabled ||
-                  ((_) => () =>
-                      vscode.window.showErrorMessage(
-                          `command ${fullName} failed: rust-analyzer server is not running`
-                      ));
+        const clientRunning = (!forceDisable && this._client?.isRunning()) ?? false;
+        const isClientRunning = function (_ctx: Ctx): _ctx is CtxInit {
+            return clientRunning;
         };

         for (const [name, factory] of Object.entries(this.commandFactories)) {
             const fullName = `rust-analyzer.${name}`;
-            const callback = fetchFactory(factory, fullName)(this);
+            let callback;
+            if (isClientRunning(this)) {
+                // we asserted that `client` is defined
+                callback = factory.enabled(this);
+            } else if (factory.disabled) {
+                callback = factory.disabled(this);
+            } else {
+                callback = () =>
+                    vscode.window.showErrorMessage(
+                        `command ${fullName} failed: rust-analyzer server is not running`
+                    );
+            }

             this.commandDisposables.push(vscode.commands.registerCommand(fullName, callback));
         }
     }

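updateCommands now relies on a user-defined type guard: isClientRunning narrows Ctx to CtxInit, so enabled command factories can require an initialized client at the type level. A reduced, self-contained illustration of that technique, with simplified stand-ins for the extension's Ctx, CtxInit and CommandFactory types (not the real ones):

interface FakeClient {
    sendRequest(method: string): Promise<unknown>;
}

// Simplified stand-ins for the extension's Ctx / CtxInit types.
interface Ctx {
    client?: FakeClient;
    clientRunning: boolean;
}
type CtxInit = Ctx & { client: FakeClient };

interface CommandFactory {
    enabled: (ctx: CtxInit) => () => void;
    disabled?: (ctx: Ctx) => () => void;
}

function pickCallback(ctx: Ctx, factory: CommandFactory, fullName: string): () => void {
    // Type guard: when it returns true, `ctx` is treated as CtxInit below.
    const isClientRunning = (c: Ctx): c is CtxInit => c.clientRunning;

    if (isClientRunning(ctx)) {
        return factory.enabled(ctx); // ctx.client is non-optional here
    } else if (factory.disabled) {
        return factory.disabled(ctx);
    }
    return () => console.error(`command ${fullName} failed: server is not running`);
}
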
-    setServerStatus(status: ServerStatusParams) {
+    setServerStatus(status: ServerStatusParams | { health: "stopped" }) {
         let icon = "";
         const statusBar = this.statusBar;
         switch (status.health) {
             case "ok":
-                statusBar.tooltip = status.message ?? "Ready";
-                statusBar.command = undefined;
+                statusBar.tooltip = (status.message ?? "Ready") + "\nClick to stop server.";
+                statusBar.command = "rust-analyzer.stopServer";
                 statusBar.color = undefined;
                 statusBar.backgroundColor = undefined;
                 break;
@@ -234,6 +307,13 @@ export class Ctx {
                 statusBar.backgroundColor = new vscode.ThemeColor("statusBarItem.errorBackground");
                 icon = "$(error) ";
                 break;
+            case "stopped":
+                statusBar.tooltip = "Server is stopped.\nClick to start.";
+                statusBar.command = "rust-analyzer.startServer";
+                statusBar.color = undefined;
+                statusBar.backgroundColor = undefined;
+                statusBar.text = `$(stop-circle) rust-analyzer`;
+                return;
         }
         if (!status.quiescent) icon = "$(sync~spin) ";
         statusBar.text = `${icon}rust-analyzer`;
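With the added "stopped" variant the status bar doubles as a start/stop toggle: it triggers rust-analyzer.stopServer while the server is healthy and rust-analyzer.startServer once it is stopped. A small pure-function sketch of just those two arms of the mapping; the StatusPresentation type is invented for illustration, the real code mutates a vscode.StatusBarItem instead:

type Health = "ok" | "stopped";

// Invented for illustration only.
interface StatusPresentation {
    text: string;
    command: string;
    tooltip: string;
}

function presentStatus(health: Health, message?: string): StatusPresentation {
    switch (health) {
        case "ok":
            return {
                text: "rust-analyzer",
                command: "rust-analyzer.stopServer",
                tooltip: (message ?? "Ready") + "\nClick to stop server.",
            };
        case "stopped":
            return {
                text: "$(stop-circle) rust-analyzer",
                command: "rust-analyzer.startServer",
                tooltip: "Server is stopped.\nClick to start.",
            };
    }
}
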

@@ -2,15 +2,13 @@ import * as vscode from "vscode";
 import * as lc from "vscode-languageclient/node";

 import * as commands from "./commands";
-import { CommandFactory, Ctx, Workspace } from "./ctx";
-import { isRustDocument } from "./util";
+import { CommandFactory, Ctx, fetchWorkspace } from "./ctx";
 import { activateTaskProvider } from "./tasks";
 import { setContextValue } from "./util";

 const RUST_PROJECT_CONTEXT_NAME = "inRustProject";

 export interface RustAnalyzerExtensionApi {
-    // FIXME: this should be non-optional
     readonly client?: lc.LanguageClient;
 }

@@ -32,32 +30,7 @@ export async function activate(
             .then(() => {}, console.error);
     }

-    // We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
-    // only those are in use.
-    // (r-a still somewhat works with Live Share, because commands are tunneled to the host)
-    const folders = (vscode.workspace.workspaceFolders || []).filter(
-        (folder) => folder.uri.scheme === "file"
-    );
-    const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
-        isRustDocument(document)
-    );
-
-    if (folders.length === 0 && rustDocuments.length === 0) {
-        // FIXME: Ideally we would choose not to activate at all (and avoid registering
-        // non-functional editor commands), but VS Code doesn't seem to have a good way of doing
-        // that
-        return {};
-    }
-
-    const workspace: Workspace =
-        folders.length === 0
-            ? {
-                  kind: "Detached Files",
-                  files: rustDocuments,
-              }
-            : { kind: "Workspace Folder" };
-
-    const ctx = new Ctx(context, workspace, createCommands());
+    const ctx = new Ctx(context, createCommands(), fetchWorkspace());
     // VS Code doesn't show a notification when an extension fails to activate
     // so we do it ourselves.
     const api = await activateServer(ctx).catch((err) => {
@@ -75,18 +48,23 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> {
         ctx.pushExtCleanup(activateTaskProvider(ctx.config));
     }

+    vscode.workspace.onDidChangeWorkspaceFolders(
+        async (_) => ctx.onWorkspaceFolderChanges(),
+        null,
+        ctx.subscriptions
+    );
     vscode.workspace.onDidChangeConfiguration(
         async (_) => {
-            await ctx
-                .clientFetcher()
-                .client?.sendNotification("workspace/didChangeConfiguration", { settings: "" });
+            await ctx.client?.sendNotification("workspace/didChangeConfiguration", {
+                settings: "",
+            });
         },
         null,
         ctx.subscriptions
     );

-    await ctx.activate();
-    return ctx.clientFetcher();
+    await ctx.start();
+    return ctx;
 }

 function createCommands(): Record<string, CommandFactory> {
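activateServer can now return ctx directly because Ctx itself satisfies RustAnalyzerExtensionApi through a readonly client getter, making the old clientFetcher() indirection unnecessary. A toy example of that structural-typing point, using stub types rather than the real ones:

// Stub types; the real ones are lc.LanguageClient and RustAnalyzerExtensionApi.
interface LanguageClientLike {
    sendNotification(method: string, params: unknown): Promise<void>;
}

interface ExtensionApi {
    readonly client?: LanguageClientLike;
}

class Context {
    private _client?: LanguageClientLike;

    get client(): LanguageClientLike | undefined {
        return this._client;
    }
}

// Structural typing lets the context object double as the public extension API.
function asExtensionApi(ctx: Context): ExtensionApi {
    return ctx;
}
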
@@ -98,33 +76,30 @@ function createCommands(): Record<string, CommandFactory> {
         reload: {
             enabled: (ctx) => async () => {
                 void vscode.window.showInformationMessage("Reloading rust-analyzer...");
-                // FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
-                await ctx.stop();
-                await ctx.activate();
+                await ctx.restart();
             },
             disabled: (ctx) => async () => {
                 void vscode.window.showInformationMessage("Reloading rust-analyzer...");
-                await ctx.activate();
+                await ctx.start();
             },
         },
         startServer: {
             enabled: (ctx) => async () => {
-                await ctx.activate();
+                await ctx.start();
             },
             disabled: (ctx) => async () => {
-                await ctx.activate();
+                await ctx.start();
             },
         },
         stopServer: {
             enabled: (ctx) => async () => {
                 // FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
-                await ctx.stop();
+                await ctx.stopAndDispose();
                 ctx.setServerStatus({
-                    health: "ok",
-                    quiescent: true,
-                    message: "server is not running",
+                    health: "stopped",
                 });
             },
+            disabled: (_) => async () => {},
         },

         analyzerStatus: { enabled: commands.analyzerStatus },

@@ -3,7 +3,7 @@ import * as lc from "vscode-languageclient";
 import * as ra from "./lsp_ext";
 import * as tasks from "./tasks";

-import { Ctx } from "./ctx";
+import { CtxInit } from "./ctx";
 import { makeDebugConfig } from "./debug";
 import { Config, RunnableEnvCfg } from "./config";

@@ -12,7 +12,7 @@ const quickPickButtons = [
 ];

 export async function selectRunnable(
-    ctx: Ctx,
+    ctx: CtxInit,
     prevRunnable?: RunnableQuickPick,
     debuggeeOnly = false,
     showButtons: boolean = true
@@ -20,7 +20,7 @@ export async function selectRunnable(
     const editor = ctx.activeRustEditor;
     if (!editor) return;

-    const client = await ctx.getClient();
+    const client = ctx.client;
     const textDocument: lc.TextDocumentIdentifier = {
         uri: editor.document.uri.toString(),
     };
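selectRunnable now takes CtxInit rather than Ctx, so the old `await ctx.getClient()` becomes a plain property read with no undefined check. CtxInit is presumably an intersection type that makes the optional client mandatory; the sketch below is a guess at that shape, for illustration only:

import * as lc from "vscode-languageclient/node";

// Assumed shapes only, not the extension's actual definitions.
interface CtxLike {
    readonly client?: lc.LanguageClient;
}
type CtxInitLike = CtxLike & { readonly client: lc.LanguageClient };

function runnableClient(ctx: CtxInitLike): lc.LanguageClient {
    // No await and no undefined check: the type guarantees a live client.
    return ctx.client;
}
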

@@ -13,4 +13,4 @@ serde = { version = "1.0.144", features = ["derive"] }
 crossbeam-channel = "0.5.6"

 [dev-dependencies]
-lsp-types = "0.93.1"
+lsp-types = "=0.93.2"