mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-11-15 17:28:09 +00:00

commit 39bd3b2bd7
Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into find-cargo-toml-up-the-fs

40 changed files with 653 additions and 371 deletions
Cargo.lock (generated, 16 lines changed)
@@ -129,7 +129,7 @@ checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
 [[package]]
 name = "chalk-derive"
 version = "0.1.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2939913fb7bb94ac2a6721087dc086be11410702#2939913fb7bb94ac2a6721087dc086be11410702"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -139,7 +139,7 @@ dependencies = [
 [[package]]
 name = "chalk-engine"
 version = "0.9.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2939913fb7bb94ac2a6721087dc086be11410702#2939913fb7bb94ac2a6721087dc086be11410702"
 dependencies = [
  "chalk-macros",
  "rustc-hash",
@@ -148,7 +148,7 @@ dependencies = [
 [[package]]
 name = "chalk-ir"
 version = "0.1.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2939913fb7bb94ac2a6721087dc086be11410702#2939913fb7bb94ac2a6721087dc086be11410702"
 dependencies = [
  "chalk-derive",
  "chalk-engine",
@@ -159,7 +159,7 @@ dependencies = [
 [[package]]
 name = "chalk-macros"
 version = "0.1.1"
-source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2939913fb7bb94ac2a6721087dc086be11410702#2939913fb7bb94ac2a6721087dc086be11410702"
 dependencies = [
  "lazy_static",
 ]
@@ -167,7 +167,7 @@ dependencies = [
 [[package]]
 name = "chalk-rust-ir"
 version = "0.1.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2939913fb7bb94ac2a6721087dc086be11410702#2939913fb7bb94ac2a6721087dc086be11410702"
 dependencies = [
  "chalk-derive",
  "chalk-engine",
@@ -178,7 +178,7 @@ dependencies = [
 [[package]]
 name = "chalk-solve"
 version = "0.1.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=af48f302a1f571b3ca418f7c5aa639a144a34f75#af48f302a1f571b3ca418f7c5aa639a144a34f75"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2939913fb7bb94ac2a6721087dc086be11410702#2939913fb7bb94ac2a6721087dc086be11410702"
 dependencies = [
  "chalk-derive",
  "chalk-engine",
@@ -1308,9 +1308,9 @@ dependencies = [
 
 [[package]]
 name = "rowan"
-version = "0.9.0"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6d03d4eff7a4e8dcc362e4c06bb2b1b33af4bcd64336c7f40a31a05850336b6c"
+checksum = "1ea7cadf87a9d8432e85cb4eb86bd2e765ace60c24ef86e79084dcae5d1c5a19"
 dependencies = [
  "rustc-hash",
  "smol_str",

@@ -1,7 +1,8 @@
 //! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined.
 use rustc_hash::FxHashMap;
 
-use hir::{db::HirDatabase, InFile, PathResolution};
+use hir::{InFile, PathResolution};
+use ra_ide_db::RootDatabase;
 use ra_syntax::ast::{self, AstNode};
 
 pub trait AstTransform<'a> {
@@ -33,18 +34,18 @@ impl<'a> AstTransform<'a> for NullTransformer {
     }
 }
 
-pub struct SubstituteTypeParams<'a, DB: HirDatabase> {
-    db: &'a DB,
+pub struct SubstituteTypeParams<'a> {
+    db: &'a RootDatabase,
     substs: FxHashMap<hir::TypeParam, ast::TypeRef>,
     previous: Box<dyn AstTransform<'a> + 'a>,
 }
 
-impl<'a, DB: HirDatabase> SubstituteTypeParams<'a, DB> {
+impl<'a> SubstituteTypeParams<'a> {
     pub fn for_trait_impl(
-        db: &'a DB,
+        db: &'a RootDatabase,
         trait_: hir::Trait,
         impl_block: ast::ImplBlock,
-    ) -> SubstituteTypeParams<'a, DB> {
+    ) -> SubstituteTypeParams<'a> {
         let substs = get_syntactic_substs(impl_block).unwrap_or_default();
         let generic_def: hir::GenericDef = trait_.into();
         let substs_by_param: FxHashMap<_, _> = generic_def
@@ -95,7 +96,7 @@ impl<'a, DB: HirDatabase> SubstituteTypeParams<'a, DB> {
     }
 }
 
-impl<'a, DB: HirDatabase> AstTransform<'a> for SubstituteTypeParams<'a, DB> {
+impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
     fn get_substitution(
         &self,
         node: InFile<&ra_syntax::SyntaxNode>,
@@ -107,14 +108,14 @@ impl<'a, DB: HirDatabase> AstTransform<'a> for SubstituteTypeParams<'a, DB> {
     }
 }
 
-pub struct QualifyPaths<'a, DB: HirDatabase> {
-    db: &'a DB,
+pub struct QualifyPaths<'a> {
+    db: &'a RootDatabase,
     from: Option<hir::Module>,
     previous: Box<dyn AstTransform<'a> + 'a>,
 }
 
-impl<'a, DB: HirDatabase> QualifyPaths<'a, DB> {
-    pub fn new(db: &'a DB, from: Option<hir::Module>) -> Self {
+impl<'a> QualifyPaths<'a> {
+    pub fn new(db: &'a RootDatabase, from: Option<hir::Module>) -> Self {
         Self { db, from, previous: Box::new(NullTransformer) }
     }
 
@@ -168,7 +169,7 @@ pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>
     N::cast(result).unwrap()
 }
 
-impl<'a, DB: HirDatabase> AstTransform<'a> for QualifyPaths<'a, DB> {
+impl<'a> AstTransform<'a> for QualifyPaths<'a> {
     fn get_substitution(
         &self,
         node: InFile<&ra_syntax::SyntaxNode>,
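
A minimal sketch of how the de-generified API above is meant to be called. This is not part of the commit; the signatures come from the hunks above, and the caller-side bindings (`db`, `trait_`, `impl_block`, `ast_fn`) are assumed to be in scope:

// Hypothetical caller, sketched against the signatures shown in the diff.
fn substitute_in_trait_impl(
    db: &ra_ide_db::RootDatabase,
    trait_: hir::Trait,
    impl_block: ra_syntax::ast::ImplBlock,
    ast_fn: hir::InFile<ra_syntax::ast::FnDef>,
) -> ra_syntax::ast::FnDef {
    // The transform now borrows a concrete &RootDatabase instead of being
    // generic over `DB: HirDatabase`, which is what this change is about.
    let transform = SubstituteTypeParams::for_trait_impl(db, trait_, impl_block);
    apply(&transform, ast_fn)
}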

@@ -43,15 +43,35 @@ pub fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr {
 
 pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option<ast::Expr> {
     let block = block.block()?;
-    let expr = block.expr()?;
-    let non_trivial_children = block.syntax().children().filter(|it| match it.kind() {
-        WHITESPACE | T!['{'] | T!['}'] => false,
-        _ => it != expr.syntax(),
-    });
-    if non_trivial_children.count() > 0 {
-        return None;
+    let has_anything_else = |thing: &SyntaxNode| -> bool {
+        let mut non_trivial_children =
+            block.syntax().children_with_tokens().filter(|it| match it.kind() {
+                WHITESPACE | T!['{'] | T!['}'] => false,
+                _ => it.as_node() != Some(thing),
+            });
+        non_trivial_children.next().is_some()
+    };
+
+    if let Some(expr) = block.expr() {
+        if has_anything_else(expr.syntax()) {
+            return None;
+        }
+        return Some(expr);
+    } else {
+        // Unwrap `{ continue; }`
+        let (stmt,) = block.statements().next_tuple()?;
+        if has_anything_else(stmt.syntax()) {
+            return None;
+        }
+        if let ast::Stmt::ExprStmt(expr_stmt) = stmt {
+            let expr = expr_stmt.expr()?;
+            match expr.syntax().kind() {
+                CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR => return Some(expr),
+                _ => (),
+            }
+        }
     }
-    Some(expr)
+    None
 }
 
 pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str {

@@ -21,9 +21,9 @@ ra_prof = { path = "../ra_prof" }
 ra_syntax = { path = "../ra_syntax" }
 test_utils = { path = "../test_utils" }
 
-chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" }
-chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" }
-chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" }
+chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "2939913fb7bb94ac2a6721087dc086be11410702" }
+chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "2939913fb7bb94ac2a6721087dc086be11410702" }
+chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "2939913fb7bb94ac2a6721087dc086be11410702" }
 
 lalrpop-intern = "0.15.1"

@@ -14,7 +14,7 @@ use crate::db::HirDatabase;
 
 use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk};
 
-use self::chalk::{from_chalk, ToChalk, TypeFamily};
+use self::chalk::{from_chalk, Interner, ToChalk};
 
 pub(crate) mod chalk;
 mod builtin;
@@ -22,7 +22,7 @@ mod builtin;
 #[derive(Debug, Clone)]
 pub struct TraitSolver {
     krate: CrateId,
-    inner: Arc<Mutex<chalk_solve::Solver<TypeFamily>>>,
+    inner: Arc<Mutex<chalk_solve::Solver<Interner>>>,
 }
 
 /// We need eq for salsa
@@ -38,8 +38,8 @@ impl TraitSolver {
     fn solve(
         &self,
         db: &impl HirDatabase,
-        goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<TypeFamily>>>,
-    ) -> Option<chalk_solve::Solution<TypeFamily>> {
+        goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
+    ) -> Option<chalk_solve::Solution<Interner>> {
         let context = ChalkContext { db, krate: self.krate };
         log::debug!("solve goal: {:?}", goal);
         let mut solver = match self.inner.lock() {
@@ -110,7 +110,7 @@ pub(crate) fn trait_solver_query(
     TraitSolver { krate, inner: Arc::new(Mutex::new(create_chalk_solver())) }
 }
 
-fn create_chalk_solver() -> chalk_solve::Solver<TypeFamily> {
+fn create_chalk_solver() -> chalk_solve::Solver<Interner> {
     let solver_choice =
         chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE, expected_answers: None };
     solver_choice.into_solver()
@@ -242,9 +242,9 @@ pub(crate) fn trait_solve_query(
 
 fn solution_from_chalk(
     db: &impl HirDatabase,
-    solution: chalk_solve::Solution<TypeFamily>,
+    solution: chalk_solve::Solution<Interner>,
 ) -> Solution {
-    let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<TypeFamily>>| {
+    let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| {
         let value = subst
             .value
             .into_iter()

@@ -3,7 +3,7 @@ use std::{fmt, sync::Arc};
 
 use log::debug;
 
-use chalk_ir::{cast::Cast, GoalData, Parameter, PlaceholderIndex, TypeName, UniverseIndex};
+use chalk_ir::{cast::Cast, Goal, GoalData, Parameter, PlaceholderIndex, TypeName, UniverseIndex};
 
 use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId};
 use ra_db::{
@@ -18,13 +18,14 @@ use crate::{
 };
 
 #[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
-pub struct TypeFamily {}
+pub struct Interner {}
 
-impl chalk_ir::family::TypeFamily for TypeFamily {
+impl chalk_ir::interner::Interner for Interner {
     type InternedType = Box<chalk_ir::TyData<Self>>;
     type InternedLifetime = chalk_ir::LifetimeData<Self>;
     type InternedParameter = chalk_ir::ParameterData<Self>;
     type InternedGoal = Arc<GoalData<Self>>;
+    type InternedGoals = Vec<Goal<Self>>;
     type InternedSubstitution = Vec<Parameter<Self>>;
     type DefId = InternId;
 
@@ -85,10 +86,18 @@ impl chalk_ir::family::TypeFamily for TypeFamily {
         Arc::new(goal)
     }
 
+    fn intern_goals(data: impl IntoIterator<Item = Goal<Self>>) -> Self::InternedGoals {
+        data.into_iter().collect()
+    }
+
     fn goal_data(goal: &Arc<GoalData<Self>>) -> &GoalData<Self> {
         goal
     }
 
+    fn goals_data(goals: &Vec<Goal<Interner>>) -> &[Goal<Interner>] {
+        goals
+    }
+
     fn intern_substitution<E>(
         data: impl IntoIterator<Item = Result<Parameter<Self>, E>>,
     ) -> Result<Vec<Parameter<Self>>, E> {
@@ -100,20 +109,20 @@ impl chalk_ir::family::TypeFamily for TypeFamily {
     }
 }
 
-impl chalk_ir::family::HasTypeFamily for TypeFamily {
-    type TypeFamily = Self;
+impl chalk_ir::interner::HasInterner for Interner {
+    type Interner = Self;
 }
 
-pub type AssocTypeId = chalk_ir::AssocTypeId<TypeFamily>;
-pub type AssociatedTyDatum = chalk_rust_ir::AssociatedTyDatum<TypeFamily>;
-pub type TraitId = chalk_ir::TraitId<TypeFamily>;
-pub type TraitDatum = chalk_rust_ir::TraitDatum<TypeFamily>;
-pub type StructId = chalk_ir::StructId<TypeFamily>;
-pub type StructDatum = chalk_rust_ir::StructDatum<TypeFamily>;
-pub type ImplId = chalk_ir::ImplId<TypeFamily>;
-pub type ImplDatum = chalk_rust_ir::ImplDatum<TypeFamily>;
+pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub type AssociatedTyDatum = chalk_rust_ir::AssociatedTyDatum<Interner>;
+pub type TraitId = chalk_ir::TraitId<Interner>;
+pub type TraitDatum = chalk_rust_ir::TraitDatum<Interner>;
+pub type StructId = chalk_ir::StructId<Interner>;
+pub type StructDatum = chalk_rust_ir::StructDatum<Interner>;
+pub type ImplId = chalk_ir::ImplId<Interner>;
+pub type ImplDatum = chalk_rust_ir::ImplDatum<Interner>;
 pub type AssociatedTyValueId = chalk_rust_ir::AssociatedTyValueId;
-pub type AssociatedTyValue = chalk_rust_ir::AssociatedTyValue<TypeFamily>;
+pub type AssociatedTyValue = chalk_rust_ir::AssociatedTyValue<Interner>;
 
 pub(super) trait ToChalk {
     type Chalk;
@@ -129,8 +138,8 @@ where
 }
 
 impl ToChalk for Ty {
-    type Chalk = chalk_ir::Ty<TypeFamily>;
-    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Ty<TypeFamily> {
+    type Chalk = chalk_ir::Ty<Interner>;
+    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Ty<Interner> {
         match self {
             Ty::Apply(apply_ty) => {
                 let name = apply_ty.ctor.to_chalk(db);
@@ -148,7 +157,7 @@ impl ToChalk for Ty {
                     ui: UniverseIndex::ROOT,
                     idx: interned_id.as_intern_id().as_usize(),
                 }
-                .to_ty::<TypeFamily>()
+                .to_ty::<Interner>()
             }
             Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx as usize).intern(),
             Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"),
@@ -169,7 +178,7 @@ impl ToChalk for Ty {
             }
         }
     }
-    fn from_chalk(db: &impl HirDatabase, chalk: chalk_ir::Ty<TypeFamily>) -> Self {
+    fn from_chalk(db: &impl HirDatabase, chalk: chalk_ir::Ty<Interner>) -> Self {
         match chalk.data().clone() {
             chalk_ir::TyData::Apply(apply_ty) => match apply_ty.name {
                 TypeName::Error => Ty::Unknown,
@@ -205,13 +214,13 @@ impl ToChalk for Ty {
 }
 
 impl ToChalk for Substs {
-    type Chalk = chalk_ir::Substitution<TypeFamily>;
+    type Chalk = chalk_ir::Substitution<Interner>;
 
-    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Substitution<TypeFamily> {
+    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Substitution<Interner> {
         chalk_ir::Substitution::from(self.iter().map(|ty| ty.clone().to_chalk(db)))
     }
 
-    fn from_chalk(db: &impl HirDatabase, parameters: chalk_ir::Substitution<TypeFamily>) -> Substs {
+    fn from_chalk(db: &impl HirDatabase, parameters: chalk_ir::Substitution<Interner>) -> Substs {
         let tys = parameters
             .into_iter()
             .map(|p| match p.ty() {
@@ -224,15 +233,15 @@ impl ToChalk for Substs {
 }
 
 impl ToChalk for TraitRef {
-    type Chalk = chalk_ir::TraitRef<TypeFamily>;
+    type Chalk = chalk_ir::TraitRef<Interner>;
 
-    fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<TypeFamily> {
+    fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<Interner> {
         let trait_id = self.trait_.to_chalk(db);
         let substitution = self.substs.to_chalk(db);
         chalk_ir::TraitRef { trait_id, substitution }
     }
 
-    fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<TypeFamily>) -> Self {
+    fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<Interner>) -> Self {
         let trait_ = from_chalk(db, trait_ref.trait_id);
         let substs = from_chalk(db, trait_ref.substitution);
         TraitRef { trait_, substs }
@@ -252,9 +261,9 @@ impl ToChalk for hir_def::TraitId {
 }
 
 impl ToChalk for TypeCtor {
-    type Chalk = TypeName<TypeFamily>;
+    type Chalk = TypeName<Interner>;
 
-    fn to_chalk(self, db: &impl HirDatabase) -> TypeName<TypeFamily> {
+    fn to_chalk(self, db: &impl HirDatabase) -> TypeName<Interner> {
         match self {
             TypeCtor::AssociatedType(type_alias) => {
                 let type_id = type_alias.to_chalk(db);
@@ -268,7 +277,7 @@ impl ToChalk for TypeCtor {
         }
     }
 
-    fn from_chalk(db: &impl HirDatabase, type_name: TypeName<TypeFamily>) -> TypeCtor {
+    fn from_chalk(db: &impl HirDatabase, type_name: TypeName<Interner>) -> TypeCtor {
         match type_name {
             TypeName::Struct(struct_id) => db.lookup_intern_type_ctor(struct_id.into()),
             TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)),
@@ -317,9 +326,9 @@ impl ToChalk for AssocTyValue {
 }
 
 impl ToChalk for GenericPredicate {
-    type Chalk = chalk_ir::QuantifiedWhereClause<TypeFamily>;
+    type Chalk = chalk_ir::QuantifiedWhereClause<Interner>;
 
-    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::QuantifiedWhereClause<TypeFamily> {
+    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::QuantifiedWhereClause<Interner> {
         match self {
             GenericPredicate::Implemented(trait_ref) => {
                 make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0)
@@ -337,7 +346,7 @@ impl ToChalk for GenericPredicate {
 
     fn from_chalk(
         db: &impl HirDatabase,
-        where_clause: chalk_ir::QuantifiedWhereClause<TypeFamily>,
+        where_clause: chalk_ir::QuantifiedWhereClause<Interner>,
     ) -> GenericPredicate {
         match where_clause.value {
             chalk_ir::WhereClause::Implemented(tr) => {
@@ -353,9 +362,9 @@ impl ToChalk for GenericPredicate {
 }
 
 impl ToChalk for ProjectionTy {
-    type Chalk = chalk_ir::AliasTy<TypeFamily>;
+    type Chalk = chalk_ir::AliasTy<Interner>;
 
-    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::AliasTy<TypeFamily> {
+    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::AliasTy<Interner> {
         chalk_ir::AliasTy {
             associated_ty_id: self.associated_ty.to_chalk(db),
             substitution: self.parameters.to_chalk(db),
@@ -364,7 +373,7 @@ impl ToChalk for ProjectionTy {
 
     fn from_chalk(
         db: &impl HirDatabase,
-        projection_ty: chalk_ir::AliasTy<TypeFamily>,
+        projection_ty: chalk_ir::AliasTy<Interner>,
     ) -> ProjectionTy {
         ProjectionTy {
             associated_ty: from_chalk(db, projection_ty.associated_ty_id),
@@ -374,28 +383,28 @@ impl ToChalk for ProjectionTy {
 }
 
 impl ToChalk for super::ProjectionPredicate {
-    type Chalk = chalk_ir::Normalize<TypeFamily>;
+    type Chalk = chalk_ir::Normalize<Interner>;
 
-    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<TypeFamily> {
+    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<Interner> {
         chalk_ir::Normalize { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) }
     }
 
-    fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<TypeFamily>) -> Self {
+    fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<Interner>) -> Self {
        unimplemented!()
    }
 }
 
 impl ToChalk for Obligation {
-    type Chalk = chalk_ir::DomainGoal<TypeFamily>;
+    type Chalk = chalk_ir::DomainGoal<Interner>;
 
-    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::DomainGoal<TypeFamily> {
+    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::DomainGoal<Interner> {
         match self {
             Obligation::Trait(tr) => tr.to_chalk(db).cast(),
             Obligation::Projection(pr) => pr.to_chalk(db).cast(),
         }
     }
 
-    fn from_chalk(_db: &impl HirDatabase, _goal: chalk_ir::DomainGoal<TypeFamily>) -> Self {
+    fn from_chalk(_db: &impl HirDatabase, _goal: chalk_ir::DomainGoal<Interner>) -> Self {
         unimplemented!()
     }
 }
@@ -418,16 +427,16 @@ where
 }
 
 impl ToChalk for Arc<super::TraitEnvironment> {
-    type Chalk = chalk_ir::Environment<TypeFamily>;
+    type Chalk = chalk_ir::Environment<Interner>;
 
-    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Environment<TypeFamily> {
+    fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Environment<Interner> {
         let mut clauses = Vec::new();
         for pred in &self.predicates {
             if pred.is_error() {
                 // for env, we just ignore errors
                 continue;
             }
-            let program_clause: chalk_ir::ProgramClause<TypeFamily> =
+            let program_clause: chalk_ir::ProgramClause<Interner> =
                 pred.clone().to_chalk(db).cast();
             clauses.push(program_clause.into_from_env_clause());
         }
@@ -436,7 +445,7 @@ impl ToChalk for Arc<super::TraitEnvironment> {
 
     fn from_chalk(
         _db: &impl HirDatabase,
-        _env: chalk_ir::Environment<TypeFamily>,
+        _env: chalk_ir::Environment<Interner>,
     ) -> Arc<super::TraitEnvironment> {
         unimplemented!()
     }
@@ -444,7 +453,7 @@ impl ToChalk for Arc<super::TraitEnvironment> {
 
 impl<T: ToChalk> ToChalk for super::InEnvironment<T>
 where
-    T::Chalk: chalk_ir::family::HasTypeFamily<TypeFamily = TypeFamily>,
+    T::Chalk: chalk_ir::interner::HasInterner<Interner = Interner>,
 {
     type Chalk = chalk_ir::InEnvironment<T::Chalk>;
 
@@ -522,7 +531,7 @@ fn convert_where_clauses(
     db: &impl HirDatabase,
     def: GenericDefId,
     substs: &Substs,
-) -> Vec<chalk_ir::QuantifiedWhereClause<TypeFamily>> {
+) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
     let generic_predicates = db.generic_predicates(def);
     let mut result = Vec::with_capacity(generic_predicates.len());
     for pred in generic_predicates.iter() {
@@ -535,7 +544,7 @@ fn convert_where_clauses(
     result
 }
 
-impl<'a, DB> chalk_solve::RustIrDatabase<TypeFamily> for ChalkContext<'a, DB>
+impl<'a, DB> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a, DB>
 where
     DB: HirDatabase,
 {
@@ -554,7 +563,7 @@ where
     fn impls_for_trait(
         &self,
         trait_id: TraitId,
-        parameters: &[Parameter<TypeFamily>],
+        parameters: &[Parameter<Interner>],
     ) -> Vec<ImplId> {
         debug!("impls_for_trait {:?}", trait_id);
         let trait_: hir_def::TraitId = from_chalk(self.db, trait_id);
@@ -589,14 +598,14 @@ where
     fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
         self.db.associated_ty_value(self.krate, id)
     }
-    fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<TypeFamily>> {
+    fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<Interner>> {
         vec![]
     }
     fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec<ImplId> {
         // We don't do coherence checking (yet)
         unimplemented!()
     }
-    fn as_struct_id(&self, id: &TypeName<TypeFamily>) -> Option<StructId> {
+    fn as_struct_id(&self, id: &TypeName<Interner>) -> Option<StructId> {
         match id {
             TypeName::Struct(struct_id) => Some(*struct_id),
             _ => None,
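
The `ToChalk` impls above come in symmetric pairs, and the rename leaves the round-trip shape untouched; only the interner parameter changes. A sketch (not part of the commit; `db` and `ty` are assumed bindings):

// Sketch: convert a rust-analyzer `Ty` into the `Interner`-parameterized
// Chalk type and back, per the `ToChalk` trait shown above.
fn roundtrip(db: &impl HirDatabase, ty: Ty) -> Ty {
    let chalk_ty: chalk_ir::Ty<Interner> = ty.to_chalk(db);
    from_chalk(db, chalk_ty)
}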
@@ -4,16 +4,11 @@ use indexmap::IndexMap;
 
 use hir::db::AstDatabase;
 use ra_ide_db::RootDatabase;
-use ra_syntax::{
-    ast::{self, DocCommentsOwner},
-    match_ast, AstNode, TextRange,
-};
+use ra_syntax::{ast, match_ast, AstNode, TextRange};
 
 use crate::{
-    call_info::FnCallNode,
-    display::{ShortLabel, ToNav},
-    expand::descend_into_macros,
-    goto_definition, references, FilePosition, NavigationTarget, RangeInfo,
+    call_info::FnCallNode, display::ToNav, expand::descend_into_macros, goto_definition,
+    references, FilePosition, NavigationTarget, RangeInfo,
 };
 
 #[derive(Debug, Clone)]
@@ -49,6 +44,7 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
     let refs = references::find_all_refs(db, position, None)?;
 
     let mut calls = CallLocations::default();
+    let mut sb = hir::SourceBinder::new(db);
 
     for reference in refs.info.references() {
         let file_id = reference.file_range.file_id;
@@ -62,12 +58,8 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
         match_ast! {
             match node {
                 ast::FnDef(it) => {
-                    Some(NavigationTarget::from_named(
-                        db,
-                        token.with_value(&it),
-                        it.doc_comment_text(),
-                        it.short_label(),
-                    ))
+                    let def = sb.to_def(token.with_value(it))?;
+                    Some(def.to_nav(sb.db))
                 },
                 _ => { None },
             }

@@ -125,7 +125,7 @@ impl NavigationTarget {
     }
 
     /// Allows `NavigationTarget` to be created from a `NameOwner`
-    pub(crate) fn from_named(
+    fn from_named(
         db: &RootDatabase,
         node: InFile<&dyn ast::NameOwner>,
         docs: Option<String>,

@@ -145,25 +145,25 @@ fn extend_tokens_from_range(
     let src = db.parse_or_expand(expanded.file_id)?;
     let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;
 
-    let validate = |token: SyntaxToken| {
+    let validate = |token: &SyntaxToken| {
         let node = descend_into_macros(db, file_id, token.clone());
-        if node.file_id == expanded.file_id
+        node.file_id == expanded.file_id
             && node.value.text_range().is_subrange(&parent.text_range())
-        {
-            Some(token)
-        } else {
-            None
-        }
     };
 
     // Find the first and last text range under expanded parent
     let first = successors(Some(first_token), |token| {
-        validate(skip_whitespace(token.prev_token()?, Direction::Prev)?)
+        let token = token.prev_token()?;
+        skip_whitespace(token, Direction::Prev)
     })
+    .take_while(validate)
    .last()?;
 
     let last = successors(Some(last_token), |token| {
-        validate(skip_whitespace(token.next_token()?, Direction::Next)?)
+        let token = token.next_token()?;
+        skip_whitespace(token, Direction::Next)
     })
+    .take_while(validate)
    .last()?;
 
     let range = union_range(first.text_range(), last.text_range());
@@ -334,10 +334,12 @@ fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
-    use crate::mock_analysis::single_file;
     use test_utils::extract_offset;
 
+    use crate::mock_analysis::single_file;
+
+    use super::*;
+
     fn do_check(before: &str, afters: &[&str]) {
         let (cursor, before) = extract_offset(before);
         let (analysis, file_id) = single_file(&before);

@@ -227,6 +227,31 @@ fn foo() {
     );
 }
 
+#[test]
+fn test_join_lines_diverging_block() {
+    let before = r"
+            fn foo() {
+                loop {
+                    match x {
+                        92 => <|>{
+                            continue;
+                        }
+                    }
+                }
+            }
+            ";
+    let after = r"
+            fn foo() {
+                loop {
+                    match x {
+                        92 => <|>continue,
+                    }
+                }
+            }
+            ";
+    check_join_lines(before, after);
+}
+
 #[test]
 fn join_lines_adds_comma_for_block_in_match_arm() {
     check_join_lines(

@@ -75,7 +75,7 @@ pub use crate::{
     runnables::{Runnable, RunnableKind, TestId},
     source_change::{FileSystemEdit, SourceChange, SourceFileEdit},
     ssr::SsrError,
-    syntax_highlighting::HighlightedRange,
+    syntax_highlighting::{tags, HighlightedRange},
 };
 
 pub use hir::Documentation;

@@ -17,32 +17,32 @@ use crate::{
 };
 
 pub mod tags {
-    pub(crate) const FIELD: &str = "field";
-    pub(crate) const FUNCTION: &str = "function";
-    pub(crate) const MODULE: &str = "module";
-    pub(crate) const CONSTANT: &str = "constant";
-    pub(crate) const MACRO: &str = "macro";
+    pub const FIELD: &str = "field";
+    pub const FUNCTION: &str = "function";
+    pub const MODULE: &str = "module";
+    pub const CONSTANT: &str = "constant";
+    pub const MACRO: &str = "macro";
 
-    pub(crate) const VARIABLE: &str = "variable";
-    pub(crate) const VARIABLE_MUT: &str = "variable.mut";
+    pub const VARIABLE: &str = "variable";
+    pub const VARIABLE_MUT: &str = "variable.mut";
 
-    pub(crate) const TYPE: &str = "type";
-    pub(crate) const TYPE_BUILTIN: &str = "type.builtin";
-    pub(crate) const TYPE_SELF: &str = "type.self";
-    pub(crate) const TYPE_PARAM: &str = "type.param";
-    pub(crate) const TYPE_LIFETIME: &str = "type.lifetime";
+    pub const TYPE: &str = "type";
+    pub const TYPE_BUILTIN: &str = "type.builtin";
+    pub const TYPE_SELF: &str = "type.self";
+    pub const TYPE_PARAM: &str = "type.param";
+    pub const TYPE_LIFETIME: &str = "type.lifetime";
 
-    pub(crate) const LITERAL_BYTE: &str = "literal.byte";
-    pub(crate) const LITERAL_NUMERIC: &str = "literal.numeric";
-    pub(crate) const LITERAL_CHAR: &str = "literal.char";
+    pub const LITERAL_BYTE: &str = "literal.byte";
+    pub const LITERAL_NUMERIC: &str = "literal.numeric";
+    pub const LITERAL_CHAR: &str = "literal.char";
 
-    pub(crate) const LITERAL_COMMENT: &str = "comment";
-    pub(crate) const LITERAL_STRING: &str = "string";
-    pub(crate) const LITERAL_ATTRIBUTE: &str = "attribute";
+    pub const LITERAL_COMMENT: &str = "comment";
+    pub const LITERAL_STRING: &str = "string";
+    pub const LITERAL_ATTRIBUTE: &str = "attribute";
 
-    pub(crate) const KEYWORD: &str = "keyword";
-    pub(crate) const KEYWORD_UNSAFE: &str = "keyword.unsafe";
-    pub(crate) const KEYWORD_CONTROL: &str = "keyword.control";
+    pub const KEYWORD: &str = "keyword";
+    pub const KEYWORD_UNSAFE: &str = "keyword.unsafe";
+    pub const KEYWORD_CONTROL: &str = "keyword.control";
 }
 
 #[derive(Debug)]

@@ -12,9 +12,9 @@ doctest = false
 
 [dependencies]
 itertools = "0.8.2"
-rowan = "0.9.0"
+rowan = "0.9.1"
 rustc_lexer = "0.1.0"
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"

@@ -1,12 +1,15 @@
 //! Advertizes the capabilities of the LSP Server.
 
+use crate::semantic_tokens;
+
 use lsp_types::{
     CallHierarchyServerCapability, CodeActionProviderCapability, CodeLensOptions,
     CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability,
     ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
-    SelectionRangeProviderCapability, ServerCapabilities, SignatureHelpOptions,
-    TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
-    TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+    SelectionRangeProviderCapability, SemanticTokensDocumentProvider, SemanticTokensLegend,
+    SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
+    SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
+    TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
 };
 
 pub fn server_capabilities() -> ServerCapabilities {
@@ -57,7 +60,20 @@ pub fn server_capabilities() -> ServerCapabilities {
         execute_command_provider: None,
         workspace: None,
         call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
-        semantic_tokens_provider: None,
+        semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensOptions(
+            SemanticTokensOptions {
+                legend: SemanticTokensLegend {
+                    token_types: semantic_tokens::supported_token_types().iter().cloned().collect(),
+                    token_modifiers: semantic_tokens::supported_token_modifiers()
+                        .iter()
+                        .cloned()
+                        .collect(),
+                },
+
+                document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
+                ..SemanticTokensOptions::default()
+            },
+        )),
         experimental: Default::default(),
     }
 }

@@ -4,11 +4,12 @@
 use lsp_types::{
     self, CreateFile, DiagnosticSeverity, DocumentChangeOperation, DocumentChanges, Documentation,
     Location, LocationLink, MarkupContent, MarkupKind, Position, Range, RenameFile, ResourceOp,
-    SymbolKind, TextDocumentEdit, TextDocumentIdentifier, TextDocumentItem,
-    TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, WorkspaceEdit,
+    SemanticTokenModifier, SemanticTokenType, SymbolKind, TextDocumentEdit, TextDocumentIdentifier,
+    TextDocumentItem, TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier,
+    WorkspaceEdit,
 };
 use ra_ide::{
-    translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
+    tags, translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
     FileRange, FileSystemEdit, Fold, FoldKind, InsertTextFormat, LineCol, LineIndex,
     NavigationTarget, RangeInfo, ReferenceAccess, Severity, SourceChange, SourceFileEdit,
 };
@@ -16,7 +17,7 @@ use ra_syntax::{SyntaxKind, TextRange, TextUnit};
 use ra_text_edit::{AtomTextEdit, TextEdit};
 use ra_vfs::LineEndings;
 
-use crate::{req, world::WorldSnapshot, Result};
+use crate::{req, semantic_tokens, world::WorldSnapshot, Result};
 
 pub trait Conv {
     type Output;
@@ -302,6 +303,76 @@ impl ConvWith<&FoldConvCtx<'_>> for Fold {
     }
 }
 
+impl Conv for &'static str {
+    type Output = (SemanticTokenType, Vec<SemanticTokenModifier>);
+
+    fn conv(self) -> (SemanticTokenType, Vec<SemanticTokenModifier>) {
+        let token_type: SemanticTokenType = match self {
+            tags::FIELD => SemanticTokenType::MEMBER,
+            tags::FUNCTION => SemanticTokenType::FUNCTION,
+            tags::MODULE => SemanticTokenType::NAMESPACE,
+            tags::CONSTANT => {
+                return (
+                    SemanticTokenType::VARIABLE,
+                    vec![SemanticTokenModifier::STATIC, SemanticTokenModifier::READONLY],
+                )
+            }
+            tags::MACRO => SemanticTokenType::MACRO,
+
+            tags::VARIABLE => {
+                return (SemanticTokenType::VARIABLE, vec![SemanticTokenModifier::READONLY])
+            }
+            tags::VARIABLE_MUT => SemanticTokenType::VARIABLE,
+
+            tags::TYPE => SemanticTokenType::TYPE,
+            tags::TYPE_BUILTIN => SemanticTokenType::TYPE,
+            tags::TYPE_SELF => {
+                return (SemanticTokenType::TYPE, vec![SemanticTokenModifier::REFERENCE])
+            }
+            tags::TYPE_PARAM => SemanticTokenType::TYPE_PARAMETER,
+            tags::TYPE_LIFETIME => {
+                return (SemanticTokenType::LABEL, vec![SemanticTokenModifier::REFERENCE])
+            }
+
+            tags::LITERAL_BYTE => SemanticTokenType::NUMBER,
+            tags::LITERAL_NUMERIC => SemanticTokenType::NUMBER,
+            tags::LITERAL_CHAR => SemanticTokenType::NUMBER,
+
+            tags::LITERAL_COMMENT => {
+                return (SemanticTokenType::COMMENT, vec![SemanticTokenModifier::DOCUMENTATION])
+            }
+
+            tags::LITERAL_STRING => SemanticTokenType::STRING,
+            tags::LITERAL_ATTRIBUTE => SemanticTokenType::KEYWORD,
+
+            tags::KEYWORD => SemanticTokenType::KEYWORD,
+            tags::KEYWORD_UNSAFE => SemanticTokenType::KEYWORD,
+            tags::KEYWORD_CONTROL => SemanticTokenType::KEYWORD,
+            unknown => panic!("Unknown semantic token: {}", unknown),
+        };
+
+        (token_type, vec![])
+    }
+}
+
+impl Conv for (SemanticTokenType, Vec<SemanticTokenModifier>) {
+    type Output = (u32, u32);
+
+    fn conv(self) -> Self::Output {
+        let token_index =
+            semantic_tokens::supported_token_types().iter().position(|it| *it == self.0).unwrap();
+        let mut token_modifier_bitset = 0;
+        for modifier in self.1.iter() {
+            token_modifier_bitset |= semantic_tokens::supported_token_modifiers()
+                .iter()
+                .position(|it| it == modifier)
+                .unwrap();
+        }
+
+        (token_index as u32, token_modifier_bitset as u32)
+    }
+}
+
 impl<T: ConvWith<CTX>, CTX> ConvWith<CTX> for Option<T> {
     type Output = Option<T::Output>;
 
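
A worked example of the two-step `Conv` chain above (a sketch, not in the commit): in the `SUPPORTED_TYPES` table from `semantic_tokens.rs` further down, `TYPE_PARAMETER` sits at index 12, so the `type.param` tag converts to legend index 12 with an empty modifier bitset:

// Sketch: tag string -> (SemanticTokenType, modifiers) -> (legend index, bitset).
fn tag_to_wire_format() {
    let pair: (SemanticTokenType, Vec<SemanticTokenModifier>) = tags::TYPE_PARAM.conv();
    let (token_type, token_modifiers): (u32, u32) = pair.conv();
    assert_eq!((token_type, token_modifiers), (12, 0));
}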
@@ -36,6 +36,7 @@ pub mod req;
 mod config;
 mod world;
 mod diagnostics;
+mod semantic_tokens;
 
 use serde::de::DeserializeOwned;

@@ -528,6 +528,7 @@ fn on_request(
         .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
         .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
         .on::<req::Ssr>(handlers::handle_ssr)?
+        .on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
         .finish();
     Ok(())
 }

@@ -16,8 +16,9 @@ use lsp_types::{
     CodeAction, CodeActionOrCommand, CodeActionResponse, CodeLens, Command, CompletionItem,
     Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange,
     FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position,
-    PrepareRenameResponse, Range, RenameParams, SymbolInformation, TextDocumentIdentifier,
-    TextEdit, WorkspaceEdit,
+    PrepareRenameResponse, Range, RenameParams, SemanticTokenModifier, SemanticTokenType,
+    SemanticTokens, SemanticTokensParams, SemanticTokensResult, SymbolInformation,
+    TextDocumentIdentifier, TextEdit, WorkspaceEdit,
 };
 use ra_ide::{
     AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind,
@@ -38,6 +39,7 @@ use crate::{
     diagnostics::DiagnosticTask,
     from_json,
     req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind},
+    semantic_tokens::SemanticTokensBuilder,
     world::WorldSnapshot,
     LspError, Result,
 };
@@ -1068,3 +1070,25 @@ pub fn handle_call_hierarchy_outgoing(
 
     Ok(Some(res))
 }
+
+pub fn handle_semantic_tokens(
+    world: WorldSnapshot,
+    params: SemanticTokensParams,
+) -> Result<Option<SemanticTokensResult>> {
+    let _p = profile("handle_semantic_tokens");
+
+    let file_id = params.text_document.try_conv_with(&world)?;
+    let line_index = world.analysis().file_line_index(file_id)?;
+
+    let mut builder = SemanticTokensBuilder::default();
+
+    for h in world.analysis().highlight(file_id)?.into_iter() {
+        let type_and_modifiers: (SemanticTokenType, Vec<SemanticTokenModifier>) = h.tag.conv();
+        let (token_type, token_modifiers) = type_and_modifiers.conv();
+        builder.push(h.range.conv_with(&line_index), token_type, token_modifiers);
+    }
+
+    let tokens = SemanticTokens { data: builder.build(), ..Default::default() };
+
+    Ok(Some(tokens.into()))
+}

@@ -12,9 +12,9 @@ pub use lsp_types::{
     DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType,
     PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken,
     PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange,
-    SelectionRangeParams, ServerCapabilities, ShowMessageParams, SignatureHelp, SymbolKind,
-    TextDocumentEdit, TextDocumentPositionParams, TextEdit, WorkDoneProgressParams, WorkspaceEdit,
-    WorkspaceSymbolParams,
+    SelectionRangeParams, SemanticTokensParams, SemanticTokensResult, ServerCapabilities,
+    ShowMessageParams, SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
+    TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
 };
 
 pub enum AnalyzerStatus {}

crates/rust-analyzer/src/semantic_tokens.rs (new file, 94 lines)
@@ -0,0 +1,94 @@
+//! Semantic Tokens helpers
+
+use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType};
+
+const SUPPORTED_TYPES: &[SemanticTokenType] = &[
+    SemanticTokenType::COMMENT,
+    SemanticTokenType::KEYWORD,
+    SemanticTokenType::STRING,
+    SemanticTokenType::NUMBER,
+    SemanticTokenType::REGEXP,
+    SemanticTokenType::OPERATOR,
+    SemanticTokenType::NAMESPACE,
+    SemanticTokenType::TYPE,
+    SemanticTokenType::STRUCT,
+    SemanticTokenType::CLASS,
+    SemanticTokenType::INTERFACE,
+    SemanticTokenType::ENUM,
+    SemanticTokenType::TYPE_PARAMETER,
+    SemanticTokenType::FUNCTION,
+    SemanticTokenType::MEMBER,
+    SemanticTokenType::PROPERTY,
+    SemanticTokenType::MACRO,
+    SemanticTokenType::VARIABLE,
+    SemanticTokenType::PARAMETER,
+    SemanticTokenType::LABEL,
+];
+
+const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
+    SemanticTokenModifier::DOCUMENTATION,
+    SemanticTokenModifier::DECLARATION,
+    SemanticTokenModifier::DEFINITION,
+    SemanticTokenModifier::REFERENCE,
+    SemanticTokenModifier::STATIC,
+    SemanticTokenModifier::ABSTRACT,
+    SemanticTokenModifier::DEPRECATED,
+    SemanticTokenModifier::ASYNC,
+    SemanticTokenModifier::VOLATILE,
+    SemanticTokenModifier::READONLY,
+];
+
+/// Token types that the server supports
+pub(crate) fn supported_token_types() -> &'static [SemanticTokenType] {
+    SUPPORTED_TYPES
+}
+
+/// Token modifiers that the server supports
+pub(crate) fn supported_token_modifiers() -> &'static [SemanticTokenModifier] {
+    SUPPORTED_MODIFIERS
+}
+
+/// Tokens are encoded relative to each other.
+///
+/// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
+#[derive(Default)]
+pub(crate) struct SemanticTokensBuilder {
+    prev_line: u32,
+    prev_char: u32,
+    data: Vec<SemanticToken>,
+}
+
+impl SemanticTokensBuilder {
+    /// Push a new token onto the builder
+    pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
+        let mut push_line = range.start.line as u32;
+        let mut push_char = range.start.character as u32;
+
+        if !self.data.is_empty() {
+            push_line -= self.prev_line;
+            if push_line == 0 {
+                push_char -= self.prev_char;
+            }
+        }
+
+        // A token cannot be multiline
+        let token_len = range.end.character - range.start.character;
+
+        let token = SemanticToken {
+            delta_line: push_line,
+            delta_start: push_char,
+            length: token_len as u32,
+            token_type: token_index,
+            token_modifiers_bitset: modifier_bitset,
+        };
+
+        self.data.push(token);
+
+        self.prev_line = range.start.line as u32;
+        self.prev_char = range.start.character as u32;
+    }
+
+    pub fn build(self) -> Vec<SemanticToken> {
+        self.data
+    }
+}
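
Because tokens are delta-encoded against the previous token, a short sketch (an assumed standalone test, not part of the commit) shows what the builder emits:

// Sketch: two tokens on zero-based line 2, at columns 0..4 and 10..13.
use lsp_types::{Position, Range};

fn builder_demo() {
    let mut builder = SemanticTokensBuilder::default();
    builder.push(Range::new(Position::new(2, 0), Position::new(2, 4)), 1, 0);
    builder.push(Range::new(Position::new(2, 10), Position::new(2, 13)), 2, 0);
    let data = builder.build();
    // The first token is absolute: delta_line 2, delta_start 0, length 4.
    assert_eq!((data[0].delta_line, data[0].delta_start, data[0].length), (2, 0, 4));
    // The second is relative to the first: same line, start 10 - 0 = 10, length 3.
    assert_eq!((data[1].delta_line, data[1].delta_start, data[1].length), (0, 10, 3));
}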

@@ -32,6 +32,7 @@ module.exports = {
         "@typescript-eslint/semi": [
             "error",
             "always"
-        ]
+        ],
+        "@typescript-eslint/no-unnecessary-type-assertion": "error"
     }
 };

editors/code/package-lock.json (generated, 6 lines changed)
@@ -1575,9 +1575,9 @@
             }
         },
         "typescript": {
-            "version": "3.7.5",
-            "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.5.tgz",
-            "integrity": "sha512-/P5lkRXkWHNAbcJIiHPfRoKqyd7bsyCma1hZNUGfn20qm64T6ZBlrzprymeu918H+mB/0rIg2gGK/BXkhhYgBw==",
+            "version": "3.8.2",
+            "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.8.2.tgz",
+            "integrity": "sha512-EgOVgL/4xfVrCMbhYKUQTdF37SQn4Iw73H5BgCrF1Abdun7Kwy/QZsE/ssAy0y4LxBbvua3PIbFsbRczWWnDdQ==",
             "dev": true
         },
         "typescript-formatter": {

@@ -42,7 +42,7 @@
         "eslint": "^6.8.0",
         "rollup": "^1.31.1",
         "tslib": "^1.10.0",
-        "typescript": "^3.7.5",
+        "typescript": "^3.8.2",
         "typescript-formatter": "^7.2.2",
         "vsce": "^1.73.0"
     },

@@ -1,5 +1,6 @@
 import * as vscode from 'vscode';
 
+import * as ra from '../rust-analyzer-api';
 import { Ctx, Cmd } from '../ctx';
 
 // Shows status of rust-analyzer (for debugging)
@@ -50,10 +51,7 @@ class TextDocumentContentProvider
         const client = this.ctx.client;
         if (!editor || !client) return '';
 
-        return client.sendRequest<string>(
-            'rust-analyzer/analyzerStatus',
-            null,
-        );
+        return client.sendRequest(ra.analyzerStatus, null);
     }
 
     get onDidChange(): vscode.Event<vscode.Uri> {

@@ -1,5 +1,5 @@
 import * as vscode from 'vscode';
-import * as lc from 'vscode-languageclient';
+import * as ra from '../rust-analyzer-api';
 
 import { Ctx, Cmd } from '../ctx';
 
@@ -26,12 +26,7 @@ export function expandMacro(ctx: Ctx): Cmd {
     };
 }
 
-interface ExpandedMacro {
-    name: string;
-    expansion: string;
-}
-
-function codeFormat(expanded: ExpandedMacro): string {
+function codeFormat(expanded: ra.ExpandedMacro): string {
     let result = `// Recursive expansion of ${expanded.name}! macro\n`;
     result += '// ' + '='.repeat(result.length - 3);
     result += '\n\n';
@@ -54,14 +49,11 @@ class TextDocumentContentProvider
         if (!editor || !client) return '';
 
         const position = editor.selection.active;
-        const request: lc.TextDocumentPositionParams = {
+
+        const expanded = await client.sendRequest(ra.expandMacro, {
             textDocument: { uri: editor.document.uri.toString() },
             position,
-        };
-        const expanded = await client.sendRequest<ExpandedMacro>(
-            'rust-analyzer/expandMacro',
-            request,
-        );
+        });
 
         if (expanded == null) return 'Not available';
 

@@ -1,5 +1,6 @@
 import * as vscode from 'vscode';
 import * as lc from 'vscode-languageclient';
+import * as ra from '../rust-analyzer-api';
 
 import { Ctx, Cmd } from '../ctx';
 import * as sourceChange from '../source_change';
@@ -16,9 +17,7 @@ export * from './ssr';
 export * from './server_version';
 
 export function collectGarbage(ctx: Ctx): Cmd {
-    return async () => {
-        await ctx.client?.sendRequest<null>('rust-analyzer/collectGarbage', null);
-    };
+    return async () => ctx.client.sendRequest(ra.collectGarbage, null);
 }
 
 export function showReferences(ctx: Ctx): Cmd {
@@ -36,13 +35,13 @@ export function showReferences(ctx: Ctx): Cmd {
 }
 
 export function applySourceChange(ctx: Ctx): Cmd {
-    return async (change: sourceChange.SourceChange) => {
+    return async (change: ra.SourceChange) => {
         await sourceChange.applySourceChange(ctx, change);
     };
 }
 
 export function selectAndApplySourceChange(ctx: Ctx): Cmd {
-    return async (changes: sourceChange.SourceChange[]) => {
+    return async (changes: ra.SourceChange[]) => {
         if (changes.length === 1) {
             await sourceChange.applySourceChange(ctx, changes[0]);
         } else if (changes.length > 0) {

@@ -1,7 +1,7 @@
-import * as lc from 'vscode-languageclient';
+import * as ra from '../rust-analyzer-api';
 
 import { Ctx, Cmd } from '../ctx';
-import { applySourceChange, SourceChange } from '../source_change';
+import { applySourceChange } from '../source_change';
 
 export function joinLines(ctx: Ctx): Cmd {
     return async () => {
@@ -9,19 +9,10 @@ export function joinLines(ctx: Ctx): Cmd {
         const client = ctx.client;
         if (!editor || !client) return;
 
-        const request: JoinLinesParams = {
+        const change = await client.sendRequest(ra.joinLines, {
             range: client.code2ProtocolConverter.asRange(editor.selection),
             textDocument: { uri: editor.document.uri.toString() },
-        };
-        const change = await client.sendRequest<SourceChange>(
-            'rust-analyzer/joinLines',
-            request,
-        );
+        });
         await applySourceChange(ctx, change);
     };
 }
-
-interface JoinLinesParams {
-    textDocument: lc.TextDocumentIdentifier;
-    range: lc.Range;
-}

@@ -1,5 +1,5 @@
 import * as vscode from 'vscode';
-import * as lc from 'vscode-languageclient';
+import * as ra from '../rust-analyzer-api';
 
 import { Ctx, Cmd } from '../ctx';
 
@@ -9,16 +9,12 @@ export function matchingBrace(ctx: Ctx): Cmd {
         const client = ctx.client;
         if (!editor || !client) return;
 
-        const request: FindMatchingBraceParams = {
+        const response = await client.sendRequest(ra.findMatchingBrace, {
             textDocument: { uri: editor.document.uri.toString() },
             offsets: editor.selections.map(s =>
                 client.code2ProtocolConverter.asPosition(s.active),
             ),
-        };
-        const response = await client.sendRequest<lc.Position[]>(
-            'rust-analyzer/findMatchingBrace',
-            request,
-        );
+        });
         editor.selections = editor.selections.map((sel, idx) => {
             const active = client.protocol2CodeConverter.asPosition(
                 response[idx],
@@ -29,8 +25,3 @@ export function matchingBrace(ctx: Ctx): Cmd {
         editor.revealRange(editor.selection);
     };
 }
-
-interface FindMatchingBraceParams {
-    textDocument: lc.TextDocumentIdentifier;
-    offsets: lc.Position[];
-}

@@ -1,7 +1,7 @@
 import * as vscode from 'vscode';
-import * as lc from 'vscode-languageclient';
+import * as ra from '../rust-analyzer-api';
 
-import { applySourceChange, SourceChange } from '../source_change';
+import { applySourceChange } from '../source_change';
 import { Cmd, Ctx } from '../ctx';
 
 async function handleKeypress(ctx: Ctx) {
@@ -10,22 +10,15 @@ async function handleKeypress(ctx: Ctx) {
 
     if (!editor || !client) return false;
 
-    const request: lc.TextDocumentPositionParams = {
+    const change = await client.sendRequest(ra.onEnter, {
         textDocument: { uri: editor.document.uri.toString() },
         position: client.code2ProtocolConverter.asPosition(
             editor.selection.active,
         ),
-    };
-    const change = await client.sendRequest<undefined | SourceChange>(
-        'rust-analyzer/onEnter',
-        request,
-    ).catch(
-        (_error: any) => {
-            // FIXME: switch to the more modern (?) typed request infrastructure
-            // client.logFailedRequest(OnEnterRequest.type, error);
-            return Promise.resolve(null);
-        }
-    );
+    }).catch(_error => {
+        // client.logFailedRequest(OnEnterRequest.type, error);
+        return null;
+    });
     if (!change) return false;
 
     await applySourceChange(ctx, change);

@@ -1,5 +1,5 @@
 import * as vscode from 'vscode';
-import * as lc from 'vscode-languageclient';
+import * as ra from '../rust-analyzer-api';
 
 import { Ctx, Cmd } from '../ctx';
 
@@ -9,16 +9,12 @@ export function parentModule(ctx: Ctx): Cmd {
         const client = ctx.client;
         if (!editor || !client) return;
 
-        const request: lc.TextDocumentPositionParams = {
+        const response = await client.sendRequest(ra.parentModule, {
             textDocument: { uri: editor.document.uri.toString() },
             position: client.code2ProtocolConverter.asPosition(
                 editor.selection.active,
             ),
-        };
-        const response = await client.sendRequest<lc.Location[]>(
-            'rust-analyzer/parentModule',
-            request,
-        );
+        });
         const loc = response[0];
         if (loc == null) return;
 

@@ -1,5 +1,6 @@
 import * as vscode from 'vscode';
 import * as lc from 'vscode-languageclient';
+import * as ra from '../rust-analyzer-api';
 
 import { Ctx, Cmd } from '../ctx';
 
@@ -14,16 +15,13 @@ export function run(ctx: Ctx): Cmd {
         const textDocument: lc.TextDocumentIdentifier = {
             uri: editor.document.uri.toString(),
         };
-        const params: RunnablesParams = {
+
+        const runnables = await client.sendRequest(ra.runnables, {
             textDocument,
             position: client.code2ProtocolConverter.asPosition(
                 editor.selection.active,
             ),
-        };
-        const runnables = await client.sendRequest<Runnable[]>(
-            'rust-analyzer/runnables',
-            params,
-        );
+        });
         const items: RunnableQuickPick[] = [];
         if (prevRunnable) {
             items.push(prevRunnable);
@@ -48,7 +46,7 @@ export function run(ctx: Ctx): Cmd {
 }
 
 export function runSingle(ctx: Ctx): Cmd {
-    return async (runnable: Runnable) => {
+    return async (runnable: ra.Runnable) => {
         const editor = ctx.activeRustEditor;
         if (!editor) return;
 
@@ -64,26 +62,13 @@ export function runSingle(ctx: Ctx): Cmd {
     };
 }
 
-interface RunnablesParams {
-    textDocument: lc.TextDocumentIdentifier;
-    position?: lc.Position;
-}
-
-interface Runnable {
-    label: string;
-    bin: string;
-    args: string[];
-    env: { [index: string]: string };
-    cwd?: string;
-}
-
 class RunnableQuickPick implements vscode.QuickPickItem {
     public label: string;
     public description?: string | undefined;
     public detail?: string | undefined;
     public picked?: boolean | undefined;
 
-    constructor(public runnable: Runnable) {
+    constructor(public runnable: ra.Runnable) {
         this.label = runnable.label;
     }
 }
@@ -96,7 +81,7 @@ interface CargoTaskDefinition extends vscode.TaskDefinition {
     env?: { [key: string]: string };
 }
 
-function createTask(spec: Runnable): vscode.Task {
+function createTask(spec: ra.Runnable): vscode.Task {
     const TASK_SOURCE = 'Rust';
     const definition: CargoTaskDefinition = {
         type: 'cargo',

@@ -1,6 +1,8 @@
-import { Ctx, Cmd } from '../ctx';
-import { applySourceChange, SourceChange } from '../source_change';
 import * as vscode from 'vscode';
+import * as ra from "../rust-analyzer-api";
+
+import { Ctx, Cmd } from '../ctx';
+import { applySourceChange } from '../source_change';
 
 export function ssr(ctx: Ctx): Cmd {
     return async () => {
@@ -21,16 +23,8 @@ export function ssr(ctx: Ctx): Cmd {
 
         if (!request) return;
 
-        const ssrRequest: SsrRequest = { arg: request };
-        const change = await client.sendRequest<SourceChange>(
-            'rust-analyzer/ssr',
-            ssrRequest,
-        );
+        const change = await client.sendRequest(ra.ssr, { arg: request });
 
         await applySourceChange(ctx, change);
     };
 }
-
-interface SsrRequest {
-    arg: string;
-}
|
||||
|
|
|
@@ -1,5 +1,5 @@
 import * as vscode from 'vscode';
-import * as lc from 'vscode-languageclient';
+import * as ra from '../rust-analyzer-api';

 import { Ctx, Cmd } from '../ctx';
@@ -61,13 +61,8 @@ function afterLs(f: () => void) {
     setTimeout(f, 10);
 }

-interface SyntaxTreeParams {
-    textDocument: lc.TextDocumentIdentifier;
-    range?: lc.Range;
-}
-
-class TextDocumentContentProvider
-    implements vscode.TextDocumentContentProvider {
+class TextDocumentContentProvider implements vscode.TextDocumentContentProvider {
     uri = vscode.Uri.parse('rust-analyzer://syntaxtree');
     eventEmitter = new vscode.EventEmitter<vscode.Uri>();
@@ -79,23 +74,15 @@ class TextDocumentContentProvider
         const client = this.ctx.client;
         if (!editor || !client) return '';

-        let range: lc.Range | undefined;
-
-        // When the range based query is enabled we take the range of the selection
-        if (uri.query === 'range=true') {
-            range = editor.selection.isEmpty
-                ? undefined
-                : client.code2ProtocolConverter.asRange(editor.selection);
-        }
+        const range = uri.query === 'range=true' && !editor.selection.isEmpty
+            ? client.code2ProtocolConverter.asRange(editor.selection)
+            : null;

-        const request: SyntaxTreeParams = {
+        return client.sendRequest(ra.syntaxTree, {
             textDocument: { uri: editor.document.uri.toString() },
             range,
-        };
-        return client.sendRequest<string>(
-            'rust-analyzer/syntaxTree',
-            request,
-        );
+        });
     }

     get onDidChange(): vscode.Event<vscode.Uri> {
@@ -1,5 +1,5 @@
 import * as vscode from 'vscode';
-import * as lc from 'vscode-languageclient';
+import * as ra from './rust-analyzer-api';

 import { ColorTheme, TextMateRuleSettings } from './color_theme';
@@ -8,29 +8,25 @@ import { sendRequestWithRetry } from './util';

 export function activateHighlighting(ctx: Ctx) {
     const highlighter = new Highlighter(ctx);
-    const client = ctx.client;
-    if (client != null) {
-        client.onNotification(
-            'rust-analyzer/publishDecorations',
-            (params: PublishDecorationsParams) => {
-                if (!ctx.config.highlightingOn) return;
-
-                const targetEditor = vscode.window.visibleTextEditors.find(
-                    editor => {
-                        const unescapedUri = unescape(
-                            editor.document.uri.toString(),
-                        );
-                        // Unescaped URI looks like:
-                        // file:///c:/Workspace/ra-test/src/main.rs
-                        return unescapedUri === params.uri;
-                    },
-                );
-                if (!targetEditor) return;
-
-                highlighter.setHighlights(targetEditor, params.decorations);
-            },
-        );
-    }
+
+    ctx.client.onNotification(ra.publishDecorations, params => {
+        if (!ctx.config.highlightingOn) return;
+
+        const targetEditor = vscode.window.visibleTextEditors.find(
+            editor => {
+                const unescapedUri = unescape(
+                    editor.document.uri.toString(),
+                );
+                // Unescaped URI looks like:
+                // file:///c:/Workspace/ra-test/src/main.rs
+                return unescapedUri === params.uri;
+            },
+        );
+        if (!targetEditor) return;
+
+        highlighter.setHighlights(targetEditor, params.decorations);
+    });

     vscode.workspace.onDidChangeConfiguration(
         _ => highlighter.removeHighlights(),
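Notifications get the same treatment as requests: `ra.publishDecorations` is an `lc.NotificationType`, so the handler's `params` is inferred instead of being annotated by hand. A hedged sketch, assuming an initialized `client: lc.LanguageClient`:

    // `params` is inferred as PublishDecorationsParams; no manual annotation
    // and no chance of subscribing with a mistyped method string.
    client.onNotification(ra.publishDecorations, params => {
        console.log(params.uri, params.decorations.length);
    });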
@@ -45,13 +41,10 @@ export function activateHighlighting(ctx: Ctx) {
             const client = ctx.client;
             if (!client) return;

-            const params: lc.TextDocumentIdentifier = {
-                uri: editor.document.uri.toString(),
-            };
-            const decorations = await sendRequestWithRetry<Decoration[]>(
+            const decorations = await sendRequestWithRetry(
                 client,
-                'rust-analyzer/decorationsRequest',
-                params,
+                ra.decorationsRequest,
+                { uri: editor.document.uri.toString() },
             );
             highlighter.setHighlights(editor, decorations);
         },
@@ -60,17 +53,6 @@ export function activateHighlighting(ctx: Ctx) {
     );
 }

-interface PublishDecorationsParams {
-    uri: string;
-    decorations: Decoration[];
-}
-
-interface Decoration {
-    range: lc.Range;
-    tag: string;
-    bindingHash?: string;
-}
-
 // Based on this HSL-based color generator: https://gist.github.com/bendc/76c48ce53299e6078a76
 function fancify(seed: string, shade: 'light' | 'dark') {
     const random = randomU32Numbers(hashString(seed));
@@ -108,7 +90,7 @@ class Highlighter {
         this.decorations = null;
     }

-    public setHighlights(editor: vscode.TextEditor, highlights: Decoration[]) {
+    public setHighlights(editor: vscode.TextEditor, highlights: ra.Decoration[]) {
         const client = this.ctx.client;
         if (!client) return;
         // Initialize decorations if necessary
@@ -1,5 +1,5 @@
 import * as vscode from 'vscode';
-import * as lc from 'vscode-languageclient';
+import * as ra from './rust-analyzer-api';

 import { Ctx } from './ctx';
 import { log, sendRequestWithRetry } from './util';
@@ -39,16 +39,6 @@ export function activateInlayHints(ctx: Ctx) {
     void hintsUpdater.setEnabled(ctx.config.displayInlayHints);
 }

-interface InlayHintsParams {
-    textDocument: lc.TextDocumentIdentifier;
-}
-
-interface InlayHint {
-    range: vscode.Range;
-    kind: "TypeHint" | "ParameterHint";
-    label: string;
-}
-
 const typeHintDecorationType = vscode.window.createTextEditorDecorationType({
     after: {
         color: new vscode.ThemeColor('rust_analyzer.inlayHint'),
@@ -107,9 +97,9 @@ class HintsUpdater {
         if (newHints == null) return;

         const newTypeDecorations = newHints
-            .filter(hint => hint.kind === 'TypeHint')
+            .filter(hint => hint.kind === ra.InlayKind.TypeHint)
             .map(hint => ({
-                range: hint.range,
+                range: this.ctx.client.protocol2CodeConverter.asRange(hint.range),
                 renderOptions: {
                     after: {
                         contentText: `: ${hint.label}`,
@@ -119,9 +109,9 @@ class HintsUpdater {
         this.setTypeDecorations(editor, newTypeDecorations);

         const newParameterDecorations = newHints
-            .filter(hint => hint.kind === 'ParameterHint')
+            .filter(hint => hint.kind === ra.InlayKind.ParameterHint)
             .map(hint => ({
-                range: hint.range,
+                range: this.ctx.client.protocol2CodeConverter.asRange(hint.range),
                 renderOptions: {
                     before: {
                         contentText: `${hint.label}: `,
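Both hint hunks also stop treating the wire range as a `vscode.Range`: the server sends LSP ranges (`lc.Range`, plain zero-based `{ start, end }` positions), which must be converted before VSCode accepts them. The language client ships converters in both directions; a small sketch assuming an initialized `client` and an active `editor`:

    // lc.Range (from the server) -> vscode.Range (for decorations):
    const vsRange = client.protocol2CodeConverter.asRange(hint.range);
    // vscode.Selection (from the editor) -> lc.Range (for requests):
    const lspRange = client.code2ProtocolConverter.asRange(editor.selection);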
@@ -151,20 +141,15 @@ class HintsUpdater {
         );
     }

-    private async queryHints(documentUri: string): Promise<InlayHint[] | null> {
+    private async queryHints(documentUri: string): Promise<ra.InlayHint[] | null> {
         this.pending.get(documentUri)?.cancel();

         const tokenSource = new vscode.CancellationTokenSource();
         this.pending.set(documentUri, tokenSource);

-        const request: InlayHintsParams = { textDocument: { uri: documentUri } };
+        const request = { textDocument: { uri: documentUri } };

-        return sendRequestWithRetry<InlayHint[]>(
-            this.ctx.client,
-            'rust-analyzer/inlayHints',
-            request,
-            tokenSource.token
-        )
+        return sendRequestWithRetry(this.ctx.client, ra.inlayHints, request, tokenSource.token)
             .catch(_ => null)
             .finally(() => {
                 if (!tokenSource.token.isCancellationRequested) {
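`queryHints` keeps the existing debounce-by-cancellation scheme: each new query for a document cancels the in-flight one. Extracted into a stand-alone sketch (names are illustrative, not from the codebase):

    import * as vscode from 'vscode';

    const pending = new Map<string, vscode.CancellationTokenSource>();

    function freshToken(documentUri: string): vscode.CancellationToken {
        pending.get(documentUri)?.cancel();   // abort the stale request, if any
        const source = new vscode.CancellationTokenSource();
        pending.set(documentUri, source);
        return source.token;                  // pass this to sendRequest
    }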
@@ -4,41 +4,61 @@ import { log } from "../util";

 const GITHUB_API_ENDPOINT_URL = "https://api.github.com";

 /**
- * Fetches the release with `releaseTag` (or just latest release when not specified)
- * from GitHub `repo` and returns metadata about `artifactFileName` shipped with
- * this release or `null` if no such artifact was published.
+ * Fetches the release with `releaseTag` from GitHub `repo` and
+ * returns metadata about `artifactFileName` shipped with
+ * this release.
+ *
+ * @throws Error upon network failure or if no such repository, release, or artifact exists.
  */
 export async function fetchArtifactReleaseInfo(
-    repo: GithubRepo, artifactFileName: string, releaseTag?: string
-): Promise<null | ArtifactReleaseInfo> {
+    repo: GithubRepo,
+    artifactFileName: string,
+    releaseTag: string
+): Promise<ArtifactReleaseInfo> {

     const repoOwner = encodeURIComponent(repo.owner);
     const repoName = encodeURIComponent(repo.name);

-    const apiEndpointPath = releaseTag
-        ? `/repos/${repoOwner}/${repoName}/releases/tags/${releaseTag}`
-        : `/repos/${repoOwner}/${repoName}/releases/latest`;
+    const apiEndpointPath = `/repos/${repoOwner}/${repoName}/releases/tags/${releaseTag}`;

     const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath;

-    // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`)
     log.debug("Issuing request for released artifacts metadata to", requestUrl);

-    // FIXME: handle non-ok response
-    const response: GithubRelease = await fetch(requestUrl, {
-        headers: { Accept: "application/vnd.github.v3+json" }
-    })
-        .then(res => res.json());
+    const response = await fetch(requestUrl, { headers: { Accept: "application/vnd.github.v3+json" } });

-    const artifact = response.assets.find(artifact => artifact.name === artifactFileName);
+    if (!response.ok) {
+        log.error("Error fetching artifact release info", {
+            requestUrl,
+            releaseTag,
+            artifactFileName,
+            response: {
+                headers: response.headers,
+                status: response.status,
+                body: await response.text(),
+            }
+        });
+
+        throw new Error(
+            `Got response ${response.status} when trying to fetch ` +
+            `"${artifactFileName}" artifact release info for ${releaseTag} release`
+        );
+    }
+
+    // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`)
+    const release: GithubRelease = await response.json();

-    if (!artifact) return null;
+    const artifact = release.assets.find(artifact => artifact.name === artifactFileName);
+
+    if (!artifact) {
+        throw new Error(
+            `Artifact ${artifactFileName} was not found in ${release.name} release!`
+        );
+    }

     return {
-        releaseName: response.name,
+        releaseName: release.name,
         downloadUrl: artifact.browser_download_url
     };
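Callers now get an exception instead of a `null` they might forget to check. A hedged usage sketch (the release tag and artifact name below are made up):

    try {
        const info = await fetchArtifactReleaseInfo(repo, "rust-analyzer-linux", "2020-02-17");
        log.debug("Downloading from", info.downloadUrl);
    } catch (err) {
        // Network failure, unknown repo/release, or artifact missing from the release.
        vscode.window.showErrorMessage(`Failed to fetch release info: ${err}`);
    }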
@@ -63,7 +63,7 @@ export async function ensureServerBinary(source: null | BinarySource): Promise<n

 async function downloadServer(source: BinarySource.GithubRelease): Promise<boolean> {
     try {
-        const releaseInfo = (await fetchArtifactReleaseInfo(source.repo, source.file, source.version))!;
+        const releaseInfo = await fetchArtifactReleaseInfo(source.repo, source.file, source.version);

         await downloadArtifact(releaseInfo, source.file, source.dir, "language server");
         await setServerVersion(source.storage, releaseInfo.releaseName);
117 editors/code/src/rust-analyzer-api.ts Normal file
@@ -0,0 +1,117 @@
+/**
+ * This file mirrors `crates/rust-analyzer/src/req.rs` declarations.
+ */
+
+import * as lc from "vscode-languageclient";
+
+type Option<T> = null | T;
+type Vec<T> = T[];
+type FxHashMap<K extends PropertyKey, V> = Record<K, V>;
+
+function request<TParams, TResult>(method: string) {
+    return new lc.RequestType<TParams, TResult, unknown>(`rust-analyzer/${method}`);
+}
+function notification<TParam>(method: string) {
+    return new lc.NotificationType<TParam>(method);
+}
+
+
+export const analyzerStatus = request<null, string>("analyzerStatus");
+
+
+export const collectGarbage = request<null, null>("collectGarbage");
+
+
+export interface SyntaxTreeParams {
+    textDocument: lc.TextDocumentIdentifier;
+    range: Option<lc.Range>;
+}
+export const syntaxTree = request<SyntaxTreeParams, string>("syntaxTree");
+
+
+export interface ExpandMacroParams {
+    textDocument: lc.TextDocumentIdentifier;
+    position: Option<lc.Position>;
+}
+export interface ExpandedMacro {
+    name: string;
+    expansion: string;
+}
+export const expandMacro = request<ExpandMacroParams, Option<ExpandedMacro>>("expandMacro");
+
+
+export interface FindMatchingBraceParams {
+    textDocument: lc.TextDocumentIdentifier;
+    offsets: Vec<lc.Position>;
+}
+export const findMatchingBrace = request<FindMatchingBraceParams, Vec<lc.Position>>("findMatchingBrace");
+
+
+export interface PublishDecorationsParams {
+    uri: string;
+    decorations: Vec<Decoration>;
+}
+export interface Decoration {
+    range: lc.Range;
+    tag: string;
+    bindingHash: Option<string>;
+}
+export const decorationsRequest = request<lc.TextDocumentIdentifier, Vec<Decoration>>("decorationsRequest");
+
+
+export const parentModule = request<lc.TextDocumentPositionParams, Vec<lc.Location>>("parentModule");
+
+
+export interface JoinLinesParams {
+    textDocument: lc.TextDocumentIdentifier;
+    range: lc.Range;
+}
+export const joinLines = request<JoinLinesParams, SourceChange>("joinLines");
+
+
+export const onEnter = request<lc.TextDocumentPositionParams, Option<SourceChange>>("onEnter");
+
+export interface RunnablesParams {
+    textDocument: lc.TextDocumentIdentifier;
+    position: Option<lc.Position>;
+}
+export interface Runnable {
+    range: lc.Range;
+    label: string;
+    bin: string;
+    args: Vec<string>;
+    env: FxHashMap<string, string>;
+    cwd: Option<string>;
+}
+export const runnables = request<RunnablesParams, Vec<Runnable>>("runnables");
+
+
+export const enum InlayKind {
+    TypeHint = "TypeHint",
+    ParameterHint = "ParameterHint",
+}
+export interface InlayHint {
+    range: lc.Range;
+    kind: InlayKind;
+    label: string;
+}
+export interface InlayHintsParams {
+    textDocument: lc.TextDocumentIdentifier;
+}
+export const inlayHints = request<InlayHintsParams, Vec<InlayHint>>("inlayHints");
+
+
+export interface SsrParams {
+    arg: string;
+}
+export const ssr = request<SsrParams, SourceChange>("ssr");
+
+
+export const publishDecorations = notification<PublishDecorationsParams>("publishDecorations");
+
+
+export interface SourceChange {
+    label: string;
+    workspaceEdit: lc.WorkspaceEdit;
+    cursorPosition: Option<lc.TextDocumentPositionParams>;
+}
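The two factory functions at the top of the new file are the whole trick: each endpoint becomes a value whose type parameters pin down its wire contract, and the `rust-analyzer/` prefix is applied in exactly one place. Declaring a hypothetical endpoint inside this file would look like the following (shown only to illustrate the pattern, not an endpoint the server exposes):

    export const expandSelection = request<lc.TextDocumentPositionParams, lc.Range>("expandSelection");
    // client.sendRequest(expandSelection, params) now returns Promise<lc.Range>
    // and goes out on the wire as "rust-analyzer/expandSelection".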
@@ -1,15 +1,10 @@
 import * as vscode from 'vscode';
 import * as lc from 'vscode-languageclient';
+import * as ra from './rust-analyzer-api';

 import { Ctx } from './ctx';

-export interface SourceChange {
-    label: string;
-    workspaceEdit: lc.WorkspaceEdit;
-    cursorPosition?: lc.TextDocumentPositionParams;
-}
-
-export async function applySourceChange(ctx: Ctx, change: SourceChange) {
+export async function applySourceChange(ctx: Ctx, change: ra.SourceChange) {
     const client = ctx.client;
     if (!client) return;
@@ -20,21 +20,21 @@ export const log = {
     }
 };

-export async function sendRequestWithRetry<R>(
+export async function sendRequestWithRetry<TParam, TRet>(
     client: lc.LanguageClient,
-    method: string,
-    param: unknown,
+    reqType: lc.RequestType<TParam, TRet, unknown>,
+    param: TParam,
     token?: vscode.CancellationToken,
-): Promise<R> {
+): Promise<TRet> {
     for (const delay of [2, 4, 6, 8, 10, null]) {
         try {
             return await (token
-                ? client.sendRequest(method, param, token)
-                : client.sendRequest(method, param)
+                ? client.sendRequest(reqType, param, token)
+                : client.sendRequest(reqType, param)
             );
         } catch (error) {
             if (delay === null) {
-                log.error("LSP request timed out", { method, param, error });
+                log.error("LSP request timed out", { method: reqType.method, param, error });
                 throw error;
             }
@@ -43,7 +43,7 @@ export async function sendRequestWithRetry<R>(
             }

             if (error.code !== lc.ErrorCodes.ContentModified) {
-                log.error("LSP request failed", { method, param, error });
+                log.error("LSP request failed", { method: reqType.method, param, error });
                 throw error;
             }
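With a typed `reqType`, the explicit `<R>` at call sites disappears; the result type flows from the request declaration, as in the decorations call earlier in this diff. A calling sketch, assuming an initialized `client` and an active `editor`:

    // `decorations` is inferred as ra.Decoration[]; no type argument needed.
    const decorations = await sendRequestWithRetry(
        client,
        ra.decorationsRequest,
        { uri: editor.document.uri.toString() },
    );

The retry behavior itself is unchanged by this hunk: `ContentModified` errors are retried through the delay table, and anything else (including exhausting the table) is logged with `reqType.method` and rethrown.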