Merge branch 'master' of github.com:rust-analyzer/rust-analyzer

This commit is contained in:
Benjamin Coenen 2020-05-01 16:26:30 +02:00
commit dc34162450
68 changed files with 1926 additions and 590 deletions

View file

@ -39,7 +39,6 @@ jobs:
with: with:
toolchain: stable toolchain: stable
profile: minimal profile: minimal
target: x86_64-unknown-linux-musl
override: true override: true
- name: Install Nodejs - name: Install Nodejs

4
.vscode/launch.json vendored
View file

@ -41,7 +41,7 @@
"outFiles": [ "outFiles": [
"${workspaceFolder}/editors/code/out/**/*.js" "${workspaceFolder}/editors/code/out/**/*.js"
], ],
"preLaunchTask": "Build Extension", "preLaunchTask": "Build Server and Extension",
"skipFiles": [ "skipFiles": [
"<node_internals>/**/*.js" "<node_internals>/**/*.js"
], ],
@ -62,7 +62,7 @@
"outFiles": [ "outFiles": [
"${workspaceFolder}/editors/code/out/**/*.js" "${workspaceFolder}/editors/code/out/**/*.js"
], ],
"preLaunchTask": "Build Extension", "preLaunchTask": "Build Server (Release) and Extension",
"skipFiles": [ "skipFiles": [
"<node_internals>/**/*.js" "<node_internals>/**/*.js"
], ],

31
.vscode/tasks.json vendored
View file

@ -4,7 +4,7 @@
"version": "2.0.0", "version": "2.0.0",
"tasks": [ "tasks": [
{ {
"label": "Build Extension", "label": "Build Extension in Background",
"group": "build", "group": "build",
"type": "npm", "type": "npm",
"script": "watch", "script": "watch",
@ -15,6 +15,17 @@
}, },
"isBackground": true, "isBackground": true,
}, },
{
"label": "Build Extension",
"group": "build",
"type": "npm",
"script": "build",
"path": "editors/code/",
"problemMatcher": {
"base": "$tsc",
"fileLocation": ["relative", "${workspaceFolder}/editors/code/"]
},
},
{ {
"label": "Build Server", "label": "Build Server",
"group": "build", "group": "build",
@ -22,5 +33,23 @@
"command": "cargo build --package rust-analyzer", "command": "cargo build --package rust-analyzer",
"problemMatcher": "$rustc" "problemMatcher": "$rustc"
}, },
{
"label": "Build Server (Release)",
"group": "build",
"type": "shell",
"command": "cargo build --release --package rust-analyzer",
"problemMatcher": "$rustc"
},
{
"label": "Build Server and Extension",
"dependsOn": ["Build Server", "Build Extension"],
"problemMatcher": "$rustc"
},
{
"label": "Build Server (Release) and Extension",
"dependsOn": ["Build Server (Release)", "Build Extension"],
"problemMatcher": "$rustc"
}
] ]
} }

12
Cargo.lock generated
View file

@ -68,9 +68,9 @@ dependencies = [
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.11.0" version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" checksum = "7d5ca2cd0adc3f48f9e9ea5a6bbdf9ccc0bfade884847e484d452414c7ccffb3"
[[package]] [[package]]
name = "bitflags" name = "bitflags"
@ -645,9 +645,9 @@ dependencies = [
[[package]] [[package]]
name = "lsp-types" name = "lsp-types"
version = "0.73.0" version = "0.74.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93d0cf64ea141b43d9e055f6b9df13f0bce32b103d84237509ce0a571ab9b159" checksum = "820f746e5716ab9a2d664794636188bd003023b72e55404ee27105dc22869922"
dependencies = [ dependencies = [
"base64", "base64",
"bitflags", "bitflags",
@ -1193,9 +1193,9 @@ dependencies = [
[[package]] [[package]]
name = "ra_vfs" name = "ra_vfs"
version = "0.5.3" version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58a265769d5e5655345a9fcbd870a1a7c3658558c0d8efaed79e0669358f46b8" checksum = "fcaa5615f420134aea7667253db101d03a5c5f300eac607872dc2a36407b2ac9"
dependencies = [ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"jod-thread", "jod-thread",

View file

@ -1,11 +1,12 @@
use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, NameOwner}, ast::{self, AstNode, NameOwner},
TextSize, TextSize,
}; };
use stdx::format_to; use stdx::format_to;
use crate::{Assist, AssistCtx, AssistId}; use crate::{utils::FamousDefs, Assist, AssistCtx, AssistId};
use ra_ide_db::RootDatabase; use test_utils::tested_by;
// Assist add_from_impl_for_enum // Assist add_from_impl_for_enum
// //
@ -41,7 +42,8 @@ pub(crate) fn add_from_impl_for_enum(ctx: AssistCtx) -> Option<Assist> {
_ => return None, _ => return None,
}; };
if already_has_from_impl(ctx.sema, &variant) { if existing_from_impl(ctx.sema, &variant).is_some() {
tested_by!(test_add_from_impl_already_exists);
return None; return None;
} }
@ -70,41 +72,33 @@ impl From<{0}> for {1} {{
) )
} }
fn already_has_from_impl( fn existing_from_impl(
sema: &'_ hir::Semantics<'_, RootDatabase>, sema: &'_ hir::Semantics<'_, RootDatabase>,
variant: &ast::EnumVariant, variant: &ast::EnumVariant,
) -> bool { ) -> Option<()> {
let scope = sema.scope(&variant.syntax()); let variant = sema.to_def(variant)?;
let enum_ = variant.parent_enum(sema.db);
let krate = enum_.module(sema.db).krate();
let from_path = ast::make::path_from_text("From"); let from_trait = FamousDefs(sema, krate).core_convert_From()?;
let from_hir_path = match hir::Path::from_ast(from_path) {
Some(p) => p,
None => return false,
};
let from_trait = match scope.resolve_hir_path(&from_hir_path) {
Some(hir::PathResolution::Def(hir::ModuleDef::Trait(t))) => t,
_ => return false,
};
let e: hir::Enum = match sema.to_def(&variant.parent_enum()) { let enum_type = enum_.ty(sema.db);
Some(e) => e,
None => return false,
};
let e_ty = e.ty(sema.db);
let hir_enum_var: hir::EnumVariant = match sema.to_def(variant) { let wrapped_type = variant.fields(sema.db).get(0)?.signature_ty(sema.db);
Some(ev) => ev,
None => return false,
};
let var_ty = hir_enum_var.fields(sema.db)[0].signature_ty(sema.db);
e_ty.impls_trait(sema.db, from_trait, &[var_ty]) if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) {
Some(())
} else {
None
}
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::helpers::{check_assist, check_assist_not_applicable}; use crate::helpers::{check_assist, check_assist_not_applicable};
use test_utils::covers;
#[test] #[test]
fn test_add_from_impl_for_enum() { fn test_add_from_impl_for_enum() {
@ -136,36 +130,40 @@ mod tests {
); );
} }
fn check_not_applicable(ra_fixture: &str) {
let fixture =
format!("//- main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE);
check_assist_not_applicable(add_from_impl_for_enum, &fixture)
}
#[test] #[test]
fn test_add_from_impl_no_element() { fn test_add_from_impl_no_element() {
check_assist_not_applicable(add_from_impl_for_enum, "enum A { <|>One }"); check_not_applicable("enum A { <|>One }");
} }
#[test] #[test]
fn test_add_from_impl_more_than_one_element_in_tuple() { fn test_add_from_impl_more_than_one_element_in_tuple() {
check_assist_not_applicable(add_from_impl_for_enum, "enum A { <|>One(u32, String) }"); check_not_applicable("enum A { <|>One(u32, String) }");
} }
#[test] #[test]
fn test_add_from_impl_struct_variant() { fn test_add_from_impl_struct_variant() {
check_assist_not_applicable(add_from_impl_for_enum, "enum A { <|>One { x: u32 } }"); check_not_applicable("enum A { <|>One { x: u32 } }");
} }
#[test] #[test]
fn test_add_from_impl_already_exists() { fn test_add_from_impl_already_exists() {
check_assist_not_applicable( covers!(test_add_from_impl_already_exists);
add_from_impl_for_enum, check_not_applicable(
r#"enum A { <|>One(u32), } r#"
enum A { <|>One(u32), }
impl From<u32> for A { impl From<u32> for A {
fn from(v: u32) -> Self { fn from(v: u32) -> Self {
A::One(v) A::One(v)
} }
} }
"#,
pub trait From<T> {
fn from(T) -> Self;
}"#,
); );
} }

View file

@ -1,11 +1,10 @@
use ra_fmt::unwrap_trivial_block; use ra_fmt::unwrap_trivial_block;
use ra_syntax::{ use ra_syntax::{
ast::{self, make}, ast::{self, edit::IndentLevel, make},
AstNode, AstNode,
}; };
use crate::{Assist, AssistCtx, AssistId}; use crate::{utils::TryEnum, Assist, AssistCtx, AssistId};
use ast::edit::IndentLevel;
// Assist: replace_if_let_with_match // Assist: replace_if_let_with_match
// //
@ -44,15 +43,21 @@ pub(crate) fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> {
ast::ElseBranch::IfExpr(_) => return None, ast::ElseBranch::IfExpr(_) => return None,
}; };
ctx.add_assist(AssistId("replace_if_let_with_match"), "Replace with match", |edit| { let sema = ctx.sema;
ctx.add_assist(AssistId("replace_if_let_with_match"), "Replace with match", move |edit| {
let match_expr = { let match_expr = {
let then_arm = { let then_arm = {
let then_expr = unwrap_trivial_block(then_block); let then_expr = unwrap_trivial_block(then_block);
make::match_arm(vec![pat], then_expr) make::match_arm(vec![pat.clone()], then_expr)
}; };
let else_arm = { let else_arm = {
let pattern = sema
.type_of_pat(&pat)
.and_then(|ty| TryEnum::from_ty(sema, &ty))
.map(|it| it.sad_pattern())
.unwrap_or_else(|| make::placeholder_pat().into());
let else_expr = unwrap_trivial_block(else_block); let else_expr = unwrap_trivial_block(else_block);
make::match_arm(vec![make::placeholder_pat().into()], else_expr) make::match_arm(vec![pattern], else_expr)
}; };
make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm])) make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm]))
}; };
@ -68,6 +73,7 @@ pub(crate) fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::helpers::{check_assist, check_assist_target}; use crate::helpers::{check_assist, check_assist_target};
#[test] #[test]
@ -145,4 +151,64 @@ impl VariantData {
}", }",
); );
} }
#[test]
fn special_case_option() {
check_assist(
replace_if_let_with_match,
r#"
enum Option<T> { Some(T), None }
use Option::*;
fn foo(x: Option<i32>) {
<|>if let Some(x) = x {
println!("{}", x)
} else {
println!("none")
}
}
"#,
r#"
enum Option<T> { Some(T), None }
use Option::*;
fn foo(x: Option<i32>) {
<|>match x {
Some(x) => println!("{}", x),
None => println!("none"),
}
}
"#,
);
}
#[test]
fn special_case_result() {
check_assist(
replace_if_let_with_match,
r#"
enum Result<T, E> { Ok(T), Err(E) }
use Result::*;
fn foo(x: Result<i32, ()>) {
<|>if let Ok(x) = x {
println!("{}", x)
} else {
println!("none")
}
}
"#,
r#"
enum Result<T, E> { Ok(T), Err(E) }
use Result::*;
fn foo(x: Result<i32, ()>) {
<|>match x {
Ok(x) => println!("{}", x),
Err(_) => println!("none"),
}
}
"#,
);
}
} }

View file

@ -1,6 +1,5 @@
use std::iter::once; use std::iter::once;
use hir::Adt;
use ra_syntax::{ use ra_syntax::{
ast::{ ast::{
self, self,
@ -12,6 +11,7 @@ use ra_syntax::{
use crate::{ use crate::{
assist_ctx::{Assist, AssistCtx}, assist_ctx::{Assist, AssistCtx},
utils::TryEnum,
AssistId, AssistId,
}; };
@ -45,20 +45,10 @@ pub(crate) fn replace_let_with_if_let(ctx: AssistCtx) -> Option<Assist> {
let init = let_stmt.initializer()?; let init = let_stmt.initializer()?;
let original_pat = let_stmt.pat()?; let original_pat = let_stmt.pat()?;
let ty = ctx.sema.type_of_expr(&init)?; let ty = ctx.sema.type_of_expr(&init)?;
let enum_ = match ty.as_adt() { let happy_variant = TryEnum::from_ty(ctx.sema, &ty).map(|it| it.happy_case());
Some(Adt::Enum(it)) => it,
_ => return None,
};
let happy_case =
[("Result", "Ok"), ("Option", "Some")].iter().find_map(|(known_type, happy_case)| {
if &enum_.name(ctx.db).to_string() == known_type {
return Some(happy_case);
}
None
});
ctx.add_assist(AssistId("replace_let_with_if_let"), "Replace with if-let", |edit| { ctx.add_assist(AssistId("replace_let_with_if_let"), "Replace with if-let", |edit| {
let with_placeholder: ast::Pat = match happy_case { let with_placeholder: ast::Pat = match happy_variant {
None => make::placeholder_pat().into(), None => make::placeholder_pat().into(),
Some(var_name) => make::tuple_struct_pat( Some(var_name) => make::tuple_struct_pat(
make::path_unqualified(make::path_segment(make::name_ref(var_name))), make::path_unqualified(make::path_segment(make::name_ref(var_name))),

View file

@ -1,12 +1,11 @@
use std::iter; use std::iter;
use ra_syntax::{ use ra_syntax::{
ast::{self, make}, ast::{self, edit::IndentLevel, make},
AstNode, AstNode,
}; };
use crate::{Assist, AssistCtx, AssistId}; use crate::{utils::TryEnum, Assist, AssistCtx, AssistId};
use ast::edit::IndentLevel;
// Assist: replace_unwrap_with_match // Assist: replace_unwrap_with_match
// //
@ -38,42 +37,27 @@ pub(crate) fn replace_unwrap_with_match(ctx: AssistCtx) -> Option<Assist> {
} }
let caller = method_call.expr()?; let caller = method_call.expr()?;
let ty = ctx.sema.type_of_expr(&caller)?; let ty = ctx.sema.type_of_expr(&caller)?;
let happy_variant = TryEnum::from_ty(ctx.sema, &ty)?.happy_case();
let type_name = ty.as_adt()?.name(ctx.sema.db).to_string(); ctx.add_assist(AssistId("replace_unwrap_with_match"), "Replace unwrap with match", |edit| {
let ok_path = make::path_unqualified(make::path_segment(make::name_ref(happy_variant)));
let it = make::bind_pat(make::name("a")).into();
let ok_tuple = make::tuple_struct_pat(ok_path, iter::once(it)).into();
for (unwrap_type, variant_name) in [("Result", "Ok"), ("Option", "Some")].iter() { let bind_path = make::path_unqualified(make::path_segment(make::name_ref("a")));
if &type_name == unwrap_type { let ok_arm = make::match_arm(iter::once(ok_tuple), make::expr_path(bind_path));
return ctx.add_assist(
AssistId("replace_unwrap_with_match"),
"Replace unwrap with match",
|edit| {
let ok_path =
make::path_unqualified(make::path_segment(make::name_ref(variant_name)));
let it = make::bind_pat(make::name("a")).into();
let ok_tuple = make::tuple_struct_pat(ok_path, iter::once(it)).into();
let bind_path = make::path_unqualified(make::path_segment(make::name_ref("a"))); let unreachable_call = make::unreachable_macro_call().into();
let ok_arm = make::match_arm(iter::once(ok_tuple), make::expr_path(bind_path)); let err_arm = make::match_arm(iter::once(make::placeholder_pat().into()), unreachable_call);
let unreachable_call = make::unreachable_macro_call().into(); let match_arm_list = make::match_arm_list(vec![ok_arm, err_arm]);
let err_arm = make::match_arm( let match_expr = make::expr_match(caller.clone(), match_arm_list);
iter::once(make::placeholder_pat().into()), let match_expr = IndentLevel::from_node(method_call.syntax()).increase_indent(match_expr);
unreachable_call,
);
let match_arm_list = make::match_arm_list(vec![ok_arm, err_arm]); edit.target(method_call.syntax().text_range());
let match_expr = make::expr_match(caller.clone(), match_arm_list); edit.set_cursor(caller.syntax().text_range().start());
let match_expr = edit.replace_ast::<ast::Expr>(method_call.into(), match_expr);
IndentLevel::from_node(method_call.syntax()).increase_indent(match_expr); })
edit.target(method_call.syntax().text_range());
edit.set_cursor(caller.syntax().text_range().start());
edit.replace_ast::<ast::Expr>(method_call.into(), match_expr);
},
);
}
}
None
} }
#[cfg(test)] #[cfg(test)]

View file

@ -8,4 +8,5 @@ test_utils::marks![
test_not_inline_mut_variable test_not_inline_mut_variable
test_not_applicable_if_variable_unused test_not_applicable_if_variable_unused
change_visibility_field_false_positive change_visibility_field_false_positive
test_add_from_impl_already_exists
]; ];

View file

@ -1,7 +1,9 @@
//! Assorted functions shared by several assists. //! Assorted functions shared by several assists.
pub(crate) mod insert_use; pub(crate) mod insert_use;
use hir::Semantics; use std::iter;
use hir::{Adt, Crate, Semantics, Trait, Type};
use ra_ide_db::RootDatabase; use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, make, NameOwner}, ast::{self, make, NameOwner},
@ -99,3 +101,109 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
_ => None, _ => None,
} }
} }
#[derive(Clone, Copy)]
pub(crate) enum TryEnum {
Result,
Option,
}
impl TryEnum {
const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result];
pub(crate) fn from_ty(sema: &Semantics<RootDatabase>, ty: &Type) -> Option<TryEnum> {
let enum_ = match ty.as_adt() {
Some(Adt::Enum(it)) => it,
_ => return None,
};
TryEnum::ALL.iter().find_map(|&var| {
if &enum_.name(sema.db).to_string() == var.type_name() {
return Some(var);
}
None
})
}
pub(crate) fn happy_case(self) -> &'static str {
match self {
TryEnum::Result => "Ok",
TryEnum::Option => "Some",
}
}
pub(crate) fn sad_pattern(self) -> ast::Pat {
match self {
TryEnum::Result => make::tuple_struct_pat(
make::path_unqualified(make::path_segment(make::name_ref("Err"))),
iter::once(make::placeholder_pat().into()),
)
.into(),
TryEnum::Option => make::bind_pat(make::name("None")).into(),
}
}
fn type_name(self) -> &'static str {
match self {
TryEnum::Result => "Result",
TryEnum::Option => "Option",
}
}
}
/// Helps with finding well-know things inside the standard library. This is
/// somewhat similar to the known paths infra inside hir, but it different; We
/// want to make sure that IDE specific paths don't become interesting inside
/// the compiler itself as well.
pub(crate) struct FamousDefs<'a, 'b>(pub(crate) &'a Semantics<'b, RootDatabase>, pub(crate) Crate);
#[allow(non_snake_case)]
impl FamousDefs<'_, '_> {
#[cfg(test)]
pub(crate) const FIXTURE: &'static str = r#"
//- /libcore.rs crate:core
pub mod convert{
pub trait From<T> {
fn from(T) -> Self;
}
}
pub mod prelude { pub use crate::convert::From }
#[prelude_import]
pub use prelude::*;
"#;
pub(crate) fn core_convert_From(&self) -> Option<Trait> {
self.find_trait("core:convert:From")
}
fn find_trait(&self, path: &str) -> Option<Trait> {
let db = self.0.db;
let mut path = path.split(':');
let trait_ = path.next_back()?;
let std_crate = path.next()?;
let std_crate = self
.1
.dependencies(db)
.into_iter()
.find(|dep| &dep.name.to_string() == std_crate)?
.krate;
let mut module = std_crate.root_module(db)?;
for segment in path {
module = module.children(db).find_map(|child| {
let name = child.name(db)?;
if &name.to_string() == segment {
Some(child)
} else {
None
}
})?;
}
let def =
module.scope(db, None).into_iter().find(|(name, _def)| &name.to_string() == trait_)?.1;
match def {
hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it),
_ => None,
}
}
}

View file

@ -6,7 +6,7 @@ authors = ["rust-analyzer developers"]
[dependencies] [dependencies]
crossbeam-channel = "0.4.0" crossbeam-channel = "0.4.0"
lsp-types = { version = "0.73.0", features = ["proposed"] } lsp-types = { version = "0.74.0", features = ["proposed"] }
log = "0.4.8" log = "0.4.8"
cargo_metadata = "0.9.1" cargo_metadata = "0.9.1"
serde_json = "1.0.48" serde_json = "1.0.48"

View file

@ -57,18 +57,17 @@ pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option<ast::Expr> {
return None; return None;
} }
return Some(expr); return Some(expr);
} else { }
// Unwrap `{ continue; }` // Unwrap `{ continue; }`
let (stmt,) = block.statements().next_tuple()?; let (stmt,) = block.statements().next_tuple()?;
if let ast::Stmt::ExprStmt(expr_stmt) = stmt { if let ast::Stmt::ExprStmt(expr_stmt) = stmt {
if has_anything_else(expr_stmt.syntax()) { if has_anything_else(expr_stmt.syntax()) {
return None; return None;
} }
let expr = expr_stmt.expr()?; let expr = expr_stmt.expr()?;
match expr.syntax().kind() { match expr.syntax().kind() {
CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR => return Some(expr), CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR => return Some(expr),
_ => (), _ => (),
}
} }
} }
None None

View file

@ -953,6 +953,16 @@ impl TypeParam {
pub fn module(self, db: &dyn HirDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.parent.module(db.upcast()).into() self.id.parent.module(db.upcast()).into()
} }
pub fn ty(self, db: &dyn HirDatabase) -> Type {
let resolver = self.id.parent.resolver(db.upcast());
let environment = TraitEnvironment::lower(db, &resolver);
let ty = Ty::Placeholder(self.id);
Type {
krate: self.id.parent.module(db.upcast()).krate,
ty: InEnvironment { value: ty, environment },
}
}
} }
// FIXME: rename from `ImplDef` to `Impl` // FIXME: rename from `ImplDef` to `Impl`
@ -1157,18 +1167,21 @@ impl Type {
pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> { pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
if let Ty::Apply(a_ty) = &self.ty.value { if let Ty::Apply(a_ty) = &self.ty.value {
if let TypeCtor::Adt(AdtId::StructId(s)) = a_ty.ctor { let variant_id = match a_ty.ctor {
let var_def = s.into(); TypeCtor::Adt(AdtId::StructId(s)) => s.into(),
return db TypeCtor::Adt(AdtId::UnionId(u)) => u.into(),
.field_types(var_def) _ => return Vec::new(),
.iter() };
.map(|(local_id, ty)| {
let def = Field { parent: var_def.into(), id: local_id }; return db
let ty = ty.clone().subst(&a_ty.parameters); .field_types(variant_id)
(def, self.derived(ty)) .iter()
}) .map(|(local_id, ty)| {
.collect(); let def = Field { parent: variant_id.into(), id: local_id };
} let ty = ty.clone().subst(&a_ty.parameters);
(def, self.derived(ty))
})
.collect();
}; };
Vec::new() Vec::new()
} }

View file

@ -9,6 +9,7 @@ use hir_def::{
AsMacroCall, TraitId, AsMacroCall, TraitId,
}; };
use hir_expand::ExpansionInfo; use hir_expand::ExpansionInfo;
use hir_ty::associated_type_shorthand_candidates;
use itertools::Itertools; use itertools::Itertools;
use ra_db::{FileId, FileRange}; use ra_db::{FileId, FileRange};
use ra_prof::profile; use ra_prof::profile;
@ -24,8 +25,9 @@ use crate::{
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, SourceAnalyzer}, source_analyzer::{resolve_hir_path, SourceAnalyzer},
AssocItem, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, AssocItem, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef,
Name, Origin, Path, ScopeDef, Trait, Type, TypeParam, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam,
}; };
use resolver::TypeNs;
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum PathResolution { pub enum PathResolution {
@ -40,6 +42,44 @@ pub enum PathResolution {
AssocItem(AssocItem), AssocItem(AssocItem),
} }
impl PathResolution {
fn in_type_ns(&self) -> Option<TypeNs> {
match self {
PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
Some(TypeNs::BuiltinType(*builtin))
}
PathResolution::Def(ModuleDef::Const(_))
| PathResolution::Def(ModuleDef::EnumVariant(_))
| PathResolution::Def(ModuleDef::Function(_))
| PathResolution::Def(ModuleDef::Module(_))
| PathResolution::Def(ModuleDef::Static(_))
| PathResolution::Def(ModuleDef::Trait(_)) => None,
PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
Some(TypeNs::TypeAliasId((*alias).into()))
}
PathResolution::Local(_) | PathResolution::Macro(_) => None,
PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
PathResolution::AssocItem(AssocItem::Const(_))
| PathResolution::AssocItem(AssocItem::Function(_)) => None,
PathResolution::AssocItem(AssocItem::TypeAlias(alias)) => {
Some(TypeNs::TypeAliasId((*alias).into()))
}
}
}
/// Returns an iterator over associated types that may be specified after this path (using
/// `Ty::Assoc` syntax).
pub fn assoc_type_shorthand_candidates<R>(
&self,
db: &dyn HirDatabase,
mut cb: impl FnMut(TypeAlias) -> Option<R>,
) -> Option<R> {
associated_type_shorthand_candidates(db, self.in_type_ns()?, |_, _, id| cb(id.into()))
}
}
/// Primary API to get semantic information, like types, from syntax trees. /// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB> { pub struct Semantics<'db, DB> {
pub db: &'db DB, pub db: &'db DB,

View file

@ -182,10 +182,6 @@ impl ExprCollector<'_> {
self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr) self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr)
} }
ast::Expr::TryBlockExpr(e) => {
let body = self.collect_block_opt(e.body());
self.alloc_expr(Expr::TryBlock { body }, syntax_ptr)
}
ast::Expr::BlockExpr(e) => self.collect_block(e), ast::Expr::BlockExpr(e) => self.collect_block(e),
ast::Expr::LoopExpr(e) => { ast::Expr::LoopExpr(e) => {
let body = self.collect_block_opt(e.loop_body()); let body = self.collect_block_opt(e.loop_body());

View file

@ -101,9 +101,6 @@ pub enum Expr {
Try { Try {
expr: ExprId, expr: ExprId,
}, },
TryBlock {
body: ExprId,
},
Cast { Cast {
expr: ExprId, expr: ExprId,
type_ref: TypeRef, type_ref: TypeRef,
@ -239,7 +236,6 @@ impl Expr {
f(*expr); f(*expr);
} }
} }
Expr::TryBlock { body } => f(*body),
Expr::Loop { body } => f(*body), Expr::Loop { body } => f(*body),
Expr::While { condition, body } => { Expr::While { condition, body } => {
f(*condition); f(*condition);

View file

@ -73,11 +73,6 @@ impl<'a> InferenceContext<'a> {
self.coerce_merge_branch(&then_ty, &else_ty) self.coerce_merge_branch(&then_ty, &else_ty)
} }
Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected), Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected),
Expr::TryBlock { body } => {
let _inner = self.infer_expr(*body, expected);
// FIXME should be std::result::Result<{inner}, _>
Ty::Unknown
}
Expr::Loop { body } => { Expr::Loop { body } => {
self.infer_expr(*body, &Expectation::has_type(Ty::unit())); self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
// FIXME handle break with value // FIXME handle break with value

View file

@ -66,7 +66,8 @@ pub use autoderef::autoderef;
pub use infer::{InferTy, InferenceResult}; pub use infer::{InferTy, InferenceResult};
pub use lower::CallableDef; pub use lower::CallableDef;
pub use lower::{ pub use lower::{
callable_item_sig, ImplTraitLoweringMode, TyDefId, TyLoweringContext, ValueTyDefId, associated_type_shorthand_candidates, callable_item_sig, ImplTraitLoweringMode, TyDefId,
TyLoweringContext, ValueTyDefId,
}; };
pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment};

View file

@ -17,9 +17,9 @@ use hir_def::{
path::{GenericArg, Path, PathSegment, PathSegments}, path::{GenericArg, Path, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
AdtId, AssocContainerId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, AdtId, AssocContainerId, AssocItemId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId,
ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, HasModule, ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId,
VariantId, UnionId, VariantId,
}; };
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
use ra_db::CrateId; use ra_db::CrateId;
@ -34,6 +34,7 @@ use crate::{
Binders, BoundVar, DebruijnIndex, FnSig, GenericPredicate, PolyFnSig, ProjectionPredicate, Binders, BoundVar, DebruijnIndex, FnSig, GenericPredicate, PolyFnSig, ProjectionPredicate,
ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
}; };
use hir_expand::name::Name;
#[derive(Debug)] #[derive(Debug)]
pub struct TyLoweringContext<'a> { pub struct TyLoweringContext<'a> {
@ -383,61 +384,38 @@ impl Ty {
res: Option<TypeNs>, res: Option<TypeNs>,
segment: PathSegment<'_>, segment: PathSegment<'_>,
) -> Ty { ) -> Ty {
let traits_from_env: Vec<_> = match res { if let Some(res) = res {
Some(TypeNs::SelfType(impl_id)) => match ctx.db.impl_trait(impl_id) { let ty =
None => return Ty::Unknown, associated_type_shorthand_candidates(ctx.db, res, move |name, t, associated_ty| {
Some(trait_ref) => vec![trait_ref.value], if name == segment.name {
}, let substs = match ctx.type_param_mode {
Some(TypeNs::GenericParam(param_id)) => { TypeParamLoweringMode::Placeholder => {
let predicates = ctx.db.generic_predicates_for_param(param_id); // if we're lowering to placeholders, we have to put
let mut traits_: Vec<_> = predicates // them in now
.iter() let s = Substs::type_params(
.filter_map(|pred| match &pred.value { ctx.db,
GenericPredicate::Implemented(tr) => Some(tr.clone()), ctx.resolver.generic_def().expect(
_ => None, "there should be generics if there's a generic param",
}) ),
.collect(); );
// Handle `Self::Type` referring to own associated type in trait definitions t.substs.clone().subst_bound_vars(&s)
if let GenericDefId::TraitId(trait_id) = param_id.parent { }
let generics = generics(ctx.db.upcast(), trait_id.into()); TypeParamLoweringMode::Variable => t.substs.clone(),
if generics.params.types[param_id.local_id].provenance
== TypeParamProvenance::TraitSelf
{
let trait_ref = TraitRef {
trait_: trait_id,
substs: Substs::bound_vars(&generics, DebruijnIndex::INNERMOST),
}; };
traits_.push(trait_ref); // FIXME handle type parameters on the segment
return Some(Ty::Projection(ProjectionTy {
associated_ty,
parameters: substs,
}));
} }
}
traits_ None
} });
_ => return Ty::Unknown,
}; ty.unwrap_or(Ty::Unknown)
let traits = traits_from_env.into_iter().flat_map(|t| all_super_trait_refs(ctx.db, t)); } else {
for t in traits { Ty::Unknown
if let Some(associated_ty) =
ctx.db.trait_data(t.trait_).associated_type_by_name(&segment.name)
{
let substs = match ctx.type_param_mode {
TypeParamLoweringMode::Placeholder => {
// if we're lowering to placeholders, we have to put
// them in now
let s = Substs::type_params(
ctx.db,
ctx.resolver
.generic_def()
.expect("there should be generics if there's a generic param"),
);
t.substs.subst_bound_vars(&s)
}
TypeParamLoweringMode::Variable => t.substs,
};
// FIXME handle (forbid) type parameters on the segment
return Ty::Projection(ProjectionTy { associated_ty, parameters: substs });
}
} }
Ty::Unknown
} }
fn from_hir_path_inner( fn from_hir_path_inner(
@ -694,6 +672,61 @@ pub fn callable_item_sig(db: &dyn HirDatabase, def: CallableDef) -> PolyFnSig {
} }
} }
pub fn associated_type_shorthand_candidates<R>(
db: &dyn HirDatabase,
res: TypeNs,
mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
) -> Option<R> {
let traits_from_env: Vec<_> = match res {
TypeNs::SelfType(impl_id) => match db.impl_trait(impl_id) {
None => vec![],
Some(trait_ref) => vec![trait_ref.value],
},
TypeNs::GenericParam(param_id) => {
let predicates = db.generic_predicates_for_param(param_id);
let mut traits_: Vec<_> = predicates
.iter()
.filter_map(|pred| match &pred.value {
GenericPredicate::Implemented(tr) => Some(tr.clone()),
_ => None,
})
.collect();
// Handle `Self::Type` referring to own associated type in trait definitions
if let GenericDefId::TraitId(trait_id) = param_id.parent {
let generics = generics(db.upcast(), trait_id.into());
if generics.params.types[param_id.local_id].provenance
== TypeParamProvenance::TraitSelf
{
let trait_ref = TraitRef {
trait_: trait_id,
substs: Substs::bound_vars(&generics, DebruijnIndex::INNERMOST),
};
traits_.push(trait_ref);
}
}
traits_
}
_ => vec![],
};
for t in traits_from_env.into_iter().flat_map(move |t| all_super_trait_refs(db, t)) {
let data = db.trait_data(t.trait_);
for (name, assoc_id) in &data.items {
match assoc_id {
AssocItemId::TypeAliasId(alias) => {
if let Some(result) = cb(name, &t, *alias) {
return Some(result);
}
}
AssocItemId::FunctionId(_) | AssocItemId::ConstId(_) => {}
}
}
}
None
}
/// Build the type of all specific fields of a struct or enum variant. /// Build the type of all specific fields of a struct or enum variant.
pub(crate) fn field_types_query( pub(crate) fn field_types_query(
db: &dyn HirDatabase, db: &dyn HirDatabase,

View file

@ -249,6 +249,44 @@ mod tests {
); );
} }
#[test]
fn test_union_field_completion() {
assert_debug_snapshot!(
do_ref_completion(
r"
union Un {
field: u8,
other: u16,
}
fn foo(u: Un) {
u.<|>
}
",
),
@r###"
[
CompletionItem {
label: "field",
source_range: 140..140,
delete: 140..140,
insert: "field",
kind: Field,
detail: "u8",
},
CompletionItem {
label: "other",
source_range: 140..140,
delete: 140..140,
insert: "other",
kind: Field,
detail: "u16",
},
]
"###
);
}
#[test] #[test]
fn test_method_completion() { fn test_method_completion() {
assert_debug_snapshot!( assert_debug_snapshot!(

View file

@ -5,19 +5,29 @@ use ra_syntax::AstNode;
use test_utils::tested_by; use test_utils::tested_by;
use crate::completion::{CompletionContext, Completions}; use crate::completion::{CompletionContext, Completions};
use rustc_hash::FxHashSet;
pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) { pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) {
let path = match &ctx.path_prefix { let path = match &ctx.path_prefix {
Some(path) => path.clone(), Some(path) => path.clone(),
_ => return, _ => return,
}; };
let def = match ctx.scope().resolve_hir_path(&path) { let scope = ctx.scope();
Some(PathResolution::Def(def)) => def, let context_module = scope.module();
_ => return,
let res = match scope.resolve_hir_path(&path) {
Some(res) => res,
None => return,
}; };
let context_module = ctx.scope().module();
match def { // Add associated types on type parameters and `Self`.
hir::ModuleDef::Module(module) => { res.assoc_type_shorthand_candidates(ctx.db, |alias| {
acc.add_type_alias(ctx, alias);
None::<()>
});
match res {
PathResolution::Def(hir::ModuleDef::Module(module)) => {
let module_scope = module.scope(ctx.db, context_module); let module_scope = module.scope(ctx.db, context_module);
for (name, def) in module_scope { for (name, def) in module_scope {
if ctx.use_item_syntax.is_some() { if ctx.use_item_syntax.is_some() {
@ -35,7 +45,8 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
acc.add_resolution(ctx, name.to_string(), &def); acc.add_resolution(ctx, name.to_string(), &def);
} }
} }
hir::ModuleDef::Adt(_) | hir::ModuleDef::TypeAlias(_) => { PathResolution::Def(def @ hir::ModuleDef::Adt(_))
| PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_)) => {
if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { if let hir::ModuleDef::Adt(Adt::Enum(e)) = def {
for variant in e.variants(ctx.db) { for variant in e.variants(ctx.db) {
acc.add_enum_variant(ctx, variant, None); acc.add_enum_variant(ctx, variant, None);
@ -46,8 +57,10 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db),
_ => unreachable!(), _ => unreachable!(),
}; };
// Iterate assoc types separately
// FIXME: complete T::AssocType // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType.
// (where AssocType is defined on a trait, not an inherent impl)
let krate = ctx.krate; let krate = ctx.krate;
if let Some(krate) = krate { if let Some(krate) = krate {
let traits_in_scope = ctx.scope().traits_in_scope(); let traits_in_scope = ctx.scope().traits_in_scope();
@ -65,6 +78,7 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
None::<()> None::<()>
}); });
// Iterate assoc types separately
ty.iterate_impl_items(ctx.db, krate, |item| { ty.iterate_impl_items(ctx.db, krate, |item| {
if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
return None; return None;
@ -77,7 +91,8 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
}); });
} }
} }
hir::ModuleDef::Trait(t) => { PathResolution::Def(hir::ModuleDef::Trait(t)) => {
// Handles `Trait::assoc` as well as `<Ty as Trait>::assoc`.
for item in t.items(ctx.db) { for item in t.items(ctx.db) {
if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
continue; continue;
@ -91,8 +106,38 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
} }
} }
} }
PathResolution::TypeParam(_) | PathResolution::SelfType(_) => {
if let Some(krate) = ctx.krate {
let ty = match res {
PathResolution::TypeParam(param) => param.ty(ctx.db),
PathResolution::SelfType(impl_def) => impl_def.target_ty(ctx.db),
_ => return,
};
let traits_in_scope = ctx.scope().traits_in_scope();
let mut seen = FxHashSet::default();
ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
return None;
}
// We might iterate candidates of a trait multiple times here, so deduplicate
// them.
if seen.insert(item) {
match item {
hir::AssocItem::Function(func) => {
acc.add_function(ctx, func, None);
}
hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
}
}
None::<()>
});
}
}
_ => {} _ => {}
}; }
} }
#[cfg(test)] #[cfg(test)]
@ -843,6 +888,211 @@ mod tests {
); );
} }
#[test]
fn completes_ty_param_assoc_ty() {
assert_debug_snapshot!(
do_reference_completion(
"
//- /lib.rs
trait Super {
type Ty;
const CONST: u8;
fn func() {}
fn method(&self) {}
}
trait Sub: Super {
type SubTy;
const C2: ();
fn subfunc() {}
fn submethod(&self) {}
}
fn foo<T: Sub>() {
T::<|>
}
"
),
@r###"
[
CompletionItem {
label: "C2",
source_range: 219..219,
delete: 219..219,
insert: "C2",
kind: Const,
detail: "const C2: ();",
},
CompletionItem {
label: "CONST",
source_range: 219..219,
delete: 219..219,
insert: "CONST",
kind: Const,
detail: "const CONST: u8;",
},
CompletionItem {
label: "SubTy",
source_range: 219..219,
delete: 219..219,
insert: "SubTy",
kind: TypeAlias,
detail: "type SubTy;",
},
CompletionItem {
label: "Ty",
source_range: 219..219,
delete: 219..219,
insert: "Ty",
kind: TypeAlias,
detail: "type Ty;",
},
CompletionItem {
label: "func()",
source_range: 219..219,
delete: 219..219,
insert: "func()$0",
kind: Function,
lookup: "func",
detail: "fn func()",
},
CompletionItem {
label: "method()",
source_range: 219..219,
delete: 219..219,
insert: "method()$0",
kind: Method,
lookup: "method",
detail: "fn method(&self)",
},
CompletionItem {
label: "subfunc()",
source_range: 219..219,
delete: 219..219,
insert: "subfunc()$0",
kind: Function,
lookup: "subfunc",
detail: "fn subfunc()",
},
CompletionItem {
label: "submethod()",
source_range: 219..219,
delete: 219..219,
insert: "submethod()$0",
kind: Method,
lookup: "submethod",
detail: "fn submethod(&self)",
},
]
"###
);
}
#[test]
fn completes_self_param_assoc_ty() {
assert_debug_snapshot!(
do_reference_completion(
"
//- /lib.rs
trait Super {
type Ty;
const CONST: u8 = 0;
fn func() {}
fn method(&self) {}
}
trait Sub: Super {
type SubTy;
const C2: () = ();
fn subfunc() {}
fn submethod(&self) {}
}
struct Wrap<T>(T);
impl<T> Super for Wrap<T> {}
impl<T> Sub for Wrap<T> {
fn subfunc() {
// Should be able to assume `Self: Sub + Super`
Self::<|>
}
}
"
),
@r###"
[
CompletionItem {
label: "C2",
source_range: 365..365,
delete: 365..365,
insert: "C2",
kind: Const,
detail: "const C2: () = ();",
},
CompletionItem {
label: "CONST",
source_range: 365..365,
delete: 365..365,
insert: "CONST",
kind: Const,
detail: "const CONST: u8 = 0;",
},
CompletionItem {
label: "SubTy",
source_range: 365..365,
delete: 365..365,
insert: "SubTy",
kind: TypeAlias,
detail: "type SubTy;",
},
CompletionItem {
label: "Ty",
source_range: 365..365,
delete: 365..365,
insert: "Ty",
kind: TypeAlias,
detail: "type Ty;",
},
CompletionItem {
label: "func()",
source_range: 365..365,
delete: 365..365,
insert: "func()$0",
kind: Function,
lookup: "func",
detail: "fn func()",
},
CompletionItem {
label: "method()",
source_range: 365..365,
delete: 365..365,
insert: "method()$0",
kind: Method,
lookup: "method",
detail: "fn method(&self)",
},
CompletionItem {
label: "subfunc()",
source_range: 365..365,
delete: 365..365,
insert: "subfunc()$0",
kind: Function,
lookup: "subfunc",
detail: "fn subfunc()",
},
CompletionItem {
label: "submethod()",
source_range: 365..365,
delete: 365..365,
insert: "submethod()$0",
kind: Method,
lookup: "submethod",
detail: "fn submethod(&self)",
},
]
"###
);
}
#[test] #[test]
fn completes_type_alias() { fn completes_type_alias() {
assert_debug_snapshot!( assert_debug_snapshot!(

View file

@ -53,7 +53,7 @@ fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &T
// Variants with trivial paths are already added by the existing completion logic, // Variants with trivial paths are already added by the existing completion logic,
// so we should avoid adding these twice // so we should avoid adding these twice
if path.segments.len() > 1 { if path.segments.len() > 1 {
acc.add_enum_variant(ctx, variant, Some(path.to_string())); acc.add_qualified_enum_variant(ctx, variant, path);
} }
} }
} }
@ -1173,6 +1173,7 @@ mod tests {
delete: 248..250, delete: 248..250,
insert: "Foo::Bar", insert: "Foo::Bar",
kind: EnumVariant, kind: EnumVariant,
lookup: "Bar",
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
@ -1181,6 +1182,7 @@ mod tests {
delete: 248..250, delete: 248..250,
insert: "Foo::Baz", insert: "Foo::Baz",
kind: EnumVariant, kind: EnumVariant,
lookup: "Baz",
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
@ -1189,6 +1191,7 @@ mod tests {
delete: 248..250, delete: 248..250,
insert: "Foo::Quux", insert: "Foo::Quux",
kind: EnumVariant, kind: EnumVariant,
lookup: "Quux",
detail: "()", detail: "()",
}, },
] ]
@ -1231,6 +1234,7 @@ mod tests {
delete: 219..221, delete: 219..221,
insert: "Foo::Bar", insert: "Foo::Bar",
kind: EnumVariant, kind: EnumVariant,
lookup: "Bar",
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
@ -1239,6 +1243,7 @@ mod tests {
delete: 219..221, delete: 219..221,
insert: "Foo::Baz", insert: "Foo::Baz",
kind: EnumVariant, kind: EnumVariant,
lookup: "Baz",
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
@ -1247,6 +1252,7 @@ mod tests {
delete: 219..221, delete: 219..221,
insert: "Foo::Quux", insert: "Foo::Quux",
kind: EnumVariant, kind: EnumVariant,
lookup: "Quux",
detail: "()", detail: "()",
}, },
] ]
@ -1285,6 +1291,7 @@ mod tests {
delete: 185..186, delete: 185..186,
insert: "Foo::Bar", insert: "Foo::Bar",
kind: EnumVariant, kind: EnumVariant,
lookup: "Bar",
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
@ -1293,6 +1300,7 @@ mod tests {
delete: 185..186, delete: 185..186,
insert: "Foo::Baz", insert: "Foo::Baz",
kind: EnumVariant, kind: EnumVariant,
lookup: "Baz",
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
@ -1301,6 +1309,7 @@ mod tests {
delete: 185..186, delete: 185..186,
insert: "Foo::Quux", insert: "Foo::Quux",
kind: EnumVariant, kind: EnumVariant,
lookup: "Quux",
detail: "()", detail: "()",
}, },
CompletionItem { CompletionItem {
@ -1353,6 +1362,7 @@ mod tests {
delete: 98..99, delete: 98..99,
insert: "m::E::V", insert: "m::E::V",
kind: EnumVariant, kind: EnumVariant,
lookup: "V",
detail: "()", detail: "()",
}, },
] ]

View file

@ -1,6 +1,6 @@
//! This modules takes care of rendering various definitions as completion items. //! This modules takes care of rendering various definitions as completion items.
use hir::{Docs, HasAttrs, HasSource, HirDisplay, ScopeDef, StructKind, Type}; use hir::{Docs, HasAttrs, HasSource, HirDisplay, ModPath, ScopeDef, StructKind, Type};
use ra_syntax::ast::NameOwner; use ra_syntax::ast::NameOwner;
use stdx::SepBy; use stdx::SepBy;
use test_utils::tested_by; use test_utils::tested_by;
@ -246,14 +246,37 @@ impl Completions {
.add_to(self); .add_to(self);
} }
pub(crate) fn add_qualified_enum_variant(
&mut self,
ctx: &CompletionContext,
variant: hir::EnumVariant,
path: ModPath,
) {
self.add_enum_variant_impl(ctx, variant, None, Some(path))
}
pub(crate) fn add_enum_variant( pub(crate) fn add_enum_variant(
&mut self, &mut self,
ctx: &CompletionContext, ctx: &CompletionContext,
variant: hir::EnumVariant, variant: hir::EnumVariant,
local_name: Option<String>, local_name: Option<String>,
) {
self.add_enum_variant_impl(ctx, variant, local_name, None)
}
fn add_enum_variant_impl(
&mut self,
ctx: &CompletionContext,
variant: hir::EnumVariant,
local_name: Option<String>,
path: Option<ModPath>,
) { ) {
let is_deprecated = is_deprecated(variant, ctx.db); let is_deprecated = is_deprecated(variant, ctx.db);
let name = local_name.unwrap_or_else(|| variant.name(ctx.db).to_string()); let name = local_name.unwrap_or_else(|| variant.name(ctx.db).to_string());
let qualified_name = match &path {
Some(it) => it.to_string(),
None => name.to_string(),
};
let detail_types = variant let detail_types = variant
.fields(ctx.db) .fields(ctx.db)
.into_iter() .into_iter()
@ -271,16 +294,23 @@ impl Completions {
.surround_with("{ ", " }") .surround_with("{ ", " }")
.to_string(), .to_string(),
}; };
let mut res = let mut res = CompletionItem::new(
CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.clone()) CompletionKind::Reference,
.kind(CompletionItemKind::EnumVariant) ctx.source_range(),
.set_documentation(variant.docs(ctx.db)) qualified_name.clone(),
.set_deprecated(is_deprecated) )
.detail(detail); .kind(CompletionItemKind::EnumVariant)
.set_documentation(variant.docs(ctx.db))
.set_deprecated(is_deprecated)
.detail(detail);
if path.is_some() {
res = res.lookup_by(name);
}
if variant_kind == StructKind::Tuple { if variant_kind == StructKind::Tuple {
let params = Params::Anonymous(variant.fields(ctx.db).len()); let params = Params::Anonymous(variant.fields(ctx.db).len());
res = res.add_call_parens(ctx, name, params) res = res.add_call_parens(ctx, qualified_name, params)
} }
res.add_to(self); res.add_to(self);

View file

@ -26,6 +26,8 @@ pub struct FunctionSignature {
pub kind: CallableKind, pub kind: CallableKind,
/// Optional visibility /// Optional visibility
pub visibility: Option<String>, pub visibility: Option<String>,
/// Qualifiers like `async`, `unsafe`, ...
pub qualifier: FunctionQualifier,
/// Name of the function /// Name of the function
pub name: Option<String>, pub name: Option<String>,
/// Documentation for the function /// Documentation for the function
@ -46,6 +48,16 @@ pub struct FunctionSignature {
pub has_self_param: bool, pub has_self_param: bool,
} }
#[derive(Debug, Default)]
pub struct FunctionQualifier {
// `async` and `const` are mutually exclusive. Do we need to enforcing it here?
pub is_async: bool,
pub is_const: bool,
pub is_unsafe: bool,
/// The string `extern ".."`
pub extern_abi: Option<String>,
}
impl FunctionSignature { impl FunctionSignature {
pub(crate) fn with_doc_opt(mut self, doc: Option<Documentation>) -> Self { pub(crate) fn with_doc_opt(mut self, doc: Option<Documentation>) -> Self {
self.doc = doc; self.doc = doc;
@ -83,6 +95,8 @@ impl FunctionSignature {
FunctionSignature { FunctionSignature {
kind: CallableKind::StructConstructor, kind: CallableKind::StructConstructor,
visibility: node.visibility().map(|n| n.syntax().text().to_string()), visibility: node.visibility().map(|n| n.syntax().text().to_string()),
// Do we need `const`?
qualifier: Default::default(),
name: node.name().map(|n| n.text().to_string()), name: node.name().map(|n| n.text().to_string()),
ret_type: node.name().map(|n| n.text().to_string()), ret_type: node.name().map(|n| n.text().to_string()),
parameters: params, parameters: params,
@ -128,6 +142,8 @@ impl FunctionSignature {
FunctionSignature { FunctionSignature {
kind: CallableKind::VariantConstructor, kind: CallableKind::VariantConstructor,
visibility: None, visibility: None,
// Do we need `const`?
qualifier: Default::default(),
name: Some(name), name: Some(name),
ret_type: None, ret_type: None,
parameters: params, parameters: params,
@ -151,6 +167,7 @@ impl FunctionSignature {
FunctionSignature { FunctionSignature {
kind: CallableKind::Macro, kind: CallableKind::Macro,
visibility: None, visibility: None,
qualifier: Default::default(),
name: node.name().map(|n| n.text().to_string()), name: node.name().map(|n| n.text().to_string()),
ret_type: None, ret_type: None,
parameters: params, parameters: params,
@ -223,6 +240,12 @@ impl From<&'_ ast::FnDef> for FunctionSignature {
FunctionSignature { FunctionSignature {
kind: CallableKind::Function, kind: CallableKind::Function,
visibility: node.visibility().map(|n| n.syntax().text().to_string()), visibility: node.visibility().map(|n| n.syntax().text().to_string()),
qualifier: FunctionQualifier {
is_async: node.async_token().is_some(),
is_const: node.const_token().is_some(),
is_unsafe: node.unsafe_token().is_some(),
extern_abi: node.abi().map(|n| n.to_string()),
},
name: node.name().map(|n| n.text().to_string()), name: node.name().map(|n| n.text().to_string()),
ret_type: node ret_type: node
.ret_type() .ret_type()
@ -246,6 +269,23 @@ impl Display for FunctionSignature {
write!(f, "{} ", t)?; write!(f, "{} ", t)?;
} }
if self.qualifier.is_async {
write!(f, "async ")?;
}
if self.qualifier.is_const {
write!(f, "const ")?;
}
if self.qualifier.is_unsafe {
write!(f, "unsafe ")?;
}
if let Some(extern_abi) = &self.qualifier.extern_abi {
// Keyword `extern` is included in the string.
write!(f, "{} ", extern_abi)?;
}
if let Some(name) = &self.name { if let Some(name) = &self.name {
match self.kind { match self.kind {
CallableKind::Function => write!(f, "fn {}", name)?, CallableKind::Function => write!(f, "fn {}", name)?,

View file

@ -844,4 +844,29 @@ fn func(foo: i32) { if true { <|>foo; }; }
&["fn foo()\n```\n\n<- `\u{3000}` here"], &["fn foo()\n```\n\n<- `\u{3000}` here"],
); );
} }
#[test]
fn test_hover_function_show_qualifiers() {
check_hover_result(
"
//- /lib.rs
async fn foo<|>() {}
",
&["async fn foo()"],
);
check_hover_result(
"
//- /lib.rs
pub const unsafe fn foo<|>() {}
",
&["pub const unsafe fn foo()"],
);
check_hover_result(
r#"
//- /lib.rs
pub(crate) async unsafe extern "C" fn foo<|>() {}
"#,
&[r#"pub(crate) async unsafe extern "C" fn foo()"#],
);
}
} }

View file

@ -131,6 +131,9 @@ fn has_comma_after(node: &SyntaxNode) -> bool {
fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
let block = ast::Block::cast(token.parent())?; let block = ast::Block::cast(token.parent())?;
let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
if !block_expr.is_standalone() {
return None;
}
let expr = extract_trivial_expression(&block_expr)?; let expr = extract_trivial_expression(&block_expr)?;
let block_range = block_expr.syntax().text_range(); let block_range = block_expr.syntax().text_range();
@ -662,4 +665,67 @@ fn main() {
", ",
) )
} }
#[test]
fn join_lines_mandatory_blocks_block() {
check_join_lines(
r"
<|>fn foo() {
92
}
",
r"
<|>fn foo() { 92
}
",
);
check_join_lines(
r"
fn foo() {
<|>if true {
92
}
}
",
r"
fn foo() {
<|>if true { 92
}
}
",
);
check_join_lines(
r"
fn foo() {
<|>loop {
92
}
}
",
r"
fn foo() {
<|>loop { 92
}
}
",
);
check_join_lines(
r"
fn foo() {
<|>unsafe {
92
}
}
",
r"
fn foo() {
<|>unsafe { 92
}
}
",
);
}
} }

View file

@ -84,7 +84,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
T![box] => box_expr(p, None), T![box] => box_expr(p, None),
T![for] => for_expr(p, None), T![for] => for_expr(p, None),
T![while] => while_expr(p, None), T![while] => while_expr(p, None),
T![try] => try_block_expr(p, None), T![try] => try_expr(p, None),
LIFETIME if la == T![:] => { LIFETIME if la == T![:] => {
let m = p.start(); let m = p.start();
label(p); label(p);
@ -134,7 +134,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
} }
}; };
let blocklike = match done.kind() { let blocklike = match done.kind() {
IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR | TRY_BLOCK_EXPR => { IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR | TRY_EXPR => {
BlockLike::Block BlockLike::Block
} }
_ => BlockLike::NotBlock, _ => BlockLike::NotBlock,
@ -532,9 +532,25 @@ fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker {
// fn foo() { // fn foo() {
// let _ = try {}; // let _ = try {};
// } // }
fn try_block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { fn try_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
assert!(p.at(T![try])); assert!(p.at(T![try]));
let m = m.unwrap_or_else(|| p.start()); let m = m.unwrap_or_else(|| p.start());
// Special-case `try!` as macro.
// This is a hack until we do proper edition support
if p.nth_at(1, T![!]) {
// test try_macro_fallback
// fn foo() { try!(Ok(())); }
let path = p.start();
let path_segment = p.start();
let name_ref = p.start();
p.bump_remap(IDENT);
name_ref.complete(p, NAME_REF);
path_segment.complete(p, PATH_SEGMENT);
path.complete(p, PATH);
let _block_like = items::macro_call_after_excl(p);
return m.complete(p, MACRO_CALL);
}
p.bump(T![try]); p.bump(T![try]);
block(p); block(p);
m.complete(p, TRY_EXPR) m.complete(p, TRY_EXPR)

View file

@ -415,6 +415,17 @@ pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike {
if p.at(IDENT) { if p.at(IDENT) {
name(p); name(p);
} }
// Special-case `macro_rules! try`.
// This is a hack until we do proper edition support
// test try_macro_rules
// macro_rules! try { () => {} }
if p.at(T![try]) {
let m = p.start();
p.bump_remap(IDENT);
m.complete(p, NAME);
}
match p.current() { match p.current() {
T!['{'] => { T!['{'] => {
token_tree(p); token_tree(p);

View file

@ -47,7 +47,7 @@ fn use_tree(p: &mut Parser, top_level: bool) {
// use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`)
// use {path::from::root}; // Rust 2015 // use {path::from::root}; // Rust 2015
// use ::{some::arbritrary::path}; // Rust 2015 // use ::{some::arbritrary::path}; // Rust 2015
// use ::{{{crate::export}}}; // Nonsensical but perfectly legal nestnig // use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting
T!['{'] => { T!['{'] => {
use_tree_list(p); use_tree_list(p);
} }

View file

@ -191,7 +191,6 @@ pub enum SyntaxKind {
RECORD_LIT, RECORD_LIT,
RECORD_FIELD_LIST, RECORD_FIELD_LIST,
RECORD_FIELD, RECORD_FIELD,
TRY_BLOCK_EXPR,
BOX_EXPR, BOX_EXPR,
CALL_EXPR, CALL_EXPR,
INDEX_EXPR, INDEX_EXPR,

View file

@ -1,15 +1,17 @@
//! Driver for proc macro server //! Driver for proc macro server
use crate::{expand_task, list_macros}; use crate::ProcMacroSrv;
use ra_proc_macro::msg::{self, Message}; use ra_proc_macro::msg::{self, Message};
use std::io; use std::io;
pub fn run() -> io::Result<()> { pub fn run() -> io::Result<()> {
let mut srv = ProcMacroSrv::default();
while let Some(req) = read_request()? { while let Some(req) = read_request()? {
let res = match req { let res = match req {
msg::Request::ListMacro(task) => Ok(msg::Response::ListMacro(list_macros(&task))), msg::Request::ListMacro(task) => srv.list_macros(&task).map(msg::Response::ListMacro),
msg::Request::ExpansionMacro(task) => { msg::Request::ExpansionMacro(task) => {
expand_task(&task).map(msg::Response::ExpansionMacro) srv.expand(&task).map(msg::Response::ExpansionMacro)
} }
}; };

View file

@ -2,13 +2,12 @@
use crate::{proc_macro::bridge, rustc_server::TokenStream}; use crate::{proc_macro::bridge, rustc_server::TokenStream};
use std::fs::File; use std::fs::File;
use std::path::Path; use std::path::{Path, PathBuf};
use goblin::{mach::Mach, Object}; use goblin::{mach::Mach, Object};
use libloading::Library; use libloading::Library;
use memmap::Mmap; use memmap::Mmap;
use ra_proc_macro::ProcMacroKind; use ra_proc_macro::ProcMacroKind;
use std::io; use std::io;
const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
@ -109,23 +108,21 @@ impl ProcMacroLibraryLibloading {
} }
} }
type ProcMacroLibraryImpl = ProcMacroLibraryLibloading;
pub struct Expander { pub struct Expander {
libs: Vec<ProcMacroLibraryImpl>, inner: ProcMacroLibraryLibloading,
} }
impl Expander { impl Expander {
pub fn new(lib: &Path) -> Result<Expander, String> { pub fn new(lib: &Path) -> io::Result<Expander> {
// Some libraries for dynamic loading require canonicalized path even when it is // Some libraries for dynamic loading require canonicalized path even when it is
// already absolute // already absolute
let lib = lib let lib = lib.canonicalize()?;
.canonicalize()
.unwrap_or_else(|err| panic!("Cannot canonicalize {}: {:?}", lib.display(), err));
let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?; let lib = ensure_file_with_lock_free_access(&lib)?;
Ok(Expander { libs: vec![library] }) let library = ProcMacroLibraryLibloading::open(&lib)?;
Ok(Expander { inner: library })
} }
pub fn expand( pub fn expand(
@ -141,38 +138,36 @@ impl Expander {
TokenStream::with_subtree(attr.clone()) TokenStream::with_subtree(attr.clone())
}); });
for lib in &self.libs { for proc_macro in &self.inner.exported_macros {
for proc_macro in &lib.exported_macros { match proc_macro {
match proc_macro { bridge::client::ProcMacro::CustomDerive { trait_name, client, .. }
bridge::client::ProcMacro::CustomDerive { trait_name, client, .. } if *trait_name == macro_name =>
if *trait_name == macro_name => {
{ let res = client.run(
let res = client.run( &crate::proc_macro::bridge::server::SameThread,
&crate::proc_macro::bridge::server::SameThread, crate::rustc_server::Rustc::default(),
crate::rustc_server::Rustc::default(), parsed_body,
parsed_body, );
); return res.map(|it| it.subtree);
return res.map(|it| it.subtree);
}
bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
&crate::proc_macro::bridge::server::SameThread,
crate::rustc_server::Rustc::default(),
parsed_body,
);
return res.map(|it| it.subtree);
}
bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
&crate::proc_macro::bridge::server::SameThread,
crate::rustc_server::Rustc::default(),
parsed_attributes,
parsed_body,
);
return res.map(|it| it.subtree);
}
_ => continue,
} }
bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
&crate::proc_macro::bridge::server::SameThread,
crate::rustc_server::Rustc::default(),
parsed_body,
);
return res.map(|it| it.subtree);
}
bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
&crate::proc_macro::bridge::server::SameThread,
crate::rustc_server::Rustc::default(),
parsed_attributes,
parsed_body,
);
return res.map(|it| it.subtree);
}
_ => continue,
} }
} }
@ -180,9 +175,9 @@ impl Expander {
} }
pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
self.libs self.inner
.exported_macros
.iter() .iter()
.flat_map(|it| &it.exported_macros)
.map(|proc_macro| match proc_macro { .map(|proc_macro| match proc_macro {
bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
(trait_name.to_string(), ProcMacroKind::CustomDerive) (trait_name.to_string(), ProcMacroKind::CustomDerive)
@ -197,3 +192,33 @@ impl Expander {
.collect() .collect()
} }
} }
/// Copy the dylib to temp directory to prevent locking in Windows
#[cfg(windows)]
fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
use std::{ffi::OsString, time::SystemTime};
let mut to = std::env::temp_dir();
let file_name = path.file_name().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
format!("File path is invalid: {}", path.display()),
)
})?;
// generate a time deps unique number
let t = SystemTime::now().duration_since(std::time::UNIX_EPOCH).expect("Time went backwards");
let mut unique_name = OsString::from(t.as_millis().to_string());
unique_name.push(file_name);
to.push(unique_name);
std::fs::copy(path, &to).unwrap();
Ok(to)
}
#[cfg(unix)]
fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
Ok(path.to_path_buf())
}

View file

@ -21,28 +21,46 @@ mod dylib;
use proc_macro::bridge::client::TokenStream; use proc_macro::bridge::client::TokenStream;
use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask};
use std::path::Path; use std::{
collections::{hash_map::Entry, HashMap},
fs,
path::{Path, PathBuf},
time::SystemTime,
};
pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { #[derive(Default)]
let expander = create_expander(&task.lib); pub(crate) struct ProcMacroSrv {
expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
}
match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { impl ProcMacroSrv {
Ok(expansion) => Ok(ExpansionResult { expansion }), pub fn expand(&mut self, task: &ExpansionTask) -> Result<ExpansionResult, String> {
Err(msg) => { let expander = self.expander(&task.lib)?;
Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg)) match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) {
Ok(expansion) => Ok(ExpansionResult { expansion }),
Err(msg) => {
Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg))
}
} }
} }
}
pub(crate) fn list_macros(task: &ListMacrosTask) -> ListMacrosResult { pub fn list_macros(&mut self, task: &ListMacrosTask) -> Result<ListMacrosResult, String> {
let expander = create_expander(&task.lib); let expander = self.expander(&task.lib)?;
Ok(ListMacrosResult { macros: expander.list_macros() })
}
ListMacrosResult { macros: expander.list_macros() } fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> {
} let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| {
format!("Failed to get file metadata for {}: {:?}", path.display(), err)
})?;
fn create_expander(lib: &Path) -> dylib::Expander { Ok(match self.expanders.entry((path.to_path_buf(), time)) {
dylib::Expander::new(lib) Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| {
.unwrap_or_else(|err| panic!("Cannot create expander for {}: {:?}", lib.display(), err)) format!("Cannot create expander for {}: {:?}", path.display(), err)
})?),
Entry::Occupied(e) => e.into_mut(),
})
}
} }
pub mod cli; pub mod cli;

View file

@ -1,7 +1,7 @@
//! utils used in proc-macro tests //! utils used in proc-macro tests
use crate::dylib; use crate::dylib;
use crate::list_macros; use crate::ProcMacroSrv;
pub use difference::Changeset as __Changeset; pub use difference::Changeset as __Changeset;
use ra_proc_macro::ListMacrosTask; use ra_proc_macro::ListMacrosTask;
use std::str::FromStr; use std::str::FromStr;
@ -59,7 +59,7 @@ pub fn assert_expand(
pub fn list(crate_name: &str, version: &str) -> Vec<String> { pub fn list(crate_name: &str, version: &str) -> Vec<String> {
let path = fixtures::dylib_path(crate_name, version); let path = fixtures::dylib_path(crate_name, version);
let task = ListMacrosTask { lib: path }; let task = ListMacrosTask { lib: path };
let mut srv = ProcMacroSrv::default();
let res = list_macros(&task); let res = srv.list_macros(&task).unwrap();
res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
} }

View file

@ -30,8 +30,9 @@ pub fn init_from(spec: &str) {
pub type Label = &'static str; pub type Label = &'static str;
/// This function starts a profiling scope in the current execution stack with a given description. /// This function starts a profiling scope in the current execution stack with a given description.
/// It returns a Profile structure and measure elapsed time between this method invocation and Profile structure drop. /// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop.
/// It supports nested profiling scopes in case when this function invoked multiple times at the execution stack. In this case the profiling information will be nested at the output. /// It supports nested profiling scopes in case when this function is invoked multiple times at the execution stack.
/// In this case the profiling information will be nested at the output.
/// Profiling information is being printed in the stderr. /// Profiling information is being printed in the stderr.
/// ///
/// # Example /// # Example
@ -58,36 +59,35 @@ pub type Label = &'static str;
/// ``` /// ```
pub fn profile(label: Label) -> Profiler { pub fn profile(label: Label) -> Profiler {
assert!(!label.is_empty()); assert!(!label.is_empty());
let enabled = PROFILING_ENABLED.load(Ordering::Relaxed)
&& PROFILE_STACK.with(|stack| stack.borrow_mut().push(label)); if PROFILING_ENABLED.load(Ordering::Relaxed)
let label = if enabled { Some(label) } else { None }; && PROFILE_STACK.with(|stack| stack.borrow_mut().push(label))
Profiler { label, detail: None } {
Profiler(Some(ProfilerImpl { label, detail: None }))
} else {
Profiler(None)
}
} }
pub struct Profiler { pub struct Profiler(Option<ProfilerImpl>);
label: Option<Label>,
struct ProfilerImpl {
label: Label,
detail: Option<String>, detail: Option<String>,
} }
impl Profiler { impl Profiler {
pub fn detail(mut self, detail: impl FnOnce() -> String) -> Profiler { pub fn detail(mut self, detail: impl FnOnce() -> String) -> Profiler {
if self.label.is_some() { if let Some(profiler) = &mut self.0 {
self.detail = Some(detail()) profiler.detail = Some(detail())
} }
self self
} }
} }
impl Drop for Profiler { impl Drop for ProfilerImpl {
fn drop(&mut self) { fn drop(&mut self) {
match self { PROFILE_STACK.with(|it| it.borrow_mut().pop(self.label, self.detail.take()));
Profiler { label: Some(label), detail } => {
PROFILE_STACK.with(|stack| {
stack.borrow_mut().pop(label, detail.take());
});
}
Profiler { label: None, .. } => (),
}
} }
} }
@ -179,21 +179,18 @@ impl ProfileStack {
pub fn pop(&mut self, label: Label, detail: Option<String>) { pub fn pop(&mut self, label: Label, detail: Option<String>) {
let start = self.starts.pop().unwrap(); let start = self.starts.pop().unwrap();
let duration = start.elapsed(); let duration = start.elapsed();
let level = self.starts.len();
self.messages.finish(Message { duration, label, detail }); self.messages.finish(Message { duration, label, detail });
if level == 0 { if self.starts.is_empty() {
let longer_than = self.filter.longer_than; let longer_than = self.filter.longer_than;
// Convert to millis for comparison to avoid problems with rounding // Convert to millis for comparison to avoid problems with rounding
// (otherwise we could print `0ms` despite user's `>0` filter when // (otherwise we could print `0ms` despite user's `>0` filter when
// `duration` is just a few nanos). // `duration` is just a few nanos).
if duration.as_millis() > longer_than.as_millis() { if duration.as_millis() > longer_than.as_millis() {
let stderr = stderr();
if let Some(root) = self.messages.root() { if let Some(root) = self.messages.root() {
print(&self.messages, root, 0, longer_than, &mut stderr.lock()); print(&self.messages, root, 0, longer_than, &mut stderr().lock());
} }
} }
self.messages.clear(); self.messages.clear();
assert!(self.starts.is_empty())
} }
} }
} }

View file

@ -16,7 +16,9 @@ use crate::{
}; };
pub use self::{ pub use self::{
expr_extensions::{ArrayExprKind, BinOp, ElseBranch, LiteralKind, PrefixOp, RangeOp}, expr_extensions::{
ArrayExprKind, BinOp, BlockModifier, ElseBranch, LiteralKind, PrefixOp, RangeOp,
},
extensions::{ extensions::{
AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents, AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents,
StructKind, TypeBoundKind, VisibilityKind, StructKind, TypeBoundKind, VisibilityKind,
@ -242,6 +244,21 @@ fn test_comments_preserve_trailing_whitespace() {
); );
} }
#[test]
fn test_four_slash_line_comment() {
let file = SourceFile::parse(
r#"
//// too many slashes to be a doc comment
/// doc comment
mod foo {}
"#,
)
.ok()
.unwrap();
let module = file.syntax().descendants().find_map(Module::cast).unwrap();
assert_eq!("doc comment", module.doc_comment_text().unwrap());
}
#[test] #[test]
fn test_where_predicates() { fn test_where_predicates() {
fn assert_bound(text: &str, bound: Option<TypeBound>) { fn assert_bound(text: &str, bound: Option<TypeBound>) {

View file

@ -16,7 +16,7 @@ impl ast::Expr {
| ast::Expr::WhileExpr(_) | ast::Expr::WhileExpr(_)
| ast::Expr::BlockExpr(_) | ast::Expr::BlockExpr(_)
| ast::Expr::MatchExpr(_) | ast::Expr::MatchExpr(_)
| ast::Expr::TryBlockExpr(_) => true, | ast::Expr::TryExpr(_) => true,
_ => false, _ => false,
} }
} }
@ -359,7 +359,22 @@ impl ast::Literal {
} }
} }
pub enum BlockModifier {
Async(SyntaxToken),
Unsafe(SyntaxToken),
}
impl ast::BlockExpr { impl ast::BlockExpr {
pub fn modifier(&self) -> Option<BlockModifier> {
if let Some(token) = self.async_token() {
return Some(BlockModifier::Async(token));
}
if let Some(token) = self.unsafe_token() {
return Some(BlockModifier::Unsafe(token));
}
None
}
/// false if the block is an intrinsic part of the syntax and can't be /// false if the block is an intrinsic part of the syntax and can't be
/// replaced with arbitrary expression. /// replaced with arbitrary expression.
/// ///
@ -368,12 +383,15 @@ impl ast::BlockExpr {
/// const FOO: () = { stand_alone }; /// const FOO: () = { stand_alone };
/// ``` /// ```
pub fn is_standalone(&self) -> bool { pub fn is_standalone(&self) -> bool {
let kind = match self.syntax().parent() { if self.modifier().is_some() {
return false;
}
let parent = match self.syntax().parent() {
Some(it) => it,
None => return true, None => return true,
Some(it) => it.kind(),
}; };
match kind { match parent.kind() {
FN_DEF | MATCH_ARM | IF_EXPR | WHILE_EXPR | LOOP_EXPR | TRY_BLOCK_EXPR => false, FN_DEF | IF_EXPR | WHILE_EXPR | LOOP_EXPR => false,
_ => true, _ => true,
} }
} }

View file

@ -475,16 +475,6 @@ impl LoopExpr {
pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) } pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TryBlockExpr {
pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for TryBlockExpr {}
impl TryBlockExpr {
pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
pub fn body(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ForExpr { pub struct ForExpr {
pub(crate) syntax: SyntaxNode, pub(crate) syntax: SyntaxNode,
@ -554,6 +544,7 @@ impl ast::AttrsOwner for BlockExpr {}
impl BlockExpr { impl BlockExpr {
pub fn label(&self) -> Option<Label> { support::child(&self.syntax) } pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
pub fn block(&self) -> Option<Block> { support::child(&self.syntax) } pub fn block(&self) -> Option<Block> { support::child(&self.syntax) }
} }
@ -1249,6 +1240,7 @@ pub struct PathSegment {
} }
impl PathSegment { impl PathSegment {
pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) } pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) } pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
pub fn type_arg_list(&self) -> Option<TypeArgList> { support::child(&self.syntax) } pub fn type_arg_list(&self) -> Option<TypeArgList> { support::child(&self.syntax) }
@ -1473,7 +1465,6 @@ pub enum Expr {
FieldExpr(FieldExpr), FieldExpr(FieldExpr),
AwaitExpr(AwaitExpr), AwaitExpr(AwaitExpr),
TryExpr(TryExpr), TryExpr(TryExpr),
TryBlockExpr(TryBlockExpr),
CastExpr(CastExpr), CastExpr(CastExpr),
RefExpr(RefExpr), RefExpr(RefExpr),
PrefixExpr(PrefixExpr), PrefixExpr(PrefixExpr),
@ -1956,17 +1947,6 @@ impl AstNode for LoopExpr {
} }
fn syntax(&self) -> &SyntaxNode { &self.syntax } fn syntax(&self) -> &SyntaxNode { &self.syntax }
} }
impl AstNode for TryBlockExpr {
fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_BLOCK_EXPR }
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for ForExpr { impl AstNode for ForExpr {
fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR } fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
fn cast(syntax: SyntaxNode) -> Option<Self> { fn cast(syntax: SyntaxNode) -> Option<Self> {
@ -3308,9 +3288,6 @@ impl From<AwaitExpr> for Expr {
impl From<TryExpr> for Expr { impl From<TryExpr> for Expr {
fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) }
} }
impl From<TryBlockExpr> for Expr {
fn from(node: TryBlockExpr) -> Expr { Expr::TryBlockExpr(node) }
}
impl From<CastExpr> for Expr { impl From<CastExpr> for Expr {
fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) } fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) }
} }
@ -3341,9 +3318,8 @@ impl AstNode for Expr {
TUPLE_EXPR | ARRAY_EXPR | PAREN_EXPR | PATH_EXPR | LAMBDA_EXPR | IF_EXPR TUPLE_EXPR | ARRAY_EXPR | PAREN_EXPR | PATH_EXPR | LAMBDA_EXPR | IF_EXPR
| LOOP_EXPR | FOR_EXPR | WHILE_EXPR | CONTINUE_EXPR | BREAK_EXPR | LABEL | LOOP_EXPR | FOR_EXPR | WHILE_EXPR | CONTINUE_EXPR | BREAK_EXPR | LABEL
| BLOCK_EXPR | RETURN_EXPR | MATCH_EXPR | RECORD_LIT | CALL_EXPR | INDEX_EXPR | BLOCK_EXPR | RETURN_EXPR | MATCH_EXPR | RECORD_LIT | CALL_EXPR | INDEX_EXPR
| METHOD_CALL_EXPR | FIELD_EXPR | AWAIT_EXPR | TRY_EXPR | TRY_BLOCK_EXPR | METHOD_CALL_EXPR | FIELD_EXPR | AWAIT_EXPR | TRY_EXPR | CAST_EXPR | REF_EXPR
| CAST_EXPR | REF_EXPR | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR | LITERAL | MACRO_CALL | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR | LITERAL | MACRO_CALL | BOX_EXPR => true,
| BOX_EXPR => true,
_ => false, _ => false,
} }
} }
@ -3371,7 +3347,6 @@ impl AstNode for Expr {
FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }), FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }),
AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }), AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }),
TRY_EXPR => Expr::TryExpr(TryExpr { syntax }), TRY_EXPR => Expr::TryExpr(TryExpr { syntax }),
TRY_BLOCK_EXPR => Expr::TryBlockExpr(TryBlockExpr { syntax }),
CAST_EXPR => Expr::CastExpr(CastExpr { syntax }), CAST_EXPR => Expr::CastExpr(CastExpr { syntax }),
REF_EXPR => Expr::RefExpr(RefExpr { syntax }), REF_EXPR => Expr::RefExpr(RefExpr { syntax }),
PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }), PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }),
@ -3408,7 +3383,6 @@ impl AstNode for Expr {
Expr::FieldExpr(it) => &it.syntax, Expr::FieldExpr(it) => &it.syntax,
Expr::AwaitExpr(it) => &it.syntax, Expr::AwaitExpr(it) => &it.syntax,
Expr::TryExpr(it) => &it.syntax, Expr::TryExpr(it) => &it.syntax,
Expr::TryBlockExpr(it) => &it.syntax,
Expr::CastExpr(it) => &it.syntax, Expr::CastExpr(it) => &it.syntax,
Expr::RefExpr(it) => &it.syntax, Expr::RefExpr(it) => &it.syntax,
Expr::PrefixExpr(it) => &it.syntax, Expr::PrefixExpr(it) => &it.syntax,
@ -3889,11 +3863,6 @@ impl std::fmt::Display for LoopExpr {
std::fmt::Display::fmt(self.syntax(), f) std::fmt::Display::fmt(self.syntax(), f)
} }
} }
impl std::fmt::Display for TryBlockExpr {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ForExpr { impl std::fmt::Display for ForExpr {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f) std::fmt::Display::fmt(self.syntax(), f)

View file

@ -22,8 +22,7 @@ pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path {
pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path { pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path {
path_from_text(&format!("{}::{}", qual, segment)) path_from_text(&format!("{}::{}", qual, segment))
} }
fn path_from_text(text: &str) -> ast::Path {
pub fn path_from_text(text: &str) -> ast::Path {
ast_from_text(text) ast_from_text(text)
} }

View file

@ -13,7 +13,12 @@ impl Comment {
} }
pub fn prefix(&self) -> &'static str { pub fn prefix(&self) -> &'static str {
prefix_by_kind(self.kind()) for (prefix, k) in COMMENT_PREFIX_TO_KIND.iter() {
if *k == self.kind() && self.text().starts_with(prefix) {
return prefix;
}
}
unreachable!()
} }
} }
@ -48,6 +53,7 @@ pub enum CommentPlacement {
const COMMENT_PREFIX_TO_KIND: &[(&str, CommentKind)] = { const COMMENT_PREFIX_TO_KIND: &[(&str, CommentKind)] = {
use {CommentPlacement::*, CommentShape::*}; use {CommentPlacement::*, CommentShape::*};
&[ &[
("////", CommentKind { shape: Line, doc: None }),
("///", CommentKind { shape: Line, doc: Some(Outer) }), ("///", CommentKind { shape: Line, doc: Some(Outer) }),
("//!", CommentKind { shape: Line, doc: Some(Inner) }), ("//!", CommentKind { shape: Line, doc: Some(Inner) }),
("/**", CommentKind { shape: Block, doc: Some(Outer) }), ("/**", CommentKind { shape: Block, doc: Some(Outer) }),
@ -69,15 +75,6 @@ fn kind_by_prefix(text: &str) -> CommentKind {
panic!("bad comment text: {:?}", text) panic!("bad comment text: {:?}", text)
} }
fn prefix_by_kind(kind: CommentKind) -> &'static str {
for (prefix, k) in COMMENT_PREFIX_TO_KIND.iter() {
if *k == kind {
return prefix;
}
}
unreachable!()
}
impl Whitespace { impl Whitespace {
pub fn spans_multiple_lines(&self) -> bool { pub fn spans_multiple_lines(&self) -> bool {
let text = self.text(); let text = self.text();

View file

@ -96,6 +96,7 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
ast::RecordField(it) => validate_numeric_name(it.name_ref(), &mut errors), ast::RecordField(it) => validate_numeric_name(it.name_ref(), &mut errors),
ast::Visibility(it) => validate_visibility(it, &mut errors), ast::Visibility(it) => validate_visibility(it, &mut errors),
ast::RangeExpr(it) => validate_range_expr(it, &mut errors), ast::RangeExpr(it) => validate_range_expr(it, &mut errors),
ast::PathSegment(it) => validate_crate_keyword_in_path_segment(it, &mut errors),
_ => (), _ => (),
} }
} }
@ -222,3 +223,60 @@ fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) {
)); ));
} }
} }
fn validate_crate_keyword_in_path_segment(
segment: ast::PathSegment,
errors: &mut Vec<SyntaxError>,
) {
const ERR_MSG: &str = "The `crate` keyword is only allowed as the first segment of a path";
let crate_token = match segment.crate_token() {
None => return,
Some(it) => it,
};
// Disallow both ::crate and foo::crate
let mut path = segment.parent_path();
if segment.coloncolon_token().is_some() || path.qualifier().is_some() {
errors.push(SyntaxError::new(ERR_MSG, crate_token.text_range()));
return;
}
// For expressions and types, validation is complete, but we still have
// to handle invalid UseItems like this:
//
// use foo:{crate::bar::baz};
//
// To handle this we must inspect the parent `UseItem`s and `UseTree`s
// but right now we're looking deep inside the nested `Path` nodes because
// `Path`s are left-associative:
//
// ((crate)::bar)::baz)
// ^ current value of path
//
// So we need to climb to the top
while let Some(parent) = path.parent_path() {
path = parent;
}
// Now that we've found the whole path we need to see if there's a prefix
// somewhere in the UseTree hierarchy. This check is arbitrarily deep
// because rust allows arbitrary nesting like so:
//
// use {foo::{{{{crate::bar::baz}}}}};
for node in path.syntax().ancestors().skip(1) {
match_ast! {
match node {
ast::UseTree(it) => if let Some(tree_path) = it.path() {
// Even a top-level path exists within a `UseTree` so we must explicitly
// allow our path but disallow anything else
if tree_path != path {
errors.push(SyntaxError::new(ERR_MSG, crate_token.text_range()));
}
},
ast::UseTreeList(_it) => continue,
_ => return,
}
};
}
}

View file

@ -0,0 +1,91 @@
SOURCE_FILE@0..98
USE_ITEM@0..12
USE_KW@0..3 "use"
WHITESPACE@3..4 " "
USE_TREE@4..11
PATH@4..11
PATH_SEGMENT@4..11
COLON2@4..6 "::"
CRATE_KW@6..11 "crate"
SEMICOLON@11..12 ";"
WHITESPACE@12..13 "\n"
USE_ITEM@13..54
USE_KW@13..16 "use"
WHITESPACE@16..17 " "
USE_TREE@17..53
USE_TREE_LIST@17..53
L_CURLY@17..18 "{"
USE_TREE@18..23
PATH@18..23
PATH_SEGMENT@18..23
CRATE_KW@18..23 "crate"
COMMA@23..24 ","
WHITESPACE@24..25 " "
USE_TREE@25..52
PATH@25..28
PATH_SEGMENT@25..28
NAME_REF@25..28
IDENT@25..28 "foo"
COLON2@28..30 "::"
USE_TREE_LIST@30..52
L_CURLY@30..31 "{"
USE_TREE@31..51
PATH@31..51
PATH@31..46
PATH@31..41
PATH@31..36
PATH_SEGMENT@31..36
CRATE_KW@31..36 "crate"
COLON2@36..38 "::"
PATH_SEGMENT@38..41
NAME_REF@38..41
IDENT@38..41 "foo"
COLON2@41..43 "::"
PATH_SEGMENT@43..46
NAME_REF@43..46
IDENT@43..46 "bar"
COLON2@46..48 "::"
PATH_SEGMENT@48..51
NAME_REF@48..51
IDENT@48..51 "baz"
R_CURLY@51..52 "}"
R_CURLY@52..53 "}"
SEMICOLON@53..54 ";"
WHITESPACE@54..55 "\n"
USE_ITEM@55..72
USE_KW@55..58 "use"
WHITESPACE@58..59 " "
USE_TREE@59..71
PATH@59..71
PATH@59..64
PATH_SEGMENT@59..64
NAME_REF@59..64
IDENT@59..64 "hello"
COLON2@64..66 "::"
PATH_SEGMENT@66..71
CRATE_KW@66..71 "crate"
SEMICOLON@71..72 ";"
WHITESPACE@72..73 "\n"
USE_ITEM@73..97
USE_KW@73..76 "use"
WHITESPACE@76..77 " "
USE_TREE@77..96
PATH@77..96
PATH@77..89
PATH@77..82
PATH_SEGMENT@77..82
NAME_REF@77..82
IDENT@77..82 "hello"
COLON2@82..84 "::"
PATH_SEGMENT@84..89
CRATE_KW@84..89 "crate"
COLON2@89..91 "::"
PATH_SEGMENT@91..96
NAME_REF@91..96
IDENT@91..96 "there"
SEMICOLON@96..97 ";"
WHITESPACE@97..98 "\n"
error 6..11: The `crate` keyword is only allowed as the first segment of a path
error 31..36: The `crate` keyword is only allowed as the first segment of a path
error 66..71: The `crate` keyword is only allowed as the first segment of a path
error 84..89: The `crate` keyword is only allowed as the first segment of a path

View file

@ -0,0 +1,4 @@
use ::crate;
use {crate, foo::{crate::foo::bar::baz}};
use hello::crate;
use hello::crate::there;

View file

@ -1,4 +1,4 @@
SOURCE_FILE@0..250 SOURCE_FILE@0..249
USE_ITEM@0..58 USE_ITEM@0..58
USE_KW@0..3 "use" USE_KW@0..3 "use"
WHITESPACE@3..4 " " WHITESPACE@3..4 " "
@ -104,32 +104,33 @@ SOURCE_FILE@0..250
WHITESPACE@166..167 " " WHITESPACE@166..167 " "
COMMENT@167..179 "// Rust 2015" COMMENT@167..179 "// Rust 2015"
WHITESPACE@179..180 "\n" WHITESPACE@179..180 "\n"
USE_ITEM@180..206 USE_ITEM@180..205
USE_KW@180..183 "use" USE_KW@180..183 "use"
WHITESPACE@183..184 " " WHITESPACE@183..184 " "
USE_TREE@184..205 USE_TREE@184..204
COLON2@184..186 "::" COLON2@184..186 "::"
USE_TREE_LIST@186..205 USE_TREE_LIST@186..204
L_CURLY@186..187 "{" L_CURLY@186..187 "{"
USE_TREE@187..204 USE_TREE@187..203
USE_TREE_LIST@187..204 USE_TREE_LIST@187..203
L_CURLY@187..188 "{" L_CURLY@187..188 "{"
USE_TREE@188..203 USE_TREE@188..202
USE_TREE_LIST@188..203 USE_TREE_LIST@188..202
L_CURLY@188..189 "{" L_CURLY@188..189 "{"
USE_TREE@189..202 USE_TREE@189..201
PATH@189..202 PATH@189..201
PATH@189..194 PATH@189..193
PATH_SEGMENT@189..194 PATH_SEGMENT@189..193
CRATE_KW@189..194 "crate" NAME_REF@189..193
COLON2@194..196 "::" IDENT@189..193 "root"
PATH_SEGMENT@196..202 COLON2@193..195 "::"
NAME_REF@196..202 PATH_SEGMENT@195..201
IDENT@196..202 "export" NAME_REF@195..201
R_CURLY@202..203 "}" IDENT@195..201 "export"
R_CURLY@203..204 "}" R_CURLY@201..202 "}"
R_CURLY@204..205 "}" R_CURLY@202..203 "}"
SEMICOLON@205..206 ";" R_CURLY@203..204 "}"
WHITESPACE@206..207 " " SEMICOLON@204..205 ";"
COMMENT@207..249 "// Nonsensical but pe ..." WHITESPACE@205..206 " "
WHITESPACE@249..250 "\n" COMMENT@206..248 "// Nonsensical but pe ..."
WHITESPACE@248..249 "\n"

View file

@ -1,4 +1,4 @@
use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`)
use {path::from::root}; // Rust 2015 use {path::from::root}; // Rust 2015
use ::{some::arbritrary::path}; // Rust 2015 use ::{some::arbritrary::path}; // Rust 2015
use ::{{{crate::export}}}; // Nonsensical but perfectly legal nestnig use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting

View file

@ -0,0 +1,35 @@
SOURCE_FILE@0..27
FN_DEF@0..26
FN_KW@0..2 "fn"
WHITESPACE@2..3 " "
NAME@3..6
IDENT@3..6 "foo"
PARAM_LIST@6..8
L_PAREN@6..7 "("
R_PAREN@7..8 ")"
WHITESPACE@8..9 " "
BLOCK_EXPR@9..26
BLOCK@9..26
L_CURLY@9..10 "{"
WHITESPACE@10..11 " "
EXPR_STMT@11..24
MACRO_CALL@11..23
PATH@11..14
PATH_SEGMENT@11..14
NAME_REF@11..14
IDENT@11..14 "try"
BANG@14..15 "!"
TOKEN_TREE@15..23
L_PAREN@15..16 "("
IDENT@16..18 "Ok"
TOKEN_TREE@18..22
L_PAREN@18..19 "("
TOKEN_TREE@19..21
L_PAREN@19..20 "("
R_PAREN@20..21 ")"
R_PAREN@21..22 ")"
R_PAREN@22..23 ")"
SEMICOLON@23..24 ";"
WHITESPACE@24..25 " "
R_CURLY@25..26 "}"
WHITESPACE@26..27 "\n"

View file

@ -0,0 +1 @@
fn foo() { try!(Ok(())); }

View file

@ -0,0 +1,27 @@
SOURCE_FILE@0..30
MACRO_CALL@0..29
PATH@0..11
PATH_SEGMENT@0..11
NAME_REF@0..11
IDENT@0..11 "macro_rules"
BANG@11..12 "!"
WHITESPACE@12..13 " "
NAME@13..16
IDENT@13..16 "try"
WHITESPACE@16..17 " "
TOKEN_TREE@17..29
L_CURLY@17..18 "{"
WHITESPACE@18..19 " "
TOKEN_TREE@19..21
L_PAREN@19..20 "("
R_PAREN@20..21 ")"
WHITESPACE@21..22 " "
EQ@22..23 "="
R_ANGLE@23..24 ">"
WHITESPACE@24..25 " "
TOKEN_TREE@25..27
L_CURLY@25..26 "{"
R_CURLY@26..27 "}"
WHITESPACE@27..28 " "
R_CURLY@28..29 "}"
WHITESPACE@29..30 "\n"

View file

@ -0,0 +1 @@
macro_rules! try { () => {} }

View file

@ -20,7 +20,7 @@ globset = "0.4.4"
itertools = "0.9.0" itertools = "0.9.0"
jod-thread = "0.1.0" jod-thread = "0.1.0"
log = "0.4.8" log = "0.4.8"
lsp-types = { version = "0.73.0", features = ["proposed"] } lsp-types = { version = "0.74.0", features = ["proposed"] }
parking_lot = "0.10.0" parking_lot = "0.10.0"
pico-args = "0.3.1" pico-args = "0.3.1"
rand = { version = "0.7.3", features = ["small_rng"] } rand = { version = "0.7.3", features = ["small_rng"] }
@ -39,7 +39,7 @@ ra_prof = { path = "../ra_prof" }
ra_project_model = { path = "../ra_project_model" } ra_project_model = { path = "../ra_project_model" }
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" } ra_text_edit = { path = "../ra_text_edit" }
ra_vfs = "0.5.2" ra_vfs = "0.6.0"
# This should only be used in CLI # This should only be used in CLI
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }

View file

@ -16,7 +16,7 @@ pub fn server_capabilities() -> ServerCapabilities {
ServerCapabilities { ServerCapabilities {
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
open_close: Some(true), open_close: Some(true),
change: Some(TextDocumentSyncKind::Full), change: Some(TextDocumentSyncKind::Incremental),
will_save: None, will_save: None,
will_save_wait_until: None, will_save_wait_until: None,
save: Some(SaveOptions::default()), save: Some(SaveOptions::default()),

View file

@ -150,7 +150,7 @@ impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem {
detail: self.detail().map(|it| it.to_string()), detail: self.detail().map(|it| it.to_string()),
filter_text: Some(self.lookup().to_string()), filter_text: Some(self.lookup().to_string()),
kind: self.kind().map(|it| it.conv()), kind: self.kind().map(|it| it.conv()),
text_edit: Some(text_edit), text_edit: Some(text_edit.into()),
additional_text_edits: Some(additional_text_edits), additional_text_edits: Some(additional_text_edits),
documentation: self.documentation().map(|it| it.conv()), documentation: self.documentation().map(|it| it.conv()),
deprecated: Some(self.deprecated()), deprecated: Some(self.deprecated()),

View file

@ -6,9 +6,12 @@ mod subscriptions;
pub(crate) mod pending_requests; pub(crate) mod pending_requests;
use std::{ use std::{
borrow::Cow,
env, env,
error::Error, error::Error,
fmt, panic, fmt,
ops::Range,
panic,
path::PathBuf, path::PathBuf,
sync::Arc, sync::Arc,
time::{Duration, Instant}, time::{Duration, Instant},
@ -18,11 +21,12 @@ use crossbeam_channel::{never, select, unbounded, RecvError, Sender};
use itertools::Itertools; use itertools::Itertools;
use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response};
use lsp_types::{ use lsp_types::{
NumberOrString, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressCreateParams, DidChangeTextDocumentParams, NumberOrString, TextDocumentContentChangeEvent, WorkDoneProgress,
WorkDoneProgressEnd, WorkDoneProgressReport, WorkDoneProgressBegin, WorkDoneProgressCreateParams, WorkDoneProgressEnd,
WorkDoneProgressReport,
}; };
use ra_flycheck::{url_from_path_with_drive_lowercasing, CheckTask}; use ra_flycheck::{url_from_path_with_drive_lowercasing, CheckTask};
use ra_ide::{Canceled, FileId, LibraryData, SourceRootId}; use ra_ide::{Canceled, FileId, LibraryData, LineIndex, SourceRootId};
use ra_prof::profile; use ra_prof::profile;
use ra_project_model::{PackageRoot, ProjectWorkspace}; use ra_project_model::{PackageRoot, ProjectWorkspace};
use ra_vfs::{VfsFile, VfsTask, Watch}; use ra_vfs::{VfsFile, VfsTask, Watch};
@ -33,6 +37,7 @@ use threadpool::ThreadPool;
use crate::{ use crate::{
config::{Config, FilesWatcher}, config::{Config, FilesWatcher},
conv::{ConvWith, TryConvWith},
diagnostics::DiagnosticTask, diagnostics::DiagnosticTask,
main_loop::{ main_loop::{
pending_requests::{PendingRequest, PendingRequests}, pending_requests::{PendingRequest, PendingRequests},
@ -579,12 +584,16 @@ fn on_notification(
Err(not) => not, Err(not) => not,
}; };
let not = match notification_cast::<req::DidChangeTextDocument>(not) { let not = match notification_cast::<req::DidChangeTextDocument>(not) {
Ok(mut params) => { Ok(params) => {
let uri = params.text_document.uri; let DidChangeTextDocumentParams { text_document, content_changes } = params;
let world = state.snapshot();
let file_id = text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id)?;
let uri = text_document.uri;
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?;
let text = state.vfs.write().change_file_overlay(&path, |old_text| {
params.content_changes.pop().ok_or_else(|| "empty changes".to_string())?.text; apply_document_changes(old_text, Cow::Borrowed(&line_index), content_changes);
state.vfs.write().change_file_overlay(path.as_path(), text); });
return Ok(()); return Ok(());
} }
Err(not) => not, Err(not) => not,
@ -653,6 +662,48 @@ fn on_notification(
Ok(()) Ok(())
} }
fn apply_document_changes(
old_text: &mut String,
mut line_index: Cow<'_, LineIndex>,
content_changes: Vec<TextDocumentContentChangeEvent>,
) {
// The changes we got must be applied sequentially, but can cross lines so we
// have to keep our line index updated.
// Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we
// remember the last valid line in the index and only rebuild it if needed.
enum IndexValid {
All,
UpToLine(u64),
}
impl IndexValid {
fn covers(&self, line: u64) -> bool {
match *self {
IndexValid::UpToLine(to) => to >= line,
_ => true,
}
}
}
let mut index_valid = IndexValid::All;
for change in content_changes {
match change.range {
Some(range) => {
if !index_valid.covers(range.start.line) {
line_index = Cow::Owned(LineIndex::new(&old_text));
}
index_valid = IndexValid::UpToLine(range.start.line);
let range = range.conv_with(&line_index);
old_text.replace_range(Range::<usize>::from(range), &change.text);
}
None => {
*old_text = change.text;
index_valid = IndexValid::UpToLine(0);
}
}
}
}
fn on_check_task( fn on_check_task(
task: CheckTask, task: CheckTask,
world_state: &mut WorldState, world_state: &mut WorldState,
@ -958,3 +1009,64 @@ where
{ {
Request::new(id, R::METHOD.to_string(), params) Request::new(id, R::METHOD.to_string(), params)
} }
#[cfg(test)]
mod tests {
use std::borrow::Cow;
use lsp_types::{Position, Range, TextDocumentContentChangeEvent};
use ra_ide::LineIndex;
#[test]
fn apply_document_changes() {
fn run(text: &mut String, changes: Vec<TextDocumentContentChangeEvent>) {
let line_index = Cow::Owned(LineIndex::new(&text));
super::apply_document_changes(text, line_index, changes);
}
macro_rules! c {
[$($sl:expr, $sc:expr; $el:expr, $ec:expr => $text:expr),+] => {
vec![$(TextDocumentContentChangeEvent {
range: Some(Range {
start: Position { line: $sl, character: $sc },
end: Position { line: $el, character: $ec },
}),
range_length: None,
text: String::from($text),
}),+]
};
}
let mut text = String::new();
run(&mut text, vec![]);
assert_eq!(text, "");
run(
&mut text,
vec![TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: String::from("the"),
}],
);
assert_eq!(text, "the");
run(&mut text, c![0, 3; 0, 3 => " quick"]);
assert_eq!(text, "the quick");
run(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
assert_eq!(text, "quick foxes");
run(&mut text, c![0, 11; 0, 11 => "\ndream"]);
assert_eq!(text, "quick foxes\ndream");
run(&mut text, c![1, 0; 1, 0 => "have "]);
assert_eq!(text, "quick foxes\nhave dream");
run(&mut text, c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"]);
assert_eq!(text, "the quick foxes\nhave quiet dreams\n");
run(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n");
run(
&mut text,
c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"],
);
assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n");
run(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
assert_eq!(text, "the quick \nthey have quiet dreams\n");
}
}

View file

@ -326,10 +326,10 @@ pub fn handle_workspace_symbol(
pub fn handle_goto_definition( pub fn handle_goto_definition(
world: WorldSnapshot, world: WorldSnapshot,
params: req::TextDocumentPositionParams, params: req::GotoDefinitionParams,
) -> Result<Option<req::GotoDefinitionResponse>> { ) -> Result<Option<req::GotoDefinitionResponse>> {
let _p = profile("handle_goto_definition"); let _p = profile("handle_goto_definition");
let position = params.try_conv_with(&world)?; let position = params.text_document_position_params.try_conv_with(&world)?;
let nav_info = match world.analysis().goto_definition(position)? { let nav_info = match world.analysis().goto_definition(position)? {
None => return Ok(None), None => return Ok(None),
Some(it) => it, Some(it) => it,
@ -340,10 +340,10 @@ pub fn handle_goto_definition(
pub fn handle_goto_implementation( pub fn handle_goto_implementation(
world: WorldSnapshot, world: WorldSnapshot,
params: req::TextDocumentPositionParams, params: req::GotoImplementationParams,
) -> Result<Option<req::GotoImplementationResponse>> { ) -> Result<Option<req::GotoImplementationResponse>> {
let _p = profile("handle_goto_implementation"); let _p = profile("handle_goto_implementation");
let position = params.try_conv_with(&world)?; let position = params.text_document_position_params.try_conv_with(&world)?;
let nav_info = match world.analysis().goto_implementation(position)? { let nav_info = match world.analysis().goto_implementation(position)? {
None => return Ok(None), None => return Ok(None),
Some(it) => it, Some(it) => it,
@ -354,10 +354,10 @@ pub fn handle_goto_implementation(
pub fn handle_goto_type_definition( pub fn handle_goto_type_definition(
world: WorldSnapshot, world: WorldSnapshot,
params: req::TextDocumentPositionParams, params: req::GotoTypeDefinitionParams,
) -> Result<Option<req::GotoTypeDefinitionResponse>> { ) -> Result<Option<req::GotoTypeDefinitionResponse>> {
let _p = profile("handle_goto_type_definition"); let _p = profile("handle_goto_type_definition");
let position = params.try_conv_with(&world)?; let position = params.text_document_position_params.try_conv_with(&world)?;
let nav_info = match world.analysis().goto_type_definition(position)? { let nav_info = match world.analysis().goto_type_definition(position)? {
None => return Ok(None), None => return Ok(None),
Some(it) => it, Some(it) => it,
@ -487,10 +487,10 @@ pub fn handle_folding_range(
pub fn handle_signature_help( pub fn handle_signature_help(
world: WorldSnapshot, world: WorldSnapshot,
params: req::TextDocumentPositionParams, params: req::SignatureHelpParams,
) -> Result<Option<req::SignatureHelp>> { ) -> Result<Option<req::SignatureHelp>> {
let _p = profile("handle_signature_help"); let _p = profile("handle_signature_help");
let position = params.try_conv_with(&world)?; let position = params.text_document_position_params.try_conv_with(&world)?;
if let Some(call_info) = world.analysis().call_info(position)? { if let Some(call_info) = world.analysis().call_info(position)? {
let concise = !world.config.call_info_full; let concise = !world.config.call_info_full;
let mut active_parameter = call_info.active_parameter.map(|it| it as i64); let mut active_parameter = call_info.active_parameter.map(|it| it as i64);
@ -509,12 +509,9 @@ pub fn handle_signature_help(
} }
} }
pub fn handle_hover( pub fn handle_hover(world: WorldSnapshot, params: req::HoverParams) -> Result<Option<Hover>> {
world: WorldSnapshot,
params: req::TextDocumentPositionParams,
) -> Result<Option<Hover>> {
let _p = profile("handle_hover"); let _p = profile("handle_hover");
let position = params.try_conv_with(&world)?; let position = params.text_document_position_params.try_conv_with(&world)?;
let info = match world.analysis().hover(position)? { let info = match world.analysis().hover(position)? {
None => return Ok(None), None => return Ok(None),
Some(info) => info, Some(info) => info,
@ -878,8 +875,14 @@ pub fn handle_code_lens(
.map(|it| { .map(|it| {
let range = it.node_range.conv_with(&line_index); let range = it.node_range.conv_with(&line_index);
let pos = range.start; let pos = range.start;
let lens_params = let lens_params = req::GotoImplementationParams {
req::TextDocumentPositionParams::new(params.text_document.clone(), pos); text_document_position_params: req::TextDocumentPositionParams::new(
params.text_document.clone(),
pos,
),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
};
CodeLens { CodeLens {
range, range,
command: None, command: None,
@ -894,7 +897,7 @@ pub fn handle_code_lens(
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
enum CodeLensResolveData { enum CodeLensResolveData {
Impls(req::TextDocumentPositionParams), Impls(req::GotoImplementationParams),
} }
pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> { pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> {
@ -927,7 +930,7 @@ pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Re
title, title,
command: "rust-analyzer.showReferences".into(), command: "rust-analyzer.showReferences".into(),
arguments: Some(vec![ arguments: Some(vec![
to_value(&lens_params.text_document.uri).unwrap(), to_value(&lens_params.text_document_position_params.text_document.uri).unwrap(),
to_value(code_lens.range.start).unwrap(), to_value(code_lens.range.start).unwrap(),
to_value(locations).unwrap(), to_value(locations).unwrap(),
]), ]),
@ -944,16 +947,16 @@ pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Re
pub fn handle_document_highlight( pub fn handle_document_highlight(
world: WorldSnapshot, world: WorldSnapshot,
params: req::TextDocumentPositionParams, params: req::DocumentHighlightParams,
) -> Result<Option<Vec<DocumentHighlight>>> { ) -> Result<Option<Vec<DocumentHighlight>>> {
let _p = profile("handle_document_highlight"); let _p = profile("handle_document_highlight");
let file_id = params.text_document.try_conv_with(&world)?; let file_id = params.text_document_position_params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id)?; let line_index = world.analysis().file_line_index(file_id)?;
let refs = match world let refs = match world.analysis().find_all_refs(
.analysis() params.text_document_position_params.try_conv_with(&world)?,
.find_all_refs(params.try_conv_with(&world)?, Some(SearchScope::single_file(file_id)))? Some(SearchScope::single_file(file_id)),
{ )? {
None => return Ok(None), None => return Ok(None),
Some(refs) => refs, Some(refs) => refs,
}; };

View file

@ -8,14 +8,15 @@ pub use lsp_types::{
notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CodeLens, notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CodeLens,
CodeLensParams, CompletionParams, CompletionResponse, ConfigurationItem, ConfigurationParams, CodeLensParams, CompletionParams, CompletionResponse, ConfigurationItem, ConfigurationParams,
DiagnosticTag, DidChangeConfigurationParams, DidChangeWatchedFilesParams, DiagnosticTag, DidChangeConfigurationParams, DidChangeWatchedFilesParams,
DidChangeWatchedFilesRegistrationOptions, DocumentOnTypeFormattingParams, DocumentSymbolParams, DidChangeWatchedFilesRegistrationOptions, DocumentHighlightParams,
DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType, DocumentOnTypeFormattingParams, DocumentSymbolParams, DocumentSymbolResponse,
PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken, FileSystemWatcher, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams,
PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange, InitializeResult, MessageType, PartialResultParams, ProgressParams, ProgressParamsValue,
SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams, ProgressToken, PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams,
SelectionRange, SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams,
SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams, SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams,
SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, TextEdit, SignatureHelp, SignatureHelpParams, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams, TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
}; };
use std::path::PathBuf; use std::path::PathBuf;

View file

@ -4,64 +4,69 @@ use std::ops;
use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens}; use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens};
pub(crate) const ATTRIBUTE: SemanticTokenType = SemanticTokenType::new("attribute"); macro_rules! define_semantic_token_types {
pub(crate) const BUILTIN_TYPE: SemanticTokenType = SemanticTokenType::new("builtinType"); ($(($ident:ident, $string:literal)),*$(,)?) => {
pub(crate) const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMember"); $(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)*
pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime");
pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias");
pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union");
pub(crate) const UNRESOLVED_REFERENCE: SemanticTokenType =
SemanticTokenType::new("unresolvedReference");
pub(crate) const FORMAT_SPECIFIER: SemanticTokenType = SemanticTokenType::new("formatSpecifier");
pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant"); pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
pub(crate) const CONTROL_FLOW: SemanticTokenModifier = SemanticTokenModifier::new("controlFlow"); SemanticTokenType::COMMENT,
pub(crate) const MUTABLE: SemanticTokenModifier = SemanticTokenModifier::new("mutable"); SemanticTokenType::KEYWORD,
pub(crate) const UNSAFE: SemanticTokenModifier = SemanticTokenModifier::new("unsafe"); SemanticTokenType::STRING,
SemanticTokenType::NUMBER,
SemanticTokenType::REGEXP,
SemanticTokenType::OPERATOR,
SemanticTokenType::NAMESPACE,
SemanticTokenType::TYPE,
SemanticTokenType::STRUCT,
SemanticTokenType::CLASS,
SemanticTokenType::INTERFACE,
SemanticTokenType::ENUM,
SemanticTokenType::TYPE_PARAMETER,
SemanticTokenType::FUNCTION,
SemanticTokenType::MEMBER,
SemanticTokenType::PROPERTY,
SemanticTokenType::MACRO,
SemanticTokenType::VARIABLE,
SemanticTokenType::PARAMETER,
SemanticTokenType::LABEL,
$($ident),*
];
};
}
pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[ define_semantic_token_types![
SemanticTokenType::COMMENT, (ATTRIBUTE, "attribute"),
SemanticTokenType::KEYWORD, (BUILTIN_TYPE, "builtinType"),
SemanticTokenType::STRING, (ENUM_MEMBER, "enumMember"),
SemanticTokenType::NUMBER, (LIFETIME, "lifetime"),
SemanticTokenType::REGEXP, (TYPE_ALIAS, "typeAlias"),
SemanticTokenType::OPERATOR, (UNION, "union"),
SemanticTokenType::NAMESPACE, (UNRESOLVED_REFERENCE, "unresolvedReference"),
SemanticTokenType::TYPE, (FORMAT_SPECIFIER, "formatSpecifier"),
SemanticTokenType::STRUCT,
SemanticTokenType::CLASS,
SemanticTokenType::INTERFACE,
SemanticTokenType::ENUM,
SemanticTokenType::TYPE_PARAMETER,
SemanticTokenType::FUNCTION,
SemanticTokenType::MEMBER,
SemanticTokenType::PROPERTY,
SemanticTokenType::MACRO,
SemanticTokenType::VARIABLE,
SemanticTokenType::PARAMETER,
SemanticTokenType::LABEL,
ATTRIBUTE,
BUILTIN_TYPE,
ENUM_MEMBER,
LIFETIME,
TYPE_ALIAS,
UNION,
UNRESOLVED_REFERENCE,
FORMAT_SPECIFIER,
]; ];
pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[ macro_rules! define_semantic_token_modifiers {
SemanticTokenModifier::DOCUMENTATION, ($(($ident:ident, $string:literal)),*$(,)?) => {
SemanticTokenModifier::DECLARATION, $(pub(crate) const $ident: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
SemanticTokenModifier::DEFINITION,
SemanticTokenModifier::STATIC, pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
SemanticTokenModifier::ABSTRACT, SemanticTokenModifier::DOCUMENTATION,
SemanticTokenModifier::DEPRECATED, SemanticTokenModifier::DECLARATION,
SemanticTokenModifier::READONLY, SemanticTokenModifier::DEFINITION,
CONSTANT, SemanticTokenModifier::STATIC,
MUTABLE, SemanticTokenModifier::ABSTRACT,
UNSAFE, SemanticTokenModifier::DEPRECATED,
CONTROL_FLOW, SemanticTokenModifier::READONLY,
$($ident),*
];
};
}
define_semantic_token_modifiers![
(CONSTANT, "constant"),
(CONTROL_FLOW, "controlFlow"),
(MUTABLE, "mutable"),
(UNSAFE, "unsafe"),
]; ];
#[derive(Default)] #[derive(Default)]

View file

@ -4,8 +4,8 @@ use std::{collections::HashMap, path::PathBuf, time::Instant};
use lsp_types::{ use lsp_types::{
CodeActionContext, DidOpenTextDocumentParams, DocumentFormattingParams, FormattingOptions, CodeActionContext, DidOpenTextDocumentParams, DocumentFormattingParams, FormattingOptions,
PartialResultParams, Position, Range, TextDocumentItem, TextDocumentPositionParams, GotoDefinitionParams, HoverParams, PartialResultParams, Position, Range, TextDocumentItem,
WorkDoneProgressParams, TextDocumentPositionParams, WorkDoneProgressParams,
}; };
use rust_analyzer::req::{ use rust_analyzer::req::{
CodeActionParams, CodeActionRequest, Completion, CompletionParams, DidOpenTextDocument, CodeActionParams, CodeActionRequest, Completion, CompletionParams, DidOpenTextDocument,
@ -610,10 +610,14 @@ fn main() { message(); }
}) })
.server(); .server();
server.wait_until_workspace_is_loaded(); server.wait_until_workspace_is_loaded();
let res = server.send_request::<GotoDefinition>(TextDocumentPositionParams::new( let res = server.send_request::<GotoDefinition>(GotoDefinitionParams {
server.doc_id("src/main.rs"), text_document_position_params: TextDocumentPositionParams::new(
Position::new(2, 15), server.doc_id("src/main.rs"),
)); Position::new(2, 15),
),
work_done_progress_params: Default::default(),
partial_result_params: Default::default(),
});
assert!(format!("{}", res).contains("hello.rs")); assert!(format!("{}", res).contains("hello.rs"));
} }
@ -692,10 +696,13 @@ pub fn foo(_input: TokenStream) -> TokenStream {
.root("bar") .root("bar")
.server(); .server();
server.wait_until_workspace_is_loaded(); server.wait_until_workspace_is_loaded();
let res = server.send_request::<HoverRequest>(TextDocumentPositionParams::new( let res = server.send_request::<HoverRequest>(HoverParams {
server.doc_id("foo/src/main.rs"), text_document_position_params: TextDocumentPositionParams::new(
Position::new(7, 9), server.doc_id("foo/src/main.rs"),
)); Position::new(7, 9),
),
work_done_progress_params: Default::default(),
});
let value = res.get("contents").unwrap().get("value").unwrap().to_string(); let value = res.get("contents").unwrap().get("value").unwrap().to_string();
assert_eq!(value, r#""```rust\nfoo::Bar\nfn bar()\n```""#) assert_eq!(value, r#""```rust\nfoo::Bar\nfn bar()\n```""#)

View file

@ -35,7 +35,7 @@ The syntax tree consists of three layers:
* AST * AST
Of these, only GreenNodes store the actual data, the other two layers are (non-trivial) views into green tree. Of these, only GreenNodes store the actual data, the other two layers are (non-trivial) views into green tree.
Red-green terminology comes from Roslyn ([link](https://docs.microsoft.com/en-ie/archive/blogs/ericlippert/persistence-facades-and-roslyns-red-green-trees)) and gives the name to the `rowan` library. Green and syntax nodes are defined in rowan, ast is defined in rust-analyzer. Red-green terminology comes from Roslyn ([link](https://ericlippert.com/2012/06/08/red-green-trees/)) and gives the name to the `rowan` library. Green and syntax nodes are defined in rowan, ast is defined in rust-analyzer.
Syntax trees are a semi-transient data structure. Syntax trees are a semi-transient data structure.
In general, frontend does not keep syntax trees for all files in memory. In general, frontend does not keep syntax trees for all files in memory.

View file

@ -140,8 +140,8 @@ space or `;` depending on the return type of the function.
When completing a function call, `()` are automatically inserted. If a function When completing a function call, `()` are automatically inserted. If a function
takes arguments, the cursor is positioned inside the parenthesis. takes arguments, the cursor is positioned inside the parenthesis.
There are postifx completions, which can be triggerd by typing something like There are postfix completions, which can be triggered by typing something like
`foo().if`. The word after `.` determines postifx completion. Possible variants are: `foo().if`. The word after `.` determines postfix completion. Possible variants are:
- `expr.if` -> `if expr {}` - `expr.if` -> `if expr {}`
- `expr.match` -> `match expr {}` - `expr.match` -> `match expr {}`

View file

@ -111,7 +111,7 @@ Here are some useful self-diagnostic commands:
=== rust-analyzer Language Server Binary === rust-analyzer Language Server Binary
Other editors generally require the `rust-analyzer` binary to be in `$PATH`. Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
You can download the pre-built binary from the https://github.com/rust-analyzer/rust-analyzer/releases[releases] page. Typically, you then need to rename the binary for your platform, e.g. `rust-analyzer-mac` if you're on Mac OS, to `rust-analzyer` and make it executable in addition to moving it into a directory in your `$PATH`. You can download the pre-built binary from the https://github.com/rust-analyzer/rust-analyzer/releases[releases] page. Typically, you then need to rename the binary for your platform, e.g. `rust-analyzer-mac` if you're on Mac OS, to `rust-analyzer` and make it executable in addition to moving it into a directory in your `$PATH`.
On Linux to install the `rust-analyzer` binary into `~/.local/bin`, this commands could be used On Linux to install the `rust-analyzer` binary into `~/.local/bin`, this commands could be used
@ -169,13 +169,15 @@ The are several LSP client implementations for vim:
1. Install coc.nvim by following the instructions at 1. Install coc.nvim by following the instructions at
https://github.com/neoclide/coc.nvim[coc.nvim] https://github.com/neoclide/coc.nvim[coc.nvim]
(nodejs required) (Node.js required)
2. Run `:CocInstall coc-rust-analyzer` to install 2. Run `:CocInstall coc-rust-analyzer` to install
https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer], https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
this extension implements _most_ of the features supported in the VSCode extension: this extension implements _most_ of the features supported in the VSCode extension:
* automatically install and upgrade stable/nightly releases
* same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.cargo.features` etc. * same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.cargo.features` etc.
* same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc. * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
* highlighting and inlay_hints are not implemented yet * inlay hints for method chaining support, _Neovim Only_
* semantic highlighting is not implemented yet
==== LanguageClient-neovim ==== LanguageClient-neovim
@ -195,7 +197,7 @@ let g:LanguageClient_serverCommands = {
==== YouCompleteMe ==== YouCompleteMe
1. Install YouCompleteMe by following the instructions 1. Install YouCompleteMe by following the instructions
https://ycm-core.github.io/YouCompleteMe/#rust-semantic-completion[here] https://github.com/ycm-core/lsp-examples#rust-rust-analyzer[here]
2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists): 2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists):
+ +
@ -212,6 +214,21 @@ let g:ycm_language_server =
\ ] \ ]
---- ----
==== ALE
To add the LSP server to https://github.com/dense-analysis/ale[ale]:
[source,vim]
----
call ale#linter#Define('rust', {
\ 'name': 'rust-analyzer',
\ 'lsp': 'stdio',
\ 'executable': 'rust-analyzer',
\ 'command': '%e',
\ 'project_root': '.',
\})
----
==== nvim-lsp ==== nvim-lsp
NeoVim 0.5 (not yet released) has built-in language server support. NeoVim 0.5 (not yet released) has built-in language server support.
@ -229,9 +246,9 @@ You also need the `LSP` package. To install it:
* Type `Install Package Control`, press enter * Type `Install Package Control`, press enter
2. In the command palette, run `Package control: Install package`, and in the list that pops up, type `LSP` and press enter. 2. In the command palette, run `Package control: Install package`, and in the list that pops up, type `LSP` and press enter.
Finally, with your Rust project open, in the command palette, run `LSP: Enable Language Server In Project` or `LSP: Enable Language Server Globally`, then select `rust-analyzer` in the list that pops up to enable the rust-analyzer LSP. The latter means that rust-analzyer is enabled by default in Rust projects. Finally, with your Rust project open, in the command palette, run `LSP: Enable Language Server In Project` or `LSP: Enable Language Server Globally`, then select `rust-analyzer` in the list that pops up to enable the rust-analyzer LSP. The latter means that rust-analyzer is enabled by default in Rust projects.
If it worked, you should see "rust-analzyer, Line X, Column Y" on the left side of the bottom bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available. If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the bottom bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available.
If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary. If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary.

View file

@ -27,6 +27,7 @@
"scripts": { "scripts": {
"vscode:prepublish": "tsc && rollup -c", "vscode:prepublish": "tsc && rollup -c",
"package": "vsce package -o rust-analyzer.vsix", "package": "vsce package -o rust-analyzer.vsix",
"build": "tsc",
"watch": "tsc --watch", "watch": "tsc --watch",
"lint": "tsfmt --verify && eslint -c .eslintrc.js --ext ts ./src", "lint": "tsfmt --verify && eslint -c .eslintrc.js --ext ts ./src",
"fix": " tsfmt -r && eslint -c .eslintrc.js --ext ts ./src --fix" "fix": " tsfmt -r && eslint -c .eslintrc.js --ext ts ./src --fix"
@ -388,6 +389,28 @@
"description": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.", "description": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.",
"type": "boolean", "type": "boolean",
"default": false "default": false
},
"rust-analyzer.debug.engine": {
"type": "string",
"enum": [
"auto",
"vadimcn.vscode-lldb",
"ms-vscode.cpptools"
],
"default": "auto",
"description": "Preffered debug engine.",
"markdownEnumDescriptions": [
"First try to use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb), if it's not installed try to use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools).",
"Use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb)",
"Use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools)"
]
},
"rust-analyzer.debug.sourceFileMap": {
"type": "object",
"description": "Optional source file mappings passed to the debug engine.",
"default": {
"/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"
}
} }
} }
}, },

106
editors/code/src/cargo.ts Normal file
View file

@ -0,0 +1,106 @@
import * as cp from 'child_process';
import * as readline from 'readline';
import { OutputChannel } from 'vscode';
// Describes a single artifact produced by a cargo build, as reported by
// cargo's `--message-format=json` output stream.
interface CompilationArtifact {
    fileName: string;
    name: string;
    kind: string;
    isTest: boolean;
}

// Thin wrapper around the `cargo` CLI: runs builds with JSON message output
// and extracts the produced executables, forwarding diagnostics to `output`.
export class Cargo {
    rootFolder: string;
    env?: Record<string, string>;
    output: OutputChannel;

    public constructor(cargoTomlFolder: string, output: OutputChannel, env: Record<string, string> | undefined = undefined) {
        this.rootFolder = cargoTomlFolder;
        this.output = output;
        this.env = env;
    }

    // Runs cargo with the given args and collects every binary (excluding
    // build scripts) and test artifact it reports. Compiler diagnostics are
    // appended to `this.output`; any failure is re-thrown with context.
    public async artifactsFromArgs(cargoArgs: string[]): Promise<CompilationArtifact[]> {
        const artifacts: CompilationArtifact[] = [];

        try {
            await this.runCargo(cargoArgs,
                message => {
                    if (message.reason === 'compiler-artifact' && message.executable) {
                        const isBinary = message.target.crate_types.includes('bin');
                        const isBuildScript = message.target.kind.includes('custom-build');
                        if ((isBinary && !isBuildScript) || message.profile.test) {
                            artifacts.push({
                                fileName: message.executable,
                                name: message.target.name,
                                kind: message.target.kind[0],
                                isTest: message.profile.test
                            });
                        }
                    }
                    else if (message.reason === 'compiler-message') {
                        this.output.append(message.message.rendered);
                    }
                },
                stderr => {
                    this.output.append(stderr);
                }
            );
        }
        catch (err) {
            this.output.show(true);
            throw new Error(`Cargo invocation has failed: ${err}`);
        }

        return artifacts;
    }

    // Builds with the given args and returns the path of the single produced
    // executable; throws if cargo produced zero or more than one artifact.
    public async executableFromArgs(args: string[]): Promise<string> {
        const cargoArgs = [...args]; // copy so the caller's args stay unchanged
        cargoArgs.push("--message-format=json");

        const artifacts = await this.artifactsFromArgs(cargoArgs);

        if (artifacts.length === 0) {
            throw new Error('No compilation artifacts');
        } else if (artifacts.length > 1) {
            throw new Error('Multiple compilation artifacts are not supported.');
        }

        return artifacts[0].fileName;
    }

    // Spawns cargo, feeding each stdout line (parsed as JSON) to
    // `onStdoutJson` and raw stderr chunks to `onStderrString`. Resolves with
    // the exit code on success; rejects on spawn failure, non-zero exit, or
    // termination by signal.
    runCargo(
        cargoArgs: string[],
        onStdoutJson: (obj: any) => void,
        onStderrString: (data: string) => void
    ): Promise<number> {
        return new Promise<number>((resolve, reject) => {
            const cargo = cp.spawn('cargo', cargoArgs, {
                stdio: ['ignore', 'pipe', 'pipe'],
                cwd: this.rootFolder,
                env: this.env,
            });

            cargo.on('error', err => {
                reject(new Error(`could not launch cargo: ${err}`));
            });
            cargo.stderr.on('data', chunk => {
                onStderrString(chunk.toString());
            });

            const rl = readline.createInterface({ input: cargo.stdout });
            rl.on('line', line => {
                // A malformed line must not escape the event handler as an
                // uncaught exception; report it through the stderr callback.
                try {
                    onStdoutJson(JSON.parse(line));
                } catch (err) {
                    onStderrString(`failed to parse cargo JSON output: ${line}\n`);
                }
            });

            // Use 'close' rather than 'exit': 'close' fires only after all
            // stdio streams have ended, so no artifact messages are lost to a
            // race with the readline interface. On 'close' the exit code is
            // null when the process was killed by a signal.
            cargo.on('close', (exitCode, signal) => {
                if (exitCode === 0) {
                    resolve(exitCode);
                } else if (signal !== null) {
                    reject(new Error(`cargo was killed by signal: ${signal}.`));
                } else {
                    reject(new Error(`exit code: ${exitCode}.`));
                }
            });
        });
    }
}

View file

@ -1,8 +1,10 @@
import * as vscode from 'vscode'; import * as vscode from 'vscode';
import * as lc from 'vscode-languageclient'; import * as lc from 'vscode-languageclient';
import * as ra from '../rust-analyzer-api'; import * as ra from '../rust-analyzer-api';
import * as os from "os";
import { Ctx, Cmd } from '../ctx'; import { Ctx, Cmd } from '../ctx';
import { Cargo } from '../cargo';
export function run(ctx: Ctx): Cmd { export function run(ctx: Ctx): Cmd {
let prevRunnable: RunnableQuickPick | undefined; let prevRunnable: RunnableQuickPick | undefined;
@ -62,25 +64,69 @@ export function runSingle(ctx: Ctx): Cmd {
}; };
} }
// Builds a CodeLLDB ("lldb") launch configuration for the given runnable.
// CodeLLDB drives the build itself through the embedded `cargo` clause, so
// no executable path is needed here.
function getLldbDebugConfig(config: ra.Runnable, sourceFileMap: Record<string, string>): vscode.DebugConfiguration {
    const debugConfig: vscode.DebugConfiguration = {
        type: "lldb",
        request: "launch",
        name: config.label,
        cargo: { args: config.args },
        args: config.extraArgs,
        cwd: config.cwd,
        sourceMap: sourceFileMap
    };
    return debugConfig;
}
// Shared output channel used to surface cargo build logs (and diagnostics)
// produced while preparing a debug session.
const debugOutput = vscode.window.createOutputChannel("Debug");
async function getCppvsDebugConfig(config: ra.Runnable, sourceFileMap: Record<string, string>): Promise<vscode.DebugConfiguration> {
debugOutput.clear();
const cargo = new Cargo(config.cwd || '.', debugOutput);
const executable = await cargo.executableFromArgs(config.args);
// if we are here, there were no compilation errors.
return {
type: (os.platform() === "win32") ? "cppvsdbg" : 'cppdbg',
request: "launch",
name: config.label,
program: executable,
args: config.extraArgs,
cwd: config.cwd,
sourceFileMap: sourceFileMap,
};
}
export function debugSingle(ctx: Ctx): Cmd { export function debugSingle(ctx: Ctx): Cmd {
return async (config: ra.Runnable) => { return async (config: ra.Runnable) => {
const editor = ctx.activeRustEditor; const editor = ctx.activeRustEditor;
if (!editor) return; if (!editor) return;
if (!vscode.extensions.getExtension("vadimcn.vscode-lldb")) {
vscode.window.showErrorMessage("Install `vadimcn.vscode-lldb` extension for debugging"); const lldbId = "vadimcn.vscode-lldb";
const cpptoolsId = "ms-vscode.cpptools";
const debugEngineId = ctx.config.debug.engine;
let debugEngine = null;
if (debugEngineId === "auto") {
debugEngine = vscode.extensions.getExtension(lldbId);
if (!debugEngine) {
debugEngine = vscode.extensions.getExtension(cpptoolsId);
}
}
else {
debugEngine = vscode.extensions.getExtension(debugEngineId);
}
if (!debugEngine) {
vscode.window.showErrorMessage(`Install [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=${lldbId})`
+ ` or [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=${cpptoolsId}) extension for debugging.`);
return; return;
} }
const debugConfig = { const debugConfig = lldbId === debugEngine.id
type: "lldb", ? getLldbDebugConfig(config, ctx.config.debug.sourceFileMap)
request: "launch", : await getCppvsDebugConfig(config, ctx.config.debug.sourceFileMap);
name: config.label,
cargo: {
args: config.args,
},
args: config.extraArgs,
cwd: config.cwd
};
return vscode.debug.startDebugging(undefined, debugConfig); return vscode.debug.startDebugging(undefined, debugConfig);
}; };

View file

@ -92,7 +92,6 @@ export class Config {
get askBeforeDownload() { return this.get<boolean>("updates.askBeforeDownload"); } get askBeforeDownload() { return this.get<boolean>("updates.askBeforeDownload"); }
get traceExtension() { return this.get<boolean>("trace.extension"); } get traceExtension() { return this.get<boolean>("trace.extension"); }
get inlayHints() { get inlayHints() {
return { return {
typeHints: this.get<boolean>("inlayHints.typeHints"), typeHints: this.get<boolean>("inlayHints.typeHints"),
@ -107,4 +106,12 @@ export class Config {
command: this.get<string>("checkOnSave.command"), command: this.get<string>("checkOnSave.command"),
}; };
} }
// Debugging settings (`rust-analyzer.debug.*`): the preferred debug-engine
// extension id and optional source-file mappings passed to the engine.
get debug() {
return {
engine: this.get<string>("debug.engine"),
sourceFileMap: this.get<Record<string, string>>("debug.sourceFileMap"),
};
}
} }

View file

@ -162,7 +162,6 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc {
"RECORD_LIT", "RECORD_LIT",
"RECORD_FIELD_LIST", "RECORD_FIELD_LIST",
"RECORD_FIELD", "RECORD_FIELD",
"TRY_BLOCK_EXPR",
"BOX_EXPR", "BOX_EXPR",
// postfix // postfix
"CALL_EXPR", "CALL_EXPR",
@ -440,7 +439,6 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {
} }
struct IfExpr: AttrsOwner { T![if], Condition } struct IfExpr: AttrsOwner { T![if], Condition }
struct LoopExpr: AttrsOwner, LoopBodyOwner { T![loop] } struct LoopExpr: AttrsOwner, LoopBodyOwner { T![loop] }
struct TryBlockExpr: AttrsOwner { T![try], body: BlockExpr }
struct ForExpr: AttrsOwner, LoopBodyOwner { struct ForExpr: AttrsOwner, LoopBodyOwner {
T![for], T![for],
Pat, Pat,
@ -451,7 +449,7 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {
struct ContinueExpr: AttrsOwner { T![continue], T![lifetime] } struct ContinueExpr: AttrsOwner { T![continue], T![lifetime] }
struct BreakExpr: AttrsOwner { T![break], T![lifetime], Expr } struct BreakExpr: AttrsOwner { T![break], T![lifetime], Expr }
struct Label { T![lifetime] } struct Label { T![lifetime] }
struct BlockExpr: AttrsOwner { Label, T![unsafe], Block } struct BlockExpr: AttrsOwner { Label, T![unsafe], T![async], Block }
struct ReturnExpr: AttrsOwner { Expr } struct ReturnExpr: AttrsOwner { Expr }
struct CallExpr: ArgListOwner { Expr } struct CallExpr: ArgListOwner { Expr }
struct MethodCallExpr: AttrsOwner, ArgListOwner { struct MethodCallExpr: AttrsOwner, ArgListOwner {
@ -595,7 +593,7 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {
qualifier: Path, qualifier: Path,
} }
struct PathSegment { struct PathSegment {
T![::], T![<], NameRef, TypeArgList, ParamList, RetType, PathType, T![>] T![::], T![crate], T![<], NameRef, TypeArgList, ParamList, RetType, PathType, T![>]
} }
struct TypeArgList { struct TypeArgList {
T![::], T![::],
@ -722,7 +720,6 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {
FieldExpr, FieldExpr,
AwaitExpr, AwaitExpr,
TryExpr, TryExpr,
TryBlockExpr,
CastExpr, CastExpr,
RefExpr, RefExpr,
PrefixExpr, PrefixExpr,

View file

@ -50,21 +50,19 @@ fn dist_server(nightly: bool) -> Result<()> {
if cfg!(target_os = "linux") { if cfg!(target_os = "linux") {
std::env::set_var("CC", "clang"); std::env::set_var("CC", "clang");
run!( run!(
"cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release"
--target x86_64-unknown-linux-musl
"
// We'd want to add, but that requires setting the right linker somehow // We'd want to add, but that requires setting the right linker somehow
// --features=jemalloc // --features=jemalloc
)?; )?;
if !nightly { if !nightly {
run!("strip ./target/x86_64-unknown-linux-musl/release/rust-analyzer")?; run!("strip ./target/release/rust-analyzer")?;
} }
} else { } else {
run!("cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release")?; run!("cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release")?;
} }
let (src, dst) = if cfg!(target_os = "linux") { let (src, dst) = if cfg!(target_os = "linux") {
("./target/x86_64-unknown-linux-musl/release/rust-analyzer", "./dist/rust-analyzer-linux") ("./target/release/rust-analyzer", "./dist/rust-analyzer-linux")
} else if cfg!(target_os = "windows") { } else if cfg!(target_os = "windows") {
("./target/release/rust-analyzer.exe", "./dist/rust-analyzer-windows.exe") ("./target/release/rust-analyzer.exe", "./dist/rust-analyzer-windows.exe")
} else if cfg!(target_os = "macos") { } else if cfg!(target_os = "macos") {

View file

@ -10,23 +10,19 @@ pub mod pre_commit;
pub mod codegen; pub mod codegen;
mod ast_src; mod ast_src;
use anyhow::Context;
use std::{ use std::{
env, env,
io::Write,
path::{Path, PathBuf}, path::{Path, PathBuf},
process::{Command, Stdio},
}; };
use walkdir::{DirEntry, WalkDir}; use walkdir::{DirEntry, WalkDir};
use crate::{ use crate::{
codegen::Mode, codegen::Mode,
not_bash::{date_iso, fs2, pushd, rm_rf, run}, not_bash::{date_iso, fs2, pushd, pushenv, rm_rf, run},
}; };
pub use anyhow::Result; pub use anyhow::{bail, Context as _, Result};
const TOOLCHAIN: &str = "stable";
pub fn project_root() -> PathBuf { pub fn project_root() -> PathBuf {
Path::new( Path::new(
@ -55,54 +51,44 @@ pub fn rust_files(path: &Path) -> impl Iterator<Item = PathBuf> {
pub fn run_rustfmt(mode: Mode) -> Result<()> { pub fn run_rustfmt(mode: Mode) -> Result<()> {
let _dir = pushd(project_root()); let _dir = pushd(project_root());
let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
ensure_rustfmt()?; ensure_rustfmt()?;
match mode {
let check = if mode == Mode::Verify { "--check" } else { "" }; Mode::Overwrite => run!("cargo fmt"),
run!("rustup run {} -- cargo fmt -- {}", TOOLCHAIN, check)?; Mode::Verify => run!("cargo fmt -- --check"),
}?;
Ok(()) Ok(())
} }
fn reformat(text: impl std::fmt::Display) -> Result<String> { fn reformat(text: impl std::fmt::Display) -> Result<String> {
let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
ensure_rustfmt()?; ensure_rustfmt()?;
let mut rustfmt = Command::new("rustup") let stdout = run!(
.args(&["run", TOOLCHAIN, "--", "rustfmt", "--config-path"]) "rustfmt --config-path {} --config fn_single_line=true", project_root().join("rustfmt.toml").display();
.arg(project_root().join("rustfmt.toml")) <text.to_string().as_bytes()
.args(&["--config", "fn_single_line=true"]) )?;
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()?;
write!(rustfmt.stdin.take().unwrap(), "{}", text)?;
let output = rustfmt.wait_with_output()?;
let stdout = String::from_utf8(output.stdout)?;
let preamble = "Generated file, do not edit by hand, see `xtask/src/codegen`"; let preamble = "Generated file, do not edit by hand, see `xtask/src/codegen`";
Ok(format!("//! {}\n\n{}", preamble, stdout)) Ok(format!("//! {}\n\n{}\n", preamble, stdout))
} }
fn ensure_rustfmt() -> Result<()> { fn ensure_rustfmt() -> Result<()> {
match Command::new("rustup") let out = run!("rustfmt --version")?;
.args(&["run", TOOLCHAIN, "--", "cargo", "fmt", "--version"]) if !out.contains("stable") {
.stderr(Stdio::null()) bail!(
.stdout(Stdio::null()) "Failed to run rustfmt from toolchain 'stable'. \
.status() Please run `rustup component add rustfmt --toolchain stable` to install it.",
{ )
Ok(status) if status.success() => return Ok(()), }
_ => (),
};
run!("rustup toolchain install {}", TOOLCHAIN)?;
run!("rustup component add rustfmt --toolchain {}", TOOLCHAIN)?;
Ok(()) Ok(())
} }
pub fn run_clippy() -> Result<()> { pub fn run_clippy() -> Result<()> {
match Command::new("rustup") if run!("cargo clippy --version").is_err() {
.args(&["run", TOOLCHAIN, "--", "cargo", "clippy", "--version"]) bail!(
.stderr(Stdio::null()) "Failed run cargo clippy. \
.stdout(Stdio::null()) Please run `rustup component add clippy` to install it.",
.status() )
{ }
Ok(status) if status.success() => (),
_ => install_clippy().context("install clippy")?,
};
let allowed_lints = [ let allowed_lints = [
"clippy::collapsible_if", "clippy::collapsible_if",
@ -110,27 +96,24 @@ pub fn run_clippy() -> Result<()> {
"clippy::nonminimal_bool", "clippy::nonminimal_bool",
"clippy::redundant_pattern_matching", "clippy::redundant_pattern_matching",
]; ];
run!( run!("cargo clippy --all-features --all-targets -- -A {}", allowed_lints.join(" -A "))?;
"rustup run {} -- cargo clippy --all-features --all-targets -- -A {}",
TOOLCHAIN,
allowed_lints.join(" -A ")
)?;
Ok(())
}
fn install_clippy() -> Result<()> {
run!("rustup toolchain install {}", TOOLCHAIN)?;
run!("rustup component add clippy --toolchain {}", TOOLCHAIN)?;
Ok(()) Ok(())
} }
pub fn run_fuzzer() -> Result<()> { pub fn run_fuzzer() -> Result<()> {
let _d = pushd("./crates/ra_syntax"); let _d = pushd("./crates/ra_syntax");
let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly");
if run!("cargo fuzz --help").is_err() { if run!("cargo fuzz --help").is_err() {
run!("cargo install cargo-fuzz")?; run!("cargo install cargo-fuzz")?;
}; };
run!("rustup run nightly -- cargo fuzz run parser")?; // Expecting nightly rustc
let out = run!("rustc --version")?;
if !out.contains("nightly") {
bail!("fuzz tests require nightly rustc")
}
run!("cargo fuzz run parser")?;
Ok(()) Ok(())
} }

View file

@ -3,6 +3,8 @@
use std::{ use std::{
cell::RefCell, cell::RefCell,
env, env,
ffi::OsString,
io::Write,
path::{Path, PathBuf}, path::{Path, PathBuf},
process::{Command, Stdio}, process::{Command, Stdio},
}; };
@ -57,7 +59,10 @@ macro_rules! _run {
run!($($expr),*; echo = true) run!($($expr),*; echo = true)
}; };
($($expr:expr),* ; echo = $echo:expr) => { ($($expr:expr),* ; echo = $echo:expr) => {
$crate::not_bash::run_process(format!($($expr),*), $echo) $crate::not_bash::run_process(format!($($expr),*), $echo, None)
};
($($expr:expr),* ; <$stdin:expr) => {
$crate::not_bash::run_process(format!($($expr),*), false, Some($stdin))
}; };
} }
pub(crate) use _run as run; pub(crate) use _run as run;
@ -77,6 +82,21 @@ impl Drop for Pushd {
} }
} }
pub struct Pushenv {
_p: (),
}
pub fn pushenv(var: &str, value: &str) -> Pushenv {
Env::with(|env| env.pushenv(var.into(), value.into()));
Pushenv { _p: () }
}
impl Drop for Pushenv {
fn drop(&mut self) {
Env::with(|env| env.popenv())
}
}
pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> { pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> {
let path = path.as_ref(); let path = path.as_ref();
if !path.exists() { if !path.exists() {
@ -90,15 +110,15 @@ pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> {
} }
#[doc(hidden)] #[doc(hidden)]
pub fn run_process(cmd: String, echo: bool) -> Result<String> { pub fn run_process(cmd: String, echo: bool, stdin: Option<&[u8]>) -> Result<String> {
run_process_inner(&cmd, echo).with_context(|| format!("process `{}` failed", cmd)) run_process_inner(&cmd, echo, stdin).with_context(|| format!("process `{}` failed", cmd))
} }
pub fn date_iso() -> Result<String> { pub fn date_iso() -> Result<String> {
run!("date --iso --utc") run!("date --iso --utc")
} }
fn run_process_inner(cmd: &str, echo: bool) -> Result<String> { fn run_process_inner(cmd: &str, echo: bool, stdin: Option<&[u8]>) -> Result<String> {
let mut args = shelx(cmd); let mut args = shelx(cmd);
let binary = args.remove(0); let binary = args.remove(0);
let current_dir = Env::with(|it| it.cwd().to_path_buf()); let current_dir = Env::with(|it| it.cwd().to_path_buf());
@ -107,12 +127,17 @@ fn run_process_inner(cmd: &str, echo: bool) -> Result<String> {
println!("> {}", cmd) println!("> {}", cmd)
} }
let output = Command::new(binary) let mut command = Command::new(binary);
.args(args) command.args(args).current_dir(current_dir).stderr(Stdio::inherit());
.current_dir(current_dir) let output = match stdin {
.stdin(Stdio::null()) None => command.stdin(Stdio::null()).output(),
.stderr(Stdio::inherit()) Some(stdin) => {
.output()?; command.stdin(Stdio::piped()).stdout(Stdio::piped());
let mut process = command.spawn()?;
process.stdin.take().unwrap().write_all(stdin)?;
process.wait_with_output()
}
}?;
let stdout = String::from_utf8(output.stdout)?; let stdout = String::from_utf8(output.stdout)?;
if echo { if echo {
@ -133,13 +158,15 @@ fn shelx(cmd: &str) -> Vec<String> {
struct Env { struct Env {
pushd_stack: Vec<PathBuf>, pushd_stack: Vec<PathBuf>,
pushenv_stack: Vec<(OsString, Option<OsString>)>,
} }
impl Env { impl Env {
fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T { fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T {
thread_local! { thread_local! {
static ENV: RefCell<Env> = RefCell::new(Env { static ENV: RefCell<Env> = RefCell::new(Env {
pushd_stack: vec![env::current_dir().unwrap()] pushd_stack: vec![env::current_dir().unwrap()],
pushenv_stack: vec![],
}); });
} }
ENV.with(|it| f(&mut *it.borrow_mut())) ENV.with(|it| f(&mut *it.borrow_mut()))
@ -154,6 +181,17 @@ impl Env {
self.pushd_stack.pop().unwrap(); self.pushd_stack.pop().unwrap();
env::set_current_dir(self.cwd()).unwrap(); env::set_current_dir(self.cwd()).unwrap();
} }
fn pushenv(&mut self, var: OsString, value: OsString) {
self.pushenv_stack.push((var.clone(), env::var_os(&var)));
env::set_var(var, value)
}
fn popenv(&mut self) {
let (var, value) = self.pushenv_stack.pop().unwrap();
match value {
None => env::remove_var(var),
Some(value) => env::set_var(var, value),
}
}
fn cwd(&self) -> &Path { fn cwd(&self) -> &Path {
self.pushd_stack.last().unwrap() self.pushd_stack.last().unwrap()
} }