⬆️ rust-analyzer

Laurențiu Nicola 2022-08-02 09:05:16 +03:00
parent a1f1b95d00
commit 9d2cb42a41
45 changed files with 766 additions and 242 deletions

@@ -34,8 +34,21 @@ jobs:
           git config --global user.email "runner@gha.local"
           git config --global user.name "Github Action"
           rm Cargo.lock
+          # Fix names for crates that were published before switch to kebab-case.
+          cargo workspaces rename --from base-db base_db
+          cargo workspaces rename --from hir-def hir_def
+          cargo workspaces rename --from hir-expand hir_expand
+          cargo workspaces rename --from hir-ty hir_ty
+          cargo workspaces rename --from ide-assists ide_assists
+          cargo workspaces rename --from ide-completion ide_completion
+          cargo workspaces rename --from ide-db ide_db
+          cargo workspaces rename --from ide-diagnostics ide_diagnostics
+          cargo workspaces rename --from ide-ssr ide_ssr
+          cargo workspaces rename --from proc-macro-api proc_macro_api
+          cargo workspaces rename --from proc-macro-srv proc_macro_srv
+          cargo workspaces rename --from project-model project_model
+          cargo workspaces rename --from test-utils test_utils
+          cargo workspaces rename --from text-edit text_edit
           cargo workspaces rename ra_ap_%n
           find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} +
-          # Fix names for crates that were published before switch to kebab-case.
-          find crates -name 'Cargo.toml' -exec sed -i "s/ra_ap_base-db/ra_ap_base_db/g; s/ra_ap_hir-def/ra_ap_hir_def/g; s/ra_ap_hir-expand/ra_ap_hir_expand/g; s/ra_ap_hir-ty/ra_ap_hir_ty/g; s/ra_ap_ide-assists/ra_ap_ide_assists/g; s/ra_ap_ide-completion/ra_ap_ide_completion/g; s/ra_ap_ide-db/ra_ap_ide_db/g; s/ra_ap_ide-diagnostics/ra_ap_ide_diagnostics/g; s/ra_ap_ide-ssr/ra_ap_ide_ssr/g; s/ra_ap_proc-macro-api/ra_ap_proc_macro_api/g; s/ra_ap_proc-macro-srv/ra_ap_proc_macro_srv/g; s/ra_ap_project-model/ra_ap_project_model/g; s/ra_ap_test-utils/ra_ap_test_utils/g; s/ra_ap_text-edit/ra_ap_text_edit/g" {} +
           cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$PATCH

@@ -124,13 +124,24 @@ impl RawAttrs {
     pub(crate) fn merge(&self, other: Self) -> Self {
         // FIXME: This needs to fixup `AttrId`s
-        match (&self.entries, &other.entries) {
+        match (&self.entries, other.entries) {
             (None, None) => Self::EMPTY,
-            (Some(entries), None) | (None, Some(entries)) => {
-                Self { entries: Some(entries.clone()) }
-            }
+            (None, entries @ Some(_)) => Self { entries },
+            (Some(entries), None) => Self { entries: Some(entries.clone()) },
             (Some(a), Some(b)) => {
-                Self { entries: Some(a.iter().chain(b.iter()).cloned().collect()) }
+                let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1);
+                Self {
+                    entries: Some(
+                        a.iter()
+                            .cloned()
+                            .chain(b.iter().map(|it| {
+                                let mut it = it.clone();
+                                it.id.ast_index += last_ast_index;
+                                it
+                            }))
+                            .collect(),
+                    ),
+                }
             }
         }
     }

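The interesting part of this hunk is the `AttrId` fixup: when two attribute lists are concatenated, the indices of the second list have to be shifted past the last index of the first, or the merged list would contain colliding ids. A minimal standalone sketch of the same bookkeeping, using toy stand-in types rather than the real `RawAttrs`/`AttrId`:

    #[derive(Clone, Debug, PartialEq)]
    struct Attr {
        ast_index: u32, // position of the attribute in its owner's source
        name: String,
    }

    // Merge two attribute lists, re-basing the indices of `b` so they
    // continue after the last index of `a` (mirrors RawAttrs::merge).
    fn merge(a: &[Attr], b: &[Attr]) -> Vec<Attr> {
        let last_ast_index = a.last().map_or(0, |it| it.ast_index + 1);
        a.iter()
            .cloned()
            .chain(b.iter().map(|it| {
                let mut it = it.clone();
                it.ast_index += last_ast_index;
                it
            }))
            .collect()
    }

    fn main() {
        let a = vec![Attr { ast_index: 0, name: "derive".into() }];
        let b = vec![Attr { ast_index: 0, name: "cfg".into() }];
        let merged = merge(&a, &b);
        // The second list no longer collides with the first.
        assert_eq!(merged[1].ast_index, 1);
    }
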
@@ -5,6 +5,7 @@ use std::collections::hash_map::Entry;

 use base_db::CrateId;
 use hir_expand::{name::Name, AstId, MacroCallId};
+use itertools::Itertools;
 use once_cell::sync::Lazy;
 use profile::Count;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -97,15 +98,14 @@ pub(crate) enum BuiltinShadowMode {
 impl ItemScope {
     pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a {
         // FIXME: shadowing
-        let keys: FxHashSet<_> = self
-            .types
+        self.types
             .keys()
             .chain(self.values.keys())
             .chain(self.macros.keys())
             .chain(self.unresolved.iter())
-            .collect();
-
-        keys.into_iter().map(move |name| (name, self.get(name)))
+            .sorted()
+            .unique()
+            .map(move |name| (name, self.get(name)))
     }

     pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {

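Replacing the intermediate `FxHashSet` with itertools' `sorted().unique()` both deduplicates the names and makes the iteration order deterministic, which a hash set does not guarantee. A small self-contained sketch of the same pattern (itertools crate assumed as a dependency):

    use itertools::Itertools;

    fn main() {
        // Names may be contributed by several namespaces, with duplicates.
        let types = ["Foo", "Bar"];
        let values = ["bar", "Bar"];

        // sorted() + unique() yields each name once, in a stable order,
        // unlike collecting into a hash set first.
        let entries: Vec<_> = types.iter().chain(values.iter()).sorted().unique().collect();
        assert_eq!(entries, [&"Bar", &"Foo", &"bar"]);
    }
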
@@ -1055,7 +1055,7 @@ impl DefCollector<'_> {
         };
         let mut res = ReachedFixedPoint::Yes;
         macros.retain(|directive| {
-            let resolver2 = |path| {
+            let resolver = |path| {
                 let resolved_res = self.def_map.resolve_path_fp_with_macro(
                     self.db,
                     ResolveMode::Other,
@@ -1068,7 +1068,7 @@ impl DefCollector<'_> {
                     .take_macros()
                     .map(|it| (it, macro_id_to_def_id(self.db, it)))
             };
-            let resolver = |path| resolver2(path).map(|(_, it)| it);
+            let resolver_def_id = |path| resolver(path).map(|(_, it)| it);

             match &directive.kind {
                 MacroDirectiveKind::FnLike { ast_id, expand_to } => {
@@ -1077,7 +1077,7 @@ impl DefCollector<'_> {
                         ast_id,
                         *expand_to,
                         self.def_map.krate,
-                        &resolver,
+                        &resolver_def_id,
                         &mut |_err| (),
                     );
                     if let Ok(Ok(call_id)) = call_id {
@@ -1093,7 +1093,7 @@ impl DefCollector<'_> {
                         *derive_attr,
                         *derive_pos as u32,
                         self.def_map.krate,
-                        &resolver2,
+                        &resolver,
                     );

                     if let Ok((macro_id, def_id, call_id)) = id {
@@ -1158,7 +1158,7 @@ impl DefCollector<'_> {
                         }
                     }

-                    let def = match resolver(path.clone()) {
+                    let def = match resolver_def_id(path.clone()) {
                         Some(def) if def.is_attribute() => def,
                         _ => return true,
                     };
@@ -1292,7 +1292,8 @@ impl DefCollector<'_> {
             true
         });
         // Attribute resolution can add unresolved macro invocations, so concatenate the lists.
-        self.unresolved_macros.extend(macros);
+        macros.extend(mem::take(&mut self.unresolved_macros));
+        self.unresolved_macros = macros;

         for (module_id, depth, container, macro_call_id) in resolved {
             self.collect_macro_expansion(module_id, macro_call_id, depth, container);

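The final two lines swap the concatenation order: newly collected unresolved macros now come before the previously stashed ones, and `mem::take` lets the old list be moved out without cloning. A toy sketch of that idiom, with hypothetical names standing in for the collector's fields:

    use std::mem;

    struct Collector {
        unresolved: Vec<&'static str>,
    }

    impl Collector {
        // Prepend the freshly collected items to the already-unresolved ones
        // without cloning the old list (mirrors the DefCollector change).
        fn stash(&mut self, mut fresh: Vec<&'static str>) {
            fresh.extend(mem::take(&mut self.unresolved));
            self.unresolved = fresh;
        }
    }

    fn main() {
        let mut c = Collector { unresolved: vec!["old"] };
        c.stash(vec!["new"]);
        assert_eq!(c.unresolved, ["new", "old"]);
    }
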
@@ -34,6 +34,7 @@ pub trait TyExt {
     fn callable_sig(&self, db: &dyn HirDatabase) -> Option<CallableSig>;

     fn strip_references(&self) -> &Ty;
+    fn strip_reference(&self) -> &Ty;

     /// If this is a `dyn Trait`, returns that trait.
     fn dyn_trait(&self) -> Option<TraitId>;
@@ -182,6 +183,10 @@ impl TyExt for Ty {
         t
     }

+    fn strip_reference(&self) -> &Ty {
+        self.as_reference().map_or(self, |(ty, _, _)| ty)
+    }
+
     fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>> {
         match self.kind(Interner) {
             TyKind::OpaqueType(opaque_ty_id, subst) => {

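Unlike the pre-existing `strip_references`, which loops until no reference remains, the new `strip_reference` peels at most one `&`/`&mut` layer, falling back to the type itself via `Option::map_or`. The same shape on a toy type (a stand-in `Ty`, not the real chalk-based one):

    #[derive(Debug, PartialEq)]
    enum Ty {
        Unit,
        Ref(Box<Ty>),
    }

    impl Ty {
        fn as_reference(&self) -> Option<&Ty> {
            match self {
                Ty::Ref(inner) => Some(inner),
                _ => None,
            }
        }

        // Strip exactly one reference layer, or return self unchanged.
        fn strip_reference(&self) -> &Ty {
            self.as_reference().map_or(self, |ty| ty)
        }
    }

    fn main() {
        let ty = Ty::Ref(Box::new(Ty::Ref(Box::new(Ty::Unit))));
        // One layer gone, one remains.
        assert_eq!(ty.strip_reference(), &Ty::Ref(Box::new(Ty::Unit)));
    }
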
@@ -2769,6 +2769,10 @@ impl Type {
         self.derived(self.ty.strip_references().clone())
     }

+    pub fn strip_reference(&self) -> Type {
+        self.derived(self.ty.strip_reference().clone())
+    }
+
     pub fn is_unknown(&self) -> bool {
         self.ty.is_unknown()
     }

@@ -324,6 +324,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.resolve_type(ty)
     }

+    pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
+        self.imp.resolve_trait(trait_)
+    }
+
     // FIXME: Figure out a nice interface to inspect adjustments
     pub fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
         self.imp.is_implicit_reborrow(expr)
@@ -924,7 +928,12 @@ impl<'db> SemanticsImpl<'db> {
     }

     fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
-        self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(|it| it.value)
+        self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
+            |InFile { file_id, value }| {
+                self.cache(find_root(value.syntax()), file_id);
+                value
+            },
+        )
     }

     fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
@@ -1009,6 +1018,20 @@ impl<'db> SemanticsImpl<'db> {
         Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
     }

+    fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+        let analyze = self.analyze(path.syntax())?;
+        let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
+        let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
+        let hir_path = Path::from_src(path.clone(), &ctx)?;
+        match analyze
+            .resolver
+            .resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())?
+        {
+            TypeNs::TraitId(id) => Some(Trait { id }),
+            _ => None,
+        }
+    }
+
     fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
         self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr)
     }

@@ -36,7 +36,7 @@ use crate::{
 //     pub struct Baz;
 // }
 //
-// use foo::{Baz, Bar};
+// use foo::{Bar, Baz};
 //
 // fn qux(bar: Bar, baz: Baz) {}
 // ```
@@ -281,7 +281,7 @@ mod foo {
     pub fn f() {}
 }

-use foo::{Baz, Bar, f};
+use foo::{Bar, Baz, f};

 fn qux(bar: Bar, baz: Baz) {
     f();
@@ -351,7 +351,7 @@ mod foo {
     pub fn f() {}
 }

-use foo::{Baz, Bar, f};
+use foo::{Bar, Baz, f};

 fn qux(bar: Bar, baz: Baz) {
     f();
@@ -440,7 +440,7 @@ mod foo {
     }
 }

-use foo::{bar::{Baz, Bar, f}, baz::*};
+use foo::{bar::{Bar, Baz, f}, baz::*};

 fn qux(bar: Bar, baz: Baz) {
     f();
@@ -561,7 +561,7 @@ mod foo {

 use foo::{
     bar::{*, f},
-    baz::{g, qux::{q, h}}
+    baz::{g, qux::{h, q}}
 };

 fn qux(bar: Bar, baz: Baz) {

@@ -7,7 +7,7 @@ use ide_db::{
     imports::insert_use::remove_path_if_in_use_stmt,
     path_transform::PathTransform,
     search::{FileReference, SearchScope},
-    syntax_helpers::node_ext::expr_as_name_ref,
+    syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
     RootDatabase,
 };
 use itertools::{izip, Itertools};
@@ -301,7 +301,16 @@ fn inline(
     params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
     CallInfo { node, arguments, generic_arg_list }: &CallInfo,
 ) -> ast::Expr {
-    let body = fn_body.clone_for_update();
+    let body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
+        cov_mark::hit!(inline_call_defined_in_macro);
+        if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) {
+            body
+        } else {
+            fn_body.clone_for_update()
+        }
+    } else {
+        fn_body.clone_for_update()
+    };
     let usages_for_locals = |local| {
         Definition::Local(local)
             .usages(sema)
@@ -1144,6 +1153,41 @@ fn bar() -> u32 {
     x
 }) + foo()
 }
+"#,
+    )
+}
+
+#[test]
+fn inline_call_defined_in_macro() {
+    cov_mark::check!(inline_call_defined_in_macro);
+    check_assist(
+        inline_call,
+        r#"
+macro_rules! define_foo {
+    () => { fn foo() -> u32 {
+        let x = 0;
+        x
+    } };
+}
+define_foo!();
+fn bar() -> u32 {
+    foo$0()
+}
+"#,
+        r#"
+macro_rules! define_foo {
+    () => { fn foo() -> u32 {
+        let x = 0;
+        x
+    } };
+}
+define_foo!();
+fn bar() -> u32 {
+    {
+        let x = 0;
+        x
+    }
+}
+"#,
"#,
    )
}

@@ -535,7 +535,7 @@ mod foo {
     pub struct Baz;
 }

-use foo::{Baz, Bar};
+use foo::{Bar, Baz};

 fn qux(bar: Bar, baz: Baz) {}
 "#####,

@@ -400,7 +400,7 @@ impl Completions {
     ) {
         if let PathCompletionCtx { kind: PathKind::Pat { pat_ctx }, .. } = path_ctx {
             cov_mark::hit!(enum_variant_pattern_path);
-            self.add_variant_pat(ctx, pat_ctx, variant, local_name);
+            self.add_variant_pat(ctx, pat_ctx, Some(path_ctx), variant, local_name);
             return;
         }
@@ -484,12 +484,14 @@ impl Completions {
         &mut self,
         ctx: &CompletionContext<'_>,
         pattern_ctx: &PatternContext,
+        path_ctx: Option<&PathCompletionCtx>,
         variant: hir::Variant,
         local_name: Option<hir::Name>,
     ) {
         self.add_opt(render_variant_pat(
             RenderContext::new(ctx),
             pattern_ctx,
+            path_ctx,
             variant,
             local_name.clone(),
             None,
@@ -504,7 +506,14 @@ impl Completions {
         path: hir::ModPath,
     ) {
         let path = Some(&path);
-        self.add_opt(render_variant_pat(RenderContext::new(ctx), pattern_ctx, variant, None, path));
+        self.add_opt(render_variant_pat(
+            RenderContext::new(ctx),
+            pattern_ctx,
+            None,
+            variant,
+            None,
+            path,
+        ));
     }

     pub(crate) fn add_struct_pat(

@@ -115,7 +115,7 @@ pub(crate) fn complete_attribute_path(
             });
             acc.add_nameref_keywords_with_colon(ctx);
         }
-        Qualified::Infer | Qualified::With { .. } => {}
+        Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
     }

     let attributes = annotated_item_kind.and_then(|kind| {

@@ -97,7 +97,7 @@ pub(crate) fn complete_derive_path(
             });
             acc.add_nameref_keywords_with_colon(ctx);
         }
-        Qualified::Infer | Qualified::With { .. } => {}
+        Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
     }
 }

@@ -11,7 +11,14 @@ pub(crate) fn complete_expr_path(
     acc: &mut Completions,
     ctx: &CompletionContext<'_>,
     path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
-    &ExprCtx {
+    expr_ctx: &ExprCtx,
+) {
+    let _p = profile::span("complete_expr_path");
+    if !ctx.qualifier_ctx.none() {
+        return;
+    }
+
+    let &ExprCtx {
         in_block_expr,
         in_loop_body,
         after_if_expr,
@@ -23,12 +30,7 @@ pub(crate) fn complete_expr_path(
         ref impl_,
         in_match_guard,
         ..
-    }: &ExprCtx,
-) {
-    let _p = profile::span("complete_expr_path");
-    if !ctx.qualifier_ctx.none() {
-        return;
-    }
+    } = expr_ctx;

     let wants_mut_token =
         ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false);
@@ -46,11 +48,32 @@ pub(crate) fn complete_expr_path(
     };

     match qualified {
-        Qualified::Infer => ctx
+        Qualified::TypeAnchor { ty: None, trait_: None } => ctx
             .traits_in_scope()
             .iter()
             .flat_map(|&it| hir::Trait::from(it).items(ctx.sema.db))
             .for_each(|item| add_assoc_item(acc, item)),
+        Qualified::TypeAnchor { trait_: Some(trait_), .. } => {
+            trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item))
+        }
+        Qualified::TypeAnchor { ty: Some(ty), trait_: None } => {
+            if let Some(hir::Adt::Enum(e)) = ty.as_adt() {
+                cov_mark::hit!(completes_variant_through_alias);
+                acc.add_enum_variants(ctx, path_ctx, e);
+            }
+
+            ctx.iterate_path_candidates(&ty, |item| {
+                add_assoc_item(acc, item);
+            });
+
+            // Iterate assoc types separately
+            ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+                if let hir::AssocItem::TypeAlias(ty) = item {
+                    acc.add_type_alias(ctx, ty)
+                }
+                None::<()>
+            });
+        }
         Qualified::With { resolution: None, .. } => {}
         Qualified::With { resolution: Some(resolution), .. } => {
             // Add associated types on type parameters and `Self`.
@@ -179,10 +202,21 @@ pub(crate) fn complete_expr_path(
                 }
             }

-            ctx.process_all_names(&mut |name, def| {
-                if scope_def_applicable(def) {
-                    acc.add_path_resolution(ctx, path_ctx, name, def);
+            ctx.process_all_names(&mut |name, def| match def {
+                ScopeDef::ModuleDef(hir::ModuleDef::Trait(t)) => {
+                    let assocs = t.items_with_supertraits(ctx.db);
+                    match &*assocs {
+                        // traits with no assoc items are unusable as expressions since
+                        // there is no associated item path that can be constructed with them
+                        [] => (),
+                        // FIXME: Render the assoc item with the trait qualified
+                        &[_item] => acc.add_path_resolution(ctx, path_ctx, name, def),
+                        // FIXME: Append `::` to the thing here, since a trait on its own won't work
+                        [..] => acc.add_path_resolution(ctx, path_ctx, name, def),
+                    }
                 }
+                _ if scope_def_applicable(def) => acc.add_path_resolution(ctx, path_ctx, name, def),
+                _ => (),
             });

             if is_func_update.is_none() {

@@ -66,7 +66,7 @@ pub(crate) fn complete_item_list(
             });
             acc.add_nameref_keywords_with_colon(ctx);
         }
-        Qualified::Infer | Qualified::No | Qualified::With { .. } => {}
+        Qualified::TypeAnchor { .. } | Qualified::No | Qualified::With { .. } => {}
     }
 }

@@ -74,7 +74,7 @@ pub(crate) fn complete_pattern(
             hir::ModuleDef::Variant(variant)
                 if refutable || single_variant_enum(variant.parent_enum(ctx.db)) =>
             {
-                acc.add_variant_pat(ctx, pattern_ctx, variant, Some(name.clone()));
+                acc.add_variant_pat(ctx, pattern_ctx, None, variant, Some(name.clone()));
                 true
             }
             hir::ModuleDef::Adt(hir::Adt::Enum(e)) => refutable || single_variant_enum(e),
@@ -180,6 +180,6 @@ pub(crate) fn complete_pattern_path(
             acc.add_nameref_keywords_with_colon(ctx);
         }
-        Qualified::Infer | Qualified::With { .. } => {}
+        Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
     }
 }

@@ -49,11 +49,27 @@ pub(crate) fn complete_type_path(
     };

     match qualified {
-        Qualified::Infer => ctx
+        Qualified::TypeAnchor { ty: None, trait_: None } => ctx
             .traits_in_scope()
             .iter()
             .flat_map(|&it| hir::Trait::from(it).items(ctx.sema.db))
             .for_each(|item| add_assoc_item(acc, item)),
+        Qualified::TypeAnchor { trait_: Some(trait_), .. } => {
+            trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item))
+        }
+        Qualified::TypeAnchor { ty: Some(ty), trait_: None } => {
+            ctx.iterate_path_candidates(&ty, |item| {
+                add_assoc_item(acc, item);
+            });
+
+            // Iterate assoc types separately
+            ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+                if let hir::AssocItem::TypeAlias(ty) = item {
+                    acc.add_type_alias(ctx, ty)
+                }
+                None::<()>
+            });
+        }
         Qualified::With { resolution: None, .. } => {}
         Qualified::With { resolution: Some(resolution), .. } => {
             // Add associated types on type parameters and `Self`.

@@ -115,6 +115,6 @@ pub(crate) fn complete_use_path(
             });
             acc.add_nameref_keywords_with_colon(ctx);
         }
-        Qualified::Infer | Qualified::With { resolution: None, .. } => {}
+        Qualified::TypeAnchor { .. } | Qualified::With { resolution: None, .. } => {}
     }
 }

@@ -29,7 +29,7 @@ pub(crate) fn complete_vis_path(
             acc.add_super_keyword(ctx, *super_chain_len);
         }
-        Qualified::Absolute | Qualified::Infer | Qualified::With { .. } => {}
+        Qualified::Absolute | Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
         Qualified::No => {
             if !has_in_token {
                 cov_mark::hit!(kw_completion_in);

@@ -193,7 +193,10 @@ pub(super) enum Qualified {
         super_chain_len: Option<usize>,
     },
     /// <_>::
-    Infer,
+    TypeAnchor {
+        ty: Option<hir::Type>,
+        trait_: Option<hir::Trait>,
+    },
     /// Whether the path is an absolute path
     Absolute,
 }

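For context, a `TypeAnchor` qualifier is the `<T>::` / `<T as Trait>::` path syntax; the old `Infer` variant only covered the `<_>::` case. Plain Rust showing the two anchored forms the completion engine now distinguishes (toy trait and type names):

    trait Greet {
        fn greet() -> &'static str;
    }

    struct Person;

    impl Person {
        fn wave() -> &'static str {
            "wave"
        }
    }

    impl Greet for Person {
        fn greet() -> &'static str {
            "hello"
        }
    }

    fn main() {
        // `<Person>::` anchors on a type: inherent and trait items are both visible.
        assert_eq!(<Person>::wave(), "wave");
        assert_eq!(<Person>::greet(), "hello");
        // `<Person as Greet>::` also fixes the trait: only `Greet` items resolve.
        assert_eq!(<Person as Greet>::greet(), "hello");
    }
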
@@ -162,11 +162,52 @@ impl<'a> CompletionContext<'a> {
     }

     /// Calculate the expected type and name of the cursor position.
-    fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) {
+    fn expected_type_and_name(
+        &self,
+        name_like: &ast::NameLike,
+    ) -> (Option<Type>, Option<NameOrNameRef>) {
         let mut node = match self.token.parent() {
             Some(it) => it,
             None => return (None, None),
         };

+        let strip_refs = |mut ty: Type| match name_like {
+            ast::NameLike::NameRef(n) => {
+                let p = match n.syntax().parent() {
+                    Some(it) => it,
+                    None => return ty,
+                };
+                let top_syn = match_ast! {
+                    match p {
+                        ast::FieldExpr(e) => e
+                            .syntax()
+                            .ancestors()
+                            .map_while(ast::FieldExpr::cast)
+                            .last()
+                            .map(|it| it.syntax().clone()),
+                        ast::PathSegment(e) => e
+                            .syntax()
+                            .ancestors()
+                            .skip(1)
+                            .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
+                            .find_map(ast::PathExpr::cast)
+                            .map(|it| it.syntax().clone()),
+                        _ => None
+                    }
+                };
+                let top_syn = match top_syn {
+                    Some(it) => it,
+                    None => return ty,
+                };
+                for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
+                    cov_mark::hit!(expected_type_fn_param_ref);
+                    ty = ty.strip_reference();
+                }
+                ty
+            }
+            _ => ty,
+        };
+
         loop {
             break match_ast! {
                 match node {
@@ -199,13 +240,9 @@ impl<'a> CompletionContext<'a> {
                         self.token.clone(),
                     ).map(|ap| {
                         let name = ap.ident().map(NameOrNameRef::Name);
-                        let ty = if has_ref(&self.token) {
-                            cov_mark::hit!(expected_type_fn_param_ref);
-                            ap.ty.remove_ref()
-                        } else {
-                            Some(ap.ty)
-                        };
-                        (ty, name)
+
+                        let ty = strip_refs(ap.ty);
+                        (Some(ty), name)
                     })
                     .unwrap_or((None, None))
                 },
@@ -330,8 +367,6 @@ impl<'a> CompletionContext<'a> {
             return None;
         }

-        (self.expected_type, self.expected_name) = self.expected_type_and_name();
-
         // Overwrite the path kind for derives
         if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
             if let Some(ast::NameLike::NameRef(name_ref)) =
@@ -389,6 +424,7 @@ impl<'a> CompletionContext<'a> {
                 return Some(analysis);
             }
         };
+        (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like);
         let analysis = match name_like {
             ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime(
                 Self::classify_lifetime(&self.sema, original_file, lifetime)?,
@@ -556,7 +592,7 @@ impl<'a> CompletionContext<'a> {
             has_call_parens: false,
             has_macro_bang: false,
             qualified: Qualified::No,
-            parent: path.parent_path(),
+            parent: None,
             path: path.clone(),
             kind: PathKind::Item { kind: ItemListKind::SourceFile },
             has_type_args: false,
@@ -791,92 +827,125 @@ impl<'a> CompletionContext<'a> {
             PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
         };

+        let mut kind_macro_call = |it: ast::MacroCall| {
+            path_ctx.has_macro_bang = it.excl_token().is_some();
+            let parent = it.syntax().parent()?;
+            // Any path in an item list will be treated as a macro call by the parser
+            let kind = match_ast! {
+                match parent {
+                    ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
+                    ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
+                    ast::MacroType(ty) => make_path_kind_type(ty.into()),
+                    ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
+                    ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
+                        Some(it) => match_ast! {
+                            match it {
+                                ast::Trait(_) => ItemListKind::Trait,
+                                ast::Impl(it) => if it.trait_().is_some() {
+                                    ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
+                                } else {
+                                    ItemListKind::Impl
+                                },
+                                _ => return None
+                            }
+                        },
+                        None => return None,
+                    } },
+                    ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
+                    ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
+                    _ => return None,
+                }
+            };
+            Some(kind)
+        };
+        let make_path_kind_attr = |meta: ast::Meta| {
+            let attr = meta.parent_attr()?;
+            let kind = attr.kind();
+            let attached = attr.syntax().parent()?;
+            let is_trailing_outer_attr = kind != AttrKind::Inner
+                && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next)
+                    .is_none();
+            let annotated_item_kind =
+                if is_trailing_outer_attr { None } else { Some(attached.kind()) };
+            Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
+        };
+
         // Infer the path kind
         let parent = path.syntax().parent()?;
         let kind = match_ast! {
             match parent {
                 ast::PathType(it) => make_path_kind_type(it.into()),
                 ast::PathExpr(it) => {
                     if let Some(p) = it.syntax().parent() {
                         if ast::ExprStmt::can_cast(p.kind()) {
                             if let Some(kind) = inbetween_body_and_decl_check(p) {
                                 return Some(make_res(NameRefKind::Keyword(kind)));
                             }
                         }
                     }

                     path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));

                     make_path_kind_expr(it.into())
                 },
                 ast::TupleStructPat(it) => {
                     path_ctx.has_call_parens = true;
-                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
                 },
                 ast::RecordPat(it) => {
                     path_ctx.has_call_parens = true;
-                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
                 },
                 ast::PathPat(it) => {
                     PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
                 },
                 ast::MacroCall(it) => {
                     // A macro call in this position is usually a result of parsing recovery, so check that
                     if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
                         return Some(make_res(NameRefKind::Keyword(kind)));
                     }
-                    path_ctx.has_macro_bang = it.excl_token().is_some();
-                    let parent = it.syntax().parent()?;
-                    // Any path in an item list will be treated as a macro call by the parser
-                    match_ast! {
-                        match parent {
-                            ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
-                            ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
-                            ast::MacroType(ty) => make_path_kind_type(ty.into()),
-                            ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
-                            ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
-                                Some(it) => match_ast! {
-                                    match it {
-                                        ast::Trait(_) => ItemListKind::Trait,
-                                        ast::Impl(it) => if it.trait_().is_some() {
-                                            ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
-                                        } else {
-                                            ItemListKind::Impl
-                                        },
-                                        _ => return None
-                                    }
-                                },
-                                None => return None,
-                            } },
-                            ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
-                            ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
-                            _ => return None,
-                        }
-                    }
+                    kind_macro_call(it)?
                 },
-                ast::Meta(meta) => {
-                    let attr = meta.parent_attr()?;
-                    let kind = attr.kind();
-                    let attached = attr.syntax().parent()?;
-                    let is_trailing_outer_attr = kind != AttrKind::Inner
-                        && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
-                    let annotated_item_kind = if is_trailing_outer_attr {
-                        None
-                    } else {
-                        Some(attached.kind())
-                    };
-                    PathKind::Attr {
-                        attr_ctx: AttrCtx {
-                            kind,
-                            annotated_item_kind,
-                        }
-                    }
-                },
-                ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
-                ast::UseTree(_) => PathKind::Use,
+                ast::Meta(meta) => make_path_kind_attr(meta)?,
+                ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+                ast::UseTree(_) => PathKind::Use,
+                // completing inside a qualifier
+                ast::Path(parent) => {
+                    path_ctx.parent = Some(parent.clone());
+                    let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
+                    match_ast! {
+                        match parent {
+                            ast::PathType(it) => make_path_kind_type(it.into()),
+                            ast::PathExpr(it) => {
+                                path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));

+                                make_path_kind_expr(it.into())
+                            },
+                            ast::TupleStructPat(it) => {
+                                path_ctx.has_call_parens = true;
+                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+                            },
+                            ast::RecordPat(it) => {
+                                path_ctx.has_call_parens = true;
+                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+                            },
+                            ast::PathPat(it) => {
+                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+                            },
+                            ast::MacroCall(it) => {
+                                kind_macro_call(it)?
+                            },
+                            ast::Meta(meta) => make_path_kind_attr(meta)?,
+                            ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+                            ast::UseTree(_) => PathKind::Use,
+                            ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+                            _ => return None,
+                        }
+                    }
+                },
+                ast::RecordExpr(it) => make_path_kind_expr(it.into()),
                 _ => return None,
             }
         };
@@ -884,49 +953,53 @@ impl<'a> CompletionContext<'a> {
         path_ctx.has_type_args = segment.generic_arg_list().is_some();

         // calculate the qualifier context
-        if let Some((path, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
+        if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
             path_ctx.use_tree_parent = use_tree_parent;
             if !use_tree_parent && segment.coloncolon_token().is_some() {
                 path_ctx.qualified = Qualified::Absolute;
             } else {
-                let path = path
+                let qualifier = qualifier
                     .segment()
                     .and_then(|it| find_node_in_file(original_file, &it))
                     .map(|it| it.parent_path());
-                if let Some(path) = path {
-                    // `<_>::$0`
-                    let is_infer_qualifier = path.qualifier().is_none()
-                        && matches!(
-                            path.segment().and_then(|it| it.kind()),
-                            Some(ast::PathSegmentKind::Type {
-                                type_ref: Some(ast::Type::InferType(_)),
-                                trait_ref: None,
-                            })
-                        );
+                if let Some(qualifier) = qualifier {
+                    let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
+                        Some(ast::PathSegmentKind::Type {
+                            type_ref: Some(type_ref),
+                            trait_ref,
+                        }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)),
+                        _ => None,
+                    };

-                    path_ctx.qualified = if is_infer_qualifier {
-                        Qualified::Infer
+                    path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
+                        let ty = match ty {
+                            ast::Type::InferType(_) => None,
+                            ty => sema.resolve_type(&ty),
+                        };
+                        let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
+                        Qualified::TypeAnchor { ty, trait_ }
                     } else {
-                        let res = sema.resolve_path(&path);
+                        let res = sema.resolve_path(&qualifier);

                         // For understanding how and why super_chain_len is calculated the way it
                         // is check the documentation at it's definition
                         let mut segment_count = 0;
-                        let super_count = iter::successors(Some(path.clone()), |p| p.qualifier())
-                            .take_while(|p| {
-                                p.segment()
-                                    .and_then(|s| {
-                                        segment_count += 1;
-                                        s.super_token()
-                                    })
-                                    .is_some()
-                            })
-                            .count();
+                        let super_count =
+                            iter::successors(Some(qualifier.clone()), |p| p.qualifier())
+                                .take_while(|p| {
+                                    p.segment()
+                                        .and_then(|s| {
+                                            segment_count += 1;
+                                            s.super_token()
+                                        })
+                                        .is_some()
+                                })
+                                .count();

                         let super_chain_len =
                             if segment_count > super_count { None } else { Some(super_count) };

-                        Qualified::With { path, resolution: res, super_chain_len }
+                        Qualified::With { path: qualifier, resolution: res, super_chain_len }
                     }
                 };
             }
@@ -1141,19 +1214,6 @@ fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
     Some((use_tree.path()?, true))
 }

-fn has_ref(token: &SyntaxToken) -> bool {
-    let mut token = token.clone();
-    for skip in [SyntaxKind::IDENT, SyntaxKind::WHITESPACE, T![mut]] {
-        if token.kind() == skip {
-            token = match token.prev_token() {
-                Some(it) => it,
-                None => return false,
-            }
-        }
-    }
-    token.kind() == T![&]
-}
-
 pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
     // oh my ...
     (|| {

@@ -391,3 +391,23 @@ fn foo($0: Foo) {}
         expect![[r#"ty: ?, name: ?"#]],
     );
 }
+
+#[test]
+fn expected_type_ref_prefix_on_field() {
+    check_expected_type_and_name(
+        r#"
+fn foo(_: &mut i32) {}
+struct S {
+    field: i32,
+}
+
+fn main() {
+    let s = S {
+        field: 100,
+    };
+    foo(&mut s.f$0);
+}
+"#,
+        expect!["ty: i32, name: ?"],
+    );
+}

@@ -1271,8 +1271,8 @@ fn main() {
             st S []
             st &mut S [type]
             st S []
-            fn main() []
             fn foo() []
+            fn main() []
         "#]],
     );
     check_relevance(
@@ -1288,8 +1288,8 @@ fn main() {
             lc s [type+name+local]
             st S [type]
             st S []
-            fn main() []
             fn foo() []
+            fn main() []
         "#]],
     );
     check_relevance(
@@ -1305,8 +1305,8 @@ fn main() {
             lc ssss [type+local]
             st S [type]
             st S []
-            fn main() []
             fn foo() []
+            fn main() []
         "#]],
     );
 }
@@ -1342,12 +1342,11 @@ fn main() {
             lc &t [type+local]
             st S []
             st &S [type]
-            st T []
             st S []
-            fn main() []
+            st T []
             fn foo() []
+            fn main() []
             md core []
-            tt Sized []
         "#]],
     )
 }
@@ -1389,12 +1388,11 @@ fn main() {
             lc &mut t [type+local]
             st S []
             st &mut S [type]
-            st T []
             st S []
-            fn main() []
+            st T []
             fn foo() []
+            fn main() []
             md core []
-            tt Sized []
         "#]],
     )
 }
@@ -1485,14 +1483,13 @@ fn main() {
         expect![[r#"
             st S []
             st &S [type]
-            st T []
             st S []
-            fn main() []
+            st T []
             fn bar() []
             fn &bar() [type]
             fn foo() []
+            fn main() []
             md core []
-            tt Sized []
         "#]],
     )
 }
@@ -1636,8 +1633,8 @@ fn foo() {
             ev Foo::B [type_could_unify]
             fn foo() []
             en Foo []
-            fn baz() []
             fn bar() []
+            fn baz() []
         "#]],
     );
 }
@@ -1727,9 +1724,9 @@ fn f() {
 }
 "#,
         expect![[r#"
-            md std []
             st Buffer []
             fn f() []
+            md std []
             tt BufRead (use std::io::BufRead) [requires_import]
             st BufReader (use std::io::BufReader) [requires_import]
             st BufWriter (use std::io::BufWriter) [requires_import]

@@ -85,7 +85,9 @@ fn render(
             item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
         }
         FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
-            item.ref_match(ref_match, receiver.syntax().text_range().start());
+            if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
+                item.ref_match(ref_match, original_expr.syntax().text_range().start());
+            }
         }
         _ => (),
     }

@@ -6,7 +6,7 @@ use itertools::Itertools;
 use syntax::SmolStr;

 use crate::{
-    context::{ParamContext, ParamKind, PatternContext},
+    context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext},
     render::{
         variant::{format_literal_label, visible_fields},
         RenderContext,
@@ -42,6 +42,7 @@ pub(crate) fn render_struct_pat(
 pub(crate) fn render_variant_pat(
     ctx: RenderContext<'_>,
     pattern_ctx: &PatternContext,
+    path_ctx: Option<&PathCompletionCtx>,
     variant: hir::Variant,
     local_name: Option<Name>,
     path: Option<&hir::ModPath>,
@@ -58,9 +59,23 @@ pub(crate) fn render_variant_pat(
             (name.to_smol_str(), name.escaped().to_smol_str())
         }
     };
-    let kind = variant.kind(ctx.db());
-    let label = format_literal_label(name.as_str(), kind);
-    let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?;
+
+    let (label, pat) = match path_ctx {
+        Some(PathCompletionCtx { has_call_parens: true, .. }) => (name, escaped_name.to_string()),
+        _ => {
+            let kind = variant.kind(ctx.db());
+            let label = format_literal_label(name.as_str(), kind);
+            let pat = render_pat(
+                &ctx,
+                pattern_ctx,
+                &escaped_name,
+                kind,
+                &visible_fields,
+                fields_omitted,
+            )?;
+            (label, pat)
+        }
+    };

     Some(build_completion(ctx, label, pat, variant))
 }

@@ -23,8 +23,6 @@ mod type_pos;
 mod use_tree;
 mod visibility;

-use std::mem;
-
 use hir::{db::DefDatabase, PrefixKind, Semantics};
 use ide_db::{
     base_db::{fixture::ChangeFixture, FileLoader, FilePosition},
@@ -107,12 +105,9 @@ fn completion_list_with_config(
 ) -> String {
     // filter out all but one builtintype completion for smaller test outputs
     let items = get_all_items(config, ra_fixture, trigger_character);
-    let mut bt_seen = false;
     let items = items
         .into_iter()
-        .filter(|it| {
-            it.kind() != CompletionItemKind::BuiltinType || !mem::replace(&mut bt_seen, true)
-        })
+        .filter(|it| it.kind() != CompletionItemKind::BuiltinType || it.label() == "u32")
         .filter(|it| include_keywords || it.kind() != CompletionItemKind::Keyword)
         .filter(|it| include_keywords || it.kind() != CompletionItemKind::Snippet)
         .sorted_by_key(|it| (it.kind(), it.label().to_owned(), it.detail().map(ToOwned::to_owned)))

@@ -44,7 +44,6 @@ fn baz() {
             st Record
             st Tuple
             st Unit
-            tt Trait
             un Union
             ev TupleV(…) TupleV(u32)
             bt u32
@@ -137,7 +136,6 @@ impl Unit {
             st Record
             st Tuple
             st Unit
-            tt Trait
             tp TypeParam
             un Union
             ev TupleV(…) TupleV(u32)
@@ -653,3 +651,22 @@ fn main() {
     "]],
     );
 }
+
+#[test]
+fn complete_record_expr_path() {
+    check(
+        r#"
+struct Zulu;
+
+impl Zulu {
+    fn test() -> Self { }
+}
+fn boi(val: Zulu) { }
+
+fn main() {
+    boi(Zulu:: $0 {});
+}
+"#,
+        expect![[r#"
+            fn test() fn() -> Zulu
+        "#]],
+    );
+}

@@ -443,7 +443,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-            bn TupleVariant(…) TupleVariant($1)$0
+            bn TupleVariant    TupleVariant
         "#]],
     );
     check_empty(
@@ -458,7 +458,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-            bn RecordVariant {…} RecordVariant { field$1 }$0
+            bn RecordVariant    RecordVariant
         "#]],
     );
 }

@@ -167,7 +167,6 @@ fn main() {
             st Foo
             st Foo {…} Foo { foo1: u32, foo2: u32 }
             tt Default
-            tt Sized
             bt u32
             kw crate::
             kw self::

@@ -674,7 +674,60 @@ fn bar() -> Bar {
         expect![[r#"
             fn foo() (as Foo) fn() -> Self
         "#]],
-    )
+    );
+}
+
+#[test]
+fn type_anchor_type() {
+    check(
+        r#"
+trait Foo {
+    fn foo() -> Self;
+}
+struct Bar;
+impl Bar {
+    fn bar() {}
+}
+impl Foo for Bar {
+    fn foo() -> {
+        Bar
+    }
+}
+
+fn bar() -> Bar {
+    <Bar>::$0
+}
+"#,
+        expect![[r#"
+            fn bar() fn()
+            fn foo() (as Foo) fn() -> Self
+        "#]],
+    );
+}
+
+#[test]
+fn type_anchor_type_trait() {
+    check(
+        r#"
+trait Foo {
+    fn foo() -> Self;
+}
+struct Bar;
+impl Bar {
+    fn bar() {}
+}
+impl Foo for Bar {
+    fn foo() -> {
+        Bar
+    }
+}
+
+fn bar() -> Bar {
+    <Bar as Foo>::$0
+}
+"#,
+        expect![[r#"
+            fn foo() (as Foo) fn() -> Self
+        "#]],
+    );
 }

 #[test]

@@ -13,7 +13,7 @@ mod html;
 #[cfg(test)]
 mod tests;

-use hir::{InFile, Name, Semantics};
+use hir::{Name, Semantics};
 use ide_db::{FxHashMap, RootDatabase};
 use syntax::{
     ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T,
@@ -325,7 +325,7 @@ fn traverse(
             Leave(NodeOrToken::Node(node)) => {
                 // Doc comment highlighting injection, we do this when leaving the node
                 // so that we overwrite the highlighting of the doc comment itself.
-                inject::doc_comment(hl, sema, InFile::new(file_id.into(), &node));
+                inject::doc_comment(hl, sema, file_id, &node);
                 continue;
             }
         };

@@ -5,7 +5,8 @@ use std::mem;
 use either::Either;
 use hir::{InFile, Semantics};
 use ide_db::{
-    active_parameter::ActiveParameter, defs::Definition, rust_doc::is_rust_fence, SymbolKind,
+    active_parameter::ActiveParameter, base_db::FileId, defs::Definition, rust_doc::is_rust_fence,
+    SymbolKind,
 };
 use syntax::{
     ast::{self, AstNode, IsString, QuoteOffsets},
@@ -81,16 +82,18 @@ pub(super) fn ra_fixture(
 const RUSTDOC_FENCE_LENGTH: usize = 3;
 const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];

-/// Injection of syntax highlighting of doctests.
+/// Injection of syntax highlighting of doctests and intra doc links.
 pub(super) fn doc_comment(
     hl: &mut Highlights,
     sema: &Semantics<'_, RootDatabase>,
-    InFile { file_id: src_file_id, value: node }: InFile<&SyntaxNode>,
+    src_file_id: FileId,
+    node: &SyntaxNode,
 ) {
     let (attributes, def) = match doc_attributes(sema, node) {
         Some(it) => it,
         None => return,
     };
+    let src_file_id = src_file_id.into();

     // Extract intra-doc links and emit highlights for them.
     if let Some((docs, doc_mapping)) = attributes.docs_with_rangemap(sema.db) {


@@ -0,0 +1,51 @@
<style>
body { margin: 0; }
pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
.lifetime { color: #DFAF8F; font-style: italic; }
.label { color: #DFAF8F; font-style: italic; }
.comment { color: #7F9F7F; }
.documentation { color: #629755; }
.intra_doc_link { font-style: italic; }
.injected { opacity: 0.65 ; }
.struct, .enum { color: #7CB8BB; }
.enum_variant { color: #BDE0F3; }
.string_literal { color: #CC9393; }
.field { color: #94BFF3; }
.function { color: #93E0E3; }
.function.unsafe { color: #BC8383; }
.trait.unsafe { color: #BC8383; }
.operator.unsafe { color: #BC8383; }
.mutable.unsafe { color: #BC8383; text-decoration: underline; }
.keyword.unsafe { color: #BC8383; font-weight: bold; }
.macro.unsafe { color: #BC8383; }
.parameter { color: #94BFF3; }
.text { color: #DCDCCC; }
.type { color: #7CB8BB; }
.builtin_type { color: #8CD0D3; }
.type_param { color: #DFAF8F; }
.attribute { color: #94BFF3; }
.numeric_literal { color: #BFEBBF; }
.bool_literal { color: #BFE6EB; }
.macro { color: #94BFF3; }
.derive { color: #94BFF3; font-style: italic; }
.module { color: #AFD8AF; }
.value_param { color: #DCDCCC; }
.variable { color: #DCDCCC; }
.format_specifier { color: #CC696B; }
.mutable { text-decoration: underline; }
.escape_sequence { color: #94BFF3; }
.keyword { color: #F0DFAF; font-weight: bold; }
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="comment documentation">//! </span><span class="struct documentation injected intra_doc_link">[Struct]</span>
<span class="comment documentation">//! This is an intra doc injection test for modules</span>
<span class="comment documentation">//! </span><span class="struct documentation injected intra_doc_link">[Struct]</span>
<span class="comment documentation">//! This is an intra doc injection test for modules</span>
<span class="keyword">pub</span> <span class="keyword">struct</span> <span class="struct declaration public">Struct</span><span class="semicolon">;</span>
</code></pre>


@@ -0,0 +1,50 @@
<style>
body { margin: 0; }
pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
.lifetime { color: #DFAF8F; font-style: italic; }
.label { color: #DFAF8F; font-style: italic; }
.comment { color: #7F9F7F; }
.documentation { color: #629755; }
.intra_doc_link { font-style: italic; }
.injected { opacity: 0.65 ; }
.struct, .enum { color: #7CB8BB; }
.enum_variant { color: #BDE0F3; }
.string_literal { color: #CC9393; }
.field { color: #94BFF3; }
.function { color: #93E0E3; }
.function.unsafe { color: #BC8383; }
.trait.unsafe { color: #BC8383; }
.operator.unsafe { color: #BC8383; }
.mutable.unsafe { color: #BC8383; text-decoration: underline; }
.keyword.unsafe { color: #BC8383; font-weight: bold; }
.macro.unsafe { color: #BC8383; }
.parameter { color: #94BFF3; }
.text { color: #DCDCCC; }
.type { color: #7CB8BB; }
.builtin_type { color: #8CD0D3; }
.type_param { color: #DFAF8F; }
.attribute { color: #94BFF3; }
.numeric_literal { color: #BFEBBF; }
.bool_literal { color: #BFE6EB; }
.macro { color: #94BFF3; }
.derive { color: #94BFF3; font-style: italic; }
.module { color: #AFD8AF; }
.value_param { color: #DCDCCC; }
.variable { color: #DCDCCC; }
.format_specifier { color: #CC696B; }
.mutable { text-decoration: underline; }
.escape_sequence { color: #94BFF3; }
.keyword { color: #F0DFAF; font-weight: bold; }
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[crate::foo::Struct]</span>
<span class="comment documentation">/// This is an intra doc injection test for modules</span>
<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[crate::foo::Struct]</span>
<span class="comment documentation">/// This is an intra doc injection test for modules</span>
<span class="keyword">mod</span> <span class="module declaration">foo</span><span class="semicolon">;</span>
</code></pre>

@@ -915,6 +915,52 @@ fn main() {
 }

 #[test]
+fn test_mod_hl_injection() {
+    check_highlighting(
+        r##"
+//- /foo.rs
+//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+pub struct Struct;
+//- /lib.rs crate:foo
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+"##,
+        expect_file!["./test_data/highlight_module_docs_inline.html"],
+        false,
+    );
+    check_highlighting(
+        r##"
+//- /lib.rs crate:foo
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+//- /foo.rs
+//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+pub struct Struct;
+"##,
+        expect_file!["./test_data/highlight_module_docs_outline.html"],
+        false,
+    );
+}
+
+#[test]
+#[cfg_attr(
+    all(unix, not(target_pointer_width = "64")),
+    ignore = "depends on `DefaultHasher` outputs"
+)]
 fn test_rainbow_highlighting() {
     check_highlighting(
         r#"

@@ -2,12 +2,13 @@

 #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]

+#[cfg(feature = "tracking")]
 use std::sync::atomic::AtomicUsize;

 /// Represents a struct used to enforce a numerical limit.
 pub struct Limit {
     upper_bound: usize,
-    #[allow(unused)]
+    #[cfg(feature = "tracking")]
     max: AtomicUsize,
 }

@@ -15,14 +16,22 @@ impl Limit {
     /// Creates a new limit.
     #[inline]
     pub const fn new(upper_bound: usize) -> Self {
-        Self { upper_bound, max: AtomicUsize::new(0) }
+        Self {
+            upper_bound,
+            #[cfg(feature = "tracking")]
+            max: AtomicUsize::new(0),
+        }
     }

     /// Creates a new limit.
     #[inline]
     #[cfg(feature = "tracking")]
     pub const fn new_tracking(upper_bound: usize) -> Self {
-        Self { upper_bound, max: AtomicUsize::new(1) }
+        Self {
+            upper_bound,
+            #[cfg(feature = "tracking")]
+            max: AtomicUsize::new(1),
+        }
     }

     /// Gets the underlying numeric limit.

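The `max` counter is now compiled out entirely unless the `tracking` feature is enabled; note that the struct literal needs a matching `#[cfg]` on its field initializer, which is what the multi-line `Self { ... }` rewrites above are for. A reduced sketch of the pattern, reusing the crate's `tracking` feature name:

    #[cfg(feature = "tracking")]
    use std::sync::atomic::AtomicUsize;

    pub struct Limit {
        upper_bound: usize,
        // Present only when the `tracking` feature is enabled.
        #[cfg(feature = "tracking")]
        max: AtomicUsize,
    }

    impl Limit {
        pub const fn new(upper_bound: usize) -> Self {
            Self {
                upper_bound,
                // Field initializers accept cfg attributes too, so one
                // constructor compiles with and without the feature.
                #[cfg(feature = "tracking")]
                max: AtomicUsize::new(0),
            }
        }
    }

    fn main() {
        let limit = Limit::new(128);
        assert_eq!(limit.upper_bound, 128);
    }
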
@@ -54,7 +54,7 @@ fn path_for_qualifier(
     mut qual: CompletedMarker,
 ) -> CompletedMarker {
     loop {
-        let use_tree = matches!(p.nth(2), T![*] | T!['{']);
+        let use_tree = mode == Mode::Use && matches!(p.nth(2), T![*] | T!['{']);
         if p.at(T![::]) && !use_tree {
             let path = qual.precede(p);
             p.bump(T![::]);

@@ -39,6 +39,8 @@ pub(crate) struct ProcMacroSrv {
     expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
 }

+const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
+
 impl ProcMacroSrv {
     pub fn expand(&mut self, task: ExpandMacro) -> Result<FlatTree, PanicMessage> {
         let expander = self.expander(task.lib.as_ref()).map_err(|err| {
@@ -66,13 +68,18 @@ impl ProcMacroSrv {
         // FIXME: replace this with std's scoped threads once they stabilize
         // (then remove dependency on crossbeam)
         let result = crossbeam::scope(|s| {
-            let res = s
+            let res = match s
+                .builder()
+                .stack_size(EXPANDER_STACK_SIZE)
+                .name(task.macro_name.clone())
                 .spawn(|_| {
                     expander
                         .expand(&task.macro_name, &macro_body, attributes.as_ref())
                         .map(|it| FlatTree::new(&it))
-                })
-                .join();
+                }) {
+                Ok(handle) => handle.join(),
+                Err(e) => std::panic::resume_unwind(Box::new(e)),
+            };

             match res {
                 Ok(res) => res,

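Proc-macro expansion now runs on a thread with an explicit 8 MiB stack rather than the platform default, since deeply recursive macro expansions can overflow smaller stacks. The plain-std equivalent of what the crossbeam scoped builder does here:

    use std::thread;

    const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; // 8 MiB, as in the diff

    fn main() {
        let handle = thread::Builder::new()
            .stack_size(EXPANDER_STACK_SIZE)
            .name("macro-expander".to_owned())
            // Builder::spawn can fail (e.g. resource exhaustion), hence the Result.
            .spawn(|| {
                // ... expand the macro on the big stack ...
                2 + 2
            })
            .expect("failed to spawn expander thread");

        assert_eq!(handle.join().unwrap(), 4);
    }
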
@@ -66,7 +66,9 @@ pub fn load_workspace(
     };

     let crate_graph = ws.to_crate_graph(
-        &mut |_, path: &AbsPath| load_proc_macro(proc_macro_client.as_ref(), path, &[]),
+        &mut |_, path: &AbsPath| {
+            load_proc_macro(proc_macro_client.as_ref().map_err(|e| &**e), path, &[])
+        },
         &mut |path: &AbsPath| {
             let contents = loader.load_sync(path);
             let path = vfs::VfsPath::from(path.to_path_buf());

View file

@ -303,6 +303,9 @@ impl GlobalState {
let files_config = self.config.files(); let files_config = self.config.files();
let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude); let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
let standalone_server_name =
format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
if self.proc_macro_clients.is_empty() { if self.proc_macro_clients.is_empty() {
if let Some((path, args)) = self.config.proc_macro_srv() { if let Some((path, args)) = self.config.proc_macro_srv() {
self.proc_macro_clients = self self.proc_macro_clients = self
@ -316,10 +319,8 @@ impl GlobalState {
tracing::info!("Found a cargo workspace..."); tracing::info!("Found a cargo workspace...");
if let Some(sysroot) = sysroot.as_ref() { if let Some(sysroot) = sysroot.as_ref() {
tracing::info!("Found a cargo workspace with a sysroot..."); tracing::info!("Found a cargo workspace with a sysroot...");
let server_path = sysroot let server_path =
.root() sysroot.root().join("libexec").join(&standalone_server_name);
.join("libexec")
.join("rust-analyzer-proc-macro-srv");
if std::fs::metadata(&server_path).is_ok() { if std::fs::metadata(&server_path).is_ok() {
tracing::info!( tracing::info!(
"And the server exists at {}", "And the server exists at {}",
@@ -389,7 +390,10 @@ impl GlobalState {
         let mut crate_graph = CrateGraph::default();
         for (idx, ws) in self.workspaces.iter().enumerate() {
-            let proc_macro_client = self.proc_macro_clients[idx].as_ref();
+            let proc_macro_client = match self.proc_macro_clients.get(idx) {
+                Some(res) => res.as_ref().map_err(|e| &**e),
+                None => Err("Proc macros are disabled"),
+            };
             let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
                 load_proc_macro(
                     proc_macro_client,
@@ -573,7 +577,7 @@ impl SourceRootConfig {
 /// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
 /// with an identity dummy expander.
 pub(crate) fn load_proc_macro(
-    server: Result<&ProcMacroServer, &String>,
+    server: Result<&ProcMacroServer, &str>,
     path: &AbsPath,
     dummy_replace: &[Box<str>],
 ) -> ProcMacroLoadResult {
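
Widening the error type from `&String` to `&str` is what lets both call sites above compile: a stored `Result<_, String>` can be borrowed down via `as_ref().map_err(|e| &**e)`, while the no-server fallback can pass a plain string literal. A tiny standalone sketch of the conversion (the function here is hypothetical, standing in for `load_proc_macro`):

```rust
// Hypothetical stand-in for `load_proc_macro`: it only cares that the error
// is borrowable as &str, not who owns it.
fn use_server(server: Result<&u32, &str>) -> String {
    match server {
        Ok(handle) => format!("server #{handle}"),
        Err(msg) => format!("no server: {msg}"),
    }
}

fn main() {
    // An owned error, as stored in `proc_macro_clients`.
    let stored: Result<u32, String> = Err("failed to spawn server".to_owned());
    // `as_ref()` yields Result<&u32, &String>; `&**e` derefs &String to &str.
    assert_eq!(
        use_server(stored.as_ref().map_err(|e| &**e)),
        "no server: failed to spawn server"
    );
    // With &str errors, a literal works too (the "disabled" fallback above).
    assert_eq!(
        use_server(Err("Proc macros are disabled")),
        "no server: Proc macros are disabled"
    );
}
```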

View file

@@ -880,7 +880,6 @@ impl ForExpr {
     pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
     pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
     pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
-    pub fn iterable(&self) -> Option<Expr> { support::child(&self.syntax) }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -890,7 +889,6 @@ pub struct IfExpr {
 impl ast::HasAttrs for IfExpr {}
 impl IfExpr {
     pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
-    pub fn condition(&self) -> Option<Expr> { support::child(&self.syntax) }
     pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
 }
@@ -1051,7 +1049,6 @@ pub struct WhileExpr {
 impl ast::HasAttrs for WhileExpr {}
 impl WhileExpr {
     pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) }
-    pub fn condition(&self) -> Option<Expr> { support::child(&self.syntax) }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -1170,7 +1167,6 @@ pub struct MatchGuard {
 }
 impl MatchGuard {
     pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
-    pub fn condition(&self) -> Option<Expr> { support::child(&self.syntax) }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]

View file

@@ -806,6 +806,19 @@ impl ast::GenericParamList {
     }
 }
 
+impl ast::ForExpr {
+    pub fn iterable(&self) -> Option<ast::Expr> {
+        // If the iterable is a BlockExpr, check if the body is missing.
+        // If it is, assume the iterable is the expression that is missing instead.
+        let mut exprs = support::children(self.syntax());
+        let first = exprs.next();
+        match first {
+            Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+            first => first,
+        }
+    }
+}
+
 impl ast::HasLoopBody for ast::ForExpr {
     fn loop_body(&self) -> Option<ast::BlockExpr> {
         let mut exprs = support::children(self.syntax());
@@ -815,6 +828,19 @@ impl ast::HasLoopBody for ast::ForExpr {
     }
 }
 
+impl ast::WhileExpr {
+    pub fn condition(&self) -> Option<ast::Expr> {
+        // If the condition is a BlockExpr, check if the body is missing.
+        // If it is, assume the condition is the expression that is missing instead.
+        let mut exprs = support::children(self.syntax());
+        let first = exprs.next();
+        match first {
+            Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+            first => first,
+        }
+    }
+}
+
 impl ast::HasLoopBody for ast::WhileExpr {
     fn loop_body(&self) -> Option<ast::BlockExpr> {
         let mut exprs = support::children(self.syntax());
@@ -835,3 +861,15 @@ impl From<ast::Adt> for ast::Item {
         }
     }
 }
+
+impl ast::IfExpr {
+    pub fn condition(&self) -> Option<ast::Expr> {
+        support::child(&self.syntax)
+    }
+}
+
+impl ast::MatchGuard {
+    pub fn condition(&self) -> Option<ast::Expr> {
+        support::child(&self.syntax)
+    }
+}
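
Taken together: the generated `iterable()`/`condition()` accessors are removed, and these hand-written versions inspect the children instead. If the first child expression is a `BlockExpr` and it is the only one, the parser most likely consumed the loop body in the iterable/condition position, so the accessor reports the expression as missing rather than returning the body. A rough usage sketch, assuming the published `ra_ap_syntax` crate from around this release (the exact parse API may differ between versions):

```rust
use ra_ap_syntax::{ast, AstNode, SourceFile};

fn first_for_expr(text: &str) -> ast::ForExpr {
    // At the time, `SourceFile::parse` took only the source text.
    let file = SourceFile::parse(text).tree();
    file.syntax().descendants().find_map(ast::ForExpr::cast).expect("sample has a for loop")
}

fn main() {
    // Complete loop: `xs` is the iterable, the block is the body.
    let complete = first_for_expr("fn f(xs: [u32; 3]) { for x in xs {} }");
    assert!(complete.iterable().is_some());

    // Incomplete loop: only a block follows `in`, so per the BlockExpr
    // heuristic above it is taken as the body and the iterable is missing.
    let incomplete = first_for_expr("fn f() { for x in {} }");
    assert!(incomplete.iterable().is_none());
}
```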

View file

@@ -682,6 +682,8 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
             | "value"
             | "trait"
             | "self_ty"
+            | "iterable"
+            | "condition"
     );
     if manually_implemented {
         return;
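
The two new names must be registered in the codegen's skip-list: `lower_rule` drops any field whose accessor is implemented by hand, which is what keeps the generated `ast` file free of the `iterable`/`condition` methods removed above. The guard is just a `matches!` over field names, along these lines (only `self_ty`, `iterable`, and `condition` are taken from the diff; the rest of the real list is elided):

```rust
// Illustrative reduction of the codegen guard.
fn is_manually_implemented(field_name: &str) -> bool {
    matches!(field_name, "self_ty" | "iterable" | "condition")
}

fn main() {
    // Skipped: a hand-written accessor in node_ext.rs provides it.
    assert!(is_manually_implemented("condition"));
    // Generated as usual.
    assert!(!is_manually_implemented("pat"));
}
```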

View file

@@ -210,7 +210,8 @@ Release process is handled by `release`, `dist` and `promote` xtasks, `release`
 ./rust-rust-analyzer # Note the name!
 ```
 
-Additionally, it assumes that the remote for `rust-analyzer` is called `upstream` (I use `origin` to point to my fork).
+The remote for `rust-analyzer` must be called `upstream` (I use `origin` to point to my fork).
+In addition, for `xtask promote` (see below), `rust-rust-analyzer` must have a `rust-analyzer` remote pointing to this repository on GitHub.
 
 `release` calls the GitHub API to scrape pull request comments and categorize them in the changelog.
 This step uses the `curl` and `jq` applications, which need to be available in `PATH`.
@@ -225,13 +226,13 @@ Release steps:
   * push it to `upstream`. This triggers GitHub Actions which:
     * runs `cargo xtask dist` to package binaries and VS Code extension
     * makes a GitHub release
-    * pushes VS Code extension to the marketplace
+    * publishes the VS Code extension to the marketplace
   * call the GitHub API for PR details
   * create a new changelog in `rust-analyzer.github.io`
 3. While the release is in progress, fill in the changelog
 4. Commit & push the changelog
 5. Tweet
-6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's submodule.
+6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree.
    Self-approve the PR.
 
 If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console.

View file

@ -77,18 +77,12 @@ impl flags::Promote {
cmd!(sh, "git switch master").run()?; cmd!(sh, "git switch master").run()?;
cmd!(sh, "git fetch upstream").run()?; cmd!(sh, "git fetch upstream").run()?;
cmd!(sh, "git reset --hard upstream/master").run()?; cmd!(sh, "git reset --hard upstream/master").run()?;
cmd!(sh, "git submodule update --recursive").run()?;
let date = date_iso(sh)?; let date = date_iso(sh)?;
let branch = format!("rust-analyzer-{date}"); let branch = format!("rust-analyzer-{date}");
cmd!(sh, "git switch -c {branch}").run()?; cmd!(sh, "git switch -c {branch}").run()?;
{ cmd!(sh, "git subtree pull -P src/tools/rust-analyzer rust-analyzer master").run()?;
let _dir = sh.push_dir("src/tools/rust-analyzer");
cmd!(sh, "git fetch origin").run()?;
cmd!(sh, "git reset --hard origin/release").run()?;
}
cmd!(sh, "git add src/tools/rust-analyzer").run()?;
cmd!(sh, "git commit -m':arrow_up: rust-analyzer'").run()?;
if !self.dry_run { if !self.dry_run {
cmd!(sh, "git push -u origin {branch}").run()?; cmd!(sh, "git push -u origin {branch}").run()?;
cmd!( cmd!(