Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-11-14 08:57:34 +00:00

Commit c4d128e454: Merge branch 'master' of github.com:rust-analyzer/rust-analyzer into fix_4311

61 changed files with 659 additions and 569 deletions
@@ -4,14 +4,13 @@ use ra_db::FileRange;
 use ra_fmt::{leading_indent, reindent};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
-    algo::{self, find_covering_element, find_node_at_offset},
+    algo::{self, find_covering_element, find_node_at_offset, SyntaxRewriter},
     AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize,
     TokenAtOffset,
 };
 use ra_text_edit::TextEditBuilder;

 use crate::{AssistAction, AssistFile, AssistId, AssistLabel, GroupLabel, ResolvedAssist};
-use algo::SyntaxRewriter;

 #[derive(Clone, Debug)]
 pub(crate) struct Assist(pub(crate) Vec<AssistInfo>);
@@ -38,13 +37,10 @@ impl AssistInfo {

     pub(crate) fn into_resolved(self) -> Option<ResolvedAssist> {
         let label = self.label;
-        let group_label = self.group_label;
-        self.action.map(|action| ResolvedAssist { label, group_label, action })
+        self.action.map(|action| ResolvedAssist { label, action })
     }
 }

-pub(crate) type AssistHandler = fn(AssistCtx) -> Option<Assist>;
-
 /// `AssistCtx` allows to apply an assist or check if it could be applied.
 ///
 /// Assists use a somewhat over-engineered approach, given the current needs. The
@@ -100,7 +96,7 @@ impl<'a> AssistCtx<'a> {
         label: impl Into<String>,
         f: impl FnOnce(&mut ActionBuilder),
     ) -> Option<Assist> {
-        let label = AssistLabel::new(label.into(), id);
+        let label = AssistLabel::new(id, label.into(), None);

         let mut info = AssistInfo::new(label);
         if self.should_compute_edit {
@@ -116,7 +112,8 @@ impl<'a> AssistCtx<'a> {
     }

     pub(crate) fn add_assist_group(self, group_name: impl Into<String>) -> AssistGroup<'a> {
-        AssistGroup { ctx: self, group_name: group_name.into(), assists: Vec::new() }
+        let group = GroupLabel(group_name.into());
+        AssistGroup { ctx: self, group, assists: Vec::new() }
     }

     pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
@@ -146,7 +143,7 @@ impl<'a> AssistCtx<'a> {

 pub(crate) struct AssistGroup<'a> {
     ctx: AssistCtx<'a>,
-    group_name: String,
+    group: GroupLabel,
     assists: Vec<AssistInfo>,
 }

@@ -157,9 +154,9 @@ impl<'a> AssistGroup<'a> {
         label: impl Into<String>,
         f: impl FnOnce(&mut ActionBuilder),
     ) {
-        let label = AssistLabel::new(label.into(), id);
+        let label = AssistLabel::new(id, label.into(), Some(self.group.clone()));

-        let mut info = AssistInfo::new(label).with_group(GroupLabel(self.group_name.clone()));
+        let mut info = AssistInfo::new(label).with_group(self.group.clone());
         if self.ctx.should_compute_edit {
             let action = {
                 let mut edit = ActionBuilder::new(&self.ctx);
@@ -30,6 +30,10 @@ fn check(assist_id: &str, before: &str, after: &str) {
         )
     });

-    let actual = assist.action.edit.apply(&before);
+    let actual = {
+        let mut actual = before.clone();
+        assist.action.edit.apply(&mut actual);
+        actual
+    };
     assert_eq_text!(after, &actual);
 }
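This hunk, and many of the test updates further down, adapt to the new `TextEdit` API: `apply` now mutates a `&mut String` in place instead of returning a fresh `String` (and `AtomTextEdit` is renamed to `Indel` later in this diff). A minimal sketch of the new calling convention, not taken from the diff itself; the helper name and shape are made up for illustration:

```rust
use ra_text_edit::TextEdit;

// Sketch only: wraps the in-place API behind the old "return a new String" shape.
fn apply_edit(edit: &TextEdit, before: &str) -> String {
    let mut after = before.to_string();
    edit.apply(&mut after); // previously: `let after = edit.apply(before);`
    after
}
```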
@@ -10,7 +10,7 @@ use ra_syntax::{

 use crate::{
     ast_transform::{self, AstTransform, QualifyPaths, SubstituteTypeParams},
-    utils::{get_missing_impl_items, resolve_target_trait},
+    utils::{get_missing_assoc_items, resolve_target_trait},
     Assist, AssistCtx, AssistId,
 };

@ -112,25 +112,25 @@ fn add_missing_impl_members_inner(
|
|||
|
||||
let trait_ = resolve_target_trait(&ctx.sema, &impl_node)?;
|
||||
|
||||
let def_name = |item: &ast::ImplItem| -> Option<SmolStr> {
|
||||
let def_name = |item: &ast::AssocItem| -> Option<SmolStr> {
|
||||
match item {
|
||||
ast::ImplItem::FnDef(def) => def.name(),
|
||||
ast::ImplItem::TypeAliasDef(def) => def.name(),
|
||||
ast::ImplItem::ConstDef(def) => def.name(),
|
||||
ast::AssocItem::FnDef(def) => def.name(),
|
||||
ast::AssocItem::TypeAliasDef(def) => def.name(),
|
||||
ast::AssocItem::ConstDef(def) => def.name(),
|
||||
}
|
||||
.map(|it| it.text().clone())
|
||||
};
|
||||
|
||||
let missing_items = get_missing_impl_items(&ctx.sema, &impl_node)
|
||||
let missing_items = get_missing_assoc_items(&ctx.sema, &impl_node)
|
||||
.iter()
|
||||
.map(|i| match i {
|
||||
hir::AssocItem::Function(i) => ast::ImplItem::FnDef(i.source(ctx.db).value),
|
||||
hir::AssocItem::TypeAlias(i) => ast::ImplItem::TypeAliasDef(i.source(ctx.db).value),
|
||||
hir::AssocItem::Const(i) => ast::ImplItem::ConstDef(i.source(ctx.db).value),
|
||||
hir::AssocItem::Function(i) => ast::AssocItem::FnDef(i.source(ctx.db).value),
|
||||
hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAliasDef(i.source(ctx.db).value),
|
||||
hir::AssocItem::Const(i) => ast::AssocItem::ConstDef(i.source(ctx.db).value),
|
||||
})
|
||||
.filter(|t| def_name(&t).is_some())
|
||||
.filter(|t| match t {
|
||||
ast::ImplItem::FnDef(def) => match mode {
|
||||
ast::AssocItem::FnDef(def) => match mode {
|
||||
AddMissingImplMembersMode::DefaultMethodsOnly => def.body().is_some(),
|
||||
AddMissingImplMembersMode::NoDefaultMethods => def.body().is_none(),
|
||||
},
|
||||
|
@ -145,7 +145,7 @@ fn add_missing_impl_members_inner(
|
|||
let sema = ctx.sema;
|
||||
|
||||
ctx.add_assist(AssistId(assist_id), label, |edit| {
|
||||
let n_existing_items = impl_item_list.impl_items().count();
|
||||
let n_existing_items = impl_item_list.assoc_items().count();
|
||||
let source_scope = sema.scope_for_def(trait_);
|
||||
let target_scope = sema.scope(impl_item_list.syntax());
|
||||
let ast_transform = QualifyPaths::new(&target_scope, &source_scope)
|
||||
|
@ -154,13 +154,13 @@ fn add_missing_impl_members_inner(
|
|||
.into_iter()
|
||||
.map(|it| ast_transform::apply(&*ast_transform, it))
|
||||
.map(|it| match it {
|
||||
ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)),
|
||||
ast::AssocItem::FnDef(def) => ast::AssocItem::FnDef(add_body(def)),
|
||||
_ => it,
|
||||
})
|
||||
.map(|it| edit::remove_attrs_and_docs(&it));
|
||||
let new_impl_item_list = impl_item_list.append_items(items);
|
||||
let cursor_position = {
|
||||
let first_new_item = new_impl_item_list.impl_items().nth(n_existing_items).unwrap();
|
||||
let first_new_item = new_impl_item_list.assoc_items().nth(n_existing_items).unwrap();
|
||||
first_new_item.syntax().text_range().start()
|
||||
};
|
||||
|
||||
|
|
|
@ -162,8 +162,8 @@ fn find_struct_impl(ctx: &AssistCtx, strukt: &ast::StructDef) -> Option<Option<a
|
|||
|
||||
fn has_new_fn(imp: &ast::ImplDef) -> bool {
|
||||
if let Some(il) = imp.item_list() {
|
||||
for item in il.impl_items() {
|
||||
if let ast::ImplItem::FnDef(f) = item {
|
||||
for item in il.assoc_items() {
|
||||
if let ast::AssocItem::FnDef(f) = item {
|
||||
if let Some(name) = f.name() {
|
||||
if name.text().eq_ignore_ascii_case("new") {
|
||||
return true;
|
||||
|
|
|
@ -47,8 +47,7 @@ fn add_vis(ctx: AssistCtx) -> Option<Assist> {
|
|||
return None;
|
||||
}
|
||||
(vis_offset(&parent), keyword.text_range())
|
||||
} else {
|
||||
let field_name: ast::Name = ctx.find_node_at_offset()?;
|
||||
} else if let Some(field_name) = ctx.find_node_at_offset::<ast::Name>() {
|
||||
let field = field_name.syntax().ancestors().find_map(ast::RecordFieldDef::cast)?;
|
||||
if field.name()? != field_name {
|
||||
tested_by!(change_visibility_field_false_positive);
|
||||
|
@ -58,6 +57,13 @@ fn add_vis(ctx: AssistCtx) -> Option<Assist> {
|
|||
return None;
|
||||
}
|
||||
(vis_offset(field.syntax()), field_name.syntax().text_range())
|
||||
} else if let Some(field) = ctx.find_node_at_offset::<ast::TupleFieldDef>() {
|
||||
if field.visibility().is_some() {
|
||||
return None;
|
||||
}
|
||||
(vis_offset(field.syntax()), field.syntax().text_range())
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
|
||||
ctx.add_assist(AssistId("change_visibility"), "Change visibility to pub(crate)", |edit| {
|
||||
|
@ -129,7 +135,8 @@ mod tests {
|
|||
change_visibility,
|
||||
r"struct S { <|>field: u32 }",
|
||||
r"struct S { <|>pub(crate) field: u32 }",
|
||||
)
|
||||
);
|
||||
check_assist(change_visibility, r"struct S ( <|>u32 )", r"struct S ( <|>pub(crate) u32 )");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@@ -17,13 +17,13 @@ mod doc_tests;
 pub mod utils;
 pub mod ast_transform;

+use hir::Semantics;
 use ra_db::{FileId, FileRange};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{TextRange, TextSize};
 use ra_text_edit::TextEdit;

-pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler};
-use hir::Semantics;
+pub(crate) use crate::assist_ctx::{Assist, AssistCtx};

 /// Unique identifier of the assist, should not be shown to the user
 /// directly.
@@ -32,19 +32,20 @@ pub struct AssistId(pub &'static str);

 #[derive(Debug, Clone)]
 pub struct AssistLabel {
+    pub id: AssistId,
     /// Short description of the assist, as shown in the UI.
     pub label: String,
-    pub id: AssistId,
+    pub group: Option<GroupLabel>,
 }

 #[derive(Clone, Debug)]
 pub struct GroupLabel(pub String);

 impl AssistLabel {
-    pub(crate) fn new(label: String, id: AssistId) -> AssistLabel {
+    pub(crate) fn new(id: AssistId, label: String, group: Option<GroupLabel>) -> AssistLabel {
         // FIXME: make fields private, so that this invariant can't be broken
         assert!(label.starts_with(|c: char| c.is_uppercase()));
-        AssistLabel { label, id }
+        AssistLabel { id, label, group }
     }
 }

@@ -60,7 +61,6 @@ pub struct AssistAction {
 #[derive(Debug, Clone)]
 pub struct ResolvedAssist {
     pub label: AssistLabel,
-    pub group_label: Option<GroupLabel>,
     pub action: AssistAction,
 }

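With the fields above, an assist's group now lives on its label rather than on `ResolvedAssist`. Roughly, a grouped assist label is built like this (a sketch: the group name is a made-up example, and `AssistLabel::new` is crate-private):

```rust
let group = GroupLabel("Example group".to_string()); // group name is illustrative only
let label = AssistLabel::new(
    AssistId("change_visibility"),
    "Change visibility to pub(crate)".to_string(), // must be capitalized, per the assert above
    Some(group.clone()),
);
// The group travels with the label, so a resolved assist is now just:
// ResolvedAssist { label, action }
```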
@@ -109,7 +109,9 @@ pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> {
 }

 mod handlers {
-    use crate::AssistHandler;
+    use crate::{Assist, AssistCtx};
+
+    pub(crate) type Handler = fn(AssistCtx) -> Option<Assist>;

     mod add_custom_impl;
     mod add_derive;
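The public `AssistHandler` alias is replaced by a `Handler` alias private to the `handlers` module; every assist is a plain function with this shape. A hypothetical no-op handler, only to show the signature (the real handlers are the `add_*`, `replace_*`, etc. functions in the submodules listed here):

```rust
fn noop(ctx: AssistCtx) -> Option<Assist> {
    // `add_assist` requires a capitalized label, per the assert in `AssistLabel::new`.
    ctx.add_assist(AssistId("noop"), "Do nothing", |_edit| {})
}
const _SHAPE_CHECK: Handler = noop; // compiles only if `noop` matches the alias
```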
@ -145,12 +147,13 @@ mod handlers {
|
|||
mod reorder_fields;
|
||||
mod unwrap_block;
|
||||
|
||||
pub(crate) fn all() -> &'static [AssistHandler] {
|
||||
pub(crate) fn all() -> &'static [Handler] {
|
||||
&[
|
||||
// These are alphabetic for the foolish consistency
|
||||
add_custom_impl::add_custom_impl,
|
||||
add_derive::add_derive,
|
||||
add_explicit_type::add_explicit_type,
|
||||
add_from_impl_for_enum::add_from_impl_for_enum,
|
||||
add_function::add_function,
|
||||
add_impl::add_impl,
|
||||
add_new::add_new,
|
||||
|
@ -176,17 +179,18 @@ mod handlers {
|
|||
raw_string::remove_hash,
|
||||
remove_dbg::remove_dbg,
|
||||
remove_mut::remove_mut,
|
||||
reorder_fields::reorder_fields,
|
||||
replace_if_let_with_match::replace_if_let_with_match,
|
||||
replace_let_with_if_let::replace_let_with_if_let,
|
||||
replace_qualified_name_with_use::replace_qualified_name_with_use,
|
||||
replace_unwrap_with_match::replace_unwrap_with_match,
|
||||
split_import::split_import,
|
||||
add_from_impl_for_enum::add_from_impl_for_enum,
|
||||
unwrap_block::unwrap_block,
|
||||
// These are manually sorted for better priorities
|
||||
add_missing_impl_members::add_missing_impl_members,
|
||||
add_missing_impl_members::add_missing_default_members,
|
||||
reorder_fields::reorder_fields,
|
||||
// Are you sure you want to add new assist here, and not to the
|
||||
// sorted list above?
|
||||
]
|
||||
}
|
||||
}
|
||||
|
@ -195,12 +199,12 @@ mod handlers {
|
|||
mod helpers {
|
||||
use std::sync::Arc;
|
||||
|
||||
use hir::Semantics;
|
||||
use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt};
|
||||
use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase};
|
||||
use test_utils::{add_cursor, assert_eq_text, extract_range_or_offset, RangeOrOffset};
|
||||
|
||||
use crate::{AssistCtx, AssistFile, AssistHandler};
|
||||
use hir::Semantics;
|
||||
use crate::{handlers::Handler, AssistCtx, AssistFile};
|
||||
|
||||
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
|
||||
let (mut db, file_id) = RootDatabase::with_single_file(text);
|
||||
|
@ -210,22 +214,18 @@ mod helpers {
|
|||
(db, file_id)
|
||||
}
|
||||
|
||||
pub(crate) fn check_assist(
|
||||
assist: AssistHandler,
|
||||
ra_fixture_before: &str,
|
||||
ra_fixture_after: &str,
|
||||
) {
|
||||
pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) {
|
||||
check(assist, ra_fixture_before, ExpectedResult::After(ra_fixture_after));
|
||||
}
|
||||
|
||||
// FIXME: instead of having a separate function here, maybe use
|
||||
// `extract_ranges` and mark the target as `<target> </target>` in the
|
||||
// fixuture?
|
||||
pub(crate) fn check_assist_target(assist: AssistHandler, ra_fixture: &str, target: &str) {
|
||||
pub(crate) fn check_assist_target(assist: Handler, ra_fixture: &str, target: &str) {
|
||||
check(assist, ra_fixture, ExpectedResult::Target(target));
|
||||
}
|
||||
|
||||
pub(crate) fn check_assist_not_applicable(assist: AssistHandler, ra_fixture: &str) {
|
||||
pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) {
|
||||
check(assist, ra_fixture, ExpectedResult::NotApplicable);
|
||||
}
|
||||
|
||||
|
@ -235,7 +235,7 @@ mod helpers {
|
|||
Target(&'a str),
|
||||
}
|
||||
|
||||
fn check(assist: AssistHandler, before: &str, expected: ExpectedResult) {
|
||||
fn check(assist: Handler, before: &str, expected: ExpectedResult) {
|
||||
let (text_without_caret, file_with_caret_id, range_or_offset, db) =
|
||||
if before.contains("//-") {
|
||||
let (mut db, position) = RootDatabase::with_position(before);
|
||||
|
@ -261,13 +261,13 @@ mod helpers {
|
|||
(Some(assist), ExpectedResult::After(after)) => {
|
||||
let action = assist.0[0].action.clone().unwrap();
|
||||
|
||||
let assisted_file_text = if let AssistFile::TargetFile(file_id) = action.file {
|
||||
let mut actual = if let AssistFile::TargetFile(file_id) = action.file {
|
||||
db.file_text(file_id).as_ref().to_owned()
|
||||
} else {
|
||||
text_without_caret
|
||||
};
|
||||
action.edit.apply(&mut actual);
|
||||
|
||||
let mut actual = action.edit.apply(&assisted_file_text);
|
||||
match action.cursor_position {
|
||||
None => {
|
||||
if let RangeOrOffset::Offset(before_cursor_pos) = range_or_offset {
|
||||
|
|
|
@ -13,7 +13,7 @@ use rustc_hash::FxHashSet;
|
|||
|
||||
pub(crate) use insert_use::insert_use_statement;
|
||||
|
||||
pub fn get_missing_impl_items(
|
||||
pub fn get_missing_assoc_items(
|
||||
sema: &Semantics<RootDatabase>,
|
||||
impl_def: &ast::ImplDef,
|
||||
) -> Vec<hir::AssocItem> {
|
||||
|
@ -23,21 +23,21 @@ pub fn get_missing_impl_items(
|
|||
let mut impl_type = FxHashSet::default();
|
||||
|
||||
if let Some(item_list) = impl_def.item_list() {
|
||||
for item in item_list.impl_items() {
|
||||
for item in item_list.assoc_items() {
|
||||
match item {
|
||||
ast::ImplItem::FnDef(f) => {
|
||||
ast::AssocItem::FnDef(f) => {
|
||||
if let Some(n) = f.name() {
|
||||
impl_fns_consts.insert(n.syntax().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
ast::ImplItem::TypeAliasDef(t) => {
|
||||
ast::AssocItem::TypeAliasDef(t) => {
|
||||
if let Some(n) = t.name() {
|
||||
impl_type.insert(n.syntax().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
ast::ImplItem::ConstDef(c) => {
|
||||
ast::AssocItem::ConstDef(c) => {
|
||||
if let Some(n) = c.name() {
|
||||
impl_fns_consts.insert(n.syntax().to_string());
|
||||
}
|
||||
|
|
|
@@ -24,7 +24,7 @@ pub use crate::conv::url_from_path_with_drive_lowercasing;

 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum FlycheckConfig {
-    CargoCommand { command: String, all_targets: bool, extra_args: Vec<String> },
+    CargoCommand { command: String, all_targets: bool, all_features: bool, extra_args: Vec<String> },
     CustomCommand { command: String, args: Vec<String> },
 }

@@ -215,7 +215,7 @@ impl FlycheckThread {
         self.check_process = None;

         let mut cmd = match &self.config {
-            FlycheckConfig::CargoCommand { command, all_targets, extra_args } => {
+            FlycheckConfig::CargoCommand { command, all_targets, all_features, extra_args } => {
                 let mut cmd = Command::new(cargo_binary());
                 cmd.arg(command);
                 cmd.args(&["--workspace", "--message-format=json", "--manifest-path"]);
@@ -223,6 +223,9 @@ impl FlycheckThread {
                 if *all_targets {
                     cmd.arg("--all-targets");
                 }
+                if *all_features {
+                    cmd.arg("--all-features");
+                }
                 cmd.args(extra_args);
                 cmd
             }
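Putting the two hunks together, the flycheck command is now assembled roughly as follows. This is a condensed, self-contained restatement: it hard-codes "cargo" where the real code uses `cargo_binary()`, and it omits the manifest path that follows `--manifest-path`.

```rust
use std::process::Command;

fn check_command(command: &str, all_targets: bool, all_features: bool, extra_args: &[String]) -> Command {
    let mut cmd = Command::new("cargo");
    cmd.arg(command); // e.g. "check" or "clippy"
    cmd.args(&["--workspace", "--message-format=json", "--manifest-path"]);
    if all_targets {
        cmd.arg("--all-targets");
    }
    if all_features {
        cmd.arg("--all-features"); // the flag added in this commit
    }
    cmd.args(extra_args);
    cmd
}
```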
@ -1211,7 +1211,7 @@ impl Type {
|
|||
|
||||
// This would be nicer if it just returned an iterator, but that runs into
|
||||
// lifetime problems, because we need to borrow temp `CrateImplDefs`.
|
||||
pub fn iterate_impl_items<T>(
|
||||
pub fn iterate_assoc_items<T>(
|
||||
self,
|
||||
db: &dyn HirDatabase,
|
||||
krate: Crate,
|
||||
|
|
|
@ -118,11 +118,12 @@ fn lower_enum(
|
|||
module_id: ModuleId,
|
||||
) {
|
||||
let expander = CfgExpander::new(db, ast.file_id, module_id.krate);
|
||||
let variants =
|
||||
ast.value.variant_list().into_iter().flat_map(|it| it.variants()).filter(|var| {
|
||||
let attrs = expander.parse_attrs(var);
|
||||
expander.is_cfg_enabled(&attrs)
|
||||
});
|
||||
let variants = ast
|
||||
.value
|
||||
.variant_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| it.variants())
|
||||
.filter(|var| expander.is_cfg_enabled(var));
|
||||
for var in variants {
|
||||
trace.alloc(
|
||||
|| var.clone(),
|
||||
|
@ -215,8 +216,7 @@ fn lower_struct(
|
|||
match &ast.value {
|
||||
ast::StructKind::Tuple(fl) => {
|
||||
for (i, fd) in fl.fields().enumerate() {
|
||||
let attrs = expander.parse_attrs(&fd);
|
||||
if !expander.is_cfg_enabled(&attrs) {
|
||||
if !expander.is_cfg_enabled(&fd) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -233,8 +233,7 @@ fn lower_struct(
|
|||
}
|
||||
ast::StructKind::Record(fl) => {
|
||||
for fd in fl.fields() {
|
||||
let attrs = expander.parse_attrs(&fd);
|
||||
if !expander.is_cfg_enabled(&attrs) {
|
||||
if !expander.is_cfg_enabled(&fd) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
|
@@ -60,7 +60,8 @@ impl CfgExpander {
         Attrs::new(owner, &self.hygiene)
     }

-    pub(crate) fn is_cfg_enabled(&self, attrs: &Attrs) -> bool {
+    pub(crate) fn is_cfg_enabled(&self, owner: &dyn ast::AttrsOwner) -> bool {
+        let attrs = self.parse_attrs(owner);
         attrs.is_cfg_enabled(&self.cfg_options)
     }
 }
@@ -141,12 +142,8 @@ impl Expander {
         InFile { file_id: self.current_file_id, value }
     }

-    pub(crate) fn parse_attrs(&self, owner: &dyn ast::AttrsOwner) -> Attrs {
-        self.cfg_expander.parse_attrs(owner)
-    }
-
-    pub(crate) fn is_cfg_enabled(&self, attrs: &Attrs) -> bool {
-        self.cfg_expander.is_cfg_enabled(attrs)
+    pub(crate) fn is_cfg_enabled(&self, owner: &dyn ast::AttrsOwner) -> bool {
+        self.cfg_expander.is_cfg_enabled(owner)
     }

     fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
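Both `CfgExpander::is_cfg_enabled` and `Expander::is_cfg_enabled` now take the attribute owner itself rather than pre-parsed `Attrs`, so call sites shrink from a `parse_attrs` plus check pair to a single call. A sketch of a call site after the change (the wrapper function is illustrative only):

```rust
fn is_enabled(expander: &Expander, expr: &ast::Expr) -> bool {
    // previously:
    //     let attrs = expander.parse_attrs(expr);
    //     expander.is_cfg_enabled(&attrs)
    expander.is_cfg_enabled(expr)
}
```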
@ -162,8 +162,7 @@ impl ExprCollector<'_> {
|
|||
|
||||
fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
|
||||
let syntax_ptr = AstPtr::new(&expr);
|
||||
let attrs = self.expander.parse_attrs(&expr);
|
||||
if !self.expander.is_cfg_enabled(&attrs) {
|
||||
if !self.expander.is_cfg_enabled(&expr) {
|
||||
return self.missing_expr();
|
||||
}
|
||||
match expr {
|
||||
|
@ -329,8 +328,7 @@ impl ExprCollector<'_> {
|
|||
.fields()
|
||||
.inspect(|field| field_ptrs.push(AstPtr::new(field)))
|
||||
.filter_map(|field| {
|
||||
let attrs = self.expander.parse_attrs(&field);
|
||||
if !self.expander.is_cfg_enabled(&attrs) {
|
||||
if !self.expander.is_cfg_enabled(&field) {
|
||||
return None;
|
||||
}
|
||||
let name = field.field_name()?.as_name();
|
||||
|
@ -575,9 +573,16 @@ impl ExprCollector<'_> {
|
|||
self.body.item_scope.define_def(def);
|
||||
if let Some(name) = name {
|
||||
let vis = crate::visibility::Visibility::Public; // FIXME determine correctly
|
||||
self.body
|
||||
.item_scope
|
||||
.push_res(name.as_name(), crate::per_ns::PerNs::from_def(def, vis));
|
||||
let has_constructor = match def {
|
||||
ModuleDefId::AdtId(AdtId::StructId(s)) => {
|
||||
self.db.struct_data(s).variant_data.kind() != StructKind::Record
|
||||
}
|
||||
_ => true,
|
||||
};
|
||||
self.body.item_scope.push_res(
|
||||
name.as_name(),
|
||||
crate::per_ns::PerNs::from_def(def, vis, has_constructor),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,7 +9,7 @@ use hir_expand::{
|
|||
};
|
||||
use ra_prof::profile;
|
||||
use ra_syntax::ast::{
|
||||
self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, TypeBoundsOwner,
|
||||
self, AssocItem, AstNode, ModuleItemOwner, NameOwner, TypeAscriptionOwner, TypeBoundsOwner,
|
||||
VisibilityOwner,
|
||||
};
|
||||
|
||||
|
@ -164,7 +164,7 @@ impl TraitData {
|
|||
items.extend(collect_items(
|
||||
db,
|
||||
&mut expander,
|
||||
item_list.impl_items(),
|
||||
item_list.assoc_items(),
|
||||
src.file_id,
|
||||
container,
|
||||
));
|
||||
|
@ -219,7 +219,7 @@ impl ImplData {
|
|||
if let Some(item_list) = src.value.item_list() {
|
||||
let mut expander = Expander::new(db, impl_loc.ast_id.file_id, module_id);
|
||||
items.extend(
|
||||
collect_items(db, &mut expander, item_list.impl_items(), src.file_id, container)
|
||||
collect_items(db, &mut expander, item_list.assoc_items(), src.file_id, container)
|
||||
.into_iter()
|
||||
.map(|(_, item)| item),
|
||||
);
|
||||
|
@ -304,7 +304,7 @@ fn collect_items_in_macro(
|
|||
let mut res = collect_items(
|
||||
db,
|
||||
expander,
|
||||
items.value.items().filter_map(|it| ImplItem::cast(it.syntax().clone())),
|
||||
items.value.items().filter_map(|it| AssocItem::cast(it.syntax().clone())),
|
||||
items.file_id,
|
||||
container,
|
||||
);
|
||||
|
@ -325,31 +325,30 @@ fn collect_items_in_macro(
|
|||
fn collect_items(
|
||||
db: &dyn DefDatabase,
|
||||
expander: &mut Expander,
|
||||
impl_items: impl Iterator<Item = ImplItem>,
|
||||
assoc_items: impl Iterator<Item = AssocItem>,
|
||||
file_id: crate::HirFileId,
|
||||
container: AssocContainerId,
|
||||
) -> Vec<(Name, AssocItemId)> {
|
||||
let items = db.ast_id_map(file_id);
|
||||
|
||||
impl_items
|
||||
assoc_items
|
||||
.filter_map(|item_node| match item_node {
|
||||
ast::ImplItem::FnDef(it) => {
|
||||
ast::AssocItem::FnDef(it) => {
|
||||
let name = it.name().map_or_else(Name::missing, |it| it.as_name());
|
||||
let attrs = expander.parse_attrs(&it);
|
||||
if !expander.is_cfg_enabled(&attrs) {
|
||||
if !expander.is_cfg_enabled(&it) {
|
||||
return None;
|
||||
}
|
||||
let def = FunctionLoc { container, ast_id: AstId::new(file_id, items.ast_id(&it)) }
|
||||
.intern(db);
|
||||
Some((name, def.into()))
|
||||
}
|
||||
ast::ImplItem::ConstDef(it) => {
|
||||
ast::AssocItem::ConstDef(it) => {
|
||||
let name = it.name().map_or_else(Name::missing, |it| it.as_name());
|
||||
let def = ConstLoc { container, ast_id: AstId::new(file_id, items.ast_id(&it)) }
|
||||
.intern(db);
|
||||
Some((name, def.into()))
|
||||
}
|
||||
ast::ImplItem::TypeAliasDef(it) => {
|
||||
ast::AssocItem::TypeAliasDef(it) => {
|
||||
let name = it.name().map_or_else(Name::missing, |it| it.as_name());
|
||||
let def =
|
||||
TypeAliasLoc { container, ast_id: AstId::new(file_id, items.ast_id(&it)) }
|
||||
|
|
|
@@ -151,13 +151,20 @@ impl ItemScope {
 }

 impl PerNs {
-    pub(crate) fn from_def(def: ModuleDefId, v: Visibility) -> PerNs {
+    pub(crate) fn from_def(def: ModuleDefId, v: Visibility, has_constructor: bool) -> PerNs {
         match def {
             ModuleDefId::ModuleId(_) => PerNs::types(def, v),
             ModuleDefId::FunctionId(_) => PerNs::values(def, v),
             ModuleDefId::AdtId(adt) => match adt {
-                AdtId::StructId(_) | AdtId::UnionId(_) => PerNs::both(def, def, v),
+                AdtId::UnionId(_) => PerNs::types(def, v),
                 AdtId::EnumId(_) => PerNs::types(def, v),
+                AdtId::StructId(_) => {
+                    if has_constructor {
+                        PerNs::both(def, def, v)
+                    } else {
+                        PerNs::types(def, v)
+                    }
+                }
             },
             ModuleDefId::EnumVariantId(_) => PerNs::both(def, def, v),
             ModuleDefId::ConstId(_) | ModuleDefId::StaticId(_) => PerNs::values(def, v),
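This is the name-resolution side of the fix for issue 4235 (see the `issue_4235_name_conflicts` test further down): a record struct has no value-namespace constructor, so `struct FOO {}` no longer shadows a `static FOO` with the same name. A small, plain-Rust illustration of the situation being modeled, taken from that test:

```rust
// Both items coexist because they live in different namespaces: the record
// struct contributes only a type (no constructor value), the static a value.
struct FOO {}
static FOO: FOO = FOO {};

fn main() {
    let a = &FOO;     // `FOO` here resolves to the static (value namespace)
    let _b: &FOO = a; // `FOO` here resolves to the struct (type namespace)
}
```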
@ -830,7 +830,7 @@ impl ModCollector<'_, '_> {
|
|||
let module = ModuleId { krate: self.def_collector.def_map.krate, local_id: res };
|
||||
let def: ModuleDefId = module.into();
|
||||
self.def_collector.def_map.modules[self.module_id].scope.define_def(def);
|
||||
self.def_collector.update(self.module_id, &[(name, PerNs::from_def(def, vis))], vis);
|
||||
self.def_collector.update(self.module_id, &[(name, PerNs::from_def(def, vis, false))], vis);
|
||||
res
|
||||
}
|
||||
|
||||
|
@ -844,6 +844,8 @@ impl ModCollector<'_, '_> {
|
|||
let name = def.name.clone();
|
||||
let container = ContainerId::ModuleId(module);
|
||||
let vis = &def.visibility;
|
||||
let mut has_constructor = false;
|
||||
|
||||
let def: ModuleDefId = match def.kind {
|
||||
raw::DefKind::Function(ast_id) => FunctionLoc {
|
||||
container: container.into(),
|
||||
|
@ -851,7 +853,8 @@ impl ModCollector<'_, '_> {
|
|||
}
|
||||
.intern(self.def_collector.db)
|
||||
.into(),
|
||||
raw::DefKind::Struct(ast_id) => {
|
||||
raw::DefKind::Struct(ast_id, mode) => {
|
||||
has_constructor = mode != raw::StructDefKind::Record;
|
||||
StructLoc { container, ast_id: AstId::new(self.file_id, ast_id) }
|
||||
.intern(self.def_collector.db)
|
||||
.into()
|
||||
|
@ -894,7 +897,11 @@ impl ModCollector<'_, '_> {
|
|||
.def_map
|
||||
.resolve_visibility(self.def_collector.db, self.module_id, vis)
|
||||
.unwrap_or(Visibility::Public);
|
||||
self.def_collector.update(self.module_id, &[(name, PerNs::from_def(def, vis))], vis)
|
||||
self.def_collector.update(
|
||||
self.module_id,
|
||||
&[(name, PerNs::from_def(def, vis, has_constructor))],
|
||||
vis,
|
||||
)
|
||||
}
|
||||
|
||||
fn collect_derives(&mut self, attrs: &Attrs, def: &raw::DefData) {
|
||||
|
|
|
@@ -155,10 +155,17 @@ pub(super) struct DefData {
     pub(super) visibility: RawVisibility,
 }

+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub(super) enum StructDefKind {
+    Record,
+    Tuple,
+    Unit,
+}
+
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub(super) enum DefKind {
     Function(FileAstId<ast::FnDef>),
-    Struct(FileAstId<ast::StructDef>),
+    Struct(FileAstId<ast::StructDef>, StructDefKind),
     Union(FileAstId<ast::UnionDef>),
     Enum(FileAstId<ast::EnumDef>),
     Const(FileAstId<ast::ConstDef>),
@ -171,7 +178,7 @@ impl DefKind {
|
|||
pub fn ast_id(&self) -> FileAstId<ast::ModuleItem> {
|
||||
match self {
|
||||
DefKind::Function(it) => it.upcast(),
|
||||
DefKind::Struct(it) => it.upcast(),
|
||||
DefKind::Struct(it, _) => it.upcast(),
|
||||
DefKind::Union(it) => it.upcast(),
|
||||
DefKind::Enum(it) => it.upcast(),
|
||||
DefKind::Const(it) => it.upcast(),
|
||||
|
@ -236,9 +243,14 @@ impl RawItemsCollector {
|
|||
return;
|
||||
}
|
||||
ast::ModuleItem::StructDef(it) => {
|
||||
let kind = match it.kind() {
|
||||
ast::StructKind::Record(_) => StructDefKind::Record,
|
||||
ast::StructKind::Tuple(_) => StructDefKind::Tuple,
|
||||
ast::StructKind::Unit => StructDefKind::Unit,
|
||||
};
|
||||
let id = self.source_ast_id_map.ast_id(&it);
|
||||
let name = it.name();
|
||||
(DefKind::Struct(id), name)
|
||||
(DefKind::Struct(id, kind), name)
|
||||
}
|
||||
ast::ModuleItem::UnionDef(it) => {
|
||||
let id = self.source_ast_id_map.ast_id(&it);
|
||||
|
|
|
@ -67,7 +67,7 @@ fn crate_def_map_smoke_test() {
|
|||
⋮Baz: t v
|
||||
⋮E: t
|
||||
⋮EXT: v
|
||||
⋮U: t v
|
||||
⋮U: t
|
||||
⋮ext: v
|
||||
"###)
|
||||
}
|
||||
|
|
|
@ -19,12 +19,12 @@ fn macro_rules_are_globally_visible() {
|
|||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Foo: t v
|
||||
⋮Foo: t
|
||||
⋮nested: t
|
||||
⋮
|
||||
⋮crate::nested
|
||||
⋮Bar: t v
|
||||
⋮Baz: t v
|
||||
⋮Bar: t
|
||||
⋮Baz: t
|
||||
"###);
|
||||
}
|
||||
|
||||
|
@ -91,13 +91,13 @@ fn macro_rules_from_other_crates_are_visible() {
|
|||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
"###);
|
||||
}
|
||||
|
@ -124,13 +124,13 @@ fn macro_rules_export_with_local_inner_macros_are_visible() {
|
|||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
"###);
|
||||
}
|
||||
|
@ -161,13 +161,13 @@ fn local_inner_macros_makes_local_macros_usable() {
|
|||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
⋮
|
||||
⋮crate::bar
|
||||
⋮Bar: t v
|
||||
⋮Foo: t v
|
||||
⋮Bar: t
|
||||
⋮Foo: t
|
||||
⋮bar: t
|
||||
"###);
|
||||
}
|
||||
|
@ -204,7 +204,7 @@ fn unexpanded_macro_should_expand_by_fixedpoint_loop() {
|
|||
);
|
||||
assert_snapshot!(map, @r###"
|
||||
⋮crate
|
||||
⋮Foo: t v
|
||||
⋮Foo: t
|
||||
⋮bar: m
|
||||
⋮foo: m
|
||||
"###);
|
||||
|
|
|
@ -269,7 +269,7 @@ fn test() { S.foo()<|>; }
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn infer_impl_items_generated_by_macros() {
|
||||
fn infer_assoc_items_generated_by_macros() {
|
||||
let t = type_at(
|
||||
r#"
|
||||
//- /main.rs
|
||||
|
@ -288,7 +288,7 @@ fn test() { S.foo()<|>; }
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn infer_impl_items_generated_by_macros_chain() {
|
||||
fn infer_assoc_items_generated_by_macros_chain() {
|
||||
let t = type_at(
|
||||
r#"
|
||||
//- /main.rs
|
||||
|
|
|
@ -534,6 +534,35 @@ fn foo(b: Bar) {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn issue_4235_name_conflicts() {
|
||||
assert_snapshot!(
|
||||
infer(r#"
|
||||
struct FOO {}
|
||||
static FOO:FOO = FOO {};
|
||||
|
||||
impl FOO {
|
||||
fn foo(&self) {}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let a = &FOO;
|
||||
a.foo();
|
||||
}
|
||||
"#), @r###"
|
||||
32..38 'FOO {}': FOO
|
||||
64..68 'self': &FOO
|
||||
70..72 '{}': ()
|
||||
86..120 '{ ...o(); }': ()
|
||||
96..97 'a': &FOO
|
||||
100..104 '&FOO': &FOO
|
||||
101..104 'FOO': FOO
|
||||
110..111 'a': &FOO
|
||||
110..117 'a.foo()': ()
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn issue_4053_diesel_where_clauses() {
|
||||
assert_snapshot!(
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! FIXME: write short doc here
|
||||
|
||||
use ra_assists::{resolved_assists, AssistAction, AssistLabel};
|
||||
use ra_assists::{resolved_assists, AssistAction};
|
||||
use ra_db::{FilePosition, FileRange};
|
||||
use ra_ide_db::RootDatabase;
|
||||
|
||||
|
@ -21,27 +21,22 @@ pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> {
|
|||
.into_iter()
|
||||
.map(|assist| {
|
||||
let file_id = frange.file_id;
|
||||
let assist_label = &assist.label;
|
||||
Assist {
|
||||
id: assist_label.id,
|
||||
label: assist_label.label.clone(),
|
||||
group_label: assist.group_label.map(|it| it.0),
|
||||
source_change: action_to_edit(assist.action, file_id, assist_label),
|
||||
id: assist.label.id,
|
||||
label: assist.label.label.clone(),
|
||||
group_label: assist.label.group.map(|it| it.0),
|
||||
source_change: action_to_edit(assist.action, file_id, assist.label.label.clone()),
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn action_to_edit(
|
||||
action: AssistAction,
|
||||
file_id: FileId,
|
||||
assist_label: &AssistLabel,
|
||||
) -> SourceChange {
|
||||
fn action_to_edit(action: AssistAction, file_id: FileId, label: String) -> SourceChange {
|
||||
let file_id = match action.file {
|
||||
ra_assists::AssistFile::TargetFile(it) => it,
|
||||
_ => file_id,
|
||||
};
|
||||
let file_edit = SourceFileEdit { file_id, edit: action.edit };
|
||||
SourceChange::source_file_edit(assist_label.label.clone(), file_edit)
|
||||
SourceChange::source_file_edit(label, file_edit)
|
||||
.with_cursor_opt(action.cursor_position.map(|offset| FilePosition { offset, file_id }))
|
||||
}
|
||||
|
|
|
@ -84,7 +84,7 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
|
|||
});
|
||||
|
||||
// Iterate assoc types separately
|
||||
ty.iterate_impl_items(ctx.db, krate, |item| {
|
||||
ty.iterate_assoc_items(ctx.db, krate, |item| {
|
||||
if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
|
||||
return None;
|
||||
}
|
||||
|
|
|
@ -32,7 +32,7 @@
|
|||
//! ```
|
||||
|
||||
use hir::{self, Docs, HasSource};
|
||||
use ra_assists::utils::get_missing_impl_items;
|
||||
use ra_assists::utils::get_missing_assoc_items;
|
||||
use ra_syntax::{
|
||||
ast::{self, edit, ImplDef},
|
||||
AstNode, SyntaxKind, SyntaxNode, TextRange, T,
|
||||
|
@ -50,7 +50,7 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
|
|||
if let Some((trigger, impl_def)) = completion_match(ctx) {
|
||||
match trigger.kind() {
|
||||
SyntaxKind::NAME_REF => {
|
||||
get_missing_impl_items(&ctx.sema, &impl_def).iter().for_each(|item| match item {
|
||||
get_missing_assoc_items(&ctx.sema, &impl_def).iter().for_each(|item| match item {
|
||||
hir::AssocItem::Function(fn_item) => {
|
||||
add_function_impl(&trigger, acc, ctx, &fn_item)
|
||||
}
|
||||
|
@ -64,34 +64,40 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
|
|||
}
|
||||
|
||||
SyntaxKind::FN_DEF => {
|
||||
for missing_fn in get_missing_impl_items(&ctx.sema, &impl_def).iter().filter_map(
|
||||
|item| match item {
|
||||
for missing_fn in
|
||||
get_missing_assoc_items(&ctx.sema, &impl_def).iter().filter_map(|item| {
|
||||
match item {
|
||||
hir::AssocItem::Function(fn_item) => Some(fn_item),
|
||||
_ => None,
|
||||
},
|
||||
) {
|
||||
}
|
||||
})
|
||||
{
|
||||
add_function_impl(&trigger, acc, ctx, &missing_fn);
|
||||
}
|
||||
}
|
||||
|
||||
SyntaxKind::TYPE_ALIAS_DEF => {
|
||||
for missing_fn in get_missing_impl_items(&ctx.sema, &impl_def).iter().filter_map(
|
||||
|item| match item {
|
||||
for missing_fn in
|
||||
get_missing_assoc_items(&ctx.sema, &impl_def).iter().filter_map(|item| {
|
||||
match item {
|
||||
hir::AssocItem::TypeAlias(type_item) => Some(type_item),
|
||||
_ => None,
|
||||
},
|
||||
) {
|
||||
}
|
||||
})
|
||||
{
|
||||
add_type_alias_impl(&trigger, acc, ctx, &missing_fn);
|
||||
}
|
||||
}
|
||||
|
||||
SyntaxKind::CONST_DEF => {
|
||||
for missing_fn in get_missing_impl_items(&ctx.sema, &impl_def).iter().filter_map(
|
||||
|item| match item {
|
||||
for missing_fn in
|
||||
get_missing_assoc_items(&ctx.sema, &impl_def).iter().filter_map(|item| {
|
||||
match item {
|
||||
hir::AssocItem::Const(const_item) => Some(const_item),
|
||||
_ => None,
|
||||
},
|
||||
) {
|
||||
}
|
||||
})
|
||||
{
|
||||
add_const_impl(&trigger, acc, ctx, &missing_fn);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,7 +9,7 @@ use ra_syntax::{
|
|||
SyntaxKind::*,
|
||||
SyntaxNode, SyntaxToken, TextRange, TextSize,
|
||||
};
|
||||
use ra_text_edit::AtomTextEdit;
|
||||
use ra_text_edit::Indel;
|
||||
|
||||
use crate::{call_info::ActiveParameter, completion::CompletionConfig, FilePosition};
|
||||
|
||||
|
@ -76,7 +76,7 @@ impl<'a> CompletionContext<'a> {
|
|||
// actual completion.
|
||||
let file_with_fake_ident = {
|
||||
let parse = db.parse(position.file_id);
|
||||
let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
|
||||
let edit = Indel::insert(position.offset, "intellijRulezz".to_string());
|
||||
parse.reparse(&edit).tree()
|
||||
};
|
||||
let fake_ident_token =
|
||||
|
|
|
@ -62,8 +62,8 @@ impl fmt::Debug for CompletionItem {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut s = f.debug_struct("CompletionItem");
|
||||
s.field("label", &self.label()).field("source_range", &self.source_range());
|
||||
if self.text_edit().as_atoms().len() == 1 {
|
||||
let atom = &self.text_edit().as_atoms()[0];
|
||||
if self.text_edit().as_indels().len() == 1 {
|
||||
let atom = &self.text_edit().as_indels()[0];
|
||||
s.field("delete", &atom.delete);
|
||||
s.field("insert", &atom.insert);
|
||||
} else {
|
||||
|
|
|
@ -64,7 +64,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
|
|||
.unwrap_or_else(|| RelativePath::new(""))
|
||||
.join(&d.candidate);
|
||||
let create_file = FileSystemEdit::CreateFile { source_root, path };
|
||||
let fix = SourceChange::file_system_edit("create module", create_file);
|
||||
let fix = SourceChange::file_system_edit("Create module", create_file);
|
||||
res.borrow_mut().push(Diagnostic {
|
||||
range: sema.diagnostics_range(d).range,
|
||||
message: d.message(),
|
||||
|
@ -92,7 +92,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
|
|||
algo::diff(&d.ast(db).syntax(), &field_list.syntax()).into_text_edit(&mut builder);
|
||||
|
||||
Some(SourceChange::source_file_edit_from(
|
||||
"fill struct fields",
|
||||
"Fill struct fields",
|
||||
file_id,
|
||||
builder.finish(),
|
||||
))
|
||||
|
@ -117,7 +117,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
|
|||
let node = d.ast(db);
|
||||
let replacement = format!("Ok({})", node.syntax());
|
||||
let edit = TextEdit::replace(node.syntax().text_range(), replacement);
|
||||
let fix = SourceChange::source_file_edit_from("wrap with ok", file_id, edit);
|
||||
let fix = SourceChange::source_file_edit_from("Wrap with ok", file_id, edit);
|
||||
res.borrow_mut().push(Diagnostic {
|
||||
range: sema.diagnostics_range(d).range,
|
||||
message: d.message(),
|
||||
|
@ -199,7 +199,7 @@ fn check_struct_shorthand_initialization(
|
|||
message: "Shorthand struct initialization".to_string(),
|
||||
severity: Severity::WeakWarning,
|
||||
fix: Some(SourceChange::source_file_edit(
|
||||
"use struct shorthand initialization",
|
||||
"Use struct shorthand initialization",
|
||||
SourceFileEdit { file_id, edit },
|
||||
)),
|
||||
});
|
||||
|
@ -241,7 +241,11 @@ mod tests {
|
|||
diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before));
|
||||
let mut fix = diagnostic.fix.unwrap();
|
||||
let edit = fix.source_file_edits.pop().unwrap().edit;
|
||||
let actual = edit.apply(&before);
|
||||
let actual = {
|
||||
let mut actual = before.to_string();
|
||||
edit.apply(&mut actual);
|
||||
actual
|
||||
};
|
||||
assert_eq_text!(after, &actual);
|
||||
}
|
||||
|
||||
|
@ -256,7 +260,11 @@ mod tests {
|
|||
let mut fix = diagnostic.fix.unwrap();
|
||||
let edit = fix.source_file_edits.pop().unwrap().edit;
|
||||
let target_file_contents = analysis.file_text(file_position.file_id).unwrap();
|
||||
let actual = edit.apply(&target_file_contents);
|
||||
let actual = {
|
||||
let mut actual = target_file_contents.to_string();
|
||||
edit.apply(&mut actual);
|
||||
actual
|
||||
};
|
||||
|
||||
// Strip indent and empty lines from `after`, to match the behaviour of
|
||||
// `parse_fixture` called from `analysis_and_position`.
|
||||
|
@ -288,7 +296,11 @@ mod tests {
|
|||
let diagnostic = analysis.diagnostics(file_id).unwrap().pop().unwrap();
|
||||
let mut fix = diagnostic.fix.unwrap();
|
||||
let edit = fix.source_file_edits.pop().unwrap().edit;
|
||||
let actual = edit.apply(&before);
|
||||
let actual = {
|
||||
let mut actual = before.to_string();
|
||||
edit.apply(&mut actual);
|
||||
actual
|
||||
};
|
||||
assert_eq_text!(after, &actual);
|
||||
}
|
||||
|
||||
|
@ -606,7 +618,7 @@ mod tests {
|
|||
range: 0..8,
|
||||
fix: Some(
|
||||
SourceChange {
|
||||
label: "create module",
|
||||
label: "Create module",
|
||||
source_file_edits: [],
|
||||
file_system_edits: [
|
||||
CreateFile {
|
||||
|
@ -655,17 +667,17 @@ mod tests {
|
|||
range: 224..233,
|
||||
fix: Some(
|
||||
SourceChange {
|
||||
label: "fill struct fields",
|
||||
label: "Fill struct fields",
|
||||
source_file_edits: [
|
||||
SourceFileEdit {
|
||||
file_id: FileId(
|
||||
1,
|
||||
),
|
||||
edit: TextEdit {
|
||||
atoms: [
|
||||
AtomTextEdit {
|
||||
delete: 3..9,
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "{a:42, b: ()}",
|
||||
delete: 3..9,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
|
@ -569,7 +569,11 @@ fn foo() {
|
|||
let (sel, before) = extract_range(before);
|
||||
let parse = SourceFile::parse(&before);
|
||||
let result = join_lines(&parse.tree(), sel);
|
||||
let actual = result.apply(&before);
|
||||
let actual = {
|
||||
let mut actual = before.to_string();
|
||||
result.apply(&mut actual);
|
||||
actual
|
||||
};
|
||||
assert_eq_text!(after, &actual);
|
||||
}
|
||||
|
||||
|
|
|
@ -122,7 +122,7 @@ fn rename_mod(
|
|||
source_file_edits.extend(ref_edits);
|
||||
}
|
||||
|
||||
Some(SourceChange::from_edits("rename", source_file_edits, file_system_edits))
|
||||
Some(SourceChange::from_edits("Rename", source_file_edits, file_system_edits))
|
||||
}
|
||||
|
||||
fn rename_reference(
|
||||
|
@ -141,7 +141,7 @@ fn rename_reference(
|
|||
return None;
|
||||
}
|
||||
|
||||
Some(RangeInfo::new(range, SourceChange::source_file_edits("rename", edit)))
|
||||
Some(RangeInfo::new(range, SourceChange::source_file_edits("Rename", edit)))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -530,17 +530,17 @@ mod tests {
|
|||
RangeInfo {
|
||||
range: 4..7,
|
||||
info: SourceChange {
|
||||
label: "rename",
|
||||
label: "Rename",
|
||||
source_file_edits: [
|
||||
SourceFileEdit {
|
||||
file_id: FileId(
|
||||
2,
|
||||
),
|
||||
edit: TextEdit {
|
||||
atoms: [
|
||||
AtomTextEdit {
|
||||
delete: 4..7,
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 4..7,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -582,17 +582,17 @@ mod tests {
|
|||
RangeInfo {
|
||||
range: 4..7,
|
||||
info: SourceChange {
|
||||
label: "rename",
|
||||
label: "Rename",
|
||||
source_file_edits: [
|
||||
SourceFileEdit {
|
||||
file_id: FileId(
|
||||
1,
|
||||
),
|
||||
edit: TextEdit {
|
||||
atoms: [
|
||||
AtomTextEdit {
|
||||
delete: 4..7,
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 4..7,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -665,17 +665,17 @@ mod tests {
|
|||
RangeInfo {
|
||||
range: 8..11,
|
||||
info: SourceChange {
|
||||
label: "rename",
|
||||
label: "Rename",
|
||||
source_file_edits: [
|
||||
SourceFileEdit {
|
||||
file_id: FileId(
|
||||
2,
|
||||
),
|
||||
edit: TextEdit {
|
||||
atoms: [
|
||||
AtomTextEdit {
|
||||
delete: 8..11,
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 8..11,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -685,10 +685,10 @@ mod tests {
|
|||
1,
|
||||
),
|
||||
edit: TextEdit {
|
||||
atoms: [
|
||||
AtomTextEdit {
|
||||
delete: 27..30,
|
||||
indels: [
|
||||
Indel {
|
||||
insert: "foo2",
|
||||
delete: 27..30,
|
||||
},
|
||||
],
|
||||
},
|
||||
|
@ -720,13 +720,13 @@ mod tests {
|
|||
if let Some(change) = source_change {
|
||||
for edit in change.info.source_file_edits {
|
||||
file_id = Some(edit.file_id);
|
||||
for atom in edit.edit.as_atoms() {
|
||||
text_edit_builder.replace(atom.delete, atom.insert.clone());
|
||||
for indel in edit.edit.as_indels() {
|
||||
text_edit_builder.replace(indel.delete, indel.insert.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
let result =
|
||||
text_edit_builder.finish().apply(&*analysis.file_text(file_id.unwrap()).unwrap());
|
||||
let mut result = analysis.file_text(file_id.unwrap()).unwrap().to_string();
|
||||
text_edit_builder.finish().apply(&mut result);
|
||||
assert_eq_text!(expected, &*result);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ use ra_syntax::{
|
|||
};
|
||||
|
||||
use crate::FileId;
|
||||
use ast::DocCommentsOwner;
|
||||
use std::fmt::Display;
|
||||
|
||||
#[derive(Debug)]
|
||||
|
@@ -37,6 +38,7 @@ pub enum RunnableKind {
     Test { test_id: TestId, attr: TestAttr },
     TestMod { path: String },
     Bench { test_id: TestId },
+    DocTest { test_id: TestId },
     Bin,
 }

@@ -81,6 +83,8 @@ fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> {
         RunnableKind::Test { test_id, attr }
     } else if fn_def.has_atom_attr("bench") {
         RunnableKind::Bench { test_id }
+    } else if has_doc_test(&fn_def) {
+        RunnableKind::DocTest { test_id }
     } else {
         return None;
     }
@@ -117,6 +121,10 @@ fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool {
         .any(|attribute_text| attribute_text.contains("test"))
 }

+fn has_doc_test(fn_def: &ast::FnDef) -> bool {
+    fn_def.doc_comment_text().map_or(false, |comment| comment.contains("```"))
+}
+
 fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
     let has_test_function = module
         .item_list()?
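`has_doc_test` flags any function whose doc comment contains a fenced code block, and `runnable_fn` above turns that into a `DocTest` runnable. Mirroring the fixture in the test added further down:

````rust
/// ```
/// let x = 5;
/// ```
fn foo() {}  // doc comment contains "```", so this becomes RunnableKind::DocTest

fn main() {} // no doc test: still just the plain Bin runnable
````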
@ -194,6 +202,41 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_runnables_doc_test() {
|
||||
let (analysis, pos) = analysis_and_position(
|
||||
r#"
|
||||
//- /lib.rs
|
||||
<|> //empty
|
||||
fn main() {}
|
||||
|
||||
/// ```
|
||||
/// let x = 5;
|
||||
/// ```
|
||||
fn foo() {}
|
||||
"#,
|
||||
);
|
||||
let runnables = analysis.runnables(pos.file_id).unwrap();
|
||||
assert_debug_snapshot!(&runnables,
|
||||
@r###"
|
||||
[
|
||||
Runnable {
|
||||
range: 1..21,
|
||||
kind: Bin,
|
||||
},
|
||||
Runnable {
|
||||
range: 22..64,
|
||||
kind: DocTest {
|
||||
test_id: Path(
|
||||
"foo",
|
||||
),
|
||||
},
|
||||
},
|
||||
]
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_runnables_module() {
|
||||
let (analysis, pos) = analysis_and_position(
|
||||
|
|
|
@@ -35,8 +35,10 @@ impl SourceChange {
     /// Creates a new SourceChange with the given label,
     /// containing only the given `SourceFileEdits`.
     pub(crate) fn source_file_edits<L: Into<String>>(label: L, edits: Vec<SourceFileEdit>) -> Self {
+        let label = label.into();
+        assert!(label.starts_with(char::is_uppercase));
         SourceChange {
-            label: label.into(),
+            label: label,
             source_file_edits: edits,
             file_system_edits: vec![],
             cursor_position: None,
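The new assert enforces capitalized labels, which is why labels elsewhere in this diff change from "rename", "create module", and "fill struct fields" to "Rename", "Create module", and "Fill struct fields". A tiny sketch of the invariant:

```rust
// Fine: the label starts with an uppercase letter.
let _ok = SourceChange::source_file_edits("Rename", Vec::new());
// SourceChange::source_file_edits("rename", Vec::new()) would now panic on the assert.
```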
@ -401,6 +401,7 @@ fn render_replace(
|
|||
ignored_comments: &Vec<Comment>,
|
||||
template: &SsrTemplate,
|
||||
) -> String {
|
||||
let edit = {
|
||||
let mut builder = TextEditBuilder::default();
|
||||
for element in template.template.descendants() {
|
||||
if let Some(var) = template.placeholders.get(&element) {
|
||||
|
@ -410,7 +411,12 @@ fn render_replace(
|
|||
for comment in ignored_comments {
|
||||
builder.insert(template.template.text_range().end(), comment.syntax().to_string())
|
||||
}
|
||||
builder.finish().apply(&template.template.text().to_string())
|
||||
builder.finish()
|
||||
};
|
||||
|
||||
let mut text = template.template.text().to_string();
|
||||
edit.apply(&mut text);
|
||||
text
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -505,7 +511,9 @@ mod tests {
|
|||
);
|
||||
|
||||
let edit = replace(&matches, &query.template);
|
||||
assert_eq!(edit.apply(input), "fn main() { bar(1+2); }");
|
||||
let mut after = input.to_string();
|
||||
edit.apply(&mut after);
|
||||
assert_eq!(after, "fn main() { bar(1+2); }");
|
||||
}
|
||||
|
||||
fn assert_ssr_transform(query: &str, input: &str, result: &str) {
|
||||
|
@ -513,7 +521,9 @@ mod tests {
|
|||
let code = SourceFile::parse(input).tree();
|
||||
let matches = find(&query.pattern, code.syntax());
|
||||
let edit = replace(&matches, &query.template);
|
||||
assert_eq!(edit.apply(input), result);
|
||||
let mut after = input.to_string();
|
||||
edit.apply(&mut after);
|
||||
assert_eq!(after, result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -13,7 +13,11 @@ pub fn check_action<F: Fn(&SourceFile, TextSize) -> Option<TextEdit>>(
|
|||
let (before_cursor_pos, before) = extract_offset(before);
|
||||
let file = SourceFile::parse(&before).ok().unwrap();
|
||||
let result = f(&file, before_cursor_pos).expect("code action is not applicable");
|
||||
let actual = result.apply(&before);
|
||||
let actual = {
|
||||
let mut actual = before.to_string();
|
||||
result.apply(&mut actual);
|
||||
actual
|
||||
};
|
||||
let actual_cursor_pos =
|
||||
result.apply_to_offset(before_cursor_pos).expect("cursor position is affected by the edit");
|
||||
let actual = add_cursor(&actual, actual_cursor_pos);
|
||||
|
|
|
@ -142,10 +142,13 @@ mod tests {
|
|||
fn do_type_char(char_typed: char, before: &str) -> Option<(String, SingleFileChange)> {
|
||||
let (offset, before) = extract_offset(before);
|
||||
let edit = TextEdit::insert(offset, char_typed.to_string());
|
||||
let before = edit.apply(&before);
|
||||
let mut before = before.to_string();
|
||||
edit.apply(&mut before);
|
||||
let parse = SourceFile::parse(&before);
|
||||
on_char_typed_inner(&parse.tree(), offset, char_typed)
|
||||
.map(|it| (it.edit.apply(&before), it))
|
||||
on_char_typed_inner(&parse.tree(), offset, char_typed).map(|it| {
|
||||
it.edit.apply(&mut before);
|
||||
(before.to_string(), it)
|
||||
})
|
||||
}
|
||||
|
||||
fn type_char(char_typed: char, before: &str, after: &str) {
|
||||
|
|
|
@ -44,7 +44,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
|
|||
|
||||
Some(
|
||||
SourceChange::source_file_edit(
|
||||
"on enter",
|
||||
"On enter",
|
||||
SourceFileEdit { edit, file_id: position.file_id },
|
||||
)
|
||||
.with_cursor(FilePosition { offset: cursor_position, file_id: position.file_id }),
|
||||
|
@ -96,7 +96,8 @@ mod tests {
|
|||
let result = analysis.on_enter(FilePosition { offset, file_id }).unwrap()?;
|
||||
|
||||
assert_eq!(result.source_file_edits.len(), 1);
|
||||
let actual = result.source_file_edits[0].edit.apply(&before);
|
||||
let mut actual = before.to_string();
|
||||
result.source_file_edits[0].edit.apply(&mut actual);
|
||||
let actual = add_cursor(&actual, result.cursor_position.unwrap().offset);
|
||||
Some(actual)
|
||||
}
|
||||
|
|
|
@@ -31,9 +31,19 @@ pub(crate) struct Utf16Char {
 }

 impl Utf16Char {
+    /// Returns the length in 8-bit UTF-8 code units.
     fn len(&self) -> TextSize {
         self.end - self.start
     }
+
+    /// Returns the length in 16-bit UTF-16 code units.
+    fn len_utf16(&self) -> usize {
+        if self.len() == TextSize::from(4) {
+            2
+        } else {
+            1
+        }
+    }
 }

 impl LineIndex {
@@ -110,7 +120,7 @@ impl LineIndex {
         if let Some(utf16_chars) = self.utf16_lines.get(&line) {
             for c in utf16_chars {
                 if c.end <= col {
-                    res -= usize::from(c.len()) - 1;
+                    res -= usize::from(c.len()) - c.len_utf16();
                 } else {
                     // From here on, all utf16 characters come *after* the character we are mapping,
                     // so we don't need to take them into account
@@ -125,7 +135,7 @@ impl LineIndex {
         if let Some(utf16_chars) = self.utf16_lines.get(&line) {
             for c in utf16_chars {
                 if col > u32::from(c.start) {
-                    col += u32::from(c.len()) - 1;
+                    col += u32::from(c.len()) - c.len_utf16() as u32;
                 } else {
                     // From here on, all utf16 characters come *after* the character we are mapping,
                     // so we don't need to take them into account
@@ -204,6 +214,9 @@ const C: char = 'メ';

     // UTF-16 to UTF-8
     assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
+
+    let col_index = LineIndex::new("a𐐏b");
+    assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
 }

 #[test]
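The new `len_utf16` makes the column math account for surrogate pairs: a character such as `𐐏` (U+1040F) is 4 UTF-8 bytes but 2 UTF-16 units, so the per-character correction is `len() - len_utf16()` instead of the old hard-coded `len() - 1`. Worked through for the `"a𐐏b"` case from the test above:

```rust
// 'a' is 1 UTF-8 byte / 1 UTF-16 unit, '𐐏' is 4 bytes / 2 units, 'b' is 1 / 1.
let col_index = LineIndex::new("a𐐏b");
// UTF-16 column 3 points at 'b'. The '𐐏' lies before it, so the column grows by
// len() - len_utf16() = 4 - 2 = 2, giving UTF-8 offset 5.
// (The old formula added len() - 1 = 3 and produced 6, one byte past 'b'.)
assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
```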
@ -10,7 +10,7 @@
|
|||
use std::convert::TryInto;
|
||||
|
||||
use ra_syntax::{TextRange, TextSize};
|
||||
use ra_text_edit::{AtomTextEdit, TextEdit};
|
||||
use ra_text_edit::{Indel, TextEdit};
|
||||
|
||||
use crate::line_index::{LineCol, LineIndex, Utf16Char};
|
||||
|
||||
|
@ -182,14 +182,14 @@ struct TranslatedEdit<'a> {
|
|||
}
|
||||
|
||||
struct Edits<'a> {
|
||||
edits: &'a [AtomTextEdit],
|
||||
edits: &'a [Indel],
|
||||
current: Option<TranslatedEdit<'a>>,
|
||||
acc_diff: i64,
|
||||
}
|
||||
|
||||
impl<'a> Edits<'a> {
|
||||
fn from_text_edit(text_edit: &'a TextEdit) -> Edits<'a> {
|
||||
let mut x = Edits { edits: text_edit.as_atoms(), current: None, acc_diff: 0 };
|
||||
let mut x = Edits { edits: text_edit.as_indels(), current: None, acc_diff: 0 };
|
||||
x.advance_edit();
|
||||
x
|
||||
}
|
||||
|
|
|
@ -63,7 +63,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
|
|||
// * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_items
|
||||
//
|
||||
// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
|
||||
// * ImplItems(SmallVec<[ast::ImplItem; 1]>)
|
||||
// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
|
||||
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
|
||||
|
||||
pub fn token_tree_to_syntax_node(
|
||||
|
|
|
@ -56,6 +56,9 @@ pub struct CargoConfig {
|
|||
|
||||
/// Runs cargo check on launch to figure out the correct values of OUT_DIR
|
||||
pub load_out_dirs_from_check: bool,
|
||||
|
||||
/// rustc target
|
||||
pub target: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for CargoConfig {
|
||||
|
@ -65,6 +68,7 @@ impl Default for CargoConfig {
|
|||
all_features: true,
|
||||
features: Vec::new(),
|
||||
load_out_dirs_from_check: false,
|
||||
target: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -160,6 +164,9 @@ impl CargoWorkspace {
|
|||
if let Some(parent) = cargo_toml.parent() {
|
||||
meta.current_dir(parent);
|
||||
}
|
||||
if let Some(target) = cargo_features.target.as_ref() {
|
||||
meta.other_options(&[String::from("--filter-platform"), target.clone()]);
|
||||
}
|
||||
let meta = meta.exec().with_context(|| {
|
||||
format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display())
|
||||
})?;
|
||||
|
|
|
@@ -543,7 +543,7 @@ impl ProjectWorkspace {
     }
 }

-pub fn get_rustc_cfg_options() -> CfgOptions {
+pub fn get_rustc_cfg_options(target: Option<&String>) -> CfgOptions {
     let mut cfg_options = CfgOptions::default();

     // Some nightly-only cfgs, which are required for stdlib
@@ -558,10 +558,12 @@ pub fn get_rustc_cfg_options() -> CfgOptions {

     match (|| -> Result<String> {
         // `cfg(test)` and `cfg(debug_assertion)` are handled outside, so we suppress them here.
-        let output = Command::new("rustc")
-            .args(&["--print", "cfg", "-O"])
-            .output()
-            .context("Failed to get output from rustc --print cfg -O")?;
+        let mut cmd = Command::new("rustc");
+        cmd.args(&["--print", "cfg", "-O"]);
+        if let Some(target) = target {
+            cmd.args(&["--target", target.as_str()]);
+        }
+        let output = cmd.output().context("Failed to get output from rustc --print cfg -O")?;
         if !output.status.success() {
             bail!(
                 "rustc --print cfg -O exited with exit code ({})",
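With the new `target` parameter, the cfg probe forwards `--target` to rustc, so the cfg set matches the configured compilation target. A self-contained sketch of the resulting command (the target triple in the comment is only an example):

```rust
use std::process::Command;

fn rustc_cfg_command(target: Option<&String>) -> Command {
    // e.g. `rustc --print cfg -O --target x86_64-unknown-linux-gnu`
    let mut cmd = Command::new("rustc");
    cmd.args(&["--print", "cfg", "-O"]);
    if let Some(target) = target {
        cmd.args(&["--target", target.as_str()]);
    }
    cmd
}
```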
@ -79,7 +79,7 @@ where
|
|||
|
||||
impl ast::ItemList {
|
||||
#[must_use]
|
||||
pub fn append_items(&self, items: impl IntoIterator<Item = ast::ImplItem>) -> ast::ItemList {
|
||||
pub fn append_items(&self, items: impl IntoIterator<Item = ast::AssocItem>) -> ast::ItemList {
|
||||
let mut res = self.clone();
|
||||
if !self.syntax().text().contains_char('\n') {
|
||||
res = make_multiline(res);
|
||||
|
@ -89,8 +89,8 @@ impl ast::ItemList {
|
|||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn append_item(&self, item: ast::ImplItem) -> ast::ItemList {
|
||||
let (indent, position) = match self.impl_items().last() {
|
||||
pub fn append_item(&self, item: ast::AssocItem) -> ast::ItemList {
|
||||
let (indent, position) = match self.assoc_items().last() {
|
||||
Some(it) => (
|
||||
leading_indent(it.syntax()).unwrap_or_default().to_string(),
|
||||
InsertPosition::After(it.syntax().clone().into()),
|
||||
|
|
|
@ -196,7 +196,7 @@ pub struct ItemList {
impl ast::ModuleItemOwner for ItemList {}
impl ItemList {
pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
pub fn impl_items(&self) -> AstChildren<ImplItem> { support::children(&self.syntax) }
pub fn assoc_items(&self) -> AstChildren<AssocItem> { support::children(&self.syntax) }
pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
}

@ -1429,13 +1429,13 @@ impl ast::AttrsOwner for ModuleItem {}
impl ast::VisibilityOwner for ModuleItem {}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ImplItem {
pub enum AssocItem {
FnDef(FnDef),
TypeAliasDef(TypeAliasDef),
ConstDef(ConstDef),
}
impl ast::NameOwner for ImplItem {}
impl ast::AttrsOwner for ImplItem {}
impl ast::NameOwner for AssocItem {}
impl ast::AttrsOwner for AssocItem {}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ExternItem {

@ -3167,16 +3167,16 @@ impl AstNode for ModuleItem {
}
}
}
impl From<FnDef> for ImplItem {
fn from(node: FnDef) -> ImplItem { ImplItem::FnDef(node) }
impl From<FnDef> for AssocItem {
fn from(node: FnDef) -> AssocItem { AssocItem::FnDef(node) }
}
impl From<TypeAliasDef> for ImplItem {
fn from(node: TypeAliasDef) -> ImplItem { ImplItem::TypeAliasDef(node) }
impl From<TypeAliasDef> for AssocItem {
fn from(node: TypeAliasDef) -> AssocItem { AssocItem::TypeAliasDef(node) }
}
impl From<ConstDef> for ImplItem {
fn from(node: ConstDef) -> ImplItem { ImplItem::ConstDef(node) }
impl From<ConstDef> for AssocItem {
fn from(node: ConstDef) -> AssocItem { AssocItem::ConstDef(node) }
}
impl AstNode for ImplItem {
impl AstNode for AssocItem {
fn can_cast(kind: SyntaxKind) -> bool {
match kind {
FN_DEF | TYPE_ALIAS_DEF | CONST_DEF => true,

@ -3185,18 +3185,18 @@ impl AstNode for ImplItem {
}
fn cast(syntax: SyntaxNode) -> Option<Self> {
let res = match syntax.kind() {
FN_DEF => ImplItem::FnDef(FnDef { syntax }),
TYPE_ALIAS_DEF => ImplItem::TypeAliasDef(TypeAliasDef { syntax }),
CONST_DEF => ImplItem::ConstDef(ConstDef { syntax }),
FN_DEF => AssocItem::FnDef(FnDef { syntax }),
TYPE_ALIAS_DEF => AssocItem::TypeAliasDef(TypeAliasDef { syntax }),
CONST_DEF => AssocItem::ConstDef(ConstDef { syntax }),
_ => return None,
};
Some(res)
}
fn syntax(&self) -> &SyntaxNode {
match self {
ImplItem::FnDef(it) => &it.syntax,
ImplItem::TypeAliasDef(it) => &it.syntax,
ImplItem::ConstDef(it) => &it.syntax,
AssocItem::FnDef(it) => &it.syntax,
AssocItem::TypeAliasDef(it) => &it.syntax,
AssocItem::ConstDef(it) => &it.syntax,
}
}
}

@ -3641,7 +3641,7 @@ impl std::fmt::Display for ModuleItem {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ImplItem {
impl std::fmt::Display for AssocItem {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
@ -5,7 +5,7 @@ use std::{
str::{self, FromStr},
};

use ra_text_edit::AtomTextEdit;
use ra_text_edit::Indel;

use crate::{validation, AstNode, SourceFile, TextRange};

@ -22,7 +22,7 @@ pub fn check_parser(text: &str) {
#[derive(Debug, Clone)]
pub struct CheckReparse {
text: String,
edit: AtomTextEdit,
edit: Indel,
edited_text: String,
}

@ -43,7 +43,7 @@ impl CheckReparse {
TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
let edited_text =
format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
let edit = AtomTextEdit { delete, insert };
let edit = Indel { delete, insert };
Some(CheckReparse { text, edit, edited_text })
}
@ -39,7 +39,7 @@ pub mod fuzz;

use std::{marker::PhantomData, sync::Arc};

use ra_text_edit::AtomTextEdit;
use ra_text_edit::Indel;
use stdx::format_to;

use crate::syntax_node::GreenNode;

@ -126,13 +126,13 @@ impl Parse<SourceFile> {
buf
}

pub fn reparse(&self, edit: &AtomTextEdit) -> Parse<SourceFile> {
self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
pub fn reparse(&self, indel: &Indel) -> Parse<SourceFile> {
self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel))
}

fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<Parse<SourceFile>> {
fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
// FIXME: validation errors are not handled here
parsing::incremental_reparse(self.tree().syntax(), edit, self.errors.to_vec()).map(
parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
|(green_node, errors, _reparsed_range)| Parse {
green: green_node,
errors: Arc::new(errors),

@ -141,8 +141,9 @@ impl Parse<SourceFile> {
)
}

fn full_reparse(&self, edit: &AtomTextEdit) -> Parse<SourceFile> {
let text = edit.apply(self.tree().syntax().text().to_string());
fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
let mut text = self.tree().syntax().text().to_string();
indel.apply(&mut text);
SourceFile::parse(&text)
}
}
@ -7,7 +7,7 @@
//! and try to parse only this block.

use ra_parser::Reparser;
use ra_text_edit::AtomTextEdit;
use ra_text_edit::Indel;

use crate::{
algo,

@ -24,7 +24,7 @@ use crate::{

pub(crate) fn incremental_reparse(
node: &SyntaxNode,
edit: &AtomTextEdit,
edit: &Indel,
errors: Vec<SyntaxError>,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {

@ -39,7 +39,7 @@ pub(crate) fn incremental_reparse(

fn reparse_token<'node>(
root: &'node SyntaxNode,
edit: &AtomTextEdit,
edit: &Indel,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
let prev_token_kind = prev_token.kind();

@ -88,7 +88,7 @@ fn reparse_token<'node>(

fn reparse_block<'node>(
root: &'node SyntaxNode,
edit: &AtomTextEdit,
edit: &Indel,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let (node, reparser) = find_reparsable_node(root, edit.delete)?;
let text = get_text_after_edit(node.clone().into(), edit);

@ -108,15 +108,15 @@ fn reparse_block<'node>(
Some((node.replace_with(green), new_parser_errors, node.text_range()))
}

fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String {
let edit =
AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone());
fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String {
let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone());

let text = match element {
let mut text = match element {
NodeOrToken::Token(token) => token.text().to_string(),
NodeOrToken::Node(node) => node.text().to_string(),
};
edit.apply(text)
edit.apply(&mut text);
text
}

fn is_contextual_kw(text: &str) -> bool {

@ -167,7 +167,7 @@ fn merge_errors(
old_errors: Vec<SyntaxError>,
new_errors: Vec<SyntaxError>,
range_before_reparse: TextRange,
edit: &AtomTextEdit,
edit: &Indel,
) -> Vec<SyntaxError> {
let mut res = Vec::new();

@ -198,8 +198,12 @@ mod tests {

fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
let (range, before) = extract_range(before);
let edit = AtomTextEdit::replace(range, replace_with.to_owned());
let after = edit.apply(before.clone());
let edit = Indel::replace(range, replace_with.to_owned());
let after = {
let mut after = before.clone();
edit.apply(&mut after);
after
};

let fully_reparsed = SourceFile::parse(&after);
let incrementally_reparsed: Parse<SourceFile> = {
@ -1,36 +1,144 @@
//! FIXME: write short doc here

mod text_edit;
//! Representation of a `TextEdit`.
//!
//! `rust-analyzer` never mutates text itself and only sends diffs to clients,
//! so `TextEdit` is the ultimate representation of the work done by
//! rust-analyzer.

use text_size::{TextRange, TextSize};

pub use crate::text_edit::{TextEdit, TextEditBuilder};

/// Must not overlap with other `AtomTextEdit`s
/// `InsertDelete` -- a single "atomic" change to text
///
/// Must not overlap with other `InDel`s
#[derive(Debug, Clone)]
pub struct AtomTextEdit {
pub struct Indel {
pub insert: String,
/// Refers to offsets in the original text
pub delete: TextRange,
pub insert: String,
}

impl AtomTextEdit {
pub fn replace(range: TextRange, replace_with: String) -> AtomTextEdit {
AtomTextEdit { delete: range, insert: replace_with }
#[derive(Debug, Clone)]
pub struct TextEdit {
indels: Vec<Indel>,
}

#[derive(Debug, Default)]
pub struct TextEditBuilder {
indels: Vec<Indel>,
}

impl Indel {
pub fn insert(offset: TextSize, text: String) -> Indel {
Indel::replace(TextRange::empty(offset), text)
}
pub fn delete(range: TextRange) -> Indel {
Indel::replace(range, String::new())
}
pub fn replace(range: TextRange, replace_with: String) -> Indel {
Indel { delete: range, insert: replace_with }
}

pub fn delete(range: TextRange) -> AtomTextEdit {
AtomTextEdit::replace(range, String::new())
}

pub fn insert(offset: TextSize, text: String) -> AtomTextEdit {
AtomTextEdit::replace(TextRange::empty(offset), text)
}

pub fn apply(&self, mut text: String) -> String {
pub fn apply(&self, text: &mut String) {
let start: usize = self.delete.start().into();
let end: usize = self.delete.end().into();
text.replace_range(start..end, &self.insert);
text
}
}

impl TextEdit {
pub fn insert(offset: TextSize, text: String) -> TextEdit {
let mut builder = TextEditBuilder::default();
builder.insert(offset, text);
builder.finish()
}

pub fn delete(range: TextRange) -> TextEdit {
let mut builder = TextEditBuilder::default();
builder.delete(range);
builder.finish()
}

pub fn replace(range: TextRange, replace_with: String) -> TextEdit {
let mut builder = TextEditBuilder::default();
builder.replace(range, replace_with);
builder.finish()
}

pub(crate) fn from_indels(mut indels: Vec<Indel>) -> TextEdit {
indels.sort_by_key(|a| (a.delete.start(), a.delete.end()));
for (a1, a2) in indels.iter().zip(indels.iter().skip(1)) {
assert!(a1.delete.end() <= a2.delete.start())
}
TextEdit { indels }
}

pub fn as_indels(&self) -> &[Indel] {
&self.indels
}

pub fn apply(&self, text: &mut String) {
match self.indels.len() {
0 => return,
1 => {
self.indels[0].apply(text);
return;
}
_ => (),
}

let mut total_len = TextSize::of(&*text);
for indel in self.indels.iter() {
total_len += TextSize::of(&indel.insert);
total_len -= indel.delete.end() - indel.delete.start();
}
let mut buf = String::with_capacity(total_len.into());
let mut prev = 0;
for indel in self.indels.iter() {
let start: usize = indel.delete.start().into();
let end: usize = indel.delete.end().into();
if start > prev {
buf.push_str(&text[prev..start]);
}
buf.push_str(&indel.insert);
prev = end;
}
buf.push_str(&text[prev..text.len()]);
assert_eq!(TextSize::of(&buf), total_len);

// FIXME: figure out a way to mutate the text in-place or reuse the
// memory in some other way
*text = buf
}

pub fn apply_to_offset(&self, offset: TextSize) -> Option<TextSize> {
let mut res = offset;
for indel in self.indels.iter() {
if indel.delete.start() >= offset {
break;
}
if offset < indel.delete.end() {
return None;
}
res += TextSize::of(&indel.insert);
res -= indel.delete.len();
}
Some(res)
}
}

impl TextEditBuilder {
pub fn replace(&mut self, range: TextRange, replace_with: String) {
self.indels.push(Indel::replace(range, replace_with))
}
pub fn delete(&mut self, range: TextRange) {
self.indels.push(Indel::delete(range))
}
pub fn insert(&mut self, offset: TextSize, text: String) {
self.indels.push(Indel::insert(offset, text))
}
pub fn finish(self) -> TextEdit {
TextEdit::from_indels(self.indels)
}
pub fn invalidates_offset(&self, offset: TextSize) -> bool {
self.indels.iter().any(|indel| indel.delete.contains_inclusive(offset))
}
}
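For orientation, here is a small usage sketch of the API introduced above, assuming the `text-size` crate for `TextRange`/`TextSize`; the concrete offsets and strings are illustrative only and are not taken from the patch.

use ra_text_edit::{Indel, TextEdit, TextEditBuilder};
use text_size::{TextRange, TextSize};

fn main() {
    // A single insert/delete pair ("indel") applied in place.
    let mut text = String::from("hello world");
    let range = TextRange::new(TextSize::from(0u32), TextSize::from(5u32));
    Indel::replace(range, "goodbye".to_string()).apply(&mut text);
    assert_eq!(text, "goodbye world");

    // Several non-overlapping indels batched into a TextEdit and applied at once.
    let mut builder = TextEditBuilder::default();
    builder.insert(TextSize::from(13u32), "!".to_string());
    builder.delete(TextRange::new(TextSize::from(0u32), TextSize::from(8u32)));
    let edit: TextEdit = builder.finish();
    edit.apply(&mut text);
    assert_eq!(text, "world!");
}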
@ -1,102 +0,0 @@
//! FIXME: write short doc here

use crate::AtomTextEdit;

use text_size::{TextRange, TextSize};

#[derive(Debug, Clone)]
pub struct TextEdit {
atoms: Vec<AtomTextEdit>,
}

#[derive(Debug, Default)]
pub struct TextEditBuilder {
atoms: Vec<AtomTextEdit>,
}

impl TextEditBuilder {
pub fn replace(&mut self, range: TextRange, replace_with: String) {
self.atoms.push(AtomTextEdit::replace(range, replace_with))
}
pub fn delete(&mut self, range: TextRange) {
self.atoms.push(AtomTextEdit::delete(range))
}
pub fn insert(&mut self, offset: TextSize, text: String) {
self.atoms.push(AtomTextEdit::insert(offset, text))
}
pub fn finish(self) -> TextEdit {
TextEdit::from_atoms(self.atoms)
}
pub fn invalidates_offset(&self, offset: TextSize) -> bool {
self.atoms.iter().any(|atom| atom.delete.contains_inclusive(offset))
}
}

impl TextEdit {
pub fn insert(offset: TextSize, text: String) -> TextEdit {
let mut builder = TextEditBuilder::default();
builder.insert(offset, text);
builder.finish()
}

pub fn delete(range: TextRange) -> TextEdit {
let mut builder = TextEditBuilder::default();
builder.delete(range);
builder.finish()
}

pub fn replace(range: TextRange, replace_with: String) -> TextEdit {
let mut builder = TextEditBuilder::default();
builder.replace(range, replace_with);
builder.finish()
}

pub(crate) fn from_atoms(mut atoms: Vec<AtomTextEdit>) -> TextEdit {
atoms.sort_by_key(|a| (a.delete.start(), a.delete.end()));
for (a1, a2) in atoms.iter().zip(atoms.iter().skip(1)) {
assert!(a1.delete.end() <= a2.delete.start())
}
TextEdit { atoms }
}

pub fn as_atoms(&self) -> &[AtomTextEdit] {
&self.atoms
}

pub fn apply(&self, text: &str) -> String {
let mut total_len = TextSize::of(text);
for atom in self.atoms.iter() {
total_len += TextSize::of(&atom.insert);
total_len -= atom.delete.end() - atom.delete.start();
}
let mut buf = String::with_capacity(total_len.into());
let mut prev = 0;
for atom in self.atoms.iter() {
let start: usize = atom.delete.start().into();
let end: usize = atom.delete.end().into();
if start > prev {
buf.push_str(&text[prev..start]);
}
buf.push_str(&atom.insert);
prev = end;
}
buf.push_str(&text[prev..text.len()]);
assert_eq!(TextSize::of(&buf), total_len);
buf
}

pub fn apply_to_offset(&self, offset: TextSize) -> Option<TextSize> {
let mut res = offset;
for atom in self.atoms.iter() {
if atom.delete.start() >= offset {
break;
}
if offset < atom.delete.end() {
return None;
}
res += TextSize::of(&atom.insert);
res -= atom.delete.len();
}
Some(res)
}
}
@ -17,10 +17,10 @@ pub fn server_capabilities() -> ServerCapabilities {
ServerCapabilities {
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
open_close: Some(true),
change: Some(if env::var("RA_PROFILE").is_ok() {
TextDocumentSyncKind::Incremental
} else {
change: Some(if env::var("RA_NO_INCREMENTAL_SYNC").is_ok() {
TextDocumentSyncKind::Full
} else {
TextDocumentSyncKind::Incremental
}),
will_save: None,
will_save_wait_until: None,
@ -27,7 +27,7 @@ impl CargoTargetSpec {
RunnableKind::Test { test_id, attr } => {
args.push("test".to_string());
if let Some(spec) = spec {
spec.push_to(&mut args);
spec.push_to(&mut args, kind);
}
extra_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {

@ -35,13 +35,13 @@ impl CargoTargetSpec {
}
extra_args.push("--nocapture".to_string());
if attr.ignore {
extra_args.push("--ignored".to_string())
extra_args.push("--ignored".to_string());
}
}
RunnableKind::TestMod { path } => {
args.push("test".to_string());
if let Some(spec) = spec {
spec.push_to(&mut args);
spec.push_to(&mut args, kind);
}
extra_args.push(path.to_string());
extra_args.push("--nocapture".to_string());

@ -49,7 +49,7 @@ impl CargoTargetSpec {
RunnableKind::Bench { test_id } => {
args.push("bench".to_string());
if let Some(spec) = spec {
spec.push_to(&mut args);
spec.push_to(&mut args, kind);
}
extra_args.push(test_id.to_string());
if let TestId::Path(_) = test_id {

@ -57,10 +57,19 @@ impl CargoTargetSpec {
}
extra_args.push("--nocapture".to_string());
}
RunnableKind::DocTest { test_id } => {
args.push("test".to_string());
args.push("--doc".to_string());
if let Some(spec) = spec {
spec.push_to(&mut args, kind);
}
extra_args.push(test_id.to_string());
extra_args.push("--nocapture".to_string());
}
RunnableKind::Bin => {
args.push("run".to_string());
if let Some(spec) = spec {
spec.push_to(&mut args);
spec.push_to(&mut args, kind);
}
}
}

@ -91,9 +100,14 @@ impl CargoTargetSpec {
Ok(res)
}

pub(crate) fn push_to(self, buf: &mut Vec<String>) {
pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
buf.push("--package".to_string());
buf.push(self.package);

// Can't mix --doc with other target flags
if let RunnableKind::DocTest { .. } = kind {
return;
}
match self.target_kind {
TargetKind::Bin => {
buf.push("--bin".to_string());
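For context, with the extra `kind` argument a doctest runnable now skips the `--bin`/`--lib` target flags entirely, because cargo rejects mixing `--doc` with other target filters. A hand-written illustration of the resulting argument shape (the package name and doctest id below are made up, not taken from the patch):

fn main() {
    // Roughly what the patched code assembles for RunnableKind::DocTest
    // on a package named "foo" with doctest id "Indel::apply".
    let args = vec!["test", "--doc", "--package", "foo"];
    let extra_args = vec!["Indel::apply", "--nocapture"];
    // Final invocation shape: cargo test --doc --package foo -- Indel::apply --nocapture
    println!("cargo {} -- {}", args.join(" "), extra_args.join(" "));
}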
@ -149,7 +149,7 @@ pub(crate) fn load(

// FIXME: cfg options?
let default_cfg_options = {
let mut opts = get_rustc_cfg_options();
let mut opts = get_rustc_cfg_options(None);
opts.insert_atom("test".into());
opts.insert_atom("debug_assertion".into());
opts
@ -89,6 +89,7 @@ impl Default for Config {
check: Some(FlycheckConfig::CargoCommand {
command: "check".to_string(),
all_targets: true,
all_features: true,
extra_args: Vec::new(),
}),

@ -131,6 +132,7 @@ impl Config {
set(value, "/cargo/allFeatures", &mut self.cargo.all_features);
set(value, "/cargo/features", &mut self.cargo.features);
set(value, "/cargo/loadOutDirsFromCheck", &mut self.cargo.load_out_dirs_from_check);
set(value, "/cargo/target", &mut self.cargo.target);

match get(value, "/procMacro/enable") {
Some(true) => {

@ -172,12 +174,13 @@ impl Config {
}
// otherwise configure command customizations
_ => {
if let Some(FlycheckConfig::CargoCommand { command, extra_args, all_targets })
if let Some(FlycheckConfig::CargoCommand { command, extra_args, all_targets, all_features })
= &mut self.check
{
set(value, "/checkOnSave/extraArgs", extra_args);
set(value, "/checkOnSave/command", command);
set(value, "/checkOnSave/allTargets", all_targets);
set(value, "/checkOnSave/allFeatures", all_features);
}
}
};
@ -15,7 +15,7 @@ use ra_ide::{
ReferenceAccess, Severity, SourceChange, SourceFileEdit,
};
use ra_syntax::{SyntaxKind, TextRange, TextSize};
use ra_text_edit::{AtomTextEdit, TextEdit};
use ra_text_edit::{Indel, TextEdit};
use ra_vfs::LineEndings;

use crate::{

@ -124,23 +124,22 @@ impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem {
let mut text_edit = None;
// LSP does not allow arbitrary edits in completion, so we have to do a
// non-trivial mapping here.
for atom_edit in self.text_edit().as_atoms() {
if atom_edit.delete.contains_range(self.source_range()) {
text_edit = Some(if atom_edit.delete == self.source_range() {
atom_edit.conv_with((ctx.0, ctx.1))
for indel in self.text_edit().as_indels() {
if indel.delete.contains_range(self.source_range()) {
text_edit = Some(if indel.delete == self.source_range() {
indel.conv_with((ctx.0, ctx.1))
} else {
assert!(self.source_range().end() == atom_edit.delete.end());
let range1 =
TextRange::new(atom_edit.delete.start(), self.source_range().start());
assert!(self.source_range().end() == indel.delete.end());
let range1 = TextRange::new(indel.delete.start(), self.source_range().start());
let range2 = self.source_range();
let edit1 = AtomTextEdit::replace(range1, String::new());
let edit2 = AtomTextEdit::replace(range2, atom_edit.insert.clone());
let edit1 = Indel::replace(range1, String::new());
let edit2 = Indel::replace(range2, indel.insert.clone());
additional_text_edits.push(edit1.conv_with((ctx.0, ctx.1)));
edit2.conv_with((ctx.0, ctx.1))
})
} else {
assert!(self.source_range().intersect(atom_edit.delete).is_none());
additional_text_edits.push(atom_edit.conv_with((ctx.0, ctx.1)));
assert!(self.source_range().intersect(indel.delete).is_none());
additional_text_edits.push(indel.conv_with((ctx.0, ctx.1)));
}
}
let text_edit = text_edit.unwrap();

@ -257,11 +256,11 @@ impl ConvWith<(&LineIndex, LineEndings)> for TextEdit {
type Output = Vec<lsp_types::TextEdit>;

fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> Vec<lsp_types::TextEdit> {
self.as_atoms().iter().map_conv_with(ctx).collect()
self.as_indels().iter().map_conv_with(ctx).collect()
}
}

impl ConvWith<(&LineIndex, LineEndings)> for &AtomTextEdit {
impl ConvWith<(&LineIndex, LineEndings)> for &Indel {
type Output = lsp_types::TextEdit;

fn conv_with(

@ -522,7 +521,7 @@ impl TryConvWith<&WorldSnapshot> for SourceFileEdit {
let line_index = world.analysis().file_line_index(self.file_id)?;
let line_endings = world.file_line_endings(self.file_id);
let edits =
self.edit.as_atoms().iter().map_conv_with((&line_index, line_endings)).collect();
self.edit.as_indels().iter().map_conv_with((&line_index, line_endings)).collect();
Ok(TextDocumentEdit { text_document, edits })
}
}
@ -676,13 +676,13 @@ fn apply_document_changes(
// remember the last valid line in the index and only rebuild it if needed.
enum IndexValid {
All,
UpToLine(u64),
UpToLineExclusive(u64),
}

impl IndexValid {
fn covers(&self, line: u64) -> bool {
match *self {
IndexValid::UpToLine(to) => to >= line,
IndexValid::UpToLineExclusive(to) => to > line,
_ => true,
}
}

@ -692,10 +692,10 @@ fn apply_document_changes(
for change in content_changes {
match change.range {
Some(range) => {
if !index_valid.covers(range.start.line) {
if !index_valid.covers(range.end.line) {
line_index = Cow::Owned(LineIndex::new(&old_text));
}
index_valid = IndexValid::UpToLine(range.start.line);
index_valid = IndexValid::UpToLineExclusive(range.start.line);
let range = range.conv_with(&line_index);
let mut text = old_text.to_owned();
match std::panic::catch_unwind(move || {

@ -713,7 +713,7 @@ fn apply_document_changes(
}
None => {
*old_text = change.text;
index_valid = IndexValid::UpToLine(0);
index_valid = IndexValid::UpToLineExclusive(0);
}
}
}

@ -1083,5 +1083,17 @@ mod tests {
assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n");
run(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
assert_eq!(text, "the quick \nthey have quiet dreams\n");

text = String::from("❤️");
run(&mut text, c![0, 0; 0, 0 => "a"]);
assert_eq!(text, "a❤️");

text = String::from("a\nb");
run(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
assert_eq!(text, "adcb");

text = String::from("a\nb");
run(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
assert_eq!(text, "ațc\ncb");
}
}
@ -835,6 +835,7 @@ pub fn handle_code_lens(
for runnable in world.analysis().runnables(file_id)? {
let title = match &runnable.kind {
RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => "▶️\u{fe0e}Run Test",
RunnableKind::DocTest { .. } => "▶️\u{fe0e}Run Doctest",
RunnableKind::Bench { .. } => "Run Bench",
RunnableKind::Bin => "Run",
}

@ -1018,6 +1019,7 @@ fn to_lsp_runnable(
RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
RunnableKind::TestMod { path } => format!("test-mod {}", path),
RunnableKind::Bench { test_id } => format!("bench {}", test_id),
RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
RunnableKind::Bin => "run binary".to_string(),
};
Ok(req::Runnable {
@ -131,7 +131,7 @@ impl WorldState {

// FIXME: Read default cfgs from config
let default_cfg_options = {
let mut opts = get_rustc_cfg_options();
let mut opts = get_rustc_cfg_options(config.cargo.target.as_ref());
opts.insert_atom("test".into());
opts.insert_atom("debug_assertion".into());
opts
@ -337,7 +337,7 @@ fn main() {}
"arguments": [
{
"cursorPosition": null,
"label": "create module",
"label": "Create module",
"workspaceEdit": {
"documentChanges": [
{

@ -349,9 +349,9 @@ fn main() {}
}
],
"command": "rust-analyzer.applySourceChange",
"title": "create module"
"title": "Create module"
},
"title": "create module"
"title": "Create module"
}
]),
);

@ -420,7 +420,7 @@ fn main() {{}}
"arguments": [
{
"cursorPosition": null,
"label": "create module",
"label": "Create module",
"workspaceEdit": {
"documentChanges": [
{

@ -432,9 +432,9 @@ fn main() {{}}
}
],
"command": "rust-analyzer.applySourceChange",
"title": "create module"
"title": "Create module"
},
"title": "create module"
"title": "Create module"
}
]),
);

@ -500,7 +500,7 @@ fn main() {{}}
"position": { "character": 4, "line": 1 },
"textDocument": { "uri": "file:///[..]src/m0.rs" }
},
"label": "on enter",
"label": "On enter",
"workspaceEdit": {
"documentChanges": [
{

@ -552,7 +552,7 @@ version = \"0.0.0\"
"position": { "line": 1, "character": 4 },
"textDocument": { "uri": "file:///[..]src/main.rs" }
},
"label": "on enter",
"label": "On enter",
"workspaceEdit": {
"documentChanges": [
{
@ -57,7 +57,11 @@ To disable this notification put the following to `settings.json`
----
====

The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer` (Linux) or in `~/.Library/Application Support/Code/User/globalStorage/matklad.rust-analyzer` (macOS) or in `%APPDATA%\Code\User\globalStorage` (Windows).
The server binary is stored in:

* Linux: `~/.config/Code/User/globalStorage/matklad.rust-analyzer`
* macOS: `~/Library/Application Support/Code/User/globalStorage/matklad.rust-analyzer`
* Windows: `%APPDATA%\Code\User\globalStorage`

Note that we only support the latest version of VS Code.

@ -159,11 +163,11 @@ Emacs support is maintained as part of the https://github.com/emacs-lsp/lsp-mode
3. Run `lsp` in a Rust buffer.
4. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.

=== Vim
=== Vim/NeoVim

Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>. Not needed if the extension can install/update it on its own, coc-rust-analyzer is one example.

The are several LSP client implementations for vim:
The are several LSP client implementations for vim or neovim:

==== coc-rust-analyzer
5 editors/code/package-lock.json generated

@ -1066,11 +1066,6 @@
"integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=",
"dev": true
},
"jsonc-parser": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-2.2.1.tgz",
"integrity": "sha512-o6/yDBYccGvTz1+QFevz6l6OBZ2+fMVu2JZ9CIhzsYRX4mjaK5IyX9eldUdCmga16zlgQxyrj5pt9kzuj2C02w=="
},
"leven": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz",
@ -33,7 +33,6 @@
"fix": " tsfmt -r && eslint -c .eslintrc.js --ext ts ./src --fix"
},
"dependencies": {
"jsonc-parser": "^2.2.1",
"node-fetch": "^2.6.0",
"vscode-languageclient": "7.0.0-next.1"
},

@ -233,6 +232,14 @@
"default": false,
"markdownDescription": "Run `cargo check` on startup to get the correct value for package OUT_DIRs"
},
"rust-analyzer.cargo.target": {
"type": [
"null",
"string"
],
"default": null,
"description": "Specify the compilation target"
},
"rust-analyzer.rustfmt.extraArgs": {
"type": "array",
"items": {

@ -288,6 +295,11 @@
"default": true,
"markdownDescription": "Check all targets and tests (will be passed as `--all-targets`)"
},
"rust-analyzer.checkOnSave.allFeatures": {
"type": "boolean",
"default": true,
"markdownDescription": "Check with all features (will be passed as `--all-features`)"
},
"rust-analyzer.inlayHints.typeHints": {
"type": "boolean",
"default": true,
@ -1,129 +0,0 @@
import * as fs from 'fs';
import * as jsonc from 'jsonc-parser';
import * as path from 'path';
import * as vscode from 'vscode';

export interface TextMateRuleSettings {
foreground?: string;
background?: string;
fontStyle?: string;
}

export class ColorTheme {
private rules: Map<string, TextMateRuleSettings> = new Map();

static load(): ColorTheme {
// Find out current color theme
const themeName = vscode.workspace
.getConfiguration('workbench')
.get('colorTheme');

if (typeof themeName !== 'string') {
// console.warn('workbench.colorTheme is', themeName)
return new ColorTheme();
}
return loadThemeNamed(themeName);
}

static fromRules(rules: TextMateRule[]): ColorTheme {
const res = new ColorTheme();
for (const rule of rules) {
const scopes = typeof rule.scope === 'undefined'
? []
: typeof rule.scope === 'string'
? [rule.scope]
: rule.scope;

for (const scope of scopes) {
res.rules.set(scope, rule.settings);
}
}
return res;
}

lookup(scopes: string[]): TextMateRuleSettings {
let res: TextMateRuleSettings = {};
for (const scope of scopes) {
this.rules.forEach((value, key) => {
if (scope.startsWith(key)) {
res = mergeRuleSettings(res, value);
}
});
}
return res;
}

mergeFrom(other: ColorTheme) {
other.rules.forEach((value, key) => {
const merged = mergeRuleSettings(this.rules.get(key), value);
this.rules.set(key, merged);
});
}
}

function loadThemeNamed(themeName: string): ColorTheme {
function isTheme(extension: vscode.Extension<unknown>): boolean {
return (
extension.extensionKind === vscode.ExtensionKind.UI &&
extension.packageJSON.contributes &&
extension.packageJSON.contributes.themes
);
}

const themePaths: string[] = vscode.extensions.all
.filter(isTheme)
.flatMap(
ext => ext.packageJSON.contributes.themes
.filter((it: any) => (it.id || it.label) === themeName)
.map((it: any) => path.join(ext.extensionPath, it.path))
);

const res = new ColorTheme();
for (const themePath of themePaths) {
res.mergeFrom(loadThemeFile(themePath));
}

const globalCustomizations: any = vscode.workspace.getConfiguration('editor').get('tokenColorCustomizations');
res.mergeFrom(ColorTheme.fromRules(globalCustomizations?.textMateRules ?? []));

const themeCustomizations: any = vscode.workspace.getConfiguration('editor.tokenColorCustomizations').get(`[${themeName}]`);
res.mergeFrom(ColorTheme.fromRules(themeCustomizations?.textMateRules ?? []));

return res;
}

function loadThemeFile(themePath: string): ColorTheme {
let text;
try {
text = fs.readFileSync(themePath, 'utf8');
} catch {
return new ColorTheme();
}
const obj = jsonc.parse(text);
const tokenColors: TextMateRule[] = obj?.tokenColors ?? [];
const res = ColorTheme.fromRules(tokenColors);

for (const include of obj?.include ?? []) {
const includePath = path.join(path.dirname(themePath), include);
res.mergeFrom(loadThemeFile(includePath));
}

return res;
}

interface TextMateRule {
scope: string | string[];
settings: TextMateRuleSettings;
}

function mergeRuleSettings(
defaultSetting: TextMateRuleSettings | undefined,
override: TextMateRuleSettings,
): TextMateRuleSettings {
return {
foreground: override.foreground ?? defaultSetting?.foreground,
background: override.background ?? defaultSetting?.background,
fontStyle: override.fontStyle ?? defaultSetting?.fontStyle,
};
}
@ -373,7 +373,7 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {

struct ItemList: ModuleItemOwner {
T!['{'],
impl_items: [ImplItem],
assoc_items: [AssocItem],
T!['}']
}

@ -685,7 +685,7 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {
}

/* impl blocks can also contain MacroCall */
enum ImplItem: NameOwner, AttrsOwner {
enum AssocItem: NameOwner, AttrsOwner {
FnDef, TypeAliasDef, ConstDef
}
@ -115,7 +115,6 @@ impl TidyDocs {
"ra_prof",
"ra_project_model",
"ra_syntax",
"ra_text_edit",
"ra_tt",
"ra_hir_ty",
];