Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-11-15 09:27:27 +00:00

Merge #11639

11639: internal: Re-arrange ide_db modules r=Veykril a=Veykril

Thins out the `helpers` module by giving some items more appropriate places to live.

bors r+

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
commit 5fae65dd28
86 changed files with 671 additions and 684 deletions

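The bulk of the diff below is mechanical path updates in the crates that consume `ide_db`. As a rough before/after sketch (paths taken from the hunks that follow; grouping them into single `use` items is illustrative, not a literal line from the patch):

// Before (old paths, everything funneled through ide_db::helpers):
//     use ide_db::helpers::{insert_use::ImportScope, node_ext::walk_expr, FamousDefs, SnippetCap};

// After (dedicated modules; SnippetCap re-homed at the crate root):
use ide_db::{
    famous_defs::FamousDefs,
    imports::insert_use::ImportScope,
    syntax_helpers::node_ext::walk_expr,
    SnippetCap,
};
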
@@ -149,6 +149,7 @@ fn resolve_doc_path(
    };
    let modpath = {
        // FIXME: this is not how we should get a mod path here
        let ast_path = ast::SourceFile::parse(&format!("type T = {};", link))
            .syntax_node()
            .descendants()

@ -1,6 +1,6 @@
|
|||
use hir::Semantics;
|
||||
use ide_db::{
|
||||
helpers::{insert_whitespace_into_node::insert_ws_into, pick_best_token},
|
||||
helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into,
|
||||
RootDatabase,
|
||||
};
|
||||
use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ide_db::helpers::node_ext::vis_eq;
|
||||
use ide_db::syntax_helpers::node_ext::vis_eq;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use syntax::{
|
||||
|
|
|
@ -2,10 +2,9 @@ use hir::Semantics;
|
|||
use ide_db::{
|
||||
base_db::{FileId, FilePosition},
|
||||
defs::{Definition, IdentClass},
|
||||
helpers::{
|
||||
for_each_break_and_continue_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token,
|
||||
},
|
||||
helpers::pick_best_token,
|
||||
search::{FileReference, ReferenceCategory, SearchScope},
|
||||
syntax_helpers::node_ext::{for_each_break_and_continue_expr, for_each_tail_expr, walk_expr},
|
||||
RootDatabase,
|
||||
};
|
||||
use rustc_hash::FxHashSet;
|
||||
|
|
|
@ -10,7 +10,8 @@ use hir::{HasSource, Semantics};
|
|||
use ide_db::{
|
||||
base_db::FileRange,
|
||||
defs::{Definition, IdentClass},
|
||||
helpers::{pick_best_token, FamousDefs},
|
||||
famous_defs::FamousDefs,
|
||||
helpers::pick_best_token,
|
||||
FxIndexSet, RootDatabase,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
|
|
@ -6,10 +6,8 @@ use hir::{AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Seman
|
|||
use ide_db::{
|
||||
base_db::SourceDatabase,
|
||||
defs::Definition,
|
||||
helpers::{
|
||||
generated_lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
|
||||
FamousDefs,
|
||||
},
|
||||
famous_defs::FamousDefs,
|
||||
generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
|
||||
RootDatabase,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use either::Either;
|
||||
use hir::{known, Callable, HasVisibility, HirDisplay, Semantics, TypeInfo};
|
||||
use ide_db::{base_db::FileRange, helpers::FamousDefs, RootDatabase};
|
||||
use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase};
|
||||
use itertools::Itertools;
|
||||
use stdx::to_lower_snake_case;
|
||||
use syntax::{
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use ide_assists::utils::extract_trivial_expression;
|
||||
use ide_db::helpers::node_ext::expr_as_name_ref;
|
||||
use ide_db::syntax_helpers::node_ext::expr_as_name_ref;
|
||||
use itertools::Itertools;
|
||||
use syntax::{
|
||||
ast::{self, AstNode, AstToken, IsString},
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! Syntax highlighting for format macro strings.
|
||||
use ide_db::{
|
||||
helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
|
||||
syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
|
||||
SymbolKind,
|
||||
};
|
||||
use syntax::{ast, TextRange};
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
use hir::{AsAssocItem, HasVisibility, Semantics};
|
||||
use ide_db::{
|
||||
defs::{Definition, IdentClass, NameClass, NameRefClass},
|
||||
helpers::FamousDefs,
|
||||
famous_defs::FamousDefs,
|
||||
RootDatabase, SymbolKind,
|
||||
};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
|
|
@ -5,8 +5,7 @@ use std::mem;
|
|||
use either::Either;
|
||||
use hir::{InFile, Semantics};
|
||||
use ide_db::{
|
||||
active_parameter::ActiveParameter, defs::Definition, helpers::rust_doc::is_rust_fence,
|
||||
SymbolKind,
|
||||
active_parameter::ActiveParameter, defs::Definition, rust_doc::is_rust_fence, SymbolKind,
|
||||
};
|
||||
use syntax::{
|
||||
ast::{self, AstNode, IsString, QuoteOffsets},
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
//! module, and we use to statically check that we only produce snippet
|
||||
//! assists if we are allowed to.
|
||||
|
||||
use ide_db::helpers::{insert_use::InsertUseConfig, SnippetCap};
|
||||
use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
|
||||
|
||||
use crate::AssistKind;
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ use std::mem;
|
|||
use hir::Semantics;
|
||||
use ide_db::{
|
||||
base_db::{AnchoredPathBuf, FileId, FileRange},
|
||||
helpers::SnippetCap,
|
||||
SnippetCap,
|
||||
};
|
||||
use ide_db::{
|
||||
label::Label,
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use hir::HirDisplay;
|
||||
use ide_db::helpers::node_ext::walk_ty;
|
||||
use ide_db::syntax_helpers::node_ext::walk_ty;
|
||||
use syntax::ast::{self, AstNode, LetStmt, Param};
|
||||
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
use hir::HasSource;
|
||||
use ide_db::{helpers::insert_whitespace_into_node::insert_ws_into, traits::resolve_target_trait};
|
||||
use ide_db::{
|
||||
syntax_helpers::insert_whitespace_into_node::insert_ws_into, traits::resolve_target_trait,
|
||||
};
|
||||
use syntax::ast::{self, make, AstNode};
|
||||
|
||||
use crate::{
|
||||
|
|
|
@ -2,8 +2,8 @@ use std::iter::{self, Peekable};
|
|||
|
||||
use either::Either;
|
||||
use hir::{Adt, Crate, HasAttrs, HasSource, ModuleDef, Semantics};
|
||||
use ide_db::helpers::{mod_path_to_ast, FamousDefs};
|
||||
use ide_db::RootDatabase;
|
||||
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
|
||||
use itertools::Itertools;
|
||||
use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat};
|
||||
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
use ide_db::helpers::{
|
||||
import_assets::{ImportAssets, ImportCandidate},
|
||||
insert_use::{insert_use, ImportScope},
|
||||
mod_path_to_ast,
|
||||
use ide_db::{
|
||||
helpers::mod_path_to_ast,
|
||||
imports::{
|
||||
import_assets::{ImportAssets, ImportCandidate},
|
||||
insert_use::{insert_use, ImportScope},
|
||||
},
|
||||
};
|
||||
use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
|
||||
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
use hir::{known, AsAssocItem, Semantics};
|
||||
use ide_db::{
|
||||
helpers::{
|
||||
for_each_tail_expr,
|
||||
node_ext::{block_as_lone_tail, is_pattern_cond, preorder_expr},
|
||||
FamousDefs,
|
||||
famous_defs::FamousDefs,
|
||||
syntax_helpers::node_ext::{
|
||||
block_as_lone_tail, for_each_tail_expr, is_pattern_cond, preorder_expr,
|
||||
},
|
||||
RootDatabase,
|
||||
};
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
use ide_db::{
|
||||
helpers::{mod_path_to_ast, FamousDefs},
|
||||
traits::resolve_target_trait,
|
||||
};
|
||||
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait};
|
||||
use syntax::ast::{self, AstNode, HasName};
|
||||
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use hir::known;
|
||||
use ide_db::helpers::FamousDefs;
|
||||
use ide_db::famous_defs::FamousDefs;
|
||||
use stdx::format_to;
|
||||
use syntax::{
|
||||
ast::{self, edit_in_place::Indent, make, HasArgList, HasLoopBody},
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::iter::once;
|
||||
|
||||
use ide_db::helpers::node_ext::{is_pattern_cond, single_let};
|
||||
use ide_db::syntax_helpers::node_ext::{is_pattern_cond, single_let};
|
||||
use syntax::{
|
||||
ast::{
|
||||
self,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::iter::once;
|
||||
|
||||
use ide_db::helpers::node_ext::is_pattern_cond;
|
||||
use ide_db::syntax_helpers::node_ext::is_pattern_cond;
|
||||
use syntax::{
|
||||
ast::{
|
||||
self,
|
||||
|
|
|
@ -5,13 +5,11 @@ use either::Either;
|
|||
use hir::{HirDisplay, InFile, Local, ModuleDef, Semantics, TypeInfo};
|
||||
use ide_db::{
|
||||
defs::{Definition, NameRefClass},
|
||||
helpers::{
|
||||
insert_use::{insert_use, ImportScope},
|
||||
mod_path_to_ast,
|
||||
node_ext::{preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr},
|
||||
FamousDefs,
|
||||
},
|
||||
famous_defs::FamousDefs,
|
||||
helpers::mod_path_to_ast,
|
||||
imports::insert_use::{insert_use, ImportScope},
|
||||
search::{FileReference, ReferenceCategory, SearchScope},
|
||||
syntax_helpers::node_ext::{preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr},
|
||||
FxIndexSet, RootDatabase,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
|
|
@ -4,10 +4,8 @@ use either::Either;
|
|||
use hir::{Module, ModuleDef, Name, Variant};
|
||||
use ide_db::{
|
||||
defs::Definition,
|
||||
helpers::{
|
||||
insert_use::{insert_use, ImportScope, InsertUseConfig},
|
||||
mod_path_to_ast,
|
||||
},
|
||||
helpers::mod_path_to_ast,
|
||||
imports::insert_use::{insert_use, ImportScope, InsertUseConfig},
|
||||
search::FileReference,
|
||||
RootDatabase,
|
||||
};
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use either::Either;
|
||||
use ide_db::helpers::node_ext::walk_ty;
|
||||
use ide_db::syntax_helpers::node_ext::walk_ty;
|
||||
use itertools::Itertools;
|
||||
use syntax::{
|
||||
ast::{self, edit::IndentLevel, AstNode, HasGenericParams, HasName},
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ide_db::{helpers::FamousDefs, RootDatabase};
|
||||
use ide_db::{famous_defs::FamousDefs, RootDatabase};
|
||||
use syntax::ast::{self, AstNode, HasName};
|
||||
|
||||
use crate::{AssistContext, AssistId, AssistKind, Assists};
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ide_db::helpers::FamousDefs;
|
||||
use ide_db::famous_defs::FamousDefs;
|
||||
use itertools::Itertools;
|
||||
use stdx::format_to;
|
||||
use syntax::{
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::fmt::Display;
|
||||
|
||||
use ide_db::{helpers::FamousDefs, RootDatabase};
|
||||
use ide_db::{famous_defs::FamousDefs, RootDatabase};
|
||||
use syntax::{
|
||||
ast::{self, HasName},
|
||||
AstNode, SyntaxNode,
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
use ide_db::helpers::FamousDefs;
|
||||
use ide_db::RootDatabase;
|
||||
use ide_db::{famous_defs::FamousDefs, RootDatabase};
|
||||
use syntax::ast::{self, AstNode, HasName};
|
||||
|
||||
use crate::{utils::generate_trait_impl_text, AssistContext, AssistId, AssistKind, Assists};
|
||||
|
|
|
@ -1,12 +1,11 @@
|
|||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
use hir::{HasSource, HirDisplay, Module, Semantics, TypeInfo};
|
||||
use ide_db::helpers::FamousDefs;
|
||||
use ide_db::{
|
||||
base_db::FileId,
|
||||
defs::{Definition, NameRefClass},
|
||||
helpers::SnippetCap,
|
||||
RootDatabase,
|
||||
famous_defs::FamousDefs,
|
||||
RootDatabase, SnippetCap,
|
||||
};
|
||||
use stdx::to_lower_snake_case;
|
||||
use syntax::{
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ide_db::helpers::FamousDefs;
|
||||
use ide_db::famous_defs::FamousDefs;
|
||||
use stdx::{format_to, to_lower_snake_case};
|
||||
use syntax::ast::{self, AstNode, HasName, HasVisibility};
|
||||
|
||||
|
|
|
@ -4,9 +4,10 @@ use hir::{db::HirDatabase, PathResolution, Semantics, TypeInfo};
|
|||
use ide_db::{
|
||||
base_db::{FileId, FileRange},
|
||||
defs::Definition,
|
||||
helpers::{insert_use::remove_path_if_in_use_stmt, node_ext::expr_as_name_ref},
|
||||
imports::insert_use::remove_path_if_in_use_stmt,
|
||||
path_transform::PathTransform,
|
||||
search::{FileReference, SearchScope},
|
||||
syntax_helpers::node_ext::expr_as_name_ref,
|
||||
RootDatabase,
|
||||
};
|
||||
use itertools::{izip, Itertools};
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ide_db::helpers::node_ext::is_pattern_cond;
|
||||
use ide_db::syntax_helpers::node_ext::is_pattern_cond;
|
||||
use syntax::{
|
||||
ast::{self, AstNode},
|
||||
T,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ide_db::helpers::merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior};
|
||||
use ide_db::imports::merge_imports::{try_merge_imports, try_merge_trees, MergeBehavior};
|
||||
use syntax::{algo::neighbor, ast, ted, AstNode};
|
||||
|
||||
use crate::{
|
||||
|
|
|
@ -2,7 +2,7 @@ use hir::{HirDisplay, ModuleDef, PathResolution, Semantics};
|
|||
use ide_db::{
|
||||
assists::{AssistId, AssistKind},
|
||||
defs::Definition,
|
||||
helpers::node_ext::preorder_expr,
|
||||
syntax_helpers::node_ext::preorder_expr,
|
||||
RootDatabase,
|
||||
};
|
||||
use stdx::to_upper_snake_case;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use hir::{ItemInNs, ModuleDef};
|
||||
use ide_db::{
|
||||
assists::{AssistId, AssistKind},
|
||||
helpers::import_assets::item_for_path_search,
|
||||
imports::import_assets::item_for_path_search,
|
||||
};
|
||||
use syntax::{ast, AstNode};
|
||||
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
use std::iter;
|
||||
|
||||
use hir::AsAssocItem;
|
||||
use ide_db::helpers::{
|
||||
import_assets::{ImportCandidate, LocatedImport},
|
||||
mod_path_to_ast,
|
||||
};
|
||||
use ide_db::RootDatabase;
|
||||
use ide_db::{
|
||||
helpers::mod_path_to_ast,
|
||||
imports::import_assets::{ImportCandidate, LocatedImport},
|
||||
};
|
||||
use syntax::{
|
||||
ast,
|
||||
ast::{make, HasArgList},
|
||||
|
|
|
@ -1,9 +1,7 @@
|
|||
use hir::{InFile, ModuleDef};
|
||||
use ide_db::{
|
||||
helpers::{
|
||||
import_assets::NameToImport, insert_whitespace_into_node::insert_ws_into, mod_path_to_ast,
|
||||
},
|
||||
items_locator,
|
||||
helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator,
|
||||
syntax_helpers::insert_whitespace_into_node::insert_ws_into,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use syntax::{
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::iter::{self, successors};
|
|||
use either::Either;
|
||||
use ide_db::{
|
||||
defs::NameClass,
|
||||
helpers::node_ext::{is_pattern_cond, single_let},
|
||||
syntax_helpers::node_ext::{is_pattern_cond, single_let},
|
||||
ty_filter::TryEnum,
|
||||
RootDatabase,
|
||||
};
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use hir::AsAssocItem;
|
||||
use ide_db::helpers::{
|
||||
insert_use::{insert_use, ImportScope},
|
||||
mod_path_to_ast,
|
||||
use ide_db::{
|
||||
helpers::mod_path_to_ast,
|
||||
imports::insert_use::{insert_use, ImportScope},
|
||||
};
|
||||
use syntax::{
|
||||
ast::{self, make},
|
||||
|
|
|
@ -1,4 +1,7 @@
|
|||
use ide_db::helpers::{for_each_tail_expr, node_ext::walk_expr, FamousDefs};
|
||||
use ide_db::{
|
||||
famous_defs::FamousDefs,
|
||||
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use syntax::{
|
||||
ast::{self, Expr},
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
use std::iter;
|
||||
|
||||
use ide_db::helpers::{for_each_tail_expr, node_ext::walk_expr, FamousDefs};
|
||||
use ide_db::{
|
||||
famous_defs::FamousDefs,
|
||||
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
|
||||
};
|
||||
use syntax::{
|
||||
ast::{self, make, Expr},
|
||||
match_ast, AstNode,
|
||||
|
|
|
@ -5,12 +5,9 @@ use expect_test::expect;
|
|||
use hir::{db::DefDatabase, Semantics};
|
||||
use ide_db::{
|
||||
base_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt},
|
||||
helpers::{
|
||||
insert_use::{ImportGranularity, InsertUseConfig},
|
||||
SnippetCap,
|
||||
},
|
||||
imports::insert_use::{ImportGranularity, InsertUseConfig},
|
||||
source_change::FileSystemEdit,
|
||||
RootDatabase,
|
||||
RootDatabase, SnippetCap,
|
||||
};
|
||||
use stdx::{format_to, trim_indent};
|
||||
use syntax::TextRange;
|
||||
|
|
|
@ -6,9 +6,7 @@ use itertools::Itertools;
|
|||
|
||||
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
|
||||
use hir::{db::HirDatabase, HirDisplay, Semantics};
|
||||
use ide_db::{
|
||||
helpers::FamousDefs, helpers::SnippetCap, path_transform::PathTransform, RootDatabase,
|
||||
};
|
||||
use ide_db::{famous_defs::FamousDefs, path_transform::PathTransform, RootDatabase, SnippetCap};
|
||||
use stdx::format_to;
|
||||
use syntax::{
|
||||
ast::{
|
||||
|
|
|
@ -3,12 +3,10 @@
|
|||
//! This module uses a bit of static metadata to provide completions for builtin-in attributes and lints.
|
||||
|
||||
use ide_db::{
|
||||
helpers::{
|
||||
generated_lints::{
|
||||
Lint, CLIPPY_LINTS, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, FEATURES, RUSTDOC_LINTS,
|
||||
},
|
||||
parse_tt_as_comma_sep_paths,
|
||||
generated::lints::{
|
||||
Lint, CLIPPY_LINTS, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, FEATURES, RUSTDOC_LINTS,
|
||||
},
|
||||
syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
|
||||
SymbolKind,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
//! Completion for derives
|
||||
use hir::{HasAttrs, MacroDef, MacroKind};
|
||||
use ide_db::{
|
||||
helpers::{import_assets::ImportAssets, insert_use::ImportScope},
|
||||
imports::{import_assets::ImportAssets, insert_use::ImportScope},
|
||||
SymbolKind,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
//! Completion for lints
|
||||
use ide_db::{helpers::generated_lints::Lint, SymbolKind};
|
||||
use ide_db::{generated::lints::Lint, SymbolKind};
|
||||
use syntax::{ast, T};
|
||||
|
||||
use crate::{context::CompletionContext, item::CompletionItem, Completions};
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! See [`import_on_the_fly`].
|
||||
use hir::ItemInNs;
|
||||
use ide_db::helpers::{
|
||||
use ide_db::imports::{
|
||||
import_assets::{ImportAssets, ImportCandidate, LocatedImport},
|
||||
insert_use::ImportScope,
|
||||
};
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! Completes identifiers in format string literals.
|
||||
|
||||
use ide_db::helpers::format_string::is_format_string;
|
||||
use ide_db::syntax_helpers::format_string::is_format_string;
|
||||
use itertools::Itertools;
|
||||
use syntax::{ast, AstToken, TextRange, TextSize};
|
||||
|
||||
|
|
|
@ -3,10 +3,7 @@
|
|||
mod format_like;
|
||||
|
||||
use hir::{Documentation, HasAttrs};
|
||||
use ide_db::{
|
||||
helpers::{insert_use::ImportScope, SnippetCap},
|
||||
ty_filter::TryEnum,
|
||||
};
|
||||
use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap};
|
||||
use syntax::{
|
||||
ast::{self, AstNode, AstToken},
|
||||
SyntaxKind::{EXPR_STMT, STMT_LIST},
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
//
|
||||
// image::https://user-images.githubusercontent.com/48062697/113020656-b560f500-917a-11eb-87de-02991f61beb8.gif[]
|
||||
|
||||
use ide_db::helpers::SnippetCap;
|
||||
use ide_db::SnippetCap;
|
||||
use syntax::ast::{self, AstToken};
|
||||
|
||||
use crate::{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
//! This file provides snippet completions, like `pd` => `eprintln!(...)`.
|
||||
|
||||
use hir::Documentation;
|
||||
use ide_db::helpers::{insert_use::ImportScope, SnippetCap};
|
||||
use ide_db::{imports::insert_use::ImportScope, SnippetCap};
|
||||
use syntax::T;
|
||||
|
||||
use crate::{
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
//! module, and we use to statically check that we only produce snippet
|
||||
//! completions if we are allowed to.
|
||||
|
||||
use ide_db::helpers::{insert_use::InsertUseConfig, SnippetCap};
|
||||
use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
|
||||
|
||||
use crate::snippet::Snippet;
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ use hir::{
|
|||
use ide_db::{
|
||||
active_parameter::ActiveParameter,
|
||||
base_db::{FilePosition, SourceDatabase},
|
||||
helpers::FamousDefs,
|
||||
famous_defs::FamousDefs,
|
||||
RootDatabase,
|
||||
};
|
||||
use syntax::{
|
||||
|
|
|
@ -4,12 +4,12 @@ use std::fmt;
|
|||
|
||||
use hir::{Documentation, Mutability};
|
||||
use ide_db::{
|
||||
helpers::{
|
||||
helpers::mod_path_to_ast,
|
||||
imports::{
|
||||
import_assets::LocatedImport,
|
||||
insert_use::{self, ImportScope, InsertUseConfig},
|
||||
mod_path_to_ast, SnippetCap,
|
||||
},
|
||||
SymbolKind,
|
||||
SnippetCap, SymbolKind,
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
use stdx::{impl_from, never};
|
||||
|
|
|
@ -14,10 +14,10 @@ mod snippet;
|
|||
use completions::flyimport::position_for_import;
|
||||
use ide_db::{
|
||||
base_db::FilePosition,
|
||||
helpers::{
|
||||
helpers::mod_path_to_ast,
|
||||
imports::{
|
||||
import_assets::NameToImport,
|
||||
insert_use::{self, ImportScope},
|
||||
mod_path_to_ast,
|
||||
},
|
||||
items_locator, RootDatabase,
|
||||
};
|
||||
|
|
|
@ -12,10 +12,7 @@ pub(crate) mod struct_literal;
|
|||
mod builder_ext;
|
||||
|
||||
use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
|
||||
use ide_db::{
|
||||
helpers::{item_name, SnippetCap},
|
||||
RootDatabase, SymbolKind,
|
||||
};
|
||||
use ide_db::{helpers::item_name, RootDatabase, SnippetCap, SymbolKind};
|
||||
use syntax::{SmolStr, SyntaxKind, TextRange};
|
||||
|
||||
use crate::{
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
//! Renderer for patterns.
|
||||
|
||||
use hir::{db::HirDatabase, HasAttrs, HasVisibility, Name, StructKind};
|
||||
use ide_db::helpers::SnippetCap;
|
||||
use ide_db::SnippetCap;
|
||||
use itertools::Itertools;
|
||||
use syntax::SmolStr;
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
//! Renderer for `struct` literal.
|
||||
|
||||
use hir::{db::HirDatabase, HasAttrs, HasVisibility, Name, StructKind};
|
||||
use ide_db::helpers::SnippetCap;
|
||||
use ide_db::SnippetCap;
|
||||
use itertools::Itertools;
|
||||
use syntax::SmolStr;
|
||||
|
||||
|
|
|
@ -102,7 +102,7 @@ use std::ops::Deref;
|
|||
// }
|
||||
// ----
|
||||
|
||||
use ide_db::helpers::{import_assets::LocatedImport, insert_use::ImportScope};
|
||||
use ide_db::imports::{import_assets::LocatedImport, insert_use::ImportScope};
|
||||
use itertools::Itertools;
|
||||
use syntax::{ast, AstNode, GreenNode, SyntaxNode};
|
||||
|
||||
|
|
|
@ -27,11 +27,8 @@ use std::mem;
|
|||
use hir::{db::DefDatabase, PrefixKind, Semantics};
|
||||
use ide_db::{
|
||||
base_db::{fixture::ChangeFixture, FileLoader, FilePosition},
|
||||
helpers::{
|
||||
insert_use::{ImportGranularity, InsertUseConfig},
|
||||
SnippetCap,
|
||||
},
|
||||
RootDatabase,
|
||||
imports::insert_use::{ImportGranularity, InsertUseConfig},
|
||||
RootDatabase, SnippetCap,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use stdx::{format_to, trim_indent};
|
||||
|
|
|
@@ -1,28 +1,15 @@
-//! A module with ide helpers for high-level ide features.
-pub mod famous_defs;
-pub mod generated_lints;
-pub mod import_assets;
-pub mod insert_use;
-pub mod merge_imports;
-pub mod insert_whitespace_into_node;
-pub mod node_ext;
-pub mod rust_doc;
-pub mod format_string;
+//! Random assortment of ide helpers for high-level ide features that don't fit in any other module.
 
 use std::collections::VecDeque;
 
 use base_db::FileId;
 use hir::{ItemInNs, MacroDef, ModuleDef, Name, Semantics};
 use itertools::Itertools;
 use syntax::{
-    ast::{self, make, HasLoopBody},
-    AstNode, AstToken, Preorder, RustLanguage, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent,
-    T,
+    ast::{self, make},
+    AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
 };
 
-use crate::{defs::Definition, RootDatabase};
-
-pub use self::famous_defs::FamousDefs;
+use crate::{defs::Definition, generated, RootDatabase};
 
 pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option<Name> {
     match item {

@ -91,216 +78,16 @@ pub fn visit_file_defs(
|
|||
module.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub struct SnippetCap {
|
||||
_private: (),
|
||||
}
|
||||
|
||||
impl SnippetCap {
|
||||
pub const fn new(allow_snippets: bool) -> Option<SnippetCap> {
|
||||
if allow_snippets {
|
||||
Some(SnippetCap { _private: () })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Calls `cb` on each expression inside `expr` that is at "tail position".
|
||||
/// Does not walk into `break` or `return` expressions.
|
||||
/// Note that modifying the tree while iterating it will cause undefined iteration which might
|
||||
/// potentially results in an out of bounds panic.
|
||||
pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
|
||||
match expr {
|
||||
ast::Expr::BlockExpr(b) => {
|
||||
match b.modifier() {
|
||||
Some(
|
||||
ast::BlockModifier::Async(_)
|
||||
| ast::BlockModifier::Try(_)
|
||||
| ast::BlockModifier::Const(_),
|
||||
) => return cb(expr),
|
||||
|
||||
Some(ast::BlockModifier::Label(label)) => {
|
||||
for_each_break_expr(Some(label), b.stmt_list(), &mut |b| {
|
||||
cb(&ast::Expr::BreakExpr(b))
|
||||
});
|
||||
}
|
||||
Some(ast::BlockModifier::Unsafe(_)) => (),
|
||||
None => (),
|
||||
}
|
||||
if let Some(stmt_list) = b.stmt_list() {
|
||||
if let Some(e) = stmt_list.tail_expr() {
|
||||
for_each_tail_expr(&e, cb);
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::Expr::IfExpr(if_) => {
|
||||
let mut if_ = if_.clone();
|
||||
loop {
|
||||
if let Some(block) = if_.then_branch() {
|
||||
for_each_tail_expr(&ast::Expr::BlockExpr(block), cb);
|
||||
}
|
||||
match if_.else_branch() {
|
||||
Some(ast::ElseBranch::IfExpr(it)) => if_ = it,
|
||||
Some(ast::ElseBranch::Block(block)) => {
|
||||
for_each_tail_expr(&ast::Expr::BlockExpr(block), cb);
|
||||
break;
|
||||
}
|
||||
None => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::Expr::LoopExpr(l) => {
|
||||
for_each_break_expr(l.label(), l.loop_body().and_then(|it| it.stmt_list()), &mut |b| {
|
||||
cb(&ast::Expr::BreakExpr(b))
|
||||
})
|
||||
}
|
||||
ast::Expr::MatchExpr(m) => {
|
||||
if let Some(arms) = m.match_arm_list() {
|
||||
arms.arms().filter_map(|arm| arm.expr()).for_each(|e| for_each_tail_expr(&e, cb));
|
||||
}
|
||||
}
|
||||
ast::Expr::ArrayExpr(_)
|
||||
| ast::Expr::AwaitExpr(_)
|
||||
| ast::Expr::BinExpr(_)
|
||||
| ast::Expr::BoxExpr(_)
|
||||
| ast::Expr::BreakExpr(_)
|
||||
| ast::Expr::CallExpr(_)
|
||||
| ast::Expr::CastExpr(_)
|
||||
| ast::Expr::ClosureExpr(_)
|
||||
| ast::Expr::ContinueExpr(_)
|
||||
| ast::Expr::FieldExpr(_)
|
||||
| ast::Expr::ForExpr(_)
|
||||
| ast::Expr::IndexExpr(_)
|
||||
| ast::Expr::Literal(_)
|
||||
| ast::Expr::MacroCall(_)
|
||||
| ast::Expr::MacroStmts(_)
|
||||
| ast::Expr::MethodCallExpr(_)
|
||||
| ast::Expr::ParenExpr(_)
|
||||
| ast::Expr::PathExpr(_)
|
||||
| ast::Expr::PrefixExpr(_)
|
||||
| ast::Expr::RangeExpr(_)
|
||||
| ast::Expr::RecordExpr(_)
|
||||
| ast::Expr::RefExpr(_)
|
||||
| ast::Expr::ReturnExpr(_)
|
||||
| ast::Expr::TryExpr(_)
|
||||
| ast::Expr::TupleExpr(_)
|
||||
| ast::Expr::WhileExpr(_)
|
||||
| ast::Expr::LetExpr(_)
|
||||
| ast::Expr::UnderscoreExpr(_)
|
||||
| ast::Expr::YieldExpr(_) => cb(expr),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn for_each_break_and_continue_expr(
|
||||
label: Option<ast::Label>,
|
||||
body: Option<ast::StmtList>,
|
||||
cb: &mut dyn FnMut(ast::Expr),
|
||||
) {
|
||||
let label = label.and_then(|lbl| lbl.lifetime());
|
||||
if let Some(b) = body {
|
||||
let tree_depth_iterator = TreeWithDepthIterator::new(b);
|
||||
for (expr, depth) in tree_depth_iterator {
|
||||
match expr {
|
||||
ast::Expr::BreakExpr(b)
|
||||
if (depth == 0 && b.lifetime().is_none())
|
||||
|| eq_label_lt(&label, &b.lifetime()) =>
|
||||
{
|
||||
cb(ast::Expr::BreakExpr(b));
|
||||
}
|
||||
ast::Expr::ContinueExpr(c)
|
||||
if (depth == 0 && c.lifetime().is_none())
|
||||
|| eq_label_lt(&label, &c.lifetime()) =>
|
||||
{
|
||||
cb(ast::Expr::ContinueExpr(c));
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn for_each_break_expr(
|
||||
label: Option<ast::Label>,
|
||||
body: Option<ast::StmtList>,
|
||||
cb: &mut dyn FnMut(ast::BreakExpr),
|
||||
) {
|
||||
let label = label.and_then(|lbl| lbl.lifetime());
|
||||
if let Some(b) = body {
|
||||
let tree_depth_iterator = TreeWithDepthIterator::new(b);
|
||||
for (expr, depth) in tree_depth_iterator {
|
||||
match expr {
|
||||
ast::Expr::BreakExpr(b)
|
||||
if (depth == 0 && b.lifetime().is_none())
|
||||
|| eq_label_lt(&label, &b.lifetime()) =>
|
||||
{
|
||||
cb(b);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool {
|
||||
lt1.as_ref().zip(lt2.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text())
|
||||
}
|
||||
|
||||
struct TreeWithDepthIterator {
|
||||
preorder: Preorder<RustLanguage>,
|
||||
depth: u32,
|
||||
}
|
||||
|
||||
impl TreeWithDepthIterator {
|
||||
fn new(body: ast::StmtList) -> Self {
|
||||
let preorder = body.syntax().preorder();
|
||||
Self { preorder, depth: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for TreeWithDepthIterator {
|
||||
type Item = (ast::Expr, u32);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
while let Some(event) = self.preorder.find_map(|ev| match ev {
|
||||
WalkEvent::Enter(it) => ast::Expr::cast(it).map(WalkEvent::Enter),
|
||||
WalkEvent::Leave(it) => ast::Expr::cast(it).map(WalkEvent::Leave),
|
||||
}) {
|
||||
match event {
|
||||
WalkEvent::Enter(
|
||||
ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
|
||||
) => {
|
||||
self.depth += 1;
|
||||
}
|
||||
WalkEvent::Leave(
|
||||
ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
|
||||
) => {
|
||||
self.depth -= 1;
|
||||
}
|
||||
WalkEvent::Enter(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
|
||||
self.depth += 1;
|
||||
}
|
||||
WalkEvent::Leave(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
|
||||
self.depth -= 1;
|
||||
}
|
||||
WalkEvent::Enter(expr) => return Some((expr, self.depth)),
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks if the given lint is equal or is contained by the other lint which may or may not be a group.
|
||||
pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
|
||||
if lint == lint_is {
|
||||
return true;
|
||||
}
|
||||
|
||||
if let Some(group) = generated_lints::DEFAULT_LINT_GROUPS
|
||||
if let Some(group) = generated::lints::DEFAULT_LINT_GROUPS
|
||||
.iter()
|
||||
.chain(generated_lints::CLIPPY_LINT_GROUPS.iter())
|
||||
.chain(generated_lints::RUSTDOC_LINT_GROUPS.iter())
|
||||
.chain(generated::lints::CLIPPY_LINT_GROUPS.iter())
|
||||
.chain(generated::lints::RUSTDOC_LINT_GROUPS.iter())
|
||||
.find(|&check| check.lint.label == lint_is)
|
||||
{
|
||||
group.children.contains(&lint)
|
||||
|
@ -308,30 +95,3 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
|
|||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses the input token tree as comma separated plain paths.
|
||||
pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
|
||||
let r_paren = input.r_paren_token();
|
||||
let tokens =
|
||||
input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
|
||||
// seeing a keyword means the attribute is unclosed so stop parsing here
|
||||
Some(tok) if tok.kind().is_keyword() => None,
|
||||
// don't include the right token tree parenthesis if it exists
|
||||
tok @ Some(_) if tok == r_paren => None,
|
||||
// only nodes that we can find are other TokenTrees, those are unexpected in this parse though
|
||||
None => None,
|
||||
Some(tok) => Some(tok),
|
||||
});
|
||||
let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
|
||||
let paths = input_expressions
|
||||
.into_iter()
|
||||
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
|
||||
.filter_map(|mut tokens| {
|
||||
syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
|
||||
ast::Expr::PathExpr(it) => it.path(),
|
||||
_ => None,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
Some(paths)
|
||||
}
|
||||
|
|
|
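After the removals above, `ide_db::helpers` retains only a few free functions (e.g. `item_name`, `mod_path_to_ast`, `pick_best_token`, `lint_eq_or_in_group`), with `lint_eq_or_in_group` now reading its data from `generated::lints` internally. A hedged usage sketch (the specific lint names are only examples):

use ide_db::helpers::lint_eq_or_in_group;

fn main() {
    // Exact lint names always match.
    assert!(lint_eq_or_in_group("dead_code", "dead_code"));
    // Group membership is resolved against the generated lint tables,
    // e.g. whether "dead_code" is listed as a child of the "unused" group.
    let _grouped = lint_eq_or_in_group("dead_code", "unused");
}
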
@ -12,12 +12,11 @@ use syntax::{
|
|||
};
|
||||
|
||||
use crate::{
|
||||
helpers::item_name,
|
||||
items_locator::{self, AssocItemSearch, DEFAULT_QUERY_SEARCH_LIMIT},
|
||||
RootDatabase,
|
||||
};
|
||||
|
||||
use super::item_name;
|
||||
|
||||
/// A candidate for import, derived during various IDE activities:
|
||||
/// * completion with imports on the fly proposals
|
||||
/// * completion edit resolve requests
|
|
@ -12,7 +12,7 @@ use syntax::{
|
|||
};
|
||||
|
||||
use crate::{
|
||||
helpers::merge_imports::{
|
||||
imports::merge_imports::{
|
||||
common_prefix, eq_attrs, eq_visibility, try_merge_imports, use_tree_path_cmp, MergeBehavior,
|
||||
},
|
||||
RootDatabase,
|
|
@ -7,7 +7,7 @@ use syntax::{
|
|||
ted,
|
||||
};
|
||||
|
||||
use crate::helpers::node_ext::vis_eq;
|
||||
use crate::syntax_helpers::node_ext::vis_eq;
|
||||
|
||||
/// What type of merges are allowed.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
|
@ -13,7 +13,7 @@ use syntax::{ast, AstNode, SyntaxKind::NAME};
|
|||
|
||||
use crate::{
|
||||
defs::{Definition, NameClass},
|
||||
helpers::import_assets::NameToImport,
|
||||
imports::import_assets::NameToImport,
|
||||
symbol_index, RootDatabase,
|
||||
};
|
||||
|
||||
|
|
|
@@ -4,21 +4,38 @@
 
 mod apply_change;
 
+pub mod active_parameter;
 pub mod assists;
+pub mod defs;
+pub mod famous_defs;
+pub mod helpers;
+pub mod items_locator;
 pub mod label;
 pub mod line_index;
-pub mod symbol_index;
-pub mod defs;
-pub mod items_locator;
-pub mod source_change;
-pub mod ty_filter;
-pub mod traits;
-pub mod helpers;
 pub mod path_transform;
-
-pub mod search;
 pub mod rename;
-pub mod active_parameter;
 pub mod rust_doc;
+pub mod search;
+pub mod source_change;
+pub mod symbol_index;
+pub mod traits;
+pub mod ty_filter;
+
+pub mod imports {
+    pub mod import_assets;
+    pub mod insert_use;
+    pub mod merge_imports;
+}
+
+pub mod generated {
+    pub mod lints;
+}
+
+pub mod syntax_helpers {
+    pub mod node_ext;
+    pub mod insert_whitespace_into_node;
+    pub mod format_string;
+}
 
 use std::{fmt, mem::ManuallyDrop, sync::Arc};

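These are ordinary inline module declarations, so each inner `pub mod name;` is resolved from a file at the matching sub-path (the file moves themselves are elided from this excerpt; the sourcegen hunk near the end confirms the new `crates/ide_db/src/generated/lints.rs` location). A minimal sketch of the assumed resolution and the resulting call-site path:

// In lib.rs:
pub mod generated {
    pub mod lints; // loaded from crates/ide_db/src/generated/lints.rs
}

// At a call site (previously ide_db::helpers::generated_lints::DEFAULT_LINTS):
use ide_db::generated::lints::DEFAULT_LINTS;
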
@@ -42,14 +59,14 @@ pub type FxIndexMap<K, V> =
     indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
 
 #[salsa::database(
-    base_db::SourceDatabaseStorage,
     base_db::SourceDatabaseExtStorage,
-    LineIndexDatabaseStorage,
-    symbol_index::SymbolsDatabaseStorage,
-    hir::db::InternDatabaseStorage,
+    base_db::SourceDatabaseStorage,
     hir::db::AstDatabaseStorage,
     hir::db::DefDatabaseStorage,
-    hir::db::HirDatabaseStorage
+    hir::db::HirDatabaseStorage,
+    hir::db::InternDatabaseStorage,
+    LineIndexDatabaseStorage,
+    symbol_index::SymbolsDatabaseStorage
 )]
 pub struct RootDatabase {
     // We use `ManuallyDrop` here because every codegen unit that contains a

@@ -61,9 +78,7 @@ pub struct RootDatabase {
 
 impl Drop for RootDatabase {
     fn drop(&mut self) {
-        unsafe {
-            ManuallyDrop::drop(&mut self.storage);
-        }
+        unsafe { ManuallyDrop::drop(&mut self.storage) };
     }
 }
 
@@ -117,7 +132,7 @@ impl RootDatabase {
         db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
         db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
         db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
-        db.set_enable_proc_attr_macros(Default::default());
+        db.set_enable_proc_attr_macros(false);
         db.update_lru_capacity(lru_capacity);
         db
     }

@@ -204,6 +219,21 @@ impl From<FileSymbolKind> for SymbolKind {
     }
 }
 
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct SnippetCap {
+    _private: (),
+}
+
+impl SnippetCap {
+    pub const fn new(allow_snippets: bool) -> Option<SnippetCap> {
+        if allow_snippets {
+            Some(SnippetCap { _private: () })
+        } else {
+            None
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     mod sourcegen_lints;

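`SnippetCap` moves verbatim from `helpers` into the crate root, so it is now imported as `ide_db::SnippetCap`. A minimal usage sketch (the `true` flag and the surrounding logic are illustrative; in practice the capability comes from the client configuration):

use ide_db::SnippetCap;

fn snippet_text(cap: Option<SnippetCap>) -> &'static str {
    match cap {
        Some(_) => "let $0", // client supports snippets: emit a tab stop
        None => "let ",      // otherwise fall back to plain text
    }
}

fn main() {
    // Some(SnippetCap) only when the client allows snippets.
    let cap = SnippetCap::new(true);
    assert_eq!(snippet_text(cap), "let $0");
}
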
@@ -1,6 +1,6 @@
 //! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
 //! representation.
-use std::iter;
+use std::{iter, mem};
 
 use rustc_hash::FxHashMap;
 use syntax::{TextRange, TextSize};

@@ -59,8 +59,7 @@ impl LineIndex {
         let mut utf16_chars = Vec::new();
 
         let mut newlines = vec![0.into()];
-        let mut curr_row = 0.into();
-        let mut curr_col = 0.into();
+        let mut curr_row @ mut curr_col = 0.into();
         let mut line = 0;
         for c in text.chars() {
             let c_len = TextSize::of(c);

@@ -70,8 +69,7 @@ impl LineIndex {
 
                 // Save any utf-16 characters seen in the previous line
                 if !utf16_chars.is_empty() {
-                    utf16_lines.insert(line, utf16_chars);
-                    utf16_chars = Vec::new();
+                    utf16_lines.insert(line, mem::take(&mut utf16_chars));
                 }
 
                 // Prepare for processing the next line

|
|||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_line_index() {
|
||||
let text = "hello\nworld";
|
||||
let table = [
|
||||
(00, 0, 0),
|
||||
(01, 0, 1),
|
||||
(05, 0, 5),
|
||||
(06, 1, 0),
|
||||
(07, 1, 1),
|
||||
(08, 1, 2),
|
||||
(10, 1, 4),
|
||||
(11, 1, 5),
|
||||
(12, 1, 6),
|
||||
];
|
||||
|
||||
let index = LineIndex::new(text);
|
||||
for &(offset, line, col) in &table {
|
||||
assert_eq!(index.line_col(offset.into()), LineCol { line, col });
|
||||
}
|
||||
|
||||
let text = "\nhello\nworld";
|
||||
let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
|
||||
let index = LineIndex::new(text);
|
||||
for &(offset, line, col) in &table {
|
||||
assert_eq!(index.line_col(offset.into()), LineCol { line, col });
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_char_len() {
|
||||
assert_eq!('メ'.len_utf8(), 3);
|
||||
assert_eq!('メ'.len_utf16(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_index() {
|
||||
let col_index = LineIndex::new(
|
||||
"
|
||||
const C: char = 'x';
|
||||
",
|
||||
);
|
||||
assert_eq!(col_index.utf16_lines.len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_single_char() {
|
||||
let col_index = LineIndex::new(
|
||||
"
|
||||
const C: char = 'メ';
|
||||
",
|
||||
);
|
||||
|
||||
assert_eq!(col_index.utf16_lines.len(), 1);
|
||||
assert_eq!(col_index.utf16_lines[&1].len(), 1);
|
||||
assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
|
||||
|
||||
// UTF-8 to UTF-16, no changes
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
|
||||
|
||||
// UTF-8 to UTF-16
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
|
||||
|
||||
// UTF-16 to UTF-8, no changes
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
|
||||
|
||||
// UTF-16 to UTF-8
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
|
||||
|
||||
let col_index = LineIndex::new("a𐐏b");
|
||||
assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_string() {
|
||||
let col_index = LineIndex::new(
|
||||
"
|
||||
const C: char = \"メ メ\";
|
||||
",
|
||||
);
|
||||
|
||||
assert_eq!(col_index.utf16_lines.len(), 1);
|
||||
assert_eq!(col_index.utf16_lines[&1].len(), 2);
|
||||
assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
|
||||
assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });
|
||||
|
||||
// UTF-8 to UTF-16
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
|
||||
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);
|
||||
|
||||
assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);
|
||||
|
||||
// UTF-16 to UTF-8
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
|
||||
|
||||
// メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24
|
||||
|
||||
assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_splitlines() {
|
||||
fn r(lo: u32, hi: u32) -> TextRange {
|
||||
TextRange::new(lo.into(), hi.into())
|
||||
}
|
||||
|
||||
let text = "a\nbb\nccc\n";
|
||||
let line_index = LineIndex::new(text);
|
||||
|
||||
let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
|
||||
let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let text = "";
|
||||
let line_index = LineIndex::new(text);
|
||||
|
||||
let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
|
||||
let expected = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let text = "\n";
|
||||
let line_index = LineIndex::new(text);
|
||||
|
||||
let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
|
||||
let expected = vec![r(0, 1)];
|
||||
assert_eq!(actual, expected)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,133 +0,0 @@
|
|||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_line_index() {
|
||||
let text = "hello\nworld";
|
||||
let table = [
|
||||
(00, 0, 0),
|
||||
(01, 0, 1),
|
||||
(05, 0, 5),
|
||||
(06, 1, 0),
|
||||
(07, 1, 1),
|
||||
(08, 1, 2),
|
||||
(10, 1, 4),
|
||||
(11, 1, 5),
|
||||
(12, 1, 6),
|
||||
];
|
||||
|
||||
let index = LineIndex::new(text);
|
||||
for &(offset, line, col) in &table {
|
||||
assert_eq!(index.line_col(offset.into()), LineCol { line, col });
|
||||
}
|
||||
|
||||
let text = "\nhello\nworld";
|
||||
let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
|
||||
let index = LineIndex::new(text);
|
||||
for &(offset, line, col) in &table {
|
||||
assert_eq!(index.line_col(offset.into()), LineCol { line, col });
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_char_len() {
|
||||
assert_eq!('メ'.len_utf8(), 3);
|
||||
assert_eq!('メ'.len_utf16(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_index() {
|
||||
let col_index = LineIndex::new(
|
||||
"
|
||||
const C: char = 'x';
|
||||
",
|
||||
);
|
||||
assert_eq!(col_index.utf16_lines.len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_single_char() {
|
||||
let col_index = LineIndex::new(
|
||||
"
|
||||
const C: char = 'メ';
|
||||
",
|
||||
);
|
||||
|
||||
assert_eq!(col_index.utf16_lines.len(), 1);
|
||||
assert_eq!(col_index.utf16_lines[&1].len(), 1);
|
||||
assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
|
||||
|
||||
// UTF-8 to UTF-16, no changes
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
|
||||
|
||||
// UTF-8 to UTF-16
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
|
||||
|
||||
// UTF-16 to UTF-8, no changes
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
|
||||
|
||||
// UTF-16 to UTF-8
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
|
||||
|
||||
let col_index = LineIndex::new("a𐐏b");
|
||||
assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_string() {
|
||||
let col_index = LineIndex::new(
|
||||
"
|
||||
const C: char = \"メ メ\";
|
||||
",
|
||||
);
|
||||
|
||||
assert_eq!(col_index.utf16_lines.len(), 1);
|
||||
assert_eq!(col_index.utf16_lines[&1].len(), 2);
|
||||
assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
|
||||
assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });
|
||||
|
||||
// UTF-8 to UTF-16
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
|
||||
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
|
||||
assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);
|
||||
|
||||
assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);
|
||||
|
||||
// UTF-16 to UTF-8
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
|
||||
|
||||
// メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
|
||||
assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24
|
||||
|
||||
assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_splitlines() {
|
||||
fn r(lo: u32, hi: u32) -> TextRange {
|
||||
TextRange::new(lo.into(), hi.into())
|
||||
}
|
||||
|
||||
let text = "a\nbb\nccc\n";
|
||||
let line_index = LineIndex::new(text);
|
||||
|
||||
let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
|
||||
let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let text = "";
|
||||
let line_index = LineIndex::new(text);
|
||||
|
||||
let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
|
||||
let expected = vec![];
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let text = "\n";
|
||||
let line_index = LineIndex::new(text);
|
||||
|
||||
let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
|
||||
let expected = vec![r(0, 1)];
|
||||
assert_eq!(actual, expected)
|
||||
}
|
|
@ -10,7 +10,7 @@
|
|||
//!
|
||||
//! Another can of worms are macros:
|
||||
//!
|
||||
//! ```
|
||||
//! ```ignore
|
||||
//! macro_rules! m { () => { fn f() {} } }
|
||||
//! m!();
|
||||
//! fn main() {
|
||||
|
@ -34,9 +34,9 @@ use text_edit::{TextEdit, TextEditBuilder};
|
|||
|
||||
use crate::{
|
||||
defs::Definition,
|
||||
helpers::node_ext::expr_as_name_ref,
|
||||
search::FileReference,
|
||||
source_change::{FileSystemEdit, SourceChange},
|
||||
syntax_helpers::node_ext::expr_as_name_ref,
|
||||
RootDatabase,
|
||||
};
|
||||
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
//! Various helper functions to work with SyntaxNodes.
|
||||
use itertools::Itertools;
|
||||
use parser::T;
|
||||
use syntax::{
|
||||
ast::{self, PathSegmentKind, VisibilityKind},
|
||||
AstNode, WalkEvent,
|
||||
ast::{self, HasLoopBody, PathSegmentKind, VisibilityKind},
|
||||
AstNode, Preorder, RustLanguage, WalkEvent,
|
||||
};
|
||||
|
||||
pub fn expr_as_name_ref(expr: &ast::Expr) -> Option<ast::NameRef> {
|
||||
|
@ -242,3 +244,215 @@ pub fn is_pattern_cond(expr: ast::Expr) -> bool {
|
|||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Calls `cb` on each expression inside `expr` that is at "tail position".
|
||||
/// Does not walk into `break` or `return` expressions.
|
||||
/// Note that modifying the tree while iterating it will cause undefined iteration which might
|
||||
/// potentially results in an out of bounds panic.
|
||||
pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
|
||||
match expr {
|
||||
ast::Expr::BlockExpr(b) => {
|
||||
match b.modifier() {
|
||||
Some(
|
||||
ast::BlockModifier::Async(_)
|
||||
| ast::BlockModifier::Try(_)
|
||||
| ast::BlockModifier::Const(_),
|
||||
) => return cb(expr),
|
||||
|
||||
Some(ast::BlockModifier::Label(label)) => {
|
||||
for_each_break_expr(Some(label), b.stmt_list(), &mut |b| {
|
||||
cb(&ast::Expr::BreakExpr(b))
|
||||
});
|
||||
}
|
||||
Some(ast::BlockModifier::Unsafe(_)) => (),
|
||||
None => (),
|
||||
}
|
||||
if let Some(stmt_list) = b.stmt_list() {
|
||||
if let Some(e) = stmt_list.tail_expr() {
|
||||
for_each_tail_expr(&e, cb);
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::Expr::IfExpr(if_) => {
|
||||
let mut if_ = if_.clone();
|
||||
loop {
|
||||
if let Some(block) = if_.then_branch() {
|
||||
for_each_tail_expr(&ast::Expr::BlockExpr(block), cb);
|
||||
}
|
||||
match if_.else_branch() {
|
||||
Some(ast::ElseBranch::IfExpr(it)) => if_ = it,
|
||||
Some(ast::ElseBranch::Block(block)) => {
|
||||
for_each_tail_expr(&ast::Expr::BlockExpr(block), cb);
|
||||
break;
|
||||
}
|
||||
None => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::Expr::LoopExpr(l) => {
|
||||
for_each_break_expr(l.label(), l.loop_body().and_then(|it| it.stmt_list()), &mut |b| {
|
||||
cb(&ast::Expr::BreakExpr(b))
|
||||
})
|
||||
}
|
||||
ast::Expr::MatchExpr(m) => {
|
||||
if let Some(arms) = m.match_arm_list() {
|
||||
arms.arms().filter_map(|arm| arm.expr()).for_each(|e| for_each_tail_expr(&e, cb));
|
||||
}
|
||||
}
|
||||
ast::Expr::ArrayExpr(_)
|
||||
| ast::Expr::AwaitExpr(_)
|
||||
| ast::Expr::BinExpr(_)
|
||||
| ast::Expr::BoxExpr(_)
|
||||
| ast::Expr::BreakExpr(_)
|
||||
| ast::Expr::CallExpr(_)
|
||||
| ast::Expr::CastExpr(_)
|
||||
| ast::Expr::ClosureExpr(_)
|
||||
| ast::Expr::ContinueExpr(_)
|
||||
| ast::Expr::FieldExpr(_)
|
||||
| ast::Expr::ForExpr(_)
|
||||
| ast::Expr::IndexExpr(_)
|
||||
| ast::Expr::Literal(_)
|
||||
| ast::Expr::MacroCall(_)
|
||||
| ast::Expr::MacroStmts(_)
|
||||
| ast::Expr::MethodCallExpr(_)
|
||||
| ast::Expr::ParenExpr(_)
|
||||
| ast::Expr::PathExpr(_)
|
||||
| ast::Expr::PrefixExpr(_)
|
||||
| ast::Expr::RangeExpr(_)
|
||||
| ast::Expr::RecordExpr(_)
|
||||
| ast::Expr::RefExpr(_)
|
||||
| ast::Expr::ReturnExpr(_)
|
||||
| ast::Expr::TryExpr(_)
|
||||
| ast::Expr::TupleExpr(_)
|
||||
| ast::Expr::WhileExpr(_)
|
||||
| ast::Expr::LetExpr(_)
|
||||
| ast::Expr::UnderscoreExpr(_)
|
||||
| ast::Expr::YieldExpr(_) => cb(expr),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn for_each_break_and_continue_expr(
|
||||
label: Option<ast::Label>,
|
||||
body: Option<ast::StmtList>,
|
||||
cb: &mut dyn FnMut(ast::Expr),
|
||||
) {
|
||||
let label = label.and_then(|lbl| lbl.lifetime());
|
||||
if let Some(b) = body {
|
||||
let tree_depth_iterator = TreeWithDepthIterator::new(b);
|
||||
for (expr, depth) in tree_depth_iterator {
|
||||
match expr {
|
||||
ast::Expr::BreakExpr(b)
|
||||
if (depth == 0 && b.lifetime().is_none())
|
||||
|| eq_label_lt(&label, &b.lifetime()) =>
|
||||
{
|
||||
cb(ast::Expr::BreakExpr(b));
|
||||
}
|
||||
ast::Expr::ContinueExpr(c)
|
||||
if (depth == 0 && c.lifetime().is_none())
|
||||
|| eq_label_lt(&label, &c.lifetime()) =>
|
||||
{
|
||||
cb(ast::Expr::ContinueExpr(c));
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn for_each_break_expr(
|
||||
label: Option<ast::Label>,
|
||||
body: Option<ast::StmtList>,
|
||||
cb: &mut dyn FnMut(ast::BreakExpr),
|
||||
) {
|
||||
let label = label.and_then(|lbl| lbl.lifetime());
|
||||
if let Some(b) = body {
|
||||
let tree_depth_iterator = TreeWithDepthIterator::new(b);
|
||||
for (expr, depth) in tree_depth_iterator {
|
||||
match expr {
|
||||
ast::Expr::BreakExpr(b)
|
||||
if (depth == 0 && b.lifetime().is_none())
|
||||
|| eq_label_lt(&label, &b.lifetime()) =>
|
||||
{
|
||||
cb(b);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool {
|
||||
lt1.as_ref().zip(lt2.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text())
|
||||
}
|
||||
|
||||
struct TreeWithDepthIterator {
|
||||
preorder: Preorder<RustLanguage>,
|
||||
depth: u32,
|
||||
}
|
||||
|
||||
impl TreeWithDepthIterator {
|
||||
fn new(body: ast::StmtList) -> Self {
|
||||
let preorder = body.syntax().preorder();
|
||||
Self { preorder, depth: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for TreeWithDepthIterator {
|
||||
type Item = (ast::Expr, u32);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
while let Some(event) = self.preorder.find_map(|ev| match ev {
|
||||
WalkEvent::Enter(it) => ast::Expr::cast(it).map(WalkEvent::Enter),
|
||||
WalkEvent::Leave(it) => ast::Expr::cast(it).map(WalkEvent::Leave),
|
||||
}) {
|
||||
match event {
|
||||
WalkEvent::Enter(
|
||||
ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
|
||||
) => {
|
||||
self.depth += 1;
|
||||
}
|
||||
WalkEvent::Leave(
|
||||
ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
|
||||
) => {
|
||||
self.depth -= 1;
|
||||
}
|
||||
WalkEvent::Enter(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
|
||||
self.depth += 1;
|
||||
}
|
||||
WalkEvent::Leave(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
|
||||
self.depth -= 1;
|
||||
}
|
||||
WalkEvent::Enter(expr) => return Some((expr, self.depth)),
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses the input token tree as comma separated plain paths.
|
||||
pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
|
||||
let r_paren = input.r_paren_token();
|
||||
let tokens =
|
||||
input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
|
||||
// seeing a keyword means the attribute is unclosed so stop parsing here
|
||||
Some(tok) if tok.kind().is_keyword() => None,
|
||||
// don't include the right token tree parenthesis if it exists
|
||||
tok @ Some(_) if tok == r_paren => None,
|
||||
// only nodes that we can find are other TokenTrees, those are unexpected in this parse though
|
||||
None => None,
|
||||
Some(tok) => Some(tok),
|
||||
});
|
||||
let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
|
||||
let paths = input_expressions
|
||||
.into_iter()
|
||||
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
|
||||
.filter_map(|mut tokens| {
|
||||
syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
|
||||
ast::Expr::PathExpr(it) => it.path(),
|
||||
_ => None,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
Some(paths)
|
||||
}
|
|
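The traversal helpers above now live under `ide_db::syntax_helpers::node_ext`. A hedged usage sketch of `for_each_tail_expr` from its new home (the collecting wrapper is illustrative, not part of the patch):

use ide_db::syntax_helpers::node_ext::for_each_tail_expr;
use syntax::ast;

// Collect every expression in tail position of `expr`, e.g. both branches of a
// trailing if/else or every match arm body.
fn tail_exprs(expr: &ast::Expr) -> Vec<ast::Expr> {
    let mut tails = Vec::new();
    for_each_tail_expr(expr, &mut |e| tails.push(e.clone()));
    tails
}
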
@@ -44,7 +44,7 @@ pub struct LintGroup {
 
     let contents = sourcegen::add_preamble("sourcegen_lints", sourcegen::reformat(contents));
 
-    let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
+    let destination = project_root().join("crates/ide_db/src/generated/lints.rs");
     sourcegen::ensure_file_contents(destination.as_path(), &contents);
 }
 
@@ -77,4 +77,147 @@ pub fn get_missing_assoc_items(
}

#[cfg(test)]
-mod tests;
+mod tests {
+    use base_db::{fixture::ChangeFixture, FilePosition};
+    use expect_test::{expect, Expect};
+    use hir::Semantics;
+    use syntax::ast::{self, AstNode};
+
+    use crate::RootDatabase;
+
+    /// Creates analysis from a multi-file fixture, returns positions marked with $0.
+    pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
+        let change_fixture = ChangeFixture::parse(ra_fixture);
+        let mut database = RootDatabase::default();
+        database.apply_change(change_fixture.change);
+        let (file_id, range_or_offset) =
+            change_fixture.file_position.expect("expected a marker ($0)");
+        let offset = range_or_offset.expect_offset();
+        (database, FilePosition { file_id, offset })
+    }
+
+    fn check_trait(ra_fixture: &str, expect: Expect) {
+        let (db, position) = position(ra_fixture);
+        let sema = Semantics::new(&db);
+        let file = sema.parse(position.file_id);
+        let impl_block: ast::Impl =
+            sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
+        let trait_ = crate::traits::resolve_target_trait(&sema, &impl_block);
+        let actual = match trait_ {
+            Some(trait_) => trait_.name(&db).to_string(),
+            None => String::new(),
+        };
+        expect.assert_eq(&actual);
+    }
+
+    fn check_missing_assoc(ra_fixture: &str, expect: Expect) {
+        let (db, position) = position(ra_fixture);
+        let sema = Semantics::new(&db);
+        let file = sema.parse(position.file_id);
+        let impl_block: ast::Impl =
+            sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
+        let items = crate::traits::get_missing_assoc_items(&sema, &impl_block);
+        let actual = items
+            .into_iter()
+            .map(|item| item.name(&db).unwrap().to_string())
+            .collect::<Vec<_>>()
+            .join("\n");
+        expect.assert_eq(&actual);
+    }
+
+    #[test]
+    fn resolve_trait() {
+        check_trait(
+            r#"
+pub trait Foo {
+    fn bar();
+}
+impl Foo for u8 {
+    $0
+}
+"#,
+            expect![["Foo"]],
+        );
+        check_trait(
+            r#"
+pub trait Foo {
+    fn bar();
+}
+impl Foo for u8 {
+    fn bar() {
+        fn baz() {
+            $0
+        }
+        baz();
+    }
+}
+"#,
+            expect![["Foo"]],
+        );
+        check_trait(
+            r#"
+pub trait Foo {
+    fn bar();
+}
+pub struct Bar;
+impl Bar {
+    $0
+}
+"#,
+            expect![[""]],
+        );
+    }
+
+    #[test]
+    fn missing_assoc_items() {
+        check_missing_assoc(
+            r#"
+pub trait Foo {
+    const FOO: u8;
+    fn bar();
+}
+impl Foo for u8 {
+    $0
+}"#,
+            expect![[r#"
+                FOO
+                bar"#]],
+        );
+
+        check_missing_assoc(
+            r#"
+pub trait Foo {
+    const FOO: u8;
+    fn bar();
+}
+impl Foo for u8 {
+    const FOO: u8 = 10;
+    $0
+}"#,
+            expect![[r#"
+                bar"#]],
+        );
+
+        check_missing_assoc(
+            r#"
+pub trait Foo {
+    const FOO: u8;
+    fn bar();
+}
+impl Foo for u8 {
+    const FOO: u8 = 10;
+    fn bar() {$0}
+}"#,
+            expect![[r#""#]],
+        );
+
+        check_missing_assoc(
+            r#"
+pub struct Foo;
+impl Foo {
+    fn bar() {$0}
+}"#,
+            expect![[r#""#]],
+        );
+    }
+}

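The inlined tests above rely on the expect-test snapshot style: `expect![[...]]` stores the expected output inline and `assert_eq` compares it against the actual value. A minimal, self-contained example of that pattern outside rust-analyzer is sketched below; the module name and dependency version are illustrative.

```rust
// Cargo.toml dev-dependency (version illustrative): expect-test = "1"
#[cfg(test)]
mod expect_demo {
    use expect_test::expect;

    #[test]
    fn snapshot_style_assertion() {
        let actual = ["FOO", "bar"].join("\n");
        // The expected snapshot lives inline in the source; running the test with
        // `UPDATE_EXPECT=1` rewrites it in place when the output changes on purpose.
        expect![[r#"
            FOO
            bar"#]]
        .assert_eq(&actual);
    }
}
```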
@@ -1,141 +0,0 @@
-use base_db::{fixture::ChangeFixture, FilePosition};
-use expect_test::{expect, Expect};
-use hir::Semantics;
-use syntax::ast::{self, AstNode};
-
-use crate::RootDatabase;
-
-/// Creates analysis from a multi-file fixture, returns positions marked with $0.
-pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
-    let change_fixture = ChangeFixture::parse(ra_fixture);
-    let mut database = RootDatabase::default();
-    database.apply_change(change_fixture.change);
-    let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    let offset = range_or_offset.expect_offset();
-    (database, FilePosition { file_id, offset })
-}
-
-fn check_trait(ra_fixture: &str, expect: Expect) {
-    let (db, position) = position(ra_fixture);
-    let sema = Semantics::new(&db);
-    let file = sema.parse(position.file_id);
-    let impl_block: ast::Impl =
-        sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
-    let trait_ = crate::traits::resolve_target_trait(&sema, &impl_block);
-    let actual = match trait_ {
-        Some(trait_) => trait_.name(&db).to_string(),
-        None => String::new(),
-    };
-    expect.assert_eq(&actual);
-}
-
-fn check_missing_assoc(ra_fixture: &str, expect: Expect) {
-    let (db, position) = position(ra_fixture);
-    let sema = Semantics::new(&db);
-    let file = sema.parse(position.file_id);
-    let impl_block: ast::Impl =
-        sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
-    let items = crate::traits::get_missing_assoc_items(&sema, &impl_block);
-    let actual = items
-        .into_iter()
-        .map(|item| item.name(&db).unwrap().to_string())
-        .collect::<Vec<_>>()
-        .join("\n");
-    expect.assert_eq(&actual);
-}
-
-#[test]
-fn resolve_trait() {
-    check_trait(
-        r#"
-pub trait Foo {
-    fn bar();
-}
-impl Foo for u8 {
-    $0
-}
-"#,
-        expect![["Foo"]],
-    );
-    check_trait(
-        r#"
-pub trait Foo {
-    fn bar();
-}
-impl Foo for u8 {
-    fn bar() {
-        fn baz() {
-            $0
-        }
-        baz();
-    }
-}
-"#,
-        expect![["Foo"]],
-    );
-    check_trait(
-        r#"
-pub trait Foo {
-    fn bar();
-}
-pub struct Bar;
-impl Bar {
-    $0
-}
-"#,
-        expect![[""]],
-    );
-}
-
-#[test]
-fn missing_assoc_items() {
-    check_missing_assoc(
-        r#"
-pub trait Foo {
-    const FOO: u8;
-    fn bar();
-}
-impl Foo for u8 {
-    $0
-}"#,
-        expect![[r#"
-            FOO
-            bar"#]],
-    );
-
-    check_missing_assoc(
-        r#"
-pub trait Foo {
-    const FOO: u8;
-    fn bar();
-}
-impl Foo for u8 {
-    const FOO: u8 = 10;
-    $0
-}"#,
-        expect![[r#"
-            bar"#]],
-    );
-
-    check_missing_assoc(
-        r#"
-pub trait Foo {
-    const FOO: u8;
-    fn bar();
-}
-impl Foo for u8 {
-    const FOO: u8 = 10;
-    fn bar() {$0}
-}"#,
-        expect![[r#""#]],
-    );
-
-    check_missing_assoc(
-        r#"
-pub struct Foo;
-impl Foo {
-    fn bar() {$0}
-}"#,
-        expect![[r#""#]],
-    );
-}

@@ -3,7 +3,7 @@ use hir::{
    db::{AstDatabase, HirDatabase},
    known, AssocItem, HirDisplay, InFile, Type,
};
-use ide_db::{assists::Assist, helpers::FamousDefs, source_change::SourceChange};
+use ide_db::{assists::Assist, famous_defs::FamousDefs, source_change::SourceChange};
use rustc_hash::FxHashMap;
use stdx::format_to;
use syntax::{

@@ -1,5 +1,7 @@
use hir::{db::AstDatabase, TypeInfo};
-use ide_db::{assists::Assist, helpers::for_each_tail_expr, source_change::SourceChange};
+use ide_db::{
+    assists::Assist, source_change::SourceChange, syntax_helpers::node_ext::for_each_tail_expr,
+};
use syntax::AstNode;
use text_edit::TextEdit;

@@ -14,8 +14,8 @@ use ide::{
    AssistConfig, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, HighlightRelatedConfig,
    HoverConfig, HoverDocFormat, InlayHintsConfig, JoinLinesConfig, Snippet, SnippetScope,
};
-use ide_db::helpers::{
-    insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
+use ide_db::{
+    imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
    SnippetCap,
};
use lsp_types::{ClientCapabilities, MarkupKind};

@@ -13,8 +13,8 @@
use std::sync::Arc;

use ide::{Change, CompletionConfig, FilePosition, TextSize};
-use ide_db::helpers::{
-    insert_use::{ImportGranularity, InsertUseConfig},
+use ide_db::{
+    imports::insert_use::{ImportGranularity, InsertUseConfig},
    SnippetCap,
};
use project_model::CargoConfig;

@@ -1,5 +1,5 @@
//! Transforms markdown
-use ide_db::helpers::rust_doc::is_rust_fence;
+use ide_db::rust_doc::is_rust_fence;

const RUSTDOC_FENCE: &str = "```";

@@ -188,7 +188,7 @@ https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-
fn deny_clippy(path: &Path, text: &str) {
    let ignore = &[
        // The documentation in string literals may contain anything for its own purposes
-        "ide_db/src/helpers/generated_lints.rs",
+        "ide_db/src/generated/lints.rs",
        // The tests test clippy lint hovers
        "ide/src/hover/tests.rs",
        // The tests test clippy lint completions

@@ -279,7 +279,7 @@ fn check_todo(path: &Path, text: &str) {
        // `ast::make`.
        "ast/make.rs",
        // The documentation in string literals may contain anything for its own purposes
-        "ide_db/src/helpers/generated_lints.rs",
+        "ide_db/src/generated/lints.rs",
        "ide_assists/src/utils/gen_trait_fn_body.rs",
        "ide_assists/src/tests/generated.rs",
        // The tests for missing fields

@@ -315,7 +315,7 @@ fn check_dbg(path: &Path, text: &str) {
        "ide_completion/src/tests/proc_macros.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/lib.rs",
-        "ide_db/src/helpers/generated_lints.rs",
+        "ide_db/src/generated/lints.rs",
        // test for doc test for remove_dbg
        "src/tests/generated.rs",
    ];