mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-25 04:23:25 +00:00

clippy::redundant_borrow

This commit is contained in:
parent d6737e55fb
commit c9b4ac5be4

114 changed files with 285 additions and 285 deletions
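Every hunk below is the same mechanical fix: a `&` is dropped where the expression is already a reference, so the extra borrow only builds a `&&T` that the compiler immediately dereferences again (the pattern flagged by clippy's redundant/needless-borrow lints). A minimal sketch of the pattern, using hypothetical names that do not appear in this commit:

fn greet(name: &str) -> String {
    // `name` is already a shared reference; callers should pass it as-is.
    format!("hello, {}", name)
}

fn main() {
    let name: &str = "rust-analyzer";
    // Flagged: `&name` is a `&&str` that deref coercion collapses right back to `&str`.
    let _ = greet(&name);
    // The fix applied throughout this commit: pass the reference through unchanged.
    let _ = greet(name);
}
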
@@ -190,7 +190,7 @@ impl From<Fixture> for FileMeta {
             edition: f
                 .edition
                 .as_ref()
-                .map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()),
+                .map_or(Edition::Edition2018, |v| Edition::from_str(v).unwrap()),
             env: f.env.into_iter().collect(),
             introduce_new_source_root: f.introduce_new_source_root,
         }
@@ -1112,7 +1112,7 @@ impl Function {
             .collect();
         sink.push(MissingFields {
             file: source_ptr.file_id,
-            field_list_parent: AstPtr::new(&record_expr),
+            field_list_parent: AstPtr::new(record_expr),
             field_list_parent_path: record_expr
                 .path()
                 .map(|path| AstPtr::new(&path)),

@@ -2531,13 +2531,13 @@ impl Type {
         match ty.kind(&Interner) {
             TyKind::Adt(_, substs) => {
                 cb(type_.derived(ty.clone()));
-                walk_substs(db, type_, &substs, cb);
+                walk_substs(db, type_, substs, cb);
             }
             TyKind::AssociatedType(_, substs) => {
                 if let Some(_) = ty.associated_type_parent_trait(db) {
                     cb(type_.derived(ty.clone()));
                 }
-                walk_substs(db, type_, &substs, cb);
+                walk_substs(db, type_, substs, cb);
             }
             TyKind::OpaqueType(_, subst) => {
                 if let Some(bounds) = ty.impl_trait_bounds(db) {

@@ -2577,7 +2577,7 @@ impl Type {
             TyKind::FnDef(_, substs)
             | TyKind::Tuple(_, substs)
             | TyKind::Closure(.., substs) => {
-                walk_substs(db, type_, &substs, cb);
+                walk_substs(db, type_, substs, cb);
             }
             TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
                 walk_substs(db, type_, &substitution.0, cb);
@@ -192,7 +192,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         node: &SyntaxNode,
         offset: TextSize,
     ) -> Option<N> {
-        if let Some(it) = find_node_at_offset(&node, offset) {
+        if let Some(it) = find_node_at_offset(node, offset) {
             return Some(it);
         }

@@ -744,7 +744,7 @@ impl<'db> SemanticsImpl<'db> {
             return None;
         }

-        let func = self.resolve_method_call(&method_call_expr).map(Function::from)?;
+        let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
         let res = match func.self_param(self.db)?.access(self.db) {
             Access::Shared | Access::Exclusive => true,
             Access::Owned => false,
@@ -222,7 +222,7 @@ impl SourceAnalyzer {
             Pat::Path(path) => path,
             _ => return None,
         };
-        let res = resolve_hir_path(db, &self.resolver, &path)?;
+        let res = resolve_hir_path(db, &self.resolver, path)?;
         match res {
             PathResolution::Def(def) => Some(def),
             _ => None,

@@ -329,7 +329,7 @@ impl SourceAnalyzer {

         let (variant, missing_fields, _exhaustive) =
             record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
-        let res = self.missing_fields(db, krate, &substs, variant, missing_fields);
+        let res = self.missing_fields(db, krate, substs, variant, missing_fields);
         Some(res)
     }

@@ -347,7 +347,7 @@ impl SourceAnalyzer {

         let (variant, missing_fields, _exhaustive) =
             record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
-        let res = self.missing_fields(db, krate, &substs, variant, missing_fields);
+        let res = self.missing_fields(db, krate, substs, variant, missing_fields);
         Some(res)
     }
@@ -1002,16 +1002,16 @@ impl From<ast::LiteralKind> for Literal {
                 if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
                     return Literal::Float(Default::default(), builtin);
                 } else if let builtin @ Some(_) =
-                    lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it))
+                    lit.suffix().and_then(|it| BuiltinInt::from_suffix(it))
                 {
                     Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
                 } else {
-                    let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it));
+                    let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(it));
                     Literal::Uint(lit.value().unwrap_or(0), builtin)
                 }
             }
             LiteralKind::FloatNumber(lit) => {
-                let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it));
+                let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(it));
                 Literal::Float(Default::default(), ty)
             }
             LiteralKind::ByteString(bs) => {
@@ -198,7 +198,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
         }
         Expr::Lambda { args, body: body_expr, .. } => {
             let scope = scopes.new_scope(scope);
-            scopes.add_params_bindings(body, scope, &args);
+            scopes.add_params_bindings(body, scope, args);
             compute_expr_scopes(*body_expr, body, scopes, scope);
         }
         Expr::Match { expr, arms } => {
@@ -280,7 +280,7 @@ impl GenericParams {
             sm.type_params.insert(param_id, Either::Right(type_param.clone()));

             let type_ref = TypeRef::Path(name.into());
-            self.fill_bounds(&lower_ctx, &type_param, Either::Left(type_ref));
+            self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
         }
         for lifetime_param in params.lifetime_params() {
             let name =

@@ -289,7 +289,7 @@ impl GenericParams {
             let param_id = self.lifetimes.alloc(param);
             sm.lifetime_params.insert(param_id, lifetime_param.clone());
             let lifetime_ref = LifetimeRef::new_name(name);
-            self.fill_bounds(&lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
+            self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
         }
         for const_param in params.const_params() {
             let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@@ -823,7 +823,7 @@ fn is_intrinsic_fn_unsafe(name: &Name) -> bool {
         known::type_name,
         known::variant_count,
     ]
-    .contains(&name)
+    .contains(name)
 }

 fn lower_abi(abi: ast::Abi) -> Interned<str> {
@@ -855,7 +855,7 @@ impl UseTreeLowering<'_> {
             // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
             // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
             Some(path) => {
-                match ModPath::from_src(self.db, path, &self.hygiene) {
+                match ModPath::from_src(self.db, path, self.hygiene) {
                     Some(it) => Some(it),
                     None => return None, // FIXME: report errors somewhere
                 }

@@ -874,7 +874,7 @@ impl UseTreeLowering<'_> {
         } else {
             let is_glob = tree.star_token().is_some();
             let path = match tree.path() {
-                Some(path) => Some(ModPath::from_src(self.db, path, &self.hygiene)?),
+                Some(path) => Some(ModPath::from_src(self.db, path, self.hygiene)?),
                 None => None,
             };
             let alias = tree.rename().map(|a| {
@@ -500,7 +500,7 @@ impl DefCollector<'_> {
         let (per_ns, _) = self.def_map.resolve_path(
             self.db,
             self.def_map.root,
-            &path,
+            path,
             BuiltinShadowMode::Other,
         );

@@ -722,7 +722,7 @@ impl DefCollector<'_> {
         if import.is_extern_crate {
             let res = self.def_map.resolve_name_in_extern_prelude(
                 self.db,
-                &import
+                import
                     .path
                     .as_ident()
                     .expect("extern crate should have been desugared to one-element path"),
@@ -1351,7 +1351,7 @@ impl ModCollector<'_, '_> {
                 let imports = Import::from_use(
                     self.def_collector.db,
                     krate,
-                    &self.item_tree,
+                    self.item_tree,
                     ItemTreeId::new(self.file_id, import_id),
                 );
                 self.def_collector.unresolved_imports.extend(imports.into_iter().map(

@@ -1368,7 +1368,7 @@ impl ModCollector<'_, '_> {
                     import: Import::from_extern_crate(
                         self.def_collector.db,
                         krate,
-                        &self.item_tree,
+                        self.item_tree,
                         ItemTreeId::new(self.file_id, import_id),
                     ),
                     status: PartialResolvedImport::Unresolved,

@@ -1889,7 +1889,7 @@ impl ModCollector<'_, '_> {
                 self.def_collector.def_map.with_ancestor_maps(
                     self.def_collector.db,
                     self.module_id,
-                    &mut |map, module| map[module].scope.get_legacy_macro(&name),
+                    &mut |map, module| map[module].scope.get_legacy_macro(name),
                 )
             })
         },
@@ -1993,7 +1993,7 @@ mod tests {
     }

     fn do_resolve(code: &str) -> DefMap {
-        let (db, _file_id) = TestDB::with_single_file(&code);
+        let (db, _file_id) = TestDB::with_single_file(code);
         let krate = db.test_crate();

         let edition = db.crate_graph()[krate].edition;
@@ -93,7 +93,7 @@ impl DefMap {
         let mut vis = match visibility {
             RawVisibility::Module(path) => {
                 let (result, remaining) =
-                    self.resolve_path(db, original_module, &path, BuiltinShadowMode::Module);
+                    self.resolve_path(db, original_module, path, BuiltinShadowMode::Module);
                 if remaining.is_some() {
                     return None;
                 }

@@ -205,7 +205,7 @@ impl DefMap {
                     None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
                 };
                 log::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
-                self.resolve_name_in_crate_root_or_extern_prelude(db, &segment)
+                self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
             }
             PathKind::Plain => {
                 let (_, segment) = match segments.next() {

@@ -222,7 +222,7 @@ impl DefMap {
                     if path.segments().len() == 1 { shadow } else { BuiltinShadowMode::Module };

                 log::debug!("resolving {:?} in module", segment);
-                self.resolve_name_in_module(db, original_module, &segment, prefer_module)
+                self.resolve_name_in_module(db, original_module, segment, prefer_module)
             }
             PathKind::Super(lvl) => {
                 let mut module = original_module;

@@ -269,7 +269,7 @@ impl DefMap {
                     Some((_, segment)) => segment,
                     None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
                 };
-                if let Some(def) = self.extern_prelude.get(&segment) {
+                if let Some(def) = self.extern_prelude.get(segment) {
                     log::debug!("absolute path {:?} resolved to crate {:?}", path, def);
                     PerNs::types(*def, Visibility::Public)
                 } else {

@@ -319,13 +319,13 @@ impl DefMap {
                     };

                     // Since it is a qualified path here, it should not contains legacy macros
-                    module_data.scope.get(&segment)
+                    module_data.scope.get(segment)
                 }
                 ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                     // enum variant
                     cov_mark::hit!(can_import_enum_variant);
                     let enum_data = db.enum_data(e);
-                    match enum_data.variant(&segment) {
+                    match enum_data.variant(segment) {
                         Some(local_id) => {
                             let variant = EnumVariantId { parent: e, local_id };
                             match &*enum_data.variants[local_id].variant_data {
@@ -208,13 +208,13 @@ fn lower_generic_args_from_fn_path(
     let params = params?;
     let mut param_types = Vec::new();
     for param in params.params() {
-        let type_ref = TypeRef::from_ast_opt(&ctx, param.ty());
+        let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
         param_types.push(type_ref);
     }
     let arg = GenericArg::Type(TypeRef::Tuple(param_types));
     args.push(arg);
     if let Some(ret_type) = ret_type {
-        let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.ty());
+        let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
         bindings.push(AssociatedTypeBinding {
             name: name![Output],
             type_ref: Some(type_ref),
@@ -133,7 +133,7 @@ impl Resolver {
             Some(it) => it,
             None => return PerNs::none(),
         };
-        let (module_res, segment_index) = item_map.resolve_path(db, module, &path, shadow);
+        let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow);
         if segment_index.is_some() {
             return PerNs::none();
         }

@@ -150,7 +150,7 @@ impl Resolver {
         path: &ModPath,
     ) -> Option<TraitId> {
         let (item_map, module) = self.module_scope()?;
-        let (module_res, ..) = item_map.resolve_path(db, module, &path, BuiltinShadowMode::Module);
+        let (module_res, ..) = item_map.resolve_path(db, module, path, BuiltinShadowMode::Module);
         match module_res.take_types()? {
             ModuleDefId::TraitId(it) => Some(it),
             _ => None,

@@ -325,7 +325,7 @@ impl Resolver {
         path: &ModPath,
     ) -> Option<MacroDefId> {
         let (item_map, module) = self.module_scope()?;
-        item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros()
+        item_map.resolve_path(db, module, path, BuiltinShadowMode::Other).0.take_macros()
     }

     pub fn process_all_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
@@ -561,7 +561,7 @@ impl ModuleItemMap {
         path: &ModPath,
     ) -> Option<ResolveValueResult> {
         let (module_def, idx) =
-            self.def_map.resolve_path_locally(db, self.module_id, &path, BuiltinShadowMode::Other);
+            self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
         match idx {
             None => {
                 let value = to_value_ns(module_def)?;

@@ -591,7 +591,7 @@ impl ModuleItemMap {
         path: &ModPath,
     ) -> Option<(TypeNs, Option<usize>)> {
         let (module_def, idx) =
-            self.def_map.resolve_path_locally(db, self.module_id, &path, BuiltinShadowMode::Other);
+            self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
         let res = to_type_ns(module_def)?;
         Some((res, idx))
     }
@@ -128,7 +128,7 @@ impl TypeRef {
     /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
     pub fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self {
         match node {
-            ast::Type::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()),
+            ast::Type::ParenType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
             ast::Type::TupleType(inner) => {
                 TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect())
             }

@@ -142,7 +142,7 @@ impl TypeRef {
                     .unwrap_or(TypeRef::Error)
             }
             ast::Type::PtrType(inner) => {
-                let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty());
+                let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
                 let mutability = Mutability::from_mutable(inner.mut_token().is_some());
                 TypeRef::RawPtr(Box::new(inner_ty), mutability)
             }

@@ -156,13 +156,13 @@ impl TypeRef {
                     .map(ConstScalar::usize_from_literal_expr)
                     .unwrap_or(ConstScalar::Unknown);

-                TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())), len)
+                TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
             }
             ast::Type::SliceType(inner) => {
-                TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())))
+                TypeRef::Slice(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())))
             }
             ast::Type::RefType(inner) => {
-                let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty());
+                let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
                 let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(&lt));
                 let mutability = Mutability::from_mutable(inner.mut_token().is_some());
                 TypeRef::Reference(Box::new(inner_ty), lifetime, mutability)

@@ -180,7 +180,7 @@ impl TypeRef {
                         is_varargs = param.dotdotdot_token().is_some();
                     }

-                    pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(&ctx, it)).collect()
+                    pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(ctx, it)).collect()
                 } else {
                     Vec::new()
                 };

@@ -188,7 +188,7 @@ impl TypeRef {
                 TypeRef::Fn(params, is_varargs)
             }
             // for types are close enough for our purposes to the inner type for now...
-            ast::Type::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()),
+            ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
             ast::Type::ImplTraitType(inner) => {
                 TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
             }

@@ -229,7 +229,7 @@ impl TypeRef {
                 TypeRef::RawPtr(type_ref, _)
                 | TypeRef::Reference(type_ref, ..)
                 | TypeRef::Array(type_ref, _)
-                | TypeRef::Slice(type_ref) => go(&type_ref, f),
+                | TypeRef::Slice(type_ref) => go(type_ref, f),
                 TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
                     for bound in bounds {
                         match bound.as_ref() {
@@ -354,7 +354,7 @@ fn concat_expand(
                 // concat works with string and char literals, so remove any quotes.
                 // It also works with integer, float and boolean literals, so just use the rest
                 // as-is.
-                let component = unquote_str(&it).unwrap_or_else(|| it.text.to_string());
+                let component = unquote_str(it).unwrap_or_else(|| it.text.to_string());
                 text.push_str(&component);
             }
             // handle boolean literals

@@ -417,7 +417,7 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> {
     tt.token_trees
         .get(0)
         .and_then(|tt| match tt {
-            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it),
+            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it),
             _ => None,
         })
         .ok_or_else(|| mbe::ExpandError::ConversionError)

@@ -561,7 +561,7 @@ mod tests {
     use syntax::ast::NameOwner;

     fn expand_builtin_macro(ra_fixture: &str) -> String {
-        let (db, file_id) = TestDB::with_single_file(&ra_fixture);
+        let (db, file_id) = TestDB::with_single_file(ra_fixture);
         let parsed = db.parse(file_id);
         let mut macro_rules: Vec<_> =
             parsed.syntax_node().descendants().filter_map(ast::MacroRules::cast).collect();
@@ -78,7 +78,7 @@ mod tests {
     use super::*;

     fn test_remove_derives_up_to(attr: usize, ra_fixture: &str, expect: Expect) {
-        let (db, file_id) = TestDB::with_single_file(&ra_fixture);
+        let (db, file_id) = TestDB::with_single_file(ra_fixture);
         let parsed = db.parse(file_id);

         let mut items: Vec<_> =
@@ -51,7 +51,7 @@ impl ProcMacroExpander {
                 // Proc macros have access to the environment variables of the invoking crate.
                 let env = &krate_graph[calling_crate].env;

-                proc_macro.expander.expand(&tt, attr_arg, &env).map_err(mbe::ExpandError::from)
+                proc_macro.expander.expand(tt, attr_arg, env).map_err(mbe::ExpandError::from)
             }
             None => Err(mbe::ExpandError::UnresolvedProcMacro),
         }
@@ -528,7 +528,7 @@ impl SplitWildcard {
                 smallvec![NonExhaustive]
             }
             TyKind::Never => SmallVec::new(),
-            _ if cx.is_uninhabited(&pcx.ty) => SmallVec::new(),
+            _ if cx.is_uninhabited(pcx.ty) => SmallVec::new(),
             TyKind::Adt(..) | TyKind::Tuple(..) | TyKind::Ref(..) => smallvec![Single],
             // This type is one for which we cannot list constructors, like `str` or `f64`.
             _ => smallvec![NonExhaustive],

@@ -645,7 +645,7 @@ impl SubPatSet {
            (Seq { subpats: s_set }, Seq { subpats: mut o_set }) => {
                s_set.retain(|i, s_sub_set| {
                    // Missing entries count as full.
-                    let o_sub_set = o_set.remove(&i).unwrap_or(Full);
+                    let o_sub_set = o_set.remove(i).unwrap_or(Full);
                    s_sub_set.union(o_sub_set);
                    // We drop full entries.
                    !s_sub_set.is_full()

@@ -656,7 +656,7 @@ impl SubPatSet {
            (Alt { subpats: s_set, .. }, Alt { subpats: mut o_set, .. }) => {
                s_set.retain(|i, s_sub_set| {
                    // Missing entries count as empty.
-                    let o_sub_set = o_set.remove(&i).unwrap_or(Empty);
+                    let o_sub_set = o_set.remove(i).unwrap_or(Empty);
                    s_sub_set.union(o_sub_set);
                    // We drop empty entries.
                    !s_sub_set.is_empty()

@@ -898,7 +898,7 @@ impl Usefulness {
                } else {
                    witnesses
                        .into_iter()
-                        .map(|witness| witness.apply_constructor(pcx, &ctor, ctor_wild_subpatterns))
+                        .map(|witness| witness.apply_constructor(pcx, ctor, ctor_wild_subpatterns))
                        .collect()
                };
                WithWitnesses(new_witnesses)
@@ -782,7 +782,7 @@ impl Expectation {
     fn adjust_for_branches(&self, table: &mut unify::InferenceTable) -> Expectation {
         match self {
             Expectation::HasType(ety) => {
-                let ety = table.resolve_ty_shallow(&ety);
+                let ety = table.resolve_ty_shallow(ety);
                 if !ety.is_ty_var() {
                     Expectation::HasType(ety)
                 } else {
@@ -109,7 +109,7 @@ impl<'a> InferenceContext<'a> {
         }

         // Consider coercing the subtype to a DST
-        if let Ok(ret) = self.try_coerce_unsized(&from_ty, &to_ty) {
+        if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
             return Ok(ret);
         }
@@ -54,7 +54,7 @@ impl<'a> InferenceContext<'a> {
     /// Infer type of expression with possibly implicit coerce to the expected type.
     /// Return the type after possible coercion.
     pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
-        let ty = self.infer_expr_inner(expr, &expected);
+        let ty = self.infer_expr_inner(expr, expected);
         let ty = if let Some(target) = expected.only_has_type(&mut self.table) {
             if !self.coerce(&ty, &target) {
                 self.result

@@ -135,11 +135,11 @@ impl<'a> InferenceContext<'a> {
                 let mut both_arms_diverge = Diverges::Always;

                 let mut result_ty = self.table.new_type_var();
-                let then_ty = self.infer_expr_inner(*then_branch, &expected);
+                let then_ty = self.infer_expr_inner(*then_branch, expected);
                 both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
                 result_ty = self.coerce_merge_branch(Some(*then_branch), &result_ty, &then_ty);
                 let else_ty = match else_branch {
-                    Some(else_branch) => self.infer_expr_inner(*else_branch, &expected),
+                    Some(else_branch) => self.infer_expr_inner(*else_branch, expected),
                     None => TyBuilder::unit(),
                 };
                 both_arms_diverge &= self.diverges;

@@ -330,8 +330,8 @@ impl<'a> InferenceContext<'a> {
                 .infer_method_call(
                     tgt_expr,
                     *receiver,
-                    &args,
-                    &method_name,
+                    args,
+                    method_name,
                     generic_args.as_deref(),
                 ),
             Expr::Match { expr, arms } => {
@@ -993,7 +993,7 @@ impl<'a> InferenceContext<'a> {
     }

     fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
-        let callable_ty = self.resolve_ty_shallow(&callable_ty);
+        let callable_ty = self.resolve_ty_shallow(callable_ty);
         if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(&Interner) {
             let def: CallableDefId = from_chalk(self.db, *fn_def);
             let generic_predicates = self.db.generic_predicates(def.into());
@@ -192,7 +192,7 @@ impl<'a> InferenceContext<'a> {
             Pat::Path(path) => {
                 // FIXME use correct resolver for the surrounding expression
                 let resolver = self.resolver.clone();
-                self.infer_path(&resolver, &path, pat.into()).unwrap_or(self.err_ty())
+                self.infer_path(&resolver, path, pat.into()).unwrap_or(self.err_ty())
             }
             Pat::Bind { mode, name: _, subpat } => {
                 let mode = if mode == &BindingAnnotation::Unannotated {
@@ -43,11 +43,11 @@ impl<'a> InferenceContext<'a> {
             }
             let ty = self.make_ty(type_ref);
             let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
-            let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
+            let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
             let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
             self.resolve_ty_assoc_item(
                 ty,
-                &path.segments().last().expect("path had at least one segment").name,
+                path.segments().last().expect("path had at least one segment").name,
                 id,
             )?
         } else {

@@ -154,7 +154,7 @@ impl<'a> InferenceContext<'a> {
                 let segment =
                     remaining_segments.last().expect("there should be at least one segment here");

-                self.resolve_ty_assoc_item(ty, &segment.name, id)
+                self.resolve_ty_assoc_item(ty, segment.name, id)
             }
         }
     }
@@ -331,7 +331,7 @@ impl chalk_ir::interner::Interner for Interner {
         &self,
         clauses: &'a Self::InternedProgramClauses,
     ) -> &'a [chalk_ir::ProgramClause<Self>] {
-        &clauses
+        clauses
     }

     fn intern_quantified_where_clauses<E>(

@@ -373,7 +373,7 @@ impl chalk_ir::interner::Interner for Interner {
         &self,
         canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
     ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
-        &canonical_var_kinds
+        canonical_var_kinds
     }

     fn intern_constraints<E>(

@@ -413,7 +413,7 @@ impl chalk_ir::interner::Interner for Interner {
         &self,
         variances: &'a Self::InternedVariances,
     ) -> &'a [chalk_ir::Variance] {
-        &variances
+        variances
     }
 }
@@ -238,7 +238,7 @@ impl<'a> TyLoweringContext<'a> {
                 // away instead of two.
                 let actual_opaque_type_data = self
                     .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
-                        ctx.lower_impl_trait(&bounds)
+                        ctx.lower_impl_trait(bounds)
                     });
                 self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;

@@ -421,7 +421,7 @@ impl<'a> TyLoweringContext<'a> {
                 let found = self
                     .db
                     .trait_data(trait_ref.hir_trait_id())
-                    .associated_type_by_name(&segment.name);
+                    .associated_type_by_name(segment.name);
                 match found {
                     Some(associated_ty) => {
                         // FIXME handle type parameters on the segment

@@ -505,7 +505,7 @@ impl<'a> TyLoweringContext<'a> {
     pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
         // Resolve the path (in type namespace)
         if let Some(type_ref) = path.type_anchor() {
-            let (ty, res) = self.lower_ty_ext(&type_ref);
+            let (ty, res) = self.lower_ty_ext(type_ref);
             return self.lower_ty_relative_path(ty, res, path.segments());
         }
         let (resolution, remaining_index) =
@@ -372,7 +372,7 @@ pub(crate) fn lookup_method(
         db,
         env,
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         visible_from_module,
         Some(name),
         LookupMode::MethodCall,

@@ -484,7 +484,7 @@ fn iterate_method_candidates_impl(
         LookupMode::Path => {
             // No autoderef for path lookups
             iterate_method_candidates_for_self_ty(
-                &ty,
+                ty,
                 db,
                 env,
                 krate,

@@ -513,7 +513,7 @@ fn iterate_method_candidates_with_autoref(
         db,
         env.clone(),
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         visible_from_module,
         name,
         &mut callback,

@@ -531,7 +531,7 @@ fn iterate_method_candidates_with_autoref(
         db,
         env.clone(),
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         visible_from_module,
         name,
         &mut callback,

@@ -549,7 +549,7 @@ fn iterate_method_candidates_with_autoref(
         db,
         env,
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         visible_from_module,
         name,
         &mut callback,

@@ -593,7 +593,7 @@ fn iterate_method_candidates_by_receiver(
         db,
         env.clone(),
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         name,
         Some(receiver_ty),
         &mut callback,

@@ -870,7 +870,7 @@ fn transform_receiver_ty(
             .fill_with_unknown()
             .build(),
         AssocContainerId::ImplId(impl_id) => {
-            let impl_substs = inherent_impl_substs(db, env, impl_id, &self_ty)?;
+            let impl_substs = inherent_impl_substs(db, env, impl_id, self_ty)?;
             TyBuilder::subst_for_def(db, function_id)
                 .use_parent_substs(&impl_substs)
                 .fill_with_unknown()
@@ -208,7 +208,7 @@ pub(crate) fn diagnostics(
     match sema.to_module_def(file_id) {
         Some(m) => m.diagnostics(db, &mut sink, internal_diagnostics),
         None => {
-            sink.push(UnlinkedFile { file_id, node: SyntaxNodePtr::new(&parse.tree().syntax()) });
+            sink.push(UnlinkedFile { file_id, node: SyntaxNodePtr::new(parse.tree().syntax()) });
         }
     }

@@ -222,7 +222,7 @@ fn diagnostic_with_fix<D: DiagnosticWithFixes>(
     resolve: &AssistResolveStrategy,
 ) -> Diagnostic {
     Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message())
-        .with_fixes(d.fixes(&sema, resolve))
+        .with_fixes(d.fixes(sema, resolve))
         .with_code(Some(d.code()))
 }

@@ -232,7 +232,7 @@ fn warning_with_fix<D: DiagnosticWithFixes>(
     resolve: &AssistResolveStrategy,
 ) -> Diagnostic {
     Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message())
-        .with_fixes(d.fixes(&sema, resolve))
+        .with_fixes(d.fixes(sema, resolve))
         .with_code(Some(d.code()))
 }
@@ -18,7 +18,7 @@ impl DiagnosticWithFixes for NoSuchField {
     ) -> Option<Vec<Assist>> {
         let root = sema.db.parse_or_expand(self.file)?;
         missing_record_expr_field_fixes(
-            &sema,
+            sema,
             self.file.original_file(sema.db),
             &self.field.to_node(&root),
         )

@@ -37,7 +37,7 @@ impl DiagnosticWithFixes for MissingFields {

         let edit = {
             let mut builder = TextEdit::builder();
-            algo::diff(&old_field_list.syntax(), &new_field_list.syntax())
+            algo::diff(old_field_list.syntax(), new_field_list.syntax())
                 .into_text_edit(&mut builder);
             builder.finish()
         };

@@ -45,7 +45,7 @@ impl DiagnosticWithFixes for MissingFields {
             "fill_missing_fields",
             "Fill struct fields",
             SourceChange::from_text_edit(self.file.original_file(sema.db), edit),
-            sema.original_range(&field_list_parent.syntax()).range,
+            sema.original_range(field_list_parent.syntax()).range,
         )])
     }
 }
@@ -151,18 +151,18 @@ pub(crate) fn resolve_doc_path_for_def(
 ) -> Option<hir::ModuleDef> {
     match def {
         Definition::ModuleDef(def) => match def {
-            hir::ModuleDef::Module(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Function(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Const(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Static(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, &link, ns),
+            hir::ModuleDef::Module(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Function(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Const(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Static(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
             hir::ModuleDef::BuiltinType(_) => None,
         },
-        Definition::Macro(it) => it.resolve_doc_path(db, &link, ns),
-        Definition::Field(it) => it.resolve_doc_path(db, &link, ns),
+        Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
+        Definition::Field(it) => it.resolve_doc_path(db, link, ns),
         Definition::SelfType(_)
         | Definition::Local(_)
         | Definition::GenericParam(_)
@@ -328,7 +328,7 @@ mod tests {
     use super::*;

     fn do_check(before: &str, afters: &[&str]) {
-        let (analysis, position) = fixture::position(&before);
+        let (analysis, position) = fixture::position(before);
         let before = analysis.file_text(position.file_id).unwrap();
         let range = TextRange::empty(position.offset);
         let mut frange = FileRange { file_id: position.file_id, range };
@@ -57,7 +57,7 @@ pub(crate) fn goto_definition(
             },
             ast::Name(name) => {
                 let def = NameClass::classify(&sema, &name)?.referenced_or_defined(sema.db);
-                try_find_trait_item_definition(&sema.db, &def)
+                try_find_trait_item_definition(sema.db, &def)
                     .or_else(|| def.try_to_nav(sema.db))
             },
             ast::Lifetime(lt) => if let Some(name_class) = NameClass::classify_lifetime(&sema, &lt) {
@@ -288,7 +288,7 @@ fn runnable_action(
 ) -> Option<HoverAction> {
     match def {
         Definition::ModuleDef(it) => match it {
-            ModuleDef::Module(it) => runnable_mod(&sema, it).map(|it| HoverAction::Runnable(it)),
+            ModuleDef::Module(it) => runnable_mod(sema, it).map(|it| HoverAction::Runnable(it)),
             ModuleDef::Function(func) => {
                 let src = func.source(sema.db)?;
                 if src.file_id != file_id.into() {

@@ -297,7 +297,7 @@ fn runnable_action(
                     return None;
                 }

-                runnable_fn(&sema, func).map(HoverAction::Runnable)
+                runnable_fn(sema, func).map(HoverAction::Runnable)
             }
             _ => None,
         },

@@ -432,7 +432,7 @@ fn hover_for_definition(
     return match def {
         Definition::Macro(it) => match &it.source(db)?.value {
             Either::Left(mac) => {
-                let label = macro_label(&mac);
+                let label = macro_label(mac);
                 from_def_source_labeled(db, it, Some(label), mod_path)
             }
             Either::Right(_) => {
@@ -516,7 +516,7 @@ fn hover_for_keyword(
     if !token.kind().is_keyword() {
         return None;
     }
-    let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()?).krate());
+    let famous_defs = FamousDefs(sema, sema.scope(&token.parent()?).krate());
     // std exposes {}_keyword modules with docstrings on the root to document keywords
     let keyword_mod = format!("{}_keyword", token.text());
     let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
@@ -96,7 +96,7 @@ fn get_chaining_hints(
     }

     let krate = sema.scope(expr.syntax()).module().map(|it| it.krate());
-    let famous_defs = FamousDefs(&sema, krate);
+    let famous_defs = FamousDefs(sema, krate);

     let mut tokens = expr
         .syntax()

@@ -165,7 +165,7 @@ fn get_param_name_hints(
             };
             Some((param_name, arg))
         })
-        .filter(|(param_name, arg)| !should_hide_param_name_hint(sema, &callable, param_name, &arg))
+        .filter(|(param_name, arg)| !should_hide_param_name_hint(sema, &callable, param_name, arg))
         .map(|(param_name, arg)| InlayHint {
             range: arg.syntax().text_range(),
             kind: InlayKind::ParameterHint,

@@ -187,7 +187,7 @@ fn get_bind_pat_hints(
     }

     let krate = sema.scope(pat.syntax()).module().map(|it| it.krate());
-    let famous_defs = FamousDefs(&sema, krate);
+    let famous_defs = FamousDefs(sema, krate);

     let ty = sema.type_of_pat(&pat.clone().into())?;
@@ -60,7 +60,7 @@ fn remove_newlines(edit: &mut TextEditBuilder, token: &SyntaxToken, range: TextRange) {
         let pos: TextSize = (pos as u32).into();
         let offset = token.text_range().start() + range.start() + pos;
         if !edit.invalidates_offset(offset) {
-            remove_newline(edit, &token, offset);
+            remove_newline(edit, token, offset);
         }
     }
 }
@@ -282,20 +282,20 @@ impl Analysis {
         file_id: FileId,
         text_range: Option<TextRange>,
     ) -> Cancellable<String> {
-        self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range))
+        self.with_db(|db| syntax_tree::syntax_tree(db, file_id, text_range))
     }

     pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> {
-        self.with_db(|db| view_hir::view_hir(&db, position))
+        self.with_db(|db| view_hir::view_hir(db, position))
     }

     pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
-        self.with_db(|db| view_item_tree::view_item_tree(&db, file_id))
+        self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
     }

     /// Renders the crate graph to GraphViz "dot" syntax.
     pub fn view_crate_graph(&self) -> Cancellable<Result<String, String>> {
-        self.with_db(|db| view_crate_graph::view_crate_graph(&db))
+        self.with_db(|db| view_crate_graph::view_crate_graph(db))
     }

     pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> {

@@ -315,7 +315,7 @@ impl Analysis {
     /// up minor stuff like continuing the comment.
     /// The edit will be a snippet (with `$0`).
     pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> {
-        self.with_db(|db| typing::on_enter(&db, position))
+        self.with_db(|db| typing::on_enter(db, position))
     }

     /// Returns an edit which should be applied after a character was typed.

@@ -331,7 +331,7 @@ impl Analysis {
         if !typing::TRIGGER_CHARS.contains(char_typed) {
             return Ok(None);
         }
-        self.with_db(|db| typing::on_char_typed(&db, position, char_typed))
+        self.with_db(|db| typing::on_char_typed(db, position, char_typed))
     }

     /// Returns a tree representation of symbols in the file. Useful to draw a
@@ -62,7 +62,7 @@ pub(crate) fn find_all_refs(
         if let Some(name) = get_name_of_item_declaration(&syntax, position) {
             (NameClass::classify(sema, &name)?.referenced_or_defined(sema.db), true)
         } else {
-            (find_def(&sema, &syntax, position)?, false)
+            (find_def(sema, &syntax, position)?, false)
         };

     let mut usages = def.usages(sema).set_scope(search_scope).include_self_refs().all();
@@ -64,7 +64,7 @@ pub(crate) fn prepare_rename(
         }
     };
     let name_like = sema
-        .find_node_at_offset_with_descend(&syntax, position.offset)
+        .find_node_at_offset_with_descend(syntax, position.offset)
         .ok_or_else(|| format_err!("No references found at position"))?;
     let node = match &name_like {
         ast::NameLike::Name(it) => it.syntax(),

@@ -104,7 +104,7 @@ pub(crate) fn rename_with_semantics(

     let def = find_definition(sema, syntax, position)?;
     match def {
-        Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(&sema, module, new_name),
+        Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(sema, module, new_name),
         Definition::SelfType(_) => bail!("Cannot rename `Self`"),
         Definition::ModuleDef(ModuleDef::BuiltinType(_)) => bail!("Cannot rename builtin type"),
         def => rename_reference(sema, def, new_name),

@@ -323,7 +323,7 @@ fn rename_reference(
     }
     let mut source_change = SourceChange::default();
     source_change.extend(usages.iter().map(|(&file_id, references)| {
-        (file_id, source_edit_from_references(&references, def, new_name))
+        (file_id, source_edit_from_references(references, def, new_name))
     }));

     let (file_id, edit) = source_edit_from_def(sema, def, new_name)?;

@@ -413,7 +413,7 @@ fn rename_self_to_param(
     let mut source_change = SourceChange::default();
     source_change.insert_source_edit(file_id.original_file(sema.db), edit);
     source_change.extend(usages.iter().map(|(&file_id, references)| {
-        (file_id, source_edit_from_references(&references, def, new_name))
+        (file_id, source_edit_from_references(references, def, new_name))
     }));
     Ok(source_change)
 }
@@ -158,7 +158,7 @@ fn find_related_tests(
     search_scope: Option<SearchScope>,
     tests: &mut FxHashSet<Runnable>,
 ) {
-    if let Some(refs) = references::find_all_refs(&sema, position, search_scope) {
+    if let Some(refs) = references::find_all_refs(sema, position, search_scope) {
         for (file_id, refs) in refs.references {
             let file = sema.parse(file_id);
             let file = file.syntax();

@@ -169,10 +169,10 @@ fn find_related_tests(
             });

             for fn_def in functions {
-                if let Some(runnable) = as_test_runnable(&sema, &fn_def) {
+                if let Some(runnable) = as_test_runnable(sema, &fn_def) {
                     // direct test
                     tests.insert(runnable);
-                } else if let Some(module) = parent_test_module(&sema, &fn_def) {
+                } else if let Some(module) = parent_test_module(sema, &fn_def) {
                     // indirect test
                     find_related_tests_in_module(sema, &fn_def, &module, tests);
                 }

@@ -203,7 +203,7 @@ fn find_related_tests_in_module(
 }

 fn as_test_runnable(sema: &Semantics<RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> {
-    if test_related_attribute(&fn_def).is_some() {
+    if test_related_attribute(fn_def).is_some() {
         let function = sema.to_def(fn_def)?;
         runnable_fn(sema, function)
     } else {
@@ -323,7 +323,7 @@ fn traverse(
         if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) {
             if token.is_raw() {
                 let expanded = element_to_highlight.as_token().unwrap().clone();
-                if inject::ra_fixture(hl, &sema, token, expanded).is_some() {
+                if inject::ra_fixture(hl, sema, token, expanded).is_some() {
                     continue;
                 }
             }

@@ -334,7 +334,7 @@ fn traverse(
         }

         if let Some((mut highlight, binding_hash)) = highlight::element(
-            &sema,
+            sema,
             krate,
             &mut bindings_shadow_count,
             syntactic_name_ref_highlighting,

@@ -449,12 +449,12 @@ fn highlight_method_call(
     krate: Option<hir::Crate>,
     method_call: &ast::MethodCallExpr,
 ) -> Option<Highlight> {
-    let func = sema.resolve_method_call(&method_call)?;
+    let func = sema.resolve_method_call(method_call)?;

     let mut h = SymbolKind::Function.into();
     h |= HlMod::Associated;

-    if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) {
+    if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(method_call) {
         h |= HlMod::Unsafe;
     }
     if func.is_async(sema.db) {
@@ -23,7 +23,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) {
     let hl_ranges = highlight(db, file_id, None, false);
     let text = parse.tree().syntax().to_string();
     let mut buf = String::new();
-    buf.push_str(&STYLE);
+    buf.push_str(STYLE);
     buf.push_str("<pre><code>");
     for r in &hl_ranges {
         let chunk = html_escape(&text[r.range]);
@@ -23,7 +23,7 @@ pub(super) fn ra_fixture(
     literal: ast::String,
     expanded: SyntaxToken,
 ) -> Option<()> {
-    let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
+    let active_parameter = ActiveParameter::at_token(sema, expanded)?;
     if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) {
         return None;
     }

@@ -124,7 +124,7 @@ pub(super) fn doc_comment(
     }

     for attr in attributes.by_key("doc").attrs() {
-        let InFile { file_id, value: src } = attrs_source_map.source_of(&attr);
+        let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
         if file_id != node.file_id {
             continue;
         }
@@ -88,12 +88,12 @@ fn on_enter_in_comment(
         if comment.text().ends_with(' ') {
             cov_mark::hit!(continues_end_of_line_comment_with_space);
             remove_trailing_whitespace = true;
-        } else if !followed_by_comment(&comment) {
+        } else if !followed_by_comment(comment) {
             return None;
         }
     }

-    let indent = node_indent(&file, comment.syntax())?;
+    let indent = node_indent(file, comment.syntax())?;
     let inserted = format!("\n{}{} $0", indent, prefix);
     let delete = if remove_trailing_whitespace {
         let trimmed_len = comment.text().trim_end().len() as u32;

@@ -188,7 +188,7 @@ mod tests {
     use crate::fixture;

     fn apply_on_enter(before: &str) -> Option<String> {
-        let (analysis, position) = fixture::position(&before);
+        let (analysis, position) = fixture::position(before);
         let result = analysis.on_enter(position).unwrap()?;

         let mut actual = analysis.file_text(position.file_id).unwrap().to_string();
@@ -88,7 +88,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
             // We pick a single indentation level for the whole block comment based on the
             // comment where the assist was invoked. This will be prepended to the
             // contents of each line comment when they're put into the block comment.
-            let indentation = IndentLevel::from_token(&comment.syntax());
+            let indentation = IndentLevel::from_token(comment.syntax());

             let block_comment_body =
                 comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");

@@ -167,7 +167,7 @@ fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
     if contents.is_empty() {
         contents.to_owned()
     } else {
-        indentation.to_string() + &contents
+        indentation.to_string() + contents
     }
 }
@@ -108,7 +108,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
         "Convert to guarded return",
         target,
         |edit| {
-            let if_indent_level = IndentLevel::from_node(&if_expr.syntax());
+            let if_indent_level = IndentLevel::from_node(if_expr.syntax());
             let new_block = match if_let_pat {
                 None => {
                     // If.

@@ -174,7 +174,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
                     .take_while(|i| *i != end_of_then),
             );
             replace_children(
-                &parent_block.syntax(),
+                parent_block.syntax(),
                 RangeInclusive::new(
-                    if_expr.clone().syntax().clone().into(),
+                    if_expr.syntax().clone().into(),
@@ -76,7 +76,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext) -> Option
     let module = ctx.sema.scope(&insert_after).module()?;

     let vars_defined_in_body_and_outlive =
-        vars_defined_in_body_and_outlive(ctx, &body, &node.parent().as_ref().unwrap_or(&node));
+        vars_defined_in_body_and_outlive(ctx, &body, node.parent().as_ref().unwrap_or(&node));
     let ret_ty = body_return_ty(ctx, &body)?;

     // FIXME: we compute variables that outlive here just to check `never!` condition

@@ -808,7 +808,7 @@ trait HasTokenAtOffset {

 impl HasTokenAtOffset for SyntaxNode {
     fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
-        SyntaxNode::token_at_offset(&self, offset)
+        SyntaxNode::token_at_offset(self, offset)
     }
 }

@@ -854,7 +854,7 @@ fn vars_defined_in_body_and_outlive(
     body: &FunctionBody,
     parent: &SyntaxNode,
 ) -> Vec<OutlivedLocal> {
-    let vars_defined_in_body = vars_defined_in_body(&body, ctx);
+    let vars_defined_in_body = vars_defined_in_body(body, ctx);
     vars_defined_in_body
         .into_iter()
         .filter_map(|var| var_outlives_body(ctx, body, var, parent))

@@ -868,7 +868,7 @@ fn is_defined_before(
     src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>,
 ) -> bool {
     src.file_id.original_file(ctx.db()) == ctx.frange.file_id
-        && !body.contains_node(&either_syntax(&src.value))
+        && !body.contains_node(either_syntax(&src.value))
 }

 fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode {
@@ -212,7 +212,7 @@ impl ExtendedEnum {
 }

 fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
-    sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
+    sema.type_of_expr(expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
         Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
         _ => {
             if ty.is_bool() {

@@ -228,7 +228,7 @@ fn resolve_tuple_of_enum_def(
     sema: &Semantics<RootDatabase>,
     expr: &ast::Expr,
 ) -> Option<Vec<ExtendedEnum>> {
-    sema.type_of_expr(&expr)?
+    sema.type_of_expr(expr)?
         .tuple_fields(sema.db)
         .iter()
         .map(|ty| {
@@ -43,7 +43,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> O
         _ => return None,
     };

-    let current_module = ctx.sema.scope(&path.syntax()).module()?;
+    let current_module = ctx.sema.scope(path.syntax()).module()?;
     let target_module = def.module(ctx.db())?;

     let vis = target_module.visibility_of(ctx.db(), &def)?;
@@ -47,7 +47,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext) ->
     let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));

     // Return early if we've found an existing new fn
-    let impl_def = find_struct_impl(&ctx, &parent_enum, &fn_name)?;
+    let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;

     let target = variant.syntax().text_range();
     acc.add(

@@ -136,7 +136,7 @@ fn generate_enum_projection_method(
         format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text()));

     // Return early if we've found an existing new fn
-    let impl_def = find_struct_impl(&ctx, &parent_enum, &fn_name)?;
+    let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;

     let target = variant.syntax().text_range();
     acc.add(AssistId(assist_id, AssistKind::Generate), assist_description, target, |builder| {
@@ -59,7 +59,7 @@ pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext) -> Optio
         None => None,
     };

-    let function_builder = FunctionBuilder::from_call(&ctx, &call, &path, target_module)?;
+    let function_builder = FunctionBuilder::from_call(ctx, &call, &path, target_module)?;

     let target = call.syntax().text_range();
     acc.add(

@@ -128,12 +128,12 @@ impl FunctionBuilder {
                 file = in_file;
                 target
             }
-            None => next_space_for_fn_after_call_site(&call)?,
+            None => next_space_for_fn_after_call_site(call)?,
         };
         let needs_pub = target_module.is_some();
         let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).module())?;
-        let fn_name = fn_name(&path)?;
-        let (type_params, params) = fn_args(ctx, target_module, &call)?;
+        let fn_name = fn_name(path)?;
+        let (type_params, params) = fn_args(ctx, target_module, call)?;

         // should_render_snippet intends to express a rough level of confidence about
         // the correctness of the return type.
@@ -75,7 +75,7 @@ pub(crate) fn generate_getter_impl(
     if mutable {
         format_to!(fn_name, "_mut");
     }
-    let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;
+    let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;

     let (id, label) = if mutable {
         ("generate_getter_mut", "Generate a mut getter method")

@@ -36,7 +36,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
     };

     // Return early if we've found an existing new fn
-    let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
+    let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?;

     let target = strukt.syntax().text_range();
     acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {
@@ -39,7 +39,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext) -> Option<
     // Return early if we've found an existing fn
     let fn_name = to_lower_snake_case(&field_name.to_string());
     let impl_def = find_struct_impl(
-        &ctx,
+        ctx,
         &ast::Adt::Struct(strukt.clone()),
         format!("set_{}", fn_name).as_str(),
     )?;
@@ -85,7 +85,7 @@ fn whitespace_start(it: SyntaxElement) -> Option<TextSize> {
 }

 fn adjusted_macro_contents(macro_call: &ast::MacroCall) -> Option<String> {
-    let contents = get_valid_macrocall_contents(&macro_call, "dbg")?;
+    let contents = get_valid_macrocall_contents(macro_call, "dbg")?;
     let macro_text_with_brackets = macro_call.token_tree()?.syntax().text();
     let macro_text_in_brackets = macro_text_with_brackets.slice(TextRange::new(
         TextSize::of('('),
@@ -28,7 +28,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<(
         .or_else(|| ctx.find_node_at_offset::<ast::RecordPat>().map(Either::Right))?;

     let path = record.as_ref().either(|it| it.path(), |it| it.path())?;
-    let ranks = compute_fields_ranks(&path, &ctx)?;
+    let ranks = compute_fields_ranks(&path, ctx)?;
     let get_rank_of_field =
         |of: Option<_>| *ranks.get(&of.unwrap_or_default()).unwrap_or(&usize::MAX);
@@ -112,7 +112,7 @@ fn add_assist(
             let insert_pos = adt.syntax().text_range().end();
             let impl_def_with_items =
                 impl_def_from_trait(&ctx.sema, &annotated_name, trait_, trait_path);
-            update_attribute(builder, &input, &trait_name, &attr);
+            update_attribute(builder, input, &trait_name, attr);
             let trait_path = format!("{}", trait_path);
             match (ctx.config.snippet_cap, impl_def_with_items) {
                 (None, _) => {
@@ -169,7 +169,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext)
 }

 fn is_pat_wildcard_or_sad(sema: &hir::Semantics<RootDatabase>, pat: &ast::Pat) -> bool {
-    sema.type_of_pat(&pat)
+    sema.type_of_pat(pat)
         .and_then(|ty| TryEnum::from_ty(sema, &ty))
         .map(|it| it.sad_pattern().syntax().text() == pat.syntax().text())
         .unwrap_or_else(|| matches!(pat, ast::Pat::WildcardPat(_)))
@@ -123,7 +123,7 @@ impl TailReturnCollector {
     fn handle_exprs(&mut self, expr: &Expr, collect_break: bool) {
         match expr {
             Expr::BlockExpr(block_expr) => {
-                self.collect_jump_exprs(&block_expr, collect_break);
+                self.collect_jump_exprs(block_expr, collect_break);
             }
             Expr::ReturnExpr(ret_expr) => {
                 if let Some(ret_expr_arg) = &ret_expr.expr() {
@@ -74,7 +74,7 @@ pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
 #[track_caller]
 fn check_doc_test(assist_id: &str, before: &str, after: &str) {
     let after = trim_indent(after);
-    let (db, file_id, selection) = RootDatabase::with_range_or_offset(&before);
+    let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
     let before = db.file_text(file_id).to_string();
     let frange = FileRange { file_id, range: selection.into() };
@@ -492,7 +492,7 @@ pub(crate) fn add_method_to_adt(
     let start_offset = impl_def
         .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
         .unwrap_or_else(|| {
-            buf = generate_impl_text(&adt, &buf);
+            buf = generate_impl_text(adt, &buf);
             adt.syntax().text_range().end()
         });
@@ -187,7 +187,7 @@ fn from_method_call(expr: &ast::Expr) -> Option<String> {
         }
     }

-    normalize(&name)
+    normalize(name)
 }

 fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {
@ -13,7 +13,7 @@ pub(crate) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
|
|||
_ => return complete_undotted_self(acc, ctx),
|
||||
};
|
||||
|
||||
let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
|
||||
let receiver_ty = match ctx.sema.type_of_expr(dot_receiver) {
|
||||
Some(ty) => ty,
|
||||
_ => return,
|
||||
};
|
||||
|
|
|
@ -34,7 +34,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
|
|||
|
||||
let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal);
|
||||
|
||||
let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
|
||||
let receiver_ty = match ctx.sema.type_of_expr(dot_receiver) {
|
||||
Some(it) => it,
|
||||
None => return,
|
||||
};
|
||||
|
@ -50,7 +50,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
|
|||
postfix_snippet(
|
||||
ctx,
|
||||
cap,
|
||||
&dot_receiver,
|
||||
dot_receiver,
|
||||
"ifl",
|
||||
"if let Ok {}",
|
||||
&format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text),
|
||||
|
@ -60,7 +60,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
|
|||
postfix_snippet(
|
||||
ctx,
|
||||
cap,
|
||||
&dot_receiver,
|
||||
dot_receiver,
|
||||
"while",
|
||||
"while let Ok {}",
|
||||
&format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text),
|
||||
|
@ -71,7 +71,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
|
|||
postfix_snippet(
|
||||
ctx,
|
||||
cap,
|
||||
&dot_receiver,
|
||||
dot_receiver,
|
||||
"ifl",
|
||||
"if let Some {}",
|
||||
&format!("if let Some($1) = {} {{\n $0\n}}", receiver_text),
|
||||
|
@ -81,7 +81,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
|
|||
postfix_snippet(
|
||||
ctx,
|
||||
cap,
|
||||
&dot_receiver,
|
||||
dot_receiver,
|
||||
"while",
|
||||
"while let Some {}",
|
||||
&format!("while let Some($1) = {} {{\n $0\n}}", receiver_text),
|
||||
|
@ -93,7 +93,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
|
|||
postfix_snippet(
|
||||
ctx,
|
||||
cap,
|
||||
&dot_receiver,
|
||||
dot_receiver,
|
||||
"if",
|
||||
"if expr {}",
|
||||
&format!("if {} {{\n $0\n}}", receiver_text),
|
||||
|
@ -102,22 +102,22 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
|
|||
postfix_snippet(
|
||||
ctx,
|
||||
cap,
|
||||
&dot_receiver,
|
||||
dot_receiver,
|
||||
"while",
|
||||
"while expr {}",
|
||||
&format!("while {} {{\n $0\n}}", receiver_text),
|
||||
)
|
||||
.add_to(acc);
|
||||
postfix_snippet(ctx, cap, &dot_receiver, "not", "!expr", &format!("!{}", receiver_text))
|
||||
postfix_snippet(ctx, cap, dot_receiver, "not", "!expr", &format!("!{}", receiver_text))
|
||||
.add_to(acc);
|
||||
}
|
||||
|
||||
postfix_snippet(ctx, cap, &dot_receiver, "ref", "&expr", &format!("&{}", receiver_text))
|
||||
postfix_snippet(ctx, cap, dot_receiver, "ref", "&expr", &format!("&{}", receiver_text))
|
||||
.add_to(acc);
|
||||
postfix_snippet(
|
||||
ctx,
|
||||
cap,
|
||||
&dot_receiver,
|
||||
dot_receiver,
|
||||
"refm",
|
||||
"&mut expr",
|
||||
&format!("&mut {}", receiver_text),
|
||||
|
|
|
@ -53,7 +53,7 @@ pub(crate) fn add_format_like_completions(
|
|||
for (label, macro_name) in KINDS {
|
||||
let snippet = parser.into_suggestion(macro_name);
|
||||
|
||||
postfix_snippet(ctx, cap, &dot_receiver, label, macro_name, &snippet).add_to(acc);
|
||||
postfix_snippet(ctx, cap, dot_receiver, label, macro_name, &snippet).add_to(acc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
|
|||
None => return,
|
||||
};
|
||||
|
||||
let resolution = match ctx.sema.resolve_path(&path) {
|
||||
let resolution = match ctx.sema.resolve_path(path) {
|
||||
Some(res) => res,
|
||||
None => return,
|
||||
};
|
||||
|
|
|
@ -467,7 +467,7 @@ impl<'a> CompletionContext<'a> {
|
|||
self.expected_type = expected_type;
|
||||
self.expected_name = expected_name;
|
||||
|
||||
let name_like = match find_node_at_offset(&&file_with_fake_ident, offset) {
|
||||
let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
|
||||
Some(it) => it,
|
||||
None => return,
|
||||
};
|
||||
|
|
|
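The `CompletionContext` hunk just above is the starkest case: the source literally spelled a double borrow, `&&file_with_fake_ident`, where the callee wants a single reference. A sketch of why both forms type-check (illustrative function, not from this commit):

    fn first_char(text: &String) -> Option<char> {
        text.chars().next()
    }

    fn main() {
        let owned = String::from("rust-analyzer");
        // `&&owned` is `&&String`; deref coercion peels the outer layer,
        // so this compiles, but the second `&` does nothing useful.
        assert_eq!(first_char(&&owned), Some('r'));
        // One borrow is all the signature asks for.
        assert_eq!(first_char(&owned), Some('r'));
    }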
@@ -115,12 +115,12 @@ pub(crate) fn determine_location(
 ) -> Option<ImmediateLocation> {
     let node = match name_like {
         ast::NameLike::NameRef(name_ref) => {
-            if ast::RecordExprField::for_field_name(&name_ref).is_some() {
+            if ast::RecordExprField::for_field_name(name_ref).is_some() {
                 return sema
                     .find_node_at_offset_with_macros(original_file, offset)
                     .map(ImmediateLocation::RecordExpr);
             }
-            if ast::RecordPatField::for_field_name_ref(&name_ref).is_some() {
+            if ast::RecordPatField::for_field_name_ref(name_ref).is_some() {
                 return sema
                     .find_node_at_offset_with_macros(original_file, offset)
                     .map(ImmediateLocation::RecordPat);
@@ -128,7 +128,7 @@ pub(crate) fn determine_location(
             maximize_name_ref(name_ref)
         }
         ast::NameLike::Name(name) => {
-            if ast::RecordPatField::for_field_name(&name).is_some() {
+            if ast::RecordPatField::for_field_name(name).is_some() {
                 return sema
                     .find_node_at_offset_with_macros(original_file, offset)
                     .map(ImmediateLocation::RecordPat);
@@ -86,7 +86,7 @@ impl<'a> RenderContext<'a> {
     }

     fn db(&self) -> &'a RootDatabase {
-        &self.completion.db
+        self.completion.db
     }

     fn source_range(&self) -> TextRange {
@@ -75,10 +75,10 @@ fn render_pat(
 ) -> Option<String> {
     let mut pat = match kind {
         StructKind::Tuple if ctx.snippet_cap().is_some() => {
-            render_tuple_as_pat(&fields, &name, fields_omitted)
+            render_tuple_as_pat(fields, name, fields_omitted)
         }
         StructKind::Record => {
-            render_record_as_pat(ctx.db(), ctx.snippet_cap(), &fields, &name, fields_omitted)
+            render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted)
         }
         _ => return None,
     };
@@ -86,7 +86,7 @@ fn render_pat(
     if ctx.completion.is_param {
         pat.push(':');
         pat.push(' ');
-        pat.push_str(&name);
+        pat.push_str(name);
     }
     if ctx.snippet_cap().is_some() {
         pat.push_str("$0");
@@ -162,7 +162,7 @@ impl ActiveParameter {
     }

     pub fn at_token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Self> {
-        let (signature, active_parameter) = call_info_impl(&sema, token)?;
+        let (signature, active_parameter) = call_info_impl(sema, token)?;

         let idx = active_parameter?;
         let mut params = signature.params(sema.db);
@@ -323,7 +323,7 @@ fn import_for_item(
     }

     let segment_import =
-        find_import_for_segment(db, original_item_candidate, &unresolved_first_segment)?;
+        find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
     let trait_item_to_import = item_as_assoc(db, original_item)
         .and_then(|assoc| assoc.containing_trait(db))
         .map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
@@ -383,7 +383,7 @@ fn find_import_for_segment(
         original_item
     } else {
         let matching_module =
-            module_with_segment_name(db, &unresolved_first_segment, original_item)?;
+            module_with_segment_name(db, unresolved_first_segment, original_item)?;
         ItemInNs::from(ModuleDef::from(matching_module))
     })
 }
@@ -124,7 +124,7 @@ fn recursive_merge(
                     .map(|tree_list| tree_list.use_trees().any(tree_is_self))
                     .unwrap_or(false)
             };
-            match (tree_contains_self(&lhs_t), tree_contains_self(&rhs_t)) {
+            match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
                 (true, false) => continue,
                 (false, true) => {
                     *lhs_t = rhs_t;
@@ -409,7 +409,7 @@ impl<'a> FindUsages<'a> {
                 if let Some(ast::NameLike::NameRef(name_ref)) =
                     sema.find_node_at_offset_with_descend(&tree, offset)
                 {
-                    if self.found_self_ty_name_ref(&self_ty, &name_ref, sink) {
+                    if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                         return;
                     }
                 }
@@ -424,7 +424,7 @@ impl<'a> FindUsages<'a> {
         name_ref: &ast::NameRef,
         sink: &mut dyn FnMut(FileId, FileReference) -> bool,
     ) -> bool {
-        match NameRefClass::classify(self.sema, &name_ref) {
+        match NameRefClass::classify(self.sema, name_ref) {
             Some(NameRefClass::Definition(Definition::SelfType(impl_)))
                 if impl_.self_ty(self.sema.db) == *self_ty =>
             {
@@ -464,13 +464,13 @@ impl<'a> FindUsages<'a> {
         name_ref: &ast::NameRef,
         sink: &mut dyn FnMut(FileId, FileReference) -> bool,
     ) -> bool {
-        match NameRefClass::classify(self.sema, &name_ref) {
+        match NameRefClass::classify(self.sema, name_ref) {
             Some(NameRefClass::Definition(def)) if def == self.def => {
                 let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                 let reference = FileReference {
                     range,
                     name: ast::NameLike::NameRef(name_ref.clone()),
-                    access: reference_access(&def, &name_ref),
+                    access: reference_access(&def, name_ref),
                 };
                 sink(file_id, reference)
             }
@@ -480,7 +480,7 @@ impl<'a> FindUsages<'a> {
                 let reference = FileReference {
                     range,
                     name: ast::NameLike::NameRef(name_ref.clone()),
-                    access: reference_access(&def, &name_ref),
+                    access: reference_access(&def, name_ref),
                 };
                 sink(file_id, reference)
             } else {
@@ -491,10 +491,10 @@ impl<'a> FindUsages<'a> {
                 let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                 let access = match self.def {
                     Definition::Field(_) if field == self.def => {
-                        reference_access(&field, &name_ref)
+                        reference_access(&field, name_ref)
                     }
                     Definition::Local(l) if local == l => {
-                        reference_access(&Definition::Local(local), &name_ref)
+                        reference_access(&Definition::Local(local), name_ref)
                     }
                     _ => return false,
                 };
@@ -382,7 +382,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
         code: Option<T>,
     ) -> Result<(), MatchFailed> {
         match (pattern, code) {
-            (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
+            (Some(p), Some(c)) => self.attempt_match_node(phase, p.syntax(), c.syntax()),
             (None, None) => Ok(()),
             (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
             (None, Some(c)) => {
@@ -478,7 +478,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
                 if Some(first_token.text()) == next_pattern_token.as_deref() {
                     if let Some(SyntaxElement::Node(p)) = pattern.next() {
                         // We have a subtree that starts with the next token in our pattern.
-                        self.attempt_match_token_tree(phase, &p, &n)?;
+                        self.attempt_match_token_tree(phase, &p, n)?;
                         break;
                     }
                 }
@@ -609,7 +609,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
         expr: &ast::Expr,
     ) -> Result<usize, MatchFailed> {
         use hir::HirDisplay;
-        let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| {
+        let code_type = self.sema.type_of_expr(expr).ok_or_else(|| {
             match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
         })?;
         // Temporary needed to make the borrow checker happy.
@@ -84,16 +84,16 @@ impl ReplacementRenderer<'_> {
     fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
         match node_or_token {
             SyntaxElement::Token(token) => {
-                self.render_token(&token);
+                self.render_token(token);
             }
             SyntaxElement::Node(child_node) => {
-                self.render_node(&child_node);
+                self.render_node(child_node);
             }
         }
     }

     fn render_node(&mut self, node: &SyntaxNode) {
-        if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
+        if let Some(mod_path) = self.match_info.rendered_template_paths.get(node) {
             self.out.push_str(&mod_path.to_string());
             // Emit everything except for the segment's name-ref, since we already effectively
             // emitted that as part of `mod_path`.
@@ -107,12 +107,12 @@ impl ReplacementRenderer<'_> {
                 }
             }
         } else {
-            self.render_node_children(&node);
+            self.render_node_children(node);
         }
     }

     fn render_token(&mut self, token: &SyntaxToken) {
-        if let Some(placeholder) = self.rule.get_placeholder(&token) {
+        if let Some(placeholder) = self.rule.get_placeholder(token) {
             if let Some(placeholder_value) =
                 self.match_info.placeholder_values.get(&placeholder.ident)
             {
@@ -211,7 +211,7 @@ impl<'db> ResolutionScope<'db> {
         // First try resolving the whole path. This will work for things like
         // `std::collections::HashMap`, but will fail for things like
         // `std::collections::HashMap::new`.
-        if let Some(resolution) = self.scope.speculative_resolve(&path) {
+        if let Some(resolution) = self.scope.speculative_resolve(path) {
             return Some(resolution);
         }
         // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if
@@ -173,7 +173,7 @@ impl<'db> MatchFinder<'db> {
         if !is_search_permitted(code) {
             return;
         }
-        self.try_add_match(rule, &code, restrict_range, matches_out);
+        self.try_add_match(rule, code, restrict_range, matches_out);
         // If we've got a macro call, we already tried matching it pre-expansion, which is the only
         // way to match the whole macro, now try expanding it and matching the expansion.
         if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
@@ -129,7 +129,7 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
     let matched_strings: Vec<String> =
         match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
     if matched_strings != expected && !expected.is_empty() {
-        print_match_debug_info(&match_finder, position.file_id, &expected[0]);
+        print_match_debug_info(&match_finder, position.file_id, expected[0]);
     }
     assert_eq!(matched_strings, expected);
 }
@@ -121,7 +121,7 @@ impl Match {

 /// Matching errors are added to the `Match`.
 pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match {
-    let mut res = match_loop(pattern, &input);
+    let mut res = match_loop(pattern, input);
     res.bound_count = count(res.bindings.bindings());
     return res;

@@ -202,7 +202,7 @@ impl BindingsBuilder {
     }

     fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) {
-        let BindingsIdx(idx, nidx) = self.copy(&child);
+        let BindingsIdx(idx, nidx) = self.copy(child);
         self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx))));
     }

@@ -221,7 +221,7 @@ impl BindingsBuilder {

     fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) {
         let mut nodes = Vec::new();
-        self.collect_nodes(&link_nodes, &mut nodes);
+        self.collect_nodes(link_nodes, &mut nodes);

         for cmd in nodes {
             match &**cmd {
@@ -282,7 +282,7 @@ impl BindingsBuilder {

         nested_refs.into_iter().for_each(|iter| {
             let mut child_bindings = Bindings::default();
-            self.build_inner(&mut child_bindings, &iter);
+            self.build_inner(&mut child_bindings, iter);
             nested.push(child_bindings)
         })
     }
@@ -417,7 +417,7 @@ fn match_loop_inner<'t>(
                 let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count);
                 if item.sep.is_some() && sep_idx != sep_len {
                     let sep = item.sep.as_ref().unwrap();
-                    if src.clone().expect_separator(&sep, sep_idx) {
+                    if src.clone().expect_separator(sep, sep_idx) {
                         item.dot.next();
                         item.sep_parsed = Some(sep_idx + 1);
                         try_push!(next_items, item);
@@ -487,7 +487,7 @@ fn match_loop_inner<'t>(
                         item.meta_result = Some((fork, match_res));
                         try_push!(bb_items, item);
                     } else {
-                        bindings_builder.push_optional(&mut item.bindings, &name);
+                        bindings_builder.push_optional(&mut item.bindings, name);
                         item.dot.next();
                         cur_items.push(item);
                     }
@@ -495,7 +495,7 @@ fn match_loop_inner<'t>(
                     Some(err) => {
                         res.add_err(err);
                         if let Some(fragment) = match_res.value {
-                            bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
+                            bindings_builder.push_fragment(&mut item.bindings, name, fragment);
                         }
                         item.is_error = true;
                         error_items.push(item);
@@ -504,7 +504,7 @@ fn match_loop_inner<'t>(
                 }
             }
             OpDelimited::Op(Op::Leaf(leaf)) => {
-                if let Err(err) = match_leaf(&leaf, &mut src.clone()) {
+                if let Err(err) = match_leaf(leaf, &mut src.clone()) {
                     res.add_err(err);
                     item.is_error = true;
                 } else {
@@ -640,10 +640,10 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
             let (iter, match_res) = item.meta_result.take().unwrap();
             match match_res.value {
                 Some(fragment) => {
-                    bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
+                    bindings_builder.push_fragment(&mut item.bindings, name, fragment);
                 }
                 None if match_res.err.is_none() => {
-                    bindings_builder.push_optional(&mut item.bindings, &name);
+                    bindings_builder.push_optional(&mut item.bindings, name);
                 }
                 _ => {}
             }
@@ -55,7 +55,7 @@ pub(super) fn transcribe(
     template: &MetaTemplate,
     bindings: &Bindings,
 ) -> ExpandResult<tt::Subtree> {
-    let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() };
+    let mut ctx = ExpandCtx { bindings: bindings, nesting: Vec::new() };
     let mut arena: Vec<tt::TokenTree> = Vec::new();
     expand_subtree(&mut ctx, template, None, &mut arena)
 }
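In the `transcribe` hunk the redundant borrow sits in a struct literal rather than a call: the `bindings` parameter is already a `&Bindings`, so `ExpandCtx { bindings: &bindings, .. }` stored a borrow of a borrow. The committed form, `bindings: bindings`, could even shrink further to the field-init shorthand. A sketch under those assumptions, with illustrative types rather than the real `ExpandCtx`:

    struct Ctx<'a> {
        bindings: &'a [u32],
    }

    fn make_ctx<'a>(bindings: &'a [u32]) -> Ctx<'a> {
        // `bindings` is already the reference the field wants; the
        // shorthand passes it straight through.
        Ctx { bindings }
    }

    fn main() {
        let data = vec![1, 2, 3];
        assert_eq!(make_ctx(&data).bindings.len(), 3);
    }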
@@ -91,12 +91,12 @@ fn expand_subtree(
             Op::Leaf(tt) => arena.push(tt.clone().into()),
             Op::Subtree { tokens, delimiter } => {
                 let ExpandResult { value: tt, err: e } =
-                    expand_subtree(ctx, &tokens, *delimiter, arena);
+                    expand_subtree(ctx, tokens, *delimiter, arena);
                 err = err.or(e);
                 arena.push(tt.into());
             }
             Op::Var { name, id, .. } => {
-                let ExpandResult { value: fragment, err: e } = expand_var(ctx, &name, *id);
+                let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
                 err = err.or(e);
                 push_fragment(arena, fragment);
             }
@@ -141,7 +141,7 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> {
             .into();
         ExpandResult::ok(Fragment::Tokens(tt))
     } else {
-        ctx.bindings.get(&v, &mut ctx.nesting).map_or_else(
+        ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
             |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) },
             |b| ExpandResult::ok(b.clone()),
         )
@@ -280,8 +280,8 @@ impl Rule {
             .expect_subtree()
             .map_err(|()| ParseError::Expected("expected subtree".to_string()))?;

-        let lhs = MetaTemplate(parse_pattern(&lhs)?);
-        let rhs = MetaTemplate(parse_template(&rhs)?);
+        let lhs = MetaTemplate(parse_pattern(lhs)?);
+        let rhs = MetaTemplate(parse_template(rhs)?);

         Ok(crate::Rule { lhs, rhs })
     }
@@ -290,7 +290,7 @@ impl Rule {
 fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
     for op in pattern.iter() {
         match op {
-            Op::Subtree { tokens, .. } => validate(&tokens)?,
+            Op::Subtree { tokens, .. } => validate(tokens)?,
             Op::Repeat { tokens: subtree, separator, .. } => {
                 // Checks that no repetition which could match an empty token
                 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
@@ -42,7 +42,7 @@ impl<'a> OpDelimitedIter<'a> {
     }

     pub(crate) fn reset(&self) -> Self {
-        Self { inner: &self.inner, idx: 0, delimited: self.delimited }
+        Self { inner: self.inner, idx: 0, delimited: self.delimited }
     }
 }

@@ -126,11 +126,11 @@ impl Separator {
 }

 pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
-    parse_inner(&template, Mode::Template).into_iter().collect()
+    parse_inner(template, Mode::Template).into_iter().collect()
 }

 pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
-    parse_inner(&pattern, Mode::Pattern).into_iter().collect()
+    parse_inner(pattern, Mode::Pattern).into_iter().collect()
 }

 #[derive(Clone, Copy)]
@@ -140,7 +140,7 @@ enum Mode {
 }

 fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ParseError>> {
-    let mut src = TtIter::new(&tt);
+    let mut src = TtIter::new(tt);
     std::iter::from_fn(move || {
         let first = src.next()?;
         Some(next_op(first, &mut src, mode))
@@ -171,7 +171,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result<Op, ParseError> {
             match second {
                 tt::TokenTree::Subtree(subtree) => {
                     let (separator, kind) = parse_repeat(src)?;
-                    let tokens = parse_inner(&subtree, mode)
+                    let tokens = parse_inner(subtree, mode)
                         .into_iter()
                         .collect::<Result<Vec<Op>, ParseError>>()?;
                     Op::Repeat { tokens: MetaTemplate(tokens), separator, kind }
@@ -191,7 +191,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result<Op, ParseError> {
                     Op::Var { name, kind, id }
                 }
                 tt::Leaf::Literal(lit) => {
-                    if is_boolean_literal(&lit) {
+                    if is_boolean_literal(lit) {
                         let name = lit.text.clone();
                         let kind = eat_fragment_kind(src, mode)?;
                         let id = lit.id;
@@ -206,7 +206,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result<Op, ParseError> {
         tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()),
         tt::TokenTree::Subtree(subtree) => {
             let tokens =
-                parse_inner(&subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?;
+                parse_inner(subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?;
             Op::Subtree { tokens: MetaTemplate(tokens), delimiter: subtree.delimiter }
         }
     };
@@ -22,7 +22,7 @@ impl<'a> SubtreeTokenSource {
     #[cfg(test)]
     pub(crate) fn text(&self) -> SmolStr {
         match self.cached.get(self.curr.1) {
-            Some(ref tt) => tt.text.clone(),
+            Some(tt) => tt.text.clone(),
             _ => SmolStr::new(""),
         }
     }
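The `SubtreeTokenSource` hunk above fixes the pattern-side spelling of the same mistake: `self.cached.get(..)` already yields an `Option<&T>`, so `Some(ref tt)` bound `tt` as a `&&T`. Dropping `ref` binds the reference itself. A sketch with an illustrative type in place of the real cache entry:

    fn text_of(slot: Option<&String>) -> String {
        match slot {
            // With `Some(ref s)` the binding would be `&&String`;
            // plain `Some(s)` gives the `&String` the body wants.
            Some(s) => s.clone(),
            None => String::new(),
        }
    }

    fn main() {
        let owned = String::from("tt");
        assert_eq!(text_of(Some(&owned)), "tt");
        assert_eq!(text_of(None), "");
    }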
@@ -59,7 +59,7 @@ impl<'a> SubtreeTokenSource {

             current = match tt {
                 Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
-                    cached.push(convert_leaf(&leaf));
+                    cached.push(convert_leaf(leaf));
                     cursor.bump()
                 }
                 Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
@@ -114,7 +114,7 @@ impl<'a> TokenSource for SubtreeTokenSource {
     /// Is the current token a specified keyword?
     fn is_keyword(&self, kw: &str) -> bool {
         match self.cached.get(self.curr.1) {
-            Some(ref t) => t.text == *kw,
+            Some(t) => t.text == *kw,
             _ => false,
         }
     }
@@ -633,7 +633,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 }
             }
         };
-        self.buf += &text;
+        self.buf += text;
         self.text_pos += TextSize::of(text);
     }

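`self.buf += &text;` in the `TtTreeSink` hunk is the operator form of the lint: `String` implements `AddAssign<&str>`, and `text` is already a `&str` at that point, so the borrow added a level only for coercion to strip off again. A sketch:

    fn main() {
        let mut buf = String::new();
        let text: &str = "fn main() {}";
        // `buf += &text` would pass `&&str` and lean on deref coercion;
        // the unborrowed form matches the `AddAssign<&str>` impl directly.
        buf += text;
        assert_eq!(buf, "fn main() {}");
    }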
@@ -490,7 +490,7 @@ MACRO_ITEMS@0..40

 fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
     if let tt::TokenTree::Subtree(subtree) = tt {
-        return &subtree;
+        return subtree;
     }
     unreachable!("It is not a subtree");
 }
@@ -115,7 +115,7 @@ impl<'a> TtIter<'a> {
             }
         }

-        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
+        let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
         let mut src = SubtreeTokenSource::new(&buffer);
         let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };

@@ -59,7 +59,7 @@ pub trait Message: Serialize + DeserializeOwned {
         Ok(match read_json(inp, buf)? {
             None => None,
             Some(text) => {
-                let mut deserializer = serde_json::Deserializer::from_str(&text);
+                let mut deserializer = serde_json::Deserializer::from_str(text);
                 // Note that some proc-macro generate very deep syntax tree
                 // We have to disable the current limit of serde here
                 deserializer.disable_recursion_limit();
@@ -184,7 +184,7 @@ impl WorkspaceBuildData {

                 // Copy-pasted from existing cargo_metadata. It seems like we
                 // should be using sered_stacker here?
-                let mut deserializer = serde_json::Deserializer::from_str(&line);
+                let mut deserializer = serde_json::Deserializer::from_str(line);
                 deserializer.disable_recursion_limit();
                 let message = Message::deserialize(&mut deserializer)
                     .unwrap_or(Message::TextLine(line.to_string()));
@@ -278,7 +278,7 @@ impl CargoWorkspace {
                 id, edition, name, manifest_path, version, metadata, ..
             } = meta_pkg;
             let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default();
-            let is_member = ws_members.contains(&id);
+            let is_member = ws_members.contains(id);
             let edition = edition
                 .parse::<Edition>()
                 .with_context(|| format!("Failed to parse edition {}", edition))?;
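`ws_members.contains(&id)` in the `CargoWorkspace` hunk is the collection variant: `contains` already takes its argument by reference, and the destructuring of `meta_pkg` left `id` as a reference, so `&id` was a `&&_`. A sketch with plain integers standing in for the real package IDs:

    fn main() {
        let members = vec![1u32, 2, 3];
        let id: &u32 = &2;
        // Redundant: `&id` is `&&u32`, accepted only via deref coercion.
        assert!(members.contains(&id));
        // Direct: `id` is already the `&u32` that `contains` expects.
        assert!(members.contains(id));
    }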
@@ -142,12 +142,12 @@ fn discover_sysroot_src_dir(
         log::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
     }

-    get_rust_src(&sysroot_path)
+    get_rust_src(sysroot_path)
         .or_else(|| {
             let mut rustup = Command::new(toolchain::rustup());
             rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
             utf8_stdout(rustup).ok()?;
-            get_rust_src(&sysroot_path)
+            get_rust_src(sysroot_path)
         })
         .ok_or_else(|| {
             format_err!(
@@ -185,7 +185,7 @@ impl ProjectWorkspace {

     pub fn load_detached_files(detached_files: Vec<AbsPathBuf>) -> Result<ProjectWorkspace> {
         let sysroot = Sysroot::discover(
-            &detached_files.first().ok_or_else(|| format_err!("No detached files to load"))?,
+            detached_files.first().ok_or_else(|| format_err!("No detached files to load"))?,
         )?;
         let rustc_cfg = rustc_cfg::get(None, None);
         Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
@@ -324,7 +324,7 @@ impl ProjectWorkspace {
     pub fn collect_build_data_configs(&self, collector: &mut BuildDataCollector) {
         match self {
             ProjectWorkspace::Cargo { cargo, .. } => {
-                collector.add_config(&cargo.workspace_root(), cargo.build_data_config().clone());
+                collector.add_config(cargo.workspace_root(), cargo.build_data_config().clone());
             }
             _ => {}
         }
@@ -348,7 +348,7 @@ fn project_json_to_crate_graph(
         .crates()
         .filter_map(|(crate_id, krate)| {
             let file_path = &krate.root_module;
-            let file_id = load(&file_path)?;
+            let file_id = load(file_path)?;
             Some((crate_id, krate, file_id))
         })
         .map(|(crate_id, krate, file_id)| {
@@ -534,7 +534,7 @@ fn detached_files_to_crate_graph(
     cfg_options.extend(rustc_cfg);

     for detached_file in detached_files {
-        let file_id = match load(&detached_file) {
+        let file_id = match load(detached_file) {
             Some(file_id) => file_id,
             None => {
                 log::error!("Failed to load detached file {:?}", detached_file);
@@ -602,7 +602,7 @@ fn handle_rustc_crates(
                         crate_graph,
                         &rustc_workspace[pkg],
                         rustc_build_data_map.and_then(|it| it.get(&rustc_workspace[pkg].id)),
-                        &cfg_options,
+                        cfg_options,
                         proc_macro_loader,
                         file_id,
                         &rustc_workspace[tgt].name,
@@ -685,7 +685,7 @@ fn add_target_crate_root(
     let proc_macro = build_data
         .as_ref()
         .and_then(|it| it.proc_macro_dylib_path.as_ref())
-        .map(|it| proc_macro_loader(&it))
+        .map(|it| proc_macro_loader(it))
         .unwrap_or_default();

     let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string());
@@ -123,7 +123,7 @@ impl CargoTargetSpec {
         let res = CargoTargetSpec {
             workspace_root: cargo_ws.workspace_root().to_path_buf(),
             cargo_toml: package_data.manifest.clone(),
-            package: cargo_ws.package_flag(&package_data),
+            package: cargo_ws.package_flag(package_data),
             target: target_data.name.clone(),
             target_kind: target_data.kind,
         };
@@ -126,7 +126,7 @@ fn load_crate_graph(
             }
         }
     }
-    let source_roots = source_root_config.partition(&vfs);
+    let source_roots = source_root_config.partition(vfs);
     analysis_change.set_roots(source_roots);

     analysis_change.set_crate_graph(crate_graph);
@@ -47,7 +47,7 @@ impl DiagnosticCollection {
     ) {
         let diagnostics = self.check.entry(file_id).or_default();
         for existing_diagnostic in diagnostics.iter() {
-            if are_diagnostics_equal(&existing_diagnostic, &diagnostic) {
+            if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
                 return;
             }
         }
@@ -224,7 +224,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(

     let mut message = rd.message.clone();
     for child in &rd.children {
-        let child = map_rust_child_diagnostic(config, workspace_root, &child);
+        let child = map_rust_child_diagnostic(config, workspace_root, child);
         match child {
             MappedRustChildDiagnostic::SubDiagnostic(sub) => {
                 subdiagnostics.push(sub);
@@ -268,7 +268,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
     primary_spans
         .iter()
         .flat_map(|primary_span| {
-            let primary_location = primary_location(config, workspace_root, &primary_span);
+            let primary_location = primary_location(config, workspace_root, primary_span);

             let mut message = message.clone();
             if needs_primary_span_label {
@@ -298,7 +298,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
                 // generated that code.
                 let is_in_macro_call = i != 0;

-                let secondary_location = location(config, workspace_root, &span);
+                let secondary_location = location(config, workspace_root, span);
                 if secondary_location == primary_location {
                     continue;
                 }
@@ -194,7 +194,7 @@ impl GlobalState {
             change.change_file(file.file_id, text);
         }
         if has_fs_changes {
-            let roots = self.source_root_config.partition(&vfs);
+            let roots = self.source_root_config.partition(vfs);
             change.set_roots(roots);
         }
         change
@@ -291,7 +291,7 @@ impl GlobalStateSnapshot {
     }

     pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> {
-        let path = from_proto::vfs_path(&url).ok()?;
+        let path = from_proto::vfs_path(url).ok()?;
         Some(self.mem_docs.get(&path)?.version)
     }

@@ -300,7 +300,7 @@ impl GlobalStateSnapshot {
         base.pop();
         let path = base.join(&path.path).unwrap();
         let path = path.as_path().unwrap();
-        url_from_abs_path(&path)
+        url_from_abs_path(path)
     }

     pub(crate) fn cargo_target_for_crate_root(
@@ -312,7 +312,7 @@ impl GlobalStateSnapshot {
         let path = path.as_path()?;
         self.workspaces.iter().find_map(|ws| match ws {
             ProjectWorkspace::Cargo { cargo, .. } => {
-                cargo.target_by_root(&path).map(|it| (cargo, it))
+                cargo.target_by_root(path).map(|it| (cargo, it))
             }
             ProjectWorkspace::Json { .. } => None,
             ProjectWorkspace::DetachedFiles { .. } => None,
@@ -323,7 +323,7 @@ impl GlobalStateSnapshot {
 pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
     let path = vfs.file_path(id);
     let path = path.as_path().unwrap();
-    url_from_abs_path(&path)
+    url_from_abs_path(path)
 }

 pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
@@ -1396,7 +1396,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(

     if let Some(prev_id) = &cached_tokens.result_id {
         if *prev_id == params.previous_result_id {
-            let delta = to_proto::semantic_token_delta(&cached_tokens, &semantic_tokens);
+            let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens);
             *cached_tokens = semantic_tokens;
             return Ok(Some(delta.into()));
         }
@@ -1540,7 +1540,7 @@ fn runnable_action_links(
     snap: &GlobalStateSnapshot,
     runnable: Runnable,
 ) -> Option<lsp_ext::CommandLinkGroup> {
-    let cargo_spec = CargoTargetSpec::for_file(&snap, runnable.nav.file_id).ok()?;
+    let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
     let hover_config = snap.config.hover();
     if !hover_config.runnable() || should_skip_target(&runnable, cargo_spec.as_ref()) {
         return None;
@@ -1624,7 +1624,7 @@ fn run_rustfmt(
     text_document: TextDocumentIdentifier,
     range: Option<lsp_types::Range>,
 ) -> Result<Option<Vec<lsp_types::TextEdit>>> {
-    let file_id = from_proto::file_id(&snap, &text_document.uri)?;
+    let file_id = from_proto::file_id(snap, &text_document.uri)?;
     let file = snap.analysis.file_text(file_id)?;
     let crate_ids = snap.analysis.crate_for(file_id)?;

@@ -1671,7 +1671,7 @@ fn run_rustfmt(
         .into());
     }

-    let frange = from_proto::file_range(&snap, text_document, range)?;
+    let frange = from_proto::file_range(snap, text_document, range)?;
     let start_line = line_index.index.line_col(frange.range.start()).line;
     let end_line = line_index.index.line_col(frange.range.end()).line;

@@ -124,7 +124,7 @@ pub(crate) fn apply_document_changes(
         match change.range {
             Some(range) => {
                 if !index_valid.covers(range.end.line) {
-                    line_index.index = Arc::new(ide::LineIndex::new(&old_text));
+                    line_index.index = Arc::new(ide::LineIndex::new(old_text));
                 }
                 index_valid = IndexValid::UpToLineExclusive(range.start.line);
                 let range = from_proto::text_range(&line_index, range);