a number of code simplifications

Daniel Eades 2023-01-10 18:48:51 +00:00
parent 3987c835f2
commit ac3844a0bb
13 changed files with 99 additions and 124 deletions


@@ -80,7 +80,7 @@ impl<'a> Write for Printer<'a> {
     fn write_str(&mut self, s: &str) -> fmt::Result {
         for line in s.split_inclusive('\n') {
             if self.needs_indent {
-                match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() {
+                match self.buf.chars().rev().find(|ch| *ch != ' ') {
                     Some('\n') | None => {}
                     _ => self.buf.push('\n'),
                 }
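
The change above replaces a `skip_while(..).next()` chain with the equivalent `Iterator::find` (the pattern clippy's `skip_while_next` lint describes). A minimal standalone sketch of the same idea; the helper name is illustrative, not from the commit:

    // Returns the last non-space character of a buffer, scanning from the end.
    fn last_non_space(buf: &str) -> Option<char> {
        // Before: buf.chars().rev().skip_while(|ch| *ch == ' ').next()
        buf.chars().rev().find(|ch| *ch != ' ')
    }

    fn main() {
        assert_eq!(last_non_space("indent  "), Some('t'));
        assert_eq!(last_non_space("   "), None);
    }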


@@ -1600,17 +1600,15 @@ impl ModCollector<'_, '_> {
                     FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
                 let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
-                if self.def_collector.is_proc_macro {
-                    if self.module_id == def_map.root {
-                        if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
-                            let crate_root = def_map.module_id(def_map.root);
-                            self.def_collector.export_proc_macro(
-                                proc_macro,
-                                ItemTreeId::new(self.tree_id, id),
-                                fn_id,
-                                crate_root,
-                            );
-                        }
+                if self.def_collector.is_proc_macro && self.module_id == def_map.root {
+                    if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
+                        let crate_root = def_map.module_id(def_map.root);
+                        self.def_collector.export_proc_macro(
+                            proc_macro,
+                            ItemTreeId::new(self.tree_id, id),
+                            fn_id,
+                            crate_root,
+                        );
                     }
                 }
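
Here two nested `if`s collapse into a single `&&` guard (clippy's `collapsible_if` pattern). A generic sketch with placeholder names:

    fn export_if_eligible(is_proc_macro: bool, is_root_module: bool) {
        // Before:
        //     if is_proc_macro {
        //         if is_root_module { do_export(); }
        //     }
        // After: one guard, one less level of nesting.
        if is_proc_macro && is_root_module {
            do_export();
        }
    }

    fn do_export() {}

    fn main() {
        export_if_eligible(true, true);
    }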


@@ -472,8 +472,8 @@ impl Module {
         let def_map = self.id.def_map(db.upcast());
         let children = def_map[self.id.local_id]
             .children
-            .iter()
-            .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) })
+            .values()
+            .map(|module_id| Module { id: def_map.module_id(*module_id) })
             .collect::<Vec<_>>();
         children.into_iter()
     }
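
Iterating a map's `.values()` replaces `.iter()` plus a tuple-destructuring `map` that throws the keys away. A small sketch using `std::collections::HashMap` as a stand-in for the map type in the commit:

    use std::collections::HashMap;

    fn main() {
        let children: HashMap<&str, u32> = HashMap::from([("a", 1), ("b", 2)]);
        // Before: children.iter().map(|(_, id)| *id).collect()
        let mut ids: Vec<u32> = children.values().copied().collect();
        ids.sort();
        assert_eq!(ids, vec![1, 2]);
    }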


@@ -82,18 +82,18 @@ fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code:
     let generic_params = impl_.generic_param_list().map(|generic_params| {
         let lifetime_params =
             generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
-        let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+        let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
             // remove defaults since they can't be specified in impls
             match param {
                 ast::TypeOrConstParam::Type(param) => {
                     let param = param.clone_for_update();
                     param.remove_default();
-                    Some(ast::GenericParam::TypeParam(param))
+                    ast::GenericParam::TypeParam(param)
                 }
                 ast::TypeOrConstParam::Const(param) => {
                     let param = param.clone_for_update();
                     param.remove_default();
-                    Some(ast::GenericParam::ConstParam(param))
+                    ast::GenericParam::ConstParam(param)
                 }
             }
         });
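
Because every branch of the closure returned `Some(..)`, the `filter_map` never filtered anything and becomes a plain `map` (clippy's `unnecessary_filter_map` pattern). A minimal sketch:

    fn main() {
        let params = [1, 2, 3];
        // Before: params.iter().filter_map(|p| Some(p * 2)).collect()
        let doubled: Vec<i32> = params.iter().map(|p| p * 2).collect();
        assert_eq!(doubled, vec![2, 4, 6]);
    }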


@@ -92,7 +92,7 @@ trait Merge: AstNode + Clone {
     fn try_merge_from(self, items: &mut dyn Iterator<Item = Self>) -> Option<Vec<Edit>> {
         let mut edits = Vec::new();
         let mut merged = self.clone();
-        while let Some(item) = items.next() {
+        for item in items {
             merged = merged.try_merge(&item)?;
             edits.push(Edit::Remove(item.into_either()));
         }
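
Driving an iterator with `while let Some(x) = iter.next()` is the manual form of a `for` loop (clippy's `while_let_on_iterator`); `for` accepts any `IntoIterator`, including the `&mut dyn Iterator` used here. A standalone sketch:

    fn total(items: &mut dyn Iterator<Item = u32>) -> u32 {
        let mut sum = 0;
        // Before: while let Some(item) = items.next() { sum += item; }
        for item in items {
            sum += item;
        }
        sum
    }

    fn main() {
        assert_eq!(total(&mut [1u32, 2, 3].into_iter()), 6);
    }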


@@ -86,8 +86,7 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
            it.prev_sibling_or_token()
        })
        .map(|it| it.kind())
-       .skip_while(|it| it.is_trivia())
-       .next()
+       .find(|it| !it.is_trivia())
            == Some(T![,]);
     let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
     if !has_comma_after && !has_arms_after {


@@ -334,11 +334,9 @@ pub fn source_edit_from_references(
            }
            _ => false,
        };
-       if !has_emitted_edit {
-           if !edited_ranges.contains(&range.start()) {
-               edit.replace(range, new_name.to_string());
-               edited_ranges.push(range.start());
-           }
+       if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
+           edit.replace(range, new_name.to_string());
+           edited_ranges.push(range.start());
        }
    }


@@ -494,20 +494,28 @@ impl<'a> FindUsages<'a> {
         }
         // Search for `super` and `crate` resolving to our module
-        match self.def {
-            Definition::Module(module) => {
-                let scope = search_scope
-                    .intersection(&SearchScope::module_and_children(self.sema.db, module));
-                let is_crate_root =
-                    module.is_crate_root(self.sema.db).then(|| Finder::new("crate"));
-                let finder = &Finder::new("super");
-                for (text, file_id, search_range) in scope_files(sema, &scope) {
-                    let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
+        if let Definition::Module(module) = self.def {
+            let scope =
+                search_scope.intersection(&SearchScope::module_and_children(self.sema.db, module));
+            let is_crate_root = module.is_crate_root(self.sema.db).then(|| Finder::new("crate"));
+            let finder = &Finder::new("super");
+            for (text, file_id, search_range) in scope_files(sema, &scope) {
+                let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
+                for offset in match_indices(&text, finder, search_range) {
+                    if let Some(iter) = find_nodes("super", &tree, offset) {
+                        for name_ref in iter.filter_map(ast::NameRef::cast) {
+                            if self.found_name_ref(&name_ref, sink) {
+                                return;
+                            }
+                        }
+                    }
+                }
+                if let Some(finder) = &is_crate_root {
                     for offset in match_indices(&text, finder, search_range) {
-                        if let Some(iter) = find_nodes("super", &tree, offset) {
+                        if let Some(iter) = find_nodes("crate", &tree, offset) {
                             for name_ref in iter.filter_map(ast::NameRef::cast) {
                                 if self.found_name_ref(&name_ref, sink) {
                                     return;
@@ -515,20 +523,8 @@ impl<'a> FindUsages<'a> {
                                 }
                             }
                         }
                     }
-                    if let Some(finder) = &is_crate_root {
-                        for offset in match_indices(&text, finder, search_range) {
-                            if let Some(iter) = find_nodes("crate", &tree, offset) {
-                                for name_ref in iter.filter_map(ast::NameRef::cast) {
-                                    if self.found_name_ref(&name_ref, sink) {
-                                        return;
-                                    }
-                                }
-                            }
-                        }
-                    }
                 }
             }
-            _ => (),
         }
         // search for module `self` references in our module's definition source
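
A `match` with one meaningful arm and a `_ => ()` catch-all reads more directly as `if let`, and drops a level of indentation (clippy's `single_match` pattern). A minimal sketch with an illustrative enum, not the real `Definition` type:

    enum Definition {
        Module(u32),
        Other,
    }

    fn handle(def: &Definition) {
        // Before:
        //     match def {
        //         Definition::Module(id) => println!("module {id}"),
        //         _ => (),
        //     }
        if let Definition::Module(id) = def {
            println!("module {id}");
        }
    }

    fn main() {
        handle(&Definition::Module(7));
        handle(&Definition::Other);
    }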


@@ -99,76 +99,66 @@ pub(crate) fn json_in_items(
        && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY)
    {
        let node_string = node.to_string();
-       if let Ok(it) = serde_json::from_str(&node_string) {
-           if let serde_json::Value::Object(it) = it {
+       if let Ok(serde_json::Value::Object(it)) = serde_json::from_str(&node_string) {
            let import_scope = ImportScope::find_insert_use_container(node, sema)?;
            let range = node.text_range();
            let mut edit = TextEdit::builder();
            edit.delete(range);
            let mut state = State::default();
            let semantics_scope = sema.scope(node)?;
            let scope_resolve =
                |it| semantics_scope.speculative_resolve(&make::path_from_text(it));
            let scope_has = |it| scope_resolve(it).is_some();
            let deserialize_resolved = scope_resolve("::serde::Deserialize");
            let serialize_resolved = scope_resolve("::serde::Serialize");
            state.has_deserialize = deserialize_resolved.is_some();
            state.has_serialize = serialize_resolved.is_some();
            state.build_struct(&it);
            edit.insert(range.start(), state.result);
            acc.push(
                Diagnostic::new(
                    "json-is-not-rust",
                    "JSON syntax is not valid as a Rust item",
                    range,
                )
                .severity(Severity::WeakWarning)
                .with_fixes(Some(vec![{
                    let mut scb = SourceChangeBuilder::new(file_id);
                    let scope = match import_scope {
                        ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
                        ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
                        ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
                    };
                    let current_module = semantics_scope.module();
                    if !scope_has("Serialize") {
                        if let Some(PathResolution::Def(it)) = serialize_resolved {
                            if let Some(it) = current_module.find_use_path_prefixed(
                                sema.db,
                                it,
                                config.insert_use.prefix_kind,
                                config.prefer_no_std,
                            ) {
-                               insert_use(
-                                   &scope,
-                                   mod_path_to_ast(&it),
-                                   &config.insert_use,
-                               );
+                               insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
                            }
                        }
                    }
                    if !scope_has("Deserialize") {
                        if let Some(PathResolution::Def(it)) = deserialize_resolved {
                            if let Some(it) = current_module.find_use_path_prefixed(
                                sema.db,
                                it,
                                config.insert_use.prefix_kind,
                                config.prefer_no_std,
                            ) {
-                               insert_use(
-                                   &scope,
-                                   mod_path_to_ast(&it),
-                                   &config.insert_use,
-                               );
+                               insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
                            }
                        }
                    }
                    let mut sc = scb.finish();
                    sc.insert_source_edit(file_id, edit.finish());
                    fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
                }])),
            );
-           }
        }
    }
    Some(())
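
Nested `if let`s that peel one enum layer at a time can be merged by nesting the patterns themselves; matching `Ok(serde_json::Value::Object(it))` in one step removes an entire indentation level. A small sketch of pattern nesting with a stand-in enum (no serde involved):

    enum Value {
        Object(Vec<(String, String)>),
        Other,
    }

    fn object_len(parsed: Result<Value, ()>) -> Option<usize> {
        // Before:
        //     if let Ok(it) = parsed {
        //         if let Value::Object(it) = it { return Some(it.len()); }
        //     }
        //     None
        if let Ok(Value::Object(it)) = parsed {
            Some(it.len())
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(object_len(Ok(Value::Object(vec![]))), Some(0));
        assert_eq!(object_len(Ok(Value::Other)), None);
        assert_eq!(object_len(Err(())), None);
    }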


@@ -11,10 +11,7 @@ pub(crate) fn private_assoc_item(
     d: &hir::PrivateAssocItem,
 ) -> Diagnostic {
     // FIXME: add quickfix
-    let name = match d.item.name(ctx.sema.db) {
-        Some(name) => format!("`{}` ", name),
-        None => String::new(),
-    };
+    let name = d.item.name(ctx.sema.db).map(|name| format!("`{name}` ")).unwrap_or_default();
     Diagnostic::new(
         "private-assoc-item",
         format!(
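
A `match` on an `Option` whose `None` arm produces an empty value is the `map(..).unwrap_or_default()` combinator. A standalone sketch:

    fn label(name: Option<&str>) -> String {
        // Before:
        //     match name {
        //         Some(name) => format!("`{name}` "),
        //         None => String::new(),
        //     }
        name.map(|name| format!("`{name}` ")).unwrap_or_default()
    }

    fn main() {
        assert_eq!(label(Some("foo")), "`foo` ");
        assert_eq!(label(None), "");
    }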


@@ -34,10 +34,7 @@ pub(crate) fn unresolved_proc_macro(
     let message = format!(
         "{message}: {}",
         if config_enabled {
-            match def_map.proc_macro_loading_error() {
-                Some(e) => e,
-                None => "proc macro not found in the built dylib",
-            }
+            def_map.proc_macro_loading_error().unwrap_or("proc macro not found in the built dylib")
         } else {
             match d.kind {
                 hir::MacroKind::Attr if proc_macros_enabled => {
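
When the `None` arm merely supplies a fixed fallback, the whole `match` is `Option::unwrap_or`. A tiny sketch:

    fn loading_error(err: Option<&'static str>) -> &'static str {
        // Before: match err { Some(e) => e, None => "proc macro not found" }
        err.unwrap_or("proc macro not found")
    }

    fn main() {
        assert_eq!(loading_error(Some("dylib missing")), "dylib missing");
        assert_eq!(loading_error(None), "proc macro not found");
    }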


@@ -212,7 +212,7 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe
     } else {
         ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
             |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) },
-            |it| ExpandResult::ok(it),
+            ExpandResult::ok,
         )
     }
 }
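
A closure that only forwards its argument to a function can be replaced by the function path itself (clippy's `redundant_closure` pattern). A minimal sketch using `Option::map`:

    fn double(x: i32) -> i32 {
        x * 2
    }

    fn main() {
        let v = Some(21);
        // Before: v.map(|it| double(it))
        assert_eq!(v.map(double), Some(42));
    }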


@@ -366,11 +366,11 @@ impl ProjectWorkspace {
                 _ => None,
             })
             .collect();
-        let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) {
+        let outputs = &mut (match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) {
             Ok(it) => Ok(it.into_iter()),
             // io::Error is not Clone?
             Err(e) => Err(Arc::new(e)),
-        };
+        });
         workspaces
             .iter()
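
`let ref mut x = expr;` binds a mutable reference through the pattern; `let x = &mut (expr);` says the same thing on the expression side (clippy's `toplevel_ref_arg` lint flags `ref` in `let` bindings). A standalone sketch:

    fn main() {
        // Before: let ref mut outputs = vec![1, 2, 3];
        let outputs = &mut vec![1, 2, 3];
        outputs.push(4);
        assert_eq!(outputs.len(), 4);
    }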