Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-11-10 15:14:32 +00:00)

commit ebbbaaa90f
parent 12e28c3575

    refactor: fix clippy lints

5 changed files with 50 additions and 54 deletions

@@ -33,13 +33,10 @@ impl<T: Internable> Interned<T> {
         // - if not, box it up, insert it, and return a clone
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, &obj) {
+        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
             RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
             RawEntryMut::Vacant(vac) => Self {
-                arc: vac
-                    .insert_hashed_nocheck(hash as u64, Arc::new(obj), SharedValue::new(()))
-                    .0
-                    .clone(),
+                arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(),
             },
         }
     }

@@ -54,13 +51,10 @@ impl Interned<str> {
         // - if not, box it up, insert it, and return a clone
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, s) {
+        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
             RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
             RawEntryMut::Vacant(vac) => Self {
-                arc: vac
-                    .insert_hashed_nocheck(hash as u64, Arc::from(s), SharedValue::new(()))
-                    .0
-                    .clone(),
+                arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(),
             },
         }
     }
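
Both interning hunks above appear to address clippy's `unnecessary_cast` lint: `hash` is evidently already a `u64`, so the `as u64` casts are redundant, and dropping them also lets rustfmt keep each insert call on a single line. A minimal, self-contained sketch of the lint (illustrative values, not the rust-analyzer code):

    // `unnecessary_cast` fires when a value is cast to the type it already has.
    fn main() {
        let hash: u64 = 0xdead_beef;
        let a = hash as u64; // clippy: the value is already a u64
        let b = hash;        // equivalent, lint-free
        assert_eq!(a, b);
    }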

@@ -32,29 +32,27 @@ impl LexedStr<'_> {
             let kind = self.kind(i);
             if kind.is_trivia() {
                 was_joint = false
-            } else {
-                if kind == SyntaxKind::IDENT {
-                    let token_text = self.text(i);
-                    let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
-                        .unwrap_or(SyntaxKind::IDENT);
-                    res.push_ident(contextual_kw);
-                } else {
-                    if was_joint {
-                        res.was_joint();
-                    }
-                    res.push(kind);
-                    // Tag the token as joint if it is float with a fractional part
-                    // we use this jointness to inform the parser about what token split
-                    // event to emit when we encounter a float literal in a field access
-                    if kind == SyntaxKind::FLOAT_NUMBER {
-                        if !self.text(i).ends_with('.') {
-                            res.was_joint();
-                        } else {
-                            was_joint = false;
-                        }
-                    } else {
-                        was_joint = true;
-                    }
-                }
-            }
+            } else if kind == SyntaxKind::IDENT {
+                let token_text = self.text(i);
+                let contextual_kw =
+                    SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+                res.push_ident(contextual_kw);
+            } else {
+                if was_joint {
+                    res.was_joint();
+                }
+                res.push(kind);
+                // Tag the token as joint if it is float with a fractional part
+                // we use this jointness to inform the parser about what token split
+                // event to emit when we encounter a float literal in a field access
+                if kind == SyntaxKind::FLOAT_NUMBER {
+                    if !self.text(i).ends_with('.') {
+                        res.was_joint();
+                    } else {
+                        was_joint = false;
+                    }
+                } else {
+                    was_joint = true;
+                }
+            }
         }
     }
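
The change above appears to be clippy's `collapsible_else_if`: the outer `else { if kind == SyntaxKind::IDENT { … } }` is flattened to `else if`, the body loses one indentation level, and behaviour is unchanged. A minimal sketch of the pattern (illustrative code, not from the diff):

    // `collapsible_else_if` suggests merging an `else` whose body is a single
    // `if` into `else if`.
    fn describe(n: i32) -> &'static str {
        // Before (lints): if n == 0 { "zero" } else { if n > 0 { "positive" } else { "negative" } }
        // After: same behaviour, one less nesting level.
        if n == 0 {
            "zero"
        } else if n > 0 {
            "positive"
        } else {
            "negative"
        }
    }

    fn main() {
        assert_eq!(describe(-3), "negative");
    }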

@@ -224,7 +224,7 @@ pub trait AttrsOwnerEdit: ast::HasAttrs {
             let after_attrs_and_comments = node
                 .children_with_tokens()
                 .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
-                .map_or(Position::first_child_of(node), |it| Position::before(it));
+                .map_or(Position::first_child_of(node), Position::before);
 
             ted::insert_all(
                 after_attrs_and_comments,
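
The `map_or` change looks like clippy's `redundant_closure`: `|it| Position::before(it)` only forwards its argument, so the function can be passed directly. A generic sketch (not the rust-analyzer API):

    // A closure that merely forwards its argument can be replaced by the
    // function itself.
    fn double(x: i32) -> i32 {
        x * 2
    }

    fn main() {
        let v: Option<i32> = Some(21);
        let a = v.map_or(0, |it| double(it)); // clippy: redundant closure
        let b = v.map_or(0, double);          // equivalent
        assert_eq!(a, b);
    }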

@@ -433,7 +433,9 @@ impl ast::UseTree {
         if &path == prefix && self.use_tree_list().is_none() {
             if self.star_token().is_some() {
                 // path$0::* -> *
-                self.coloncolon_token().map(ted::remove);
+                if let Some(a) = self.coloncolon_token() {
+                    ted::remove(a)
+                }
                 ted::remove(prefix.syntax());
             } else {
                 // path$0 -> self

@@ -460,7 +462,9 @@ impl ast::UseTree {
         for p in successors(parent.parent_path(), |it| it.parent_path()) {
             p.segment()?;
         }
-        prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove);
+        if let Some(a) = prefix.parent_path().and_then(|p| p.coloncolon_token()) {
+            ted::remove(a)
+        }
         ted::remove(prefix.syntax());
         Some(())
     }
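
Both `ast::UseTree` hunks replace `Option::map` used only for its side effect with an explicit `if let`, the shape suggested by clippy's `option_map_unit_fn` lint; the test-helper hunk below makes the same change. A generic sketch (the `remove` function here is illustrative, not `ted::remove`):

    // Calling `.map()` on an Option just to run a unit-returning function is
    // flagged; `if let` states the side effect explicitly.
    fn remove(name: &str) {
        println!("removing {name}");
    }

    fn main() {
        let token: Option<&str> = Some("::");
        // Before (lints): token.map(remove);
        if let Some(t) = token {
            remove(t)
        }
    }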

@@ -976,7 +980,9 @@ enum Foo {
 
     fn check_add_variant(before: &str, expected: &str, variant: ast::Variant) {
         let enum_ = ast_mut_from_text::<ast::Enum>(before);
-        enum_.variant_list().map(|it| it.add_variant(variant));
+        if let Some(it) = enum_.variant_list() {
+            it.add_variant(variant)
+        }
         let after = enum_.to_string();
         assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(after.trim()));
     }

@@ -181,29 +181,27 @@ impl ast::TokenTree {
             let kind = t.kind();
             if kind.is_trivia() {
                 was_joint = false
-            } else {
-                if kind == SyntaxKind::IDENT {
-                    let token_text = t.text();
-                    let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
-                        .unwrap_or(SyntaxKind::IDENT);
-                    parser_input.push_ident(contextual_kw);
-                } else {
-                    if was_joint {
-                        parser_input.was_joint();
-                    }
-                    parser_input.push(kind);
-                    // Tag the token as joint if it is float with a fractional part
-                    // we use this jointness to inform the parser about what token split
-                    // event to emit when we encounter a float literal in a field access
-                    if kind == SyntaxKind::FLOAT_NUMBER {
-                        if !t.text().ends_with('.') {
-                            parser_input.was_joint();
-                        } else {
-                            was_joint = false;
-                        }
-                    } else {
-                        was_joint = true;
-                    }
-                }
-            }
+            } else if kind == SyntaxKind::IDENT {
+                let token_text = t.text();
+                let contextual_kw =
+                    SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+                parser_input.push_ident(contextual_kw);
+            } else {
+                if was_joint {
+                    parser_input.was_joint();
+                }
+                parser_input.push(kind);
+                // Tag the token as joint if it is float with a fractional part
+                // we use this jointness to inform the parser about what token split
+                // event to emit when we encounter a float literal in a field access
+                if kind == SyntaxKind::FLOAT_NUMBER {
+                    if !t.text().ends_with('.') {
+                        parser_input.was_joint();
+                    } else {
+                        was_joint = false;
+                    }
+                } else {
+                    was_joint = true;
+                }
+            }
         }
     }
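
This hunk mirrors the `LexedStr` change above for re-lexing a token tree. The comment kept in both versions — tagging a float with a fractional part as joint so the parser knows which split event to emit — is easiest to see with tuple indexing, where one FLOAT_NUMBER token spans two field accesses. A small illustration in ordinary Rust (not parser code):

    // In `t.0.1` the text `0.1` is lexed as a single float literal, and the
    // parser has to split it back into `0`, `.`, `1` to resolve the two
    // nested tuple-field accesses.
    fn main() {
        let t = ((1, 2), 3);
        let inner_second = t.0.1; // parsed as (t.0).1, not t.(0.1)
        assert_eq!(inner_second, 2);
    }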

@@ -17,11 +17,11 @@ use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
 
 #[test]
 fn parse_smoke_test() {
-    let code = r##"
+    let code = r#"
 fn main() {
     println!("Hello, world!")
 }
-"##;
+"#;
 
     let parse = SourceFile::parse(code);
     // eprintln!("{:#?}", parse.syntax_node());
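
The smoke-test change drops one level of `#` from the raw string, which is what clippy's `needless_raw_string_hashes` lint asks for: a single `#` is already enough to contain the embedded quotes. A tiny sketch:

    // Raw strings should not use more `#` delimiters than their content needs.
    fn main() {
        let a = r##"say "hi""##; // lints: one `#` would do
        let b = r#"say "hi""#;   // minimal delimiters
        assert_eq!(a, b);
    }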