Apply more clippy suggestions and update generated files

This commit is contained in:
Clemens Wasser 2021-06-03 12:46:56 +02:00
parent 3c6dc0f89d
commit 629e8d1ed0
11 changed files with 51 additions and 142 deletions

View file

@@ -187,7 +187,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
let a = 1664525; let a = 1664525;
let c = 1013904223; let c = 1013904223;
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c); *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
return *seed; *seed
} }
fn make_ident(ident: &str) -> tt::TokenTree { fn make_ident(ident: &str) -> tt::TokenTree {
tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) }) tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })

View file

@@ -219,7 +219,7 @@ impl BindingsBuilder {
bindings bindings
} }
fn build_inner(&self, bindings: &mut Bindings, link_nodes: &Vec<LinkNode<Rc<BindingKind>>>) { fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) {
let mut nodes = Vec::new(); let mut nodes = Vec::new();
self.collect_nodes(&link_nodes, &mut nodes); self.collect_nodes(&link_nodes, &mut nodes);
@@ -301,7 +301,7 @@ impl BindingsBuilder {
fn collect_nodes<'a>( fn collect_nodes<'a>(
&'a self, &'a self,
link_nodes: &'a Vec<LinkNode<Rc<BindingKind>>>, link_nodes: &'a [LinkNode<Rc<BindingKind>>],
nodes: &mut Vec<&'a Rc<BindingKind>>, nodes: &mut Vec<&'a Rc<BindingKind>>,
) { ) {
link_nodes.iter().for_each(|it| match it { link_nodes.iter().for_each(|it| match it {
@@ -494,15 +494,8 @@ fn match_loop_inner<'t>(
} }
Some(err) => { Some(err) => {
res.add_err(err); res.add_err(err);
match match_res.value { if let Some(fragment) = match_res.value {
Some(fragment) => { bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
bindings_builder.push_fragment(
&mut item.bindings,
&name,
fragment,
);
}
_ => {}
} }
item.is_error = true; item.is_error = true;
error_items.push(item); error_items.push(item);
@@ -578,9 +571,9 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
); );
stdx::always!(cur_items.is_empty()); stdx::always!(cur_items.is_empty());
if error_items.len() > 0 { if !error_items.is_empty() {
error_recover_item = error_items.pop().map(|it| it.bindings); error_recover_item = error_items.pop().map(|it| it.bindings);
} else if eof_items.len() > 0 { } else if !eof_items.is_empty() {
error_recover_item = Some(eof_items[0].bindings.clone()); error_recover_item = Some(eof_items[0].bindings.clone());
} }
@@ -793,7 +786,7 @@ impl<'a> TtIter<'a> {
_ => (), _ => (),
} }
let tt = self.next().ok_or_else(|| ())?.clone(); let tt = self.next().ok_or(())?.clone();
let punct = match tt { let punct = match tt {
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => { tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
punct punct

View file

@@ -295,8 +295,8 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
// Checks that no repetition which could match an empty token // Checks that no repetition which could match an empty token
// https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
if separator.is_none() { if separator.is_none()
if subtree.iter().all(|child_op| { && subtree.iter().all(|child_op| {
match child_op { match child_op {
Op::Var { kind, .. } => { Op::Var { kind, .. } => {
// vis is optional // vis is optional
@@ -314,9 +314,9 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
Op::Subtree { .. } => {} Op::Subtree { .. } => {}
} }
false false
}) { })
return Err(ParseError::RepetitionEmptyTokenTree); {
} return Err(ParseError::RepetitionEmptyTokenTree);
} }
validate(subtree)? validate(subtree)?
} }

View file

@@ -213,7 +213,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
Ok(res) Ok(res)
} }
fn eat_fragment_kind<'a>(src: &mut TtIter<'a>, mode: Mode) -> Result<Option<SmolStr>, ParseError> { fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<SmolStr>, ParseError> {
if let Mode::Pattern = mode { if let Mode::Pattern = mode {
src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?; src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?;
let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?; let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?;

View file

@@ -243,8 +243,7 @@ trait TokenConvertor {
type Token: SrcToken; type Token: SrcToken;
fn go(&mut self) -> tt::Subtree { fn go(&mut self) -> tt::Subtree {
let mut subtree = tt::Subtree::default(); let mut subtree = tt::Subtree { delimiter: None, ..Default::default() };
subtree.delimiter = None;
while self.peek().is_some() { while self.peek().is_some() {
self.collect_leaf(&mut subtree.token_trees); self.collect_leaf(&mut subtree.token_trees);
} }
@@ -506,7 +505,7 @@ impl TokenConvertor for Convertor {
fn peek(&self) -> Option<Self::Token> { fn peek(&self) -> Option<Self::Token> {
if let Some((punct, mut offset)) = self.punct_offset.clone() { if let Some((punct, mut offset)) = self.punct_offset.clone() {
offset = offset + TextSize::of('.'); offset += TextSize::of('.');
if usize::from(offset) < punct.text().len() { if usize::from(offset) < punct.text().len() {
return Some(SynToken::Punch(punct, offset)); return Some(SynToken::Punch(punct, offset));
} }

View file

@@ -138,7 +138,7 @@ impl<'a> TtIter<'a> {
} }
} }
self.inner = self.inner.as_slice()[res.len()..].iter(); self.inner = self.inner.as_slice()[res.len()..].iter();
if res.len() == 0 && err.is_none() { if res.is_empty() && err.is_none() {
err = Some(err!("no tokens consumed")); err = Some(err!("no tokens consumed"));
} }
let res = match res.len() { let res = match res.len() {

View file

@@ -260,116 +260,33 @@ pub enum SyntaxKind {
use self::SyntaxKind::*; use self::SyntaxKind::*;
impl SyntaxKind { impl SyntaxKind {
pub fn is_keyword(self) -> bool { pub fn is_keyword(self) -> bool {
matches!( match self {
self, AS_KW | ASYNC_KW | AWAIT_KW | BOX_KW | BREAK_KW | CONST_KW | CONTINUE_KW | CRATE_KW
AS_KW | DYN_KW | ELSE_KW | ENUM_KW | EXTERN_KW | FALSE_KW | FN_KW | FOR_KW | IF_KW
| ASYNC_KW | IMPL_KW | IN_KW | LET_KW | LOOP_KW | MACRO_KW | MATCH_KW | MOD_KW | MOVE_KW
| AWAIT_KW | MUT_KW | PUB_KW | REF_KW | RETURN_KW | SELF_KW | STATIC_KW | STRUCT_KW | SUPER_KW
| BOX_KW | TRAIT_KW | TRUE_KW | TRY_KW | TYPE_KW | UNSAFE_KW | USE_KW | WHERE_KW | WHILE_KW
| BREAK_KW | YIELD_KW | AUTO_KW | DEFAULT_KW | EXISTENTIAL_KW | UNION_KW | RAW_KW
| CONST_KW | MACRO_RULES_KW => true,
| CONTINUE_KW _ => false,
| CRATE_KW }
| DYN_KW
| ELSE_KW
| ENUM_KW
| EXTERN_KW
| FALSE_KW
| FN_KW
| FOR_KW
| IF_KW
| IMPL_KW
| IN_KW
| LET_KW
| LOOP_KW
| MACRO_KW
| MATCH_KW
| MOD_KW
| MOVE_KW
| MUT_KW
| PUB_KW
| REF_KW
| RETURN_KW
| SELF_KW
| STATIC_KW
| STRUCT_KW
| SUPER_KW
| TRAIT_KW
| TRUE_KW
| TRY_KW
| TYPE_KW
| UNSAFE_KW
| USE_KW
| WHERE_KW
| WHILE_KW
| YIELD_KW
| AUTO_KW
| DEFAULT_KW
| EXISTENTIAL_KW
| UNION_KW
| RAW_KW
| MACRO_RULES_KW
)
} }
pub fn is_punct(self) -> bool { pub fn is_punct(self) -> bool {
matches!( match self {
self, SEMICOLON | COMMA | L_PAREN | R_PAREN | L_CURLY | R_CURLY | L_BRACK | R_BRACK
SEMICOLON | L_ANGLE | R_ANGLE | AT | POUND | TILDE | QUESTION | DOLLAR | AMP | PIPE | PLUS
| COMMA | STAR | SLASH | CARET | PERCENT | UNDERSCORE | DOT | DOT2 | DOT3 | DOT2EQ | COLON
| L_PAREN | COLON2 | EQ | EQ2 | FAT_ARROW | BANG | NEQ | MINUS | THIN_ARROW | LTEQ | GTEQ
| R_PAREN | PLUSEQ | MINUSEQ | PIPEEQ | AMPEQ | CARETEQ | SLASHEQ | STAREQ | PERCENTEQ | AMP2
| L_CURLY | PIPE2 | SHL | SHR | SHLEQ | SHREQ => true,
| R_CURLY _ => false,
| L_BRACK }
| R_BRACK
| L_ANGLE
| R_ANGLE
| AT
| POUND
| TILDE
| QUESTION
| DOLLAR
| AMP
| PIPE
| PLUS
| STAR
| SLASH
| CARET
| PERCENT
| UNDERSCORE
| DOT
| DOT2
| DOT3
| DOT2EQ
| COLON
| COLON2
| EQ
| EQ2
| FAT_ARROW
| BANG
| NEQ
| MINUS
| THIN_ARROW
| LTEQ
| GTEQ
| PLUSEQ
| MINUSEQ
| PIPEEQ
| AMPEQ
| CARETEQ
| SLASHEQ
| STAREQ
| PERCENTEQ
| AMP2
| PIPE2
| SHL
| SHR
| SHLEQ
| SHREQ
)
} }
pub fn is_literal(self) -> bool { pub fn is_literal(self) -> bool {
matches!(self, INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING) match self {
INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true,
_ => false,
}
} }
pub fn from_keyword(ident: &str) -> Option<SyntaxKind> { pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
let kw = match ident { let kw = match ident {

View file

@@ -36,8 +36,8 @@ pub(crate) fn incremental_reparse(
None None
} }
fn reparse_token<'node>( fn reparse_token(
root: &'node SyntaxNode, root: &SyntaxNode,
edit: &Indel, edit: &Indel,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let prev_token = root.covering_element(edit.delete).as_token()?.clone(); let prev_token = root.covering_element(edit.delete).as_token()?.clone();
@@ -84,8 +84,8 @@ fn reparse_token<'node>(
} }
} }
fn reparse_block<'node>( fn reparse_block(
root: &'node SyntaxNode, root: &SyntaxNode,
edit: &Indel, edit: &Indel,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
let (node, reparser) = find_reparsable_node(root, edit.delete)?; let (node, reparser) = find_reparsable_node(root, edit.delete)?;

View file

@@ -236,7 +236,7 @@ where
} }
}); });
dir_tests(&test_data_dir(), err_paths, "rast", |text, path| { dir_tests(&test_data_dir(), err_paths, "rast", |text, path| {
if let Ok(_) = f(text) { if f(text).is_ok() {
panic!("'{:?}' successfully parsed when it should have errored", path); panic!("'{:?}' successfully parsed when it should have errored", path);
} else { } else {
"ERROR\n".to_owned() "ERROR\n".to_owned()

View file

@@ -132,7 +132,7 @@ fn parse_changelog_line(s: &str) -> Option<PrInfo> {
return Some(PrInfo { kind, message: Some(message) }); return Some(PrInfo { kind, message: Some(message) });
} }
}; };
let res = PrInfo { kind, message }; let res = PrInfo { message, kind };
Some(res) Some(res)
} }
@@ -152,7 +152,7 @@ fn parse_title_line(s: &str) -> PrInfo {
PrKind::Skip => None, PrKind::Skip => None,
_ => Some(s[prefix.len()..].to_string()), _ => Some(s[prefix.len()..].to_string()),
}; };
return PrInfo { kind, message }; return PrInfo { message, kind };
} }
} }
PrInfo { kind: PrKind::Other, message: Some(s.to_string()) } PrInfo { kind: PrKind::Other, message: Some(s.to_string()) }

View file

@@ -33,7 +33,7 @@ fn check_code_formatting() {
let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
crate::ensure_rustfmt().unwrap(); crate::ensure_rustfmt().unwrap();
let res = cmd!("cargo fmt -- --check").run(); let res = cmd!("cargo fmt -- --check").run();
if !res.is_ok() { if res.is_err() {
let _ = cmd!("cargo fmt").run(); let _ = cmd!("cargo fmt").run();
} }
res.unwrap() res.unwrap()
@@ -244,19 +244,19 @@ Zlib OR Apache-2.0 OR MIT
.map(|it| it.trim()) .map(|it| it.trim())
.map(|it| it[r#""license":"#.len()..].trim_matches('"')) .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
licenses.sort(); licenses.sort_unstable();
licenses.dedup(); licenses.dedup();
if licenses != expected { if licenses != expected {
let mut diff = String::new(); let mut diff = String::new();
diff += &format!("New Licenses:\n"); diff += &"New Licenses:\n".to_string();
for &l in licenses.iter() { for &l in licenses.iter() {
if !expected.contains(&l) { if !expected.contains(&l) {
diff += &format!(" {}\n", l) diff += &format!(" {}\n", l)
} }
} }
diff += &format!("\nMissing Licenses:\n"); diff += &"\nMissing Licenses:\n".to_string();
for &l in expected.iter() { for &l in expected.iter() {
if !licenses.contains(&l) { if !licenses.contains(&l) {
diff += &format!(" {}\n", l) diff += &format!(" {}\n", l)