Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-27 05:23:24 +00:00)
Iterate over arrays directly, instead of going through a slice
commit bd04416aaa
parent 32be158630
4 changed files with 5 additions and 5 deletions
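For context on the change itself: since Rust 1.53, arrays implement IntoIterator by value, so a for loop can consume an array literal directly instead of borrowing it as a slice and destructuring references in the loop pattern. A minimal standalone sketch of the before/after shapes (the variable name `flag` is illustrative, not taken from the diffs below):

    fn main() {
        // Before: `&[false, true]` coerces to a slice, iteration yields
        // `&bool`, and the pattern needs a leading `&` to peel it off.
        for &flag in &[false, true] {
            println!("{flag}");
        }

        // After: the array itself is the iterator source; elements are
        // yielded by value, so the pattern is just `flag`.
        for flag in [false, true] {
            println!("{flag}");
        }
    }

The by-value form only works when moving (or copying) the elements out of the array is acceptable, which is the case for the small values iterated in every hunk below.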
@@ -1269,7 +1269,7 @@ impl<'a> InferenceContext<'a> {
         // that are not closures, then we type-check the closures. This is so
         // that we have more information about the types of arguments when we
         // type-check the functions. This isn't really the right way to do this.
-        for &check_closures in &[false, true] {
+        for check_closures in [false, true] {
             let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
             let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
             let expected_iter = expected_inputs

@@ -869,7 +869,7 @@ impl Test for T {{
     };
 
     // Enumerate some possible next siblings.
-    for next_sibling in &[
+    for next_sibling in [
         "",
         "fn other_fn() {}", // `const $0 fn` -> `const fn`
         "type OtherType = i32;",

@@ -185,14 +185,14 @@ mod tests {
         ];
 
         let index = LineIndex::new(text);
-        for &(offset, line, col) in &table {
+        for (offset, line, col) in table {
             assert_eq!(index.line_col(offset.into()), LineCol { line, col });
         }
 
         let text = "\nhello\nworld";
         let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
         let index = LineIndex::new(text);
-        for &(offset, line, col) in &table {
+        for (offset, line, col) in table {
             assert_eq!(index.line_col(offset.into()), LineCol { line, col });
         }
     }

@@ -156,7 +156,7 @@ fn parse_title_line(s: &str) -> PrInfo {
         ("minor: ", PrKind::Skip),
     ];
 
-    for &(prefix, kind) in &PREFIXES {
+    for (prefix, kind) in PREFIXES {
        if lower.starts_with(prefix) {
            let message = match &kind {
                PrKind::Skip => None,