mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 13:03:31 +00:00

fix clippy::single_char_pattern

commit 1f70886b15 (parent 7912e33ed6)
10 changed files with 22 additions and 22 deletions
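For context, `clippy::single_char_pattern` warns when a one-character string literal is passed to string-pattern APIs such as `replace`, `split_once`, or `split_inclusive`, where a `char` works just as well and avoids the substring-search path. A minimal sketch of the before/after shape this commit applies throughout (illustrative only, not code from the repository):

    fn main() {
        // Illustrative input, not taken from the repository's test data.
        let crate_str = "serde@1.0.0:CratesIo";

        // Before: a one-character &str pattern; clippy::single_char_pattern fires here.
        let _before = crate_str.split_once("@");

        // After: the equivalent char pattern, which is what this commit switches to.
        let (name, rest) = crate_str.split_once('@').unwrap();
        assert_eq!((name, rest), ("serde", "1.0.0:CratesIo"));

        // Multi-character patterns (e.g. "C:\\" or "[..]") are unaffected and stay as &str.
        let _ = rest.replace("1.0", "2.0");
    }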
@@ -394,9 +394,9 @@ struct FileMeta {
 }
 
 fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) {
-    if let Some((a, b)) = crate_str.split_once("@") {
-        let (version, origin) = match b.split_once(":") {
-            Some(("CratesIo", data)) => match data.split_once(",") {
+    if let Some((a, b)) = crate_str.split_once('@') {
+        let (version, origin) = match b.split_once(':') {
+            Some(("CratesIo", data)) => match data.split_once(',') {
                 Some((version, url)) => {
                     (version, CrateOrigin::CratesIo { repo: Some(url.to_owned()) })
                 }
@@ -178,7 +178,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
 
         if tree {
             let tree = format!("{:#?}", parse.syntax_node())
-                .split_inclusive("\n")
+                .split_inclusive('\n')
                 .map(|line| format!("// {}", line))
                 .collect::<String>();
             format_to!(expn_text, "\n{}", tree)
@@ -337,7 +337,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
         let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
             (self_param.name().unwrap().syntax().text_range(), "self".to_string())
         } else {
-            (node.value.text_range(), node.value.text().to_string().replace("\n", " "))
+            (node.value.text_range(), node.value.text().to_string().replace('\n', " "))
         };
         let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
         format_to!(
@@ -47,7 +47,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
 
 //FIXME: like, real html escaping
 fn html_escape(text: &str) -> String {
-    text.replace("<", "&lt;").replace(">", "&gt;")
+    text.replace('<', "&lt;").replace('>', "&gt;")
 }
 
 const STYLE: &str = "
@@ -57,7 +57,7 @@ fn remove_separators(acc: &mut Assists, literal: ast::IntNumber) -> Option<()> {
         AssistId("reformat_number_literal", AssistKind::RefactorInline),
         "Remove digit separators",
         range,
-        |builder| builder.replace(range, literal.text().replace("_", "")),
+        |builder| builder.replace(range, literal.text().replace('_', "")),
     )
 }
 
@@ -75,7 +75,7 @@ fn generate_lint_descriptor(buf: &mut String) {
                 format!("lint group for: {}", lints.trim()).into(),
                 lints
                     .split_ascii_whitespace()
-                    .map(|s| s.trim().trim_matches(',').replace("-", "_"))
+                    .map(|s| s.trim().trim_matches(',').replace('-', "_"))
                     .collect(),
             )
         });
@@ -85,7 +85,7 @@ fn generate_lint_descriptor(buf: &mut String) {
         .sorted_by(|(ident, ..), (ident2, ..)| ident.cmp(ident2))
         .collect::<Vec<_>>();
     for (name, description, ..) in &lints {
-        push_lint_completion(buf, &name.replace("-", "_"), description);
+        push_lint_completion(buf, &name.replace('-', "_"), description);
    }
     buf.push_str("];\n");
     buf.push_str(r#"pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &["#);
@@ -96,7 +96,7 @@ fn generate_lint_descriptor(buf: &mut String) {
                push_lint_group(buf, name, description, &Vec::new());
                continue;
            }
-            push_lint_group(buf, &name.replace("-", "_"), description, children);
+            push_lint_group(buf, &name.replace('-', "_"), description, children);
        }
    }
    buf.push('\n');
@@ -124,7 +124,7 @@ fn generate_lint_descriptor(buf: &mut String) {
                 format!("lint group for: {}", lints.trim()).into(),
                 lints
                     .split_ascii_whitespace()
-                    .map(|s| s.trim().trim_matches(',').replace("-", "_"))
+                    .map(|s| s.trim().trim_matches(',').replace('-', "_"))
                     .collect(),
             )
         },
@@ -136,14 +136,14 @@ fn generate_lint_descriptor(buf: &mut String) {
         .collect::<Vec<_>>();
 
     for (name, description, ..) in &lints_rustdoc {
-        push_lint_completion(buf, &name.replace("-", "_"), description)
+        push_lint_completion(buf, &name.replace('-', "_"), description)
     }
     buf.push_str("];\n");
 
     buf.push_str(r#"pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &["#);
     for (name, description, children) in &lints_rustdoc {
         if !children.is_empty() {
-            push_lint_group(buf, &name.replace("-", "_"), description, children);
+            push_lint_group(buf, &name.replace('-', "_"), description, children);
         }
     }
     buf.push('\n');
@@ -159,7 +159,7 @@ fn generate_feature_descriptor(buf: &mut String, src_dir: &Path) {
             path.extension().unwrap_or_default().to_str().unwrap_or_default() == "md"
         })
         .map(|path| {
-            let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
+            let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace('-', "_");
             let doc = fs::read_to_string(path).unwrap();
             (feature_ident, doc)
         })
@@ -1238,7 +1238,7 @@ fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json:
     let map = fields
         .iter()
         .map(|(field, ty, doc, default)| {
-            let name = field.replace("_", ".");
+            let name = field.replace('_', ".");
             let name = format!("rust-analyzer.{}", name);
             let props = field_props(field, ty, doc, default);
             (name, props)
@@ -1385,7 +1385,7 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
     fields
         .iter()
         .map(|(field, _ty, doc, default)| {
-            let name = format!("rust-analyzer.{}", field.replace("_", "."));
+            let name = format!("rust-analyzer.{}", field.replace('_', "."));
             let doc = doc_comment_to_string(*doc);
             if default.contains('\n') {
                 format!(
@@ -1428,7 +1428,7 @@ mod tests {
             .trim_start_matches('{')
             .trim_end_matches('}')
             .replace("  ", " ")
-            .replace("\n", "\n ")
+            .replace('\n', "\n ")
             .trim_start_matches('\n')
             .trim_end()
             .to_string();
@@ -972,7 +972,7 @@ fn main() {}
             "documentChanges": [
               {
                 "textDocument": {
-                  "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace("\\", "/")),
+                  "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")),
                   "version": null
                 },
                 "edits": [
@@ -1029,7 +1029,7 @@ fn main() {}
             "documentChanges": [
               {
                 "textDocument": {
-                  "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace("\\", "/")),
+                  "uri": format!("file://{}", tmp_dir_path.join("src").join("lib.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")),
                   "version": null
                 },
                 "edits": [
@@ -374,8 +374,8 @@ fn lines_match(expected: &str, actual: &str) -> bool {
     // Let's not deal with / vs \ (windows...)
     // First replace backslash-escaped backslashes with forward slashes
     // which can occur in, for example, JSON output
-    let expected = expected.replace(r"\\", "/").replace(r"\", "/");
-    let mut actual: &str = &actual.replace(r"\\", "/").replace(r"\", "/");
+    let expected = expected.replace(r"\\", "/").replace('\\', "/");
+    let mut actual: &str = &actual.replace(r"\\", "/").replace('\\', "/");
     for (i, part) in expected.split("[..]").enumerate() {
         match actual.find(part) {
             Some(j) => {
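One detail in the hunk above worth spelling out: the raw string r"\" denotes a single backslash, so its char equivalent is '\\', while the two-backslash pattern r"\\" stays a &str because it is two characters long. A standalone sketch (not code from the repository) showing the two single-backslash forms are interchangeable:

    fn main() {
        // Hypothetical Windows-style path, just for illustration.
        let path = r"C:\dir\file.rs";

        // r"\" (raw string) and '\\' (char literal) are the same single-backslash pattern.
        assert_eq!(path.replace(r"\", "/"), path.replace('\\', "/"));

        // The escaped double backslash is two characters, so it must remain a string pattern.
        assert_eq!(r"\\".len(), 2);
    }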
@@ -307,7 +307,7 @@ impl ast::IntNumber {
 
     pub fn value(&self) -> Option<u128> {
         let (_, text, _) = self.split_into_parts();
-        let value = u128::from_str_radix(&text.replace("_", ""), self.radix() as u32).ok()?;
+        let value = u128::from_str_radix(&text.replace('_', ""), self.radix() as u32).ok()?;
         Some(value)
     }
 
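A hedged note on catching regressions of this kind: clippy::single_char_pattern is warn-by-default (in clippy's perf group, if memory serves), so a plain cargo clippy run over the workspace reports remaining single-character string patterns; it can also be promoted to an error from source. A minimal sketch, assuming a standard Cargo setup:

    // Denying the lint at the crate root turns any regression into a hard error;
    // this mirrors passing `-D clippy::single_char_pattern` to cargo clippy on the CLI.
    #![deny(clippy::single_char_pattern)]

    fn main() {
        let text = "a-b-c";
        // Under the attribute above, the commented-out line would fail `cargo clippy`:
        // let _ = text.replace("-", "_");
        let _ = text.replace('-', "_");
    }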