Mirror of https://github.com/getzola/zola
Fix clippy warnings (#744)
Clippy is returning some warnings. Let's fix or explicitly ignore them. In particular:

- In `components/imageproc/src/lib.rs`, we implement `Hash` explicitly but derive `PartialEq`. We need to maintain the property that two keys being equal implies the hashes of those two keys are equal. Our `Hash` implementations preserve this, so we'll explicitly ignore the warnings.
- In `components/site/src/lib.rs`, we were calling `.into()` on some values that are already of the correct type.
- In `components/site/src/lib.rs`, we were using `.map(|x| *x)` in iterator chains to remove a level of indirection; we can instead say `.copied()` (introduced in Rust v1.36) or `.cloned()`. Using `.copied` here is better from a type-checking point of view, but we'll use `.cloned` for now as Rust v1.36 was only recently released.
- In `components/templates/src/filters.rs` and `components/utils/src/site.rs`, we were taking `HashMap`s as function arguments but not generically accepting alternate `Hasher` implementations.
- In `src/cmd/check.rs`, we use `env::current_dir()` as a default value, but our use of `unwrap_or` meant that we would always retrieve the current directory even when not needed.
- In `components/errors/src/lib.rs`, we can use `if let` rather than `match`.
- In `components/library/src/content/page.rs`, we can collapse a nested conditional into `else if let ...`.
- In `components/library/src/sorting.rs`, a function takes `&&Page` arguments. Clippy warns about this for efficiency reasons, but we're doing it here to match a particular sorting API, so we'll explicitly ignore the warning.
parent 666de5160e
commit 46ee256ba4
8 changed files with 31 additions and 20 deletions
components/errors/src/lib.rs
@@ -31,10 +31,9 @@ impl StdError for Error {
     fn source(&self) -> Option<&(dyn StdError + 'static)> {
         let mut source = self.source.as_ref().map(|c| &**c);
         if source.is_none() {
-            match self.kind {
-                ErrorKind::Tera(ref err) => source = err.source(),
-                _ => (),
-            };
+            if let ErrorKind::Tera(ref err) = self.kind {
+                source = err.source();
+            }
         }
 
         source
components/imageproc/src/lib.rs
@@ -129,6 +129,7 @@ impl From<ResizeOp> for u8 {
     }
 }
 
+#[allow(clippy::derive_hash_xor_eq)]
 impl Hash for ResizeOp {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         hasher.write_u8(u8::from(*self));
@@ -194,6 +195,7 @@ impl Format {
     }
 }
 
+#[allow(clippy::derive_hash_xor_eq)]
 impl Hash for Format {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         use Format::*;
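For context on why the `#[allow(clippy::derive_hash_xor_eq)]` is safe here: the lint guards the invariant that `k1 == k2` implies `hash(k1) == hash(k2)`, which a hand-written `Hash` next to a derived `PartialEq` could silently break. Below is a minimal sketch of the same pattern using a hypothetical `Mode` enum (not the real `ResizeOp`/`Format` types); equal values convert to the same `u8`, so equal keys still hash equally.

```rust
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for ResizeOp/Format: PartialEq is derived, Hash is
// written by hand, so clippy raises derive_hash_xor_eq.
#[derive(Clone, Copy, PartialEq, Eq)]
enum Mode {
    Fit,
    Fill,
}

impl From<Mode> for u8 {
    fn from(m: Mode) -> u8 {
        match m {
            Mode::Fit => 1,
            Mode::Fill => 2,
        }
    }
}

// Equal values convert to the same u8, so equal keys hash equally, which is
// exactly the property the lint protects; the allow is therefore sound.
#[allow(clippy::derive_hash_xor_eq)]
impl Hash for Mode {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        hasher.write_u8(u8::from(*self));
    }
}

fn main() {
    let mut set = HashSet::new();
    set.insert(Mode::Fit);
    // Lookups only work because equality and hashing agree.
    assert!(set.contains(&Mode::Fit));
    assert!(!set.contains(&Mode::Fill));
}
```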
components/library/src/content/page.rs
@@ -171,12 +171,10 @@ impl Page {
                 } else {
                     slugify(&page.file.name)
                 }
+            } else if let Some(slug) = slug_from_dated_filename {
+                slugify(&slug)
             } else {
-                if let Some(slug) = slug_from_dated_filename {
-                    slugify(&slug)
-                } else {
-                    slugify(&page.file.name)
-                }
+                slugify(&page.file.name)
             }
         };
 
components/library/src/sorting.rs
@@ -8,6 +8,7 @@ use content::Page;
 
 /// Used by the RSS feed
 /// There to not have to import sorting stuff in the site crate
+#[allow(clippy::trivially_copy_pass_by_ref)]
 pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
     let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
     if ord == Ordering::Equal {
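The `&&Page` signature looks odd but matches how the comparator is used: sorting a slice of `&Page` means `sort_by` hands the comparator references to the elements, i.e. `&&Page`. A small sketch of the same shape, with a hypothetical `Post` struct standing in for `Page`:

```rust
use std::cmp::Ordering;

// Hypothetical stand-in for Page with just a date field.
struct Post {
    date: i64,
}

// Mirrors the zola signature: sorting a Vec<&Post> gives the comparator &&Post.
#[allow(clippy::trivially_copy_pass_by_ref)]
fn sort_by_date(a: &&Post, b: &&Post) -> Ordering {
    b.date.cmp(&a.date) // newest first
}

fn main() {
    let (p1, p2) = (Post { date: 1 }, Post { date: 2 });
    let mut refs: Vec<&Post> = vec![&p1, &p2];
    // sort_by passes &T for a slice of T; here T = &Post, hence &&Post.
    refs.sort_by(sort_by_date);
    assert_eq!(refs[0].date, 2);
}
```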
components/site/src/lib.rs
@@ -323,7 +323,7 @@ impl Site {
             })
             .collect::<Vec<_>>()
             .join("\n");
-        Err(Error { kind: ErrorKind::Msg(msg.into()), source: None })
+        Err(Error { kind: ErrorKind::Msg(msg), source: None })
     }
 
     pub fn check_external_links(&self) -> Result<()> {
@@ -352,7 +352,7 @@ impl Site {
         let pool = rayon::ThreadPoolBuilder::new()
             .num_threads(threads)
             .build()
-            .map_err(|e| Error { kind: ErrorKind::Msg(e.to_string().into()), source: None })?;
+            .map_err(|e| Error { kind: ErrorKind::Msg(e.to_string()), source: None })?;
 
         let errors: Vec<_> = pool.install(|| {
             all_links
@@ -383,7 +383,7 @@ impl Site {
             })
             .collect::<Vec<_>>()
             .join("\n");
-        Err(Error { kind: ErrorKind::Msg(msg.into()), source: None })
+        Err(Error { kind: ErrorKind::Msg(msg), source: None })
     }
 
     /// Insert a default index section for each language if necessary so we don't need to create
@@ -699,7 +699,7 @@ impl Site {
                 .pages_values()
                 .iter()
                 .filter(|p| p.lang == self.config.default_language)
-                .map(|p| *p)
+                .cloned()
                 .collect()
         } else {
             library.pages_values()
@@ -712,7 +712,7 @@ impl Site {
                 continue;
             }
             let pages =
-                library.pages_values().iter().filter(|p| p.lang == lang.code).map(|p| *p).collect();
+                library.pages_values().iter().filter(|p| p.lang == lang.code).cloned().collect();
             self.render_rss_feed(pages, Some(&PathBuf::from(lang.code.clone())))?;
         }
 
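On the `.map(|p| *p)` → `.cloned()` change: iterating a `Vec<&Page>` yields `&&Page`, and all of `.map(|p| *p)`, `.cloned()`, and `.copied()` strip one level of indirection. `.copied()` is the stricter choice because it requires the item to be `Copy` and so can never hide an expensive clone, but it only landed in Rust 1.36, hence `.cloned()` for now. A tiny sketch with integers standing in for pages:

```rust
fn main() {
    let nums = vec![1, 2, 3];
    // Vec<&i32> plays the role of the Vec<&Page> returned by pages_values().
    let pages: Vec<&i32> = nums.iter().collect();

    // Iterating the Vec<&i32> yields &&i32; each call removes one level of indirection.
    let a: Vec<&i32> = pages.iter().map(|p| *p).collect(); // what the old code did
    let b: Vec<&i32> = pages.iter().cloned().collect(); // what this commit uses
    let c: Vec<&i32> = pages.iter().copied().collect(); // Rust 1.36+, requires Copy

    assert_eq!(a, b);
    assert_eq!(b, c);
}
```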
components/templates/src/filters.rs
@@ -1,10 +1,14 @@
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 
 use base64::{decode, encode};
 use pulldown_cmark as cmark;
 use tera::{to_value, Result as TeraResult, Value};
 
-pub fn markdown(value: &Value, args: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn markdown<S: BuildHasher>(
+    value: &Value,
+    args: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("markdown", "value", String, value);
     let inline = match args.get("inline") {
         Some(val) => try_get_value!("markdown", "inline", bool, val),
@@ -30,12 +34,18 @@ pub fn markdown(value: &Value, args: &HashMap<String, Value>) -> TeraResult<Valu
     Ok(to_value(&html).unwrap())
 }
 
-pub fn base64_encode(value: &Value, _: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn base64_encode<S: BuildHasher>(
+    value: &Value,
+    _: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("base64_encode", "value", String, value);
     Ok(to_value(&encode(s.as_bytes())).unwrap())
 }
 
-pub fn base64_decode(value: &Value, _: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn base64_decode<S: BuildHasher>(
+    value: &Value,
+    _: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("base64_decode", "value", String, value);
     Ok(to_value(&String::from_utf8(decode(s.as_bytes()).unwrap()).unwrap()).unwrap())
 }
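The motivation for the `S: BuildHasher` parameter is that `&HashMap<String, Value>` only accepts maps built with the default `RandomState` hasher; making the function generic over the hash state accepts any `HashMap`. A sketch with a hypothetical `lookup` helper (not the actual filter) using only standard-library hashers:

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{BuildHasher, BuildHasherDefault};

// Hypothetical helper in the spirit of the filter changes: generic over the
// hasher, so it also works for HashMaps with non-default hash state.
fn lookup<S: BuildHasher>(args: &HashMap<String, String, S>, key: &str) -> Option<String> {
    args.get(key).cloned()
}

fn main() {
    // Default hasher (RandomState).
    let mut default_map = HashMap::new();
    default_map.insert("inline".to_string(), "true".to_string());

    // A map with an explicitly chosen hasher; a non-generic
    // `&HashMap<String, String>` parameter would reject this one.
    let mut custom_map: HashMap<String, String, BuildHasherDefault<DefaultHasher>> =
        HashMap::default();
    custom_map.insert("inline".to_string(), "false".to_string());

    assert_eq!(lookup(&default_map, "inline").as_deref(), Some("true"));
    assert_eq!(lookup(&custom_map, "inline").as_deref(), Some("false"));
}
```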
components/utils/src/site.rs
@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 use unicode_segmentation::UnicodeSegmentation;
 
 use errors::Result;
@@ -23,9 +24,9 @@ pub struct ResolvedInternalLink {
 
 /// Resolves an internal link (of the `@/posts/something.md#hey` sort) to its absolute link and
 /// returns the path + anchor as well
-pub fn resolve_internal_link(
+pub fn resolve_internal_link<S: BuildHasher>(
     link: &str,
-    permalinks: &HashMap<String, String>,
+    permalinks: &HashMap<String, String, S>,
 ) -> Result<ResolvedInternalLink> {
     // First we remove the ./ since that's zola specific
     let clean_link = link.replacen("@/", "", 1);
src/cmd/check.rs
@@ -7,7 +7,7 @@ use site::Site;
 use console;
 
 pub fn check(config_file: &str, base_path: Option<&str>, base_url: Option<&str>) -> Result<()> {
-    let bp = base_path.map(PathBuf::from).unwrap_or(env::current_dir().unwrap());
+    let bp = base_path.map(PathBuf::from).unwrap_or_else(|| env::current_dir().unwrap());
     let mut site = Site::new(bp, config_file)?;
     // Force the checking of external links
     site.config.check_external_links = true;
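On the `unwrap_or` → `unwrap_or_else` change: `unwrap_or(expr)` evaluates its argument eagerly, so `env::current_dir()` was called even when a base path was supplied, whereas `unwrap_or_else` takes a closure that only runs on `None`. A minimal sketch of the difference (the `resolve` helper is hypothetical):

```rust
use std::env;
use std::path::PathBuf;

// Hypothetical helper mirroring the check command's logic.
fn resolve(base_path: Option<&str>) -> PathBuf {
    // Eager: with unwrap_or, env::current_dir() would run even when
    // base_path is Some(..):
    //   base_path.map(PathBuf::from).unwrap_or(env::current_dir().unwrap())

    // Lazy: the closure only runs when base_path is None.
    base_path.map(PathBuf::from).unwrap_or_else(|| env::current_dir().unwrap())
}

fn main() {
    println!("{}", resolve(Some("/tmp")).display());
    println!("{}", resolve(None).display());
}
```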