Procimage refactor (#2086)
* Refactor image proc Closes #2066 * Add colocated_path to shortcodes Closes #1793
|
@ -12,6 +12,7 @@ This will error if 2 values are set
|
|||
- Remove built-ins shortcodes
|
||||
- Having a file called `index.md` in a folder with a `_index.md` is now an error
|
||||
- Ignore temp files from vim/emacs/macos/etc as well as files without extensions when getting colocated assets
|
||||
- Now integrates the file stem of the original file into the processed image's filename: `{stem}.{hash}.{extension}`
|
||||
|
||||
### Other
|
||||
|
||||
|
@ -31,7 +32,7 @@ This will error if 2 values are set
|
|||
- Enable locale date formatting for the Tera `date` filter
|
||||
- Cachebust fingerprint is now only 20 chars long
|
||||
- Add `text` alias for plain text highlighting (before, only `txt` was used)
|
||||
|
||||
- Adds a new field to `page`: `colocated_path` that points to the folder of the current file being rendered if it's a colocated folder. None otherwise.
|
||||
|
||||
## 0.16.1 (2022-08-14)
|
||||
|
||||
|
|
|
@ -37,6 +37,9 @@ pub struct FileInfo {
|
|||
pub name: String,
|
||||
/// The .md path, starting from the content directory, with `/` slashes
|
||||
pub relative: String,
|
||||
/// The path from the content directory to the colocated directory. Ends with a `/` when set.
|
||||
/// Only filled if it is a colocated directory, None otherwise.
|
||||
pub colocated_path: Option<String>,
|
||||
/// Path of the directory containing the .md file
|
||||
pub parent: PathBuf,
|
||||
/// Path of the grand parent directory for that file. Only used in sections to find subsections.
|
||||
|
@ -63,11 +66,17 @@ impl FileInfo {
|
|||
} else {
|
||||
format!("{}.md", name)
|
||||
};
|
||||
let mut colocated_path = None;
|
||||
|
||||
// If we have a folder with an asset, don't consider it as a component
|
||||
// Splitting on `.` as we might have a language so it isn't *only* index but also index.fr
|
||||
// etc
|
||||
if !components.is_empty() && name.split('.').collect::<Vec<_>>()[0] == "index" {
|
||||
colocated_path = Some({
|
||||
let mut val = components.join("/");
|
||||
val.push('/');
|
||||
val
|
||||
});
|
||||
components.pop();
|
||||
// also set parent_path to grandparent instead
|
||||
parent = parent.parent().unwrap().to_path_buf();
|
||||
|
@ -83,6 +92,7 @@ impl FileInfo {
|
|||
name,
|
||||
components,
|
||||
relative,
|
||||
colocated_path,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -108,6 +118,7 @@ impl FileInfo {
|
|||
name,
|
||||
components,
|
||||
relative,
|
||||
colocated_path: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -171,6 +182,7 @@ mod tests {
|
|||
&PathBuf::new(),
|
||||
);
|
||||
assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);
|
||||
assert_eq!(file.colocated_path, Some("posts/tutorials/python/".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -211,6 +223,7 @@ mod tests {
|
|||
&PathBuf::new(),
|
||||
);
|
||||
assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);
|
||||
assert_eq!(file.colocated_path, Some("posts/tutorials/python/".to_string()));
|
||||
let res = file.find_language("en", &["fr"]);
|
||||
assert!(res.is_ok());
|
||||
assert_eq!(res.unwrap(), "fr");
|
||||
|
|
|
@ -42,6 +42,7 @@ fn find_backlinks<'a>(relative_path: &str, library: &'a Library) -> Vec<BackLink
|
|||
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
|
||||
pub struct SerializingPage<'a> {
|
||||
relative_path: &'a str,
|
||||
colocated_path: &'a Option<String>,
|
||||
content: &'a str,
|
||||
permalink: &'a str,
|
||||
slug: &'a str,
|
||||
|
@ -104,6 +105,7 @@ impl<'a> SerializingPage<'a> {
|
|||
|
||||
Self {
|
||||
relative_path: &page.file.relative,
|
||||
colocated_path: &page.file.colocated_path,
|
||||
ancestors: &page.ancestors,
|
||||
content: &page.content,
|
||||
permalink: &page.permalink,
|
||||
|
@ -137,6 +139,7 @@ impl<'a> SerializingPage<'a> {
|
|||
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
|
||||
pub struct SerializingSection<'a> {
|
||||
relative_path: &'a str,
|
||||
colocated_path: &'a Option<String>,
|
||||
content: &'a str,
|
||||
permalink: &'a str,
|
||||
draft: bool,
|
||||
|
@ -198,6 +201,7 @@ impl<'a> SerializingSection<'a> {
|
|||
|
||||
Self {
|
||||
relative_path: §ion.file.relative,
|
||||
colocated_path: §ion.file.colocated_path,
|
||||
ancestors: §ion.ancestors,
|
||||
draft: section.meta.draft,
|
||||
content: §ion.content,
|
||||
|
|
66
components/imageproc/src/format.rs
Normal file
|
@ -0,0 +1,66 @@
|
|||
use errors::{anyhow, Result};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
/// Default JPEG quality (percent) used when the caller does not pass one.
const DEFAULT_Q_JPG: u8 = 75;

/// Thumbnail image format
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Format {
    /// JPEG, The `u8` argument is JPEG quality (in percent).
    Jpeg(u8),
    /// PNG
    Png,
    /// WebP, The `u8` argument is WebP quality (in percent), None meaning lossless.
    WebP(Option<u8>),
}
|
||||
|
||||
impl Format {
|
||||
pub fn from_args(is_lossy: bool, format: &str, quality: Option<u8>) -> Result<Format> {
|
||||
use Format::*;
|
||||
if let Some(quality) = quality {
|
||||
assert!(quality > 0 && quality <= 100, "Quality must be within the range [1; 100]");
|
||||
}
|
||||
let jpg_quality = quality.unwrap_or(DEFAULT_Q_JPG);
|
||||
match format {
|
||||
"auto" => {
|
||||
if is_lossy {
|
||||
Ok(Jpeg(jpg_quality))
|
||||
} else {
|
||||
Ok(Png)
|
||||
}
|
||||
}
|
||||
"jpeg" | "jpg" => Ok(Jpeg(jpg_quality)),
|
||||
"png" => Ok(Png),
|
||||
"webp" => Ok(WebP(quality)),
|
||||
_ => Err(anyhow!("Invalid image format: {}", format)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extension(&self) -> &str {
|
||||
// Kept in sync with RESIZED_FILENAME and op_filename
|
||||
use Format::*;
|
||||
|
||||
match *self {
|
||||
Png => "png",
|
||||
Jpeg(_) => "jpg",
|
||||
WebP(_) => "webp",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::derive_hash_xor_eq)]
|
||||
impl Hash for Format {
|
||||
fn hash<H: Hasher>(&self, hasher: &mut H) {
|
||||
use Format::*;
|
||||
|
||||
let q = match *self {
|
||||
Png => 0,
|
||||
Jpeg(q) => 1001 + q as u16,
|
||||
WebP(None) => 2000,
|
||||
WebP(Some(q)) => 2001 + q as u16,
|
||||
};
|
||||
|
||||
hasher.write_u16(q);
|
||||
hasher.write(self.extension().as_bytes());
|
||||
}
|
||||
}
|
55
components/imageproc/src/helpers.rs
Normal file
|
@ -0,0 +1,55 @@
|
|||
use std::borrow::Cow;
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::path::Path;
|
||||
|
||||
use crate::format::Format;
|
||||
use crate::ResizeOperation;
|
||||
use libs::image::DynamicImage;
|
||||
|
||||
/// Apply image rotation based on EXIF data
|
||||
/// Returns `None` if no transformation is needed
|
||||
pub fn fix_orientation(img: &DynamicImage, path: &Path) -> Option<DynamicImage> {
|
||||
let file = std::fs::File::open(path).ok()?;
|
||||
let mut buf_reader = std::io::BufReader::new(&file);
|
||||
let exif_reader = exif::Reader::new();
|
||||
let exif = exif_reader.read_from_container(&mut buf_reader).ok()?;
|
||||
let orientation =
|
||||
exif.get_field(exif::Tag::Orientation, exif::In::PRIMARY)?.value.get_uint(0)?;
|
||||
match orientation {
|
||||
// Values are taken from the page 30 of
|
||||
// https://www.cipa.jp/std/documents/e/DC-008-2012_E.pdf
|
||||
// For more details check http://sylvana.net/jpegcrop/exif_orientation.html
|
||||
1 => None,
|
||||
2 => Some(img.fliph()),
|
||||
3 => Some(img.rotate180()),
|
||||
4 => Some(img.flipv()),
|
||||
5 => Some(img.fliph().rotate270()),
|
||||
6 => Some(img.rotate90()),
|
||||
7 => Some(img.fliph().rotate90()),
|
||||
8 => Some(img.rotate270()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// We only use the input_path to get the file stem.
|
||||
/// Hashing the resolved `input_path` would include the absolute path to the image
|
||||
/// with all filesystem components.
|
||||
pub fn get_processed_filename(
|
||||
input_path: &Path,
|
||||
input_src: &str,
|
||||
op: &ResizeOperation,
|
||||
format: &Format,
|
||||
) -> String {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
hasher.write(input_src.as_ref());
|
||||
op.hash(&mut hasher);
|
||||
format.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
let filename = input_path
|
||||
.file_stem()
|
||||
.map(|s| s.to_string_lossy())
|
||||
.unwrap_or_else(|| Cow::Borrowed("unknown"));
|
||||
|
||||
format!("{}.{:016x}.{}", filename, hash, format.extension())
|
||||
}
|
|
@ -1,645 +1,10 @@
|
|||
use std::collections::hash_map::Entry as HEntry;
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs::{self, File};
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::{
|
||||
collections::hash_map::DefaultHasher,
|
||||
io::{BufWriter, Write},
|
||||
};
|
||||
|
||||
use image::error::ImageResult;
|
||||
use image::io::Reader as ImgReader;
|
||||
use image::{imageops::FilterType, EncodableLayout};
|
||||
use image::{ImageFormat, ImageOutputFormat};
|
||||
use libs::image::DynamicImage;
|
||||
use libs::{image, once_cell, rayon, regex, svg_metadata, webp};
|
||||
use once_cell::sync::Lazy;
|
||||
use rayon::prelude::*;
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use svg_metadata::Metadata as SvgMetadata;
|
||||
|
||||
use config::Config;
|
||||
use errors::{anyhow, Context, Error, Result};
|
||||
use utils::fs as ufs;
|
||||
|
||||
static RESIZED_SUBDIR: &str = "processed_images";
|
||||
const DEFAULT_Q_JPG: u8 = 75;
|
||||
|
||||
static RESIZED_FILENAME: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.](jpg|png|webp)"#).unwrap());
|
||||
|
||||
/// Size and format read cheaply with `image`'s `Reader`.
|
||||
#[derive(Debug)]
|
||||
struct ImageMeta {
|
||||
size: (u32, u32),
|
||||
format: Option<ImageFormat>,
|
||||
}
|
||||
|
||||
impl ImageMeta {
|
||||
fn read(path: &Path) -> ImageResult<Self> {
|
||||
let reader = ImgReader::open(path).and_then(ImgReader::with_guessed_format)?;
|
||||
let format = reader.format();
|
||||
let size = reader.into_dimensions()?;
|
||||
|
||||
Ok(Self { size, format })
|
||||
}
|
||||
|
||||
fn is_lossy(&self) -> bool {
|
||||
use ImageFormat::*;
|
||||
|
||||
// We assume lossy by default / if unknown format
|
||||
let format = self.format.unwrap_or(Jpeg);
|
||||
!matches!(format, Png | Pnm | Tiff | Tga | Bmp | Ico | Hdr | Farbfeld)
|
||||
}
|
||||
}
|
||||
|
||||
/// De-serialized & sanitized arguments of `resize_image`
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ResizeArgs {
|
||||
/// A simple scale operation that doesn't take aspect ratio into account
|
||||
Scale(u32, u32),
|
||||
/// Scales the image to a specified width with height computed such
|
||||
/// that aspect ratio is preserved
|
||||
FitWidth(u32),
|
||||
/// Scales the image to a specified height with width computed such
|
||||
/// that aspect ratio is preserved
|
||||
FitHeight(u32),
|
||||
/// If the image is larger than the specified width or height, scales the image such
|
||||
/// that it fits within the specified width and height preserving aspect ratio.
|
||||
/// Either dimension may end up being smaller, but never larger than specified.
|
||||
Fit(u32, u32),
|
||||
/// Scales the image such that it fills the specified width and height.
|
||||
/// Output will always have the exact dimensions specified.
|
||||
/// The part of the image that doesn't fit in the thumbnail due to differing
|
||||
/// aspect ratio will be cropped away, if any.
|
||||
Fill(u32, u32),
|
||||
}
|
||||
|
||||
impl ResizeArgs {
|
||||
pub fn from_args(op: &str, width: Option<u32>, height: Option<u32>) -> Result<Self> {
|
||||
use ResizeArgs::*;
|
||||
|
||||
// Validate args:
|
||||
match op {
|
||||
"fit_width" => {
|
||||
if width.is_none() {
|
||||
return Err(anyhow!("op=\"fit_width\" requires a `width` argument"));
|
||||
}
|
||||
}
|
||||
"fit_height" => {
|
||||
if height.is_none() {
|
||||
return Err(anyhow!("op=\"fit_height\" requires a `height` argument"));
|
||||
}
|
||||
}
|
||||
"scale" | "fit" | "fill" => {
|
||||
if width.is_none() || height.is_none() {
|
||||
return Err(anyhow!("op={} requires a `width` and `height` argument", op));
|
||||
}
|
||||
}
|
||||
_ => return Err(anyhow!("Invalid image resize operation: {}", op)),
|
||||
};
|
||||
|
||||
Ok(match op {
|
||||
"scale" => Scale(width.unwrap(), height.unwrap()),
|
||||
"fit_width" => FitWidth(width.unwrap()),
|
||||
"fit_height" => FitHeight(height.unwrap()),
|
||||
"fit" => Fit(width.unwrap(), height.unwrap()),
|
||||
"fill" => Fill(width.unwrap(), height.unwrap()),
|
||||
_ => unreachable!(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Contains image crop/resize instructions for use by `Processor`
|
||||
///
|
||||
/// The `Processor` applies `crop` first, if any, and then `resize`, if any.
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Default, Debug)]
|
||||
struct ResizeOp {
|
||||
crop: Option<(u32, u32, u32, u32)>, // x, y, w, h
|
||||
resize: Option<(u32, u32)>, // w, h
|
||||
}
|
||||
|
||||
impl ResizeOp {
|
||||
fn new(args: ResizeArgs, (orig_w, orig_h): (u32, u32)) -> Self {
|
||||
use ResizeArgs::*;
|
||||
|
||||
let res = ResizeOp::default();
|
||||
|
||||
match args {
|
||||
Scale(w, h) => res.resize((w, h)),
|
||||
FitWidth(w) => {
|
||||
let h = (orig_h as u64 * w as u64) / orig_w as u64;
|
||||
res.resize((w, h as u32))
|
||||
}
|
||||
FitHeight(h) => {
|
||||
let w = (orig_w as u64 * h as u64) / orig_h as u64;
|
||||
res.resize((w as u32, h))
|
||||
}
|
||||
Fit(w, h) => {
|
||||
if orig_w <= w && orig_h <= h {
|
||||
return res; // ie. no-op
|
||||
}
|
||||
|
||||
let orig_w_h = orig_w as u64 * h as u64;
|
||||
let orig_h_w = orig_h as u64 * w as u64;
|
||||
|
||||
if orig_w_h > orig_h_w {
|
||||
Self::new(FitWidth(w), (orig_w, orig_h))
|
||||
} else {
|
||||
Self::new(FitHeight(h), (orig_w, orig_h))
|
||||
}
|
||||
}
|
||||
Fill(w, h) => {
|
||||
const RATIO_EPSILLION: f32 = 0.1;
|
||||
|
||||
let factor_w = orig_w as f32 / w as f32;
|
||||
let factor_h = orig_h as f32 / h as f32;
|
||||
|
||||
if (factor_w - factor_h).abs() <= RATIO_EPSILLION {
|
||||
// If the horizontal and vertical factor is very similar,
|
||||
// that means the aspect is similar enough that there's not much point
|
||||
// in cropping, so just perform a simple scale in this case.
|
||||
res.resize((w, h))
|
||||
} else {
|
||||
// We perform the fill such that a crop is performed first
|
||||
// and then resize_exact can be used, which should be cheaper than
|
||||
// resizing and then cropping (smaller number of pixels to resize).
|
||||
let (crop_w, crop_h) = if factor_w < factor_h {
|
||||
(orig_w, (factor_w * h as f32).round() as u32)
|
||||
} else {
|
||||
((factor_h * w as f32).round() as u32, orig_h)
|
||||
};
|
||||
|
||||
let (offset_w, offset_h) = if factor_w < factor_h {
|
||||
(0, (orig_h - crop_h) / 2)
|
||||
} else {
|
||||
((orig_w - crop_w) / 2, 0)
|
||||
};
|
||||
|
||||
res.crop((offset_w, offset_h, crop_w, crop_h)).resize((w, h))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn crop(mut self, crop: (u32, u32, u32, u32)) -> Self {
|
||||
self.crop = Some(crop);
|
||||
self
|
||||
}
|
||||
|
||||
fn resize(mut self, size: (u32, u32)) -> Self {
|
||||
self.resize = Some(size);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Thumbnail image format
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum Format {
|
||||
/// JPEG, The `u8` argument is JPEG quality (in percent).
|
||||
Jpeg(u8),
|
||||
/// PNG
|
||||
Png,
|
||||
/// WebP, The `u8` argument is WebP quality (in percent), None meaning lossless.
|
||||
WebP(Option<u8>),
|
||||
}
|
||||
|
||||
impl Format {
|
||||
fn from_args(meta: &ImageMeta, format: &str, quality: Option<u8>) -> Result<Format> {
|
||||
use Format::*;
|
||||
if let Some(quality) = quality {
|
||||
assert!(quality > 0 && quality <= 100, "Quality must be within the range [1; 100]");
|
||||
}
|
||||
let jpg_quality = quality.unwrap_or(DEFAULT_Q_JPG);
|
||||
match format {
|
||||
"auto" => {
|
||||
if meta.is_lossy() {
|
||||
Ok(Jpeg(jpg_quality))
|
||||
} else {
|
||||
Ok(Png)
|
||||
}
|
||||
}
|
||||
"jpeg" | "jpg" => Ok(Jpeg(jpg_quality)),
|
||||
"png" => Ok(Png),
|
||||
"webp" => Ok(WebP(quality)),
|
||||
_ => Err(anyhow!("Invalid image format: {}", format)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Looks at file's extension and, if it's a supported image format, returns whether the format is lossless
|
||||
pub fn is_lossy<P: AsRef<Path>>(p: P) -> Option<bool> {
|
||||
p.as_ref()
|
||||
.extension()
|
||||
.and_then(std::ffi::OsStr::to_str)
|
||||
.map(|ext| match ext.to_lowercase().as_str() {
|
||||
"jpg" | "jpeg" => Some(true),
|
||||
"png" => Some(false),
|
||||
"gif" => Some(false),
|
||||
"bmp" => Some(false),
|
||||
// It is assumed that webp is lossy, but it can be both
|
||||
"webp" => Some(true),
|
||||
_ => None,
|
||||
})
|
||||
.unwrap_or(None)
|
||||
}
|
||||
|
||||
fn extension(&self) -> &str {
|
||||
// Kept in sync with RESIZED_FILENAME and op_filename
|
||||
use Format::*;
|
||||
|
||||
match *self {
|
||||
Png => "png",
|
||||
Jpeg(_) => "jpg",
|
||||
WebP(_) => "webp",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::derive_hash_xor_eq)]
|
||||
impl Hash for Format {
|
||||
fn hash<H: Hasher>(&self, hasher: &mut H) {
|
||||
use Format::*;
|
||||
|
||||
let q = match *self {
|
||||
Png => 0,
|
||||
Jpeg(q) => q,
|
||||
WebP(None) => 0,
|
||||
WebP(Some(q)) => q,
|
||||
};
|
||||
|
||||
hasher.write_u8(q);
|
||||
hasher.write(self.extension().as_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
/// Holds all data needed to perform a resize operation
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct ImageOp {
|
||||
/// This is the source input path string as passed in the template, we need this to compute the hash.
|
||||
/// Hashing the resolved `input_path` would include the absolute path to the image
|
||||
/// with all filesystem components.
|
||||
input_src: String,
|
||||
input_path: PathBuf,
|
||||
op: ResizeOp,
|
||||
format: Format,
|
||||
/// Hash of the above parameters
|
||||
hash: u64,
|
||||
/// If there is a hash collision with another ImageOp, this contains a sequential ID > 1
|
||||
/// identifying the collision in the order as encountered (which is essentially random).
|
||||
/// Therefore, ImageOps with collisions (ie. collision_id > 0) are always considered out of date.
|
||||
/// Note that this is very unlikely to happen in practice
|
||||
collision_id: u32,
|
||||
}
|
||||
|
||||
impl ImageOp {
|
||||
const RESIZE_FILTER: FilterType = FilterType::Lanczos3;
|
||||
|
||||
fn new(input_src: String, input_path: PathBuf, op: ResizeOp, format: Format) -> ImageOp {
|
||||
let mut hasher = DefaultHasher::new();
|
||||
hasher.write(input_src.as_ref());
|
||||
op.hash(&mut hasher);
|
||||
format.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
|
||||
ImageOp { input_src, input_path, op, format, hash, collision_id: 0 }
|
||||
}
|
||||
|
||||
fn perform(&self, target_path: &Path) -> Result<()> {
|
||||
if !ufs::file_stale(&self.input_path, target_path) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut img = image::open(&self.input_path)?;
|
||||
|
||||
let img = match self.op.crop {
|
||||
Some((x, y, w, h)) => img.crop(x, y, w, h),
|
||||
None => img,
|
||||
};
|
||||
let img = match self.op.resize {
|
||||
Some((w, h)) => img.resize_exact(w, h, Self::RESIZE_FILTER),
|
||||
None => img,
|
||||
};
|
||||
|
||||
let img = fix_orientation(&img, &self.input_path).unwrap_or(img);
|
||||
|
||||
let f = File::create(target_path)?;
|
||||
let mut buffered_f = BufWriter::new(f);
|
||||
|
||||
match self.format {
|
||||
Format::Png => {
|
||||
img.write_to(&mut buffered_f, ImageOutputFormat::Png)?;
|
||||
}
|
||||
Format::Jpeg(q) => {
|
||||
img.write_to(&mut buffered_f, ImageOutputFormat::Jpeg(q))?;
|
||||
}
|
||||
Format::WebP(q) => {
|
||||
let encoder = webp::Encoder::from_image(&img)
|
||||
.map_err(|_| anyhow!("Unable to load this kind of image with webp"))?;
|
||||
let memory = match q {
|
||||
Some(q) => encoder.encode(q as f32),
|
||||
None => encoder.encode_lossless(),
|
||||
};
|
||||
buffered_f.write_all(memory.as_bytes())?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply image rotation based on EXIF data
|
||||
/// Returns `None` if no transformation is needed
|
||||
pub fn fix_orientation(img: &DynamicImage, path: &Path) -> Option<DynamicImage> {
|
||||
let file = std::fs::File::open(path).ok()?;
|
||||
let mut buf_reader = std::io::BufReader::new(&file);
|
||||
let exif_reader = exif::Reader::new();
|
||||
let exif = exif_reader.read_from_container(&mut buf_reader).ok()?;
|
||||
let orientation =
|
||||
exif.get_field(exif::Tag::Orientation, exif::In::PRIMARY)?.value.get_uint(0)?;
|
||||
match orientation {
|
||||
// Values are taken from the page 30 of
|
||||
// https://www.cipa.jp/std/documents/e/DC-008-2012_E.pdf
|
||||
// For more details check http://sylvana.net/jpegcrop/exif_orientation.html
|
||||
1 => None,
|
||||
2 => Some(img.fliph()),
|
||||
3 => Some(img.rotate180()),
|
||||
4 => Some(img.flipv()),
|
||||
5 => Some(img.fliph().rotate270()),
|
||||
6 => Some(img.rotate90()),
|
||||
7 => Some(img.fliph().rotate90()),
|
||||
8 => Some(img.rotate270()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub struct EnqueueResponse {
|
||||
/// The final URL for that asset
|
||||
pub url: String,
|
||||
/// The path to the static asset generated
|
||||
pub static_path: String,
|
||||
/// New image width
|
||||
pub width: u32,
|
||||
/// New image height
|
||||
pub height: u32,
|
||||
/// Original image width
|
||||
pub orig_width: u32,
|
||||
/// Original image height
|
||||
pub orig_height: u32,
|
||||
}
|
||||
|
||||
impl EnqueueResponse {
|
||||
fn new(url: String, static_path: PathBuf, meta: &ImageMeta, op: &ResizeOp) -> Self {
|
||||
let static_path = static_path.to_string_lossy().into_owned();
|
||||
let (width, height) = op.resize.unwrap_or(meta.size);
|
||||
let (orig_width, orig_height) = meta.size;
|
||||
|
||||
Self { url, static_path, width, height, orig_width, orig_height }
|
||||
}
|
||||
}
|
||||
|
||||
/// A struct into which image operations can be enqueued and then performed.
|
||||
/// All output is written in a subdirectory in `static_path`,
|
||||
/// taking care of file stale status based on timestamps and possible hash collisions.
|
||||
#[derive(Debug)]
|
||||
pub struct Processor {
|
||||
base_url: String,
|
||||
output_dir: PathBuf,
|
||||
/// A map of a ImageOps by their stored hash.
|
||||
/// Note that this cannot be a HashSet, because hashset handles collisions and we don't want that,
|
||||
/// we need to be aware of and handle collisions ourselves.
|
||||
img_ops: HashMap<u64, ImageOp>,
|
||||
/// Hash collisions go here:
|
||||
img_ops_collisions: Vec<ImageOp>,
|
||||
}
|
||||
|
||||
impl Processor {
|
||||
pub fn new(base_path: PathBuf, config: &Config) -> Processor {
|
||||
Processor {
|
||||
output_dir: base_path.join("static").join(RESIZED_SUBDIR),
|
||||
base_url: config.make_permalink(RESIZED_SUBDIR),
|
||||
img_ops: HashMap::new(),
|
||||
img_ops_collisions: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_base_url(&mut self, config: &Config) {
|
||||
self.base_url = config.make_permalink(RESIZED_SUBDIR);
|
||||
}
|
||||
|
||||
pub fn num_img_ops(&self) -> usize {
|
||||
self.img_ops.len() + self.img_ops_collisions.len()
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn enqueue(
|
||||
&mut self,
|
||||
input_src: String,
|
||||
input_path: PathBuf,
|
||||
op: &str,
|
||||
width: Option<u32>,
|
||||
height: Option<u32>,
|
||||
format: &str,
|
||||
quality: Option<u8>,
|
||||
) -> Result<EnqueueResponse> {
|
||||
let meta = ImageMeta::read(&input_path)
|
||||
.with_context(|| format!("Failed to read image: {}", input_path.display()))?;
|
||||
|
||||
let args = ResizeArgs::from_args(op, width, height)?;
|
||||
let op = ResizeOp::new(args, meta.size);
|
||||
let format = Format::from_args(&meta, format, quality)?;
|
||||
let img_op = ImageOp::new(input_src, input_path, op.clone(), format);
|
||||
let (static_path, url) = self.insert(img_op);
|
||||
|
||||
Ok(EnqueueResponse::new(url, static_path, &meta, &op))
|
||||
}
|
||||
|
||||
fn insert_with_collisions(&mut self, mut img_op: ImageOp) -> u32 {
|
||||
match self.img_ops.entry(img_op.hash) {
|
||||
HEntry::Occupied(entry) => {
|
||||
if *entry.get() == img_op {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
HEntry::Vacant(entry) => {
|
||||
entry.insert(img_op);
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
// If we get here, that means a hash collision.
|
||||
// This is detected when there is an ImageOp with the same hash in the `img_ops`
|
||||
// map but which is not equal to this one.
|
||||
// To deal with this, all collisions get a (random) sequential ID number.
|
||||
|
||||
// First try to look up this ImageOp in `img_ops_collisions`, maybe we've
|
||||
// already seen the same ImageOp.
|
||||
// At the same time, count IDs to figure out the next free one.
|
||||
// Start with the ID of 2, because we'll need to use 1 for the ImageOp
|
||||
// already present in the map:
|
||||
let mut collision_id = 2;
|
||||
for op in self.img_ops_collisions.iter().filter(|op| op.hash == img_op.hash) {
|
||||
if *op == img_op {
|
||||
// This is a colliding ImageOp, but we've already seen an equal one
|
||||
// (not just by hash, but by content too), so just return its ID:
|
||||
return collision_id;
|
||||
} else {
|
||||
collision_id += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// If we get here, that means this is a new colliding ImageOp and
|
||||
// `collision_id` is the next free ID
|
||||
if collision_id == 2 {
|
||||
// This is the first collision found with this hash, update the ID
|
||||
// of the matching ImageOp in the map.
|
||||
self.img_ops.get_mut(&img_op.hash).unwrap().collision_id = 1;
|
||||
}
|
||||
img_op.collision_id = collision_id;
|
||||
self.img_ops_collisions.push(img_op);
|
||||
collision_id
|
||||
}
|
||||
|
||||
fn op_filename(hash: u64, collision_id: u32, format: Format) -> String {
|
||||
// Please keep this in sync with RESIZED_FILENAME
|
||||
assert!(collision_id < 256, "Unexpectedly large number of collisions: {}", collision_id);
|
||||
format!("{:016x}{:02x}.{}", hash, collision_id, format.extension())
|
||||
}
|
||||
|
||||
/// Adds the given operation to the queue but do not process it immediately.
|
||||
/// Returns (path in static folder, final URL).
|
||||
fn insert(&mut self, img_op: ImageOp) -> (PathBuf, String) {
|
||||
let hash = img_op.hash;
|
||||
let format = img_op.format;
|
||||
let collision_id = self.insert_with_collisions(img_op);
|
||||
let filename = Self::op_filename(hash, collision_id, format);
|
||||
let url = format!("{}{}", self.base_url, filename);
|
||||
(Path::new("static").join(RESIZED_SUBDIR).join(filename), url)
|
||||
}
|
||||
|
||||
/// Remove stale processed images in the output directory
|
||||
pub fn prune(&self) -> Result<()> {
|
||||
// Do not create folders if they don't exist
|
||||
if !self.output_dir.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
ufs::ensure_directory_exists(&self.output_dir)?;
|
||||
let entries = fs::read_dir(&self.output_dir)?;
|
||||
for entry in entries {
|
||||
let entry_path = entry?.path();
|
||||
if entry_path.is_file() {
|
||||
let filename = entry_path.file_name().unwrap().to_string_lossy();
|
||||
if let Some(capts) = RESIZED_FILENAME.captures(filename.as_ref()) {
|
||||
let hash = u64::from_str_radix(capts.get(1).unwrap().as_str(), 16).unwrap();
|
||||
let collision_id =
|
||||
u32::from_str_radix(capts.get(2).unwrap().as_str(), 16).unwrap();
|
||||
|
||||
if collision_id > 0 || !self.img_ops.contains_key(&hash) {
|
||||
fs::remove_file(&entry_path)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Run the enqueued image operations
|
||||
pub fn do_process(&mut self) -> Result<()> {
|
||||
if !self.img_ops.is_empty() {
|
||||
ufs::ensure_directory_exists(&self.output_dir)?;
|
||||
}
|
||||
|
||||
self.img_ops
|
||||
.par_iter()
|
||||
.map(|(hash, op)| {
|
||||
let target =
|
||||
self.output_dir.join(Self::op_filename(*hash, op.collision_id, op.format));
|
||||
|
||||
op.perform(&target).with_context(|| {
|
||||
format!("Failed to process image: {}", op.input_path.display())
|
||||
})
|
||||
})
|
||||
.collect::<Result<()>>()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Eq, PartialEq)]
|
||||
pub struct ImageMetaResponse {
|
||||
pub width: u32,
|
||||
pub height: u32,
|
||||
pub format: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl ImageMetaResponse {
|
||||
pub fn new_svg(width: u32, height: u32) -> Self {
|
||||
Self { width, height, format: Some("svg") }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ImageMeta> for ImageMetaResponse {
|
||||
fn from(im: ImageMeta) -> Self {
|
||||
Self {
|
||||
width: im.size.0,
|
||||
height: im.size.1,
|
||||
format: im.format.and_then(|f| f.extensions_str().first()).copied(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<webp::WebPImage> for ImageMetaResponse {
|
||||
fn from(img: webp::WebPImage) -> Self {
|
||||
Self { width: img.width(), height: img.height(), format: Some("webp") }
|
||||
}
|
||||
}
|
||||
|
||||
/// Read image dimensions (cheaply), used in `get_image_metadata()`, supports SVG
|
||||
pub fn read_image_metadata<P: AsRef<Path>>(path: P) -> Result<ImageMetaResponse> {
|
||||
let path = path.as_ref();
|
||||
let ext = path.extension().and_then(OsStr::to_str).unwrap_or("").to_lowercase();
|
||||
|
||||
let err_context = || format!("Failed to read image: {}", path.display());
|
||||
|
||||
match ext.as_str() {
|
||||
"svg" => {
|
||||
let img = SvgMetadata::parse_file(path).with_context(err_context)?;
|
||||
match (img.height(), img.width(), img.view_box()) {
|
||||
(Some(h), Some(w), _) => Ok((h, w)),
|
||||
(_, _, Some(view_box)) => Ok((view_box.height, view_box.width)),
|
||||
_ => Err(anyhow!("Invalid dimensions: SVG width/height and viewbox not set.")),
|
||||
}
|
||||
//this is not a typo, this returns the correct values for width and height.
|
||||
.map(|(h, w)| ImageMetaResponse::new_svg(w as u32, h as u32))
|
||||
}
|
||||
"webp" => {
|
||||
// Unfortunately we have to load the entire image here, unlike with the others :|
|
||||
let data = fs::read(path).with_context(err_context)?;
|
||||
let decoder = webp::Decoder::new(&data[..]);
|
||||
decoder.decode().map(ImageMetaResponse::from).ok_or_else(|| {
|
||||
Error::msg(format!("Failed to decode WebP image: {}", path.display()))
|
||||
})
|
||||
}
|
||||
_ => ImageMeta::read(path).map(ImageMetaResponse::from).with_context(err_context),
|
||||
}
|
||||
}
|
||||
|
||||
/// Assert that `address` matches `prefix` + RESIZED_FILENAME regex + "." + `extension`,
|
||||
/// this is useful in test so that we don't need to hardcode hash, which is annoying.
|
||||
pub fn assert_processed_path_matches(path: &str, prefix: &str, extension: &str) {
|
||||
let filename = path
|
||||
.strip_prefix(prefix)
|
||||
.unwrap_or_else(|| panic!("Path `{}` doesn't start with `{}`", path, prefix));
|
||||
|
||||
let suffix = format!(".{}", extension);
|
||||
assert!(filename.ends_with(&suffix), "Path `{}` doesn't end with `{}`", path, suffix);
|
||||
|
||||
assert!(
|
||||
RESIZED_FILENAME.is_match_at(filename, 0),
|
||||
"In path `{}`, file stem `{}` doesn't match the RESIZED_FILENAME regex",
|
||||
path,
|
||||
filename
|
||||
);
|
||||
}
|
||||
mod format;
|
||||
mod helpers;
|
||||
mod meta;
|
||||
mod ops;
|
||||
mod processor;
|
||||
|
||||
pub use helpers::fix_orientation;
|
||||
pub use meta::{read_image_metadata, ImageMeta, ImageMetaResponse};
|
||||
pub use ops::{ResizeInstructions, ResizeOperation};
|
||||
pub use processor::{EnqueueResponse, Processor, RESIZED_SUBDIR};
|
||||
|
|
89
components/imageproc/src/meta.rs
Normal file
|
@ -0,0 +1,89 @@
|
|||
use errors::{anyhow, Context, Result};
|
||||
use libs::image::io::Reader as ImgReader;
|
||||
use libs::image::{ImageFormat, ImageResult};
|
||||
use libs::svg_metadata::Metadata as SvgMetadata;
|
||||
use serde::Serialize;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::Path;
|
||||
|
||||
/// Size and format read cheaply with `image`'s `Reader`.
|
||||
#[derive(Debug)]
|
||||
pub struct ImageMeta {
|
||||
/// (w, h)
|
||||
pub size: (u32, u32),
|
||||
pub format: Option<ImageFormat>,
|
||||
}
|
||||
|
||||
impl ImageMeta {
|
||||
pub fn read(path: &Path) -> ImageResult<Self> {
|
||||
let reader = ImgReader::open(path).and_then(ImgReader::with_guessed_format)?;
|
||||
let format = reader.format();
|
||||
let size = reader.into_dimensions()?;
|
||||
|
||||
Ok(Self { size, format })
|
||||
}
|
||||
|
||||
pub fn is_lossy(&self) -> bool {
|
||||
use ImageFormat::*;
|
||||
|
||||
// We assume lossy by default / if unknown format
|
||||
let format = self.format.unwrap_or(Jpeg);
|
||||
!matches!(format, Png | Pnm | Tiff | Tga | Bmp | Ico | Hdr | Farbfeld)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Eq, PartialEq)]
|
||||
pub struct ImageMetaResponse {
|
||||
pub width: u32,
|
||||
pub height: u32,
|
||||
pub format: Option<&'static str>,
|
||||
}
|
||||
|
||||
impl ImageMetaResponse {
|
||||
pub fn new_svg(width: u32, height: u32) -> Self {
|
||||
Self { width, height, format: Some("svg") }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ImageMeta> for ImageMetaResponse {
|
||||
fn from(im: ImageMeta) -> Self {
|
||||
Self {
|
||||
width: im.size.0,
|
||||
height: im.size.1,
|
||||
format: im.format.and_then(|f| f.extensions_str().first()).copied(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Read image dimensions (cheaply), used in `get_image_metadata()`, supports SVG
|
||||
pub fn read_image_metadata<P: AsRef<Path>>(path: P) -> Result<ImageMetaResponse> {
|
||||
let path = path.as_ref();
|
||||
let ext = path.extension().and_then(OsStr::to_str).unwrap_or("").to_lowercase();
|
||||
|
||||
let err_context = || format!("Failed to read image: {}", path.display());
|
||||
|
||||
match ext.as_str() {
|
||||
"svg" => {
|
||||
let img = SvgMetadata::parse_file(path).with_context(err_context)?;
|
||||
match (img.height(), img.width(), img.view_box()) {
|
||||
(Some(h), Some(w), _) => Ok((h, w)),
|
||||
(_, _, Some(view_box)) => Ok((view_box.height, view_box.width)),
|
||||
_ => Err(anyhow!("Invalid dimensions: SVG width/height and viewbox not set.")),
|
||||
}
|
||||
// this is not a typo, this returns the correct values for width and height.
|
||||
.map(|(h, w)| ImageMetaResponse::new_svg(w as u32, h as u32))
|
||||
}
|
||||
_ => ImageMeta::read(path).map(ImageMetaResponse::from).with_context(err_context),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn can_get_webp_metadata() {
|
||||
let filepath = Path::new("tests/test_imgs/webp.webp");
|
||||
assert!(filepath.exists());
|
||||
}
|
||||
}
|
141
components/imageproc/src/ops.rs
Normal file
|
@ -0,0 +1,141 @@
|
|||
use errors::{anyhow, Result};
|
||||
|
||||
/// De-serialized & sanitized arguments of `resize_image`
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum ResizeOperation {
|
||||
/// A simple scale operation that doesn't take aspect ratio into account
|
||||
Scale(u32, u32),
|
||||
/// Scales the image to a specified width with height computed such
|
||||
/// that aspect ratio is preserved
|
||||
FitWidth(u32),
|
||||
/// Scales the image to a specified height with width computed such
|
||||
/// that aspect ratio is preserved
|
||||
FitHeight(u32),
|
||||
/// If the image is larger than the specified width or height, scales the image such
|
||||
/// that it fits within the specified width and height preserving aspect ratio.
|
||||
/// Either dimension may end up being smaller, but never larger than specified.
|
||||
Fit(u32, u32),
|
||||
/// Scales the image such that it fills the specified width and height.
|
||||
/// Output will always have the exact dimensions specified.
|
||||
/// The part of the image that doesn't fit in the thumbnail due to differing
|
||||
/// aspect ratio will be cropped away, if any.
|
||||
Fill(u32, u32),
|
||||
}
|
||||
|
||||
impl ResizeOperation {
|
||||
pub fn from_args(op: &str, width: Option<u32>, height: Option<u32>) -> Result<Self> {
|
||||
use ResizeOperation::*;
|
||||
|
||||
// Validate args:
|
||||
match op {
|
||||
"fit_width" => {
|
||||
if width.is_none() {
|
||||
return Err(anyhow!("op=\"fit_width\" requires a `width` argument"));
|
||||
}
|
||||
}
|
||||
"fit_height" => {
|
||||
if height.is_none() {
|
||||
return Err(anyhow!("op=\"fit_height\" requires a `height` argument"));
|
||||
}
|
||||
}
|
||||
"scale" | "fit" | "fill" => {
|
||||
if width.is_none() || height.is_none() {
|
||||
return Err(anyhow!("op={} requires a `width` and `height` argument", op));
|
||||
}
|
||||
}
|
||||
_ => return Err(anyhow!("Invalid image resize operation: {}", op)),
|
||||
};
|
||||
|
||||
Ok(match op {
|
||||
"scale" => Scale(width.unwrap(), height.unwrap()),
|
||||
"fit_width" => FitWidth(width.unwrap()),
|
||||
"fit_height" => FitHeight(height.unwrap()),
|
||||
"fit" => Fit(width.unwrap(), height.unwrap()),
|
||||
"fill" => Fill(width.unwrap(), height.unwrap()),
|
||||
_ => unreachable!(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Contains image crop/resize instructions for use by `Processor`
|
||||
///
|
||||
/// The `Processor` applies `crop` first, if any, and then `resize`, if any.
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Default, Debug)]
|
||||
pub struct ResizeInstructions {
|
||||
pub crop_instruction: Option<(u32, u32, u32, u32)>, // x, y, w, h
|
||||
pub resize_instruction: Option<(u32, u32)>, // w, h
|
||||
}
|
||||
|
||||
impl ResizeInstructions {
|
||||
pub fn new(args: ResizeOperation, (orig_w, orig_h): (u32, u32)) -> Self {
|
||||
use ResizeOperation::*;
|
||||
|
||||
let res = ResizeInstructions::default();
|
||||
|
||||
match args {
|
||||
Scale(w, h) => res.resize((w, h)),
|
||||
FitWidth(w) => {
|
||||
let h = (orig_h as u64 * w as u64) / orig_w as u64;
|
||||
res.resize((w, h as u32))
|
||||
}
|
||||
FitHeight(h) => {
|
||||
let w = (orig_w as u64 * h as u64) / orig_h as u64;
|
||||
res.resize((w as u32, h))
|
||||
}
|
||||
Fit(w, h) => {
|
||||
if orig_w <= w && orig_h <= h {
|
||||
return res; // ie. no-op
|
||||
}
|
||||
|
||||
let orig_w_h = orig_w as u64 * h as u64;
|
||||
let orig_h_w = orig_h as u64 * w as u64;
|
||||
|
||||
if orig_w_h > orig_h_w {
|
||||
Self::new(FitWidth(w), (orig_w, orig_h))
|
||||
} else {
|
||||
Self::new(FitHeight(h), (orig_w, orig_h))
|
||||
}
|
||||
}
|
||||
Fill(w, h) => {
|
||||
const RATIO_EPSILLION: f32 = 0.1;
|
||||
|
||||
let factor_w = orig_w as f32 / w as f32;
|
||||
let factor_h = orig_h as f32 / h as f32;
|
||||
|
||||
if (factor_w - factor_h).abs() <= RATIO_EPSILLION {
|
||||
// If the horizontal and vertical factor is very similar,
|
||||
// that means the aspect is similar enough that there's not much point
|
||||
// in cropping, so just perform a simple scale in this case.
|
||||
res.resize((w, h))
|
||||
} else {
|
||||
// We perform the fill such that a crop is performed first
|
||||
// and then resize_exact can be used, which should be cheaper than
|
||||
// resizing and then cropping (smaller number of pixels to resize).
|
||||
let (crop_w, crop_h) = if factor_w < factor_h {
|
||||
(orig_w, (factor_w * h as f32).round() as u32)
|
||||
} else {
|
||||
((factor_h * w as f32).round() as u32, orig_h)
|
||||
};
|
||||
|
||||
let (offset_w, offset_h) = if factor_w < factor_h {
|
||||
(0, (orig_h - crop_h) / 2)
|
||||
} else {
|
||||
((orig_w - crop_w) / 2, 0)
|
||||
};
|
||||
|
||||
res.crop((offset_w, offset_h, crop_w, crop_h)).resize((w, h))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn crop(mut self, crop: (u32, u32, u32, u32)) -> Self {
|
||||
self.crop_instruction = Some(crop);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn resize(mut self, size: (u32, u32)) -> Self {
|
||||
self.resize_instruction = Some(size);
|
||||
self
|
||||
}
|
||||
}
|
218
components/imageproc/src/processor.rs
Normal file
|
@ -0,0 +1,218 @@
|
|||
use std::fs;
|
||||
use std::fs::File;
|
||||
use std::io::{BufWriter, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use config::Config;
|
||||
use errors::{anyhow, Context, Result};
|
||||
use libs::ahash::{HashMap, HashSet};
|
||||
use libs::image::imageops::FilterType;
|
||||
use libs::image::{EncodableLayout, ImageOutputFormat};
|
||||
use libs::rayon::prelude::*;
|
||||
use libs::{image, webp};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use utils::fs as ufs;
|
||||
|
||||
use crate::format::Format;
|
||||
use crate::helpers::get_processed_filename;
|
||||
use crate::{fix_orientation, ImageMeta, ResizeInstructions, ResizeOperation};
|
||||
|
||||
pub static RESIZED_SUBDIR: &str = "processed_images";
|
||||
|
||||
/// Holds all data needed to perform a resize operation
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
pub struct ImageOp {
|
||||
input_path: PathBuf,
|
||||
output_path: PathBuf,
|
||||
instr: ResizeInstructions,
|
||||
format: Format,
|
||||
/// Whether we actually want to perform that op.
|
||||
/// In practice we set it to true if the output file already
|
||||
/// exists and is not stale. We do need to keep the ImageOp around for pruning though.
|
||||
ignore: bool,
|
||||
}
|
||||
|
||||
impl ImageOp {
|
||||
fn perform(&self) -> Result<()> {
|
||||
if self.ignore {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut img = image::open(&self.input_path)?;
|
||||
|
||||
let img = match self.instr.crop_instruction {
|
||||
Some((x, y, w, h)) => img.crop(x, y, w, h),
|
||||
None => img,
|
||||
};
|
||||
let img = match self.instr.resize_instruction {
|
||||
Some((w, h)) => img.resize_exact(w, h, FilterType::Lanczos3),
|
||||
None => img,
|
||||
};
|
||||
|
||||
let img = fix_orientation(&img, &self.input_path).unwrap_or(img);
|
||||
|
||||
let f = File::create(&self.output_path)?;
|
||||
let mut buffered_f = BufWriter::new(f);
|
||||
|
||||
match self.format {
|
||||
Format::Png => {
|
||||
img.write_to(&mut buffered_f, ImageOutputFormat::Png)?;
|
||||
}
|
||||
Format::Jpeg(q) => {
|
||||
img.write_to(&mut buffered_f, ImageOutputFormat::Jpeg(q))?;
|
||||
}
|
||||
Format::WebP(q) => {
|
||||
let encoder = webp::Encoder::from_image(&img)
|
||||
.map_err(|_| anyhow!("Unable to load this kind of image with webp"))?;
|
||||
let memory = match q {
|
||||
Some(q) => encoder.encode(q as f32),
|
||||
None => encoder.encode_lossless(),
|
||||
};
|
||||
buffered_f.write_all(memory.as_bytes())?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
|
||||
pub struct EnqueueResponse {
|
||||
/// The final URL for that asset
|
||||
pub url: String,
|
||||
/// The path to the static asset generated
|
||||
pub static_path: String,
|
||||
/// New image width
|
||||
pub width: u32,
|
||||
/// New image height
|
||||
pub height: u32,
|
||||
/// Original image width
|
||||
pub orig_width: u32,
|
||||
/// Original image height
|
||||
pub orig_height: u32,
|
||||
}
|
||||
|
||||
impl EnqueueResponse {
|
||||
fn new(
|
||||
url: String,
|
||||
static_path: PathBuf,
|
||||
meta: &ImageMeta,
|
||||
instr: &ResizeInstructions,
|
||||
) -> Self {
|
||||
let static_path = static_path.to_string_lossy().into_owned();
|
||||
let (width, height) = instr.resize_instruction.unwrap_or(meta.size);
|
||||
let (orig_width, orig_height) = meta.size;
|
||||
|
||||
Self { url, static_path, width, height, orig_width, orig_height }
|
||||
}
|
||||
}
|
||||
|
||||
/// A struct into which image operations can be enqueued and then performed.
|
||||
/// All output is written in a subdirectory in `static_path`,
|
||||
/// taking care of file stale status based on timestamps
|
||||
#[derive(Debug)]
|
||||
pub struct Processor {
|
||||
base_url: String,
|
||||
output_dir: PathBuf,
|
||||
img_ops: HashSet<ImageOp>,
|
||||
/// We want to make sure we only ever get metadata for an image once
|
||||
meta_cache: HashMap<PathBuf, ImageMeta>,
|
||||
}
|
||||
|
||||
impl Processor {
|
||||
pub fn new(base_path: PathBuf, config: &Config) -> Processor {
|
||||
Processor {
|
||||
output_dir: base_path.join("static").join(RESIZED_SUBDIR),
|
||||
base_url: config.make_permalink(RESIZED_SUBDIR),
|
||||
img_ops: HashSet::default(),
|
||||
meta_cache: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_base_url(&mut self, config: &Config) {
|
||||
self.base_url = config.make_permalink(RESIZED_SUBDIR);
|
||||
}
|
||||
|
||||
pub fn num_img_ops(&self) -> usize {
|
||||
self.img_ops.len()
|
||||
}
|
||||
|
||||
pub fn enqueue(
|
||||
&mut self,
|
||||
op: ResizeOperation,
|
||||
input_src: String,
|
||||
input_path: PathBuf,
|
||||
format: &str,
|
||||
quality: Option<u8>,
|
||||
) -> Result<EnqueueResponse> {
|
||||
// First we load metadata from the cache if possible, otherwise from the file itself
|
||||
if !self.meta_cache.contains_key(&input_path) {
|
||||
let meta = ImageMeta::read(&input_path)
|
||||
.with_context(|| format!("Failed to read image: {}", input_path.display()))?;
|
||||
self.meta_cache.insert(input_path.clone(), meta);
|
||||
}
|
||||
// We will have inserted it just above
|
||||
let meta = &self.meta_cache[&input_path];
|
||||
// We get the output format
|
||||
let format = Format::from_args(meta.is_lossy(), format, quality)?;
|
||||
// Now we have all the data we need to generate the output filename and the response
|
||||
let filename = get_processed_filename(&input_path, &input_src, &op, &format);
|
||||
let url = format!("{}{}", self.base_url, filename);
|
||||
let static_path = Path::new("static").join(RESIZED_SUBDIR).join(&filename);
|
||||
let output_path = self.output_dir.join(&filename);
|
||||
let instr = ResizeInstructions::new(op, meta.size);
|
||||
let enqueue_response = EnqueueResponse::new(url, static_path, meta, &instr);
|
||||
let img_op = ImageOp {
|
||||
ignore: output_path.exists() && !ufs::file_stale(&input_path, &output_path),
|
||||
input_path,
|
||||
output_path,
|
||||
instr,
|
||||
format,
|
||||
};
|
||||
self.img_ops.insert(img_op);
|
||||
|
||||
Ok(enqueue_response)
|
||||
}
|
||||
|
||||
/// Run the enqueued image operations
|
||||
pub fn do_process(&mut self) -> Result<()> {
|
||||
if !self.img_ops.is_empty() {
|
||||
ufs::ensure_directory_exists(&self.output_dir)?;
|
||||
}
|
||||
|
||||
self.img_ops
|
||||
.par_iter()
|
||||
.map(|op| {
|
||||
op.perform().with_context(|| {
|
||||
format!("Failed to process image: {}", op.input_path.display())
|
||||
})
|
||||
})
|
||||
.collect::<Result<()>>()
|
||||
}
|
||||
|
||||
/// Remove stale processed images in the output directory
|
||||
pub fn prune(&self) -> Result<()> {
|
||||
// Do not create folders if they don't exist
|
||||
if !self.output_dir.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
ufs::ensure_directory_exists(&self.output_dir)?;
|
||||
let output_paths: HashSet<_> = self
|
||||
.img_ops
|
||||
.iter()
|
||||
.map(|o| o.output_path.file_name().unwrap().to_string_lossy())
|
||||
.collect();
|
||||
|
||||
for entry in fs::read_dir(&self.output_dir)? {
|
||||
let entry_path = entry?.path();
|
||||
if entry_path.is_file() {
|
||||
let filename = entry_path.file_name().unwrap().to_string_lossy();
|
||||
if !output_paths.contains(&filename) {
|
||||
fs::remove_file(&entry_path)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -2,10 +2,20 @@ use std::env;
|
|||
use std::path::{PathBuf, MAIN_SEPARATOR as SLASH};
|
||||
|
||||
use config::Config;
|
||||
use imageproc::{assert_processed_path_matches, fix_orientation, ImageMetaResponse, Processor};
|
||||
use imageproc::{fix_orientation, ImageMetaResponse, Processor, ResizeOperation};
|
||||
use libs::image::{self, DynamicImage, GenericImageView, Pixel};
|
||||
use libs::once_cell::sync::Lazy;
|
||||
|
||||
/// Assert that `address` matches `prefix` + RESIZED_FILENAME regex + "." + `extension`,
|
||||
fn assert_processed_path_matches(path: &str, prefix: &str, extension: &str) {
|
||||
let filename = path
|
||||
.strip_prefix(prefix)
|
||||
.unwrap_or_else(|| panic!("Path `{}` doesn't start with `{}`", path, prefix));
|
||||
|
||||
let suffix = format!(".{}", extension);
|
||||
assert!(filename.ends_with(&suffix), "Path `{}` doesn't end with `{}`", path, suffix);
|
||||
}
|
||||
|
||||
static CONFIG: &str = r#"
|
||||
title = "imageproc integration tests"
|
||||
base_url = "https://example.com"
|
||||
|
@ -38,9 +48,9 @@ fn image_op_test(
|
|||
let tmpdir = tempfile::tempdir().unwrap().into_path();
|
||||
let config = Config::parse(CONFIG).unwrap();
|
||||
let mut proc = Processor::new(tmpdir.clone(), &config);
|
||||
let resize_op = ResizeOperation::from_args(op, width, height).unwrap();
|
||||
|
||||
let resp =
|
||||
proc.enqueue(source_img.into(), source_path, op, width, height, format, None).unwrap();
|
||||
let resp = proc.enqueue(resize_op, source_img.into(), source_path, format, None).unwrap();
|
||||
assert_processed_path_matches(&resp.url, "https://example.com/processed_images/", expect_ext);
|
||||
assert_processed_path_matches(&resp.static_path, PROCESSED_PREFIX.as_str(), expect_ext);
|
||||
assert_eq!(resp.width, expect_width);
|
||||
|
@ -202,10 +212,9 @@ fn resize_and_check(source_img: &str) -> bool {
|
|||
let tmpdir = tempfile::tempdir().unwrap().into_path();
|
||||
let config = Config::parse(CONFIG).unwrap();
|
||||
let mut proc = Processor::new(tmpdir.clone(), &config);
|
||||
let resize_op = ResizeOperation::from_args("scale", Some(16), Some(16)).unwrap();
|
||||
|
||||
let resp = proc
|
||||
.enqueue(source_img.into(), source_path, "scale", Some(16), Some(16), "jpg", None)
|
||||
.unwrap();
|
||||
let resp = proc.enqueue(resize_op, source_img.into(), source_path, "jpg", None).unwrap();
|
||||
|
||||
proc.do_process().unwrap();
|
||||
let processed_path = PathBuf::from(&resp.static_path);
|
||||
|
@ -224,5 +233,3 @@ fn check_img(img: DynamicImage) -> bool {
|
|||
// bottom right is white
|
||||
&& img.get_pixel(15, 15).channels() == [255, 255, 255, 255]
|
||||
}
|
||||
|
||||
// TODO: Test that hash remains the same if physical path is changed
|
||||
|
|
|
@ -60,7 +60,8 @@ impl TeraFn for ResizeImage {
|
|||
return Err("`resize_image`: `quality` must be in range 1-100".to_string().into());
|
||||
}
|
||||
}
|
||||
|
||||
let resize_op = imageproc::ResizeOperation::from_args(&op, width, height)
|
||||
.map_err(|e| format!("`resize_image`: {}", e))?;
|
||||
let mut imageproc = self.imageproc.lock().unwrap();
|
||||
let (file_path, unified_path) =
|
||||
match search_for_file(&self.base_path, &path, &self.theme, &self.output_path)
|
||||
|
@ -73,7 +74,7 @@ impl TeraFn for ResizeImage {
|
|||
};
|
||||
|
||||
let response = imageproc
|
||||
.enqueue(unified_path, file_path, &op, width, height, &format, quality)
|
||||
.enqueue(resize_op, unified_path, file_path, &format, quality)
|
||||
.map_err(|e| format!("`resize_image`: {}", e))?;
|
||||
|
||||
to_value(response).map_err(Into::into)
|
||||
|
@ -183,22 +184,22 @@ mod tests {
|
|||
args.insert("height".to_string(), to_value(40).unwrap());
|
||||
args.insert("width".to_string(), to_value(40).unwrap());
|
||||
|
||||
// hashing is stable based on filename and params so we can compare with hashes
|
||||
// hashing is stable based on filepath and params so we can compare with hashes
|
||||
|
||||
// 1. resizing an image in static
|
||||
args.insert("path".to_string(), to_value("static/gutenberg.jpg").unwrap());
|
||||
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
|
||||
let static_path = Path::new("static").join("processed_images");
|
||||
|
||||
// TODO: Use `assert_processed_path_matches()` from imageproc so that hashes don't need to be hardcoded
|
||||
|
||||
assert_eq!(
|
||||
data["static_path"],
|
||||
to_value(&format!("{}", static_path.join("6a89d6483cdc5f7700.jpg").display())).unwrap()
|
||||
to_value(&format!("{}", static_path.join("gutenberg.da10f4be4f1c441e.jpg").display()))
|
||||
.unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
data["url"],
|
||||
to_value("http://a-website.com/processed_images/6a89d6483cdc5f7700.jpg").unwrap()
|
||||
to_value("http://a-website.com/processed_images/gutenberg.da10f4be4f1c441e.jpg")
|
||||
.unwrap()
|
||||
);
|
||||
|
||||
// 2. resizing an image in content with a relative path
|
||||
|
@ -206,11 +207,13 @@ mod tests {
|
|||
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
|
||||
assert_eq!(
|
||||
data["static_path"],
|
||||
to_value(&format!("{}", static_path.join("202d9263f4dbc95900.jpg").display())).unwrap()
|
||||
to_value(&format!("{}", static_path.join("gutenberg.3301b37eed389d2e.jpg").display()))
|
||||
.unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
data["url"],
|
||||
to_value("http://a-website.com/processed_images/202d9263f4dbc95900.jpg").unwrap()
|
||||
to_value("http://a-website.com/processed_images/gutenberg.3301b37eed389d2e.jpg")
|
||||
.unwrap()
|
||||
);
|
||||
|
||||
// 3. resizing with an absolute path is the same as the above
|
||||
|
@ -228,22 +231,26 @@ mod tests {
|
|||
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
|
||||
assert_eq!(
|
||||
data["static_path"],
|
||||
to_value(&format!("{}", static_path.join("6296a3c153f701be00.jpg").display())).unwrap()
|
||||
to_value(&format!("{}", static_path.join("asset.d2fde9a750b68471.jpg").display()))
|
||||
.unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
data["url"],
|
||||
to_value("http://a-website.com/processed_images/6296a3c153f701be00.jpg").unwrap()
|
||||
to_value("http://a-website.com/processed_images/asset.d2fde9a750b68471.jpg").unwrap()
|
||||
);
|
||||
|
||||
// 6. Looking up a file in the theme
|
||||
args.insert("path".to_string(), to_value("in-theme.jpg").unwrap());
|
||||
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
|
||||
assert_eq!(
|
||||
data["static_path"],
|
||||
to_value(&format!("{}", static_path.join("6296a3c153f701be00.jpg").display())).unwrap()
|
||||
to_value(&format!("{}", static_path.join("in-theme.9b0d29e07d588b60.jpg").display()))
|
||||
.unwrap()
|
||||
);
|
||||
assert_eq!(
|
||||
data["url"],
|
||||
to_value("http://a-website.com/processed_images/6296a3c153f701be00.jpg").unwrap()
|
||||
to_value("http://a-website.com/processed_images/in-theme.9b0d29e07d588b60.jpg")
|
||||
.unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -174,7 +174,6 @@ where
|
|||
path.as_ref().file_name().and_then(|s| s.to_str()).map(|s| s.starts_with('.')).unwrap_or(false)
|
||||
}
|
||||
|
||||
|
||||
/// Returns whether the path we received corresponds to a temp file created
|
||||
/// by an editor or the OS
|
||||
pub fn is_temp_file(path: &Path) -> bool {
|
||||
|
|
|
@ -164,10 +164,11 @@ Every shortcode can access some variables, beyond what you explicitly passed as
|
|||
|
||||
- invocation count (`nth`)
|
||||
- current language (`lang`), unless called from the `markdown` template filter (in which case it will always be the same value as `default_language` in configuration, or `en` when it is unset)
|
||||
- `colocated_path`
|
||||
|
||||
When one of these variables conflict with a variable passed as argument, the argument value will be used.
|
||||
|
||||
### Invocation Count
|
||||
### `nth`: invocation count
|
||||
|
||||
Every shortcode context is passed in a variable named `nth` that tracks how many times a particular shortcode has
|
||||
been invoked in the current Markdown file. Given a shortcode `true_statement.html` template:
|
||||
|
@ -185,17 +186,30 @@ It could be used in our Markdown as follows:
|
|||
|
||||
This is useful when implementing custom markup for features such as sidenotes or end notes.
|
||||
|
||||
### Current language
|
||||
### `lang`: current language
|
||||
**NOTE:** When calling a shortcode from within the `markdown` template filter, the `lang` variable will always be `en`.
|
||||
If you feel like you need that, please consider using template macros instead.
|
||||
If you really need that, you can rewrite your Markdown content to pass `lang` as argument to the shortcode.
|
||||
|
||||
**NOTE:** When calling a shortcode from within the `markdown` template filter, the `lang` variable will always be `en`. If you feel like you need that, please consider using template macros instead. If you really need that, you can rewrite your Markdown content to pass `lang` as argument to the shortcode.
|
||||
|
||||
Every shortcode can access the current language in the `lang` variable in the context. This is useful for presenting/filtering information in a shortcode depending in a per-language manner. For example, to display a per-language book cover for the current page in a shortcode called `bookcover.md`:
|
||||
Every shortcode can access the current language in the `lang` variable in the context.
|
||||
This is useful for presenting/filtering information in a shortcode depending in a per-language manner. For example, to display a per-language book cover for the current page in a shortcode called `bookcover.md`:
|
||||
|
||||
```jinja2
|
||||
![Book cover in {{ lang }}](cover.{{ lang }}.png)
|
||||
```
|
||||
|
||||
You can then use it in your Markdown like so: `{{/* bookcover() */}}`
|
||||
### `page` or `section`
|
||||
You can access a slighty stripped down version of the equivalent variables in the normal templates.
|
||||
The only things missing are translations, backlinks and pages for sections as we are still in the middle of processing.
|
||||
|
||||
A useful attribute to `page` in shortcodes is `colocated_path`.
|
||||
This is used when you want to pass the name of some assets to shortcodes without repeating the full folders path.
|
||||
Mostly useful when combined with `load_data` or `resize_image`.
|
||||
|
||||
```jinja2
|
||||
{% set resized = resize_image(format="jpg", path=page.colocated_path ~ img_name, width=width, op="fit_width") %}
|
||||
<img alt="{{ alt }}" src="{{ resized.url | safe }}" />
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
|
|
|
@ -50,6 +50,9 @@ assets: Array<String>;
|
|||
ancestors: Array<String>;
|
||||
// The relative path from the `content` directory to the markdown file
|
||||
relative_path: String;
|
||||
// The relative path from the `content` directory to the directory of a colocated index.md markdown file
|
||||
// Null if the file is not colocated.
|
||||
colocated_path: String?;
|
||||
// The language for the page if there is one. Default to the config `default_language`
|
||||
lang: String;
|
||||
// Information about all the available languages for that content, including the current page
|
||||
|
|
BIN
docs/static/processed_images/01-zola.1a8b81cf026f45cb.png
vendored
Normal file
After Width: | Height: | Size: 54 KiB |
BIN
docs/static/processed_images/01-zola.415a7ae280b04f3a.png
vendored
Normal file
After Width: | Height: | Size: 24 KiB |
BIN
docs/static/processed_images/01-zola.88a4046ce11eac5c.png
vendored
Normal file
After Width: | Height: | Size: 24 KiB |
BIN
docs/static/processed_images/01-zola.8b09d4ac023bf17f.png
vendored
Normal file
After Width: | Height: | Size: 140 KiB |
BIN
docs/static/processed_images/01-zola.a7f6fb4842538499.png
vendored
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
docs/static/processed_images/01-zola.aa5c9741e1f54677.png
vendored
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
docs/static/processed_images/01-zola.c31346a8ceb47990.png
vendored
Normal file
After Width: | Height: | Size: 18 KiB |
BIN
docs/static/processed_images/02-zola-manet.f247a1c1a09dea92.png
vendored
Normal file
After Width: | Height: | Size: 90 KiB |
BIN
docs/static/processed_images/03-zola-cezanne.54f3edb977adbe2f.png
vendored
Normal file
After Width: | Height: | Size: 101 KiB |
BIN
docs/static/processed_images/04-gutenberg.6b23c36e66378f24.jpg
vendored
Normal file
After Width: | Height: | Size: 12 KiB |
BIN
docs/static/processed_images/05-example.67dc3b46cdb5d5d4.jpg
vendored
Normal file
After Width: | Height: | Size: 12 KiB |
BIN
docs/static/processed_images/06-example.2c54491c2daefe2f.jpg
vendored
Normal file
After Width: | Height: | Size: 18 KiB |
BIN
docs/static/processed_images/07-example.3143e7a66ae6fd02.jpg
vendored
Normal file
After Width: | Height: | Size: 4.9 KiB |
BIN
docs/static/processed_images/08-example.684e6a6497b4e859.jpg
vendored
Normal file
After Width: | Height: | Size: 14 KiB |
BIN
docs/static/processed_images/0b751f5aa0aeb49e00.png
vendored
Before Width: | Height: | Size: 29 KiB |
BIN
docs/static/processed_images/10743d39eadb4f5500.png
vendored
Before Width: | Height: | Size: 29 KiB |
BIN
docs/static/processed_images/1b3166afe3c05c8100.png
vendored
Before Width: | Height: | Size: 17 KiB |
BIN
docs/static/processed_images/347f63dafce8976a00.jpg
vendored
Before Width: | Height: | Size: 12 KiB |
BIN
docs/static/processed_images/42c8c04e2cbdedc000.png
vendored
Before Width: | Height: | Size: 23 KiB |
BIN
docs/static/processed_images/6398117c46046a4a00.png
vendored
Before Width: | Height: | Size: 134 KiB |
BIN
docs/static/processed_images/68cc2a54764edf4500.png
vendored
Before Width: | Height: | Size: 54 KiB |
BIN
docs/static/processed_images/6ba688a3fb0d202b00.jpg
vendored
Before Width: | Height: | Size: 4.9 KiB |
BIN
docs/static/processed_images/7f400da1e92ca8a400.jpg
vendored
Before Width: | Height: | Size: 14 KiB |
BIN
docs/static/processed_images/9df3b2448c50878a00.jpg
vendored
Before Width: | Height: | Size: 12 KiB |
BIN
docs/static/processed_images/d2f2746ebaaa9e6200.png
vendored
Before Width: | Height: | Size: 89 KiB |
BIN
docs/static/processed_images/ea6a03d035169dbd00.jpg
vendored
Before Width: | Height: | Size: 18 KiB |
BIN
docs/static/processed_images/fd56f23df12683fc00.png
vendored
Before Width: | Height: | Size: 100 KiB |
|
@ -36,10 +36,10 @@ use mime_guess::from_path as mimetype_from_path;
|
|||
use time::macros::format_description;
|
||||
use time::{OffsetDateTime, UtcOffset};
|
||||
|
||||
use libs::percent_encoding;
|
||||
use libs::serde_json;
|
||||
use libs::globset::GlobSet;
|
||||
use libs::percent_encoding;
|
||||
use libs::relative_path::{RelativePath, RelativePathBuf};
|
||||
use libs::serde_json;
|
||||
use notify::{watcher, RecursiveMode, Watcher};
|
||||
use ws::{Message, Sender, WebSocket};
|
||||
|
||||
|
@ -652,7 +652,6 @@ fn is_ignored_file(ignored_content_globset: &Option<GlobSet>, path: &Path) -> bo
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
/// Detect what changed from the given path so we have an idea what needs
|
||||
/// to be reloaded
|
||||
fn detect_change_kind(pwd: &Path, path: &Path, config_path: &Path) -> (ChangeKind, PathBuf) {
|
||||
|
|