Commit b7ce4e59fb (parent 8586bc1838) in https://github.com/getzola/zola

    rustfmt

52 changed files with 1418 additions and 1091 deletions
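The hunks below are a mechanical restyle from running rustfmt over the codebase: imports sorted and regrouped, trailing commas added to single-expression match arms, multi-statement arms wrapped in braced blocks, short struct literals and call chains collapsed onto one line, and over-width expressions split onto continuation lines. As a rough sketch of those rules in isolation (hypothetical `Taxonomy`/`describe` names modeled on the config hunk further down; the exact rustfmt version and options are not recorded in this diff):

    // Post-rustfmt shape, assuming default rustfmt settings.
    struct Taxonomy {
        name: String,
        paginate_by: Option<usize>,
    }

    fn describe(t: &Taxonomy) -> String {
        match t.paginate_by {
            // Multi-statement arm: braced block, closed with `}` and no trailing comma.
            Some(n) => {
                let pages = format!("{} pages per paginator page", n);
                format!("{}: {}", t.name, pages)
            }
            // Single-expression arm: trailing comma added.
            None => format!("{}: not paginated", t.name),
        }
    }

    fn main() {
        // Short struct literal collapsed onto one line, as in the Taxonomy hunk below.
        let t = Taxonomy { name: "tags".to_string(), paginate_by: Some(5) };
        println!("{}", describe(&t));
    }
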
build.rs (10 additions, 10 deletions; whitespace-only changes in this hunk)

@@ -7,9 +7,9 @@ include!("src/cli.rs");
 fn main() {
     // disabled below as it fails in CI
     // let mut app = build_cli();
     // app.gen_completions("zola", Shell::Bash, "completions/");
     // app.gen_completions("zola", Shell::Fish, "completions/");
     // app.gen_completions("zola", Shell::Zsh, "completions/");
     // app.gen_completions("zola", Shell::PowerShell, "completions/");
 }

@@ -3,10 +3,10 @@
 //! Although it is a valid example for serializing syntaxes, you probably won't need
 //! to do this yourself unless you want to cache your own compiled grammars.
 extern crate syntect;
-use syntect::parsing::SyntaxSetBuilder;
-use syntect::highlighting::ThemeSet;
-use syntect::dumps::*;
 use std::env;
+use syntect::dumps::*;
+use syntect::highlighting::ThemeSet;
+use syntect::parsing::SyntaxSetBuilder;

 fn usage_and_exit() -> ! {
     println!("USAGE: cargo run --example generate_sublime synpack source-dir newlines.packdump nonewlines.packdump\n
@@ -32,7 +32,7 @@ fn main() {
                     println!("- {} -> {:?}", s.name, s.file_extensions);
                 }
             }
-        },
+        }
         (Some(ref cmd), Some(ref theme_dir), Some(ref packpath)) if cmd == "themepack" => {
             let ts = ThemeSet::load_from_folder(theme_dir).unwrap();
             for path in ts.themes.keys() {

@@ -41,12 +41,7 @@ impl Taxonomy {

 impl Default for Taxonomy {
     fn default() -> Taxonomy {
-        Taxonomy {
-            name: String::new(),
-            paginate_by: None,
-            paginate_path: None,
-            rss: false,
-        }
+        Taxonomy { name: String::new(), paginate_by: None, paginate_path: None, rss: false }
     }
 }

@@ -137,19 +132,12 @@ impl Config {
         for pat in &config.ignored_content {
             let glob = match Glob::new(pat) {
                 Ok(g) => g,
-                Err(e) => bail!(
-                    "Invalid ignored_content glob pattern: {}, error = {}",
-                    pat,
-                    e
-                ),
+                Err(e) => bail!("Invalid ignored_content glob pattern: {}, error = {}", pat, e),
             };
             glob_set_builder.add(glob);
         }
-        config.ignored_content_globset = Some(
-            glob_set_builder
-                .build()
-                .expect("Bad ignored_content in config file."),
-        );
+        config.ignored_content_globset =
+            Some(glob_set_builder.build().expect("Bad ignored_content in config file."));
     }

     Ok(config)
@@ -162,10 +150,7 @@ impl Config {
         let file_name = path.file_name().unwrap();
         File::open(path)
             .chain_err(|| {
-                format!(
-                    "No `{:?}` file found. Are you in the right directory?",
-                    file_name
-                )
+                format!("No `{:?}` file found. Are you in the right directory?", file_name)
             })?
             .read_to_string(&mut content)?;

@@ -217,16 +202,12 @@ impl Config {
         let original = self.extra.clone();
         // 2. inject theme extra values
         for (key, val) in &theme.extra {
-            self.extra
-                .entry(key.to_string())
-                .or_insert_with(|| val.clone());
+            self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
         }

         // 3. overwrite with original config
         for (key, val) in &original {
-            self.extra
-                .entry(key.to_string())
-                .or_insert_with(|| val.clone());
+            self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
         }

         Ok(())
@@ -316,16 +297,7 @@ hello = "world"

         let config = Config::parse(config);
         assert!(config.is_ok());
-        assert_eq!(
-            config
-                .unwrap()
-                .extra
-                .get("hello")
-                .unwrap()
-                .as_str()
-                .unwrap(),
-            "world"
-        );
+        assert_eq!(config.unwrap().extra.get("hello").unwrap().as_str().unwrap(), "world");
     }

     #[test]
@@ -360,10 +332,7 @@ hello = "world"
     fn can_make_url_with_localhost() {
         let mut config = Config::default();
         config.base_url = "http://127.0.0.1:1111".to_string();
-        assert_eq!(
-            config.make_permalink("/tags/rust"),
-            "http://127.0.0.1:1111/tags/rust/"
-        );
+        assert_eq!(config.make_permalink("/tags/rust"), "http://127.0.0.1:1111/tags/rust/");
     }

     // https://github.com/Keats/gutenberg/issues/486

@@ -1,18 +1,18 @@
 use syntect::dumps::from_binary;
-use syntect::parsing::SyntaxSet;
-use syntect::highlighting::ThemeSet;
 use syntect::easy::HighlightLines;
+use syntect::highlighting::ThemeSet;
+use syntect::parsing::SyntaxSet;

 use Config;

-
 lazy_static! {
     pub static ref SYNTAX_SET: SyntaxSet = {
-        let ss: SyntaxSet = from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump"));
+        let ss: SyntaxSet =
+            from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump"));
         ss
     };
-    pub static ref THEME_SET: ThemeSet = from_binary(include_bytes!("../../../sublime_themes/all.themedump"));
+    pub static ref THEME_SET: ThemeSet =
+        from_binary(include_bytes!("../../../sublime_themes/all.themedump"));
 }

 /// Returns the highlighter and whether it was found in the extra or not
@@ -21,7 +21,8 @@ pub fn get_highlighter<'a>(info: &str, config: &Config) -> (HighlightLines<'a>,
     let mut in_extra = false;

     if let Some(ref lang) = info.split(' ').next() {
-        let syntax = SYNTAX_SET.find_syntax_by_token(lang)
+        let syntax = SYNTAX_SET
+            .find_syntax_by_token(lang)
             .or_else(|| {
                 if let Some(ref extra) = config.extra_syntax_set {
                     let s = extra.find_syntax_by_token(lang);

@@ -9,10 +9,9 @@ extern crate globset;
 extern crate lazy_static;
 extern crate syntect;

-
 mod config;
-mod theme;
 pub mod highlighting;
+mod theme;
 pub use config::{Config, Taxonomy};

 use std::path::Path;

@@ -7,7 +7,6 @@ use toml::Value as Toml;

 use errors::{Result, ResultExt};

-
 /// Holds the data from a `theme.toml` file.
 /// There are other fields than `extra` in it but Zola
 /// itself doesn't care about them.
@@ -36,7 +35,6 @@ impl Theme {
             bail!("Expected the `theme.toml` to be a TOML table")
         }

-
         Ok(Theme { extra })
     }

@@ -44,11 +42,11 @@ impl Theme {
     pub fn from_file(path: &PathBuf) -> Result<Theme> {
         let mut content = String::new();
         File::open(path)
-            .chain_err(||
+            .chain_err(|| {
                 "No `theme.toml` file found. \
                  Is the `theme` defined in your `config.toml present in the `themes` directory \
                  and does it have a `theme.toml` inside?"
-            )?
+            })?
             .read_to_string(&mut content)?;

         Theme::parse(&content)

@@ -2,10 +2,10 @@

 #[macro_use]
 extern crate error_chain;
-extern crate tera;
-extern crate toml;
 extern crate image;
 extern crate syntect;
+extern crate tera;
+extern crate toml;

 error_chain! {
     errors {}

@@ -2,18 +2,18 @@
 extern crate lazy_static;
 #[macro_use]
 extern crate serde_derive;
-extern crate serde;
-extern crate toml;
-extern crate regex;
-extern crate tera;
 extern crate chrono;
+extern crate regex;
+extern crate serde;
+extern crate tera;
+extern crate toml;

 #[macro_use]
 extern crate errors;

-use std::path::Path;
-use regex::Regex;
 use errors::{Result, ResultExt};
+use regex::Regex;
+use std::path::Path;

 mod page;
 mod section;
@@ -22,7 +22,8 @@ pub use page::PageFrontMatter;
 pub use section::SectionFrontMatter;

 lazy_static! {
-    static ref PAGE_RE: Regex = Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
+    static ref PAGE_RE: Regex =
+        Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
 }

 #[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
@@ -44,12 +45,14 @@ pub enum InsertAnchor {
     None,
 }


 /// Split a file between the front matter and its content
 /// Will return an error if the front matter wasn't found
 fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> {
     if !PAGE_RE.is_match(content) {
-        bail!("Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy());
+        bail!(
+            "Couldn't find front matter in `{}`. Did you forget to add `+++`?",
+            file_path.to_string_lossy()
+        );
     }

     // 2. extract the front matter and the content
@@ -62,10 +65,14 @@ fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> {

 /// Split a file between the front matter and its content.
 /// Returns a parsed `SectionFrontMatter` and the rest of the content
-pub fn split_section_content(file_path: &Path, content: &str) -> Result<(SectionFrontMatter, String)> {
+pub fn split_section_content(
+    file_path: &Path,
+    content: &str,
+) -> Result<(SectionFrontMatter, String)> {
     let (front_matter, content) = split_content(file_path, content)?;
-    let meta = SectionFrontMatter::parse(&front_matter)
-        .chain_err(|| format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()))?;
+    let meta = SectionFrontMatter::parse(&front_matter).chain_err(|| {
+        format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy())
+    })?;
     Ok((meta, content))
 }

@@ -73,8 +80,9 @@ pub fn split_section_content(
 /// Returns a parsed `PageFrontMatter` and the rest of the content
 pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> {
     let (front_matter, content) = split_content(file_path, content)?;
-    let meta = PageFrontMatter::parse(&front_matter)
-        .chain_err(|| format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()))?;
+    let meta = PageFrontMatter::parse(&front_matter).chain_err(|| {
+        format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy())
+    })?;
     Ok((meta, content))
 }

@@ -82,7 +90,7 @@ pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontM
 mod tests {
     use std::path::Path;

-    use super::{split_section_content, split_page_content};
+    use super::{split_page_content, split_section_content};

     #[test]
     fn can_split_page_content_valid() {

@@ -2,19 +2,17 @@ use std::collections::HashMap;
 use std::result::Result as StdResult;

 use chrono::prelude::*;
-use tera::{Map, Value};
 use serde::{Deserialize, Deserializer};
+use tera::{Map, Value};
 use toml;

 use errors::Result;

-
 fn from_toml_datetime<'de, D>(deserializer: D) -> StdResult<Option<String>, D::Error>
 where
     D: Deserializer<'de>,
 {
-    toml::value::Datetime::deserialize(deserializer)
-        .map(|s| Some(s.to_string()))
+    toml::value::Datetime::deserialize(deserializer).map(|s| Some(s.to_string()))
 }

 /// Returns key/value for a converted date from TOML.
@@ -36,7 +34,9 @@ fn convert_toml_date(table: Map<String, Value>) -> Value {
             }
             new.insert(k, convert_toml_date(o));
         }
-        _ => { new.insert(k, v); }
+        _ => {
+            new.insert(k, v);
+        }
     }
 }

@@ -53,14 +53,15 @@ fn fix_toml_dates(table: Map<String, Value>) -> Value {
             Value::Object(mut o) => {
                 new.insert(key, convert_toml_date(o));
             }
-            _ => { new.insert(key, value); }
+            _ => {
+                new.insert(key, value);
+            }
         }
     }

     Value::Object(new)
 }


 /// The front matter of every page
 #[derive(Debug, Clone, PartialEq, Deserialize)]
 #[serde(default)]
@@ -143,7 +144,9 @@ impl PageFrontMatter {
             if d.contains('T') {
                 DateTime::parse_from_rfc3339(&d).ok().and_then(|s| Some(s.naive_local()))
             } else {
-                NaiveDate::parse_from_str(&d, "%Y-%m-%d").ok().and_then(|s| Some(s.and_hms(0, 0, 0)))
+                NaiveDate::parse_from_str(&d, "%Y-%m-%d")
+                    .ok()
+                    .and_then(|s| Some(s.and_hms(0, 0, 0)))
             }
         } else {
             None
@@ -187,11 +190,10 @@ impl Default for PageFrontMatter {
     }
 }

-
 #[cfg(test)]
 mod tests {
-    use tera::to_value;
     use super::PageFrontMatter;
+    use tera::to_value;

     #[test]
     fn can_have_empty_front_matter() {
@@ -213,7 +215,6 @@ mod tests {
         assert_eq!(res.description.unwrap(), "hey there".to_string())
     }

-
     #[test]
     fn errors_with_invalid_front_matter() {
         let content = r#"title = 1\n"#;

@@ -5,11 +5,10 @@ use toml;

 use errors::Result;

-use super::{SortBy, InsertAnchor};
+use super::{InsertAnchor, SortBy};

 static DEFAULT_PAGINATE_PATH: &'static str = "page";

-
 /// The front matter of every section
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
 #[serde(default)]
@@ -69,7 +68,7 @@ impl SectionFrontMatter {
     pub fn is_paginated(&self) -> bool {
         match self.paginate_by {
             Some(v) => v > 0,
-            None => false
+            None => false,
         }
     }
 }

@@ -1,32 +1,32 @@
 #[macro_use]
 extern crate lazy_static;
-extern crate regex;
 extern crate image;
 extern crate rayon;
+extern crate regex;

-extern crate utils;
 extern crate errors;
+extern crate utils;

-use std::path::{Path, PathBuf};
-use std::hash::{Hash, Hasher};
-use std::collections::HashMap;
-use std::collections::hash_map::Entry as HEntry;
 use std::collections::hash_map::DefaultHasher;
+use std::collections::hash_map::Entry as HEntry;
+use std::collections::HashMap;
 use std::fs::{self, File};
+use std::hash::{Hash, Hasher};
+use std::path::{Path, PathBuf};

-use regex::Regex;
-use image::{FilterType, GenericImageView};
 use image::jpeg::JPEGEncoder;
+use image::{FilterType, GenericImageView};
 use rayon::prelude::*;
+use regex::Regex;

-use utils::fs as ufs;
 use errors::{Result, ResultExt};
+use utils::fs as ufs;

-
 static RESIZED_SUBDIR: &'static str = "processed_images";

 lazy_static! {
-    pub static ref RESIZED_FILENAME: Regex = Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.]jpg"#).unwrap();
+    pub static ref RESIZED_FILENAME: Regex =
+        Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.]jpg"#).unwrap();
 }

 /// Describes the precise kind of a resize operation
@@ -57,16 +57,22 @@ impl ResizeOp {

         // Validate args:
         match op {
-            "fit_width" => if width.is_none() {
-                return Err("op=\"fit_width\" requires a `width` argument".to_string().into());
-            },
-            "fit_height" => if height.is_none() {
-                return Err("op=\"fit_height\" requires a `height` argument".to_string().into());
-            },
-            "scale" | "fit" | "fill" => if width.is_none() || height.is_none() {
-                return Err(format!("op={} requires a `width` and `height` argument", op).into());
-            },
-            _ => return Err(format!("Invalid image resize operation: {}", op).into())
+            "fit_width" => {
+                if width.is_none() {
+                    return Err("op=\"fit_width\" requires a `width` argument".to_string().into());
+                }
+            }
+            "fit_height" => {
+                if height.is_none() {
+                    return Err("op=\"fit_height\" requires a `height` argument".to_string().into());
+                }
+            }
+            "scale" | "fit" | "fill" => {
+                if width.is_none() || height.is_none() {
+                    return Err(format!("op={} requires a `width` and `height` argument", op).into());
+                }
+            }
+            _ => return Err(format!("Invalid image resize operation: {}", op).into()),
         };

         Ok(match op {
@@ -121,8 +127,12 @@ impl From<ResizeOp> for u8 {
 impl Hash for ResizeOp {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         hasher.write_u8(u8::from(*self));
-        if let Some(w) = self.width() { hasher.write_u32(w); }
-        if let Some(h) = self.height() { hasher.write_u32(h); }
+        if let Some(w) = self.width() {
+            hasher.write_u32(w);
+        }
+        if let Some(h) = self.height() {
+            hasher.write_u32(h);
+        }
     }
 }

@@ -207,8 +217,7 @@ impl ImageOp {
                     ((img_w - crop_w) / 2, 0)
                 };

-                img.crop(offset_w, offset_h, crop_w, crop_h)
-                    .resize_exact(w, h, RESIZE_FILTER)
+                img.crop(offset_w, offset_h, crop_w, crop_h).resize_exact(w, h, RESIZE_FILTER)
             }
         }
     };
@@ -221,7 +230,6 @@ impl ImageOp {
     }
 }

-
 /// A strcture into which image operations can be enqueued and then performed.
 /// All output is written in a subdirectory in `static_path`,
 /// taking care of file stale status based on timestamps and possible hash collisions.
@@ -271,7 +279,11 @@ impl Processor {

     fn insert_with_collisions(&mut self, mut img_op: ImageOp) -> u32 {
         match self.img_ops.entry(img_op.hash) {
-            HEntry::Occupied(entry) => if *entry.get() == img_op { return 0; },
+            HEntry::Occupied(entry) => {
+                if *entry.get() == img_op {
+                    return 0;
+                }
+            }
            HEntry::Vacant(entry) => {
                entry.insert(img_op);
                return 0;
@@ -341,9 +353,8 @@ impl Processor {
                 let filename = entry_path.file_name().unwrap().to_string_lossy();
                 if let Some(capts) = RESIZED_FILENAME.captures(filename.as_ref()) {
                     let hash = u64::from_str_radix(capts.get(1).unwrap().as_str(), 16).unwrap();
-                    let collision_id = u32::from_str_radix(
-                        capts.get(2).unwrap().as_str(), 16,
-                    ).unwrap();
+                    let collision_id =
+                        u32::from_str_radix(capts.get(2).unwrap().as_str(), 16).unwrap();

                     if collision_id > 0 || !self.img_ops.contains_key(&hash) {
                         fs::remove_file(&entry_path)?;
@@ -359,24 +370,28 @@ impl Processor {
             ufs::ensure_directory_exists(&self.resized_path)?;
         }

-        self.img_ops.par_iter().map(|(hash, op)| {
-            let target = self.resized_path.join(Self::op_filename(*hash, op.collision_id));
-            op.perform(&self.content_path, &target)
-                .chain_err(|| format!("Failed to process image: {}", op.source))
-        }).collect::<Result<()>>()
+        self.img_ops
+            .par_iter()
+            .map(|(hash, op)| {
+                let target = self.resized_path.join(Self::op_filename(*hash, op.collision_id));
+                op.perform(&self.content_path, &target)
+                    .chain_err(|| format!("Failed to process image: {}", op.source))
+            })
+            .collect::<Result<()>>()
     }
 }

-
 /// Looks at file's extension and returns whether it's a supported image format
 pub fn file_is_img<P: AsRef<Path>>(p: P) -> bool {
-    p.as_ref().extension().and_then(|s| s.to_str()).map(|ext| {
-        match ext.to_lowercase().as_str() {
+    p.as_ref()
+        .extension()
+        .and_then(|s| s.to_str())
+        .map(|ext| match ext.to_lowercase().as_str() {
             "jpg" | "jpeg" => true,
             "png" => true,
             "gif" => true,
             "bmp" => true,
             _ => false,
-        }
-    }).unwrap_or(false)
+        })
+        .unwrap_or(false)
 }

@@ -114,7 +114,8 @@ mod tests {

     #[test]
     fn can_find_content_components() {
-        let res = find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md");
+        let res =
+            find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md");
         assert_eq!(res, ["posts".to_string(), "tutorials".to_string()]);
     }
 }

@@ -2,19 +2,19 @@
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};

-use tera::{Tera, Context as TeraContext};
-use slug::slugify;
-use slotmap::{Key};
 use regex::Regex;
+use slotmap::Key;
+use slug::slugify;
+use tera::{Context as TeraContext, Tera};

-use errors::{Result, ResultExt};
 use config::Config;
-use utils::fs::{read_file, find_related_assets};
+use errors::{Result, ResultExt};
+use front_matter::{split_page_content, InsertAnchor, PageFrontMatter};
+use library::Library;
+use rendering::{render_content, Header, RenderContext};
+use utils::fs::{find_related_assets, read_file};
 use utils::site::get_reading_analytics;
 use utils::templates::render_template;
-use front_matter::{PageFrontMatter, InsertAnchor, split_page_content};
-use rendering::{RenderContext, Header, render_content};
-use library::Library;

 use content::file_info::FileInfo;
 use content::ser::SerializingPage;
@@ -24,7 +24,6 @@ lazy_static! {
     static ref DATE_IN_FILENAME: Regex = Regex::new(r"^^([12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]))(_|-)").unwrap();
 }

-
 #[derive(Clone, Debug, PartialEq)]
 pub struct Page {
     /// All info about the actual file
@@ -71,7 +70,6 @@ pub struct Page {
     pub reading_time: Option<usize>,
 }

-
 impl Page {
     pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter) -> Page {
         let file_path = file_path.as_ref();
@@ -155,7 +153,9 @@ impl Page {
             page.path = format!("{}/", page.path);
         }

-        page.components = page.path.split('/')
+        page.components = page
+            .path
+            .split('/')
             .map(|p| p.to_string())
             .filter(|p| !p.is_empty())
             .collect::<Vec<_>>();
@@ -182,13 +182,13 @@ impl Page {
             // against the remaining path. Note that the current behaviour effectively means that
             // the `ignored_content` setting in the config file is limited to single-file glob
             // patterns (no "**" patterns).
-            page.assets = assets.into_iter()
-                .filter(|path|
-                    match path.file_name() {
-                        None => true,
-                        Some(file) => !globset.is_match(file)
-                    }
-                ).collect();
+            page.assets = assets
+                .into_iter()
+                .filter(|path| match path.file_name() {
+                    None => true,
+                    Some(file) => !globset.is_match(file),
+                })
+                .collect();
         } else {
             page.assets = assets;
         }
@@ -210,13 +210,8 @@ impl Page {
         config: &Config,
         anchor_insert: InsertAnchor,
     ) -> Result<()> {
-        let mut context = RenderContext::new(
-            tera,
-            config,
-            &self.permalink,
-            permalinks,
-            anchor_insert,
-        );
+        let mut context =
+            RenderContext::new(tera, config, &self.permalink, permalinks, anchor_insert);

         context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None));

@@ -234,7 +229,7 @@ impl Page {
     pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
         let tpl_name = match self.meta.template {
             Some(ref l) => l.to_string(),
-            None => "page.html".to_string()
+            None => "page.html".to_string(),
         };

         let mut context = TeraContext::new();
@@ -249,7 +244,8 @@ impl Page {

     /// Creates a vectors of asset URLs.
     fn serialize_assets(&self) -> Vec<String> {
-        self.assets.iter()
+        self.assets
+            .iter()
             .filter_map(|asset| asset.file_name())
             .filter_map(|filename| filename.to_str())
             .map(|filename| self.path.clone() + filename)
@@ -294,19 +290,18 @@ impl Default for Page {
 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;
+    use std::fs::{create_dir, File};
     use std::io::Write;
-    use std::fs::{File, create_dir};
     use std::path::Path;

-    use tera::Tera;
-    use tempfile::tempdir;
     use globset::{Glob, GlobSetBuilder};
+    use tempfile::tempdir;
+    use tera::Tera;

-    use config::Config;
     use super::Page;
+    use config::Config;
     use front_matter::InsertAnchor;

-
     #[test]
     fn test_can_parse_a_valid_page() {
         let content = r#"
@@ -324,7 +319,8 @@ Hello world"#;
             &Tera::default(),
             &Config::default(),
             InsertAnchor::None,
-        ).unwrap();
+        )
+        .unwrap();

         assert_eq!(page.meta.title.unwrap(), "Hello".to_string());
         assert_eq!(page.meta.slug.unwrap(), "hello-world".to_string());
@@ -426,16 +422,13 @@ Hello world"#;
 +++
 +++
 Hello world
-<!-- more -->"#.to_string();
+<!-- more -->"#
+            .to_string();
         let res = Page::parse(Path::new("hello.md"), &content, &config);
         assert!(res.is_ok());
         let mut page = res.unwrap();
-        page.render_markdown(
-            &HashMap::default(),
-            &Tera::default(),
-            &config,
-            InsertAnchor::None,
-        ).unwrap();
+        page.render_markdown(&HashMap::default(), &Tera::default(), &config, InsertAnchor::None)
+            .unwrap();

         assert_eq!(page.summary, Some("<p>Hello world</p>\n".to_string()));
     }
@@ -453,10 +446,7 @@ Hello world
         File::create(nested_path.join("graph.jpg")).unwrap();
         File::create(nested_path.join("fail.png")).unwrap();

-        let res = Page::from_file(
-            nested_path.join("index.md").as_path(),
-            &Config::default(),
-        );
+        let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();
         assert_eq!(page.file.parent, path.join("content").join("posts"));
@@ -479,10 +469,7 @@ Hello world
         File::create(nested_path.join("graph.jpg")).unwrap();
         File::create(nested_path.join("fail.png")).unwrap();

-        let res = Page::from_file(
-            nested_path.join("index.md").as_path(),
-            &Config::default(),
-        );
+        let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();
         assert_eq!(page.file.parent, path.join("content").join("posts"));
@@ -510,10 +497,7 @@ Hello world
         let mut config = Config::default();
         config.ignored_content_globset = Some(gsb.build().unwrap());

-        let res = Page::from_file(
-            nested_path.join("index.md").as_path(),
-            &config,
-        );
+        let res = Page::from_file(nested_path.join("index.md").as_path(), &config);

         assert!(res.is_ok());
         let page = res.unwrap();
@@ -528,7 +512,8 @@ Hello world
 +++
 +++
 Hello world
-<!-- more -->"#.to_string();
+<!-- more -->"#
+            .to_string();
         let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config);
         assert!(res.is_ok());
         let page = res.unwrap();
@@ -539,14 +524,14 @@ Hello world

     #[test]
     fn frontmatter_date_override_filename_date() {
-
         let config = Config::default();
         let content = r#"
 +++
 date = 2018-09-09
 +++
 Hello world
-<!-- more -->"#.to_string();
+<!-- more -->"#
+            .to_string();
         let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config);
         assert!(res.is_ok());
         let page = res.unwrap();

@@ -1,22 +1,21 @@
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};

-use tera::{Tera, Context as TeraContext};
 use slotmap::Key;
+use tera::{Context as TeraContext, Tera};

 use config::Config;
-use front_matter::{SectionFrontMatter, split_section_content};
 use errors::{Result, ResultExt};
-use utils::fs::{read_file, find_related_assets};
-use utils::templates::render_template;
+use front_matter::{split_section_content, SectionFrontMatter};
+use rendering::{render_content, Header, RenderContext};
+use utils::fs::{find_related_assets, read_file};
 use utils::site::get_reading_analytics;
-use rendering::{RenderContext, Header, render_content};
+use utils::templates::render_template;

 use content::file_info::FileInfo;
 use content::ser::SerializingSection;
 use library::Library;

-
 #[derive(Clone, Debug, PartialEq)]
 pub struct Section {
     /// All info about the actual file
@@ -86,7 +85,9 @@ impl Section {
         section.word_count = Some(word_count);
         section.reading_time = Some(reading_time);
         section.path = format!("{}/", section.file.components.join("/"));
-        section.components = section.path.split('/')
+        section.components = section
+            .path
+            .split('/')
             .map(|p| p.to_string())
             .filter(|p| !p.is_empty())
             .collect::<Vec<_>>();
@@ -111,13 +112,13 @@ impl Section {
             // against the remaining path. Note that the current behaviour effectively means that
             // the `ignored_content` setting in the config file is limited to single-file glob
             // patterns (no "**" patterns).
-            section.assets = assets.into_iter()
-                .filter(|path|
-                    match path.file_name() {
-                        None => true,
-                        Some(file) => !globset.is_match(file)
-                    }
-                ).collect();
+            section.assets = assets
+                .into_iter()
+                .filter(|path| match path.file_name() {
+                    None => true,
+                    Some(file) => !globset.is_match(file),
+                })
+                .collect();
         } else {
             section.assets = assets;
         }
@@ -185,7 +186,8 @@ impl Section {

     /// Creates a vectors of asset URLs.
     fn serialize_assets(&self) -> Vec<String> {
-        self.assets.iter()
+        self.assets
+            .iter()
             .filter_map(|asset| asset.file_name())
             .filter_map(|filename| filename.to_str())
             .map(|filename| self.path.clone() + filename)
@@ -227,14 +229,14 @@ impl Default for Section {

 #[cfg(test)]
 mod tests {
+    use std::fs::{create_dir, File};
     use std::io::Write;
-    use std::fs::{File, create_dir};

-    use tempfile::tempdir;
     use globset::{Glob, GlobSetBuilder};
+    use tempfile::tempdir;

-    use config::Config;
     use super::Section;
+    use config::Config;

     #[test]
     fn section_with_assets_gets_right_info() {
@@ -250,10 +252,7 @@ mod tests {
         File::create(nested_path.join("graph.jpg")).unwrap();
         File::create(nested_path.join("fail.png")).unwrap();

-        let res = Section::from_file(
-            nested_path.join("_index.md").as_path(),
-            &Config::default(),
-        );
+        let res = Section::from_file(nested_path.join("_index.md").as_path(), &Config::default());
         assert!(res.is_ok());
         let section = res.unwrap();
         assert_eq!(section.assets.len(), 3);
@@ -279,10 +278,7 @@ mod tests {
         let mut config = Config::default();
         config.ignored_content_globset = Some(gsb.build().unwrap());

-        let res = Section::from_file(
-            nested_path.join("_index.md").as_path(),
-            &config,
-        );
+        let res = Section::from_file(nested_path.join("_index.md").as_path(), &config);

         assert!(res.is_ok());
         let page = res.unwrap();

@@ -1,13 +1,12 @@
 //! What we are sending to the templates when rendering them
 use std::collections::HashMap;

-use tera::{Value, Map};
+use tera::{Map, Value};

-use library::Library;
 use content::{Page, Section};
+use library::Library;
 use rendering::Header;

-
 #[derive(Clone, Debug, PartialEq, Serialize)]
 pub struct SerializingPage<'a> {
     relative_path: &'a str,
@@ -49,11 +48,23 @@ impl<'a> SerializingPage<'a> {
             day = Some(d.2);
         }
         let pages = library.pages();
-        let lighter = page.lighter.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
-        let heavier = page.heavier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
-        let earlier = page.earlier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
-        let later = page.later.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
-        let ancestors = page.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
+        let lighter = page
+            .lighter
+            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let heavier = page
+            .heavier
+            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let earlier = page
+            .earlier
+            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let later = page
+            .later
+            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let ancestors = page
+            .ancestors
+            .iter()
+            .map(|k| library.get_section_by_key(*k).file.relative.clone())
+            .collect();

         SerializingPage {
             relative_path: &page.file.relative,
@@ -95,7 +106,10 @@ impl<'a> SerializingPage<'a> {
             day = Some(d.2);
         }
         let ancestors = if let Some(ref lib) = library {
-            page.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
+            page.ancestors
+                .iter()
+                .map(|k| lib.get_section_by_key(*k).file.relative.clone())
+                .collect()
         } else {
             vec![]
         };
@@ -130,7 +144,6 @@ impl<'a> SerializingPage<'a> {
     }
 }

-
 #[derive(Clone, Debug, PartialEq, Serialize)]
 pub struct SerializingSection<'a> {
     relative_path: &'a str,
@@ -145,7 +158,7 @@ pub struct SerializingSection<'a> {
     word_count: Option<usize>,
     reading_time: Option<usize>,
     toc: &'a [Header],
     assets: &'a [String],
     pages: Vec<SerializingPage<'a>>,
     subsections: Vec<&'a str>,
 }
@@ -163,7 +176,11 @@ impl<'a> SerializingSection<'a> {
             subsections.push(library.get_section_path_by_key(*k));
         }

-        let ancestors = section.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
+        let ancestors = section
+            .ancestors
+            .iter()
+            .map(|k| library.get_section_by_key(*k).file.relative.clone())
+            .collect();

         SerializingSection {
             relative_path: &section.file.relative,
@@ -187,7 +204,11 @@ impl<'a> SerializingSection<'a> {
     /// Same as from_section but doesn't fetch pages and sections
     pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self {
         let ancestors = if let Some(ref lib) = library {
-            section.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
+            section
+                .ancestors
+                .iter()
+                .map(|k| lib.get_section_by_key(*k).file.relative.clone())
+                .collect()
         } else {
             vec![]
         };

@@ -1,39 +1,39 @@
-extern crate tera;
-extern crate slug;
 extern crate serde;
+extern crate slug;
+extern crate tera;
 #[macro_use]
 extern crate serde_derive;
 extern crate chrono;
-extern crate slotmap;
 extern crate rayon;
+extern crate slotmap;
 #[macro_use]
 extern crate lazy_static;
 extern crate regex;

+#[cfg(test)]
+extern crate globset;
 #[cfg(test)]
 extern crate tempfile;
 #[cfg(test)]
 extern crate toml;
-#[cfg(test)]
-extern crate globset;

-extern crate front_matter;
 extern crate config;
-extern crate utils;
+extern crate front_matter;
 extern crate rendering;
+extern crate utils;
 #[macro_use]
 extern crate errors;

 mod content;
-mod taxonomies;
+mod library;
 mod pagination;
 mod sorting;
-mod library;
+mod taxonomies;

-pub use slotmap::{Key, DenseSlotMap};
+pub use slotmap::{DenseSlotMap, Key};

-pub use sorting::sort_actual_pages_by_date;
-pub use content::{Page, SerializingPage, Section, SerializingSection};
+pub use content::{Page, Section, SerializingPage, SerializingSection};
 pub use library::Library;
-pub use taxonomies::{Taxonomy, TaxonomyItem, find_taxonomies};
 pub use pagination::Paginator;
+pub use sorting::sort_actual_pages_by_date;
+pub use taxonomies::{find_taxonomies, Taxonomy, TaxonomyItem};

@@ -5,9 +5,8 @@ use slotmap::{DenseSlotMap, Key};

 use front_matter::SortBy;

-use sorting::{find_siblings, sort_pages_by_weight, sort_pages_by_date};
 use content::{Page, Section};
+use sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight};

-
 /// Houses everything about pages and sections
 /// Think of it as a database where each page and section has an id (Key here)
@@ -81,12 +80,13 @@ impl Library {
     /// Find out the direct subsections of each subsection if there are some
     /// as well as the pages for each section
     pub fn populate_sections(&mut self) {
-        let (root_path, index_path) = self.sections
+        let (root_path, index_path) = self
+            .sections
             .values()
             .find(|s| s.is_index())
             .map(|s| (s.file.parent.clone(), s.file.path.clone()))
             .unwrap();
         let root_key = self.paths_to_sections[&index_path];

         // We are going to get both the ancestors and grandparents for each section in one go
         let mut ancestors: HashMap<PathBuf, Vec<_>> = HashMap::new();
@@ -130,7 +130,8 @@ impl Library {
             let parent_section_path = page.file.parent.join("_index.md");
             if let Some(section_key) = self.paths_to_sections.get(&parent_section_path) {
                 self.sections.get_mut(*section_key).unwrap().pages.push(key);
-                page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]);
+                page.ancestors =
+                    ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]);
                 // Don't forget to push the actual parent
                 page.ancestors.push(*section_key);
             }
@@ -150,7 +151,8 @@ impl Library {
                 children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
                 section.subsections = children;
             }
-            section.ancestors = ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]);
+            section.ancestors =
+                ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]);
         }
     }

@@ -161,7 +163,8 @@ impl Library {
             let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by {
                 SortBy::None => continue,
                 SortBy::Date => {
-                    let data = section.pages
+                    let data = section
+                        .pages
                         .iter()
                         .map(|k| {
                             if let Some(page) = self.pages.get(*k) {
@@ -173,9 +176,10 @@ impl Library {
                         .collect();

                     sort_pages_by_date(data)
-                },
+                }
                 SortBy::Weight => {
-                    let data = section.pages
+                    let data = section
+                        .pages
                         .iter()
                         .map(|k| {
                             if let Some(page) = self.pages.get(*k) {
@@ -194,13 +198,18 @@ impl Library {

         for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
             // Find sibling between sorted pages first
-            let with_siblings = find_siblings(sorted.iter().map(|k| {
-                if let Some(page) = self.pages.get(*k) {
-                    (k, page.is_draft())
-                } else {
-                    unreachable!("Sorting got an unknown page")
-                }
-            }).collect());
+            let with_siblings = find_siblings(
+                sorted
+                    .iter()
+                    .map(|k| {
+                        if let Some(page) = self.pages.get(*k) {
+                            (k, page.is_draft())
+                        } else {
+                            unreachable!("Sorting got an unknown page")
+                        }
+                    })
+                    .collect(),
+            );

             for (k2, val1, val2) in with_siblings {
                 if let Some(page) = self.pages.get_mut(k2) {
@@ -208,12 +217,12 @@ impl Library {
                         SortBy::Date => {
                             page.earlier = val2;
                             page.later = val1;
-                        },
+                        }
                         SortBy::Weight => {
                             page.lighter = val1;
                             page.heavier = val2;
-                        },
-                        SortBy::None => unreachable!("Impossible to find siblings in SortBy::None")
+                        }
+                        SortBy::None => unreachable!("Impossible to find siblings in SortBy::None"),
                     }
                 } else {
                     unreachable!("Sorting got an unknown page")
@@ -229,10 +238,8 @@ impl Library {

     /// Find all the orphan pages: pages that are in a folder without an `_index.md`
     pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
-        let pages_in_sections = self.sections
-            .values()
-            .flat_map(|s| &s.pages)
-            .collect::<HashSet<_>>();
+        let pages_in_sections =
+            self.sections.values().flat_map(|s| &s.pages).collect::<HashSet<_>>();
|
|
||||||
self.pages
|
self.pages
|
||||||
.iter()
|
.iter()
|
||||||
|
@ -245,7 +252,7 @@ impl Library {
|
||||||
let page_key = self.paths_to_pages[path];
|
let page_key = self.paths_to_pages[path];
|
||||||
for s in self.sections.values() {
|
for s in self.sections.values() {
|
||||||
if s.pages.contains(&page_key) {
|
if s.pages.contains(&page_key) {
|
||||||
return Some(s)
|
return Some(s);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -1,16 +1,15 @@
 use std::collections::HashMap;
 
-use tera::{Tera, Context, to_value, Value};
-use slotmap::{Key};
+use slotmap::Key;
+use tera::{to_value, Context, Tera, Value};
 
-use errors::{Result, ResultExt};
 use config::Config;
+use errors::{Result, ResultExt};
 use utils::templates::render_template;
 
-use content::{Section, SerializingSection, SerializingPage};
-use taxonomies::{TaxonomyItem, Taxonomy};
+use content::{Section, SerializingPage, SerializingSection};
 use library::Library;
+use taxonomies::{Taxonomy, TaxonomyItem};
 
 #[derive(Clone, Debug, PartialEq)]
 enum PaginationRoot<'a> {
@@ -18,7 +17,6 @@ enum PaginationRoot<'a> {
     Taxonomy(&'a Taxonomy),
 }
 
-
 /// A list of all the pages in the paginator with their index and links
 #[derive(Clone, Debug, PartialEq, Serialize)]
 pub struct Pager<'a> {
@@ -33,13 +31,13 @@ pub struct Pager<'a> {
 }
 
 impl<'a> Pager<'a> {
-    fn new(index: usize, pages: Vec<SerializingPage<'a>>, permalink: String, path: String) -> Pager<'a> {
-        Pager {
-            index,
-            permalink,
-            path,
-            pages,
-        }
+    fn new(
+        index: usize,
+        pages: Vec<SerializingPage<'a>>,
+        permalink: String,
+        path: String,
+    ) -> Pager<'a> {
+        Pager { index, permalink, path, pages }
     }
 }
@@ -83,7 +81,11 @@ impl<'a> Paginator<'a> {
 
     /// Create a new paginator from a taxonomy
     /// It will always at least create one pager (the first) even if there are not enough pages to paginate
-    pub fn from_taxonomy(taxonomy: &'a Taxonomy, item: &'a TaxonomyItem, library: &'a Library) -> Paginator<'a> {
+    pub fn from_taxonomy(
+        taxonomy: &'a Taxonomy,
+        item: &'a TaxonomyItem,
+        library: &'a Library,
+    ) -> Paginator<'a> {
         let paginate_by = taxonomy.kind.paginate_by.unwrap();
         let mut paginator = Paginator {
             all_pages: &item.pages,
@@ -92,7 +94,11 @@ impl<'a> Paginator<'a> {
             root: PaginationRoot::Taxonomy(taxonomy),
             permalink: item.permalink.clone(),
             path: format!("{}/{}", taxonomy.kind.name, item.slug),
-            paginate_path: taxonomy.kind.paginate_path.clone().unwrap_or_else(|| "pages".to_string()),
+            paginate_path: taxonomy
+                .kind
+                .paginate_path
+                .clone()
+                .unwrap_or_else(|| "pages".to_string()),
             is_index: false,
         };
 
@@ -142,12 +148,7 @@ impl<'a> Paginator<'a> {
                 format!("{}/{}", self.path, page_path)
             };
 
-            pagers.push(Pager::new(
-                index + 1,
-                page,
-                permalink,
-                pager_path,
-            ));
+            pagers.push(Pager::new(index + 1, page, permalink, pager_path));
         }
 
         // We always have the index one at least
@@ -184,19 +185,29 @@ impl<'a> Paginator<'a> {
             paginator.insert("next", Value::Null);
         }
         paginator.insert("number_pagers", to_value(&self.pagers.len()).unwrap());
-        paginator.insert("base_url", to_value(&format!("{}{}/", self.permalink, self.paginate_path)).unwrap());
+        paginator.insert(
+            "base_url",
+            to_value(&format!("{}{}/", self.permalink, self.paginate_path)).unwrap(),
+        );
         paginator.insert("pages", to_value(&current_pager.pages).unwrap());
         paginator.insert("current_index", to_value(current_pager.index).unwrap());
 
         paginator
     }
 
-    pub fn render_pager(&self, pager: &Pager, config: &Config, tera: &Tera, library: &Library) -> Result<String> {
+    pub fn render_pager(
+        &self,
+        pager: &Pager,
+        config: &Config,
+        tera: &Tera,
+        library: &Library,
+    ) -> Result<String> {
         let mut context = Context::new();
         context.insert("config", &config);
         let template_name = match self.root {
             PaginationRoot::Section(s) => {
-                context.insert("section", &SerializingSection::from_section_basic(s, Some(library)));
+                context
+                    .insert("section", &SerializingSection::from_section_basic(s, Some(library)));
                 s.get_template_name()
             }
             PaginationRoot::Taxonomy(t) => {
@@ -217,11 +228,11 @@ impl<'a> Paginator<'a> {
 mod tests {
     use tera::to_value;
 
-    use front_matter::SectionFrontMatter;
-    use content::{Page, Section};
     use config::Taxonomy as TaxonomyConfig;
-    use taxonomies::{Taxonomy, TaxonomyItem};
+    use content::{Page, Section};
+    use front_matter::SectionFrontMatter;
     use library::Library;
+    use taxonomies::{Taxonomy, TaxonomyItem};
 
     use super::Paginator;
 
@@ -1,8 +1,8 @@
 use std::cmp::Ordering;
 
+use chrono::NaiveDateTime;
 use rayon::prelude::*;
 use slotmap::Key;
-use chrono::NaiveDateTime;
 
 use content::Page;
 
@@ -21,19 +21,17 @@ pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
 /// Pages without date will be put in the unsortable bucket
 /// The permalink is used to break ties
 pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, &str)>) -> (Vec<Key>, Vec<Key>) {
-    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages
-        .into_par_iter()
-        .partition(|page| page.1.is_some());
+    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
+        pages.into_par_iter().partition(|page| page.1.is_some());
 
-    can_be_sorted
-        .par_sort_unstable_by(|a, b| {
-            let ord = b.1.unwrap().cmp(&a.1.unwrap());
-            if ord == Ordering::Equal {
-                a.2.cmp(&b.2)
-            } else {
-                ord
-            }
-        });
+    can_be_sorted.par_sort_unstable_by(|a, b| {
+        let ord = b.1.unwrap().cmp(&a.1.unwrap());
+        if ord == Ordering::Equal {
+            a.2.cmp(&b.2)
+        } else {
+            ord
+        }
+    });
 
     (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
 }
@@ -42,19 +40,17 @@ pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, &str)>) -> (V
 /// Pages without weight will be put in the unsortable bucket
 /// The permalink is used to break ties
 pub fn sort_pages_by_weight(pages: Vec<(&Key, Option<usize>, &str)>) -> (Vec<Key>, Vec<Key>) {
-    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages
-        .into_par_iter()
-        .partition(|page| page.1.is_some());
+    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
+        pages.into_par_iter().partition(|page| page.1.is_some());
 
-    can_be_sorted
-        .par_sort_unstable_by(|a, b| {
-            let ord = a.1.unwrap().cmp(&b.1.unwrap());
-            if ord == Ordering::Equal {
-                a.2.cmp(&b.2)
-            } else {
-                ord
-            }
-        });
+    can_be_sorted.par_sort_unstable_by(|a, b| {
+        let ord = a.1.unwrap().cmp(&b.1.unwrap());
+        if ord == Ordering::Equal {
+            a.2.cmp(&b.2)
+        } else {
+            ord
+        }
+    });
 
     (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
 }
@@ -118,9 +114,9 @@ pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option
 mod tests {
     use slotmap::DenseSlotMap;
 
-    use front_matter::{PageFrontMatter};
+    use super::{find_siblings, sort_pages_by_date, sort_pages_by_weight};
     use content::Page;
-    use super::{sort_pages_by_date, sort_pages_by_weight, find_siblings};
+    use front_matter::PageFrontMatter;
 
     fn create_page_with_date(date: &str) -> Page {
         let mut front_matter = PageFrontMatter::default();
@@ -179,7 +175,6 @@ mod tests {
         assert_eq!(pages[2], key2);
     }
 
-
     #[test]
     fn ignore_page_with_missing_field() {
         let mut dense = DenseSlotMap::new();
@@ -196,7 +191,7 @@ mod tests {
             (&key3, page3.meta.weight, page3.permalink.as_ref()),
         ];
 
-        let (pages,unsorted) = sort_pages_by_weight(input);
+        let (pages, unsorted) = sort_pages_by_weight(input);
         assert_eq!(pages.len(), 2);
         assert_eq!(unsorted.len(), 1);
     }
@@ -211,11 +206,8 @@ mod tests {
         let page3 = create_page_with_weight(3);
         let key3 = dense.insert(page3.clone());
 
-        let input = vec![
-            (&key1, page1.is_draft()),
-            (&key2, page2.is_draft()),
-            (&key3, page3.is_draft()),
-        ];
+        let input =
+            vec![(&key1, page1.is_draft()), (&key2, page2.is_draft()), (&key3, page3.is_draft())];
 
         let pages = find_siblings(input);
 
@@ -1,16 +1,16 @@
 use std::collections::HashMap;
 
+use slotmap::Key;
 use slug::slugify;
 use tera::{Context, Tera};
-use slotmap::{Key};
 
 use config::{Config, Taxonomy as TaxonomyConfig};
 use errors::{Result, ResultExt};
 use utils::templates::render_template;
 
 use content::SerializingPage;
-use sorting::sort_pages_by_date;
 use library::Library;
+use sorting::sort_pages_by_date;
 
 #[derive(Debug, Clone, PartialEq, Serialize)]
 struct SerializedTaxonomyItem<'a> {
@@ -34,7 +34,6 @@ impl<'a> SerializedTaxonomyItem<'a> {
             slug: &item.slug,
             permalink: &item.permalink,
             pages,
-
         }
     }
 }
@@ -70,12 +69,7 @@ impl TaxonomyItem {
         // We still append pages without dates at the end
         pages.extend(ignored_pages);
 
-        TaxonomyItem {
-            name: name.to_string(),
-            permalink,
-            slug,
-            pages,
-        }
+        TaxonomyItem { name: name.to_string(), permalink, slug, pages }
     }
 }
 
@@ -87,11 +81,9 @@ pub struct SerializedTaxonomy<'a> {
 
 impl<'a> SerializedTaxonomy<'a> {
     pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self {
-        let items: Vec<SerializedTaxonomyItem> = taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
-        SerializedTaxonomy {
-            kind: &taxonomy.kind,
-            items,
-        }
+        let items: Vec<SerializedTaxonomyItem> =
+            taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
+        SerializedTaxonomy { kind: &taxonomy.kind, items }
     }
 }
 
@@ -104,19 +96,19 @@ pub struct Taxonomy {
 }
 
 impl Taxonomy {
-    fn new(kind: TaxonomyConfig, config: &Config, items: HashMap<String, Vec<Key>>, library: &Library) -> Taxonomy {
+    fn new(
+        kind: TaxonomyConfig,
+        config: &Config,
+        items: HashMap<String, Vec<Key>>,
+        library: &Library,
+    ) -> Taxonomy {
         let mut sorted_items = vec![];
         for (name, pages) in items {
-            sorted_items.push(
-                TaxonomyItem::new(&name, &kind.name, config, pages, library)
-            );
+            sorted_items.push(TaxonomyItem::new(&name, &kind.name, config, pages, library));
         }
         sorted_items.sort_by(|a, b| a.name.cmp(&b.name));
 
-        Taxonomy {
-            kind,
-            items: sorted_items,
-        }
+        Taxonomy { kind, items: sorted_items }
     }
 
     pub fn len(&self) -> usize {
@@ -127,22 +119,37 @@ impl Taxonomy {
         self.len() == 0
     }
 
-    pub fn render_term(&self, item: &TaxonomyItem, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
+    pub fn render_term(
+        &self,
+        item: &TaxonomyItem,
+        tera: &Tera,
+        config: &Config,
+        library: &Library,
+    ) -> Result<String> {
         let mut context = Context::new();
         context.insert("config", config);
         context.insert("term", &SerializedTaxonomyItem::from_item(item, library));
         context.insert("taxonomy", &self.kind);
-        context.insert("current_url", &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)));
+        context.insert(
+            "current_url",
+            &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)),
+        );
         context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug));
 
         render_template(&format!("{}/single.html", self.kind.name), tera, &context, &config.theme)
             .chain_err(|| format!("Failed to render single term {} page.", self.kind.name))
     }
 
-    pub fn render_all_terms(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
+    pub fn render_all_terms(
+        &self,
+        tera: &Tera,
+        config: &Config,
+        library: &Library,
+    ) -> Result<String> {
         let mut context = Context::new();
         context.insert("config", config);
-        let terms: Vec<SerializedTaxonomyItem> = self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
+        let terms: Vec<SerializedTaxonomyItem> =
+            self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
         context.insert("terms", &terms);
         context.insert("taxonomy", &self.kind);
         context.insert("current_url", &config.make_permalink(&self.kind.name));
@@ -175,19 +182,22 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonom
 
         for (name, val) in &page.meta.taxonomies {
             if taxonomies_def.contains_key(name) {
-                all_taxonomies
-                    .entry(name)
-                    .or_insert_with(HashMap::new);
+                all_taxonomies.entry(name).or_insert_with(HashMap::new);
 
                 for v in val {
-                    all_taxonomies.get_mut(name)
+                    all_taxonomies
+                        .get_mut(name)
                         .unwrap()
                         .entry(v.to_string())
                         .or_insert_with(|| vec![])
                         .push(key);
                 }
             } else {
-                bail!("Page `{}` has taxonomy `{}` which is not defined in config.toml", page.file.path.display(), name);
+                bail!(
+                    "Page `{}` has taxonomy `{}` which is not defined in config.toml",
+                    page.file.path.display(),
+                    name
+                );
            }
         }
     }
@@ -201,7 +211,6 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonom
     Ok(taxonomies)
 }
 
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -284,7 +293,10 @@ mod tests {
 
         assert_eq!(categories.items[1].name, "Programming tutorials");
         assert_eq!(categories.items[1].slug, "programming-tutorials");
-        assert_eq!(categories.items[1].permalink, "http://a-website.com/categories/programming-tutorials/");
+        assert_eq!(
+            categories.items[1].permalink,
+            "http://a-website.com/categories/programming-tutorials/"
+        );
         assert_eq!(categories.items[1].pages.len(), 1);
     }
 
@@ -293,9 +305,8 @@ mod tests {
         let mut config = Config::default();
         let mut library = Library::new(2, 0);
 
-        config.taxonomies = vec![
-            TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
-        ];
+        config.taxonomies =
+            vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }];
         let mut page1 = Page::default();
         let mut taxo_page1 = HashMap::new();
         taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
@@ -306,6 +317,9 @@ mod tests {
         assert!(taxonomies.is_err());
         let err = taxonomies.unwrap_err();
         // no path as this is created by Default
-        assert_eq!(err.description(), "Page `` has taxonomy `tags` which is not defined in config.toml");
+        assert_eq!(
+            err.description(),
+            "Page `` has taxonomy `tags` which is not defined in config.toml"
+        );
     }
 }

@@ -3,7 +3,7 @@ extern crate reqwest;
 extern crate lazy_static;
 
 use reqwest::header::{HeaderMap, ACCEPT};
-use reqwest::{StatusCode};
+use reqwest::StatusCode;
 use std::collections::HashMap;
 use std::error::Error;
 use std::sync::{Arc, RwLock};
@@ -62,14 +62,8 @@ pub fn check_url(url: &str) -> LinkResult {
 
     // Need to actually do the link checking
     let res = match client.get(url).headers(headers).send() {
-        Ok(response) => LinkResult {
-            code: Some(response.status()),
-            error: None,
-        },
-        Err(e) => LinkResult {
-            code: None,
-            error: Some(e.description().to_string()),
-        },
+        Ok(response) => LinkResult { code: Some(response.status()), error: None },
+        Err(e) => LinkResult { code: None, error: Some(e.description().to_string()) },
     };
 
     LINKS.write().unwrap().insert(url.to_string(), res.clone());

@@ -1,16 +1,15 @@
 extern crate site;
 #[macro_use]
 extern crate errors;
-extern crate library;
 extern crate front_matter;
+extern crate library;
 
-use std::path::{Path, Component};
+use std::path::{Component, Path};
 
 use errors::Result;
-use site::Site;
-use library::{Page, Section};
 use front_matter::{PageFrontMatter, SectionFrontMatter};
+use library::{Page, Section};
+use site::Site;
 
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub enum PageChangesNeeded {
@@ -37,7 +36,10 @@ pub enum SectionChangesNeeded {
 /// Evaluates all the params in the front matter that changed so we can do the smallest
 /// delta in the serve command
 /// Order matters as the actions will be done in insertion order
-fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &SectionFrontMatter) -> Vec<SectionChangesNeeded> {
+fn find_section_front_matter_changes(
+    current: &SectionFrontMatter,
+    new: &SectionFrontMatter,
+) -> Vec<SectionChangesNeeded> {
     let mut changes_needed = vec![];
 
     if current.sort_by != new.sort_by {
@@ -54,7 +56,8 @@ fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &Section
 
     if current.paginate_by != new.paginate_by
         || current.paginate_path != new.paginate_path
-        || current.insert_anchor_links != new.insert_anchor_links {
+        || current.insert_anchor_links != new.insert_anchor_links
+    {
         changes_needed.push(SectionChangesNeeded::RenderWithPages);
         // Nothing else we can do
         return changes_needed;
@@ -68,14 +71,18 @@ fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &Section
 /// Evaluates all the params in the front matter that changed so we can do the smallest
 /// delta in the serve command
 /// Order matters as the actions will be done in insertion order
-fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMatter) -> Vec<PageChangesNeeded> {
+fn find_page_front_matter_changes(
+    current: &PageFrontMatter,
+    other: &PageFrontMatter,
+) -> Vec<PageChangesNeeded> {
     let mut changes_needed = vec![];
 
     if current.taxonomies != other.taxonomies {
         changes_needed.push(PageChangesNeeded::Taxonomies);
     }
 
-    if current.date != other.date || current.order != other.order || current.weight != other.weight {
+    if current.date != other.date || current.order != other.order || current.weight != other.weight
+    {
         changes_needed.push(PageChangesNeeded::Sort);
     }
 
@@ -86,7 +93,9 @@ fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMa
 /// Handles a path deletion: could be a page, a section, a folder
 fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> {
     // Ignore the event if this path was not known
-    if !site.library.contains_section(&path.to_path_buf()) && !site.library.contains_page(&path.to_path_buf()) {
+    if !site.library.contains_section(&path.to_path_buf())
+        && !site.library.contains_page(&path.to_path_buf())
+    {
         return Ok(());
     }
 
@@ -127,14 +136,21 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
     }
 
     // Front matter changed
-    for changes in find_section_front_matter_changes(&site.library.get_section(&pathbuf).unwrap().meta, &prev.meta) {
+    for changes in find_section_front_matter_changes(
+        &site.library.get_section(&pathbuf).unwrap().meta,
+        &prev.meta,
+    ) {
         // Sort always comes first if present so the rendering will be fine
         match changes {
             SectionChangesNeeded::Sort => {
                 site.register_tera_global_fns();
             }
-            SectionChangesNeeded::Render => site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)?,
-            SectionChangesNeeded::RenderWithPages => site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)?,
+            SectionChangesNeeded::Render => {
+                site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)?
+            }
+            SectionChangesNeeded::RenderWithPages => {
+                site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)?
+            }
             // not a common enough operation to make it worth optimizing
             SectionChangesNeeded::Delete => {
                 site.build()?;
@@ -157,7 +173,7 @@ macro_rules! render_parent_section {
         if let Some(s) = $site.library.find_parent_section($path) {
             $site.render_section(s, false)?;
         };
-    }
+    };
 }
 
 /// Handles a page being edited in some ways
@@ -181,7 +197,10 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
     }
 
     // Front matter changed
-    for changes in find_page_front_matter_changes(&site.library.get_page(&pathbuf).unwrap().meta, &prev.meta) {
+    for changes in find_page_front_matter_changes(
+        &site.library.get_page(&pathbuf).unwrap().meta,
+        &prev.meta,
+    ) {
         site.register_tera_global_fns();
 
         // Sort always comes first if present so the rendering will be fine
@@ -213,7 +232,6 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
     }
 }
 
-
 /// What happens when a section or a page is changed
 pub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> {
     let is_section = path.file_name().unwrap() == "_index.md";
@@ -294,16 +312,15 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> {
     }
 }
 
-
 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;
 
-    use front_matter::{PageFrontMatter, SectionFrontMatter, SortBy};
     use super::{
-        find_page_front_matter_changes, find_section_front_matter_changes,
-        PageChangesNeeded, SectionChangesNeeded,
+        find_page_front_matter_changes, find_section_front_matter_changes, PageChangesNeeded,
+        SectionChangesNeeded,
     };
+    use front_matter::{PageFrontMatter, SectionFrontMatter, SortBy};
 
     #[test]
     fn can_find_taxonomy_changes_in_page_frontmatter() {
@@ -320,7 +337,10 @@ mod tests {
         taxonomies.insert("categories".to_string(), vec!["a category".to_string()]);
         let current = PageFrontMatter { taxonomies, order: Some(1), ..PageFrontMatter::default() };
         let changes = find_page_front_matter_changes(&current, &PageFrontMatter::default());
-        assert_eq!(changes, vec![PageChangesNeeded::Taxonomies, PageChangesNeeded::Sort, PageChangesNeeded::Render]);
+        assert_eq!(
+            changes,
+            vec![PageChangesNeeded::Taxonomies, PageChangesNeeded::Sort, PageChangesNeeded::Render]
+        );
     }
 
     #[test]

@@ -1,89 +1,88 @@
+extern crate fs_extra;
 extern crate rebuild;
 extern crate site;
 extern crate tempfile;
-extern crate fs_extra;
 
 use std::env;
 use std::fs::{remove_dir_all, File};
 use std::io::prelude::*;
 
 use fs_extra::dir;
-use tempfile::tempdir;
 use site::Site;
+use tempfile::tempdir;
 
 use rebuild::after_content_change;
 
 // Loads the test_site in a tempdir and build it there
 // Returns (site_path_in_tempdir, site)
 macro_rules! load_and_build_site {
-    ($tmp_dir: expr) => {
-        {
-            let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
-            path.push("test_site");
-            let mut options = dir::CopyOptions::new();
-            options.copy_inside = true;
-            dir::copy(&path, &$tmp_dir, &options).unwrap();
-
-            let site_path = $tmp_dir.path().join("test_site");
-            // delete useless sections for those tests
-            remove_dir_all(site_path.join("content").join("paginated")).unwrap();
-            remove_dir_all(site_path.join("content").join("posts")).unwrap();
-
-            let mut site = Site::new(&site_path, "config.toml").unwrap();
-            site.load().unwrap();
-            let public = &site_path.join("public");
-            site.set_output_path(&public);
-            site.build().unwrap();
-
-            (site_path, site)
-        }
-    }
+    ($tmp_dir: expr) => {{
+        let mut path =
+            env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
+        path.push("test_site");
+        let mut options = dir::CopyOptions::new();
+        options.copy_inside = true;
+        dir::copy(&path, &$tmp_dir, &options).unwrap();
+
+        let site_path = $tmp_dir.path().join("test_site");
+        // delete useless sections for those tests
+        remove_dir_all(site_path.join("content").join("paginated")).unwrap();
+        remove_dir_all(site_path.join("content").join("posts")).unwrap();
+
+        let mut site = Site::new(&site_path, "config.toml").unwrap();
+        site.load().unwrap();
+        let public = &site_path.join("public");
+        site.set_output_path(&public);
+        site.build().unwrap();
+
+        (site_path, site)
+    }};
 }
 
 /// Replace the file at the path (starting from root) by the given content
 /// and return the file path that was modified
 macro_rules! edit_file {
-    ($site_path: expr, $path: expr, $content: expr) => {
-        {
-            let mut t = $site_path.clone();
-            for c in $path.split('/') {
-                t.push(c);
-            }
-            let mut file = File::create(&t).expect("Could not open/create file");
-            file.write_all($content).expect("Could not write to the file");
-            t
-        }
-    }
+    ($site_path: expr, $path: expr, $content: expr) => {{
+        let mut t = $site_path.clone();
+        for c in $path.split('/') {
+            t.push(c);
+        }
+        let mut file = File::create(&t).expect("Could not open/create file");
+        file.write_all($content).expect("Could not write to the file");
+        t
+    }};
 }
 
 macro_rules! file_contains {
-    ($site_path: expr, $path: expr, $text: expr) => {
-        {
-            let mut path = $site_path.clone();
-            for component in $path.split("/") {
-                path.push(component);
-            }
-            let mut file = File::open(&path).unwrap();
-            let mut s = String::new();
-            file.read_to_string(&mut s).unwrap();
-            println!("{:?} -> {}", path, s);
-            s.contains($text)
-        }
-    }
+    ($site_path: expr, $path: expr, $text: expr) => {{
+        let mut path = $site_path.clone();
+        for component in $path.split("/") {
+            path.push(component);
+        }
+        let mut file = File::open(&path).unwrap();
+        let mut s = String::new();
+        file.read_to_string(&mut s).unwrap();
+        println!("{:?} -> {}", path, s);
+        s.contains($text)
+    }};
 }
 
 #[test]
 fn can_rebuild_after_simple_change_to_page_content() {
     let tmp_dir = tempdir().expect("create temp dir");
     let (site_path, mut site) = load_and_build_site!(tmp_dir);
-    let file_path = edit_file!(site_path, "content/rebuild/first.md", br#"
+    let file_path = edit_file!(
+        site_path,
+        "content/rebuild/first.md",
+        br#"
 +++
 title = "first"
 weight = 1
 date = 2017-01-01
 +++
 
-Some content"#);
+Some content"#
+    );
 
     let res = after_content_change(&mut site, &file_path);
     assert!(res.is_ok());
@@ -94,14 +93,18 @@ Some content"#);
 fn can_rebuild_after_title_change_page_global_func_usage() {
     let tmp_dir = tempdir().expect("create temp dir");
     let (site_path, mut site) = load_and_build_site!(tmp_dir);
-    let file_path = edit_file!(site_path, "content/rebuild/first.md", br#"
+    let file_path = edit_file!(
+        site_path,
+        "content/rebuild/first.md",
+        br#"
 +++
 title = "Premier"
 weight = 10
 date = 2017-01-01
 +++
 
-# A title"#);
+# A title"#
+    );
 
     let res = after_content_change(&mut site, &file_path);
     assert!(res.is_ok());
@@ -112,15 +115,23 @@ date = 2017-01-01
 fn can_rebuild_after_sort_change_in_section() {
     let tmp_dir = tempdir().expect("create temp dir");
     let (site_path, mut site) = load_and_build_site!(tmp_dir);
-    let file_path = edit_file!(site_path, "content/rebuild/_index.md", br#"
+    let file_path = edit_file!(
+        site_path,
+        "content/rebuild/_index.md",
+        br#"
 +++
 paginate_by = 1
 sort_by = "weight"
 template = "rebuild.html"
 +++
-"#);
+"#
+    );
 
     let res = after_content_change(&mut site, &file_path);
     assert!(res.is_ok());
-    assert!(file_contains!(site_path, "public/rebuild/index.html", "<h1>first</h1><h1>second</h1>"));
+    assert!(file_contains!(
+        site_path,
+        "public/rebuild/index.html",
+        "<h1>first</h1><h1>second</h1>"
+    ));
 }

@@ -1,18 +1,18 @@
 #![feature(test)]
-extern crate test;
 extern crate tera;
+extern crate test;
 
-extern crate rendering;
 extern crate config;
 extern crate front_matter;
+extern crate rendering;
 
 use std::collections::HashMap;
 use std::path::Path;
 
-use tera::Tera;
-use rendering::{RenderContext, render_content, render_shortcodes};
-use front_matter::InsertAnchor;
 use config::Config;
+use front_matter::InsertAnchor;
+use rendering::{render_content, render_shortcodes, RenderContext};
+use tera::Tera;
 
 static CONTENT: &'static str = r#"
 # Modus cognitius profanam ne duae virtutis mundi
@@ -92,7 +92,8 @@ fn bench_render_content_with_highlighting(b: &mut test::Bencher) {
     tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap();
     let permalinks_ctx = HashMap::new();
     let config = Config::default();
-    let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None);
+    let context =
+        RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None);
     b.iter(|| render_content(CONTENT, &context).unwrap());
 }
 
@@ -103,7 +104,8 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) {
     let permalinks_ctx = HashMap::new();
     let mut config = Config::default();
     config.highlight_code = false;
-    let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None);
+    let context =
+        RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None);
     b.iter(|| render_content(CONTENT, &context).unwrap());
 }
 
@@ -114,7 +116,8 @@ fn bench_render_content_no_shortcode(b: &mut test::Bencher) {
     let mut config = Config::default();
     config.highlight_code = false;
     let permalinks_ctx = HashMap::new();
-    let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None);
+    let context =
+        RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None);
 
     b.iter(|| render_content(&content2, &context).unwrap());
 }
@@ -125,8 +128,8 @@ fn bench_render_shortcodes_one_present(b: &mut test::Bencher) {
     tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap();
     let config = Config::default();
     let permalinks_ctx = HashMap::new();
-    let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None);
+    let context =
+        RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None);
 
     b.iter(|| render_shortcodes(CONTENT, &context));
 }

@@ -1,9 +1,8 @@
 use std::collections::HashMap;
 
-use tera::{Tera, Context};
-use front_matter::InsertAnchor;
 use config::Config;
+use front_matter::InsertAnchor;
+use tera::{Context, Tera};
 
 /// All the information from the zola site that is needed to render HTML from markdown
 #[derive(Debug)]

@@ -1,35 +1,35 @@
-extern crate tera;
-extern crate syntect;
 extern crate pulldown_cmark;
 extern crate slug;
+extern crate syntect;
+extern crate tera;
 #[macro_use]
 extern crate serde_derive;
-extern crate serde;
 extern crate pest;
+extern crate serde;
 #[macro_use]
 extern crate pest_derive;
 
 #[macro_use]
 extern crate errors;
-extern crate front_matter;
-extern crate utils;
 extern crate config;
+extern crate front_matter;
 extern crate link_checker;
+extern crate utils;
 
 #[cfg(test)]
 extern crate templates;
 
 mod context;
 mod markdown;
-mod table_of_contents;
 mod shortcode;
+mod table_of_contents;
 
 use errors::Result;
 
-use markdown::markdown_to_html;
-pub use table_of_contents::Header;
-pub use shortcode::render_shortcodes;
 pub use context::RenderContext;
+use markdown::markdown_to_html;
+pub use shortcode::render_shortcodes;
+pub use table_of_contents::Header;
 
 pub fn render_content(content: &str, context: &RenderContext) -> Result<markdown::Rendered> {
     // Don't do anything if there is nothing like a shortcode in the content

@@ -1,18 +1,20 @@
-use std::borrow::Cow::{Owned, Borrowed};
+use std::borrow::Cow::{Borrowed, Owned};
 
+use self::cmark::{Event, Options, Parser, Tag, OPTION_ENABLE_FOOTNOTES, OPTION_ENABLE_TABLES};
 use pulldown_cmark as cmark;
-use self::cmark::{Parser, Event, Tag, Options, OPTION_ENABLE_TABLES, OPTION_ENABLE_FOOTNOTES};
 use slug::slugify;
 use syntect::easy::HighlightLines;
-use syntect::html::{start_highlighted_html_snippet, styled_line_to_highlighted_html, IncludeBackground};
+use syntect::html::{
+    start_highlighted_html_snippet, styled_line_to_highlighted_html, IncludeBackground,
+};
 
+use config::highlighting::{get_highlighter, SYNTAX_SET, THEME_SET};
 use errors::Result;
-use utils::site::resolve_internal_link;
-use config::highlighting::{get_highlighter, THEME_SET, SYNTAX_SET};
 use link_checker::check_url;
+use utils::site::resolve_internal_link;
 
-use table_of_contents::{TempHeader, Header, make_table_of_contents};
 use context::RenderContext;
+use table_of_contents::{make_table_of_contents, Header, TempHeader};
 
 const CONTINUE_READING: &str = "<p><a name=\"continue-reading\"></a></p>\n";
 
@@ -113,7 +115,8 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
                         let theme = &THEME_SET.themes[&context.config.highlight_theme];
                         highlighter = Some(get_highlighter(info, &context.config));
                         // This selects the background color the same way that start_coloured_html_snippet does
-                        let color = theme.settings.background.unwrap_or(::syntect::highlighting::Color::WHITE);
+                        let color =
+                            theme.settings.background.unwrap_or(::syntect::highlighting::Color::WHITE);
                         background = IncludeBackground::IfDifferent(color);
                         let snippet = start_highlighted_html_snippet(theme);
                         Event::Html(Owned(snippet.0))
@@ -128,12 +131,10 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
                 }
                 Event::Start(Tag::Image(src, title)) => {
                     if is_colocated_asset_link(&src) {
-                        return Event::Start(
-                            Tag::Image(
-                                Owned(format!("{}{}", context.current_page_permalink, src)),
-                                title,
-                            )
-                        );
+                        return Event::Start(Tag::Image(
+                            Owned(format!("{}{}", context.current_page_permalink, src)),
+                            title,
+                        ));
                     }
 
                     Event::Start(Tag::Image(src, title))
@@ -157,13 +158,14 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
                             format!("{}{}", context.current_page_permalink, link)
                         } else if context.config.check_external_links
                             && !link.starts_with('#')
-                            && !link.starts_with("mailto:") {
+                            && !link.starts_with("mailto:")
+                        {
                             let res = check_url(&link);
                             if res.is_valid() {
                                 link.to_string()
                             } else {
                                 error = Some(
-                                    format!("Link {} is not valid: {}", link, res.message()).into()
+                                    format!("Link {} is not valid: {}", link, res.message()).into(),
                                 );
                                 String::new()
                             }

@@ -1,9 +1,9 @@
-use pest::Parser;
 use pest::iterators::Pair;
-use tera::{Map, Context, Value, to_value};
+use pest::Parser;
+use tera::{to_value, Context, Map, Value};

+use context::RenderContext;
 use errors::{Result, ResultExt};
-use ::context::RenderContext;

 // This include forces recompiling this source file if the grammar file changes.
 // Uncomment it when doing changes to the .pest file
@@ -13,7 +13,6 @@ const _GRAMMAR: &str = include_str!("content.pest");
 #[grammar = "content.pest"]
 pub struct ContentParser;

-
 fn replace_string_markers(input: &str) -> String {
     match input.chars().next().unwrap() {
         '"' => input.replace('"', "").to_string(),
@@ -39,7 +38,7 @@ fn parse_literal(pair: Pair<Rule>) -> Value {
             Rule::int => {
                 val = Some(to_value(p.as_str().parse::<i64>().unwrap()).unwrap());
             }
-            _ => unreachable!("Unknown literal: {:?}", p)
+            _ => unreachable!("Unknown literal: {:?}", p),
         };
     }

@@ -53,20 +52,29 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) {

    for p in pair.into_inner() {
        match p.as_rule() {
-            Rule::ident => { name = Some(p.into_span().as_str().to_string()); }
+            Rule::ident => {
+                name = Some(p.into_span().as_str().to_string());
+            }
            Rule::kwarg => {
                let mut arg_name = None;
                let mut arg_val = None;
                for p2 in p.into_inner() {
                    match p2.as_rule() {
-                        Rule::ident => { arg_name = Some(p2.into_span().as_str().to_string()); }
-                        Rule::literal => { arg_val = Some(parse_literal(p2)); }
+                        Rule::ident => {
+                            arg_name = Some(p2.into_span().as_str().to_string());
+                        }
+                        Rule::literal => {
+                            arg_val = Some(parse_literal(p2));
+                        }
                        Rule::array => {
                            let mut vals = vec![];
                            for p3 in p2.into_inner() {
                                match p3.as_rule() {
                                    Rule::literal => vals.push(parse_literal(p3)),
-                                    _ => unreachable!("Got something other than literal in an array: {:?}", p3),
+                                    _ => unreachable!(
+                                        "Got something other than literal in an array: {:?}",
+                                        p3
+                                    ),
                                }
                            }
                            arg_val = Some(Value::Array(vals));
@@ -77,14 +85,18 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) {

                args.insert(arg_name.unwrap(), arg_val.unwrap());
            }
-            _ => unreachable!("Got something unexpected in a shortcode: {:?}", p)
+            _ => unreachable!("Got something unexpected in a shortcode: {:?}", p),
        }
    }
    (name.unwrap(), args)
 }

-fn render_shortcode(name: &str, args: &Map<String, Value>, context: &RenderContext, body: Option<&str>) -> Result<String> {
+fn render_shortcode(
+    name: &str,
+    args: &Map<String, Value>,
+    context: &RenderContext,
+    body: Option<&str>,
+) -> Result<String> {
    let mut tera_context = Context::new();
    for (key, value) in args.iter() {
        tera_context.insert(key, value);
@@ -96,7 +108,8 @@ fn render_shortcode(name: &str, args: &Map<String, Value>, context: &RenderContext, body: Option<&str>) -> Result<String> {
    tera_context.extend(context.tera_context.clone());
    let tpl_name = format!("shortcodes/{}.html", name);

-    let res = context.tera
+    let res = context
+        .tera
        .render(&tpl_name, &tera_context)
        .chain_err(|| format!("Failed to render {} shortcode", name))?;

@@ -109,38 +122,36 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> {
    let mut pairs = match ContentParser::parse(Rule::page, content) {
        Ok(p) => p,
        Err(e) => {
-            let fancy_e = e.renamed_rules(|rule| {
-                match *rule {
+            let fancy_e = e.renamed_rules(|rule| match *rule {
                Rule::int => "an integer".to_string(),
                Rule::float => "a float".to_string(),
                Rule::string => "a string".to_string(),
                Rule::literal => "a literal (int, float, string, bool)".to_string(),
                Rule::array => "an array".to_string(),
                Rule::kwarg => "a keyword argument".to_string(),
                Rule::ident => "an identifier".to_string(),
                Rule::inline_shortcode => "an inline shortcode".to_string(),
                Rule::ignored_inline_shortcode => "an ignored inline shortcode".to_string(),
                Rule::sc_body_start => "the start of a shortcode".to_string(),
                Rule::ignored_sc_body_start => "the start of an ignored shortcode".to_string(),
                Rule::text => "some text".to_string(),
                Rule::EOI => "end of input".to_string(),
                Rule::double_quoted_string => "double quoted string".to_string(),
                Rule::single_quoted_string => "single quoted string".to_string(),
                Rule::backquoted_quoted_string => "backquoted quoted string".to_string(),
                Rule::boolean => "a boolean (true, false)".to_string(),
                Rule::all_chars => "a alphanumerical character".to_string(),
                Rule::kwargs => "a list of keyword arguments".to_string(),
                Rule::sc_def => "a shortcode definition".to_string(),
                Rule::shortcode_with_body => "a shortcode with body".to_string(),
                Rule::ignored_shortcode_with_body => "an ignored shortcode with body".to_string(),
                Rule::sc_body_end => "{% end %}".to_string(),
                Rule::ignored_sc_body_end => "{%/* end */%}".to_string(),
                Rule::text_in_body_sc => "text in a shortcode body".to_string(),
                Rule::text_in_ignored_body_sc => "text in an ignored shortcode body".to_string(),
                Rule::content => "some content".to_string(),
                Rule::page => "a page".to_string(),
                Rule::WHITESPACE => "whitespace".to_string(),
-                }
            });
            bail!("{}", fancy_e);
        }
@@ -164,9 +175,7 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> {
            }
            Rule::ignored_inline_shortcode => {
                res.push_str(
-                    &p.into_span().as_str()
-                        .replacen("{{/*", "{{", 1)
-                        .replacen("*/}}", "}}", 1)
+                    &p.into_span().as_str().replacen("{{/*", "{{", 1).replacen("*/}}", "}}", 1),
                );
            }
            Rule::ignored_shortcode_with_body => {
@@ -174,16 +183,17 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> {
                    match p2.as_rule() {
                        Rule::ignored_sc_body_start | Rule::ignored_sc_body_end => {
                            res.push_str(
-                                &p2.into_span().as_str()
+                                &p2.into_span()
+                                    .as_str()
                                    .replacen("{%/*", "{%", 1)
-                                    .replacen("*/%}", "%}", 1)
+                                    .replacen("*/%}", "%}", 1),
                            );
                        }
                        Rule::text_in_ignored_body_sc => res.push_str(p2.into_span().as_str()),
                        _ => unreachable!("Got something weird in an ignored shortcode: {:?}", p2),
                    }
                }
-            },
+            }
            Rule::EOI => (),
            _ => unreachable!("unexpected page rule: {:?}", p.as_rule()),
        }
@@ -196,10 +206,10 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> {
 mod tests {
    use std::collections::HashMap;

-    use tera::Tera;
+    use super::*;
    use config::Config;
    use front_matter::InsertAnchor;
-    use super::*;
+    use tera::Tera;

    macro_rules! assert_lex_rule {
        ($rule: expr, $input: expr) => {
@@ -297,7 +307,7 @@ mod tests {
 {% hello() %}
 Body {{ var }}
 {% end %}
-            "#
+            "#,
        ];
        for i in inputs {
            assert_lex_rule!(Rule::page, i);
@@ -318,19 +328,25 @@ mod tests {

    #[test]
    fn can_unignore_shortcode_with_body() {
-        let res = render_shortcodes(r#"
+        let res = render_shortcodes(
+            r#"
 Hello World
-{%/* youtube() */%}Some body {{ hello() }}{%/* end */%}"#, &Tera::default());
+{%/* youtube() */%}Some body {{ hello() }}{%/* end */%}"#,
+            &Tera::default(),
+        );
        assert_eq!(res, "\nHello World\n{% youtube() %}Some body {{ hello() }}{% end %}");
    }

    // https://github.com/Keats/gutenberg/issues/383
    #[test]
    fn unignore_shortcode_with_body_does_not_swallow_initial_whitespace() {
-        let res = render_shortcodes(r#"
+        let res = render_shortcodes(
+            r#"
 Hello World
 {%/* youtube() */%}
-Some body {{ hello() }}{%/* end */%}"#, &Tera::default());
+Some body {{ hello() }}{%/* end */%}"#,
+            &Tera::default(),
+        );
        assert_eq!(res, "\nHello World\n{% youtube() %}\nSome body {{ hello() }}{% end %}");
    }

@@ -338,28 +354,20 @@ Some body {{ hello() }}{%/* end */%}"#, &Tera::default());
    fn can_parse_shortcode_arguments() {
        let inputs = vec![
            ("{{ youtube() }}", "youtube", Map::new()),
-            (
-                "{{ youtube(id=1, autoplay=true, hello='salut', float=1.2) }}",
-                "youtube",
-                {
-                    let mut m = Map::new();
-                    m.insert("id".to_string(), to_value(1).unwrap());
-                    m.insert("autoplay".to_string(), to_value(true).unwrap());
-                    m.insert("hello".to_string(), to_value("salut").unwrap());
-                    m.insert("float".to_string(), to_value(1.2).unwrap());
-                    m
-                }
-            ),
-            (
-                "{{ gallery(photos=['something', 'else'], fullscreen=true) }}",
-                "gallery",
-                {
-                    let mut m = Map::new();
-                    m.insert("photos".to_string(), to_value(["something", "else"]).unwrap());
-                    m.insert("fullscreen".to_string(), to_value(true).unwrap());
-                    m
-                }
-            ),
+            ("{{ youtube(id=1, autoplay=true, hello='salut', float=1.2) }}", "youtube", {
+                let mut m = Map::new();
+                m.insert("id".to_string(), to_value(1).unwrap());
+                m.insert("autoplay".to_string(), to_value(true).unwrap());
+                m.insert("hello".to_string(), to_value("salut").unwrap());
+                m.insert("float".to_string(), to_value(1.2).unwrap());
+                m
+            }),
+            ("{{ gallery(photos=['something', 'else'], fullscreen=true) }}", "gallery", {
+                let mut m = Map::new();
+                m.insert("photos".to_string(), to_value(["something", "else"]).unwrap());
+                m.insert("fullscreen".to_string(), to_value(true).unwrap());
+                m
+            }),
        ];

        for (i, n, a) in inputs {

@@ -1,6 +1,5 @@
-use tera::{Tera, Context as TeraContext};
-
 use front_matter::InsertAnchor;
+use tera::{Context as TeraContext, Tera};

 #[derive(Debug, PartialEq, Clone, Serialize)]
 pub struct Header {
@@ -65,9 +64,26 @@ impl TempHeader {
        };

        match insert_anchor {
-            InsertAnchor::None => format!("<h{lvl} id=\"{id}\">{t}</h{lvl}>\n", lvl = self.level, t = self.html, id = self.id),
-            InsertAnchor::Left => format!("<h{lvl} id=\"{id}\">{a}{t}</h{lvl}>\n", lvl = self.level, a = anchor_link, t = self.html, id = self.id),
-            InsertAnchor::Right => format!("<h{lvl} id=\"{id}\">{t}{a}</h{lvl}>\n", lvl = self.level, a = anchor_link, t = self.html, id = self.id),
+            InsertAnchor::None => format!(
+                "<h{lvl} id=\"{id}\">{t}</h{lvl}>\n",
+                lvl = self.level,
+                t = self.html,
+                id = self.id
+            ),
+            InsertAnchor::Left => format!(
+                "<h{lvl} id=\"{id}\">{a}{t}</h{lvl}>\n",
+                lvl = self.level,
+                a = anchor_link,
+                t = self.html,
+                id = self.id
+            ),
+            InsertAnchor::Right => format!(
+                "<h{lvl} id=\"{id}\">{t}{a}</h{lvl}>\n",
+                lvl = self.level,
+                a = anchor_link,
+                t = self.html,
+                id = self.id
+            ),
        }
    }
 }
@@ -78,9 +94,12 @@ impl Default for TempHeader {
    }
 }


 /// Recursively finds children of a header
-fn find_children(parent_level: i32, start_at: usize, temp_headers: &[TempHeader]) -> (usize, Vec<Header>) {
+fn find_children(
+    parent_level: i32,
+    start_at: usize,
+    temp_headers: &[TempHeader],
+) -> (usize, Vec<Header>) {
    let mut headers = vec![];

    let mut start_at = start_at;
@@ -124,7 +143,6 @@ fn find_children(
    (start_at, headers)
 }

-
 /// Converts the flat temp headers into a nested set of headers
 /// representing the hierarchy
 pub fn make_table_of_contents(temp_headers: &[TempHeader]) -> Vec<Header> {
@@ -148,11 +166,7 @@ mod tests {

    #[test]
    fn can_make_basic_toc() {
-        let input = vec![
-            TempHeader::new(1),
-            TempHeader::new(1),
-            TempHeader::new(1),
-        ];
+        let input = vec![TempHeader::new(1), TempHeader::new(1), TempHeader::new(1)];
        let toc = make_table_of_contents(&input);
        assert_eq!(toc.len(), 3);
    }

@@ -1,8 +1,8 @@
-extern crate tera;
-extern crate front_matter;
-extern crate templates;
-extern crate rendering;
 extern crate config;
+extern crate front_matter;
+extern crate rendering;
+extern crate templates;
+extern crate tera;

 use std::collections::HashMap;

@@ -10,9 +10,8 @@ use tera::Tera;

 use config::Config;
 use front_matter::InsertAnchor;
+use rendering::{render_content, RenderContext};
 use templates::ZOLA_TERA;
-use rendering::{RenderContext, render_content};


 #[test]
 fn can_do_render_content_simple() {
@@ -32,10 +31,7 @@ fn doesnt_highlight_code_block_with_highlighting_off() {
    config.highlight_code = false;
    let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
    let res = render_content("```\n$ gutenberg server\n```", &context).unwrap();
-    assert_eq!(
-        res.body,
-        "<pre><code>$ gutenberg server\n</code></pre>\n"
-    );
+    assert_eq!(res.body, "<pre><code>$ gutenberg server\n</code></pre>\n");
 }

 #[test]
@@ -86,11 +82,15 @@ fn can_render_shortcode() {
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
    let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content(r#"
+    let res = render_content(
+        r#"
 Hello

 {{ youtube(id="ub36ffWAqgQ") }}
-    "#, &context).unwrap();
+    "#,
+        &context,
+    )
+    .unwrap();
    assert!(res.body.contains("<p>Hello</p>\n<div >"));
    assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ""#));
 }
@@ -100,14 +100,10 @@ fn can_render_shortcode_with_markdown_char_in_args_name() {
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
    let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
-    let input = vec![
-        "name",
-        "na_me",
-        "n_a_me",
-        "n1",
-    ];
+    let input = vec!["name", "na_me", "n_a_me", "n1"];
    for i in input {
-        let res = render_content(&format!("{{{{ youtube(id=\"hey\", {}=1) }}}}", i), &context).unwrap();
+        let res =
+            render_content(&format!("{{{{ youtube(id=\"hey\", {}=1) }}}}", i), &context).unwrap();
        assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/hey""#));
    }
 }
@@ -126,7 +122,9 @@ fn can_render_shortcode_with_markdown_char_in_args_value() {
    ];
    for i in input {
        let res = render_content(&format!("{{{{ youtube(id=\"{}\") }}}}", i), &context).unwrap();
-        assert!(res.body.contains(&format!(r#"<iframe src="https://www.youtube.com/embed/{}""#, i)));
+        assert!(
+            res.body.contains(&format!(r#"<iframe src="https://www.youtube.com/embed/{}""#, i))
+        );
    }
 }

@@ -135,17 +133,20 @@ fn can_render_body_shortcode_with_markdown_char_in_name() {
    let permalinks_ctx = HashMap::new();
    let mut tera = Tera::default();
    tera.extend(&ZOLA_TERA).unwrap();
-    let input = vec![
-        "quo_te",
-        "qu_o_te",
-    ];
+    let input = vec!["quo_te", "qu_o_te"];
    let config = Config::default();

    for i in input {
-        tera.add_raw_template(&format!("shortcodes/{}.html", i), "<blockquote>{{ body }} - {{ author}}</blockquote>").unwrap();
+        tera.add_raw_template(
+            &format!("shortcodes/{}.html", i),
+            "<blockquote>{{ body }} - {{ author}}</blockquote>",
+        )
+        .unwrap();
        let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);

-        let res = render_content(&format!("{{% {}(author=\"Bob\") %}}\nhey\n{{% end %}}", i), &context).unwrap();
+        let res =
+            render_content(&format!("{{% {}(author=\"Bob\") %}}\nhey\n{{% end %}}", i), &context)
+                .unwrap();
        println!("{:?}", res);
        assert!(res.body.contains("<blockquote>hey - Bob</blockquote>"));
    }
@@ -217,7 +218,8 @@ fn can_render_several_shortcode_in_row() {
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
    let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content(r#"
+    let res = render_content(
+        r#"
 Hello

 {{ youtube(id="ub36ffWAqgQ") }}
@@ -230,10 +232,15 @@ Hello

 {{ gist(url="https://gist.github.com/Keats/32d26f699dcc13ebd41b") }}

-    "#, &context).unwrap();
+    "#,
+        &context,
+    )
+    .unwrap();
    assert!(res.body.contains("<p>Hello</p>\n<div >"));
    assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ""#));
-    assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ?autoplay=1""#));
+    assert!(
+        res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ?autoplay=1""#)
+    );
    assert!(res.body.contains(r#"<iframe src="https://www.streamable.com/e/c0ic""#));
    assert!(res.body.contains(r#"//player.vimeo.com/video/210073083""#));
 }
@@ -252,17 +259,25 @@ fn doesnt_render_ignored_shortcodes() {
 fn can_render_shortcode_with_body() {
    let mut tera = Tera::default();
    tera.extend(&ZOLA_TERA).unwrap();
-    tera.add_raw_template("shortcodes/quote.html", "<blockquote>{{ body }} - {{ author }}</blockquote>").unwrap();
+    tera.add_raw_template(
+        "shortcodes/quote.html",
+        "<blockquote>{{ body }} - {{ author }}</blockquote>",
+    )
+    .unwrap();
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
    let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);

-    let res = render_content(r#"
+    let res = render_content(
+        r#"
 Hello
 {% quote(author="Keats") %}
 A quote
 {% end %}
-    "#, &context).unwrap();
+    "#,
+        &context,
+    )
+    .unwrap();
    assert_eq!(res.body, "<p>Hello</p>\n<blockquote>A quote - Keats</blockquote>\n");
 }

@@ -286,7 +301,8 @@ fn can_make_valid_relative_link() {
    let res = render_content(
        r#"[rel link](./pages/about.md), [abs link](https://vincent.is/about)"#,
        &context,
-    ).unwrap();
+    )
+    .unwrap();

    assert!(
        res.body.contains(r#"<p><a href="https://vincent.is/about">rel link</a>, <a href="https://vincent.is/about">abs link</a></p>"#)
@@ -302,9 +318,7 @@ fn can_make_relative_links_with_anchors() {
    let context = RenderContext::new(&tera_ctx, &config, "", &permalinks, InsertAnchor::None);
    let res = render_content(r#"[rel link](./pages/about.md#cv)"#, &context).unwrap();

-    assert!(
-        res.body.contains(r#"<p><a href="https://vincent.is/about#cv">rel link</a></p>"#)
-    );
+    assert!(res.body.contains(r#"<p><a href="https://vincent.is/about#cv">rel link</a></p>"#));
 }

 #[test]
@@ -411,7 +425,8 @@ fn can_make_toc() {
        InsertAnchor::Left,
    );

-    let res = render_content(r#"
+    let res = render_content(
+        r#"
 # Header 1

 ## Header 2
@@ -419,7 +434,10 @@ fn can_make_toc() {
 ## Another Header 2

 ### Last one
-    "#, &context).unwrap();
+    "#,
+        &context,
+    )
+    .unwrap();

    let toc = res.toc;
    assert_eq!(toc.len(), 1);
@@ -439,13 +457,17 @@ fn can_ignore_tags_in_toc() {
        InsertAnchor::Left,
    );

-    let res = render_content(r#"
+    let res = render_content(
+        r#"
 ## header with `code`

 ## [anchor](https://duckduckgo.com/) in header

 ## **bold** and *italics*
-    "#, &context).unwrap();
+    "#,
+        &context,
+    )
+    .unwrap();

    let toc = res.toc;

@@ -465,10 +487,7 @@ fn can_understand_backtick_in_titles() {
    let config = Config::default();
    let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
    let res = render_content("# `Hello`", &context).unwrap();
-    assert_eq!(
-        res.body,
-        "<h1 id=\"hello\"><code>Hello</code></h1>\n"
-    );
+    assert_eq!(res.body, "<h1 id=\"hello\"><code>Hello</code></h1>\n");
 }

 #[test]
@@ -477,10 +496,7 @@ fn can_understand_backtick_in_paragraphs() {
    let config = Config::default();
    let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
    let res = render_content("Hello `world`", &context).unwrap();
-    assert_eq!(
-        res.body,
-        "<p>Hello <code>world</code></p>\n"
-    );
+    assert_eq!(res.body, "<p>Hello <code>world</code></p>\n");
 }

 // https://github.com/Keats/gutenberg/issues/297
@@ -490,10 +506,7 @@ fn can_understand_links_in_header() {
    let config = Config::default();
    let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
    let res = render_content("# [Rust](https://rust-lang.org)", &context).unwrap();
-    assert_eq!(
-        res.body,
-        "<h1 id=\"rust\"><a href=\"https://rust-lang.org\">Rust</a></h1>\n"
-    );
+    assert_eq!(res.body, "<h1 id=\"rust\"><a href=\"https://rust-lang.org\">Rust</a></h1>\n");
 }

 #[test]
@@ -501,7 +514,8 @@ fn can_understand_link_with_title_in_header() {
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
    let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content("# [Rust](https://rust-lang.org \"Rust homepage\")", &context).unwrap();
+    let res =
+        render_content("# [Rust](https://rust-lang.org \"Rust homepage\")", &context).unwrap();
    assert_eq!(
        res.body,
        "<h1 id=\"rust\"><a href=\"https://rust-lang.org\" title=\"Rust homepage\">Rust</a></h1>\n"
@@ -515,10 +529,7 @@ fn can_make_valid_relative_link_in_header() {
    let tera_ctx = Tera::default();
    let config = Config::default();
    let context = RenderContext::new(&tera_ctx, &config, "", &permalinks, InsertAnchor::None);
-    let res = render_content(
-        r#" # [rel link](./pages/about.md)"#,
-        &context,
-    ).unwrap();
+    let res = render_content(r#" # [rel link](./pages/about.md)"#, &context).unwrap();

    assert_eq!(
        res.body,
@@ -530,19 +541,28 @@ fn can_make_valid_relative_link_in_header() {
 fn can_make_permalinks_with_colocated_assets_for_link() {
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
-    let context = RenderContext::new(&ZOLA_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content("[an image](image.jpg)", &context).unwrap();
-    assert_eq!(
-        res.body,
-        "<p><a href=\"https://vincent.is/about/image.jpg\">an image</a></p>\n"
+    let context = RenderContext::new(
+        &ZOLA_TERA,
+        &config,
+        "https://vincent.is/about/",
+        &permalinks_ctx,
+        InsertAnchor::None,
    );
+    let res = render_content("[an image](image.jpg)", &context).unwrap();
+    assert_eq!(res.body, "<p><a href=\"https://vincent.is/about/image.jpg\">an image</a></p>\n");
 }

 #[test]
 fn can_make_permalinks_with_colocated_assets_for_image() {
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
-    let context = RenderContext::new(&ZOLA_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, InsertAnchor::None);
+    let context = RenderContext::new(
+        &ZOLA_TERA,
+        &config,
+        "https://vincent.is/about/",
+        &permalinks_ctx,
+        InsertAnchor::None,
+    );
    let res = render_content("![alt text](image.jpg)", &context).unwrap();
    assert_eq!(
        res.body,
@@ -554,8 +574,15 @@ fn can_make_permalinks_with_colocated_assets_for_image() {
 fn markdown_doesnt_wrap_html_in_paragraph() {
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
-    let context = RenderContext::new(&ZOLA_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content(r#"
+    let context = RenderContext::new(
+        &ZOLA_TERA,
+        &config,
+        "https://vincent.is/about/",
+        &permalinks_ctx,
+        InsertAnchor::None,
+    );
+    let res = render_content(
+        r#"
 Some text

 <h1>Helo</h1>
@@ -565,7 +592,10 @@ Some text
 <img src="mobx-flow.png" alt="MobX flow">
 </a>
 </div>
-    "#, &context).unwrap();
+    "#,
+        &context,
+    )
+    .unwrap();
    assert_eq!(
        res.body,
        "<p>Some text</p>\n<h1>Helo</h1>\n<div>\n<a href=\"mobx-flow.png\">\n <img src=\"mobx-flow.png\" alt=\"MobX flow\">\n </a>\n</div>\n"
@@ -577,12 +607,15 @@ fn can_validate_valid_external_links() {
    let permalinks_ctx = HashMap::new();
    let mut config = Config::default();
    config.check_external_links = true;
-    let context = RenderContext::new(&ZOLA_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content("[a link](http://google.com)", &context).unwrap();
-    assert_eq!(
-        res.body,
-        "<p><a href=\"http://google.com\">a link</a></p>\n"
+    let context = RenderContext::new(
+        &ZOLA_TERA,
+        &config,
+        "https://vincent.is/about/",
+        &permalinks_ctx,
+        InsertAnchor::None,
    );
+    let res = render_content("[a link](http://google.com)", &context).unwrap();
+    assert_eq!(res.body, "<p><a href=\"http://google.com\">a link</a></p>\n");
 }

 #[test]
@@ -590,7 +623,13 @@ fn can_show_error_message_for_invalid_external_links() {
    let permalinks_ctx = HashMap::new();
    let mut config = Config::default();
    config.check_external_links = true;
-    let context = RenderContext::new(&ZOLA_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, InsertAnchor::None);
+    let context = RenderContext::new(
+        &ZOLA_TERA,
+        &config,
+        "https://vincent.is/about/",
+        &permalinks_ctx,
+        InsertAnchor::None,
+    );
    let res = render_content("[a link](http://google.comy)", &context);
    assert!(res.is_err());
    let err = res.unwrap_err();
@@ -602,12 +641,15 @@ fn doesnt_try_to_validate_email_links_mailto() {
    let permalinks_ctx = HashMap::new();
    let mut config = Config::default();
    config.check_external_links = true;
-    let context = RenderContext::new(&ZOLA_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content("Email: [foo@bar.baz](mailto:foo@bar.baz)", &context).unwrap();
-    assert_eq!(
-        res.body,
-        "<p>Email: <a href=\"mailto:foo@bar.baz\">foo@bar.baz</a></p>\n"
+    let context = RenderContext::new(
+        &ZOLA_TERA,
+        &config,
+        "https://vincent.is/about/",
+        &permalinks_ctx,
+        InsertAnchor::None,
    );
+    let res = render_content("Email: [foo@bar.baz](mailto:foo@bar.baz)", &context).unwrap();
+    assert_eq!(res.body, "<p>Email: <a href=\"mailto:foo@bar.baz\">foo@bar.baz</a></p>\n");
 }

 #[test]
@@ -615,12 +657,15 @@ fn doesnt_try_to_validate_email_links_angled_brackets() {
    let permalinks_ctx = HashMap::new();
    let mut config = Config::default();
    config.check_external_links = true;
-    let context = RenderContext::new(&ZOLA_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content("Email: <foo@bar.baz>", &context).unwrap();
-    assert_eq!(
-        res.body,
-        "<p>Email: <a href=\"mailto:foo@bar.baz\">foo@bar.baz</a></p>\n"
+    let context = RenderContext::new(
+        &ZOLA_TERA,
+        &config,
+        "https://vincent.is/about/",
+        &permalinks_ctx,
+        InsertAnchor::None,
    );
+    let res = render_content("Email: <foo@bar.baz>", &context).unwrap();
+    assert_eq!(res.body, "<p>Email: <a href=\"mailto:foo@bar.baz\">foo@bar.baz</a></p>\n");
 }

 #[test]
@@ -629,7 +674,11 @@ fn can_handle_summaries() {
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
    let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
-    let res = render_content("Hello [world]\n\n<!-- more -->\n\nBla bla\n\n[world]: https://vincent.is/about/", &context).unwrap();
+    let res = render_content(
+        "Hello [world]\n\n<!-- more -->\n\nBla bla\n\n[world]: https://vincent.is/about/",
+        &context,
+    )
+    .unwrap();
    assert_eq!(
        res.body,
        "<p>Hello <a href=\"https://vincent.is/about/\">world</a></p>\n<p><a name=\"continue-reading\"></a></p>\n<p>Bla bla</p>\n"

@@ -11,9 +11,8 @@ use std::collections::{HashMap, HashSet};

 use elasticlunr::{Index, Language};

-use library::{Library, Section};
 use errors::Result;
+use library::{Library, Section};

 pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js");

@@ -34,7 +33,6 @@ lazy_static! {
    };
 }

-
 /// Returns the generated JSON index with all the documents of the site added using
 /// the language given
 /// Errors if the language given is not available in Elasticlunr
@@ -42,7 +40,9 @@ lazy_static! {
 pub fn build_index(lang: &str, library: &Library) -> Result<String> {
    let language = match Language::from_code(lang) {
        Some(l) => l,
-        None => { bail!("Tried to build search index for language {} which is not supported", lang); }
+        None => {
+            bail!("Tried to build search index for language {} which is not supported", lang);
+        }
    };

    let mut index = Index::with_language(language, &["title", "body"]);
@@ -63,7 +63,10 @@ fn add_section_to_index(index: &mut Index, section: &Section, library: &Library)
    if section.meta.redirect_to.is_none() {
        index.add_doc(
            &section.permalink,
-            &[&section.meta.title.clone().unwrap_or_default(), &AMMONIA.clean(&section.content).to_string()],
+            &[
+                &section.meta.title.clone().unwrap_or_default(),
+                &AMMONIA.clean(&section.content).to_string(),
+            ],
        );
    }

@@ -75,7 +78,10 @@ fn add_section_to_index(index: &mut Index, section: &Section, library: &Library)

        index.add_doc(
            &page.permalink,
-            &[&page.meta.title.clone().unwrap_or_default(), &AMMONIA.clean(&page.content).to_string()],
+            &[
+                &page.meta.title.clone().unwrap_or_default(),
+                &AMMONIA.clean(&page.content).to_string(),
+            ],
        );
    }
 }

@@ -1,14 +1,13 @@
 //! Benchmarking loading/markdown rendering of generated sites of various sizes

 #![feature(test)]
-extern crate test;
 extern crate site;
+extern crate test;

 use std::env;

 use site::Site;

-
 #[bench]
 fn bench_loading_small_blog(b: &mut test::Bencher) {
    let mut path = env::current_dir().unwrap().to_path_buf();

@@ -1,15 +1,14 @@
 #![feature(test)]
-extern crate test;
-extern crate site;
 extern crate library;
+extern crate site;
 extern crate tempfile;
+extern crate test;

 use std::env;

-use tempfile::tempdir;
-use site::Site;
 use library::Paginator;
+use site::Site;
+use tempfile::tempdir;

 fn setup_site(name: &str) -> Site {
    let mut path = env::current_dir().unwrap().to_path_buf();

@@ -1,7 +1,7 @@
-extern crate tera;
-extern crate rayon;
 extern crate glob;
+extern crate rayon;
 extern crate serde;
+extern crate tera;
 #[macro_use]
 extern crate serde_derive;
 extern crate sass_rs;
@@ -9,34 +9,36 @@ extern crate sass_rs;
 #[macro_use]
 extern crate errors;
 extern crate config;
-extern crate utils;
 extern crate front_matter;
-extern crate templates;
-extern crate search;
 extern crate imageproc;
 extern crate library;
+extern crate search;
+extern crate templates;
+extern crate utils;

 #[cfg(test)]
 extern crate tempfile;

-use std::collections::{HashMap};
-use std::fs::{create_dir_all, remove_dir_all, copy};
+use std::collections::HashMap;
+use std::fs::{copy, create_dir_all, remove_dir_all};
 use std::path::{Path, PathBuf};
 use std::sync::{Arc, Mutex};

 use glob::glob;
-use tera::{Tera, Context};
-use sass_rs::{Options as SassOptions, OutputStyle, compile_file};
 use rayon::prelude::*;
+use sass_rs::{compile_file, Options as SassOptions, OutputStyle};
+use tera::{Context, Tera};

+use config::{get_config, Config};
 use errors::{Result, ResultExt};
-use config::{Config, get_config};
-use utils::fs::{create_file, copy_directory, create_directory, ensure_directory_exists};
-use utils::templates::{render_template, rewrite_theme_paths};
+use front_matter::InsertAnchor;
+use library::{
+    find_taxonomies, sort_actual_pages_by_date, Library, Page, Paginator, Section, Taxonomy,
+};
+use templates::{global_fns, render_redirect_template, ZOLA_TERA};
+use utils::fs::{copy_directory, create_directory, create_file, ensure_directory_exists};
 use utils::net::get_available_port;
-use templates::{ZOLA_TERA, global_fns, render_redirect_template};
-use front_matter::{InsertAnchor};
-use library::{Page, Section, sort_actual_pages_by_date, Library, Taxonomy, find_taxonomies, Paginator};
+use utils::templates::{render_template, rewrite_theme_paths};

 /// The sitemap only needs links and potentially date so we trim down
 /// all pages to only that
@@ -81,7 +83,8 @@ impl Site {
        let mut config = get_config(path, config_file);
        config.load_extra_syntaxes(path)?;

-        let tpl_glob = format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.*ml");
+        let tpl_glob =
+            format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.*ml");
        // Only parsing as we might be extending templates from themes and that would error
        // as we haven't loaded them yet
        let mut tera = Tera::parse(&tpl_glob).chain_err(|| "Error parsing templates")?;
@@ -100,11 +103,13 @@ impl Site {
                path.to_string_lossy().replace("\\", "/"),
                format!("themes/{}/templates/**/*.*ml", theme)
            );
-            let mut tera_theme = Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?;
+            let mut tera_theme =
+                Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?;
            rewrite_theme_paths(&mut tera_theme, &theme);
            // TODO: same as below
            if theme_path.join("templates").join("robots.txt").exists() {
-                tera_theme.add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
+                tera_theme
+                    .add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
            }
            tera_theme.build_inheritance_chains()?;
            tera.extend(&tera_theme)?;
@@ -121,7 +126,8 @@ impl Site {

        let content_path = path.join("content");
        let static_path = path.join("static");
-        let imageproc = imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);
+        let imageproc =
+            imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);

        let site = Site {
            base_path: path.to_path_buf(),
@@ -238,7 +244,10 @@ impl Site {
        let mut pages_insert_anchors = HashMap::new();
        for page in pages {
            let p = page?;
-            pages_insert_anchors.insert(p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone()));
+            pages_insert_anchors.insert(
+                p.file.path.clone(),
+                self.find_parent_section_insert_anchor(&p.file.parent.clone()),
+            );
            self.add_page(p, false)?;
        }

@@ -263,7 +272,10 @@ impl Site {
        // This is needed in the first place because of silly borrow checker
        let mut pages_insert_anchors = HashMap::new();
        for (_, p) in self.library.pages() {
-            pages_insert_anchors.insert(p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone()));
+            pages_insert_anchors.insert(
+                p.file.path.clone(),
+                self.find_parent_section_insert_anchor(&p.file.parent.clone()),
+            );
        }

        self.library
@@ -291,10 +303,12 @@ impl Site {
    /// Adds global fns that are to be available to shortcodes while rendering markdown
    pub fn register_early_global_fns(&mut self) {
        self.tera.register_function(
-            "get_url", global_fns::make_get_url(self.permalinks.clone(), self.config.clone()),
+            "get_url",
+            global_fns::make_get_url(self.permalinks.clone(), self.config.clone()),
        );
        self.tera.register_function(
-            "resize_image", global_fns::make_resize_image(self.imageproc.clone()),
+            "resize_image",
+            global_fns::make_resize_image(self.imageproc.clone()),
        );
    }

@@ -310,7 +324,10 @@ impl Site {
            "get_taxonomy_url",
            global_fns::make_get_taxonomy_url(&self.taxonomies),
        );
-        self.tera.register_function("load_data", global_fns::make_load_data(self.content_path.clone(), self.base_path.clone()));
+        self.tera.register_function(
+            "load_data",
+            global_fns::make_load_data(self.content_path.clone(), self.base_path.clone()),
+        );
    }

    /// Add a page to the site
@@ -349,7 +366,7 @@ impl Site {
    pub fn find_parent_section_insert_anchor(&self, parent_path: &PathBuf) -> InsertAnchor {
        match self.library.get_section(&parent_path.join("_index.md")) {
            Some(s) => s.meta.insert_anchor_links,
-            None => InsertAnchor::None
+            None => InsertAnchor::None,
        }
    }

@@ -375,7 +392,10 @@ impl Site {
        if let Some(port) = self.live_reload {
            return html.replace(
                "</body>",
-                &format!(r#"<script src="/livereload.js?port={}&mindelay=10"></script></body>"#, port),
+                &format!(
+                    r#"<script src="/livereload.js?port={}&mindelay=10"></script></body>"#,
+                    port
+                ),
            );
        }

@@ -498,10 +518,7 @@ impl Site {
        )?;

        // then elasticlunr.min.js
-        create_file(
-            &self.output_path.join("elasticlunr.min.js"),
-            search::ELASTICLUNR_JS,
-        )?;
+        create_file(&self.output_path.join("elasticlunr.min.js"), search::ELASTICLUNR_JS)?;

        Ok(())
    }
@@ -537,12 +554,19 @@ impl Site {
        Ok(())
    }

-    fn compile_sass_glob(&self, sass_path: &Path, extension: &str, options: &SassOptions) -> Result<Vec<(PathBuf, PathBuf)>> {
+    fn compile_sass_glob(
+        &self,
+        sass_path: &Path,
+        extension: &str,
+        options: &SassOptions,
+    ) -> Result<Vec<(PathBuf, PathBuf)>> {
        let glob_string = format!("{}/**/*.{}", sass_path.display(), extension);
        let files = glob(&glob_string)
            .unwrap()
            .filter_map(|e| e.ok())
-            .filter(|entry| !entry.as_path().file_name().unwrap().to_string_lossy().starts_with('_'))
+            .filter(|entry| {
+                !entry.as_path().file_name().unwrap().to_string_lossy().starts_with('_')
+            })
            .collect::<Vec<_>>();

        let mut compiled_paths = Vec::new();
@@ -579,7 +603,7 @@ impl Site {
                    split.push(part);
                    "index.html"
                }
-                None => "index.html"
+                None => "index.html",
            };

            for component in split {
@@ -589,7 +613,10 @@ impl Site {
                    create_directory(&output_path)?;
                }
            }
-            create_file(&output_path.join(page_name), &render_redirect_template(&page.permalink, &self.tera)?)?;
+            create_file(
+                &output_path.join(page_name),
+                &render_redirect_template(&page.permalink, &self.tera)?,
+            )?;
        }
    }
    Ok(())
@@ -650,15 +677,16 @@ impl Site {
                }

                if taxonomy.kind.is_paginated() {
-                    self.render_paginated(&output_path, &Paginator::from_taxonomy(&taxonomy, item, &self.library))
+                    self.render_paginated(
+                        &output_path,
+                        &Paginator::from_taxonomy(&taxonomy, item, &self.library),
+                    )
                } else {
-                    let single_output = taxonomy.render_term(item, &self.tera, &self.config, &self.library)?;
+                    let single_output =
+                        taxonomy.render_term(item, &self.tera, &self.config, &self.library)?;
                    let path = output_path.join(&item.slug);
                    create_directory(&path)?;
-                    create_file(
-                        &path.join("index.html"),
-                        &self.inject_livereload(single_output),
-                    )
+                    create_file(&path.join("index.html"), &self.inject_livereload(single_output))
                }
            })
            .collect::<Result<()>>()
@@ -670,7 +698,8 @@ impl Site {

        let mut context = Context::new();

-        let mut pages = self.library
+        let mut pages = self
+            .library
            .pages_values()
            .iter()
            .filter(|p| !p.is_draft())
@@ -685,7 +714,8 @@ impl Site {
        pages.sort_by(|a, b| a.permalink.cmp(&b.permalink));
        context.insert("pages", &pages);

-        let mut sections = self.library
+        let mut sections = self
+            .library
            .sections_values()
            .iter()
            .map(|s| SitemapEntry::new(s.permalink.clone(), None))
@@ -699,7 +729,10 @@ impl Site {
                let mut terms = vec![];
                terms.push(SitemapEntry::new(self.config.make_permalink(name), None));
                for item in &taxonomy.items {
-                    terms.push(SitemapEntry::new(self.config.make_permalink(&format!("{}/{}", &name, item.slug)), None));
+                    terms.push(SitemapEntry::new(
+                        self.config.make_permalink(&format!("{}/{}", &name, item.slug)),
+                        None,
+                    ));
                }
}
|
||||||
terms.sort_by(|a, b| a.permalink.cmp(&b.permalink));
|
terms.sort_by(|a, b| a.permalink.cmp(&b.permalink));
|
||||||
taxonomies.push(terms);
|
taxonomies.push(terms);
|
||||||
|
@ -718,7 +751,11 @@ impl Site {
|
||||||
/// Renders a RSS feed for the given path and at the given path
|
/// Renders a RSS feed for the given path and at the given path
|
||||||
/// If both arguments are `None`, it will render only the RSS feed for the whole
|
/// If both arguments are `None`, it will render only the RSS feed for the whole
|
||||||
/// site at the root folder.
|
/// site at the root folder.
|
||||||
pub fn render_rss_feed(&self, all_pages: Vec<&Page>, base_path: Option<&PathBuf>) -> Result<()> {
|
pub fn render_rss_feed(
|
||||||
|
&self,
|
||||||
|
all_pages: Vec<&Page>,
|
||||||
|
base_path: Option<&PathBuf>,
|
||||||
|
) -> Result<()> {
|
||||||
ensure_directory_exists(&self.output_path)?;
|
ensure_directory_exists(&self.output_path)?;
|
||||||
|
|
||||||
let mut context = Context::new();
|
let mut context = Context::new();
|
||||||
|
@ -806,7 +843,10 @@ impl Site {
|
||||||
|
|
||||||
if let Some(ref redirect_to) = section.meta.redirect_to {
|
if let Some(ref redirect_to) = section.meta.redirect_to {
|
||||||
let permalink = self.config.make_permalink(redirect_to);
|
let permalink = self.config.make_permalink(redirect_to);
|
||||||
create_file(&output_path.join("index.html"), &render_redirect_template(&permalink, &self.tera)?)?;
|
create_file(
|
||||||
|
&output_path.join("index.html"),
|
||||||
|
&render_redirect_template(&permalink, &self.tera)?,
|
||||||
|
)?;
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -861,12 +901,16 @@ impl Site {
|
||||||
.map(|pager| {
|
.map(|pager| {
|
||||||
let page_path = folder_path.join(&format!("{}", pager.index));
|
let page_path = folder_path.join(&format!("{}", pager.index));
|
||||||
create_directory(&page_path)?;
|
create_directory(&page_path)?;
|
||||||
let output = paginator.render_pager(pager, &self.config, &self.tera, &self.library)?;
|
let output =
|
||||||
|
paginator.render_pager(pager, &self.config, &self.tera, &self.library)?;
|
||||||
if pager.index > 1 {
|
if pager.index > 1 {
|
||||||
create_file(&page_path.join("index.html"), &self.inject_livereload(output))?;
|
create_file(&page_path.join("index.html"), &self.inject_livereload(output))?;
|
||||||
} else {
|
} else {
|
||||||
create_file(&output_path.join("index.html"), &self.inject_livereload(output))?;
|
create_file(&output_path.join("index.html"), &self.inject_livereload(output))?;
|
||||||
create_file(&page_path.join("index.html"), &render_redirect_template(&paginator.permalink, &self.tera)?)?;
|
create_file(
|
||||||
|
&page_path.join("index.html"),
|
||||||
|
&render_redirect_template(&paginator.permalink, &self.tera)?,
|
||||||
|
)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
})
|
||||||
|
|
|
@@ -3,13 +3,12 @@ extern crate tempfile;

 use std::collections::HashMap;
 use std::env;
-use std::path::Path;
 use std::fs::File;
 use std::io::prelude::*;
+use std::path::Path;

-use tempfile::tempdir;
-
 use site::Site;
+use tempfile::tempdir;

 #[test]
 fn can_parse_site() {
@@ -27,7 +26,8 @@ fn can_parse_site() {
     assert_eq!(url_post.path, "a-fixed-url/");

     // Make sure the article in a folder with only asset doesn't get counted as a section
-    let asset_folder_post = site.library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap();
+    let asset_folder_post =
+        site.library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap();
     assert_eq!(asset_folder_post.file.components, vec!["posts".to_string()]);

     // That we have the right number of sections
@@ -42,7 +42,10 @@ fn can_parse_site() {
     let posts_section = site.library.get_section(&posts_path.join("_index.md")).unwrap();
     assert_eq!(posts_section.subsections.len(), 1);
     assert_eq!(posts_section.pages.len(), 8);
-    assert_eq!(posts_section.ancestors, vec![*site.library.get_section_key(&index_section.file.path).unwrap()]);
+    assert_eq!(
+        posts_section.ancestors,
+        vec![*site.library.get_section_key(&index_section.file.path).unwrap()]
+    );

     // Make sure we remove all the pwd + content from the sections
     let basic = site.library.get_page(&posts_path.join("simple.md")).unwrap();
@@ -55,7 +58,8 @@ fn can_parse_site() {
         ]
     );

-    let tutorials_section = site.library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap();
+    let tutorials_section =
+        site.library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap();
     assert_eq!(tutorials_section.subsections.len(), 2);
     let sub1 = site.library.get_section_by_key(tutorials_section.subsections[0]);
     let sub2 = site.library.get_section_by_key(tutorials_section.subsections[1]);
@@ -63,7 +67,10 @@ fn can_parse_site() {
     assert_eq!(sub2.clone().meta.title.unwrap(), "DevOps");
     assert_eq!(tutorials_section.pages.len(), 0);

-    let devops_section = site.library.get_section(&posts_path.join("tutorials").join("devops").join("_index.md")).unwrap();
+    let devops_section = site
+        .library
+        .get_section(&posts_path.join("tutorials").join("devops").join("_index.md"))
+        .unwrap();
     assert_eq!(devops_section.subsections.len(), 0);
     assert_eq!(devops_section.pages.len(), 2);
     assert_eq!(
@@ -75,38 +82,37 @@ fn can_parse_site() {
         ]
     );

-    let prog_section = site.library.get_section(&posts_path.join("tutorials").join("programming").join("_index.md")).unwrap();
+    let prog_section = site
+        .library
+        .get_section(&posts_path.join("tutorials").join("programming").join("_index.md"))
+        .unwrap();
     assert_eq!(prog_section.subsections.len(), 0);
     assert_eq!(prog_section.pages.len(), 2);
 }

 // 2 helper macros to make all the build testing more bearable
 macro_rules! file_exists {
-    ($root: expr, $path: expr) => {
-        {
-            let mut path = $root.clone();
-            for component in $path.split("/") {
-                path = path.join(component);
-            }
-            Path::new(&path).exists()
-        }
-    }
+    ($root: expr, $path: expr) => {{
+        let mut path = $root.clone();
+        for component in $path.split("/") {
+            path = path.join(component);
+        }
+        Path::new(&path).exists()
+    }};
 }

 macro_rules! file_contains {
-    ($root: expr, $path: expr, $text: expr) => {
-        {
-            let mut path = $root.clone();
-            for component in $path.split("/") {
-                path = path.join(component);
-            }
-            let mut file = File::open(&path).unwrap();
-            let mut s = String::new();
-            file.read_to_string(&mut s).unwrap();
-            println!("{}", s);
-            s.contains($text)
-        }
-    }
+    ($root: expr, $path: expr, $text: expr) => {{
+        let mut path = $root.clone();
+        for component in $path.split("/") {
+            path = path.join(component);
+        }
+        let mut file = File::open(&path).unwrap();
+        let mut s = String::new();
+        file.read_to_string(&mut s).unwrap();
+        println!("{}", s);
+        s.contains($text)
+    }};
 }

 #[test]
@@ -145,7 +151,11 @@ fn can_build_site_without_live_reload() {

     // Pages and section get their relative path
     assert!(file_contains!(public, "posts/tutorials/index.html", "posts/tutorials/_index.md"));
-    assert!(file_contains!(public, "posts/tutorials/devops/nix/index.html", "posts/tutorials/devops/nix.md"));
+    assert!(file_contains!(
+        public,
+        "posts/tutorials/devops/nix/index.html",
+        "posts/tutorials/devops/nix.md"
+    ));

     // aliases work
     assert!(file_exists!(public, "an-old-url/old-page/index.html"));
@@ -183,14 +193,26 @@ fn can_build_site_without_live_reload() {
     assert_eq!(file_contains!(public, "index.html", "/livereload.js?port=1112&mindelay=10"), false);

     // Both pages and sections are in the sitemap
-    assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/posts/simple/</loc>"));
-    assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/posts/</loc>"));
+    assert!(file_contains!(
+        public,
+        "sitemap.xml",
+        "<loc>https://replace-this-with-your-url.com/posts/simple/</loc>"
+    ));
+    assert!(file_contains!(
+        public,
+        "sitemap.xml",
+        "<loc>https://replace-this-with-your-url.com/posts/</loc>"
+    ));
     // Drafts are not in the sitemap
     assert!(!file_contains!(public, "sitemap.xml", "draft"));

     // robots.txt has been rendered from the template
     assert!(file_contains!(public, "robots.txt", "User-agent: zola"));
-    assert!(file_contains!(public, "robots.txt", "Sitemap: https://replace-this-with-your-url.com/sitemap.xml"));
+    assert!(file_contains!(
+        public,
+        "robots.txt",
+        "Sitemap: https://replace-this-with-your-url.com/sitemap.xml"
+    ));
 }

 #[test]
@@ -231,7 +253,11 @@ fn can_build_site_with_live_reload() {
     assert!(file_contains!(public, "index.html", "/livereload.js"));

     // the summary anchor link has been created
-    assert!(file_contains!(public, "posts/python/index.html", r#"<a name="continue-reading"></a>"#));
+    assert!(file_contains!(
+        public,
+        "posts/python/index.html",
+        r#"<a name="continue-reading"></a>"#
+    ));
     assert!(file_contains!(public, "posts/draft/index.html", r#"THEME_SHORTCODE"#));
 }

@@ -245,7 +271,10 @@ fn can_build_site_with_taxonomies() {
     for (i, (_, page)) in site.library.pages_mut().iter_mut().enumerate() {
         page.meta.taxonomies = {
             let mut taxonomies = HashMap::new();
-            taxonomies.insert("categories".to_string(), vec![if i % 2 == 0 { "A" } else { "B" }.to_string()]);
+            taxonomies.insert(
+                "categories".to_string(),
+                vec![if i % 2 == 0 { "A" } else { "B" }.to_string()],
+            );
             taxonomies
         };
     }
@@ -278,15 +307,27 @@ fn can_build_site_with_taxonomies() {
     assert!(file_exists!(public, "categories/a/index.html"));
     assert!(file_exists!(public, "categories/b/index.html"));
     assert!(file_exists!(public, "categories/a/rss.xml"));
-    assert!(file_contains!(public, "categories/a/rss.xml", "https://replace-this-with-your-url.com/categories/a/rss.xml"));
+    assert!(file_contains!(
+        public,
+        "categories/a/rss.xml",
+        "https://replace-this-with-your-url.com/categories/a/rss.xml"
+    ));
     // Extending from a theme works
     assert!(file_contains!(public, "categories/a/index.html", "EXTENDED"));
     // Tags aren't
     assert_eq!(file_exists!(public, "tags/index.html"), false);

     // Categories are in the sitemap
-    assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/categories/</loc>"));
-    assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/categories/a/</loc>"));
+    assert!(file_contains!(
+        public,
+        "sitemap.xml",
+        "<loc>https://replace-this-with-your-url.com/categories/</loc>"
+    ));
+    assert!(file_contains!(
+        public,
+        "sitemap.xml",
+        "<loc>https://replace-this-with-your-url.com/categories/a/</loc>"
+    ));
 }

 #[test]
@@ -303,7 +344,11 @@ fn can_build_site_and_insert_anchor_links() {

     assert!(Path::new(&public).exists());
     // anchor link inserted
-    assert!(file_contains!(public, "posts/something-else/index.html", "<h1 id=\"title\"><a class=\"zola-anchor\" href=\"#title\""));
+    assert!(file_contains!(
+        public,
+        "posts/something-else/index.html",
+        "<h1 id=\"title\"><a class=\"zola-anchor\" href=\"#title\""
+    ));
 }

 #[test]
@@ -352,8 +397,16 @@ fn can_build_site_with_pagination_for_section() {
     assert!(file_contains!(public, "posts/index.html", "Current index: 1"));
     assert!(!file_contains!(public, "posts/index.html", "has_prev"));
     assert!(file_contains!(public, "posts/index.html", "has_next"));
-    assert!(file_contains!(public, "posts/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+    assert!(file_contains!(
+        public,
+        "posts/index.html",
+        "First: https://replace-this-with-your-url.com/posts/"
+    ));
+    assert!(file_contains!(
+        public,
+        "posts/index.html",
+        "Last: https://replace-this-with-your-url.com/posts/page/4/"
+    ));
     assert_eq!(file_contains!(public, "posts/index.html", "has_prev"), false);

     assert!(file_exists!(public, "posts/page/2/index.html"));
@@ -362,8 +415,16 @@ fn can_build_site_with_pagination_for_section() {
     assert!(file_contains!(public, "posts/page/2/index.html", "Current index: 2"));
     assert!(file_contains!(public, "posts/page/2/index.html", "has_prev"));
     assert!(file_contains!(public, "posts/page/2/index.html", "has_next"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+    assert!(file_contains!(
+        public,
+        "posts/page/2/index.html",
+        "First: https://replace-this-with-your-url.com/posts/"
+    ));
+    assert!(file_contains!(
+        public,
+        "posts/page/2/index.html",
+        "Last: https://replace-this-with-your-url.com/posts/page/4/"
+    ));

     assert!(file_exists!(public, "posts/page/3/index.html"));
     assert!(file_contains!(public, "posts/page/3/index.html", "Num pagers: 4"));
@@ -371,8 +432,16 @@ fn can_build_site_with_pagination_for_section() {
     assert!(file_contains!(public, "posts/page/3/index.html", "Current index: 3"));
     assert!(file_contains!(public, "posts/page/3/index.html", "has_prev"));
     assert!(file_contains!(public, "posts/page/3/index.html", "has_next"));
-    assert!(file_contains!(public, "posts/page/3/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/page/3/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+    assert!(file_contains!(
+        public,
+        "posts/page/3/index.html",
+        "First: https://replace-this-with-your-url.com/posts/"
+    ));
+    assert!(file_contains!(
+        public,
+        "posts/page/3/index.html",
+        "Last: https://replace-this-with-your-url.com/posts/page/4/"
+    ));

     assert!(file_exists!(public, "posts/page/4/index.html"));
     assert!(file_contains!(public, "posts/page/4/index.html", "Num pagers: 4"));
@@ -380,8 +449,16 @@ fn can_build_site_with_pagination_for_section() {
     assert!(file_contains!(public, "posts/page/4/index.html", "Current index: 4"));
     assert!(file_contains!(public, "posts/page/4/index.html", "has_prev"));
     assert!(!file_contains!(public, "posts/page/4/index.html", "has_next"));
-    assert!(file_contains!(public, "posts/page/4/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/page/4/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+    assert!(file_contains!(
+        public,
+        "posts/page/4/index.html",
+        "First: https://replace-this-with-your-url.com/posts/"
+    ));
+    assert!(file_contains!(
+        public,
+        "posts/page/4/index.html",
+        "Last: https://replace-this-with-your-url.com/posts/page/4/"
+    ));
 }

 #[test]
@@ -448,7 +525,6 @@ fn can_build_rss_feed() {
     assert!(file_contains!(public, "rss.xml", "Simple article with shortcodes"));
 }

-
 #[test]
 fn can_build_search_index() {
     let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
@@ -479,6 +555,9 @@ fn can_build_with_extra_syntaxes() {

     assert!(&public.exists());
     assert!(file_exists!(public, "posts/extra-syntax/index.html"));
-    assert!(file_contains!(public, "posts/extra-syntax/index.html",
-            r#"<span style="color:#d08770;">test</span>"#));
+    assert!(file_contains!(
+        public,
+        "posts/extra-syntax/index.html",
+        r#"<span style="color:#d08770;">test</span>"#
+    ));
 }

@@ -1,9 +1,8 @@
 use std::collections::HashMap;

-use base64::{encode, decode};
+use base64::{decode, encode};
 use pulldown_cmark as cmark;
-use tera::{Value, to_value, Result as TeraResult};
-
+use tera::{to_value, Result as TeraResult, Value};

 pub fn markdown(value: Value, args: HashMap<String, Value>) -> TeraResult<Value> {
     let s = try_get_value!("markdown", "value", String, value);
@@ -31,33 +30,23 @@ pub fn markdown(value: Value, args: HashMap<String, Value>) -> TeraResult<Value>
     Ok(to_value(&html).unwrap())
 }

-
 pub fn base64_encode(value: Value, _: HashMap<String, Value>) -> TeraResult<Value> {
     let s = try_get_value!("base64_encode", "value", String, value);
-    Ok(
-        to_value(&encode(s.as_bytes())).unwrap()
-    )
+    Ok(to_value(&encode(s.as_bytes())).unwrap())
 }

 pub fn base64_decode(value: Value, _: HashMap<String, Value>) -> TeraResult<Value> {
     let s = try_get_value!("base64_decode", "value", String, value);
-    Ok(
-        to_value(
-            &String::from_utf8(
-                decode(s.as_bytes()).unwrap()
-            ).unwrap()
-        ).unwrap()
-    )
+    Ok(to_value(&String::from_utf8(decode(s.as_bytes()).unwrap()).unwrap()).unwrap())
 }

-
 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;

     use tera::to_value;

-    use super::{markdown, base64_decode, base64_encode};
+    use super::{base64_decode, base64_encode, markdown};

     #[test]
     fn markdown_filter() {
@@ -70,7 +59,10 @@ mod tests {
     fn markdown_filter_inline() {
         let mut args = HashMap::new();
         args.insert("inline".to_string(), to_value(true).unwrap());
-        let result = markdown(to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(), args);
+        let result = markdown(
+            to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(),
+            args,
+        );
         assert!(result.is_ok());
         assert_eq!(result.unwrap(), to_value(&"Using <code>map</code>, <code>filter</code>, and <code>fold</code> instead of <code>for</code>").unwrap());
     }
@@ -80,12 +72,18 @@ mod tests {
     fn markdown_filter_inline_tables() {
         let mut args = HashMap::new();
         args.insert("inline".to_string(), to_value(true).unwrap());
-        let result = markdown(to_value(&r#"
+        let result = markdown(
+            to_value(
+                &r#"
 |id|author_id| timestamp_created|title |content |
 |-:|--------:|-----------------------:|:---------------------|:-----------------|
 | 1| 1|2018-09-05 08:03:43.141Z|How to train your ORM |Badly written blog|
 | 2| 1|2018-08-22 13:11:50.050Z|How to bake a nice pie|Badly written blog|
-"#).unwrap(), args);
+"#,
+            )
+            .unwrap(),
+            args,
+        );
         assert!(result.is_ok());
         assert!(result.unwrap().as_str().unwrap().contains("<table>"));
     }
@@ -100,7 +98,7 @@ mod tests {
             ("foo", "Zm9v"),
             ("foob", "Zm9vYg=="),
             ("fooba", "Zm9vYmE="),
-            ("foobar", "Zm9vYmFy")
+            ("foobar", "Zm9vYmFy"),
         ];
         for (input, expected) in tests {
             let args = HashMap::new();
@@ -110,7 +108,6 @@ mod tests {
         }
     }

-
     #[test]
     fn base64_decode_filter() {
         let tests = vec![
@@ -120,7 +117,7 @@ mod tests {
             ("Zm9v", "foo"),
             ("Zm9vYg==", "foob"),
             ("Zm9vYmE=", "fooba"),
-            ("Zm9vYmFy", "foobar")
+            ("Zm9vYmFy", "foobar"),
         ];
         for (input, expected) in tests {
             let args = HashMap::new();

@@ -1,28 +1,28 @@
-extern crate toml;
 extern crate serde_json;
+extern crate toml;

-use utils::fs::{read_file, is_path_in_directory, get_file_time};
+use utils::fs::{get_file_time, is_path_in_directory, read_file};

-use std::hash::{Hasher, Hash};
-use std::str::FromStr;
-use std::fmt;
+use reqwest::{header, Client};
 use std::collections::hash_map::DefaultHasher;
-use reqwest::{Client, header};
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::str::FromStr;
 use url::Url;

 use std::path::PathBuf;
 use std::sync::{Arc, Mutex};

-
 use csv::Reader;
 use std::collections::HashMap;
-use tera::{GlobalFn, Value, from_value, to_value, Result, Map, Error};
+use tera::{from_value, to_value, Error, GlobalFn, Map, Result, Value};

-static GET_DATA_ARGUMENT_ERROR_MESSAGE: &str = "`load_data`: requires EITHER a `path` or `url` argument";
+static GET_DATA_ARGUMENT_ERROR_MESSAGE: &str =
+    "`load_data`: requires EITHER a `path` or `url` argument";

 enum DataSource {
     Url(Url),
-    Path(PathBuf)
+    Path(PathBuf),
 }

 #[derive(Debug)]
@@ -30,7 +30,7 @@ enum OutputFormat {
     Toml,
     Json,
     Csv,
-    Plain
+    Plain,
 }

 impl fmt::Display for OutputFormat {
@@ -54,7 +54,7 @@ impl FromStr for OutputFormat {
             "csv" => Ok(OutputFormat::Csv),
             "json" => Ok(OutputFormat::Json),
             "plain" => Ok(OutputFormat::Plain),
-            format => Err(format!("Unknown output format {}", format).into())
+            format => Err(format!("Unknown output format {}", format).into()),
         };
     }
 }
@@ -71,7 +71,11 @@ impl OutputFormat {
 }

 impl DataSource {
-    fn from_args(path_arg: Option<String>, url_arg: Option<String>, content_path: &PathBuf) -> Result<Self> {
+    fn from_args(
+        path_arg: Option<String>,
+        url_arg: Option<String>,
+        content_path: &PathBuf,
+    ) -> Result<Self> {
         if path_arg.is_some() && url_arg.is_some() {
             return Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into());
         }
@@ -85,7 +89,9 @@ impl DataSource {
         }

         if let Some(url) = url_arg {
-            return Url::parse(&url).map(|parsed_url| DataSource::Url(parsed_url)).map_err(|e| format!("Failed to parse {} as url: {}", url, e).into());
+            return Url::parse(&url)
+                .map(|parsed_url| DataSource::Url(parsed_url))
+                .map_err(|e| format!("Failed to parse {} as url: {}", url, e).into());
         }

         return Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into());
@@ -111,32 +117,37 @@ impl Hash for DataSource {
     }
 }

-
-fn get_data_source_from_args(content_path: &PathBuf, args: &HashMap<String, Value>) -> Result<DataSource> {
-    let path_arg = optional_arg!(
-        String,
-        args.get("path"),
-        GET_DATA_ARGUMENT_ERROR_MESSAGE
-    );
-
-    let url_arg = optional_arg!(
-        String,
-        args.get("url"),
-        GET_DATA_ARGUMENT_ERROR_MESSAGE
-    );
+fn get_data_source_from_args(
+    content_path: &PathBuf,
+    args: &HashMap<String, Value>,
+) -> Result<DataSource> {
+    let path_arg = optional_arg!(String, args.get("path"), GET_DATA_ARGUMENT_ERROR_MESSAGE);
+
+    let url_arg = optional_arg!(String, args.get("url"), GET_DATA_ARGUMENT_ERROR_MESSAGE);

     return DataSource::from_args(path_arg, url_arg, content_path);
 }

 fn read_data_file(base_path: &PathBuf, full_path: PathBuf) -> Result<String> {
-    if !is_path_in_directory(&base_path, &full_path).map_err(|e| format!("Failed to read data file {}: {}", full_path.display(), e))? {
-        return Err(format!("{} is not inside the base site directory {}", full_path.display(), base_path.display()).into());
+    if !is_path_in_directory(&base_path, &full_path)
+        .map_err(|e| format!("Failed to read data file {}: {}", full_path.display(), e))?
+    {
+        return Err(format!(
+            "{} is not inside the base site directory {}",
+            full_path.display(),
+            base_path.display()
+        )
+        .into());
     }
-    return read_file(&full_path)
-        .map_err(|e| format!("`load_data`: error {} loading file {}", full_path.to_str().unwrap(), e).into());
+    return read_file(&full_path).map_err(|e| {
+        format!("`load_data`: error {} loading file {}", full_path.to_str().unwrap(), e).into()
+    });
 }

-fn get_output_format_from_args(args: &HashMap<String, Value>, data_source: &DataSource) -> Result<OutputFormat> {
+fn get_output_format_from_args(
+    args: &HashMap<String, Value>,
+    data_source: &DataSource,
+) -> Result<OutputFormat> {
     let format_arg = optional_arg!(
         String,
         args.get("format"),
@@ -148,7 +159,10 @@ fn get_output_format_from_args(args: &HashMap<String, Value>, data_source: &Data
     }

     let from_extension = if let DataSource::Path(path) = data_source {
-        let extension_result: Result<&str> = path.extension().map(|extension| extension.to_str().unwrap()).ok_or(format!("Could not determine format for {} from extension", path.display()).into());
+        let extension_result: Result<&str> =
+            path.extension().map(|extension| extension.to_str().unwrap()).ok_or(
+                format!("Could not determine format for {} from extension", path.display()).into(),
+            );
         extension_result?
     } else {
         "plain"
@@ -156,7 +170,6 @@ fn get_output_format_from_args(args: &HashMap<String, Value>, data_source: &Data
     return OutputFormat::from_str(from_extension);
 }

-
 /// A global function to load data from a file or from a URL
 /// Currently the supported formats are json, toml, csv and plain text
 pub fn make_load_data(content_path: PathBuf, base_path: PathBuf) -> GlobalFn {
@@ -180,9 +193,22 @@ pub fn make_load_data(content_path: PathBuf, base_path: PathBuf) -> GlobalFn {
         let data = match data_source {
             DataSource::Path(path) => read_data_file(&base_path, path),
             DataSource::Url(url) => {
-                let mut response = response_client.get(url.as_str()).header(header::ACCEPT, file_format.as_accept_header()).send().and_then(|res| res.error_for_status()).map_err(|e| format!("Failed to request {}: {}", url, e.status().expect("response status")))?;
-                response.text().map_err(|e| format!("Failed to parse response from {}: {:?}", url, e).into())
-            },
+                let mut response = response_client
+                    .get(url.as_str())
+                    .header(header::ACCEPT, file_format.as_accept_header())
+                    .send()
+                    .and_then(|res| res.error_for_status())
+                    .map_err(|e| {
+                        format!(
+                            "Failed to request {}: {}",
+                            url,
+                            e.status().expect("response status")
+                        )
+                    })?;
+                response
+                    .text()
+                    .map_err(|e| format!("Failed to parse response from {}: {:?}", url, e).into())
+            }
         }?;

         let result_value: Result<Value> = match file_format {
@@ -202,7 +228,8 @@ pub fn make_load_data(content_path: PathBuf, base_path: PathBuf) -> GlobalFn {

 /// Parse a JSON string and convert it to a Tera Value
 fn load_json(json_data: String) -> Result<Value> {
-    let json_content: Value = serde_json::from_str(json_data.as_str()).map_err(|e| format!("{:?}", e))?;
+    let json_content: Value =
+        serde_json::from_str(json_data.as_str()).map_err(|e| format!("{:?}", e))?;
     return Ok(json_content);
 }
@@ -235,12 +262,11 @@ fn load_csv(csv_data: String) -> Result<Value> {
     let mut csv_map = Map::new();

     {
-        let hdrs = reader.headers()
-            .map_err(|e| format!("'load_data': {} - unable to read CSV header line (line 1) for CSV file", e))?;
+        let hdrs = reader.headers().map_err(|e| {
+            format!("'load_data': {} - unable to read CSV header line (line 1) for CSV file", e)
+        })?;

-        let headers_array = hdrs.iter()
-            .map(|v| Value::String(v.to_string()))
-            .collect();
+        let headers_array = hdrs.iter().map(|v| Value::String(v.to_string())).collect();

         csv_map.insert(String::from("headers"), Value::Array(headers_array));
     }
@@ -268,7 +294,6 @@ fn load_csv(csv_data: String) -> Result<Value> {
     to_value(csv_value).map_err(|err| err.into())
 }

-
 #[cfg(test)]
 mod tests {
     use super::{make_load_data, DataSource, OutputFormat};
@@ -285,7 +310,8 @@ mod tests {

     #[test]
     fn fails_when_missing_file() {
-        let static_fn = make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils"));
+        let static_fn =
+            make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils"));
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("../../../READMEE.md").unwrap());
         let result = static_fn(args);
@@ -295,40 +321,54 @@ mod tests {

     #[test]
     fn cant_load_outside_content_dir() {
-        let static_fn = make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils"));
+        let static_fn =
+            make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils"));
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("../../../README.md").unwrap());
         args.insert("format".to_string(), to_value("plain").unwrap());
         let result = static_fn(args);
         assert!(result.is_err());
-        assert!(result.unwrap_err().description().contains("README.md is not inside the base site directory"));
+        assert!(
+            result
+                .unwrap_err()
+                .description()
+                .contains("README.md is not inside the base site directory")
+        );
     }

     #[test]
     fn calculates_cache_key_for_path() {
         // We can't test against a fixed value, due to the fact the cache key is built from the absolute path
-        let cache_key = DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
-        let cache_key_2 = DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
+        let cache_key =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
+        let cache_key_2 =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
         assert_eq!(cache_key, cache_key_2);
     }

     #[test]
     fn calculates_cache_key_for_url() {
-        let cache_key = DataSource::Url("https://api.github.com/repos/getzola/zola".parse().unwrap()).get_cache_key(&OutputFormat::Plain);
+        let cache_key =
+            DataSource::Url("https://api.github.com/repos/getzola/zola".parse().unwrap())
+                .get_cache_key(&OutputFormat::Plain);
         assert_eq!(cache_key, 8916756616423791754);
     }

     #[test]
     fn different_cache_key_per_filename() {
-        let toml_cache_key = DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
-        let json_cache_key = DataSource::Path(get_test_file("test.json")).get_cache_key(&OutputFormat::Toml);
+        let toml_cache_key =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
+        let json_cache_key =
+            DataSource::Path(get_test_file("test.json")).get_cache_key(&OutputFormat::Toml);
         assert_ne!(toml_cache_key, json_cache_key);
     }

     #[test]
     fn different_cache_key_per_format() {
-        let toml_cache_key = DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
-        let json_cache_key = DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Json);
+        let toml_cache_key =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
+        let json_cache_key =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Json);
         assert_ne!(toml_cache_key, json_cache_key);
     }

@@ -339,7 +379,10 @@ mod tests {
         args.insert("url".to_string(), to_value("https://httpbin.org/json").unwrap());
         args.insert("format".to_string(), to_value("json").unwrap());
         let result = static_fn(args).unwrap();
-        assert_eq!(result.get("slideshow").unwrap().get("title").unwrap(), &to_value("Sample Slide Show").unwrap());
+        assert_eq!(
+            result.get("slideshow").unwrap().get("title").unwrap(),
+            &to_value("Sample Slide Show").unwrap()
+        );
     }

     #[test]
@@ -350,60 +393,78 @@ mod tests {
         args.insert("format".to_string(), to_value("json").unwrap());
         let result = static_fn(args);
         assert!(result.is_err());
-        assert_eq!(result.unwrap_err().description(), "Failed to request https://httpbin.org/status/404/: 404 Not Found");
+        assert_eq!(
+            result.unwrap_err().description(),
+            "Failed to request https://httpbin.org/status/404/: 404 Not Found"
+        );
     }

     #[test]
-    fn can_load_toml()
-    {
-        let static_fn = make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils/test-files"));
+    fn can_load_toml() {
+        let static_fn = make_load_data(
+            PathBuf::from("../utils/test-files"),
+            PathBuf::from("../utils/test-files"),
+        );
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("test.toml").unwrap());
         let result = static_fn(args.clone()).unwrap();

         //TOML does not load in order, and also dates are not returned as strings, but
         //rather as another object with a key and value
-        assert_eq!(result, json!({
-            "category": {
-                "date": {
-                    "$__toml_private_datetime": "1979-05-27T07:32:00Z"
-                },
-                "key": "value"
-            },
-        }));
+        assert_eq!(
+            result,
+            json!({
+                "category": {
+                    "date": {
+                        "$__toml_private_datetime": "1979-05-27T07:32:00Z"
+                    },
+                    "key": "value"
+                },
+            })
+        );
     }

     #[test]
-    fn can_load_csv()
-    {
-        let static_fn = make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils/test-files"));
+    fn can_load_csv() {
+        let static_fn = make_load_data(
+            PathBuf::from("../utils/test-files"),
+            PathBuf::from("../utils/test-files"),
+        );
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("test.csv").unwrap());
         let result = static_fn(args.clone()).unwrap();

-        assert_eq!(result, json!({
-            "headers": ["Number", "Title"],
-            "records": [
-                ["1", "Gutenberg"],
-                ["2", "Printing"]
-            ],
-        }))
+        assert_eq!(
+            result,
+            json!({
+                "headers": ["Number", "Title"],
+                "records": [
+                    ["1", "Gutenberg"],
+                    ["2", "Printing"]
+                ],
+            })
+        )
     }

     #[test]
-    fn can_load_json()
-    {
-        let static_fn = make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils/test-files"));
+    fn can_load_json() {
+        let static_fn = make_load_data(
+            PathBuf::from("../utils/test-files"),
+            PathBuf::from("../utils/test-files"),
+        );
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("test.json").unwrap());
         let result = static_fn(args.clone()).unwrap();

-        assert_eq!(result, json!({
-            "key": "value",
-            "array": [1, 2, 3],
-            "subpackage": {
-                "subkey": 5
-            }
-        }))
+        assert_eq!(
+            result,
+            json!({
+                "key": "value",
+                "array": [1, 2, 3],
+                "subpackage": {
+                    "subkey": 5
+                }
+            })
+        )
     }
 }

@@ -4,9 +4,9 @@ macro_rules! required_arg {
         match $e {
             Some(v) => match from_value::<$ty>(v.clone()) {
                 Ok(u) => u,
-                Err(_) => return Err($err.into())
+                Err(_) => return Err($err.into()),
             },
-            None => return Err($err.into())
+            None => return Err($err.into()),
         }
     };
 }
@@ -17,9 +17,9 @@ macro_rules! optional_arg {
         match $e {
             Some(v) => match from_value::<$ty>(v.clone()) {
                 Ok(u) => Some(u),
-                Err(_) => return Err($err.into())
+                Err(_) => return Err($err.into()),
             },
-            None => None
+            None => None,
         }
     };
 }

@ -3,10 +3,10 @@ extern crate error_chain;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::sync::{Arc, Mutex};
|
use std::sync::{Arc, Mutex};
|
||||||
|
|
||||||
use tera::{GlobalFn, Value, from_value, to_value, Result};
|
use tera::{from_value, to_value, GlobalFn, Result, Value};
|
||||||
|
|
||||||
use library::{Taxonomy, Library};
|
|
||||||
use config::Config;
|
use config::Config;
|
||||||
|
use library::{Library, Taxonomy};
|
||||||
use utils::site::resolve_internal_link;
|
use utils::site::resolve_internal_link;
|
||||||
|
|
||||||
use imageproc;
|
use imageproc;
|
||||||
|
@@ -18,24 +18,19 @@ mod load_data;
 pub use self::load_data::make_load_data;
 
 pub fn make_trans(config: Config) -> GlobalFn {
     let translations_config = config.translations;
     let default_lang = config.default_language.clone();
 
     Box::new(move |args| -> Result<Value> {
         let key = required_arg!(String, args.get("key"), "`trans` requires a `key` argument.");
-        let lang = optional_arg!(
-            String,
-            args.get("lang"),
-            "`trans`: `lang` must be a string."
-        ).unwrap_or_else(|| default_lang.clone());
+        let lang = optional_arg!(String, args.get("lang"), "`trans`: `lang` must be a string.")
+            .unwrap_or_else(|| default_lang.clone());
         let translations = &translations_config[lang.as_str()];
         Ok(to_value(&translations[key.as_str()]).unwrap())
     })
 }
 
 pub fn make_get_page(library: &Library) -> GlobalFn {
     let mut pages = HashMap::new();
     for page in library.pages_values() {
@@ -53,7 +48,7 @@ pub fn make_get_page(library: &Library) -> GlobalFn {
         );
         match pages.get(&path) {
             Some(p) => Ok(p.clone()),
-            None => Err(format!("Page `{}` not found.", path).into())
+            None => Err(format!("Page `{}` not found.", path).into()),
         }
     })
 }
@@ -64,12 +59,14 @@ pub fn make_get_section(library: &Library) -> GlobalFn {
     for section in library.sections_values() {
         sections.insert(
             section.file.relative.clone(),
-            to_value(library.get_section(&section.file.path).unwrap().to_serialized(library)).unwrap(),
+            to_value(library.get_section(&section.file.path).unwrap().to_serialized(library))
+                .unwrap(),
         );
 
         sections_basic.insert(
             section.file.relative.clone(),
-            to_value(library.get_section(&section.file.path).unwrap().to_serialized_basic(library)).unwrap(),
+            to_value(library.get_section(&section.file.path).unwrap().to_serialized_basic(library))
+                .unwrap(),
         );
     }
@@ -82,36 +79,25 @@ pub fn make_get_section(library: &Library) -> GlobalFn {
 
         let metadata_only = args
             .get("metadata_only")
-            .map_or(false, |c| {
-                from_value::<bool>(c.clone()).unwrap_or(false)
-            });
+            .map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));
 
-        let container = if metadata_only {
-            &sections_basic
-        } else {
-            &sections
-        };
+        let container = if metadata_only { &sections_basic } else { &sections };
 
         match container.get(&path) {
             Some(p) => Ok(p.clone()),
-            None => Err(format!("Section `{}` not found.", path).into())
+            None => Err(format!("Section `{}` not found.", path).into()),
         }
     })
 }
 
 pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> GlobalFn {
     Box::new(move |args| -> Result<Value> {
-        let cachebust = args
-            .get("cachebust")
-            .map_or(false, |c| {
-                from_value::<bool>(c.clone()).unwrap_or(false)
-            });
+        let cachebust =
+            args.get("cachebust").map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));
 
         let trailing_slash = args
             .get("trailing_slash")
-            .map_or(false, |c| {
-                from_value::<bool>(c.clone()).unwrap_or(false)
-            });
+            .map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));
 
         let path = required_arg!(
             String,
@@ -121,7 +107,9 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> Glob
         if path.starts_with("./") {
             match resolve_internal_link(&path, &permalinks) {
                 Ok(url) => Ok(to_value(url).unwrap()),
-                Err(_) => Err(format!("Could not resolve URL for link `{}` not found.", path).into())
+                Err(_) => {
+                    Err(format!("Could not resolve URL for link `{}` not found.", path).into())
+                }
             }
         } else {
             // anything else
@@ -141,10 +129,8 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> Glob
 pub fn make_get_taxonomy(all_taxonomies: &[Taxonomy], library: &Library) -> GlobalFn {
     let mut taxonomies = HashMap::new();
     for taxonomy in all_taxonomies {
-        taxonomies.insert(
-            taxonomy.kind.name.clone(),
-            to_value(taxonomy.to_serialized(library)).unwrap()
-        );
+        taxonomies
+            .insert(taxonomy.kind.name.clone(), to_value(taxonomy.to_serialized(library)).unwrap());
     }
 
     Box::new(move |args| -> Result<Value> {
@@ -155,9 +141,11 @@ pub fn make_get_taxonomy(all_taxonomies: &[Taxonomy], library: &Library) -> Glob
         );
         let container = match taxonomies.get(&kind) {
             Some(c) => c,
-            None => return Err(
-                format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into()
-            ),
+            None => {
+                return Err(
+                    format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into()
+                )
+            }
         };
 
         Ok(to_value(container).unwrap())
@@ -187,18 +175,20 @@ pub fn make_get_taxonomy_url(all_taxonomies: &[Taxonomy]) -> GlobalFn {
         );
         let container = match taxonomies.get(&kind) {
             Some(c) => c,
-            None => return Err(
-                format!("`get_taxonomy_url` received an unknown taxonomy as kind: {}", kind).into()
-            )
+            None => {
+                return Err(format!(
+                    "`get_taxonomy_url` received an unknown taxonomy as kind: {}",
+                    kind
+                )
+                .into())
+            }
         };
 
         if let Some(ref permalink) = container.get(&name) {
             return Ok(to_value(permalink.clone()).unwrap());
         }
 
-        Err(
-            format!("`get_taxonomy_url`: couldn't find `{}` in `{}` taxonomy", name, kind).into()
-        )
+        Err(format!("`get_taxonomy_url`: couldn't find `{}` in `{}` taxonomy", name, kind).into())
     })
 }
@@ -222,16 +212,11 @@ pub fn make_resize_image(imageproc: Arc<Mutex<imageproc::Processor>>) -> GlobalF
             args.get("height"),
             "`resize_image`: `height` must be a non-negative integer"
         );
-        let op = optional_arg!(
-            String,
-            args.get("op"),
-            "`resize_image`: `op` must be a string"
-        ).unwrap_or_else(|| DEFAULT_OP.to_string());
-        let quality = optional_arg!(
-            u8,
-            args.get("quality"),
-            "`resize_image`: `quality` must be a number"
-        ).unwrap_or(DEFAULT_Q);
+        let op = optional_arg!(String, args.get("op"), "`resize_image`: `op` must be a string")
+            .unwrap_or_else(|| DEFAULT_OP.to_string());
+        let quality =
+            optional_arg!(u8, args.get("quality"), "`resize_image`: `quality` must be a number")
+                .unwrap_or(DEFAULT_Q);
         if quality == 0 || quality > 100 {
             return Err("`resize_image`: `quality` must be in range 1-100".to_string().into());
         }
@@ -249,19 +234,16 @@ pub fn make_resize_image(imageproc: Arc<Mutex<imageproc::Processor>>) -> GlobalF
     })
 }
 
 #[cfg(test)]
 mod tests {
-    use super::{make_get_url, make_get_taxonomy, make_get_taxonomy_url, make_trans};
+    use super::{make_get_taxonomy, make_get_taxonomy_url, make_get_url, make_trans};
 
     use std::collections::HashMap;
 
     use tera::{to_value, Value};
 
     use config::{Config, Taxonomy as TaxonomyConfig};
-    use library::{Taxonomy, TaxonomyItem, Library};
+    use library::{Library, Taxonomy, TaxonomyItem};
 
     #[test]
     fn can_add_cachebust_to_url() {
@@ -307,17 +289,8 @@ mod tests {
     fn can_get_taxonomy() {
         let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
         let library = Library::new(0, 0);
-        let tag = TaxonomyItem::new(
-            "Programming",
-            "tags",
-            &Config::default(),
-            vec![],
-            &library
-        );
-        let tags = Taxonomy {
-            kind: taxo_config,
-            items: vec![tag],
-        };
+        let tag = TaxonomyItem::new("Programming", "tags", &Config::default(), vec![], &library);
+        let tags = Taxonomy { kind: taxo_config, items: vec![tag] };
 
         let taxonomies = vec![tags.clone()];
         let static_fn = make_get_taxonomy(&taxonomies, &library);
@@ -337,7 +310,8 @@ mod tests {
             Value::String("programming".to_string())
         );
         assert_eq!(
-            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["permalink"],
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()
+                ["permalink"],
             Value::String("http://a-website.com/tags/programming/".to_string())
        );
         assert_eq!(
@@ -354,17 +328,8 @@ mod tests {
     fn can_get_taxonomy_url() {
         let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
         let library = Library::new(0, 0);
-        let tag = TaxonomyItem::new(
-            "Programming",
-            "tags",
-            &Config::default(),
-            vec![],
-            &library
-        );
-        let tags = Taxonomy {
-            kind: taxo_config,
-            items: vec![tag],
-        };
+        let tag = TaxonomyItem::new("Programming", "tags", &Config::default(), vec![], &library);
+        let tags = Taxonomy { kind: taxo_config, items: vec![tag] };
 
         let taxonomies = vec![tags.clone()];
         let static_fn = make_get_taxonomy_url(&taxonomies);
@@ -372,7 +337,10 @@
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("tags").unwrap());
         args.insert("name".to_string(), to_value("Programming").unwrap());
-        assert_eq!(static_fn(args).unwrap(), to_value("http://a-website.com/tags/programming/").unwrap());
+        assert_eq!(
+            static_fn(args).unwrap(),
+            to_value("http://a-website.com/tags/programming/").unwrap()
+        );
         // and errors if it can't find it
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("tags").unwrap());
@@ -3,28 +3,27 @@ extern crate lazy_static;
 #[macro_use]
 extern crate tera;
 extern crate base64;
-extern crate pulldown_cmark;
 extern crate csv;
+extern crate pulldown_cmark;
 extern crate reqwest;
 extern crate url;
 
 #[cfg(test)]
 #[macro_use]
 extern crate serde_json;
 #[cfg(not(test))]
 extern crate serde_json;
 
-extern crate errors;
-extern crate utils;
-extern crate library;
 extern crate config;
+extern crate errors;
 extern crate imageproc;
+extern crate library;
+extern crate utils;
 
 pub mod filters;
 pub mod global_fns;
 
-use tera::{Tera, Context};
+use tera::{Context, Tera};
 
 use errors::{Result, ResultExt};
@@ -37,14 +36,13 @@ lazy_static! {
             ("sitemap.xml", include_str!("builtins/sitemap.xml")),
             ("robots.txt", include_str!("builtins/robots.txt")),
             ("anchor-link.html", include_str!("builtins/anchor-link.html")),
 
             ("shortcodes/youtube.html", include_str!("builtins/shortcodes/youtube.html")),
             ("shortcodes/vimeo.html", include_str!("builtins/shortcodes/vimeo.html")),
             ("shortcodes/gist.html", include_str!("builtins/shortcodes/gist.html")),
             ("shortcodes/streamable.html", include_str!("builtins/shortcodes/streamable.html")),
 
             ("internal/alias.html", include_str!("builtins/internal/alias.html")),
-        ]).unwrap();
+        ])
+        .unwrap();
         tera.register_filter("markdown", filters::markdown);
         tera.register_filter("base64_encode", filters::base64_encode);
         tera.register_filter("base64_decode", filters::base64_decode);
@@ -52,7 +50,6 @@ lazy_static! {
     };
 }
 
-
 /// Renders the `internal/alias.html` template that will redirect
 /// via refresh to the url given
 pub fn render_redirect_template(url: &str, tera: &Tera) -> Result<String> {
@@ -1,20 +1,22 @@
+use std::fs::{copy, create_dir_all, read_dir, File};
 use std::io::prelude::*;
-use std::fs::{File, create_dir_all, read_dir, copy};
 use std::path::{Path, PathBuf};
 use std::time::SystemTime;
 use walkdir::WalkDir;
 
 use errors::{Result, ResultExt};
 
 pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> {
-    let canonical_path = path.canonicalize().map_err(|e| format!("Failed to canonicalize {}: {}", path.display(), e))?;
-    let canonical_parent = parent.canonicalize().map_err(|e| format!("Failed to canonicalize {}: {}", parent.display(), e))?;
+    let canonical_path = path
+        .canonicalize()
+        .map_err(|e| format!("Failed to canonicalize {}: {}", path.display(), e))?;
+    let canonical_parent = parent
+        .canonicalize()
+        .map_err(|e| format!("Failed to canonicalize {}: {}", parent.display(), e))?;
 
     Ok(canonical_path.starts_with(canonical_parent))
 }
 
 /// Create a file with the content given
 pub fn create_file(path: &Path, content: &str) -> Result<()> {
     let mut file = File::create(&path)?;
@@ -119,7 +121,11 @@ pub fn get_file_time(path: &Path) -> Option<SystemTime> {
 
 /// Compares source and target files' timestamps and returns true if the source file
 /// has been created _or_ updated after the target file has
-pub fn file_stale<PS, PT>(p_source: PS, p_target: PT) -> bool where PS: AsRef<Path>, PT: AsRef<Path> {
+pub fn file_stale<PS, PT>(p_source: PS, p_target: PT) -> bool
+where
+    PS: AsRef<Path>,
+    PT: AsRef<Path>,
+{
     let p_source = p_source.as_ref();
     let p_target = p_target.as_ref();
@@ -133,7 +139,6 @@ pub fn file_stale<PS, PT>(p_source: PS, p_target: PT) -> bool where PS: AsRef<Pa
     time_source.and_then(|ts| time_target.map(|tt| ts > tt)).unwrap_or(true)
 }
 
-
 #[cfg(test)]
 mod tests {
     use std::fs::File;
@@ -4,10 +4,10 @@ extern crate errors;
 #[cfg(test)]
 extern crate tempfile;
 extern crate tera;
-extern crate walkdir;
 extern crate unicode_segmentation;
+extern crate walkdir;
 
 pub mod fs;
+pub mod net;
 pub mod site;
 pub mod templates;
-pub mod net;
@@ -1,9 +1,7 @@
 use std::net::TcpListener;
 
-
 pub fn get_available_port(avoid: u16) -> Option<u16> {
-    (1000..9000)
-        .find(|port| *port != avoid && port_is_available(*port))
+    (1000..9000).find(|port| *port != avoid && port_is_available(*port))
 }
 
 pub fn port_is_available(port: u16) -> bool {
@@ -31,12 +31,11 @@ pub fn resolve_internal_link(link: &str, permalinks: &HashMap<String, String>) -
     }
 }
 
-
 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;
 
-    use super::{resolve_internal_link, get_reading_analytics};
+    use super::{get_reading_analytics, resolve_internal_link};
 
     #[test]
     fn can_resolve_valid_internal_link() {
@@ -1,56 +1,55 @@
 use std::collections::HashMap;
 
-use tera::{Tera, Context};
+use tera::{Context, Tera};
 
 use errors::Result;
 
 static DEFAULT_TPL: &str = include_str!("default_tpl.html");
 
-
 macro_rules! render_default_tpl {
-    ($filename: expr, $url: expr) => {
-        {
-            let mut context = Context::new();
-            context.insert("filename", $filename);
-            context.insert("url", $url);
-            Tera::one_off(DEFAULT_TPL, &context, true).map_err(|e| e.into())
-        }
-    };
+    ($filename: expr, $url: expr) => {{
+        let mut context = Context::new();
+        context.insert("filename", $filename);
+        context.insert("url", $url);
+        Tera::one_off(DEFAULT_TPL, &context, true).map_err(|e| e.into())
+    }};
 }
 
 /// Renders the given template with the given context, but also ensures that, if the default file
 /// is not found, it will look up for the equivalent template for the current theme if there is one.
 /// Lastly, if it's a default template (index, section or page), it will just return an empty string
 /// to avoid an error if there isn't a template with that name
-pub fn render_template(name: &str, tera: &Tera, context: &Context, theme: &Option<String>) -> Result<String> {
+pub fn render_template(
+    name: &str,
+    tera: &Tera,
+    context: &Context,
+    theme: &Option<String>,
+) -> Result<String> {
     if tera.templates.contains_key(name) {
-        return tera
-            .render(name, context)
-            .map_err(|e| e.into());
+        return tera.render(name, context).map_err(|e| e.into());
     }
 
     if let Some(ref t) = *theme {
-        return tera
-            .render(&format!("{}/templates/{}", t, name), context)
-            .map_err(|e| e.into());
+        return tera.render(&format!("{}/templates/{}", t, name), context).map_err(|e| e.into());
     }
 
     // maybe it's a default one?
     match name {
-        "index.html" | "section.html" => {
-            render_default_tpl!(name, "https://www.getzola.org/documentation/templates/pages-sections/#section-variables")
-        }
-        "page.html" => {
-            render_default_tpl!(name, "https://www.getzola.org/documentation/templates/pages-sections/#page-variables")
-        }
+        "index.html" | "section.html" => render_default_tpl!(
+            name,
+            "https://www.getzola.org/documentation/templates/pages-sections/#section-variables"
+        ),
+        "page.html" => render_default_tpl!(
+            name,
+            "https://www.getzola.org/documentation/templates/pages-sections/#page-variables"
+        ),
         "single.html" | "list.html" => {
             render_default_tpl!(name, "https://www.getzola.org/documentation/templates/taxonomies/")
         }
-        _ => bail!("Tried to render `{}` but the template wasn't found", name)
+        _ => bail!("Tried to render `{}` but the template wasn't found", name),
     }
 }
@@ -63,7 +62,7 @@ pub fn rewrite_theme_paths(tera: &mut Tera, theme: &str) {
     let old_templates = ::std::mem::replace(&mut tera.templates, HashMap::new());
 
     // We want to match the paths in the templates to the new names
-    for (key, mut tpl) in old_templates{
+    for (key, mut tpl) in old_templates {
         tpl.name = format!("{}/templates/{}", theme, tpl.name);
         // First the parent if there is none
         if let Some(ref p) = tpl.parent.clone() {
@@ -97,8 +96,8 @@ pub fn rewrite_theme_paths(tera: &mut Tera, theme: &str) {
 
 #[cfg(test)]
 mod tests {
-    use tera::Tera;
     use super::rewrite_theme_paths;
+    use tera::Tera;
 
     #[test]
     fn can_rewrite_all_paths_of_theme() {
1 rustfmt.toml Normal file

@@ -0,0 +1 @@
+use_small_heuristics = "max"
@@ -1,12 +1,11 @@
-use std::fs::{create_dir, canonicalize};
+use std::fs::{canonicalize, create_dir};
 use std::path::Path;
 
 use errors::Result;
 use utils::fs::create_file;
 
-use prompt::{ask_bool, ask_url};
 use console;
+use prompt::{ask_bool, ask_url};
 
 const CONFIG: &str = r#"
 # The URL the site will be built for
@@ -26,7 +25,6 @@ build_search_index = %SEARCH%
 # Put all your custom variables here
 "#;
 
-
 pub fn create_new_project(name: &str) -> Result<()> {
     let path = Path::new(name);
     // Better error message than the rust default
@@ -62,7 +60,9 @@ pub fn create_new_project(name: &str) -> Result<()> {
     println!();
     console::success(&format!("Done! Your site was created in {:?}", canonicalize(path).unwrap()));
     println!();
-    console::info("Get started by moving into the directory and using the built-in server: `zola serve`");
+    console::info(
+        "Get started by moving into the directory and using the built-in server: `zola serve`",
+    );
     println!("Visit https://www.getzola.org for the full documentation.");
     Ok(())
 }
@@ -1,7 +1,7 @@
-mod init;
 mod build;
+mod init;
 mod serve;
 
-pub use self::init::create_new_project;
 pub use self::build::build;
+pub use self::init::create_new_project;
 pub use self::serve::serve;
178 src/cmd/serve.rs

@@ -26,18 +26,18 @@ use std::fs::{remove_dir_all, File};
 use std::io::{self, Read};
 use std::path::{Path, PathBuf, MAIN_SEPARATOR};
 use std::sync::mpsc::channel;
-use std::time::{Instant, Duration};
 use std::thread;
+use std::time::{Duration, Instant};
 
-use chrono::prelude::*;
+use actix_web::middleware::{Middleware, Response, Started};
 use actix_web::{self, fs, http, server, App, HttpRequest, HttpResponse, Responder};
-use actix_web::middleware::{Middleware, Started, Response};
-use notify::{Watcher, RecursiveMode, watcher};
-use ws::{WebSocket, Sender, Message};
+use chrono::prelude::*;
 use ctrlc;
+use notify::{watcher, RecursiveMode, Watcher};
+use ws::{Message, Sender, WebSocket};
 
-use site::Site;
 use errors::{Result, ResultExt};
+use site::Site;
 use utils::fs::copy_file;
 
 use console;
@@ -93,7 +93,9 @@ fn livereload_handler(_: &HttpRequest) -> &'static str {
 fn rebuild_done_handling(broadcaster: &Sender, res: Result<()>, reload_path: &str) {
     match res {
         Ok(_) => {
-            broadcaster.send(format!(r#"
+            broadcaster
+                .send(format!(
+                    r#"
                 {{
                     "command": "reload",
                     "path": "{}",
@@ -101,14 +103,22 @@ fn rebuild_done_handling(broadcaster: &Sender, res: Result<()>, reload_path: &st
                     "liveCSS": true,
                     "liveImg": true,
                     "protocol": ["http://livereload.com/protocols/official-7"]
-                }}"#, reload_path)
-            ).unwrap();
-        },
-        Err(e) => console::unravel_errors("Failed to build the site", &e)
+                }}"#,
+                    reload_path
+                ))
+                .unwrap();
+        }
+        Err(e) => console::unravel_errors("Failed to build the site", &e),
     }
 }
 
-fn create_new_site(interface: &str, port: u16, output_dir: &str, base_url: &str, config_file: &str) -> Result<(Site, String)> {
+fn create_new_site(
+    interface: &str,
+    port: u16,
+    output_dir: &str,
+    base_url: &str,
+    config_file: &str,
+) -> Result<(Site, String)> {
     let mut site = Site::new(env::current_dir().unwrap(), config_file)?;
 
     let base_address = format!("{}:{}", base_url, port);
@@ -140,14 +150,23 @@ fn create_new_site(interface: &str, port: u16, output_dir: &str, base_url: &str,
 /// Rather than deal with all of that, we can hijack a hook for presenting a
 /// custom directory listing response and serve it up using their
 /// `NamedFile` responder.
-fn handle_directory<'a, 'b>(dir: &'a fs::Directory, req: &'b HttpRequest) -> io::Result<HttpResponse> {
+fn handle_directory<'a, 'b>(
+    dir: &'a fs::Directory,
+    req: &'b HttpRequest,
+) -> io::Result<HttpResponse> {
     let mut path = PathBuf::from(&dir.base);
     path.push(&dir.path);
     path.push("index.html");
     fs::NamedFile::open(path)?.respond_to(req)
 }
 
-pub fn serve(interface: &str, port: u16, output_dir: &str, base_url: &str, config_file: &str) -> Result<()> {
+pub fn serve(
+    interface: &str,
+    port: u16,
+    output_dir: &str,
+    base_url: &str,
+    config_file: &str,
+) -> Result<()> {
     let start = Instant::now();
     let (mut site, address) = create_new_site(interface, port, output_dir, base_url, config_file)?;
     console::report_elapsed_time(start);
@@ -157,20 +176,24 @@ pub fn serve(interface: &str, port: u16, output_dir: &str, base_url: &str, confi
     let mut watching_templates = false;
     let (tx, rx) = channel();
     let mut watcher = watcher(tx, Duration::from_secs(2)).unwrap();
-    watcher.watch("content/", RecursiveMode::Recursive)
+    watcher
+        .watch("content/", RecursiveMode::Recursive)
         .chain_err(|| "Can't watch the `content` folder. Does it exist?")?;
-    watcher.watch(config_file, RecursiveMode::Recursive)
+    watcher
+        .watch(config_file, RecursiveMode::Recursive)
         .chain_err(|| "Can't watch the `config` file. Does it exist?")?;
 
     if Path::new("static").exists() {
         watching_static = true;
-        watcher.watch("static/", RecursiveMode::Recursive)
+        watcher
+            .watch("static/", RecursiveMode::Recursive)
             .chain_err(|| "Can't watch the `static` folder.")?;
     }
 
     if Path::new("templates").exists() {
         watching_templates = true;
-        watcher.watch("templates/", RecursiveMode::Recursive)
+        watcher
+            .watch("templates/", RecursiveMode::Recursive)
            .chain_err(|| "Can't watch the `templates` folder.")?;
     }
 
@@ -186,16 +209,16 @@ pub fn serve(interface: &str, port: u16, output_dir: &str, base_url: &str, confi
     thread::spawn(move || {
         let s = server::new(move || {
             App::new()
                 .middleware(NotFoundHandler { rendered_template: static_root.join("404.html") })
                 .resource(r"/livereload.js", |r| r.f(livereload_handler))
                 // Start a webserver that serves the `output_dir` directory
                 .handler(
                     r"/",
                     fs::StaticFiles::new(&static_root)
                         .unwrap()
                         .show_files_listing()
-                        .files_listing_renderer(handle_directory)
+                        .files_listing_renderer(handle_directory),
                 )
         })
         .bind(&address)
         .expect("Can't start the webserver")
@@ -208,17 +231,20 @@ pub fn serve(interface: &str, port: u16, output_dir: &str, base_url: &str, confi
     let ws_server = WebSocket::new(|output: Sender| {
         move |msg: Message| {
             if msg.into_text().unwrap().contains("\"hello\"") {
-                return output.send(Message::text(r#"
+                return output.send(Message::text(
+                    r#"
                 {
                     "command": "hello",
                     "protocols": [ "http://livereload.com/protocols/official-7" ],
                     "serverName": "Zola"
                 }
-                "#));
+                "#,
+                ));
             }
             Ok(())
         }
-    }).unwrap();
+    })
+    .unwrap();
     let broadcaster = ws_server.broadcaster();
     thread::spawn(move || {
         ws_server.listen(&*ws_address).unwrap();
@@ -237,14 +263,20 @@ pub fn serve(interface: &str, port: u16, output_dir: &str, base_url: &str, confi
         watchers.push("sass");
     }
 
-    println!("Listening for changes in {}{}{{{}}}", pwd.display(), MAIN_SEPARATOR, watchers.join(", "));
+    println!(
+        "Listening for changes in {}{}{{{}}}",
+        pwd.display(),
+        MAIN_SEPARATOR,
+        watchers.join(", ")
+    );
 
     println!("Press Ctrl+C to stop\n");
     // Delete the output folder on ctrl+C
     ctrlc::set_handler(move || {
         remove_dir_all(&output_path).expect("Failed to delete output directory");
         ::std::process::exit(0);
-    }).expect("Error setting Ctrl-C handler");
+    })
+    .expect("Error setting Ctrl-C handler");
 
     use notify::DebouncedEvent::*;
 
@@ -252,47 +284,74 @@ pub fn serve(interface: &str, port: u16, output_dir: &str, base_url: &str, confi
         match rx.recv() {
             Ok(event) => {
                 match event {
-                    Create(path) |
-                    Write(path) |
-                    Remove(path) |
-                    Rename(_, path) => {
+                    Create(path) | Write(path) | Remove(path) | Rename(_, path) => {
                         if is_temp_file(&path) || path.is_dir() {
                             continue;
                        }
 
-                        println!("Change detected @ {}", Local::now().format("%Y-%m-%d %H:%M:%S").to_string());
+                        println!(
+                            "Change detected @ {}",
+                            Local::now().format("%Y-%m-%d %H:%M:%S").to_string()
+                        );
                         let start = Instant::now();
                         match detect_change_kind(&pwd, &path) {
                             (ChangeKind::Content, _) => {
                                 console::info(&format!("-> Content changed {}", path.display()));
                                 // Force refresh
-                                rebuild_done_handling(&broadcaster, rebuild::after_content_change(&mut site, &path), "/x.js");
-                            },
+                                rebuild_done_handling(
+                                    &broadcaster,
+                                    rebuild::after_content_change(&mut site, &path),
+                                    "/x.js",
+                                );
+                            }
                             (ChangeKind::Templates, _) => {
                                 console::info(&format!("-> Template changed {}", path.display()));
                                 // Force refresh
-                                rebuild_done_handling(&broadcaster, rebuild::after_template_change(&mut site, &path), "/x.js");
-                            },
+                                rebuild_done_handling(
+                                    &broadcaster,
+                                    rebuild::after_template_change(&mut site, &path),
+                                    "/x.js",
+                                );
+                            }
                             (ChangeKind::StaticFiles, p) => {
                                 if path.is_file() {
-                                    console::info(&format!("-> Static file changes detected {}", path.display()));
-                                    rebuild_done_handling(&broadcaster, copy_file(&path, &site.output_path, &site.static_path), &p.to_string_lossy());
+                                    console::info(&format!(
+                                        "-> Static file changes detected {}",
+                                        path.display()
+                                    ));
+                                    rebuild_done_handling(
+                                        &broadcaster,
+                                        copy_file(&path, &site.output_path, &site.static_path),
+                                        &p.to_string_lossy(),
+                                    );
                                 }
-                            },
+                            }
                             (ChangeKind::Sass, p) => {
                                 console::info(&format!("-> Sass file changed {}", path.display()));
-                                rebuild_done_handling(&broadcaster, site.compile_sass(&site.base_path), &p.to_string_lossy());
-                            },
+                                rebuild_done_handling(
+                                    &broadcaster,
+                                    site.compile_sass(&site.base_path),
+                                    &p.to_string_lossy(),
+                                );
+                            }
                             (ChangeKind::Config, _) => {
                                 console::info("-> Config changed. The whole site will be reloaded. The browser needs to be refreshed to make the changes visible.");
-                                site = create_new_site(interface, port, output_dir, base_url, config_file).unwrap().0;
+                                site = create_new_site(
+                                    interface,
+                                    port,
+                                    output_dir,
+                                    base_url,
+                                    config_file,
+                                )
+                                .unwrap()
+                                .0;
                             }
                         };
                         console::report_elapsed_time(start);
                     }
                     _ => {}
                 }
-            },
+            }
             Err(e) => console::error(&format!("Watch error: {:?}", e)),
         };
     }
@@ -321,9 +380,7 @@ fn is_temp_file(path: &Path) -> bool {
                 }
             }
         },
-        None => {
-            true
-        },
+        None => true,
     }
 }
@@ -354,7 +411,7 @@ fn detect_change_kind(pwd: &Path, path: &Path) -> (ChangeKind, PathBuf) {
 mod tests {
     use std::path::{Path, PathBuf};
 
-    use super::{is_temp_file, detect_change_kind, ChangeKind};
+    use super::{detect_change_kind, is_temp_file, ChangeKind};
 
     #[test]
     fn can_recognize_temp_files() {
@@ -380,23 +437,28 @@ mod tests {
         let test_cases = vec![
             (
                 (ChangeKind::Templates, PathBuf::from("/templates/hello.html")),
-                Path::new("/home/vincent/site"), Path::new("/home/vincent/site/templates/hello.html")
+                Path::new("/home/vincent/site"),
+                Path::new("/home/vincent/site/templates/hello.html"),
             ),
             (
                 (ChangeKind::StaticFiles, PathBuf::from("/static/site.css")),
-                Path::new("/home/vincent/site"), Path::new("/home/vincent/site/static/site.css")
+                Path::new("/home/vincent/site"),
+                Path::new("/home/vincent/site/static/site.css"),
            ),
             (
                 (ChangeKind::Content, PathBuf::from("/content/posts/hello.md")),
-                Path::new("/home/vincent/site"), Path::new("/home/vincent/site/content/posts/hello.md")
+                Path::new("/home/vincent/site"),
+                Path::new("/home/vincent/site/content/posts/hello.md"),
             ),
             (
                 (ChangeKind::Sass, PathBuf::from("/sass/print.scss")),
-                Path::new("/home/vincent/site"), Path::new("/home/vincent/site/sass/print.scss")
+                Path::new("/home/vincent/site"),
+                Path::new("/home/vincent/site/sass/print.scss"),
            ),
             (
                 (ChangeKind::Config, PathBuf::from("/config.toml")),
-                Path::new("/home/vincent/site"), Path::new("/home/vincent/site/config.toml")
+                Path::new("/home/vincent/site"),
+                Path::new("/home/vincent/site/config.toml"),
             ),
         ];
 
@@ -21,7 +21,6 @@ lazy_static! {
     };
 }
 
-
 pub fn info(message: &str) {
     colorize(message, ColorSpec::new().set_bold(true));
 }
@@ -58,13 +57,12 @@ pub fn notify_site_size(site: &Site) {
 
 /// Display a warning in the console if there are ignored pages in the site
 pub fn warn_about_ignored_pages(site: &Site) {
-    let ignored_pages: Vec<_> = site.library
+    let ignored_pages: Vec<_> = site
+        .library
         .sections_values()
         .iter()
         .flat_map(|s| {
-            s.ignored_pages
-                .iter()
-                .map(|k| site.library.get_page_by_key(*k).file.path.clone())
+            s.ignored_pages.iter().map(|k| site.library.get_page_by_key(*k).file.path.clone())
         })
         .collect();
 
@@ -104,8 +102,9 @@ pub fn unravel_errors(message: &str, error: &Error) {
 
 /// Check whether to output colors
 fn has_color() -> bool {
-    let use_colors = env::var("CLICOLOR").unwrap_or_else(|_| "1".to_string()) != "0" && env::var("NO_COLOR").is_err();
-    let force_colors = env::var("CLICOLOR_FORCE").unwrap_or_else(|_|"0".to_string()) != "0";
+    let use_colors = env::var("CLICOLOR").unwrap_or_else(|_| "1".to_string()) != "0"
+        && env::var("NO_COLOR").is_err();
+    let force_colors = env::var("CLICOLOR_FORCE").unwrap_or_else(|_| "0".to_string()) != "0";
 
     force_colors || use_colors && atty::is(atty::Stream::Stdout)
 }
21 src/main.rs

@@ -1,33 +1,32 @@
-extern crate atty;
 extern crate actix_web;
+extern crate atty;
 #[macro_use]
 extern crate clap;
 extern crate chrono;
 #[macro_use]
 extern crate lazy_static;
+extern crate ctrlc;
 extern crate notify;
 extern crate termcolor;
 extern crate url;
 extern crate ws;
-extern crate ctrlc;
 
 extern crate site;
 #[macro_use]
 extern crate errors;
 extern crate front_matter;
-extern crate utils;
 extern crate rebuild;
+extern crate utils;
 
 use std::time::Instant;
 
 use utils::net::{get_available_port, port_is_available};
 
+mod cli;
 mod cmd;
 mod console;
-mod cli;
 mod prompt;
 
-
 fn main() {
     let matches = cli::build_cli().get_matches();
 
@@ -40,9 +39,9 @@ fn main() {
                 Err(e) => {
                     console::unravel_errors("Failed to create the project", &e);
                     ::std::process::exit(1);
-                },
+                }
            };
-        },
+        }
         ("build", Some(matches)) => {
             console::info("Building site...");
             let start = Instant::now();
@@ -52,9 +51,9 @@ fn main() {
                 Err(e) => {
                     console::unravel_errors("Failed to build the site", &e);
                     ::std::process::exit(1);
-                },
+                }
             };
-        },
+        }
         ("serve", Some(matches)) => {
             let interface = matches.value_of("interface").unwrap_or("127.0.0.1");
             let mut port: u16 = match matches.value_of("port").unwrap().parse() {
@@ -87,9 +86,9 @@ fn main() {
                 Err(e) => {
                     console::unravel_errors("", &e);
                     ::std::process::exit(1);
-                },
+                }
             };
-        },
+        }
         _ => unreachable!(),
     }
 }
@@ -1,4 +1,4 @@
-use std::io::{self, Write, BufRead};
+use std::io::{self, BufRead, Write};
 
 use url::Url;
 
@@ -28,7 +28,7 @@ pub fn ask_bool(question: &str, default: bool) -> Result<bool> {
         _ => {
             println!("Invalid choice: '{}'", input);
             ask_bool(question, default)
-        },
+        }
     }
 }
@@ -40,13 +40,11 @@ pub fn ask_url(question: &str, default: &str) -> Result<String> {
 
     match &*input {
         "" => Ok(default.to_string()),
-        _ => {
-            match Url::parse(&input) {
-                Ok(_) => Ok(input),
-                Err(_) => {
-                    println!("Invalid URL: '{}'", input);
-                    ask_url(question, default)
-                }
-            }
-        },
+        _ => match Url::parse(&input) {
+            Ok(_) => Ok(input),
+            Err(_) => {
+                println!("Invalid URL: '{}'", input);
+                ask_url(question, default)
+            }
+        },
     }
 }