Mirror of https://github.com/getzola/zola, synced 2024-12-12 21:32:29 +00:00

Commit 10aba20fe5: Slotmap refactor
Parent commit: 2cf99c3cfc
30 changed files with 1244 additions and 959 deletions
Cargo.lock (generated, 88 lines changed)
@@ -294,25 +294,6 @@ dependencies = [
 "toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "content"
version = "0.1.0"
dependencies = [
 "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "config 0.1.0",
 "errors 0.1.0",
 "front_matter 0.1.0",
 "globset 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "rendering 0.1.0",
 "serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "tempfile 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)",
 "toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "utils 0.1.0",
]

[[package]]
name = "cookie"
version = "0.11.0"

@@ -759,7 +740,6 @@ dependencies = [
 "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "content 0.1.0",
 "ctrlc 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "errors 0.1.0",
 "front_matter 0.1.0",

@@ -1035,6 +1015,27 @@ dependencies = [
 "crc 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "library"
version = "0.1.0"
dependencies = [
 "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "config 0.1.0",
 "errors 0.1.0",
 "front_matter 0.1.0",
 "globset 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "rendering 0.1.0",
 "serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "slotmap 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "tempfile 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)",
 "toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "utils 0.1.0",
]

[[package]]
name = "link_checker"
version = "0.1.0"

@@ -1383,21 +1384,6 @@ dependencies = [
 "stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "pagination"
version = "0.1.0"
dependencies = [
 "config 0.1.0",
 "content 0.1.0",
 "errors 0.1.0",
 "front_matter 0.1.0",
 "serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "taxonomies 0.1.0",
 "tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)",
 "utils 0.1.0",
]

[[package]]
name = "parking_lot"
version = "0.6.4"

@@ -1610,11 +1596,11 @@ dependencies = [
name = "rebuild"
version = "0.1.0"
dependencies = [
 "content 0.1.0",
 "errors 0.1.0",
 "front_matter 0.1.0",
 "fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "highlighting 0.1.0",
 "library 0.1.0",
 "site 0.1.0",
 "tempfile 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]

@@ -1809,10 +1795,10 @@ name = "search"
version = "0.1.0"
dependencies = [
 "ammonia 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "content 0.1.0",
 "elasticlunr-rs 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "errors 0.1.0",
 "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "library 0.1.0",
]

[[package]]

@@ -1900,18 +1886,16 @@ name = "site"
version = "0.1.0"
dependencies = [
 "config 0.1.0",
 "content 0.1.0",
 "errors 0.1.0",
 "front_matter 0.1.0",
 "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "imageproc 0.1.0",
 "pagination 0.1.0",
 "library 0.1.0",
 "rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "sass-rs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "search 0.1.0",
 "serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "taxonomies 0.1.0",
 "tempfile 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "templates 0.1.0",
 "tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1923,6 +1907,11 @@ name = "slab"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "slotmap"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "slug"
version = "0.1.4"

@@ -2090,21 +2079,6 @@ dependencies = [
 "yaml-rust 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "taxonomies"
version = "0.1.0"
dependencies = [
 "config 0.1.0",
 "content 0.1.0",
 "errors 0.1.0",
 "front_matter 0.1.0",
 "serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
 "slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)",
 "utils 0.1.0",
]

[[package]]
name = "tempfile"
version = "3.0.4"

@@ -2124,12 +2098,11 @@ version = "0.1.0"
dependencies = [
 "base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "config 0.1.0",
 "content 0.1.0",
 "errors 0.1.0",
 "imageproc 0.1.0",
 "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "library 0.1.0",
 "pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "taxonomies 0.1.0",
 "tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)",
 "utils 0.1.0",
]

@@ -2940,6 +2913,7 @@ dependencies = [
"checksum sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d"
"checksum siphasher 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac"
"checksum slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f9776d6b986f77b35c6cf846c11ad986ff128fe0b2b63a3628e3755e8d3102d"
"checksum slotmap 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4ed041f7f2ff35f2bf7d688bf30686976512f8300e37433c2c73ea9f4cf14b"
"checksum slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b3bc762e6a4b6c6fcaade73e77f9ebc6991b676f88bb2358bddb56560f073373"
"checksum smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "153ffa32fd170e9944f7e0838edf824a754ec4c1fc64746fcc9fe1f8fa602e5d"
"checksum socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c4d11a52082057d87cb5caa31ad812f4504b97ab44732cd8359df2e9ff9f48e7"
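The only new external dependency in this commit is slotmap 0.2: an arena that hands back a small, copyable key for each insertion; a key stays valid for the lifetime of its value and resolves to nothing after removal. A minimal sketch of the handful of calls the refactor leans on (the same with_capacity/insert/get/remove methods appear in library.rs further down); the String payload here is just a stand-in:

    extern crate slotmap;

    use slotmap::{DenseSlotMap, Key};

    fn main() {
        // DenseSlotMap stores values contiguously for fast iteration and hands
        // out a Copy-able Key instead of a borrow or a Box.
        let mut pages: DenseSlotMap<String> = DenseSlotMap::with_capacity(2);
        let first: Key = pages.insert("first page".to_string());
        let second: Key = pages.insert("second page".to_string());

        // Keys are cheap to store in other structs (e.g. Section::pages: Vec<Key>).
        assert_eq!(pages.get(first).map(|s| s.as_str()), Some("first page"));
        pages.remove(second);
        assert!(pages.get(second).is_none()); // a stale key simply resolves to None
    }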
Cargo.toml

@@ -33,7 +33,6 @@ ctrlc = "3"

site = { path = "components/site" }
errors = { path = "components/errors" }
content = { path = "components/content" }
front_matter = { path = "components/front_matter" }
utils = { path = "components/utils" }
rebuild = { path = "components/rebuild" }

@@ -41,18 +40,19 @@ rebuild = { path = "components/rebuild" }
[workspace]
members = [
    "components/config",
    "components/content",
    "components/errors",
    "components/front_matter",
    "components/highlighting",
    "components/pagination",
    "components/rebuild",
    "components/rendering",
    "components/site",
    "components/taxonomies",
    "components/templates",
    "components/utils",
    "components/search",
    "components/imageproc",
    "components/link_checker",
    "components/library",
]

#[profile.release]
#debug = true
(deleted file, 146 lines)

@@ -1,146 +0,0 @@
#![feature(test)]
extern crate test;
extern crate tera;

extern crate content;
extern crate front_matter;
extern crate config;

use std::collections::HashMap;
use std::path::Path;

use config::Config;
use tera::Tera;
use front_matter::{SortBy, InsertAnchor};
use content::{Page, sort_pages, populate_siblings};


fn create_pages(number: usize) -> Vec<Page> {
    let mut pages = vec![];
    let config = Config::default();
    let mut tera = Tera::default();
    tera.add_raw_template("shortcodes/youtube.html", "hello");
    let permalinks = HashMap::new();

    for i in 0..number {
        let mut page = Page::default();
        page.meta.weight = Some(i);
        page.raw_content = r#"
# Modus cognitius profanam ne duae virtutis mundi

## Ut vita

Lorem markdownum litora, care ponto nomina, et ut aspicit gelidas sui et
purpureo genuit. Tamen colla venientis [delphina](http://nil-sol.com/ecquis)
Tusci et temptata citaeque curam isto ubi vult vulnere reppulit.

- Seque vidit flendoque de quodam
- Dabit minimos deiecto caputque noctis pluma
- Leti coniunx est Helicen
- Illius pulvereumque Icare inpositos
- Vivunt pereo pluvio tot ramos Olenios gelidis
- Quater teretes natura inde

### A subsection

Protinus dicunt, breve per, et vivacis genus Orphei munere. Me terram [dimittere
casside](http://corpus.org/) pervenit saxo primoque frequentat genuum sorori
praeferre causas Libys. Illud in serpit adsuetam utrimque nunc haberent,
**terrae si** veni! Hectoreis potes sumite [Mavortis retusa](http://tua.org/)
granum captantur potuisse Minervae, frugum.

> Clivo sub inprovisoque nostrum minus fama est, discordia patrem petebat precatur
absumitur, poena per sit. Foramina *tamen cupidine* memor supplex tollentes
dictum unam orbem, Anubis caecae. Viderat formosior tegebat satis, Aethiopasque
sit submisso coniuge tristis ubi!

## Praeceps Corinthus totidem quem crus vultum cape

```rs
#[derive(Debug)]
pub struct Site {
    /// The base path of the gutenberg site
    pub base_path: PathBuf,
    /// The parsed config for the site
    pub config: Config,
    pub pages: HashMap<PathBuf, Page>,
    pub sections: HashMap<PathBuf, Section>,
    pub tera: Tera,
    live_reload: bool,
    output_path: PathBuf,
    static_path: PathBuf,
    pub tags: Option<Taxonomy>,
    pub categories: Option<Taxonomy>,
    /// A map of all .md files (section and pages) and their permalink
    /// We need that if there are relative links in the content that need to be resolved
    pub permalinks: HashMap<String, String>,
}
```

## More stuff
And a shortcode:

{{ youtube(id="my_youtube_id") }}

### Another subsection
Gotta make the toc do a little bit of work

# A big title

- hello
- world
- !

```py
if __name__ == "__main__":
    gen_site("basic-blog", [""], 250, paginate=True)
```
"#.to_string();
        page.render_markdown(&permalinks, &tera, &config, &Path::new(""), InsertAnchor::None).unwrap();
        pages.push(page);
    }

    pages
}

// Most of the time spent in those benches are due to the .clone()...
// but i don't know how to remove them so there are some baseline bench with
// just the cloning and with a bit of math we can figure it out

#[bench]
fn bench_baseline_cloning(b: &mut test::Bencher) {
    let pages = create_pages(250);
    b.iter(|| pages.clone());
}

#[bench]
fn bench_sorting_none(b: &mut test::Bencher) {
    let pages = create_pages(250);
    b.iter(|| sort_pages(pages.clone(), SortBy::Weight));
}

#[bench]
fn bench_sorting_order(b: &mut test::Bencher) {
    let pages = create_pages(250);
    b.iter(|| sort_pages(pages.clone(), SortBy::Weight));
}

#[bench]
fn bench_populate_siblings(b: &mut test::Bencher) {
    let pages = create_pages(250);
    let (sorted_pages, _) = sort_pages(pages, SortBy::Weight);
    b.iter(|| populate_siblings(&sorted_pages.clone(), SortBy::Weight));
}

#[bench]
fn bench_page_render_html(b: &mut test::Bencher) {
    let pages = create_pages(10);
    let (mut sorted_pages, _) = sort_pages(pages, SortBy::Weight);
    sorted_pages = populate_siblings(&sorted_pages, SortBy::Weight);

    let config = Config::default();
    let mut tera = Tera::default();
    tera.add_raw_template("page.html", "{{ page.content }}").unwrap();
    let page = &sorted_pages[5];
    b.iter(|| page.render_html(&tera, &config).unwrap());
}
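The comment in the deleted benches above describes baseline subtraction: each iteration has to clone the input, so a clone-only bench is measured separately and subtracted from the combined figure. With purely hypothetical numbers:

    // Hypothetical figures, only to illustrate the subtraction the comment describes:
    //   bench_baseline_cloning ... 1,000,000 ns/iter   (clone only)
    //   bench_sorting_none ....... 1,400,000 ns/iter   (clone + sort)
    // Estimated sorting cost for 250 pages: 1,400,000 - 1,000,000 = 400,000 ns.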
(deleted file, 29 lines)

@@ -1,29 +0,0 @@
extern crate tera;
extern crate slug;
extern crate serde;
extern crate rayon;
extern crate chrono;

extern crate errors;
extern crate config;
extern crate front_matter;
extern crate rendering;
extern crate utils;

#[cfg(test)]
extern crate tempfile;
#[cfg(test)]
extern crate toml;
#[cfg(test)]
extern crate globset;

mod file_info;
mod page;
mod section;
mod sorting;


pub use file_info::FileInfo;
pub use page::Page;
pub use section::Section;
pub use sorting::{sort_pages, populate_siblings, sort_pages_by_date};
(deleted file, 247 lines)

@@ -1,247 +0,0 @@
use std::cmp::Ordering;

use rayon::prelude::*;

use page::Page;
use front_matter::SortBy;


/// The comparison function of sorting pages by day
/// Used by the RSS rendering
/// To remove if `sort_pages` is changed to work on borrowed values
/// This cannot be used in `sort_pages` currently as it takes &&Page instead of &Page
pub fn sort_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
    let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
    if ord == Ordering::Equal {
        a.permalink.cmp(&b.permalink)
    } else {
        ord
    }
}


/// Sort pages by the given criteria
///
/// Any pages that doesn't have a required field when the sorting method is other than none
/// will be ignored.
pub fn sort_pages(pages: Vec<Page>, sort_by: SortBy) -> (Vec<Page>, Vec<Page>) {
    if sort_by == SortBy::None {
        return (pages, vec![]);
    }

    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages
        .into_par_iter()
        .partition(|page| {
            match sort_by {
                SortBy::Date => page.meta.datetime.is_some(),
                SortBy::Weight => page.meta.weight.is_some(),
                _ => unreachable!()
            }
        });

    match sort_by {
        SortBy::Date => {
            can_be_sorted.par_sort_unstable_by(|a, b| {
                let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
                if ord == Ordering::Equal {
                    a.permalink.cmp(&b.permalink)
                } else {
                    ord
                }
            })
        }
        SortBy::Weight => {
            can_be_sorted.par_sort_unstable_by(|a, b| {
                let ord = a.meta.weight().cmp(&b.meta.weight());
                if ord == Ordering::Equal {
                    a.permalink.cmp(&b.permalink)
                } else {
                    ord
                }
            })
        }
        _ => unreachable!()
    };

    (can_be_sorted, cannot_be_sorted)
}

/// Horribly inefficient way to set previous and next on each pages that skips drafts
/// So many clones
pub fn populate_siblings(input: &[Page], sort_by: SortBy) -> Vec<Page> {
    let mut res = Vec::with_capacity(input.len());

    // The input is already sorted
    for (i, _) in input.iter().enumerate() {
        let mut new_page = input[i].clone();

        if new_page.is_draft() {
            res.push(new_page);
            continue;
        }

        if i > 0 {
            let mut j = i;
            loop {
                if j == 0 {
                    break;
                }

                j -= 1;

                if input[j].is_draft() {
                    continue;
                }

                // Remove prev/next otherwise we serialise the whole thing...
                let mut next_page = input[j].clone();

                match sort_by {
                    SortBy::Weight => {
                        next_page.lighter = None;
                        next_page.heavier = None;
                        new_page.lighter = Some(Box::new(next_page));
                    }
                    SortBy::Date => {
                        next_page.earlier = None;
                        next_page.later = None;
                        new_page.later = Some(Box::new(next_page));
                    }
                    SortBy::None => ()
                }
                break;
            }
        }

        if i < input.len() - 1 {
            let mut j = i;
            loop {
                if j == input.len() - 1 {
                    break;
                }

                j += 1;

                if input[j].is_draft() {
                    continue;
                }

                // Remove prev/next otherwise we serialise the whole thing...
                let mut previous_page = input[j].clone();
                match sort_by {
                    SortBy::Weight => {
                        previous_page.lighter = None;
                        previous_page.heavier = None;
                        new_page.heavier = Some(Box::new(previous_page));
                    }
                    SortBy::Date => {
                        previous_page.earlier = None;
                        previous_page.later = None;
                        new_page.earlier = Some(Box::new(previous_page));
                    }
                    SortBy::None => {}
                }
                break;
            }
        }
        res.push(new_page);
    }

    res
}

#[cfg(test)]
mod tests {
    use front_matter::{PageFrontMatter, SortBy};
    use page::Page;
    use super::{sort_pages, populate_siblings};

    fn create_page_with_date(date: &str) -> Page {
        let mut front_matter = PageFrontMatter::default();
        front_matter.date = Some(date.to_string());
        front_matter.date_to_datetime();
        Page::new("content/hello.md", front_matter)
    }

    fn create_page_with_weight(weight: usize) -> Page {
        let mut front_matter = PageFrontMatter::default();
        front_matter.weight = Some(weight);
        Page::new("content/hello.md", front_matter)
    }

    #[test]
    fn can_sort_by_dates() {
        let input = vec![
            create_page_with_date("2018-01-01"),
            create_page_with_date("2017-01-01"),
            create_page_with_date("2019-01-01"),
        ];
        let (pages, _) = sort_pages(input, SortBy::Date);
        // Should be sorted by date
        assert_eq!(pages[0].clone().meta.date.unwrap().to_string(), "2019-01-01");
        assert_eq!(pages[1].clone().meta.date.unwrap().to_string(), "2018-01-01");
        assert_eq!(pages[2].clone().meta.date.unwrap().to_string(), "2017-01-01");
    }

    #[test]
    fn can_sort_by_weight() {
        let input = vec![
            create_page_with_weight(2),
            create_page_with_weight(3),
            create_page_with_weight(1),
        ];
        let (pages, _) = sort_pages(input, SortBy::Weight);
        // Should be sorted by weight
        assert_eq!(pages[0].clone().meta.weight.unwrap(), 1);
        assert_eq!(pages[1].clone().meta.weight.unwrap(), 2);
        assert_eq!(pages[2].clone().meta.weight.unwrap(), 3);
    }

    #[test]
    fn can_sort_by_none() {
        let input = vec![
            create_page_with_weight(2),
            create_page_with_weight(3),
            create_page_with_weight(1),
        ];
        let (pages, _) = sort_pages(input, SortBy::None);
        assert_eq!(pages[0].clone().meta.weight.unwrap(), 2);
        assert_eq!(pages[1].clone().meta.weight.unwrap(), 3);
        assert_eq!(pages[2].clone().meta.weight.unwrap(), 1);
    }

    #[test]
    fn ignore_page_with_missing_field() {
        let input = vec![
            create_page_with_weight(2),
            create_page_with_weight(3),
            create_page_with_date("2019-01-01"),
        ];
        let (pages, unsorted) = sort_pages(input, SortBy::Weight);
        assert_eq!(pages.len(), 2);
        assert_eq!(unsorted.len(), 1);
    }

    #[test]
    fn can_populate_siblings() {
        let input = vec![
            create_page_with_weight(1),
            create_page_with_weight(2),
            create_page_with_weight(3),
        ];
        let pages = populate_siblings(&input, SortBy::Weight);

        assert!(pages[0].clone().lighter.is_none());
        assert!(pages[0].clone().heavier.is_some());
        assert_eq!(pages[0].clone().heavier.unwrap().meta.weight.unwrap(), 2);

        assert!(pages[1].clone().heavier.is_some());
        assert!(pages[1].clone().lighter.is_some());
        assert_eq!(pages[1].clone().lighter.unwrap().meta.weight.unwrap(), 1);
        assert_eq!(pages[1].clone().heavier.unwrap().meta.weight.unwrap(), 3);

        assert!(pages[2].clone().lighter.is_some());
        assert!(pages[2].clone().heavier.is_none());
        assert_eq!(pages[2].clone().lighter.unwrap().meta.weight.unwrap(), 2);
    }
}
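The deleted populate_siblings above had to clone each neighbouring Page into a Box (first stripping the neighbour's own links to avoid recursive serialization). After the refactor, sibling fields hold slotmap keys instead, so linking becomes plain bookkeeping with no page bodies copied. A simplified sketch of the new shape, ignoring the draft-skipping the real code keeps:

    use slotmap::{DenseSlotMap, Key};

    // Trimmed-down stand-in for Page: only the sibling links.
    #[derive(Default)]
    struct Page {
        lighter: Option<Key>, // was Option<Box<Page>> before this commit
        heavier: Option<Key>,
    }

    // Linking sorted pages only stores keys; nothing is cloned.
    fn link_siblings(pages: &mut DenseSlotMap<Page>, sorted: &[Key]) {
        for (i, &k) in sorted.iter().enumerate() {
            let lighter = if i > 0 { Some(sorted[i - 1]) } else { None };
            let heavier = sorted.get(i + 1).cloned();
            if let Some(page) = pages.get_mut(k) {
                page.lighter = lighter;
                page.heavier = heavier;
            }
        }
    }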
(modified file)

@@ -1,20 +1,22 @@
[package]
name = "content"
name = "library"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

[dependencies]
tera = "0.11"
serde = "1"
slug = "0.1"
slotmap = "0.2"
rayon = "1"
chrono = { version = "0.4", features = ["serde"] }
tera = "0.11"
serde = "1"
serde_derive = "1"
slug = "0.1"

errors = { path = "../errors" }
front_matter = { path = "../front_matter" }
config = { path = "../config" }
utils = { path = "../utils" }
rendering = { path = "../rendering" }
front_matter = { path = "../front_matter" }
errors = { path = "../errors" }

[dev-dependencies]
tempfile = "3"
components/library/src/content/mod.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
mod file_info;
mod page;
mod section;

pub use self::file_info::FileInfo;
pub use self::page::{Page, SerializingPage};
pub use self::section::{Section, SerializingSection};
(modified file)

@@ -1,11 +1,10 @@
/// A page, can be a blog post or a basic page
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::result::Result as StdResult;

use tera::{Tera, Context as TeraContext};
use serde::ser::{SerializeStruct, self};
use tera::{Tera, Context as TeraContext, Value, Map};
use slug::slugify;
use slotmap::{Key, DenseSlotMap};

use errors::{Result, ResultExt};
use config::Config;

@@ -14,9 +13,119 @@ use utils::site::get_reading_analytics;
use utils::templates::render_template;
use front_matter::{PageFrontMatter, InsertAnchor, split_page_content};
use rendering::{RenderContext, Header, render_content};
use library::Library;

use file_info::FileInfo;
use content::file_info::FileInfo;

#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct SerializingPage<'a> {
    content: &'a str,
    permalink: &'a str,
    slug: &'a str,
    title: &'a Option<String>,
    description: &'a Option<String>,
    date: &'a Option<String>,
    year: Option<i32>,
    month: Option<u32>,
    day: Option<u32>,
    taxonomies: &'a HashMap<String, Vec<String>>,
    extra: &'a Map<String, Value>,
    path: &'a str,
    components: &'a [String],
    summary: &'a Option<String>,
    word_count: Option<usize>,
    reading_time: Option<usize>,
    toc: &'a [Header],
    assets: Vec<String>,
    draft: bool,
    lighter: Option<Box<SerializingPage<'a>>>,
    heavier: Option<Box<SerializingPage<'a>>>,
    earlier: Option<Box<SerializingPage<'a>>>,
    later: Option<Box<SerializingPage<'a>>>,
}

impl<'a> SerializingPage<'a> {
    // We only want to fill one level of prev/next, otherwise it is going to overflow.
    // `stop` is there for that
    pub fn from_page(page: &'a Page, pages: &'a DenseSlotMap<Page>) -> Self {
        let mut year = None;
        let mut month = None;
        let mut day = None;
        if let Some(d) = page.meta.datetime_tuple {
            year = Some(d.0);
            month = Some(d.1);
            day = Some(d.2);
        }
        let lighter = page.lighter.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
        let heavier = page.heavier.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
        let earlier = page.earlier.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
        let later = page.later.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));

        SerializingPage {
            content: &page.content,
            permalink: &page.permalink,
            slug: &page.slug,
            title: &page.meta.title,
            description: &page.meta.description,
            extra: &page.meta.extra,
            date: &page.meta.date,
            year,
            month,
            day,
            taxonomies: &page.meta.taxonomies,
            path: &page.path,
            components: &page.components,
            summary: &page.summary,
            word_count: page.word_count,
            reading_time: page.reading_time,
            toc: &page.toc,
            assets: page.serialize_assets(),
            draft: page.is_draft(),
            lighter,
            heavier,
            earlier,
            later,
        }
    }

    /// Same as from_page but does not fill sibling pages
    pub fn from_page_basic(page: &'a Page) -> Self {
        let mut year = None;
        let mut month = None;
        let mut day = None;
        if let Some(d) = page.meta.datetime_tuple {
            year = Some(d.0);
            month = Some(d.1);
            day = Some(d.2);
        }

        SerializingPage {
            content: &page.content,
            permalink: &page.permalink,
            slug: &page.slug,
            title: &page.meta.title,
            description: &page.meta.description,
            extra: &page.meta.extra,
            date: &page.meta.date,
            year,
            month,
            day,
            taxonomies: &page.meta.taxonomies,
            path: &page.path,
            components: &page.components,
            summary: &page.summary,
            word_count: page.word_count,
            reading_time: page.reading_time,
            toc: &page.toc,
            assets: page.serialize_assets(),
            draft: page.is_draft(),
            lighter: None,
            heavier: None,
            earlier: None,
            later: None,
        }
    }
}

#[derive(Clone, Debug, PartialEq)]
pub struct Page {
@@ -44,13 +153,13 @@ pub struct Page {
    /// as summary
    pub summary: Option<String>,
    /// The earlier page, for pages sorted by date
    pub earlier: Option<Box<Page>>,
    pub earlier: Option<Key>,
    /// The later page, for pages sorted by date
    pub later: Option<Box<Page>>,
    pub later: Option<Key>,
    /// The lighter page, for pages sorted by weight
    pub lighter: Option<Box<Page>>,
    pub lighter: Option<Key>,
    /// The heavier page, for pages sorted by weight
    pub heavier: Option<Box<Page>>,
    pub heavier: Option<Key>,
    /// Toc made from the headers of the markdown file
    pub toc: Vec<Header>,
    /// How many words in the raw content
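With Option<Key> in place of Option<Box<Page>>, a Page no longer owns its siblings; whoever needs the actual neighbour resolves the key against the slot map, as SerializingPage::from_page does above. A small sketch of that resolution, with a trimmed-down Page type:

    use slotmap::{DenseSlotMap, Key};

    struct Page { title: String, heavier: Option<Key> }

    // Walk the chain of heavier pages by resolving each key in the arena.
    fn titles_heavier_than(start: Key, pages: &DenseSlotMap<Page>) -> Vec<String> {
        let mut out = Vec::new();
        let mut cur = pages.get(start).and_then(|p| p.heavier);
        while let Some(k) = cur {
            let page = pages.get(k).unwrap(); // same unwrap pattern as from_page above
            out.push(page.title.clone());
            cur = page.heavier;
        }
        out
    }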
@@ -190,7 +299,7 @@ impl Page {
            anchor_insert,
        );

        context.tera_context.insert("page", self);
        context.tera_context.insert("page", &SerializingPage::from_page_basic(self));

        let res = render_content(&self.raw_content, &context)
            .chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?;

@@ -203,7 +312,7 @@ impl Page {
    }

    /// Renders the page using the default layout, unless specified in front-matter
    pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> {
    pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
        let tpl_name = match self.meta.template {
            Some(ref l) => l.to_string(),
            None => "page.html".to_string()

@@ -211,7 +320,7 @@ impl Page {

        let mut context = TeraContext::new();
        context.insert("config", config);
        context.insert("page", self);
        context.insert("page", &library.get_cached_page_value(&self.file.path));
        context.insert("current_url", &self.permalink);
        context.insert("current_path", &self.path);

@@ -227,6 +336,14 @@ impl Page {
            .map(|filename| self.path.clone() + filename)
            .collect()
    }

    pub fn to_serialized<'a>(&'a self, pages: &'a DenseSlotMap<Page>) -> SerializingPage<'a> {
        SerializingPage::from_page(self, pages)
    }

    pub fn to_serialized_basic<'a>(&'a self) -> SerializingPage<'a> {
        SerializingPage::from_page_basic(self)
    }
}

impl Default for Page {

@@ -253,43 +370,6 @@ impl Default for Page {
    }
}

impl ser::Serialize for Page {
    fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer {
        let mut state = serializer.serialize_struct("page", 20)?;
        state.serialize_field("content", &self.content)?;
        state.serialize_field("title", &self.meta.title)?;
        state.serialize_field("description", &self.meta.description)?;
        state.serialize_field("date", &self.meta.date)?;
        if let Some(d) = self.meta.datetime_tuple {
            state.serialize_field("year", &d.0)?;
            state.serialize_field("month", &d.1)?;
            state.serialize_field("day", &d.2)?;
        } else {
            state.serialize_field::<Option<usize>>("year", &None)?;
            state.serialize_field::<Option<usize>>("month", &None)?;
            state.serialize_field::<Option<usize>>("day", &None)?;
        }
        state.serialize_field("slug", &self.slug)?;
        state.serialize_field("path", &self.path)?;
        state.serialize_field("components", &self.components)?;
        state.serialize_field("permalink", &self.permalink)?;
        state.serialize_field("summary", &self.summary)?;
        state.serialize_field("taxonomies", &self.meta.taxonomies)?;
        state.serialize_field("extra", &self.meta.extra)?;
        state.serialize_field("word_count", &self.word_count)?;
        state.serialize_field("reading_time", &self.reading_time)?;
        state.serialize_field("earlier", &self.earlier)?;
        state.serialize_field("later", &self.later)?;
        state.serialize_field("lighter", &self.lighter)?;
        state.serialize_field("heavier", &self.heavier)?;
        state.serialize_field("toc", &self.toc)?;
        state.serialize_field("draft", &self.is_draft())?;
        let assets = self.serialize_assets();
        state.serialize_field("assets", &assets)?;
        state.end()
    }
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;
(modified file)

@@ -1,9 +1,8 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::result::Result as StdResult;

use tera::{Tera, Context as TeraContext};
use serde::ser::{SerializeStruct, self};
use tera::{Tera, Context as TeraContext, Value};
use slotmap::{Key};

use config::Config;
use front_matter::{SectionFrontMatter, split_section_content};

@@ -13,10 +12,77 @@ use utils::templates::render_template;
use utils::site::get_reading_analytics;
use rendering::{RenderContext, Header, render_content};

use page::Page;
use file_info::FileInfo;
use content::file_info::FileInfo;
use library::Library;


#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct SerializingSection<'a> {
    content: &'a str,
    permalink: &'a str,
    title: &'a Option<String>,
    description: &'a Option<String>,
    extra: &'a HashMap<String, Value>,
    path: &'a str,
    components: &'a [String],
    word_count: Option<usize>,
    reading_time: Option<usize>,
    toc: &'a [Header],
    assets: Vec<String>,
    pages: Vec<&'a Value>,
    subsections: Vec<&'a Value>,
}

impl<'a> SerializingSection<'a> {
    pub fn from_section(section: &'a Section, library: &'a Library) -> Self {
        let mut section_pages = vec![];
        let mut subsections = vec![];

        for k in &section.pages {
            section_pages.push(library.get_cached_page_value_by_key(k));
        }

        for k in &section.subsections {
            subsections.push(library.get_cached_section_value_by_key(k));
        }

        SerializingSection {
            content: &section.content,
            permalink: &section.permalink,
            title: &section.meta.title,
            description: &section.meta.description,
            extra: &section.meta.extra,
            path: &section.path,
            components: &section.components,
            word_count: section.word_count,
            reading_time: section.reading_time,
            toc: &section.toc,
            assets: section.serialize_assets(),
            pages: section_pages,
            subsections,
        }
    }

    /// Same as from_section but doesn't fetch pages and sections
    pub fn from_section_basic(section: &'a Section) -> Self {
        SerializingSection {
            content: &section.content,
            permalink: &section.permalink,
            title: &section.meta.title,
            description: &section.meta.description,
            extra: &section.meta.extra,
            path: &section.path,
            components: &section.components,
            word_count: section.word_count,
            reading_time: section.reading_time,
            toc: &section.toc,
            assets: section.serialize_assets(),
            pages: vec![],
            subsections: vec![],
        }
    }
}

#[derive(Clone, Debug, PartialEq)]
pub struct Section {
    /// All info about the actual file
@@ -36,11 +102,11 @@ pub struct Section {
    /// All the non-md files we found next to the .md file
    pub assets: Vec<PathBuf>,
    /// All direct pages of that section
    pub pages: Vec<Page>,
    pub pages: Vec<Key>,
    /// All pages that cannot be sorted in this section
    pub ignored_pages: Vec<Page>,
    pub ignored_pages: Vec<Key>,
    /// All direct subsections
    pub subsections: Vec<Section>,
    pub subsections: Vec<Key>,
    /// Toc made from the headers of the markdown file
    pub toc: Vec<Header>,
    /// How many words in the raw content

@@ -133,7 +199,13 @@ impl Section {

    /// We need access to all pages url to render links relative to content
    /// so that can't happen at the same time as parsing
    pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config, base_path: &Path) -> Result<()> {
    pub fn render_markdown(
        &mut self,
        permalinks: &HashMap<String, String>,
        tera: &Tera,
        config: &Config,
        base_path: &Path,
    ) -> Result<()> {
        let mut context = RenderContext::new(
            tera,
            config,

@@ -143,7 +215,7 @@ impl Section {
            self.meta.insert_anchor_links,
        );

        context.tera_context.insert("section", self);
        context.tera_context.insert("section", &SerializingSection::from_section_basic(self));

        let res = render_content(&self.raw_content, &context)
            .chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?;

@@ -153,12 +225,12 @@ impl Section {
    }

    /// Renders the page using the default layout, unless specified in front-matter
    pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> {
    pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
        let tpl_name = self.get_template_name();

        let mut context = TeraContext::new();
        context.insert("config", config);
        context.insert("section", self);
        context.insert("section", &library.get_cached_section_value(&self.file.path));
        context.insert("current_url", &self.permalink);
        context.insert("current_path", &self.path);

@@ -173,15 +245,13 @@ impl Section {

    /// Returns all the paths of the pages belonging to that section
    pub fn all_pages_path(&self) -> Vec<PathBuf> {
        let mut paths = vec![];
        paths.extend(self.pages.iter().map(|p| p.file.path.clone()));
        paths.extend(self.ignored_pages.iter().map(|p| p.file.path.clone()));
        let paths = vec![];
        paths
    }

    /// Whether the page given belongs to that section
    pub fn is_child_page(&self, path: &PathBuf) -> bool {
        self.all_pages_path().contains(path)
        false
    }

    /// Creates a vectors of asset URLs.

@@ -193,50 +263,8 @@ impl Section {
            .collect()
    }

    pub fn clone_without_pages(&self) -> Section {
        let mut subsections = vec![];
        for subsection in &self.subsections {
            subsections.push(subsection.clone_without_pages());
        }

        Section {
            file: self.file.clone(),
            meta: self.meta.clone(),
            path: self.path.clone(),
            components: self.components.clone(),
            permalink: self.permalink.clone(),
            raw_content: self.raw_content.clone(),
            content: self.content.clone(),
            assets: self.assets.clone(),
            toc: self.toc.clone(),
            subsections,
            pages: vec![],
            ignored_pages: vec![],
            word_count: self.word_count,
            reading_time: self.reading_time,
        }
    }
}

impl ser::Serialize for Section {
    fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer {
        let mut state = serializer.serialize_struct("section", 13)?;
        state.serialize_field("content", &self.content)?;
        state.serialize_field("permalink", &self.permalink)?;
        state.serialize_field("title", &self.meta.title)?;
        state.serialize_field("description", &self.meta.description)?;
        state.serialize_field("extra", &self.meta.extra)?;
        state.serialize_field("path", &self.path)?;
        state.serialize_field("components", &self.components)?;
        state.serialize_field("permalink", &self.permalink)?;
        state.serialize_field("pages", &self.pages)?;
        state.serialize_field("subsections", &self.subsections)?;
        state.serialize_field("word_count", &self.word_count)?;
        state.serialize_field("reading_time", &self.reading_time)?;
        state.serialize_field("toc", &self.toc)?;
        let assets = self.serialize_assets();
        state.serialize_field("assets", &assets)?;
        state.end()
    pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> {
        SerializingSection::from_section(self, library)
    }
}
components/library/src/lib.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
extern crate tera;
extern crate slug;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate chrono;
extern crate slotmap;
extern crate rayon;

#[cfg(test)]
extern crate tempfile;
#[cfg(test)]
extern crate toml;
#[cfg(test)]
extern crate globset;

extern crate front_matter;
extern crate config;
extern crate utils;
extern crate rendering;
#[macro_use]
extern crate errors;

mod content;
mod taxonomies;
mod pagination;
mod sorting;
mod library;

pub use slotmap::{Key, DenseSlotMap};

pub use sorting::sort_actual_pages_by_date;
pub use content::{Page, SerializingPage, Section, SerializingSection};
pub use library::Library;
pub use taxonomies::{Taxonomy, TaxonomyItem, find_taxonomies};
pub use pagination::Paginator;
components/library/src/library.rs (new file, 356 lines)
@@ -0,0 +1,356 @@
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};

use slotmap::{DenseSlotMap, Key};
use tera::{Value, to_value};

use front_matter::SortBy;

use sorting::{find_siblings, sort_pages_by_weight, sort_pages_by_date};
use content::{Page, Section};

#[derive(Debug)]
struct Values {
    pages: HashMap<Key, Value>,
    sections: HashMap<Key, Value>,
}

impl Values {
    pub fn new(cap_pages: usize, cap_sections: usize) -> Values {
        Values {
            pages: HashMap::with_capacity(cap_pages),
            sections: HashMap::with_capacity(cap_sections),
        }
    }

    pub fn get_page(&self, key: &Key) -> &Value {
        return self.pages.get(key).unwrap()
    }

    pub fn insert_page(&mut self, key: Key, value: Value) {
        self.pages.insert(key, value);
    }

    pub fn remove_page(&mut self, key: &Key) {
        self.pages.remove(key);
    }

    pub fn get_section(&self, key: &Key) -> &Value {
        return self.sections.get(key).unwrap()
    }

    pub fn insert_section(&mut self, key: Key, value: Value) {
        self.sections.insert(key, value);
    }

    pub fn remove_section(&mut self, key: &Key) {
        self.sections.remove(key);
    }
}
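Values is a cache of pre-serialized tera::Value objects keyed by slot map key: each page and section is serialized once, in cache_all_pages and cache_all_sections below, and every later template render reuses the cached value instead of re-walking the whole object graph. The conversion itself is plain tera to_value, e.g.:

    extern crate tera;

    use tera::{to_value, Value};

    fn main() {
        // Serialize once up front; renders then borrow the cached Value.
        let cached: Value = to_value(vec!["a", "b"]).unwrap();
        assert!(cached.is_array());
    }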
// Houses everything about pages/sections/taxonomies
#[derive(Debug)]
pub struct Library {
    pages: DenseSlotMap<Page>,
    sections: DenseSlotMap<Section>,
    paths_to_pages: HashMap<PathBuf, Key>,
    paths_to_sections: HashMap<PathBuf, Key>,

    values: Values,
}

impl Library {
    pub fn new(cap_pages: usize, cap_sections: usize) -> Self {
        Library {
            pages: DenseSlotMap::with_capacity(cap_pages),
            sections: DenseSlotMap::with_capacity(cap_sections),
            paths_to_pages: HashMap::with_capacity(cap_pages),
            paths_to_sections: HashMap::with_capacity(cap_sections),
            values: Values::new(cap_pages, cap_sections),
        }
    }

    pub fn insert_section(&mut self, section: Section) -> Key {
        let path = section.file.path.clone();
        let key = self.sections.insert(section);
        self.paths_to_sections.insert(path, key);
        key
    }

    pub fn insert_page(&mut self, page: Page) -> Key {
        let path = page.file.path.clone();
        let key = self.pages.insert(page);
        self.paths_to_pages.insert(path, key);
        key
    }

    pub fn pages(&self) -> &DenseSlotMap<Page> {
        &self.pages
    }

    pub fn pages_mut(&mut self) -> &mut DenseSlotMap<Page> {
        &mut self.pages
    }

    pub fn pages_values(&self) -> Vec<&Page> {
        self.pages.values().collect::<Vec<_>>()
    }

    pub fn pages_values_mut(&mut self) -> Vec<&mut Page> {
        self.pages.values_mut().collect::<Vec<_>>()
    }

    pub fn sections(&self) -> &DenseSlotMap<Section> {
        &self.sections
    }

    pub fn sections_mut(&mut self) -> &mut DenseSlotMap<Section> {
        &mut self.sections
    }

    pub fn sections_values(&self) -> Vec<&Section> {
        self.sections.values().collect::<Vec<_>>()
    }

    pub fn sections_values_mut(&mut self) -> Vec<&mut Section> {
        self.sections.values_mut().collect::<Vec<_>>()
    }
    /// Find out the direct subsections of each subsection if there are some
    /// as well as the pages for each section
    pub fn populate_sections(&mut self) {
        let mut grandparent_paths: HashMap<PathBuf, Vec<PathBuf>> = HashMap::new();

        for section in self.sections.values_mut() {
            if let Some(ref grand_parent) = section.file.grand_parent {
                grandparent_paths
                    .entry(grand_parent.to_path_buf())
                    .or_insert_with(|| vec![])
                    .push(section.file.path.clone());
            }
            // Make sure the pages of a section are empty since we can call that many times on `serve`
            section.pages = vec![];
            section.ignored_pages = vec![];
        }

        for (key, page) in &mut self.pages {
            let parent_section_path = page.file.parent.join("_index.md");
            if let Some(section_key) = self.paths_to_sections.get(&parent_section_path) {
                self.sections.get_mut(*section_key).unwrap().pages.push(key);
            }
        }
        self.sort_sections_pages(None);

        let sections = self.paths_to_sections.clone();
        let mut sections_weight = HashMap::new();
        for (key, section) in &self.sections {
            sections_weight.insert(key, section.meta.weight);
        }
        for section in self.sections.values_mut() {
            if let Some(paths) = grandparent_paths.get(&section.file.parent) {
                section.subsections = paths
                    .iter()
                    .map(|p| sections[p])
                    .collect::<Vec<_>>();
                section.subsections
                    .sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
            }
        }
    }
    pub fn sort_sections_pages(&mut self, only: Option<&Path>) {
        let mut updates = HashMap::new();
        for (key, section) in &self.sections {
            if let Some(p) = only {
                if p != section.file.path {
                    continue;
                }
            }

            // TODO: use an enum to avoid duplication there and in sorting.rs?
            let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by {
                SortBy::None => continue,
                SortBy::Date => {
                    let data = section.pages
                        .iter()
                        .map(|k| {
                            if let Some(page) = self.pages.get(*k) {
                                (k, page.meta.datetime, page.permalink.as_ref())
                            } else {
                                unreachable!("Sorting got an unknown page")
                            }
                        })
                        .collect();

                    sort_pages_by_date(data)
                },
                SortBy::Weight => {
                    let data = section.pages
                        .iter()
                        .map(|k| {
                            if let Some(page) = self.pages.get(*k) {
                                (k, page.meta.weight, page.permalink.as_ref())
                            } else {
                                unreachable!("Sorting got an unknown page")
                            }
                        })
                        .collect();

                    sort_pages_by_weight(data)
                }
            };
            updates.insert(key, (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by));
        }

        for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
            // Find sibling between sorted pages first
            let with_siblings = find_siblings(sorted.iter().map(|k| {
                if let Some(page) = self.pages.get(*k) {
                    (k, page.is_draft())
                } else {
                    unreachable!("Sorting got an unknown page")
                }
            }).collect());

            for (k2, val1, val2) in with_siblings {
                if let Some(page) = self.pages.get_mut(k2) {
                    match sort_by {
                        SortBy::Date => {
                            page.earlier = val2;
                            page.later = val1;
                        },
                        SortBy::Weight => {
                            page.lighter = val1;
                            page.heavier = val2;
                        },
                        SortBy::None => unreachable!("Impossible to find siblings in SortBy::None")
                    }
                } else {
                    unreachable!("Sorting got an unknown page")
                }
            }

            if let Some(s) = self.sections.get_mut(key) {
                s.pages = sorted;
                s.ignored_pages = cannot_be_sorted;
            }
        }
    }
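find_siblings lives in the new sorting module, which this excerpt does not include. From the call site above it takes the sorted keys paired with a draft flag and returns each key with its two non-draft neighbours (val1 and val2). A sketch of a compatible implementation, a reconstruction under that assumed signature rather than the committed code:

    use slotmap::Key;

    // Assumed signature, inferred from the call site above: for each entry,
    // return the nearest non-draft neighbour on each side; drafts get (None, None).
    pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option<Key>)> {
        let mut res = Vec::with_capacity(sorted.len());
        for (i, &(key, is_draft)) in sorted.iter().enumerate() {
            if is_draft {
                res.push((*key, None, None));
                continue;
            }
            let before = sorted[..i].iter().rev().find(|&&(_, d)| !d).map(|&(k, _)| *k);
            let after = sorted[i + 1..].iter().find(|&&(_, d)| !d).map(|&(k, _)| *k);
            res.push((*key, before, after));
        }
        res
    }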
    pub fn cache_all_pages(&mut self) {
        let mut cache = HashMap::with_capacity(self.pages.capacity());
        for (key, page) in &self.pages {
            cache.insert(key, to_value(page.to_serialized(self.pages())).unwrap());
        }

        for (key, value) in cache {
            self.values.insert_page(key, value);
        }
    }

    // We need to do it from the bottom up to ensure all subsections of a section have been
    // cached before doing it
    pub fn cache_all_sections(&mut self) {
        // we get the Key in order we want to process them first
        let mut deps = HashMap::new();
        for (key, section) in &self.sections {
            deps.insert(key, section.subsections.clone());
        }

        loop {
            if deps.is_empty() {
                break;
            }

            let mut processed_keys = vec![];
            for (key, _) in deps.iter().filter(|(_, v)| v.is_empty()) {
                let section = self.sections.get(*key).unwrap();
                let value = to_value(section.to_serialized(self)).unwrap();
                self.values.insert_section(*key, value);
                processed_keys.push(*key);
            }

            // remove the processed keys from the action
            for key in processed_keys {
                deps.remove(&key);
                for (_, subs) in &mut deps {
                    subs.retain(|k| k != &key);
                }
            }
        }
    }

    pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
        let pages_in_sections = self.sections
            .values()
            .flat_map(|s| s.all_pages_path())
            .collect::<HashSet<_>>();

        self.pages
            .values()
            .filter(|page| !pages_in_sections.contains(&page.file.path))
            .collect()
    }

    pub fn get_section(&self, path: &PathBuf) -> Option<&Section> {
        self.sections.get(self.paths_to_sections.get(path).cloned().unwrap_or_default())
    }

    pub fn get_section_mut(&mut self, path: &PathBuf) -> Option<&mut Section> {
        self.sections.get_mut(self.paths_to_sections.get(path).cloned().unwrap_or_default())
    }

    pub fn get_section_by_key(&self, key: Key) -> &Section {
        self.sections.get(key).unwrap()
    }

    pub fn remove_section_by_path(&mut self, path: &PathBuf) -> Option<Section> {
        if let Some(k) = self.paths_to_sections.remove(path) {
            self.values.remove_section(&k);
            self.sections.remove(k)
        } else {
            None
        }
    }

    pub fn contains_section(&self, path: &PathBuf) -> bool {
        self.paths_to_sections.contains_key(path)
    }

    pub fn get_cached_section_value(&self, path: &PathBuf) -> &Value {
        self.values.get_section(self.paths_to_sections.get(path).unwrap())
    }

    pub fn get_cached_section_value_by_key(&self, key: &Key) -> &Value {
        self.values.get_section(key)
    }

    pub fn get_page(&self, path: &PathBuf) -> Option<&Page> {
        self.pages.get(self.paths_to_pages.get(path).cloned().unwrap_or_default())
    }

    pub fn get_cached_page_value(&self, path: &PathBuf) -> &Value {
        self.values.get_page(self.paths_to_pages.get(path).unwrap())
    }

    pub fn get_cached_page_value_by_key(&self, key: &Key) -> &Value {
        self.values.get_page(key)
    }

    pub fn get_page_by_key(&self, key: Key) -> &Page {
        self.pages.get(key).unwrap()
    }

    pub fn remove_page_by_path(&mut self, path: &PathBuf) -> Option<Page> {
        if let Some(k) = self.paths_to_pages.remove(path) {
            self.values.remove_page(&k);
            self.pages.remove(k)
        } else {
            None
        }
    }

    pub fn contains_page(&self, path: &PathBuf) -> bool {
        self.paths_to_pages.contains_key(path)
    }
}
(modified file)

@@ -1,25 +1,15 @@
#[macro_use]
extern crate serde_derive;
extern crate tera;

extern crate errors;
extern crate config;
extern crate content;
extern crate utils;
extern crate taxonomies;

#[cfg(test)]
extern crate front_matter;

use std::collections::HashMap;

use tera::{Tera, Context, to_value, Value};
use slotmap::{Key};

use errors::{Result, ResultExt};
use config::Config;
use content::{Page, Section};
use utils::templates::render_template;
use taxonomies::{Taxonomy, TaxonomyItem};

use content::{Section, SerializingSection};
use taxonomies::{TaxonomyItem, Taxonomy};
use library::Library;


#[derive(Clone, Debug, PartialEq)]

@@ -39,11 +29,11 @@ pub struct Pager<'a> {
    /// Path to that page
    path: String,
    /// All pages for the pager
    pages: Vec<&'a Page>,
    pages: Vec<&'a Value>,
}

impl<'a> Pager<'a> {
    fn new(index: usize, pages: Vec<&'a Page>, permalink: String, path: String) -> Pager<'a> {
    fn new(index: usize, pages: Vec<&'a Value>, permalink: String, path: String) -> Pager<'a> {
        Pager {
            index,
            permalink,

@@ -55,8 +45,8 @@ impl<'a> Pager<'a> {

#[derive(Clone, Debug, PartialEq)]
pub struct Paginator<'a> {
    /// All pages in the section
    all_pages: &'a [Page],
    /// All pages in the section/taxonomy
    all_pages: &'a [Key],
    /// Pages split in chunks of `paginate_by`
    pub pagers: Vec<Pager<'a>>,
    /// How many content pages on a paginated page at max
@@ -67,17 +57,18 @@ pub struct Paginator<'a> {
    pub permalink: String,
    path: String,
    pub paginate_path: String,
    /// Whether this is the index section, we need it for the template name
    is_index: bool,
}

impl<'a> Paginator<'a> {
    /// Create a new paginator from a section
    /// It will always at least create one pager (the first) even if there are no pages to paginate
    pub fn from_section(all_pages: &'a [Page], section: &'a Section) -> Paginator<'a> {
    /// It will always at least create one pager (the first) even if there are not enough pages to paginate
    pub fn from_section(section: &'a Section, library: &'a Library) -> Paginator<'a> {
        let paginate_by = section.meta.paginate_by.unwrap();
        let mut paginator = Paginator {
            all_pages,
            pagers: vec![],
            all_pages: &section.pages,
            pagers: Vec::with_capacity(section.pages.len() / paginate_by),
            paginate_by,
            root: PaginationRoot::Section(section),
            permalink: section.permalink.clone(),
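A small arithmetic note on the capacity hint above: integer division floors, so when the page count is not a multiple of paginate_by the Vec reserves one slot fewer than the number of pagers actually built and simply grows once. Ceiling division would be exact:

    fn main() {
        let (pages, paginate_by) = (5usize, 2usize);
        let reserved = pages / paginate_by;                   // 2: what the code reserves
        let needed = (pages + paginate_by - 1) / paginate_by; // 3: pagers actually built
        assert_eq!((reserved, needed), (2, 3));
    }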
@@ -86,17 +77,17 @@ impl<'a> Paginator<'a> {
            is_index: section.is_index(),
        };

        paginator.fill_pagers();
        paginator.fill_pagers(library);
        paginator
    }

    /// Create a new paginator from a taxonomy
    /// It will always at least create one pager (the first) even if there are no pages to paginate
    pub fn from_taxonomy(taxonomy: &'a Taxonomy, item: &'a TaxonomyItem) -> Paginator<'a> {
    /// It will always at least create one pager (the first) even if there are not enough pages to paginate
    pub fn from_taxonomy(taxonomy: &'a Taxonomy, item: &'a TaxonomyItem, library: &'a Library) -> Paginator<'a> {
        let paginate_by = taxonomy.kind.paginate_by.unwrap();
        let mut paginator = Paginator {
            all_pages: &item.pages,
            pagers: vec![],
            pagers: Vec::with_capacity(item.pages.len() / paginate_by),
            paginate_by,
            root: PaginationRoot::Taxonomy(taxonomy),
            permalink: item.permalink.clone(),
@ -105,31 +96,34 @@ impl<'a> Paginator<'a> {
|
|||
is_index: false,
|
||||
};
|
||||
|
||||
paginator.fill_pagers();
|
||||
paginator.fill_pagers(library);
|
||||
paginator
|
||||
}
|
||||
|
||||
fn fill_pagers(&mut self) {
|
||||
fn fill_pagers(&mut self, library: &'a Library) {
|
||||
// the list of pagers
|
||||
let mut pages = vec![];
|
||||
// the pages in the current pagers
|
||||
let mut current_page = vec![];
|
||||
|
||||
for page in self.all_pages {
|
||||
current_page.push(page);
|
||||
for key in self.all_pages {
|
||||
current_page.push(library.get_cached_page_value_by_key(key));
|
||||
|
||||
if current_page.len() == self.paginate_by {
|
||||
pages.push(current_page);
|
||||
current_page = vec![];
|
||||
}
|
||||
}
|
||||
|
||||
if !current_page.is_empty() {
|
||||
pages.push(current_page);
|
||||
}
|
||||
|
||||
let mut pagers = vec![];
|
||||
for (index, page) in pages.iter().enumerate() {
|
||||
for (index, page) in pages.into_iter().enumerate() {
|
||||
// First page has no pagination path
|
||||
if index == 0 {
|
||||
pagers.push(Pager::new(1, page.clone(), self.permalink.clone(), self.path.clone()));
|
||||
pagers.push(Pager::new(1, page, self.permalink.clone(), self.path.clone()));
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -146,7 +140,7 @@ impl<'a> Paginator<'a> {
|
|||
|
||||
pagers.push(Pager::new(
|
||||
index + 1,
|
||||
page.clone(),
|
||||
page,
|
||||
permalink,
|
||||
pager_path,
|
||||
));
|
||||
|
@ -198,7 +192,7 @@ impl<'a> Paginator<'a> {
|
|||
context.insert("config", &config);
|
||||
let template_name = match self.root {
|
||||
PaginationRoot::Section(s) => {
|
||||
context.insert("section", &s.clone_without_pages());
|
||||
context.insert("section", &SerializingSection::from_section_basic(s));
|
||||
s.get_template_name()
|
||||
}
|
||||
PaginationRoot::Taxonomy(t) => {
|
||||
|
@ -223,6 +217,7 @@ mod tests {
|
|||
use content::{Page, Section};
|
||||
use config::Taxonomy as TaxonomyConfig;
|
||||
use taxonomies::{Taxonomy, TaxonomyItem};
|
||||
use library::Library;
|
||||
|
||||
use super::Paginator;
|
||||
|
||||
|
@ -241,15 +236,24 @@ mod tests {
|
|||
s
|
||||
}
|
||||
|
||||
fn create_library(is_index: bool) -> (Section, Library) {
|
||||
let mut library = Library::new(3, 0);
|
||||
library.insert_page(Page::default());
|
||||
library.insert_page(Page::default());
|
||||
library.insert_page(Page::default());
|
||||
let mut section = create_section(is_index);
|
||||
section.pages = library.pages().keys().collect();
|
||||
library.insert_section(section.clone());
|
||||
library.cache_all_pages();
|
||||
library.cache_all_sections();
|
||||
|
||||
(section, library)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_can_create_paginator() {
|
||||
let pages = vec![
|
||||
Page::default(),
|
||||
Page::default(),
|
||||
Page::default(),
|
||||
];
|
||||
let section = create_section(false);
|
||||
let paginator = Paginator::from_section(pages.as_slice(), §ion);
|
||||
let (section, library) = create_library(false);
|
||||
let paginator = Paginator::from_section(§ion, &library);
|
||||
assert_eq!(paginator.pagers.len(), 2);
|
||||
|
||||
assert_eq!(paginator.pagers[0].index, 1);
|
||||
|
@ -265,13 +269,8 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_can_create_paginator_for_index() {
|
||||
let pages = vec![
|
||||
Page::default(),
|
||||
Page::default(),
|
||||
Page::default(),
|
||||
];
|
||||
let section = create_section(true);
|
||||
let paginator = Paginator::from_section(pages.as_slice(), §ion);
|
||||
let (section, library) = create_library(true);
|
||||
let paginator = Paginator::from_section(§ion, &library);
|
||||
assert_eq!(paginator.pagers.len(), 2);
|
||||
|
||||
assert_eq!(paginator.pagers[0].index, 1);
|
||||
|
@ -287,13 +286,8 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_can_build_paginator_context() {
|
||||
let pages = vec![
|
||||
Page::default(),
|
||||
Page::default(),
|
||||
Page::default(),
|
||||
];
|
||||
let section = create_section(false);
|
||||
let paginator = Paginator::from_section(pages.as_slice(), §ion);
|
||||
let (section, library) = create_library(false);
|
||||
let paginator = Paginator::from_section(§ion, &library);
|
||||
assert_eq!(paginator.pagers.len(), 2);
|
||||
|
||||
let context = paginator.build_paginator_context(&paginator.pagers[0]);
|
||||
|
@ -315,11 +309,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_can_create_paginator_for_taxonomy() {
|
||||
let pages = vec![
|
||||
Page::default(),
|
||||
Page::default(),
|
||||
Page::default(),
|
||||
];
|
||||
let (_, library) = create_library(false);
|
||||
let taxonomy_def = TaxonomyConfig {
|
||||
name: "tags".to_string(),
|
||||
paginate_by: Some(2),
|
||||
|
@ -329,10 +319,10 @@ mod tests {
|
|||
name: "Something".to_string(),
|
||||
slug: "something".to_string(),
|
||||
permalink: "https://vincent.is/tags/something/".to_string(),
|
||||
pages,
|
||||
pages: library.pages().keys().collect(),
|
||||
};
|
||||
let taxonomy = Taxonomy { kind: taxonomy_def, items: vec![taxonomy_item.clone()] };
|
||||
let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item);
|
||||
let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library);
|
||||
assert_eq!(paginator.pagers.len(), 2);
|
||||
|
||||
assert_eq!(paginator.pagers[0].index, 1);
|
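The pagination changes hinge on pages being stored exactly once in the library's slotmap and referenced everywhere else by `Key`, with `get_cached_page_value_by_key` returning a pre-serialized `Value` so each page is serialized once rather than once per pager. A minimal sketch of that pattern — the `Library` shape and the cache internals here are assumptions for illustration, not the commit's actual implementation (it also assumes `slotmap::Key` is hashable, which it derives):

extern crate slotmap;

use std::collections::HashMap;
use slotmap::{DenseSlotMap, Key};

struct Page {
    title: String,
}

// Hypothetical mini-library: pages are owned by one slotmap; sections,
// taxonomies and pagers only hold copies of the cheap `Key`.
struct Library {
    pages: DenseSlotMap<Page>,
    // One-time render cache keyed by the same keys, standing in for
    // whatever backs `get_cached_page_value_by_key` in the diff.
    cache: HashMap<Key, String>,
}

impl Library {
    fn cache_all_pages(&mut self) {
        // Serialize every page once, up front.
        self.cache = self.pages.iter().map(|(k, p)| (k, p.title.clone())).collect();
    }

    fn get_cached_page_value_by_key(&self, key: &Key) -> &String {
        &self.cache[key]
    }
}

fn main() {
    let mut library = Library { pages: DenseSlotMap::new(), cache: HashMap::new() };
    let key = library.pages.insert(Page { title: "hello".into() });
    library.cache_all_pages();
    // A pager stores `key` and resolves it lazily at render time:
    assert_eq!(library.get_cached_page_value_by_key(&key), "hello");
}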
224
components/library/src/sorting.rs
Normal file

@@ -0,0 +1,224 @@
use std::cmp::Ordering;

use rayon::prelude::*;
use slotmap::Key;
use chrono::NaiveDateTime;

use content::Page;

// Used by the RSS feed
pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
    let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
    if ord == Ordering::Equal {
        a.permalink.cmp(&b.permalink)
    } else {
        ord
    }
}

// TODO: unify both sort_ functions
// TODO: add back sorting tests
pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, &str)>) -> (Vec<Key>, Vec<Key>) {
    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages
        .into_par_iter()
        .partition(|page| page.1.is_some());

    can_be_sorted
        .par_sort_unstable_by(|a, b| {
            let ord = b.1.unwrap().cmp(&a.1.unwrap());
            if ord == Ordering::Equal {
                a.2.cmp(&b.2)
            } else {
                ord
            }
        });

    (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}

pub fn sort_pages_by_weight(pages: Vec<(&Key, Option<usize>, &str)>) -> (Vec<Key>, Vec<Key>) {
    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages
        .into_par_iter()
        .partition(|page| page.1.is_some());

    can_be_sorted
        .par_sort_unstable_by(|a, b| {
            let ord = a.1.unwrap().cmp(&b.1.unwrap());
            if ord == Ordering::Equal {
                a.2.cmp(&b.2)
            } else {
                ord
            }
        });

    (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}

pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option<Key>)> {
    let mut res = Vec::with_capacity(sorted.len());
    let length = sorted.len();

    for (i, (key, is_draft)) in sorted.iter().enumerate() {
        if *is_draft {
            res.push((**key, None, None));
            continue;
        }
        let mut with_siblings = (**key, None, None);

        if i > 0 {
            let mut j = i;
            loop {
                if j == 0 {
                    break;
                }

                j -= 1;

                if sorted[j].1 {
                    continue;
                }
                // lighter / later
                with_siblings.1 = Some(*sorted[j].0);
                break;
            }
        }

        if i < length - 1 {
            let mut j = i;
            loop {
                if j == length - 1 {
                    break;
                }

                j += 1;

                if sorted[j].1 {
                    continue;
                }

                // heavier / earlier
                with_siblings.2 = Some(*sorted[j].0);
                break;
            }
        }
        res.push(with_siblings);
    }

    res
}

#[cfg(test)]
mod tests {
    use slotmap::DenseSlotMap;

    use front_matter::PageFrontMatter;
    use content::Page;
    use super::{sort_pages_by_date, sort_pages_by_weight, find_siblings};

    fn create_page_with_date(date: &str) -> Page {
        let mut front_matter = PageFrontMatter::default();
        front_matter.date = Some(date.to_string());
        front_matter.date_to_datetime();
        Page::new("content/hello.md", front_matter)
    }

    fn create_page_with_weight(weight: usize) -> Page {
        let mut front_matter = PageFrontMatter::default();
        front_matter.weight = Some(weight);
        Page::new("content/hello.md", front_matter)
    }

    #[test]
    fn can_sort_by_dates() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_date("2018-01-01");
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_date("2017-01-01");
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_date("2019-01-01");
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.datetime, page1.permalink.as_ref()),
            (&key2, page2.meta.datetime, page2.permalink.as_ref()),
            (&key3, page3.meta.datetime, page3.permalink.as_ref()),
        ];
        let (pages, _) = sort_pages_by_date(input);
        // Should be sorted by date
        assert_eq!(pages[0], key3);
        assert_eq!(pages[1], key1);
        assert_eq!(pages[2], key2);
    }

    #[test]
    fn can_sort_by_weight() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(2);
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_weight(3);
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_weight(1);
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.weight, page1.permalink.as_ref()),
            (&key2, page2.meta.weight, page2.permalink.as_ref()),
            (&key3, page3.meta.weight, page3.permalink.as_ref()),
        ];
        let (pages, _) = sort_pages_by_weight(input);
        // Should be sorted by weight
        assert_eq!(pages[0], key3);
        assert_eq!(pages[1], key1);
        assert_eq!(pages[2], key2);
    }

    #[test]
    fn ignore_page_with_missing_field() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(2);
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_weight(3);
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_date("2019-01-01");
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.weight, page1.permalink.as_ref()),
            (&key2, page2.meta.weight, page2.permalink.as_ref()),
            (&key3, page3.meta.weight, page3.permalink.as_ref()),
        ];

        let (pages, unsorted) = sort_pages_by_weight(input);
        assert_eq!(pages.len(), 2);
        assert_eq!(unsorted.len(), 1);
    }

    #[test]
    fn can_find_siblings() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(1);
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_weight(2);
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_weight(3);
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.is_draft()),
            (&key2, page2.is_draft()),
            (&key3, page3.is_draft()),
        ];

        let pages = find_siblings(input);

        assert_eq!(pages[0].1, None);
        assert_eq!(pages[0].2, Some(key2));

        assert_eq!(pages[1].1, Some(key1));
        assert_eq!(pages[1].2, Some(key3));

        assert_eq!(pages[2].1, Some(key2));
        assert_eq!(pages[2].2, None);
    }
}
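`find_siblings` walks the sorted list once per page in each direction, skipping drafts, so every non-draft page ends up with an optional lighter/later and heavier/earlier neighbour. A standalone sketch of the same scan on plain indices instead of slotmap keys, to make the draft-skipping behaviour concrete (illustrative only, not part of the commit):

// Sketch of the sibling scan: for each non-draft item, walk left and
// right until the first non-draft neighbour is found.
fn siblings(drafts: &[bool]) -> Vec<(Option<usize>, Option<usize>)> {
    let n = drafts.len();
    (0..n)
        .map(|i| {
            if drafts[i] {
                // Drafts get no siblings of their own.
                return (None, None);
            }
            let prev = (0..i).rev().find(|&j| !drafts[j]);
            let next = (i + 1..n).find(|&j| !drafts[j]);
            (prev, next)
        })
        .collect()
}

fn main() {
    // The middle entry is a draft: its neighbours link around it.
    assert_eq!(
        siblings(&[false, true, false]),
        vec![(None, Some(2)), (None, None), (Some(0), None)]
    );
}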
@@ -1,46 +1,68 @@
-#[macro_use]
-extern crate serde_derive;
-extern crate tera;
-extern crate slug;
-
-#[macro_use]
-extern crate errors;
-extern crate config;
-extern crate content;
-extern crate front_matter;
-extern crate utils;
-
 use std::collections::HashMap;

 use slug::slugify;
-use tera::{Context, Tera};
+use tera::{Context, Tera, Value};
+use slotmap::Key;

 use config::{Config, Taxonomy as TaxonomyConfig};
 use errors::{Result, ResultExt};
-use content::{Page, sort_pages};
-use front_matter::SortBy;
 use utils::templates::render_template;

+use sorting::sort_pages_by_date;
+use library::Library;
+
 /// A tag or category
-#[derive(Debug, Clone, Serialize, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Serialize)]
+struct SerializedTaxonomyItem<'a> {
+    name: &'a str,
+    slug: &'a str,
+    permalink: &'a str,
+    pages: Vec<&'a Value>,
+}
+
+impl<'a> SerializedTaxonomyItem<'a> {
+    pub fn from_item(item: &'a TaxonomyItem, library: &'a Library) -> Self {
+        let mut pages = vec![];
+
+        for k in &item.pages {
+            pages.push(library.get_cached_page_value_by_key(k));
+        }
+
+        SerializedTaxonomyItem {
+            name: &item.name,
+            slug: &item.slug,
+            permalink: &item.permalink,
+            pages,
+        }
+    }
+}
+
+/// A taxonomy with all its pages
+#[derive(Debug, Clone, PartialEq)]
 pub struct TaxonomyItem {
     pub name: String,
     pub slug: String,
     pub permalink: String,
-    pub pages: Vec<Page>,
+    pub pages: Vec<Key>,
 }

 impl TaxonomyItem {
-    pub fn new(name: &str, path: &str, config: &Config, pages: Vec<Page>) -> TaxonomyItem {
+    pub fn new(name: &str, path: &str, config: &Config, keys: Vec<Key>, library: &Library) -> Self {
         // Taxonomy are almost always used for blogs so we filter by dates
         // and it's not like we can sort things across sections by anything other
         // than dates
-        let (mut pages, ignored_pages) = sort_pages(pages, SortBy::Date);
+        let data = keys
+            .iter()
+            .map(|k| {
+                if let Some(page) = library.pages().get(*k) {
+                    (k, page.meta.datetime, page.permalink.as_ref())
+                } else {
+                    unreachable!("Sorting got an unknown page")
+                }
+            })
+            .collect();
+        let (mut pages, ignored_pages) = sort_pages_by_date(data);
         let slug = slugify(name);
-        let permalink = {
-            config.make_permalink(&format!("/{}/{}", path, slug))
-        };
+        let permalink = config.make_permalink(&format!("/{}/{}", path, slug));

         // We still append pages without dates at the end
         pages.extend(ignored_pages);

@@ -54,8 +76,24 @@ impl TaxonomyItem {
     }
 }

+/// All the tags or categories
+#[derive(Debug, Clone, PartialEq, Serialize)]
+pub struct SerializedTaxonomy<'a> {
+    kind: &'a TaxonomyConfig,
+    items: Vec<SerializedTaxonomyItem<'a>>,
+}
+
+impl<'a> SerializedTaxonomy<'a> {
+    pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self {
+        let items: Vec<SerializedTaxonomyItem> = taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
+        SerializedTaxonomy {
+            kind: &taxonomy.kind,
+            items,
+        }
+    }
+}
+
 /// All different taxonomies we have and their content
 #[derive(Debug, Clone, PartialEq)]
 pub struct Taxonomy {
     pub kind: TaxonomyConfig,
     // this vec is sorted by the count of item

@@ -63,11 +101,11 @@ pub struct Taxonomy {
 }

 impl Taxonomy {
-    fn new(kind: TaxonomyConfig, config: &Config, items: HashMap<String, Vec<Page>>) -> Taxonomy {
+    fn new(kind: TaxonomyConfig, config: &Config, items: HashMap<String, Vec<Key>>, library: &Library) -> Taxonomy {
         let mut sorted_items = vec![];
         for (name, pages) in items {
             sorted_items.push(
-                TaxonomyItem::new(&name, &kind.name, config, pages)
+                TaxonomyItem::new(&name, &kind.name, config, pages, library)
             );
         }
         sorted_items.sort_by(|a, b| a.name.cmp(&b.name));

@@ -86,10 +124,10 @@ impl Taxonomy {
         self.len() == 0
     }

-    pub fn render_term(&self, item: &TaxonomyItem, tera: &Tera, config: &Config) -> Result<String> {
+    pub fn render_term(&self, item: &TaxonomyItem, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
         let mut context = Context::new();
         context.insert("config", config);
-        context.insert("term", item);
+        context.insert("term", &SerializedTaxonomyItem::from_item(item, library));
         context.insert("taxonomy", &self.kind);
         context.insert("current_url", &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)));
         context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug));

@@ -98,10 +136,11 @@ impl Taxonomy {
             .chain_err(|| format!("Failed to render single term {} page.", self.kind.name))
     }

-    pub fn render_all_terms(&self, tera: &Tera, config: &Config) -> Result<String> {
+    pub fn render_all_terms(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
         let mut context = Context::new();
         context.insert("config", config);
-        context.insert("terms", &self.items);
+        let terms: Vec<SerializedTaxonomyItem> = self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
+        context.insert("terms", &terms);
         context.insert("taxonomy", &self.kind);
         context.insert("current_url", &config.make_permalink(&self.kind.name));
         context.insert("current_path", &self.kind.name);

@@ -109,9 +148,13 @@ impl Taxonomy {
         render_template(&format!("{}/list.html", self.kind.name), tera, &context, &config.theme)
             .chain_err(|| format!("Failed to render a list of {} page.", self.kind.name))
     }
+
+    pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> {
+        SerializedTaxonomy::from_taxonomy(self, library)
+    }
 }

-pub fn find_taxonomies(config: &Config, all_pages: Vec<&Page>) -> Result<Vec<Taxonomy>> {
+pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> {
     let taxonomies_def = {
         let mut m = HashMap::new();
         for t in &config.taxonomies {

@@ -121,8 +164,12 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> {
     };
     let mut all_taxonomies = HashMap::new();

     // Find all the taxonomies first
-    for page in all_pages {
+    for (key, page) in library.pages() {
+        // Draft are not part of taxonomies
+        if page.is_draft() {
+            continue;
+        }
+
         for (name, val) in &page.meta.taxonomies {
             if taxonomies_def.contains_key(name) {
                 all_taxonomies

@@ -134,7 +181,7 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> {
                     .unwrap()
                     .entry(v.to_string())
                     .or_insert_with(|| vec![])
-                    .push(page.clone());
+                    .push(key);
                 }
             } else {
                 bail!("Page `{}` has taxonomy `{}` which is not defined in config.toml", page.file.path.display(), name);

@@ -145,7 +192,7 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> {
     let mut taxonomies = vec![];

     for (name, taxo) in all_taxonomies {
-        taxonomies.push(Taxonomy::new(taxonomies_def[name].clone(), config, taxo));
+        taxonomies.push(Taxonomy::new(taxonomies_def[name].clone(), config, taxo, library));
     }

     Ok(taxonomies)

@@ -157,35 +204,43 @@ mod tests {
     use super::*;
     use std::collections::HashMap;

-    use config::{Config, Taxonomy};
+    use config::{Config, Taxonomy as TaxonomyConfig};
     use content::Page;
+    use library::Library;

     #[test]
     fn can_make_taxonomies() {
         let mut config = Config::default();
+        let mut library = Library::new(2, 0);

         config.taxonomies = vec![
-            Taxonomy { name: "categories".to_string(), ..Taxonomy::default() },
-            Taxonomy { name: "tags".to_string(), ..Taxonomy::default() },
-            Taxonomy { name: "authors".to_string(), ..Taxonomy::default() },
+            TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
         ];

         let mut page1 = Page::default();
         let mut taxo_page1 = HashMap::new();
         taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
         taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]);
         page1.meta.taxonomies = taxo_page1;
+        library.insert_page(page1);

         let mut page2 = Page::default();
         let mut taxo_page2 = HashMap::new();
         taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]);
         taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]);
         page2.meta.taxonomies = taxo_page2;
+        library.insert_page(page2);

         let mut page3 = Page::default();
         let mut taxo_page3 = HashMap::new();
         taxo_page3.insert("tags".to_string(), vec!["js".to_string()]);
         taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]);
         page3.meta.taxonomies = taxo_page3;
-        let pages = vec![&page1, &page2, &page3];
+        library.insert_page(page3);

-        let taxonomies = find_taxonomies(&config, pages).unwrap();
+        let taxonomies = find_taxonomies(&config, &library).unwrap();
         let (tags, categories, authors) = {
             let mut t = None;
             let mut c = None;

@@ -233,15 +288,18 @@ mod tests {
     #[test]
     fn errors_on_unknown_taxonomy() {
         let mut config = Config::default();
+        let mut library = Library::new(2, 0);

         config.taxonomies = vec![
-            Taxonomy { name: "authors".to_string(), ..Taxonomy::default() },
+            TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
         ];
         let mut page1 = Page::default();
         let mut taxo_page1 = HashMap::new();
         taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
         page1.meta.taxonomies = taxo_page1;
         library.insert_page(page1);

-        let taxonomies = find_taxonomies(&config, vec![&page1]);
+        let taxonomies = find_taxonomies(&config, &library);
         assert!(taxonomies.is_err());
         let err = taxonomies.unwrap_err();
         // no path as this is created by Default
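The `Serialized*` wrappers introduced above are the usual serde trick for rendering without cloning: a parallel struct of borrows that implements `Serialize`, built on demand from the canonical struct. A minimal sketch of the pattern, independent of the diff's types (the struct names here just echo the diff; everything else is illustrative):

#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;

// Canonical struct: owns its data, deliberately not Serialize.
pub struct TaxonomyItem {
    pub name: String,
    pub permalink: String,
}

// Borrowed view handed to the template engine; nothing is cloned.
#[derive(Serialize)]
struct SerializedTaxonomyItem<'a> {
    name: &'a str,
    permalink: &'a str,
}

impl<'a> SerializedTaxonomyItem<'a> {
    fn from_item(item: &'a TaxonomyItem) -> Self {
        SerializedTaxonomyItem { name: &item.name, permalink: &item.permalink }
    }
}

fn main() {
    let item = TaxonomyItem { name: "rust".into(), permalink: "/tags/rust/".into() };
    let json = serde_json::to_string(&SerializedTaxonomyItem::from_item(&item)).unwrap();
    assert_eq!(json, r#"{"name":"rust","permalink":"/tags/rust/"}"#);
}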
@@ -1,18 +0,0 @@
-[package]
-name = "pagination"
-version = "0.1.0"
-authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
-
-[dependencies]
-tera = "0.11"
-serde = "1"
-serde_derive = "1"
-
-errors = { path = "../errors" }
-config = { path = "../config" }
-content = { path = "../content" }
-utils = { path = "../utils" }
-taxonomies = { path = "../taxonomies" }
-
-[dev-dependencies]
-front_matter = { path = "../front_matter" }
@@ -7,7 +7,7 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
 errors = { path = "../errors" }
 front_matter = { path = "../front_matter" }
 highlighting = { path = "../highlighting" }
-content = { path = "../content" }
+library = { path = "../library" }
 site = { path = "../site" }

 [dev-dependencies]
@@ -1,20 +1,21 @@
 extern crate site;
 #[macro_use]
 extern crate errors;
-extern crate content;
+extern crate library;
 extern crate front_matter;

 use std::path::{Path, Component};

 use errors::Result;
 use site::Site;
-use content::{Page, Section};
+use library::{Page, Section};
 use front_matter::{PageFrontMatter, SectionFrontMatter};


 /// Finds the section that contains the page given if there is one
 pub fn find_parent_section<'a>(site: &'a Site, page: &Page) -> Option<&'a Section> {
-    for section in site.sections.values() {
+    for section in site.library.sections_values() {
+        // TODO: remove that, it's wrong now it should check the page key
         if section.is_child_page(&page.file.path) {
             return Some(section);
         }

@@ -98,16 +99,16 @@ fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMatter) {
 /// Handles a path deletion: could be a page, a section, a folder
 fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> {
     // Ignore the event if this path was not known
-    if !site.sections.contains_key(path) && !site.pages.contains_key(path) {
+    if !site.library.contains_section(&path.to_path_buf()) && !site.library.contains_page(&path.to_path_buf()) {
         return Ok(());
     }

     if is_section {
-        if let Some(s) = site.pages.remove(path) {
+        if let Some(s) = site.library.remove_section_by_path(&path.to_path_buf()) {
             site.permalinks.remove(&s.file.relative);
             site.populate_sections();
         }
-    } else if let Some(p) = site.pages.remove(path) {
+    } else if let Some(p) = site.library.remove_page_by_path(&path.to_path_buf()) {
         site.permalinks.remove(&p.file.relative);

         if !p.meta.taxonomies.is_empty() {

@@ -131,30 +132,31 @@ fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> {
 /// Handles a `_index.md` (a section) being edited in some ways
 fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
     let section = Section::from_file(path, &site.config)?;
+    let pathbuf = path.to_path_buf();
     match site.add_section(section, true)? {
         // Updating a section
         Some(prev) => {
             // Copy the section data so we don't end up with an almost empty object
-            site.sections.get_mut(path).unwrap().pages = prev.pages;
-            site.sections.get_mut(path).unwrap().ignored_pages = prev.ignored_pages;
-            site.sections.get_mut(path).unwrap().subsections = prev.subsections;
+            site.library.get_section_mut(&pathbuf).unwrap().pages = prev.pages;
+            site.library.get_section_mut(&pathbuf).unwrap().ignored_pages = prev.ignored_pages;
+            site.library.get_section_mut(&pathbuf).unwrap().subsections = prev.subsections;

-            if site.sections[path].meta == prev.meta {
+            if site.library.get_section(&pathbuf).unwrap().meta == prev.meta {
                 // Front matter didn't change, only content did
                 // so we render only the section page, not its pages
-                return site.render_section(&site.sections[path], false);
+                return site.render_section(&site.library.get_section(&pathbuf).unwrap(), false);
             }

             // Front matter changed
-            for changes in find_section_front_matter_changes(&site.sections[path].meta, &prev.meta) {
+            for changes in find_section_front_matter_changes(&site.library.get_section(&pathbuf).unwrap().meta, &prev.meta) {
                 // Sort always comes first if present so the rendering will be fine
                 match changes {
                     SectionChangesNeeded::Sort => {
                         site.sort_sections_pages(Some(path));
                         site.register_tera_global_fns();
                     }
-                    SectionChangesNeeded::Render => site.render_section(&site.sections[path], false)?,
-                    SectionChangesNeeded::RenderWithPages => site.render_section(&site.sections[path], true)?,
+                    SectionChangesNeeded::Render => site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)?,
+                    SectionChangesNeeded::RenderWithPages => site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)?,
                     // not a common enough operation to make it worth optimizing
                     SectionChangesNeeded::Delete => {
                         site.populate_sections();

@@ -168,14 +170,14 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
         None => {
             site.populate_sections();
             site.register_tera_global_fns();
-            site.render_section(&site.sections[path], true)
+            site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)
         }
     }
 }

 macro_rules! render_parent_section {
     ($site: expr, $path: expr) => {
-        if let Some(s) = find_parent_section($site, &$site.pages[$path]) {
+        if let Some(s) = find_parent_section($site, &$site.library.get_page(&$path.to_path_buf()).unwrap()) {
             $site.render_section(s, false)?;
         };
     }

@@ -184,26 +186,27 @@ macro_rules! render_parent_section {
 /// Handles a page being edited in some ways
 fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
     let page = Page::from_file(path, &site.config)?;
+    let pathbuf = path.to_path_buf();
     match site.add_page(page, true)? {
         // Updating a page
         Some(prev) => {
             // Front matter didn't change, only content did
-            if site.pages[path].meta == prev.meta {
+            if site.library.get_page(&pathbuf).unwrap().meta == prev.meta {
                 // Other than the page itself, the summary might be seen
                 // on a paginated list for a blog for example
-                if site.pages[path].summary.is_some() {
+                if site.library.get_page(&pathbuf).unwrap().summary.is_some() {
                     render_parent_section!(site, path);
                 }
                 // TODO: register_tera_global_fns is expensive as it involves lots of cloning
                 // I can't think of a valid usecase where you would need the content
                 // of a page through a global fn so it's commented out for now
                 // site.register_tera_global_fns();
-                return site.render_page(&site.pages[path]);
+                return site.render_page(&site.library.get_page(&pathbuf).unwrap());
             }

             // Front matter changed
             let mut sections_populated = false;
-            for changes in find_page_front_matter_changes(&site.pages[path].meta, &prev.meta) {
+            for changes in find_page_front_matter_changes(&site.library.get_page(&pathbuf).unwrap().meta, &prev.meta) {
                 // Sort always comes first if present so the rendering will be fine
                 match changes {
                     PageChangesNeeded::Taxonomies => {

@@ -212,7 +215,7 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
                         site.render_taxonomies()?;
                     }
                     PageChangesNeeded::Sort => {
-                        let section_path = match find_parent_section(site, &site.pages[path]) {
+                        let section_path = match find_parent_section(site, &site.library.get_page(&pathbuf).unwrap()) {
                             Some(s) => s.file.path.clone(),
                             None => continue // Do nothing if it's an orphan page
                         };

@@ -231,7 +234,7 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
             }
             site.register_tera_global_fns();
             render_parent_section!(site, path);
-            site.render_page(&site.pages[path])?;
+            site.render_page(&site.library.get_page(&path.to_path_buf()).unwrap())?;
         }
     };
 }

@@ -302,7 +305,7 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> {

     match filename {
         "sitemap.xml" => site.render_sitemap(),
-        "rss.xml" => site.render_rss_feed(site.pages.values().collect(), None),
+        "rss.xml" => site.render_rss_feed(site.library.pages_values(), None),
         "robots.txt" => site.render_robots(),
         "single.html" | "list.html" => site.render_taxonomies(),
         "page.html" => {
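Every `site.pages[path]` / `site.sections[path]` access above becomes a `site.library.get_page(&pathbuf)` style lookup, which implies the library keeps a path-to-key index next to the slotmap. A sketch of what such a lookup could look like — the field and method names are assumptions mirroring the calls in the diff, not the commit's verified internals:

extern crate slotmap;

use std::collections::HashMap;
use std::path::{Path, PathBuf};
use slotmap::{DenseSlotMap, Key};

struct Page {
    path: PathBuf,
}

struct Library {
    pages: DenseSlotMap<Page>,
    // Secondary index so callers can keep addressing pages by file path.
    paths_to_pages: HashMap<PathBuf, Key>,
}

impl Library {
    fn insert_page(&mut self, page: Page) -> Key {
        let path = page.path.clone();
        let key = self.pages.insert(page);
        self.paths_to_pages.insert(path, key);
        key
    }

    fn get_page(&self, path: &PathBuf) -> Option<&Page> {
        self.paths_to_pages.get(path).and_then(|k| self.pages.get(*k))
    }

    fn remove_page_by_path(&mut self, path: &Path) -> Option<Page> {
        // Dropping the index entry and the slotmap entry together keeps both in sync.
        self.paths_to_pages.remove(path).and_then(|k| self.pages.remove(k))
    }
}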
@@ -9,4 +9,4 @@ ammonia = "1"
 lazy_static = "1"

 errors = { path = "../errors" }
-content = { path = "../content" }
+library = { path = "../library" }
@@ -2,16 +2,16 @@ extern crate elasticlunr;
 #[macro_use]
 extern crate lazy_static;
 extern crate ammonia;

 #[macro_use]
 extern crate errors;
-extern crate content;
+extern crate library;

 use std::collections::{HashMap, HashSet};
 use std::path::PathBuf;

 use elasticlunr::{Index, Language};

-use content::Section;
+use library::{Library, Section};
 use errors::Result;


@@ -39,7 +39,7 @@ lazy_static! {
 /// the language given
 /// Errors if the language given is not available in Elasticlunr
 /// TODO: is making `in_search_index` apply to subsections of a `false` section useful?
-pub fn build_index(sections: &HashMap<PathBuf, Section>, lang: &str) -> Result<String> {
+pub fn build_index(lang: &str, library: &Library) -> Result<String> {
     let language = match Language::from_code(lang) {
         Some(l) => l,
         None => { bail!("Tried to build search index for language {} which is not supported", lang); }

@@ -47,14 +47,14 @@ pub fn build_index(sections: &HashMap<PathBuf, Section>, lang: &str) -> Result<String> {

     let mut index = Index::with_language(language, &["title", "body"]);

-    for section in sections.values() {
-        add_section_to_index(&mut index, section);
+    for section in library.sections_values() {
+        add_section_to_index(&mut index, section, library);
     }

     Ok(index.to_json())
 }

-fn add_section_to_index(index: &mut Index, section: &Section) {
+fn add_section_to_index(index: &mut Index, section: &Section, library: &Library) {
     if !section.meta.in_search_index {
         return;
     }

@@ -67,7 +68,8 @@ fn add_section_to_index(index: &mut Index, section: &Section) {
         );
     }

-    for page in &section.pages {
+    for key in &section.pages {
+        let page = library.get_page_by_key(*key);
         if !page.meta.in_search_index || page.meta.draft {
             continue;
         }
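Note that `get_page_by_key` here, unlike the `get_page` path lookup in the rebuild code, is used without an `Option`: the keys in `section.pages` were handed out by the same library, so a stale key is a logic bug rather than a recoverable condition. A sketch of the two accessor flavors under that assumption (names assumed for illustration):

extern crate slotmap;

use slotmap::{DenseSlotMap, Key};

struct Page {
    title: String,
}

struct Library {
    pages: DenseSlotMap<Page>,
}

impl Library {
    // Fallible lookup for externally supplied keys.
    fn get_page_opt(&self, key: Key) -> Option<&Page> {
        self.pages.get(key)
    }

    // Infallible flavor for keys the library itself handed out and still owns;
    // a stale key here is a programming error, so panicking is the failure mode.
    fn get_page_by_key(&self, key: Key) -> &Page {
        self.pages.get(key).expect("invalid page key")
    }
}

fn main() {
    let mut library = Library { pages: DenseSlotMap::new() };
    let key = library.pages.insert(Page { title: "hello".into() });
    assert!(library.get_page_opt(key).is_some());
    assert_eq!(library.get_page_by_key(key).title, "hello");
}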
@@ -16,11 +16,9 @@ config = { path = "../config" }
 utils = { path = "../utils" }
 templates = { path = "../templates" }
 front_matter = { path = "../front_matter" }
-pagination = { path = "../pagination" }
-taxonomies = { path = "../taxonomies" }
-content = { path = "../content" }
 search = { path = "../search" }
 imageproc = { path = "../imageproc" }
+library = { path = "../library" }

 [dev-dependencies]
 tempfile = "3"
@@ -1,14 +1,14 @@
 #![feature(test)]
 extern crate test;
 extern crate site;
-extern crate pagination;
+extern crate library;
 extern crate tempfile;

 use std::env;

 use tempfile::tempdir;
 use site::Site;
-use pagination::Paginator;
+use library::Paginator;


 fn setup_site(name: &str) -> Site {

@@ -44,7 +44,7 @@ fn bench_render_rss_feed(b: &mut test::Bencher) {
     let tmp_dir = tempdir().expect("create temp dir");
     let public = &tmp_dir.path().join("public");
     site.set_output_path(&public);
-    b.iter(|| site.render_rss_feed(site.pages.values().collect(), None).unwrap());
+    b.iter(|| site.render_rss_feed(site.library.pages_values(), None).unwrap());
 }

 #[bench]

@@ -62,8 +62,8 @@ fn bench_render_paginated(b: &mut test::Bencher) {
     let tmp_dir = tempdir().expect("create temp dir");
     let public = &tmp_dir.path().join("public");
     site.set_output_path(&public);
-    let section = site.sections.values().collect::<Vec<_>>()[0];
-    let paginator = Paginator::from_section(&section.pages, section);
+    let section = site.library.sections_values()[0];
+    let paginator = Paginator::from_section(&section, site.library.pages());

     b.iter(|| site.render_paginated(public, &paginator));
 }
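One recurring pattern in the site diff below is bridging the slotmap's sequential `values_mut()` iterator into rayon: a `DenseSlotMap` iterator is not a `ParallelIterator`, so the mutable references are first collected into a `Vec`, which rayon can then split across threads. A minimal sketch of that bridging, using a plain `Vec` to stand in for the slotmap (illustrative only):

extern crate rayon;

use rayon::prelude::*;

fn main() {
    let mut pages = vec![String::from("a"), String::from("b")];

    // A sequential iterator of &mut items (standing in for
    // DenseSlotMap::values_mut) collected into a Vec so rayon can split it.
    pages
        .iter_mut()
        .collect::<Vec<_>>()
        .par_iter_mut()
        .for_each(|s| s.push_str("!"));

    assert_eq!(pages, vec!["a!", "b!"]);
}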
@ -12,38 +12,31 @@ extern crate config;
|
|||
extern crate utils;
|
||||
extern crate front_matter;
|
||||
extern crate templates;
|
||||
extern crate pagination;
|
||||
extern crate taxonomies;
|
||||
extern crate content;
|
||||
extern crate search;
|
||||
extern crate imageproc;
|
||||
extern crate library;
|
||||
|
||||
#[cfg(test)]
|
||||
extern crate tempfile;
|
||||
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::collections::{HashMap};
|
||||
use std::fs::{create_dir_all, remove_dir_all, copy};
|
||||
use std::mem;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use glob::glob;
|
||||
use tera::{Tera, Context};
|
||||
use sass_rs::{Options as SassOptions, OutputStyle, compile_file};
|
||||
use rayon::prelude::*;
|
||||
|
||||
use errors::{Result, ResultExt};
|
||||
use config::{Config, get_config};
|
||||
use utils::fs::{create_file, copy_directory, create_directory, ensure_directory_exists};
|
||||
use utils::templates::{render_template, rewrite_theme_paths};
|
||||
use utils::net::get_available_port;
|
||||
use content::{Page, Section, populate_siblings, sort_pages, sort_pages_by_date};
|
||||
use templates::{GUTENBERG_TERA, global_fns, render_redirect_template};
|
||||
use front_matter::{InsertAnchor};
|
||||
use taxonomies::{Taxonomy, find_taxonomies};
|
||||
use pagination::Paginator;
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
use library::{Page, Section, sort_actual_pages_by_date, Library, Taxonomy, find_taxonomies, Paginator};
|
||||
|
||||
/// The sitemap only needs links and potentially date so we trim down
|
||||
/// all pages to only that
|
||||
|
@ -65,8 +58,6 @@ pub struct Site {
|
|||
pub base_path: PathBuf,
|
||||
/// The parsed config for the site
|
||||
pub config: Config,
|
||||
pub pages: HashMap<PathBuf, Page>,
|
||||
pub sections: HashMap<PathBuf, Section>,
|
||||
pub tera: Tera,
|
||||
imageproc: Arc<Mutex<imageproc::Processor>>,
|
||||
// the live reload port to be used if there is one
|
||||
|
@ -78,6 +69,8 @@ pub struct Site {
|
|||
/// A map of all .md files (section and pages) and their permalink
|
||||
/// We need that if there are relative links in the content that need to be resolved
|
||||
pub permalinks: HashMap<String, String>,
|
||||
/// Contains all pages and sections of the site
|
||||
pub library: Library,
|
||||
}
|
||||
|
||||
impl Site {
|
||||
|
@ -108,7 +101,7 @@ impl Site {
|
|||
);
|
||||
let mut tera_theme = Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?;
|
||||
rewrite_theme_paths(&mut tera_theme, &theme);
|
||||
// TODO: same as above
|
||||
// TODO: same as below
|
||||
if theme_path.join("templates").join("robots.txt").exists() {
|
||||
tera_theme.add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
|
||||
}
|
||||
|
@ -133,8 +126,6 @@ impl Site {
|
|||
base_path: path.to_path_buf(),
|
||||
config,
|
||||
tera,
|
||||
pages: HashMap::new(),
|
||||
sections: HashMap::new(),
|
||||
imageproc: Arc::new(Mutex::new(imageproc)),
|
||||
live_reload: None,
|
||||
output_path: path.join("public"),
|
||||
|
@ -142,6 +133,8 @@ impl Site {
|
|||
static_path,
|
||||
taxonomies: Vec::new(),
|
||||
permalinks: HashMap::new(),
|
||||
// We will allocate it properly later on
|
||||
library: Library::new(0, 0),
|
||||
};
|
||||
|
||||
Ok(site)
|
||||
|
@ -158,15 +151,7 @@ impl Site {
|
|||
|
||||
/// Get all the orphan (== without section) pages in the site
|
||||
pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
|
||||
let pages_in_sections = self.sections
|
||||
.values()
|
||||
.flat_map(|s| s.all_pages_path())
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
self.pages
|
||||
.values()
|
||||
.filter(|page| !pages_in_sections.contains(&page.file.path))
|
||||
.collect()
|
||||
self.library.get_all_orphan_pages()
|
||||
}
|
||||
|
||||
pub fn set_base_url(&mut self, base_url: String) {
|
||||
|
@ -191,6 +176,8 @@ impl Site {
|
|||
.filter(|e| !e.as_path().file_name().unwrap().to_str().unwrap().starts_with('.'))
|
||||
.partition(|entry| entry.as_path().file_name().unwrap() == "_index.md");
|
||||
|
||||
self.library = Library::new(page_entries.len(), section_entries.len());
|
||||
|
||||
let sections = {
|
||||
let config = &self.config;
|
||||
|
||||
|
@ -225,7 +212,7 @@ impl Site {
|
|||
// Insert a default index section if necessary so we don't need to create
|
||||
// a _index.md to render the index page at the root of the site
|
||||
let index_path = self.index_section_path();
|
||||
if let Some(ref index_section) = self.sections.get(&index_path) {
|
||||
if let Some(ref index_section) = self.library.get_section(&index_path) {
|
||||
if self.config.build_search_index && !index_section.meta.in_search_index {
|
||||
bail!(
|
||||
"You have enabled search in the config but disabled it in the index section: \
|
||||
|
@ -235,12 +222,13 @@ impl Site {
|
|||
}
|
||||
}
|
||||
// Not in else because of borrow checker
|
||||
if !self.sections.contains_key(&index_path) {
|
||||
if !self.library.contains_section(&index_path) {
|
||||
let mut index_section = Section::default();
|
||||
index_section.permalink = self.config.make_permalink("");
|
||||
index_section.file.path = self.content_path.join("_index.md");
|
||||
index_section.file.parent = self.content_path.clone();
|
||||
index_section.file.relative = "_index.md".to_string();
|
||||
self.sections.insert(index_path, index_section);
|
||||
self.library.insert_section(index_section);
|
||||
}
|
||||
|
||||
let mut pages_insert_anchors = HashMap::new();
|
||||
|
@ -253,6 +241,8 @@ impl Site {
|
|||
self.register_early_global_fns();
|
||||
self.render_markdown()?;
|
||||
self.populate_sections();
|
||||
self.library.cache_all_pages();
|
||||
self.library.cache_all_sections();
|
||||
self.populate_taxonomies()?;
|
||||
self.register_tera_global_fns();
|
||||
|
||||
|
@ -271,19 +261,27 @@ impl Site {
|
|||
|
||||
// This is needed in the first place because of silly borrow checker
|
||||
let mut pages_insert_anchors = HashMap::new();
|
||||
for p in self.pages.values() {
|
||||
for (_, p) in self.library.pages() {
|
||||
pages_insert_anchors.insert(p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone()));
|
||||
}
|
||||
|
||||
self.pages.par_iter_mut()
|
||||
.map(|(_, page)| {
|
||||
self.library
|
||||
.pages_mut()
|
||||
.values_mut()
|
||||
.collect::<Vec<_>>()
|
||||
.par_iter_mut()
|
||||
.map(|page| {
|
||||
let insert_anchor = pages_insert_anchors[&page.file.path];
|
||||
page.render_markdown(permalinks, tera, config, base_path, insert_anchor)
|
||||
})
|
||||
.collect::<Result<()>>()?;
|
||||
|
||||
self.sections.par_iter_mut()
|
||||
.map(|(_, section)| section.render_markdown(permalinks, tera, config, base_path))
|
||||
self.library
|
||||
.sections_mut()
|
||||
.values_mut()
|
||||
.collect::<Vec<_>>()
|
||||
.par_iter_mut()
|
||||
.map(|section| section.render_markdown(permalinks, tera, config, base_path))
|
||||
.collect::<Result<()>>()?;
|
||||
|
||||
Ok(())
|
||||
|
@ -301,15 +299,15 @@ impl Site {
|
|||
|
||||
pub fn register_tera_global_fns(&mut self) {
|
||||
self.tera.register_function("trans", global_fns::make_trans(self.config.clone()));
|
||||
self.tera.register_function("get_page", global_fns::make_get_page(&self.pages));
|
||||
self.tera.register_function("get_section", global_fns::make_get_section(&self.sections));
|
||||
self.tera.register_function("get_page", global_fns::make_get_page(&self.library));
|
||||
self.tera.register_function("get_section", global_fns::make_get_section(&self.library));
|
||||
self.tera.register_function(
|
||||
"get_taxonomy",
|
||||
global_fns::make_get_taxonomy(self.taxonomies.clone()),
|
||||
global_fns::make_get_taxonomy(&self.taxonomies, &self.library),
|
||||
);
|
||||
self.tera.register_function(
|
||||
"get_taxonomy_url",
|
||||
global_fns::make_get_taxonomy_url(self.taxonomies.clone()),
|
||||
global_fns::make_get_taxonomy_url(&self.taxonomies),
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -317,16 +315,14 @@ impl Site {
|
|||
/// The `render` parameter is used in the serve command, when rebuilding a page.
|
||||
/// If `true`, it will also render the markdown for that page
|
||||
/// Returns the previous page struct if there was one at the same path
|
||||
pub fn add_page(&mut self, page: Page, render: bool) -> Result<Option<Page>> {
|
||||
let path = page.file.path.clone();
|
||||
pub fn add_page(&mut self, mut page: Page, render: bool) -> Result<Option<Page>> {
|
||||
self.permalinks.insert(page.file.relative.clone(), page.permalink.clone());
|
||||
let prev = self.pages.insert(page.file.path.clone(), page);
|
||||
|
||||
if render {
|
||||
let insert_anchor = self.find_parent_section_insert_anchor(&self.pages[&path].file.parent);
|
||||
let page = self.pages.get_mut(&path).unwrap();
|
||||
let insert_anchor = self.find_parent_section_insert_anchor(&page.file.parent);
|
||||
page.render_markdown(&self.permalinks, &self.tera, &self.config, &self.base_path, insert_anchor)?;
|
||||
}
|
||||
let prev = self.library.remove_page_by_path(&page.file.path);
|
||||
self.library.insert_page(page);
|
||||
|
||||
Ok(prev)
|
||||
}
|
||||
|
@ -335,15 +331,13 @@ impl Site {
|
|||
/// The `render` parameter is used in the serve command, when rebuilding a page.
|
||||
/// If `true`, it will also render the markdown for that page
|
||||
/// Returns the previous section struct if there was one at the same path
|
||||
pub fn add_section(&mut self, section: Section, render: bool) -> Result<Option<Section>> {
|
||||
let path = section.file.path.clone();
|
||||
pub fn add_section(&mut self, mut section: Section, render: bool) -> Result<Option<Section>> {
|
||||
self.permalinks.insert(section.file.relative.clone(), section.permalink.clone());
|
||||
let prev = self.sections.insert(section.file.path.clone(), section);
|
||||
|
||||
if render {
|
||||
let section = self.sections.get_mut(&path).unwrap();
|
||||
section.render_markdown(&self.permalinks, &self.tera, &self.config, &self.base_path)?;
|
||||
}
|
||||
let prev = self.library.remove_section_by_path(§ion.file.path);
|
||||
self.library.insert_section(section);
|
||||
|
||||
Ok(prev)
|
||||
}
|
||||
|
@ -351,7 +345,7 @@ impl Site {
|
|||
/// Finds the insert_anchor for the parent section of the directory at `path`.
|
||||
/// Defaults to `AnchorInsert::None` if no parent section found
|
||||
pub fn find_parent_section_insert_anchor(&self, parent_path: &PathBuf) -> InsertAnchor {
|
||||
match self.sections.get(&parent_path.join("_index.md")) {
|
||||
match self.library.get_section(&parent_path.join("_index.md")) {
|
||||
Some(s) => s.meta.insert_anchor_links,
|
||||
None => InsertAnchor::None
|
||||
}
|
||||
|
@ -360,59 +354,13 @@ impl Site {
|
|||
/// Find out the direct subsections of each subsection if there are some
|
||||
/// as well as the pages for each section
|
||||
pub fn populate_sections(&mut self) {
|
||||
let mut grandparent_paths: HashMap<PathBuf, Vec<PathBuf>> = HashMap::new();
|
||||
|
||||
for section in self.sections.values_mut() {
|
||||
if let Some(ref grand_parent) = section.file.grand_parent {
|
||||
grandparent_paths
|
||||
.entry(grand_parent.to_path_buf())
|
||||
.or_insert_with(|| vec![])
|
||||
.push(section.file.path.clone());
|
||||
}
|
||||
// Make sure the pages of a section are empty since we can call that many times on `serve`
|
||||
section.pages = vec![];
|
||||
section.ignored_pages = vec![];
|
||||
}
|
||||
|
||||
for page in self.pages.values() {
|
||||
let parent_section_path = page.file.parent.join("_index.md");
|
||||
if self.sections.contains_key(&parent_section_path) {
|
||||
// TODO: use references instead of cloning to avoid having to call populate_section on
|
||||
// content change
|
||||
self.sections.get_mut(&parent_section_path).unwrap().pages.push(page.clone());
|
||||
}
|
||||
}
|
||||
|
||||
self.sort_sections_pages(None);
|
||||
// TODO: remove this clone
|
||||
let sections = self.sections.clone();
|
||||
|
||||
for section in self.sections.values_mut() {
|
||||
if let Some(paths) = grandparent_paths.get(§ion.file.parent) {
|
||||
section.subsections = paths
|
||||
.iter()
|
||||
.map(|p| sections[p].clone())
|
||||
.collect::<Vec<_>>();
|
||||
section.subsections
|
||||
.sort_by(|a, b| a.meta.weight.cmp(&b.meta.weight));
|
||||
}
|
||||
}
|
||||
self.library.populate_sections();
|
||||
}
|
||||
|
||||
/// Sorts the pages of the section at the given path
|
||||
/// By default will sort all sections but can be made to only sort a single one by providing a path
|
||||
pub fn sort_sections_pages(&mut self, only: Option<&Path>) {
|
||||
for (path, section) in &mut self.sections {
|
||||
if let Some(p) = only {
|
||||
if p != path {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
let pages = mem::replace(&mut section.pages, vec![]);
|
||||
let (sorted_pages, cannot_be_sorted_pages) = sort_pages(pages, section.meta.sort_by);
|
||||
section.pages = populate_siblings(&sorted_pages, section.meta.sort_by);
|
||||
section.ignored_pages = cannot_be_sorted_pages;
|
||||
}
|
||||
self.library.sort_sections_pages(only);
|
||||
}
|
||||
|
||||
/// Find all the tags and categories if it's asked in the config
|
||||
|
@ -421,13 +369,7 @@ impl Site {
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
self.taxonomies = find_taxonomies(
|
||||
&self.config,
|
||||
self.pages
|
||||
.values()
|
||||
.filter(|p| !p.is_draft())
|
||||
.collect::<Vec<_>>(),
|
||||
)?;
|
||||
self.taxonomies = find_taxonomies(&self.config, &self.library)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -501,7 +443,7 @@ impl Site {
|
|||
create_directory(¤t_path)?;
|
||||
|
||||
// Finally, create a index.html file there with the page rendered
|
||||
let output = page.render_html(&self.tera, &self.config)?;
|
||||
let output = page.render_html(&self.tera, &self.config, &self.library)?;
|
||||
create_file(¤t_path.join("index.html"), &self.inject_livereload(output))?;
|
||||
|
||||
// Copy any asset we found previously into the same directory as the index.html
|
||||
|
@ -522,7 +464,7 @@ impl Site {
|
|||
self.render_orphan_pages()?;
|
||||
self.render_sitemap()?;
|
||||
if self.config.generate_rss {
|
||||
self.render_rss_feed(self.pages.values().collect(), None)?;
|
||||
self.render_rss_feed(self.library.pages_values(), None)?;
|
||||
}
|
||||
self.render_404()?;
|
||||
self.render_robots()?;
|
||||
|
@ -555,7 +497,7 @@ impl Site {
|
|||
&self.output_path.join(&format!("search_index.{}.js", self.config.default_language)),
|
||||
&format!(
|
||||
"window.searchIndex = {};",
|
||||
search::build_index(&self.sections, &self.config.default_language)?
|
||||
search::build_index(&self.config.default_language, &self.library)?
|
||||
),
|
||||
)?;
|
||||
|
||||
|
@ -627,7 +569,7 @@ impl Site {
|
|||
}
|
||||
|
||||
pub fn render_aliases(&self) -> Result<()> {
|
||||
for page in self.pages.values() {
|
||||
for page in self.library.pages_values() {
|
||||
for alias in &page.meta.aliases {
|
||||
let mut output_path = self.output_path.to_path_buf();
|
||||
let mut split = alias.split('/').collect::<Vec<_>>();
|
||||
|
@ -693,7 +635,7 @@ impl Site {
|
|||
|
||||
ensure_directory_exists(&self.output_path)?;
|
||||
let output_path = self.output_path.join(&taxonomy.kind.name);
|
||||
let list_output = taxonomy.render_all_terms(&self.tera, &self.config)?;
|
||||
let list_output = taxonomy.render_all_terms(&self.tera, &self.config, &self.library)?;
|
||||
create_directory(&output_path)?;
|
||||
create_file(&output_path.join("index.html"), &self.inject_livereload(list_output))?;
|
||||
|
||||
|
@ -703,15 +645,15 @@ impl Site {
|
|||
.map(|item| {
|
||||
if taxonomy.kind.rss {
|
||||
self.render_rss_feed(
|
||||
item.pages.iter().map(|p| p).collect(),
|
||||
item.pages.iter().map(|p| self.library.get_page_by_key(*p)).collect(),
|
||||
Some(&PathBuf::from(format!("{}/{}", taxonomy.kind.name, item.slug))),
|
||||
)?;
|
||||
}
|
||||
|
||||
if taxonomy.kind.is_paginated() {
|
||||
self.render_paginated(&output_path, &Paginator::from_taxonomy(&taxonomy, item))
|
||||
self.render_paginated(&output_path, &Paginator::from_taxonomy(&taxonomy, item, &self.library))
|
||||
} else {
|
||||
let single_output = taxonomy.render_term(item, &self.tera, &self.config)?;
|
||||
let single_output = taxonomy.render_term(item, &self.tera, &self.config, &self.library)?;
|
||||
let path = output_path.join(&item.slug);
|
||||
create_directory(&path)?;
|
||||
create_file(
|
||||
|
@ -729,8 +671,9 @@ impl Site {
|
|||
|
||||
let mut context = Context::new();
|
||||
|
||||
let mut pages = self.pages
|
||||
.values()
|
||||
let mut pages = self.library
|
||||
.pages_values()
|
||||
.iter()
|
||||
.filter(|p| !p.is_draft())
|
||||
.map(|p| {
|
||||
let date = match p.meta.date {
|
||||
|
@ -743,8 +686,9 @@ impl Site {
|
|||
pages.sort_by(|a, b| a.permalink.cmp(&b.permalink));
|
||||
context.insert("pages", &pages);
|
||||
|
||||
let mut sections = self.sections
|
||||
.values()
|
||||
let mut sections = self.library
|
||||
.sections_values()
|
||||
.iter()
|
||||
.map(|s| SitemapEntry::new(s.permalink.clone(), None))
|
||||
.collect::<Vec<_>>();
|
||||
sections.sort_by(|a, b| a.permalink.cmp(&b.permalink));
|
||||
|
@ -784,16 +728,22 @@ impl Site {
|
|||
.filter(|p| p.meta.date.is_some() && !p.is_draft())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
pages.par_sort_unstable_by(sort_pages_by_date);
|
||||
|
||||
// Don't generate a RSS feed if none of the pages has a date
|
||||
if pages.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
pages.par_sort_unstable_by(sort_actual_pages_by_date);
|
||||
|
||||
context.insert("last_build_date", &pages[0].meta.date.clone().map(|d| d.to_string()));
|
||||
// limit to the last n elements
|
||||
context.insert("pages", &pages.iter().take(self.config.rss_limit).collect::<Vec<_>>());
|
||||
let p = pages
|
||||
.iter()
|
||||
.take(self.config.rss_limit)
|
||||
.map(|x| x.to_serialized_basic())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
context.insert("pages", &p);
|
||||
context.insert("config", &self.config);
|
||||
|
||||
let rss_feed_url = if let Some(ref base) = base_path {
|
||||
|
@ -846,7 +796,7 @@ impl Site {
|
|||
section
|
||||
.pages
|
||||
.par_iter()
|
||||
.map(|p| self.render_page(p))
|
||||
.map(|k| self.render_page(self.library.get_page_by_key(*k)))
|
||||
.collect::<Result<()>>()?;
|
||||
}
|
||||
|
||||
|
@ -861,9 +811,9 @@ impl Site {
|
|||
}
|
||||
|
||||
if section.meta.is_paginated() {
|
||||
self.render_paginated(&output_path, &Paginator::from_section(§ion.pages, section))?;
|
||||
self.render_paginated(&output_path, &Paginator::from_section(§ion, &self.library))?;
|
||||
} else {
|
||||
let output = section.render_html(&self.tera, &self.config)?;
|
||||
let output = section.render_html(&self.tera, &self.config, &self.library)?;
|
||||
create_file(&output_path.join("index.html"), &self.inject_livereload(output))?;
|
||||
}
|
||||
|
||||
|
@ -873,16 +823,15 @@ impl Site {
|
|||
/// Used only on reload
|
||||
pub fn render_index(&self) -> Result<()> {
|
||||
self.render_section(
|
||||
&self.sections[&self.content_path.join("_index.md")],
|
||||
&self.library.get_section(&self.content_path.join("_index.md")).unwrap(),
|
||||
false,
|
||||
)
|
||||
}
|
||||
|
||||
/// Renders all sections
|
||||
pub fn render_sections(&self) -> Result<()> {
|
||||
self.sections
|
||||
.values()
|
||||
.collect::<Vec<_>>()
|
||||
self.library
|
||||
.sections_values()
|
||||
.into_par_iter()
|
||||
.map(|s| self.render_section(s, true))
|
||||
.collect::<Result<()>>()
|
||||
|
|
|
@@ -19,44 +19,46 @@ fn can_parse_site() {
     site.load().unwrap();

     // Correct number of pages (sections are pages too)
-    assert_eq!(site.pages.len(), 15);
+    assert_eq!(site.library.pages().len(), 15);
     let posts_path = path.join("content").join("posts");

     // Make sure we remove all the pwd + content from the sections
-    let basic = &site.pages[&posts_path.join("simple.md")];
+    let basic = site.library.get_page(&posts_path.join("simple.md")).unwrap();
     assert_eq!(basic.file.components, vec!["posts".to_string()]);

     // Make sure the page with a url doesn't have any sections
-    let url_post = &site.pages[&posts_path.join("fixed-url.md")];
+    let url_post = site.library.get_page(&posts_path.join("fixed-url.md")).unwrap();
     assert_eq!(url_post.path, "a-fixed-url/");

     // Make sure the article in a folder with only asset doesn't get counted as a section
-    let asset_folder_post = &site.pages[&posts_path.join("with-assets").join("index.md")];
+    let asset_folder_post = site.library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap();
     assert_eq!(asset_folder_post.file.components, vec!["posts".to_string()]);

     // That we have the right number of sections
-    assert_eq!(site.sections.len(), 7);
+    assert_eq!(site.library.sections().len(), 7);

     // And that the sections are correct
-    let index_section = &site.sections[&path.join("content").join("_index.md")];
+    let index_section = site.library.get_section(&path.join("content").join("_index.md")).unwrap();
     assert_eq!(index_section.subsections.len(), 3);
     assert_eq!(index_section.pages.len(), 1);

-    let posts_section = &site.sections[&posts_path.join("_index.md")];
+    let posts_section = site.library.get_section(&posts_path.join("_index.md")).unwrap();
     assert_eq!(posts_section.subsections.len(), 1);
     assert_eq!(posts_section.pages.len(), 7);

-    let tutorials_section = &site.sections[&posts_path.join("tutorials").join("_index.md")];
+    let tutorials_section = site.library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap();
     assert_eq!(tutorials_section.subsections.len(), 2);
-    assert_eq!(tutorials_section.subsections[0].clone().meta.title.unwrap(), "Programming");
-    assert_eq!(tutorials_section.subsections[1].clone().meta.title.unwrap(), "DevOps");
+    let sub1 = site.library.get_section_by_key(tutorials_section.subsections[0]);
+    let sub2 = site.library.get_section_by_key(tutorials_section.subsections[1]);
+    assert_eq!(sub1.clone().meta.title.unwrap(), "Programming");
+    assert_eq!(sub2.clone().meta.title.unwrap(), "DevOps");
     assert_eq!(tutorials_section.pages.len(), 0);

-    let devops_section = &site.sections[&posts_path.join("tutorials").join("devops").join("_index.md")];
+    let devops_section = site.library.get_section(&posts_path.join("tutorials").join("devops").join("_index.md")).unwrap();
     assert_eq!(devops_section.subsections.len(), 0);
     assert_eq!(devops_section.pages.len(), 2);

-    let prog_section = &site.sections[&posts_path.join("tutorials").join("programming").join("_index.md")];
+    let prog_section = site.library.get_section(&posts_path.join("tutorials").join("programming").join("_index.md")).unwrap();
     assert_eq!(prog_section.subsections.len(), 0);
     assert_eq!(prog_section.pages.len(), 2);
 }
@@ -219,7 +221,7 @@ fn can_build_site_with_taxonomies() {
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();

-    for (i, page) in site.pages.values_mut().enumerate() {
+    for (i, page) in site.library.pages_values_mut().iter_mut().enumerate() {
         page.meta.taxonomies = {
             let mut taxonomies = HashMap::new();
             taxonomies.insert("categories".to_string(), vec![if i % 2 == 0 { "A" } else { "B" }.to_string()]);
@@ -289,7 +291,7 @@ fn can_build_site_with_pagination_for_section() {
     path.push("test_site");
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();
-    for section in site.sections.values_mut(){
+    for section in site.library.sections_values_mut() {
         if section.is_index() {
             continue;
         }
|
@ -368,7 +370,7 @@ fn can_build_site_with_pagination_for_index() {
|
|||
let mut site = Site::new(&path, "config.toml").unwrap();
|
||||
site.load().unwrap();
|
||||
{
|
||||
let index = site.sections.get_mut(&path.join("content").join("_index.md")).unwrap();
|
||||
let index = site.library.get_section_mut(&path.join("content").join("_index.md")).unwrap();
|
||||
index.meta.paginate_by = Some(2);
|
||||
index.meta.template = Some("index_paginated.html".to_string());
|
||||
}
|
||||
|
|
|
@@ -1,16 +0,0 @@
-[package]
-name = "taxonomies"
-version = "0.1.0"
-authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
-
-[dependencies]
-tera = "0.11"
-slug = "0.1"
-serde = "1"
-serde_derive = "1"
-
-errors = { path = "../errors" }
-config = { path = "../config" }
-content = { path = "../content" }
-front_matter = { path = "../front_matter" }
-utils = { path = "../utils" }
@@ -11,7 +11,6 @@ pulldown-cmark = "0"

 errors = { path = "../errors" }
 utils = { path = "../utils" }
-content = { path = "../content" }
+library = { path = "../library" }
 config = { path = "../config" }
-taxonomies = { path = "../taxonomies" }
 imageproc = { path = "../imageproc" }
@@ -1,13 +1,11 @@
 use std::collections::HashMap;
 use std::path::PathBuf;
 use std::sync::{Arc, Mutex};

 use tera::{GlobalFn, Value, from_value, to_value, Result};

-use content::{Page, Section};
+use library::{Taxonomy, Library};
 use config::Config;
 use utils::site::resolve_internal_link;
-use taxonomies::Taxonomy;
 use imageproc;

@@ -53,10 +51,13 @@ pub fn make_trans(config: Config) -> GlobalFn {
 }


-pub fn make_get_page(all_pages: &HashMap<PathBuf, Page>) -> GlobalFn {
+pub fn make_get_page(library: &Library) -> GlobalFn {
     let mut pages = HashMap::new();
-    for page in all_pages.values() {
-        pages.insert(page.file.relative.clone(), page.clone());
+    for page in library.pages_values() {
+        pages.insert(
+            page.file.relative.clone(),
+            library.get_cached_page_value(&page.file.path).clone(),
+        );
     }

     Box::new(move |args| -> Result<Value> {
@@ -66,19 +67,19 @@ pub fn make_get_page(all_pages: &HashMap<PathBuf, Page>) -> GlobalFn {
             "`get_page` requires a `path` argument with a string value"
         );
         match pages.get(&path) {
-            Some(p) => Ok(to_value(p).unwrap()),
+            Some(p) => Ok(p.clone()),
             None => Err(format!("Page `{}` not found.", path).into())
         }
     })
 }

-pub fn make_get_section(all_sections: &HashMap<PathBuf, Section>) -> GlobalFn {
+pub fn make_get_section(library: &Library) -> GlobalFn {
     let mut sections = HashMap::new();
-    for section in all_sections.values() {
-        if section.file.components == vec!["rebuild".to_string()] {
-            //println!("Setting sections:\n{:#?}", section.pages[0]);
-        }
-        sections.insert(section.file.relative.clone(), section.clone());
+    for section in library.sections_values() {
+        sections.insert(
+            section.file.relative.clone(),
+            library.get_cached_section_value(&section.file.path).clone(),
+        );
     }

     Box::new(move |args| -> Result<Value> {
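The make_get_page and make_get_section changes above share one idea: pages and sections are serialized into template Values once, at registration time (the get_cached_page_value and get_cached_section_value calls), so each template call is answered with a cheap clone of a cached Value instead of re-serializing the whole struct. A standalone sketch of that caching shape, using serde_json::Value as a stand-in for tera's Value and illustrative names throughout:

use std::collections::HashMap;

use serde_json::{json, Value};

// Serialize once when the function is built; clone the cached Value per call.
fn make_getter(values: Vec<(String, Value)>) -> Box<dyn Fn(&str) -> Option<Value>> {
    let cache: HashMap<String, Value> = values.into_iter().collect();
    Box::new(move |path| cache.get(path).cloned())
}

fn main() {
    let getter = make_getter(vec![(
        "blog/hello.md".to_string(),
        json!({ "title": "Hello", "permalink": "https://example.com/blog/hello/" }),
    )]);
    assert!(getter("blog/hello.md").is_some());
    assert!(getter("missing.md").is_none());
}

The same register-time precomputation also drives the make_get_taxonomy and make_get_taxonomy_url hunks further down, which replace per-call scans with HashMap lookups.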
@@ -87,9 +88,9 @@ pub fn make_get_section(all_sections: &HashMap<PathBuf, Section>) -> GlobalFn {
             args.get("path"),
             "`get_section` requires a `path` argument with a string value"
         );
-        //println!("Found {:#?}", sections.get(&path).unwrap().pages[0]);
-
         match sections.get(&path) {
-            Some(p) => Ok(to_value(p).unwrap()),
+            Some(p) => Ok(p.clone()),
             None => Err(format!("Section `{}` not found.", path).into())
         }
     })
@@ -134,10 +135,13 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> GlobalFn {
     })
 }

-pub fn make_get_taxonomy(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
+pub fn make_get_taxonomy(all_taxonomies: &[Taxonomy], library: &Library) -> GlobalFn {
     let mut taxonomies = HashMap::new();
     for taxonomy in all_taxonomies {
-        taxonomies.insert(taxonomy.kind.name.clone(), taxonomy);
+        taxonomies.insert(
+            taxonomy.kind.name.clone(),
+            to_value(taxonomy.to_serialized(library)).unwrap()
+        );
     }

     Box::new(move |args| -> Result<Value> {
@@ -157,10 +161,14 @@ pub fn make_get_taxonomy(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
     })
 }

-pub fn make_get_taxonomy_url(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
+pub fn make_get_taxonomy_url(all_taxonomies: &[Taxonomy]) -> GlobalFn {
     let mut taxonomies = HashMap::new();
     for taxonomy in all_taxonomies {
-        taxonomies.insert(taxonomy.kind.name.clone(), taxonomy);
+        let mut items = HashMap::new();
+        for item in &taxonomy.items {
+            items.insert(item.name.clone(), item.permalink.clone());
+        }
+        taxonomies.insert(taxonomy.kind.name.clone(), items);
     }

     Box::new(move |args| -> Result<Value> {
|
@ -181,10 +189,8 @@ pub fn make_get_taxonomy_url(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
|
|||
)
|
||||
};
|
||||
|
||||
for item in &container.items {
|
||||
if item.name == name {
|
||||
return Ok(to_value(item.permalink.clone()).unwrap());
|
||||
}
|
||||
if let Some(ref permalink) = container.get(&name) {
|
||||
return Ok(to_value(permalink.clone()).unwrap());
|
||||
}
|
||||
|
||||
Err(
|
||||
|
@@ -247,10 +253,10 @@ mod tests {

     use std::collections::HashMap;

-    use tera::to_value;
+    use tera::{to_value, Value};

     use config::{Config, Taxonomy as TaxonomyConfig};
-    use taxonomies::{Taxonomy, TaxonomyItem};
+    use library::{Taxonomy, TaxonomyItem, Library};


     #[test]
@@ -296,22 +302,44 @@ mod tests {
     #[test]
     fn can_get_taxonomy() {
         let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
+        let library = Library::new(0, 0);
         let tag = TaxonomyItem::new(
-            "Progamming",
+            "Programming",
             "tags",
             &Config::default(),
             vec![],
+            &library
         );
         let tags = Taxonomy {
             kind: taxo_config,
             items: vec![tag],
         };

-        let static_fn = make_get_taxonomy(vec![tags.clone()]);
+        let taxonomies = vec![tags.clone()];
+        let static_fn = make_get_taxonomy(&taxonomies, &library);
         // can find it correctly
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("tags").unwrap());
-        assert_eq!(static_fn(args).unwrap(), to_value(&tags).unwrap());
+        let res = static_fn(args).unwrap();
+        let res_obj = res.as_object().unwrap();
+        assert_eq!(res_obj["kind"], to_value(tags.kind).unwrap());
+        assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1);
+        assert_eq!(
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["name"],
+            Value::String("Programming".to_string())
+        );
+        assert_eq!(
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["slug"],
+            Value::String("programming".to_string())
+        );
+        assert_eq!(
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["permalink"],
+            Value::String("http://a-website.com/tags/programming/".to_string())
+        );
+        assert_eq!(
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["pages"],
+            Value::Array(vec![])
+        );
         // and errors if it can't find it
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("something-else").unwrap());
@@ -321,18 +349,21 @@ mod tests {
     #[test]
     fn can_get_taxonomy_url() {
         let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
+        let library = Library::new(0, 0);
         let tag = TaxonomyItem::new(
             "Programming",
             "tags",
             &Config::default(),
             vec![],
+            &library
         );
         let tags = Taxonomy {
             kind: taxo_config,
             items: vec![tag],
         };

-        let static_fn = make_get_taxonomy_url(vec![tags.clone()]);
+        let taxonomies = vec![tags.clone()];
+        let static_fn = make_get_taxonomy_url(&taxonomies);
         // can find it correctly
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("tags").unwrap());
@@ -7,9 +7,8 @@ extern crate pulldown_cmark;

 extern crate errors;
 extern crate utils;
-extern crate content;
+extern crate library;
 extern crate config;
-extern crate taxonomies;
 extern crate imageproc;

 pub mod filters;
@@ -49,18 +49,23 @@ fn colorize(message: &str, color: &ColorSpec) {
 pub fn notify_site_size(site: &Site) {
     println!(
         "-> Creating {} pages ({} orphan), {} sections, and processing {} images",
-        site.pages.len(),
+        site.library.pages().len(),
         site.get_all_orphan_pages().len(),
-        site.sections.len() - 1, // -1 since we do not count the index as a section
+        site.library.sections().len() - 1, // -1 since we do not count the index as a section
         site.num_img_ops(),
     );
 }

 /// Display a warning in the console if there are ignored pages in the site
 pub fn warn_about_ignored_pages(site: &Site) {
-    let ignored_pages: Vec<_> = site.sections
-        .values()
-        .flat_map(|s| s.ignored_pages.iter().map(|p| p.file.path.clone()))
+    let ignored_pages: Vec<_> = site.library
+        .sections_values()
+        .iter()
+        .flat_map(|s| {
+            s.ignored_pages
+                .iter()
+                .map(|k| site.library.get_page_by_key(*k).file.path.clone())
+        })
         .collect();

     if !ignored_pages.is_empty() {
@@ -14,7 +14,6 @@ extern crate ctrlc;
 extern crate site;
 #[macro_use]
 extern crate errors;
-extern crate content;
 extern crate front_matter;
 extern crate utils;
 extern crate rebuild;