diff --git a/Cargo.toml b/Cargo.toml index 8e18e17..fddea48 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,33 +23,35 @@ required-features = ["cli"] [dependencies] chrono = { version = "0.4.38", features = ["serde", "unstable-locales"] } -clap = { version = "4.5.17", features = ["derive", "cargo"], optional = true } -comrak = { version = "0.28.0", features = ["syntect", "shortcodes"], default-features = false } +clap = { version = "4.5.20", features = ["derive", "cargo"], optional = true } +comrak = { version = "0.29.0", features = ["syntect", "shortcodes"], default-features = false } daggy = { version = "0.8.0", features = ["stable_dag"] } toml = "0.8.19" liquid = "0.26.9" liquid-core = "0.26.9" liquid-lib = { version = "0.26.9", features = ["all", "stdlib", "jekyll", "shopify", "extra"] } -serde = "1.0.210" +serde = "1.0.213" sys-locale = "0.3.1" latex2mathml = "0.2.3" ahash = { version = "0.8.11", features = ["std", "serde", "runtime-rng"] } mimalloc = { version = "0.1.43", optional = true } -ticky = { version = "1.0.2", optional = true } +ticky = { version = "1.0.2" } miette = { version = "7.2.0", features = ["fancy", "syntect-highlighter"] } -thiserror = "1.0.63" +thiserror = "1.0.65" glob = "0.3.1" -tokio = { version = "1.40.0", features = ["full"], optional = true } -futures = "0.3.30" +tokio = { version = "1.41.0", features = ["full"], optional = true } +futures = "0.3.31" tracing-subscriber = { version = "0.3.18", optional = true, features = ["env-filter"]} tracing = "0.1.40" -notify-debouncer-full = { version = "0.3.1", default-features = false, optional = true } +notify-debouncer-full = { version = "0.4.0", default-features = false, optional = true } actix-files = { version = "0.6.6", optional = true } actix-web = { version = "4.9.0", optional = true } layout-rs = "0.1.2" html-escape = "0.2.13" syntect = "5.2.0" +path-clean = "1.0.1" [features] default = [] -cli = ["dep:mimalloc", "dep:ticky", "dep:tokio", "dep:clap", "dep:tracing-subscriber", 
"dep:notify-debouncer-full", "dep:actix-files", "dep:actix-web"] +cli = ["fs_provider", "dep:mimalloc", "dep:tokio", "dep:clap", "dep:tracing-subscriber", "dep:notify-debouncer-full", "dep:actix-files", "dep:actix-web"] +fs_provider = [] \ No newline at end of file diff --git a/src/builds.rs b/src/builds.rs index 0f7599c..4f04376 100644 --- a/src/builds.rs +++ b/src/builds.rs @@ -2,22 +2,16 @@ use crate::page::Page; use ahash::AHashMap; use chrono::Locale; use daggy::{ - petgraph::{ - algo::toposort, - dot::{Config, Dot}, - Direction, - }, + petgraph::{algo::toposort, Direction}, stable_dag::StableDag, NodeIndex, Walker, }; -use layout::gv::DotParser; -use layout::gv::GraphBuilder; -use layout::{backends::svg::SVGWriter, core::color::Color, std_shapes::shapes::ShapeKind}; use liquid::{to_object, Object, Parser}; use liquid_core::to_value; use miette::IntoDiagnostic; -use std::{env, fs, path::PathBuf}; -use tracing::{debug, trace, warn}; +use path_clean::PathClean; +use std::path::PathBuf; +use tracing::{debug, trace}; /// Information held in memory while performing a build. #[derive(Clone, Default)] @@ -42,58 +36,6 @@ pub enum EdgeType { } impl Build { - /// Visualise the DAG. 
- pub fn visualise_dag(&mut self) -> miette::Result<()> { - let dag_graph = self.dag.graph(); - let dag_graphviz = Dot::with_attr_getters( - dag_graph, - &[Config::NodeNoLabel, Config::EdgeNoLabel], - &|_graph, edge| format!("label = \"{:?}\"", edge.weight()), - &|_graph, node| { - let path = PathBuf::from(node.1.to_path_string()); - let relative_path = path - .strip_prefix(fs::canonicalize(env::current_dir().unwrap()).unwrap()) - .unwrap(); - let label = relative_path.to_string_lossy().to_string(); - format!("label = \"{}\"", label) - }, - ); - let mut parser = DotParser::new(&format!("{:?}", dag_graphviz)); - let tree = parser.process(); - if let Ok(tree) = tree { - let mut gb = GraphBuilder::new(); - gb.visit_graph(&tree); - let mut vg = gb.get(); - let mut svg = SVGWriter::new(); - for node_handle in vg.iter_nodes() { - let node = vg.element_mut(node_handle); - let old_shape = node.shape.clone(); - if let ShapeKind::Circle(label) = old_shape { - node.shape = ShapeKind::Box(label.clone()); - if Page::is_layout_path(label.clone())? { - node.look.fill_color = Some(Color::fast("#FFDFBA")); - } else { - match Page::get_collections_from_path(label)? { - Some(_) => { - node.look.fill_color = Some(Color::fast("#DAFFBA")); - } - None => { - node.look.fill_color = Some(Color::fast("#BADAFF")); - } - } - } - } - } - vg.do_it(false, false, false, &mut svg); - let content = svg.finalize(); - std::fs::create_dir_all("output").into_diagnostic()?; - std::fs::write("output/dag.svg", content).into_diagnostic()?; - } else { - warn!("Unable to visualise the DAG.") - } - Ok(()) - } - /// Get all descendants of a page in a DAG. /// /// # Arguments @@ -160,7 +102,7 @@ impl Build { let parents = dag.parents(root_index).iter(dag).collect::>(); for parent in parents { let parent_page = &dag.graph()[parent.1]; - if !parent_page.is_layout()? 
{ + if !parent_page.is_layout() { ancestors.push(parent.1); } let parent_ancestors = Build::get_non_layout_ancestors(dag, parent.1)?; @@ -188,7 +130,7 @@ impl Build { let ancestor_page = &self.dag.graph()[ancestor]; let ancestor_object = liquid_core::Value::Object(to_object(&ancestor_page).into_diagnostic()?); - if ancestor_page.is_layout()? { + if ancestor_page.is_layout() { let ancestor_object = liquid_core::Value::Object(to_object(&ancestor_page).into_diagnostic()?); layout_ancestor_contexts.push(ancestor_object); @@ -210,11 +152,8 @@ impl Build { /// # Returns /// /// A list of all nodes that were rendered. - pub fn render_all(&mut self, visualise_dag: bool) -> miette::Result> { + pub fn render_all(&mut self) -> miette::Result> { trace!("Rendering all pages … "); - if visualise_dag { - self.visualise_dag()?; - } let mut rendered_indices = Vec::new(); let indices = toposort(&self.dag.graph(), None).unwrap_or_default(); for index in indices { @@ -242,16 +181,11 @@ impl Build { recursive: bool, rendered_indices: &mut Vec, ) -> miette::Result<()> { - let current_directory = - fs::canonicalize(env::current_dir().into_diagnostic()?).into_diagnostic()?; let root_page = self.dag.graph()[root_index].to_owned(); - let root_path = fs::canonicalize(root_page.to_path_string()).into_diagnostic()?; - let root_path_difference = root_path - .strip_prefix(¤t_directory) - .into_diagnostic()?; - debug!("Rendering page: {:?}", root_path_difference); + let root_path: PathBuf = PathBuf::from(root_page.to_path_string()).clean(); + debug!("Rendering page: {:?}", root_path); let mut root_contexts = self.contexts.clone(); - if root_path_difference.starts_with(PathBuf::from("layouts/")) { + if root_path.starts_with(PathBuf::from("layouts/")) { debug!("Page is a layout page … "); let layout_object = liquid_core::Value::Object(to_object(&root_page).into_diagnostic()?); diff --git a/src/date.rs b/src/date.rs index 2e81a2f..c824299 100644 --- a/src/date.rs +++ b/src/date.rs @@ -5,9 +5,9 @@ 
use chrono::NaiveDateTime; use chrono::NaiveTime; use chrono::TimeZone; use chrono::Utc; +use core::fmt; use serde::Deserialize; use serde::Serialize; -use std::fmt; use sys_locale::get_locale; use tracing::debug; diff --git a/src/fs_provider.rs b/src/fs_provider.rs new file mode 100644 index 0000000..16d3116 --- /dev/null +++ b/src/fs_provider.rs @@ -0,0 +1,35 @@ +use crate::provider::VoxProvider; +use miette::IntoDiagnostic; + +#[derive(Debug)] +/// A provider of the Vox build system that reads & writes from the file system. +pub struct FsProvider; +impl VoxProvider for FsProvider { + fn read_to_string(path: impl AsRef) -> miette::Result { + std::fs::read_to_string(path).into_diagnostic() + } + fn write_file( + path: impl AsRef + Clone, + contents: impl AsRef<[u8]>, + ) -> miette::Result<()> { + if let Some(parent_path) = path.as_ref().parent() { + std::fs::create_dir_all(parent_path).into_diagnostic()?; + } + std::fs::write(path, contents).into_diagnostic() + } + fn remove_file(path: impl AsRef) -> miette::Result<()> { + std::fs::remove_file(path).into_diagnostic() + } + fn list_vox_files() -> miette::Result> { + Ok(glob::glob("**/*.vox") + .into_diagnostic()? + .filter_map(Result::ok) + .collect()) + } + fn list_snippets() -> miette::Result> { + Ok(glob::glob("snippets/**/*") + .into_diagnostic()? + .filter_map(Result::ok) + .collect()) + } +} diff --git a/src/lib.rs b/src/lib.rs index 098f633..7a27ced 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -21,5 +21,9 @@ pub mod page; /// Errors originating during the build process. pub mod error; -/// Template parsing and rendering. -pub mod templates; +/// The interface to the Vox build system. +pub mod provider; + +/// A provider of the Vox build system that reads & writes from the file system. 
+#[cfg(feature = "fs_provider")] +pub mod fs_provider; diff --git a/src/main.rs b/src/main.rs index fdcd66c..d73cedb 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,38 +1,24 @@ use actix_files::NamedFile; use actix_web::dev::{ServiceRequest, ServiceResponse}; use actix_web::{App, HttpServer}; -use ahash::{AHashMap, AHashSet, HashSet, HashSetExt}; -use chrono::{Locale, Utc}; -use clap::{arg, crate_version}; use clap::{Parser, Subcommand}; -use daggy::petgraph::algo::toposort; -use daggy::Walker; -use daggy::{stable_dag::StableDag, NodeIndex}; -use glob::glob; -use liquid::{object, Object}; -use miette::{Context, IntoDiagnostic}; +use miette::IntoDiagnostic; use mimalloc::MiMalloc; -use notify_debouncer_full::{ - new_debouncer, - notify::{RecursiveMode, Watcher}, -}; +use notify_debouncer_full::{new_debouncer, notify::RecursiveMode}; use std::net::Ipv4Addr; -use std::path::Path; use std::sync::mpsc::channel; -use std::{fs, path::PathBuf, time::Duration}; -use syntect::highlighting::ThemeSet; -use syntect::html::css_for_theme_with_class_style; -use ticky::Stopwatch; +use std::sync::LazyLock; +use std::{path::PathBuf, time::Duration}; use tokio::time::sleep; -use toml::Table; -use tracing::{debug, error, info, trace, warn, Level}; -use vox::builds::EdgeType; -use vox::date::{self, Date}; -use vox::{builds::Build, page::Page, templates::create_liquid_parser}; +use tracing::{error, info, trace, Level}; +use vox::fs_provider::FsProvider; +use vox::provider::{VoxProvider, VERSION}; #[global_allocator] static GLOBAL: MiMalloc = MiMalloc; +static FS_PROVIDER: LazyLock = LazyLock::new(|| FsProvider {}); + #[derive(Parser)] #[command(version, about, long_about = None)] struct Cli { @@ -115,25 +101,22 @@ async fn main() -> miette::Result<()> { } subscriber_builder.init(); info!("Building … "); - let build_loop = tokio::spawn(async move { - loop { - let building = tokio::spawn(build(watch, visualise_dag, generate_syntax_css)); - match building.await.unwrap() { - Ok(_) => { - if 
!watch { - break; - } - } - Err(err) => { - error!("Building failed: {:#?}", err); - info!("Retrying in 5 seconds … "); - sleep(Duration::from_secs(5)).await; - continue; + loop { + let building = build(watch, visualise_dag, generate_syntax_css); + match building { + Ok(_) => { + if !watch { + break; } } + Err(err) => { + error!("Building failed: {:#?}", err); + info!("Retrying in 5 seconds … "); + sleep(Duration::from_secs(5)).await; + continue; + } } - }); - build_loop.await.into_diagnostic()?; + } } Some(Commands::Serve { path, @@ -168,8 +151,8 @@ async fn main() -> miette::Result<()> { subscriber_builder.init(); let build_loop = tokio::spawn(async move { loop { - let building = tokio::spawn(build(watch, visualise_dag, generate_syntax_css)); - match building.await.unwrap() { + let building = build(watch, visualise_dag, generate_syntax_css); + match building { Ok(_) => { if !watch { break; @@ -237,170 +220,26 @@ async fn main() -> miette::Result<()> { build_loop.await.into_diagnostic()?; serve_loop.await.into_diagnostic()?; } - None => println!("Vox {}", crate_version!()), - }; - Ok(()) -} - -fn insert_or_update_page( - entry: PathBuf, - layout_index: Option, - dag: &mut StableDag, - pages: &mut AHashMap, - layouts: &mut AHashMap>, - collection_dependents: &mut AHashMap>, - collection_members: &mut AHashMap>, - locale: Locale, -) -> miette::Result<()> { - let entry = fs::canonicalize(entry).into_diagnostic()?; - let (page, index) = if !Page::is_layout_path(&entry)? { - debug!("Inserting or updating page: {:?} … ", entry); - let page = path_to_page(entry.clone(), locale)?; - // If the page already exists in the DAG, update it. Otherwise, insert it. 
- let index = if pages.contains_key(&entry) { - debug!("Updating page: {:?} … ", entry); - let index = pages[&entry]; - let node = dag.node_weight_mut(index).unwrap(); - *node = page.clone(); - index - } else { - debug!("Inserting page: {:?} … ", entry); - let index = dag.add_node(page.clone()); - pages.insert(entry, index); - index - }; - (page, index) - } else { - debug!("Inserting layout: {:?} … ", entry); - let index = layout_index.unwrap(); - let page = dag.graph()[layout_index.unwrap()].clone(); - // debug!("{:#?}", page); - (page, index) + None => println!("Vox {}", VERSION), }; - - // A page's parents are pages in the collections it depends on. Its layout is a child. - let layout = page.layout.clone(); - let collections = page.collections.clone(); - let depends = page.depends.clone(); - debug!("Layout used: {:?} … ", layout); - debug!("Collections used: {:?} … ", depends); - if let Some(layout) = layout { - // Layouts are inserted multiple times, once for each page that uses them. - let layout_path = fs::canonicalize(format!("layouts/{}.vox", layout)) - .into_diagnostic() - .with_context(|| format!("Layout not found: `layouts/{}.vox`", layout))?; - let children = dag.children(index).iter(dag).collect::>(); - // If this page is being updated, the old layout should be replaced with the current one in the DAG. 
- let old_layout = children - .iter() - .find(|child| *dag.edge_weight(child.0).unwrap() == EdgeType::Layout); - if let Some(old_layout) = old_layout { - trace!("Removing old layout … "); - dag.remove_node(old_layout.1); - } - debug!("Inserting layout: {:?} … ", layout_path); - let layout_page = path_to_page(layout_path.clone(), locale)?; - let layout_index = dag.add_child(index, EdgeType::Layout, layout_page); - if let Some(layouts) = layouts.get_mut(&layout_path) { - layouts.insert(layout_index.1); - } else { - let mut new_set = HashSet::new(); - new_set.insert(layout_index.1); - layouts.insert(layout_path.clone(), new_set); - } - } - if let Some(collections) = collections { - for collection in collections { - if let Some(collection_members) = collection_members.get_mut(&collection) { - collection_members.insert(index); - } else { - let mut new_set = HashSet::new(); - new_set.insert(index); - collection_members.insert(collection.clone(), new_set); - } - } - } - if let Some(depends) = depends { - for collection in depends { - if let Some(collection_dependents) = collection_dependents.get_mut(&collection) { - collection_dependents.insert(index); - } else { - let mut new_set = HashSet::new(); - new_set.insert(index); - collection_dependents.insert(collection.clone(), new_set); - } - } - } - Ok(()) } -async fn build(watch: bool, visualise_dag: bool, generate_syntax_css: bool) -> miette::Result<()> { - let parser = create_liquid_parser()?; - let global = get_global_context()?; - let mut dag = StableDag::new(); - let mut pages: AHashMap = AHashMap::new(); - let mut layouts: AHashMap> = AHashMap::new(); - let mut collection_dependents: AHashMap> = AHashMap::new(); - let mut collection_members: AHashMap> = AHashMap::new(); - - // Initial DAG construction. - debug!("Constructing DAG … "); - for entry in glob("**/*.vox").into_diagnostic()? 
{ - let entry = fs::canonicalize(entry.into_diagnostic()?).into_diagnostic()?; - // In the event that a layout has collection parents, we do not want it duplicated, so we avoid inserting it at first. - if Page::is_layout_path(&entry)? { - continue; - } - insert_or_update_page( - entry, - None, - &mut dag, - &mut pages, - &mut layouts, - &mut collection_dependents, - &mut collection_members, - global.1, - )?; - } - // We update the layouts with their parents and children once all other pages have been inserted. - for (layout_path, layout_indices) in layouts.clone() { - for layout_index in layout_indices { - insert_or_update_page( - layout_path.clone(), - Some(layout_index), - &mut dag, - &mut pages, - &mut layouts, - &mut collection_dependents, - &mut collection_members, - global.1, - )?; - } - } - // We construct edges between collection members and dependents. - for (collection, members) in collection_members { - if let Some(dependents) = collection_dependents.get(&collection) { - for member in members { - for dependent in dependents { - dag.add_edge(member, *dependent, EdgeType::Collection) - .into_diagnostic()?; - } - } - } - } +fn build(watch: bool, visualise_dag: bool, generate_syntax_css: bool) -> miette::Result<()> { + let parser = FS_PROVIDER.create_liquid_parser()?; + let global = FsProvider::get_global_context()?; + let (mut dag, mut pages, mut layouts) = FsProvider::generate_dag()?; // Write the initial site to the output directory. info!("Performing initial build … "); - let (_updated_pages, updated_dag) = generate_site( + let (_updated_pages, updated_dag) = FsProvider::generate_site( parser.clone(), global.0.clone(), global.1, dag, visualise_dag, generate_syntax_css, - ) - .await?; + )?; dag = updated_dag; // Watch for changes to the site. 
@@ -413,503 +252,57 @@ async fn build(watch: bool, visualise_dag: bool, generate_syntax_css: bool) -> m new_debouncer(Duration::from_secs(1), None, sender).into_diagnostic()?; info!("Watching {:?} … ", current_path); debouncer - .watcher() .watch(¤t_path, RecursiveMode::Recursive) .into_diagnostic()?; - let mut global_or_snippets_changed = false; - loop { - if let Ok(events) = receiver.recv().into_diagnostic()? { - // Changes to the output directory or version control are irrelevant. - if !events.iter().any(|event| { - event - .paths - .iter() - .any(|path| !path.starts_with(&output_path) && !path.starts_with(&git_path)) - }) { - continue; - } - global_or_snippets_changed = events.iter().any(|event| { - event.paths.iter().any(|path| { - path.strip_prefix(current_path.clone()) - .unwrap_or(path) - .starts_with("global.toml") - || path - .strip_prefix(current_path.clone()) - .unwrap_or(path) - .starts_with("snippets/") - }) - }); - trace!( - "Changes detected: {:#?} … ", - events - .into_iter() - .map(|event| event - .paths - .clone() - .into_iter() - .map(|path| { - path.strip_prefix(current_path.clone()) - .unwrap_or(&path) - .to_path_buf() - }) - .collect::>()) - .collect::>() - ); - } - - // 1. Build a new DAG. - let parser = create_liquid_parser()?; - let global = get_global_context()?; - let mut new_dag = StableDag::new(); - let mut new_pages: AHashMap = AHashMap::new(); - let mut new_layouts: AHashMap> = AHashMap::new(); - let mut new_collection_dependents: AHashMap> = - AHashMap::new(); - let mut new_collection_members: AHashMap> = AHashMap::new(); - - // New DAG construction. - debug!("Constructing DAG … "); - for entry in glob("**/*.vox").into_diagnostic()? { - let entry = fs::canonicalize(entry.into_diagnostic()?).into_diagnostic()?; - if Page::is_layout_path(&entry)? 
{ - continue; - } - insert_or_update_page( - entry, - None, - &mut new_dag, - &mut new_pages, - &mut new_layouts, - &mut new_collection_dependents, - &mut new_collection_members, - global.1, - )?; - } - for (layout_path, layout_indices) in new_layouts.clone() { - for layout_index in layout_indices { - insert_or_update_page( - layout_path.clone(), - Some(layout_index), - &mut new_dag, - &mut new_pages, - &mut new_layouts, - &mut new_collection_dependents, - &mut new_collection_members, - global.1, - )?; - } - } - for (collection, members) in new_collection_members { - if let Some(dependents) = new_collection_dependents.get(&collection) { - for member in members { - for dependent in dependents { - new_dag - .add_edge(member, *dependent, EdgeType::Collection) - .into_diagnostic()?; - } - } - } - } - - // 2. Obtain the difference between the old and new DAGs; ie, calculate the set of added or modified nodes. - // - A node is modified if it has the same label, but its page is different (not comparing `url` or `rendered`). - // - If a node's page is the same (excluding `url` or `rendered`), it is unchanged. - // - A node is added if its label appears in the new DAG, but not the old one. - // - A node is removed if its label appears in the old DAG, but not the new one. - - let mut old_dag_pages = AHashMap::new(); - for (page_path, page_index) in &pages { - let page = dag.node_weight(*page_index).unwrap(); - old_dag_pages.insert(page_path.clone(), page); - } - let mut new_dag_pages = AHashMap::new(); - for (page_path, page_index) in &new_pages { - let page = new_dag.node_weight(*page_index).unwrap(); - new_dag_pages.insert(page_path.clone(), page); - } - let mut added_or_modified = AHashSet::new(); - let mut removed = AHashSet::new(); - let mut removed_output_paths = AHashSet::new(); - for (page_path, new_page) in new_dag_pages.iter() { - if let Some(old_page) = old_dag_pages.get(page_path) { - // If the page has been modified, its index is noted. 
- if !new_page.is_equivalent(old_page) { - added_or_modified.insert(new_pages[page_path]); - } - } else { - // If the page is new, its index is noted. - added_or_modified.insert(new_pages[page_path]); - } - } - // The ancestors of modified or added layouts are themselves modified or added. - for (layout_path, new_layout_indices) in &new_layouts { - let new_layout = new_dag - .node_weight(*new_layout_indices.iter().last().unwrap()) - .unwrap(); - if let Some(old_layout_indices) = layouts.get(layout_path) { - let old_layout = dag - .node_weight(*old_layout_indices.iter().last().unwrap()) - .unwrap(); - // Layout has been modified. - if !new_layout.is_equivalent(old_layout) { - for new_layout_index in new_layout_indices { - let ancestors = - Build::get_non_layout_ancestors(&new_dag, *new_layout_index)?; - for ancestor in ancestors { - added_or_modified.insert(ancestor); - } - } - } - } else { - // Layout is new. - for new_layout_index in new_layout_indices { - let ancestors = - Build::get_non_layout_ancestors(&new_dag, *new_layout_index)?; - for ancestor in ancestors { - added_or_modified.insert(ancestor); - } - } - } - } - // The ancestors of removed layouts are modified. - for (layout_path, old_layout_indices) in &layouts { - if new_layouts.get(layout_path).is_none() { - for old_layout_index in old_layout_indices { - let ancestors = Build::get_non_layout_ancestors(&dag, *old_layout_index)?; - let ancestor_paths = ancestors - .iter() - .map(|ancestor| { - PathBuf::from(dag.node_weight(*ancestor).unwrap().to_path_string()) - }) - .collect::>(); - for ancestor_path in ancestor_paths { - if let Some(ancestor_index) = new_pages.get(&ancestor_path) { - added_or_modified.insert(*ancestor_index); - } - } - } - } - } - for (page_path, _old_page) in old_dag_pages.iter() { - if new_dag_pages.get(page_path).is_none() { - // If the page has been removed, its index is noted. 
- removed.insert(pages[page_path]); - if let Some(old_page) = old_dag_pages.get(page_path) { - let output_path = if old_page.url.is_empty() { - let layout_url = get_layout_url(&pages[page_path], &dag); - layout_url.map(|layout_url| format!("output/{}", layout_url)) - } else if !old_page.url.is_empty() { - Some(format!("output/{}", old_page.url)) - } else { - None - }; - if output_path.is_none() { - warn!("Page has no URL: {:#?} … ", old_page.to_path_string()); - continue; - } - let output_path = output_path.unwrap(); - removed_output_paths - .insert(fs::canonicalize(output_path.clone()).into_diagnostic()?); - } - } - } - debug!("Removed pages: {:#?} … ", removed_output_paths); - // No need to continue if nothing changed. - if !global_or_snippets_changed - && added_or_modified.is_empty() - && removed.is_empty() - && removed_output_paths.is_empty() - { - info!("Nothing changed. Aborting rebuild … "); - continue; - } - - // 3. Compute which pages need to be rendered, noting their node IDs. - // - All pages that were modified need to be re-rendered. - // - Their descendants in the new DAG also need to be rendered. - // - All pages that were added need to be rendered. - // - Their descendants in the new DAG also need to be rendered. - // - All pages that were removed need their descendants in the new DAG rendered. - // - Their old output also needs to be deleted. - - let mut pages_to_render = added_or_modified.clone(); - // If the global context or snippets have changed, all pages need to be re-rendered. 
- if global_or_snippets_changed { - pages_to_render.extend(new_pages.values()); - pages_to_render.extend(new_layouts.values().flatten()); - } - for page_index in added_or_modified.clone() { - let descendants = Build::get_descendants(&new_dag, page_index); - for descendant in descendants { - pages_to_render.insert(descendant); - } - } - for page_index in removed.clone() { - let descendants = Build::get_descendants(&dag, page_index); - let descendant_page_paths = descendants + while let Ok(events) = receiver.recv().into_diagnostic()? { + // Changes to the output directory or version control are irrelevant. + if !events.iter().any(|event| { + event + .paths .iter() - .map(|descendant| { - PathBuf::from(dag.node_weight(*descendant).unwrap().to_path_string()) - }) - .collect::>(); - for descendant_page_path in descendant_page_paths { - if let Some(descendant_page_index) = new_pages.get(&descendant_page_path) { - pages_to_render.insert(*descendant_page_index); - } - } - } - // Only the root pages need to be passed to the rendering code, as it will recursively render their descendants. - for page_index in removed.clone() { - let children = dag.children(page_index).iter(&dag).collect::>(); - let child_page_paths = children - .iter() - .map(|child| PathBuf::from(dag.node_weight(child.1).unwrap().to_path_string())) - .collect::>(); - for child_page_path in child_page_paths { - if let Some(child_page_index) = new_pages.get(&child_page_path) { - pages_to_render.insert(*child_page_index); - } - } - } - - // 4. Merge the DAGs. - // - In the new DAG, replace all pages not needing rendering with their rendered counterparts from the old DAG. - - for (page_path, page_index) in &new_pages { - if !pages_to_render.contains(page_index) { - // Pages may be added, so it is necessary to check if the page already exists in the old DAG. 
- if let Some(old_page) = dag.node_weight(pages[page_path]) { - let new_page = new_dag.node_weight_mut(*page_index).unwrap(); - new_page.url.clone_from(&old_page.url); - new_page.rendered.clone_from(&old_page.rendered); - } - } - } - dag = new_dag; - trace!("Merged DAGs … "); - - // 5. Render & output the appropriate pages. - info!("Rebuilding … "); - let mut timer = Stopwatch::start_new(); - let mut build = Build { - template_parser: parser, - contexts: global.0, - locale: global.1, - dag, - }; - if visualise_dag { - build.visualise_dag()?; - } - - // Delete the output of removed pages. - for removed_output_path in removed_output_paths { - debug!("Removing {:?} … ", removed_output_path); - tokio::fs::remove_file(removed_output_path) - .await - .into_diagnostic()?; - } - - let mut rendered_pages = Vec::new(); - let render_order = toposort(&build.dag.graph(), None).unwrap_or_default(); - for page in render_order { - if pages_to_render.contains(&page) { - build.render_page(page, false, &mut rendered_pages)?; - } - } - - for updated_page_index in rendered_pages.iter() { - let updated_page = &build.dag.graph()[*updated_page_index]; - let output_path = get_output_path(updated_page, updated_page_index, &build); - if output_path.is_none() { - warn!("Page has no URL: {:#?} … ", updated_page.to_path_string()); - continue; - } - let output_path = output_path.unwrap(); - info!( - "Writing `{}` to `{}` … ", - updated_page.to_path_string(), - output_path - ); - tokio::fs::create_dir_all( - Path::new(&output_path) - .parent() - .unwrap_or(Path::new(&output_path)), - ) - .await - .into_diagnostic()?; - tokio::fs::write(output_path, updated_page.rendered.clone()) - .await - .into_diagnostic()?; - } - if generate_syntax_css { - generate_syntax_stylesheets()?; + .any(|path| !path.starts_with(&output_path) && !path.starts_with(&git_path)) + }) { + continue; } - timer.stop(); - println!( - "Generated {} pages in {:.2} seconds … ", - rendered_pages.len(), - timer.elapsed_s() + let 
global_or_snippets_changed = events.iter().any(|event| { + event.paths.iter().any(|path| { + path.strip_prefix(current_path.clone()) + .unwrap_or(path) + .starts_with("global.toml") + || path + .strip_prefix(current_path.clone()) + .unwrap_or(path) + .starts_with("snippets/") + }) + }); + trace!( + "Changes detected: {:#?} … ", + events + .into_iter() + .map(|event| event + .paths + .clone() + .into_iter() + .map(|path| { + path.strip_prefix(current_path.clone()) + .unwrap_or(&path) + .to_path_buf() + }) + .collect::>()) + .collect::>() ); - dag = build.dag; - } - } - - Ok(()) -} - -fn get_layout_url( - layout_node_index: &NodeIndex, - dag: &StableDag, -) -> Option { - let layout_node = dag.graph()[*layout_node_index].clone(); - if !layout_node.url.is_empty() { - return Some(layout_node.url); - } - - let parents = dag - .parents(*layout_node_index) - .iter(dag) - .collect::>(); - let mut result = String::new(); - for parent in parents { - if *dag.edge_weight(parent.0).unwrap() != EdgeType::Layout { - continue; - } - result = get_layout_url(&parent.1, dag)?; - } - if result.is_empty() { - None - } else { - Some(result) - } -} - -fn get_output_path(page: &Page, page_index: &NodeIndex, build: &Build) -> Option { - // If a page has no URL, it may be a layout. - // Layouts contain rendered content but must be written using their parent's URL. 
- - if page.url.is_empty() { - let layout_url = get_layout_url(page_index, &build.dag); - layout_url.map(|layout_url| format!("output/{}", layout_url)) - } else if !page.url.is_empty() { - Some(format!("output/{}", page.url)) - } else { - None - } -} -async fn generate_site( - template_parser: liquid::Parser, - contexts: liquid::Object, - locale: Locale, - dag: StableDag, - visualise_dag: bool, - generate_syntax_css: bool, -) -> miette::Result<(Vec, StableDag)> { - let mut timer = Stopwatch::start_new(); - let mut build = Build { - template_parser, - contexts, - locale, - dag, - }; - let updated_pages = build.render_all(visualise_dag)?; - info!("{} pages were rendered … ", updated_pages.len()); - for updated_page_index in updated_pages.iter() { - let updated_page = &build.dag.graph()[*updated_page_index]; - // If a page has no URL, it may be a layout. - // Layouts contain rendered content but must be written using their parent's URL. - let output_path = get_output_path(updated_page, updated_page_index, &build); - if output_path.is_none() { - warn!("Page has no URL: {:#?} … ", updated_page.to_path_string()); - continue; + (dag, pages, layouts) = FsProvider::incremental_regeneration( + global_or_snippets_changed, + parser.clone(), + visualise_dag, + generate_syntax_css, + dag, + pages, + layouts, + )?; } - let output_path = output_path.unwrap(); - info!( - "Writing `{}` to `{}` … ", - updated_page.to_path_string(), - output_path - ); - tokio::fs::create_dir_all( - Path::new(&output_path) - .parent() - .unwrap_or(Path::new(&output_path)), - ) - .await - .into_diagnostic()?; - tokio::fs::write(output_path, updated_page.rendered.clone()) - .await - .into_diagnostic()?; - } - if generate_syntax_css { - generate_syntax_stylesheets()?; } - timer.stop(); - println!( - "Generated {} pages in {:.2} seconds … ", - updated_pages.len(), - timer.elapsed_s() - ); - Ok((updated_pages, build.dag)) -} - -fn path_to_page(path: PathBuf, locale: Locale) -> miette::Result { - Page::new( 
- fs::read_to_string(path.clone()).into_diagnostic()?, - path, - locale, - ) -} - -fn get_global_context() -> miette::Result<(Object, Locale)> { - let global_context = match fs::read_to_string("global.toml") { - Ok(global_file) => global_file.parse::().into_diagnostic()?, - Err(_) => format!("locale = '{}'", date::default_locale_string()) - .parse::
() - .into_diagnostic()?, - }; - let locale: String = global_context - .get("locale") - .unwrap_or(&toml::Value::String(date::default_locale_string())) - .as_str() - .unwrap_or(&date::default_locale_string()) - .to_string(); - let locale = date::locale_string_to_locale(locale.clone()); - let current_date = Date::chrono_to_date(Utc::now(), locale); - Ok(( - object!({ - "global": global_context, - "meta": { - "builder": "Vox", - "version": crate_version!(), - "date": current_date, - } - }), - locale, - )) -} - -/// Generate stylesheets for syntax highlighting. -fn generate_syntax_stylesheets() -> miette::Result<()> { - let css_path = PathBuf::from("output/css/"); - let dark_css_path = css_path.join("dark-code.css"); - let light_css_path = css_path.join("light-code.css"); - let code_css_path = css_path.join("code.css"); - std::fs::create_dir_all(css_path).into_diagnostic()?; - - let ts = ThemeSet::load_defaults(); - let dark_theme = &ts.themes["base16-ocean.dark"]; - let css_dark = css_for_theme_with_class_style(dark_theme, syntect::html::ClassStyle::Spaced) - .into_diagnostic()?; - std::fs::write(dark_css_path, css_dark).into_diagnostic()?; - - let light_theme = &ts.themes["base16-ocean.light"]; - let css_light = css_for_theme_with_class_style(light_theme, syntect::html::ClassStyle::Spaced) - .into_diagnostic()?; - std::fs::write(light_css_path, css_light).into_diagnostic()?; - - let css = r#"@import url("light-code.css") (prefers-color-scheme: light);@import url("dark-code.css") (prefers-color-scheme: dark);"#; - std::fs::write(code_css_path, css).into_diagnostic()?; Ok(()) } diff --git a/src/page.rs b/src/page.rs index 01a338e..a191b2b 100644 --- a/src/page.rs +++ b/src/page.rs @@ -3,14 +3,15 @@ use crate::{ error::{DateNotValid, FrontmatterNotFound, InvalidDependsProperty}, }; use chrono::Locale; +use core::fmt; use liquid::{Object, Parser}; use miette::IntoDiagnostic; use miette::NamedSource; +use path_clean::PathClean; use serde::{Deserialize, Serialize}; use 
std::{ ffi::OsString, - fmt, fs, - path::{Path, PathBuf}, + path::{Component, PathBuf}, }; use toml::Table; @@ -65,14 +66,8 @@ impl Page { /// # Returns /// /// Whether or not the page is a layout. - pub fn is_layout_path>(path: P) -> miette::Result { - let current_directory = - fs::canonicalize(std::env::current_dir().into_diagnostic()?).into_diagnostic()?; - let page_path = fs::canonicalize(path).into_diagnostic()?; - let path_difference = page_path - .strip_prefix(¤t_directory) - .into_diagnostic()?; - Ok(path_difference.starts_with("layouts/")) + pub fn is_layout_path(path: impl Into) -> bool { + path.into().clean().starts_with("layouts/") } /// Get the names of the collections a page belongs to based on its path. @@ -84,42 +79,45 @@ impl Page { /// # Returns /// /// The names of the collections a page belongs to, or `None` if the page does not belong to a collection. - pub fn get_collections_from_path>( - path: P, + pub fn get_collections_from_path( + path: impl Into, ) -> miette::Result>> { - let current_directory = - fs::canonicalize(std::env::current_dir().into_diagnostic()?).into_diagnostic()?; - let page_path = fs::canonicalize(path).into_diagnostic()?; - let path_difference = page_path - .strip_prefix(¤t_directory) - .into_diagnostic()?; - let path_components: Vec = path_difference + let path_components: Vec<_> = path + .into() + .clean() .components() - .map(|c| c.as_os_str().to_string_lossy().to_string()) + .filter_map(|x| match x { + Component::CurDir => None, + Component::RootDir => None, + Component::ParentDir => None, + Component::Prefix(_) => None, + Component::Normal(x) => Some(PathBuf::from(x)), + }) + .filter(|x| x.extension().is_none()) .collect(); - let first_path_component = path_components[0].clone(); - if first_path_component == "layouts" { - return Ok(None); - } - if Path::new(first_path_component.as_str()).is_file() { - return Ok(None); + if let Some(first_path_component) = path_components.first() { + if 
first_path_component.extension().is_some() + || *first_path_component == PathBuf::from("layouts") + { + return Ok(None); + } } let mut results = Vec::new(); - let mut path_builder = Vec::new(); for path_component in path_components { - if Path::new(&path_builder.join("/")) - .join(&path_component) - .is_file() - { - break; - } - results.push(path_component.clone()); - path_builder.push(path_component.clone()); - let current_path = path_builder.join("_"); - if path_component != current_path { - results.push(path_builder.join("_")) - } + let path_to_this_component: Vec<_> = path_component + .components() + .filter_map(|x| match x { + Component::CurDir => None, + Component::RootDir => None, + Component::ParentDir => None, + Component::Prefix(_) => None, + Component::Normal(x) => Some(x.to_string_lossy().to_string()), + }) + .collect(); + results.push(path_to_this_component.join("_")); + results.push(path_component.to_string_lossy().to_string()) } + results.dedup(); Ok(Some(results)) } @@ -165,7 +163,7 @@ impl Page { /// # Returns /// /// Whether or not the page is a layout. - pub fn is_layout(&self) -> miette::Result { + pub fn is_layout(&self) -> bool { Page::is_layout_path(self.to_path_string()) } @@ -299,8 +297,8 @@ impl Page { /// # Returns /// /// An instance of a page. - pub fn new(contents: String, path: PathBuf, locale: Locale) -> miette::Result { - let path = fs::canonicalize(path).into_diagnostic()?; + pub fn new(contents: String, path: impl Into, locale: Locale) -> miette::Result { + let path = path.into().clean(); let (frontmatter, body) = Self::get_frontmatter_and_body(contents.clone(), path.clone())?; let frontmatter_data = frontmatter.parse::
().into_diagnostic()?; let frontmatter_data_clone = frontmatter_data.clone(); @@ -360,7 +358,7 @@ impl Page { .to_string_lossy() .to_string(), collections: Page::get_collections_from_path(path.clone())?, - is_layout: Page::is_layout_path(path)?, + is_layout: Page::is_layout_path(path), url: String::new(), rendered: String::new(), }) diff --git a/src/provider.rs b/src/provider.rs new file mode 100644 index 0000000..a844f8d --- /dev/null +++ b/src/provider.rs @@ -0,0 +1,1007 @@ +use crate::builds::EdgeType; +use crate::date::{self, Date}; +use crate::markdown_block::MarkdownBlock; +use crate::math_block::MathBlock; +use crate::{builds::Build, page::Page}; +use ahash::{AHashMap, AHashSet, HashSet, HashSetExt}; +use chrono::{Locale, Utc}; +use daggy::petgraph::algo::toposort; +use daggy::petgraph::dot::{Config, Dot}; +use daggy::Walker; +use daggy::{stable_dag::StableDag, NodeIndex}; +use layout::backends::svg::SVGWriter; +use layout::core::color::Color; +use layout::gv::{DotParser, GraphBuilder}; +use layout::std_shapes::shapes::ShapeKind; +use liquid::{object, Object}; +use miette::IntoDiagnostic; +use path_clean::PathClean; +use std::path::Path; +use std::path::PathBuf; +use syntect::highlighting::ThemeSet; +use syntect::html::css_for_theme_with_class_style; +use ticky::Stopwatch; +use toml::Table; +use tracing::{debug, error, info, trace, warn}; + +/// The Vox crate version number. +pub const VERSION: &str = env!("CARGO_PKG_VERSION"); + +/// An implementation of the Vox build process. +pub trait VoxProvider: core::fmt::Debug + core::marker::Sized + Sync { + /// Read a file's contents as a string. + /// + /// # Arguments + /// + /// * `path` - The path to the file. + /// + /// # Returns + /// + /// The file's contents as a string. + fn read_to_string(path: impl AsRef) -> miette::Result; + + /// Write data to a file. + /// + /// # Arguments + /// + /// * `path` - The path to the file. + /// + /// * `contents` - The bytes to be written. 
+ fn write_file( + path: impl AsRef + Clone, + contents: impl AsRef<[u8]>, + ) -> miette::Result<()>; + + /// Remove a file. + /// + /// # Arguments + /// + /// * `path` - The path to the file. + fn remove_file(path: impl AsRef) -> miette::Result<()>; + + /// List all Vox pages. + /// + /// # Returns + /// + /// A list of paths to Vox pages. + fn list_vox_files() -> miette::Result>; + + /// List all Vox snippets. + /// + /// # Returns + /// + /// A list of paths to Vox snippets. + fn list_snippets() -> miette::Result>; + + /// Obtain a source of Liquid partials. + /// + /// # Returns + /// + /// A source of Liquid partials, initially with an empty list of snippets. + fn partial_source(&self) -> PartialSource<&Self> { + PartialSource(self, Vec::new()) + } + + /// Create a Liquid parser. + /// + /// # Returns + /// + /// A Liquid parser. + fn create_liquid_parser(&'static self) -> miette::Result { + let mut partials = self.partial_source(); + partials.update_list(); + let partial_compiler = liquid::partials::EagerCompiler::new(partials); + liquid::ParserBuilder::with_stdlib() + .tag(liquid_lib::jekyll::IncludeTag) + .filter(liquid_lib::jekyll::ArrayToSentenceString) + .filter(liquid_lib::jekyll::Pop) + .filter(liquid_lib::jekyll::Push) + .filter(liquid_lib::jekyll::Shift) + .filter(liquid_lib::jekyll::Slugify) + .filter(liquid_lib::jekyll::Unshift) + .filter(liquid_lib::jekyll::Sort) + .filter(liquid_lib::shopify::Pluralize) + .filter(liquid_lib::extra::DateInTz) + .block(MathBlock) + .block(MarkdownBlock) + .partials(partial_compiler) + .build() + .into_diagnostic() + } + + /// Given a path and locale, get a page. + /// + /// # Arguments + /// + /// * `path` - The path to the page. + /// + /// * `locale` - The locale for date formatting. + /// + /// # Returns + /// + /// A Vox page. + fn path_to_page(path: PathBuf, locale: Locale) -> miette::Result { + Page::new(Self::read_to_string(path.clone())?, path, locale) + } + + /// Get the global Liquid context. 
+ /// + /// # Returns + /// + /// The global Liquid context and detected locale. + fn get_global_context() -> miette::Result<(Object, Locale)> { + let global_context = match Self::read_to_string("global.toml") { + Ok(global_file) => global_file.parse::
().into_diagnostic()?, + Err(_) => format!("locale = '{}'", date::default_locale_string()) + .parse::
() + .into_diagnostic()?, + }; + let locale: String = global_context + .get("locale") + .unwrap_or(&toml::Value::String(date::default_locale_string())) + .as_str() + .unwrap_or(&date::default_locale_string()) + .to_string(); + let locale = date::locale_string_to_locale(locale.clone()); + let current_date = Date::chrono_to_date(Utc::now(), locale); + Ok(( + object!({ + "global": global_context, + "meta": { + "builder": "Vox", + "version": VERSION, + "date": current_date, + } + }), + locale, + )) + } + + /// Upsert a page into a DAG. + /// + /// # Arguments + /// + /// * `entry` - The path to the page. + /// + /// * `layout_index` - The DAG index of the page's layout, if it has one. + /// + /// * `dag` - The DAG to upsert the page into. + /// + /// * `pages` - Mapping of paths to DAG indices. + /// + /// * `layouts` - Mapping of paths to a set of DAG indices. + /// + /// * `collection_dependents` - Mapping of collection names to a set of dependent pages. + /// + /// * `collection_members` - Mapping of collection names to a set of pages in said collection. + /// + /// * `locale` - The locale for date formatting. + #[allow(clippy::too_many_arguments)] + fn insert_or_update_page( + entry: PathBuf, + layout_index: Option, + dag: &mut StableDag, + pages: &mut AHashMap, + layouts: &mut AHashMap>, + collection_dependents: &mut AHashMap>, + collection_members: &mut AHashMap>, + locale: Locale, + ) -> miette::Result<()> { + let entry = entry.clean(); + let (page, index) = if !Page::is_layout_path(&entry) { + debug!("Inserting or updating page: {:?} … ", entry); + let page = Self::path_to_page(entry.clone(), locale)?; + // If the page already exists in the DAG, update it. Otherwise, insert it. 
+ let index = if pages.contains_key(&entry) { + debug!("Updating page: {:?} … ", entry); + let index = pages[&entry]; + let node = dag.node_weight_mut(index).unwrap(); + *node = page.clone(); + index + } else { + debug!("Inserting page: {:?} … ", entry); + let index = dag.add_node(page.clone()); + pages.insert(entry, index); + index + }; + (page, index) + } else { + debug!("Inserting layout: {:?} … ", entry); + let index = layout_index.unwrap(); + let page = dag.graph()[layout_index.unwrap()].clone(); + (page, index) + }; + + // A page's parents are pages in the collections it depends on. Its layout is a child. + let layout = page.layout.clone(); + let collections = page.collections.clone(); + let depends = page.depends.clone(); + debug!("Layout used: {:?} … ", layout); + debug!("Collections used: {:?} … ", depends); + if let Some(layout) = layout { + // Layouts are inserted multiple times, once for each page that uses them. + let layout_path = PathBuf::from(format!("layouts/{}.vox", layout)).clean(); + let children = dag.children(index).iter(dag).collect::>(); + // If this page is being updated, the old layout should be replaced with the current one in the DAG. 
+ let old_layout = children + .iter() + .find(|child| *dag.edge_weight(child.0).unwrap() == EdgeType::Layout); + if let Some(old_layout) = old_layout { + trace!("Removing old layout … "); + dag.remove_node(old_layout.1); + } + debug!("Inserting layout: {:?} … ", layout_path); + let layout_page = Self::path_to_page(layout_path.clone(), locale)?; + let layout_index = dag.add_child(index, EdgeType::Layout, layout_page); + if let Some(layouts) = layouts.get_mut(&layout_path) { + layouts.insert(layout_index.1); + } else { + let mut new_set = HashSet::new(); + new_set.insert(layout_index.1); + layouts.insert(layout_path.clone(), new_set); + } + } + if let Some(collections) = collections { + for collection in collections { + if let Some(collection_members) = collection_members.get_mut(&collection) { + collection_members.insert(index); + } else { + let mut new_set = HashSet::new(); + new_set.insert(index); + collection_members.insert(collection.clone(), new_set); + } + } + } + if let Some(depends) = depends { + for collection in depends { + if let Some(collection_dependents) = collection_dependents.get_mut(&collection) { + collection_dependents.insert(index); + } else { + let mut new_set = HashSet::new(); + new_set.insert(index); + collection_dependents.insert(collection.clone(), new_set); + } + } + } + + Ok(()) + } + + /// Obtain the URL of a layout page. + /// + /// # Arguments + /// + /// * `layout_node_index` - The index of a layout page in a DAG. + /// + /// * `dag` - The DAG containing the layout page. + /// + /// # Returns + /// + /// The layout page's URL. 
+ fn get_layout_url( + layout_node_index: &NodeIndex, + dag: &StableDag, + ) -> Option { + let layout_node = dag.graph()[*layout_node_index].clone(); + if !layout_node.url.is_empty() { + return Some(layout_node.url); + } + + let parents = dag + .parents(*layout_node_index) + .iter(dag) + .collect::>(); + let mut result = String::new(); + for parent in parents { + if *dag.edge_weight(parent.0).unwrap() != EdgeType::Layout { + continue; + } + result = Self::get_layout_url(&parent.1, dag)?; + } + if result.is_empty() { + None + } else { + Some(result) + } + } + + /// Obtain the output path of a page. + /// + /// # Arguments + /// + /// * `page` - The page. + /// + /// * `page_index` - The index of the page in the build's DAG. + /// + /// * `build` - The build. + /// + /// # Returns + /// + /// The page's output path. + fn get_output_path(page: &Page, page_index: &NodeIndex, build: &Build) -> Option { + // If a page has no URL, it may be a layout. + // Layouts contain rendered content but must be written using their parent's URL. + + if page.url.is_empty() { + let layout_url = Self::get_layout_url(page_index, &build.dag); + layout_url.map(|layout_url| format!("output/{}", layout_url)) + } else if !page.url.is_empty() { + Some(format!("output/{}", page.url)) + } else { + None + } + } + + /// Generate stylesheets for syntax highlighting. 
+ fn generate_syntax_stylesheets() -> miette::Result<()> { + let css_path = PathBuf::from("output/css/"); + let dark_css_path = css_path.join("dark-code.css"); + let light_css_path = css_path.join("light-code.css"); + let code_css_path = css_path.join("code.css"); + + let ts = ThemeSet::load_defaults(); + let dark_theme = &ts.themes["base16-ocean.dark"]; + let css_dark = + css_for_theme_with_class_style(dark_theme, syntect::html::ClassStyle::Spaced) + .into_diagnostic()?; + Self::write_file(dark_css_path, css_dark)?; + + let light_theme = &ts.themes["base16-ocean.light"]; + let css_light = + css_for_theme_with_class_style(light_theme, syntect::html::ClassStyle::Spaced) + .into_diagnostic()?; + Self::write_file(light_css_path, css_light)?; + + let css = r#"@import url("light-code.css") (prefers-color-scheme: light);@import url("dark-code.css") (prefers-color-scheme: dark);"#; + Self::write_file(code_css_path, css)?; + Ok(()) + } + + /// Output a visualisation of a build's DAG. + /// + /// # Arguments + /// + /// * `build` - A Vox build. 
+ fn visualise_dag(build: &Build) -> miette::Result<()> { + let dag_graph = build.dag.graph(); + let dag_graphviz = Dot::with_attr_getters( + dag_graph, + &[Config::NodeNoLabel, Config::EdgeNoLabel], + &|_graph, edge| format!("label = \"{:?}\"", edge.weight()), + &|_graph, node| { + let path = PathBuf::from(node.1.to_path_string()).clean(); + let label = path.to_string_lossy().to_string(); + format!("label = \"{}\"", label) + }, + ); + let mut parser = DotParser::new(&format!("{:?}", dag_graphviz)); + let tree = parser.process(); + if let Ok(tree) = tree { + let mut gb = GraphBuilder::new(); + gb.visit_graph(&tree); + let mut vg = gb.get(); + let mut svg = SVGWriter::new(); + for node_handle in vg.iter_nodes() { + let node = vg.element_mut(node_handle); + let old_shape = node.shape.clone(); + if let ShapeKind::Circle(label) = old_shape { + node.shape = ShapeKind::Box(label.clone()); + if Page::is_layout_path(label.clone()) { + node.look.fill_color = Some(Color::fast("#FFDFBA")); + } else { + match Page::get_collections_from_path(label)? { + Some(_) => { + node.look.fill_color = Some(Color::fast("#DAFFBA")); + } + None => { + node.look.fill_color = Some(Color::fast("#BADAFF")); + } + } + } + } + } + vg.do_it(false, false, false, &mut svg); + let content = svg.finalize(); + Self::write_file("output/dag.svg", content)?; + } else { + warn!("Unable to visualise the DAG.") + } + Ok(()) + } + + /// Perform an initial build of a Vox site. + /// + /// # Arguments + /// + /// * `template_parser` - A Liquid parser. + /// + /// * `contexts` - The Liquid contexts to render with. + /// + /// * `locale` - The locale for date formatting. + /// + /// * `dag` - The DAG representing the structure of the site. + /// + /// * `visualise_dag` - Whether or not to output a visualisation of the DAG. + /// + /// * `generate_syntax_css` - Whether or not to output a stylesheet for syntax highlighting. 
+ /// + /// # Returns + /// + /// A list of rendered pages and the DAG of the finished Vox build. + fn generate_site( + template_parser: liquid::Parser, + contexts: liquid::Object, + locale: Locale, + dag: StableDag, + visualise_dag: bool, + generate_syntax_css: bool, + ) -> miette::Result<(Vec, StableDag)> { + let mut timer = Stopwatch::start_new(); + let mut build = Build { + template_parser, + contexts, + locale, + dag, + }; + let updated_pages = build.render_all()?; + if visualise_dag { + Self::visualise_dag(&build)?; + } + info!("{} pages were rendered … ", updated_pages.len()); + for updated_page_index in updated_pages.iter() { + let updated_page = &build.dag.graph()[*updated_page_index]; + // If a page has no URL, it may be a layout. + // Layouts contain rendered content but must be written using their parent's URL. + let output_path = Self::get_output_path(updated_page, updated_page_index, &build); + match output_path { + None => { + warn!("Page has no URL: {:#?} … ", updated_page.to_path_string()); + continue; + } + Some(output_path) => { + info!( + "Writing `{}` to `{}` … ", + updated_page.to_path_string(), + output_path + ); + Self::write_file(output_path, updated_page.rendered.clone())?; + } + } + } + if generate_syntax_css { + Self::generate_syntax_stylesheets()?; + } + timer.stop(); + info!( + "Generated {} pages in {:.2} seconds … ", + updated_pages.len(), + timer.elapsed_s() + ); + Ok((updated_pages, build.dag)) + } + + /// Perform the rendering pipeline after changes have been detected. + /// + /// # Arguments + /// + /// * `global_or_snippets_changed` - Whether the global context or any snippets have changed. + /// + /// * `parser` - A Liquid parser. + /// + /// * `visualise_dag` - Whether or not to output a visualisation of the DAG. + /// + /// * `generate_syntax_css` - Whether or not to output a stylesheet for syntax highlighting. + /// + /// * `old_dag` - The former DAG. + /// + /// * `old_pages` - Former mapping of paths to DAG indices. 
+ /// + /// * `old_layouts` - Former mapping of paths to a set of DAG indices. + /// + /// # Returns + /// + /// The DAG of the new finished Vox build, a new mapping of paths to DAG indices, and a new mapping of paths to a set of DAG indices. + #[allow(clippy::type_complexity)] + fn incremental_regeneration( + global_or_snippets_changed: bool, + parser: liquid::Parser, + visualise_dag: bool, + generate_syntax_css: bool, + old_dag: StableDag, + old_pages: AHashMap, + old_layouts: AHashMap>, + ) -> miette::Result<( + StableDag, + AHashMap>, + AHashMap>, + )> { + let (mut new_dag, new_pages, new_layouts) = Self::generate_dag()?; + let (added_or_modified, removed, removed_output_paths) = Self::get_dag_difference( + &old_dag, + &old_pages, + &old_layouts, + &new_dag, + &new_pages, + &new_layouts, + )?; + let pages_to_render = Self::pages_to_render( + &old_dag, + &new_dag, + &new_pages, + &new_layouts, + global_or_snippets_changed, + added_or_modified, + removed, + )?; + Self::merge_dags( + &pages_to_render, + old_dag, + &mut new_dag, + old_pages, + &new_pages, + )?; + Ok(( + Self::output_regenerated( + visualise_dag, + generate_syntax_css, + parser, + removed_output_paths, + new_dag, + pages_to_render, + )?, + new_pages, + new_layouts, + )) + } + + /// Fifth stage of the rendering pipeline. + /// + /// Render & output the appropriate pages. + /// + /// # Arguments + /// + /// * `visualise_dag` - Whether or not to output a visualisation of the DAG. + /// + /// * `generate_syntax_css` - Whether or not to output a stylesheet for syntax highlighting. + /// + /// * `parser` - A Liquid parser. + /// + /// * `removed_output_paths` - A set of paths pointing to removed output files. + /// + /// * `new_dag` - The current DAG to use in rendering. + /// + /// * `pages_to_render` - A set of pages needing to be rendered. + /// + /// # Returns + /// + /// The DAG of the new finished Vox build. 
+ fn output_regenerated( + visualise_dag: bool, + generate_syntax_css: bool, + parser: liquid::Parser, + removed_output_paths: AHashSet, + new_dag: StableDag, + pages_to_render: AHashSet, + ) -> miette::Result> { + let global = Self::get_global_context()?; + info!("Rebuilding … "); + let mut timer = Stopwatch::start_new(); + let mut build = Build { + template_parser: parser, + contexts: global.0, + locale: global.1, + dag: new_dag, + }; + if visualise_dag { + Self::visualise_dag(&build)?; + } + + // Delete the output of removed pages. + for removed_output_path in removed_output_paths { + debug!("Removing {:?} … ", removed_output_path); + Self::remove_file(removed_output_path)?; + } + + let mut rendered_pages = Vec::new(); + let render_order = toposort(&build.dag.graph(), None).unwrap_or_default(); + for page in render_order + .iter() + .filter(|page| pages_to_render.contains(page)) + { + build.render_page(*page, false, &mut rendered_pages)?; + } + + for updated_page_index in rendered_pages.iter() { + let updated_page = &build.dag.graph()[*updated_page_index]; + let output_path = Self::get_output_path(updated_page, updated_page_index, &build); + match output_path { + None => { + warn!("Page has no URL: {:#?} … ", updated_page.to_path_string()); + continue; + } + Some(output_path) => { + info!( + "Writing `{}` to `{}` … ", + updated_page.to_path_string(), + output_path + ); + Self::write_file(output_path, updated_page.rendered.clone())?; + } + } + } + if generate_syntax_css { + Self::generate_syntax_stylesheets()?; + } + timer.stop(); + info!( + "Generated {} pages in {:.2} seconds … ", + rendered_pages.len(), + timer.elapsed_s() + ); + Ok(build.dag) + } + + /// Fourth stage of the rendering pipeline. + /// + /// Merge the DAGs. + /// - In the new DAG, replace all pages not needing rendering with their rendered counterparts from the old DAG. + /// + /// # Arguments + /// + /// * `pages_to_render` - A set of pages needing to be rendered. 
+ /// + /// * `old_dag` - The former DAG. + /// + /// * `new_dag` - The current DAG that the former DAG is to be merged into. + /// + /// * `old_pages` - Former mapping of paths to DAG indices. + /// + /// * `new_pages` - New mapping of paths to DAG indices. + fn merge_dags( + pages_to_render: &AHashSet, + old_dag: StableDag, + new_dag: &mut StableDag, + old_pages: AHashMap, + new_pages: &AHashMap, + ) -> miette::Result<()> { + for (page_path, page_index) in new_pages { + if !pages_to_render.contains(page_index) { + // Pages may be added, so it is necessary to check if the page already exists in the old DAG. + if let Some(old_page) = old_dag.node_weight(old_pages[page_path]) { + let new_page = new_dag.node_weight_mut(*page_index).unwrap(); + new_page.url.clone_from(&old_page.url); + new_page.rendered.clone_from(&old_page.rendered); + } + } + } + Ok(()) + } + + /// Third stage of the rendering pipeline. + /// + /// Compute which pages need to be rendered, noting their node IDs. + /// - All pages that were modified need to be re-rendered. + /// - Their descendants in the new DAG also need to be rendered. + /// - All pages that were added need to be rendered. + /// - Their descendants in the new DAG also need to be rendered. + /// - All pages that were removed need their descendants in the new DAG rendered. + /// - Their old output also needs to be deleted. + /// + /// # Arguments + /// + /// * `old_dag` - The former DAG. + /// + /// * `new_dag` - The new DAG. + /// + /// * `new_pages` - New mapping of paths to DAG indices. + /// + /// * `new_layouts` - New mapping of paths to a set of DAG indices. + /// + /// * `global_or_snippets_changed` - Whether the global context or any snippets have changed. + /// + /// * `added_or_modified` - A set of pages that were added or modified. + /// + /// * `removed` - A set of pages that were removed. + /// + /// # Returns + /// + /// A set of pages needing to be rendered. 
+ fn pages_to_render( + old_dag: &StableDag, + new_dag: &StableDag, + new_pages: &AHashMap, + new_layouts: &AHashMap>, + global_or_snippets_changed: bool, + added_or_modified: AHashSet, + removed: AHashSet, + ) -> miette::Result> { + let mut pages_to_render = added_or_modified.clone(); + // If the global context or snippets have changed, all pages need to be re-rendered. + if global_or_snippets_changed { + pages_to_render.extend(new_pages.values()); + pages_to_render.extend(new_layouts.values().flatten()); + } + for page_index in added_or_modified.clone() { + let descendants = Build::get_descendants(new_dag, page_index); + for descendant in descendants { + pages_to_render.insert(descendant); + } + } + for page_index in removed.clone() { + let descendants = Build::get_descendants(old_dag, page_index); + for descendant_page_index in descendants + .iter() + .filter_map(|descendant| { + old_dag.node_weight(*descendant).map(|x| x.to_path_string()) + }) + .map(PathBuf::from) + .filter_map(|x| new_pages.get(&x)) + { + pages_to_render.insert(*descendant_page_index); + } + } + // Only the root pages need to be passed to the rendering code, as it will recursively render their descendants. + for page_index in removed.clone() { + let children = old_dag + .children(page_index) + .iter(old_dag) + .collect::>(); + for child_page_index in children + .iter() + .filter_map(|child| old_dag.node_weight(child.1).map(|x| x.to_path_string())) + .map(PathBuf::from) + .filter_map(|x| new_pages.get(&x)) + { + pages_to_render.insert(*child_page_index); + } + } + Ok(pages_to_render) + } + + /// Second stage of the rendering pipeline. + /// + /// Obtain the difference between the old and new DAGs; ie, calculate the set of added or modified nodes. + /// - A node is modified if it has the same label, but its page is different (not comparing `url` or `rendered`). + /// - If a node's page is the same (excluding `url` or `rendered`), it is unchanged. 
+ /// - A node is added if its label appears in the new DAG, but not the old one. + /// - A node is removed if its label appears in the old DAG, but not the new one. + /// + /// # Arguments + /// + /// * `old_dag` - The former DAG. + /// + /// * `old_pages` - Former mapping of paths to DAG indices. + /// + /// * `old_layouts` - Former mapping of paths to a set of DAG indices. + /// + /// * `new_dag` - The new DAG. + /// + /// * `new_pages` - New mapping of paths to DAG indices. + /// + /// * `new_layouts` - New mapping of paths to a set of DAG indices. + /// + /// # Returns + /// + /// A set of pages that were added or modified, a set of pages that were removed, and a set of paths pointing to removed output files. + fn get_dag_difference( + old_dag: &StableDag, + old_pages: &AHashMap, + old_layouts: &AHashMap>, + new_dag: &StableDag, + new_pages: &AHashMap, + new_layouts: &AHashMap>, + ) -> miette::Result<(AHashSet, AHashSet, AHashSet)> { + let mut old_dag_pages = AHashMap::new(); + for (page_path, page) in old_pages.iter().filter_map(|(page_path, page_index)| { + old_dag.node_weight(*page_index).map(|x| (page_path, x)) + }) { + old_dag_pages.insert(page_path, page); + } + let mut new_dag_pages = AHashMap::new(); + for (page_path, page) in new_pages.iter().filter_map(|(page_path, page_index)| { + new_dag.node_weight(*page_index).map(|x| (page_path, x)) + }) { + new_dag_pages.insert(page_path.clone(), page); + } + let mut added_or_modified = AHashSet::new(); + let mut removed = AHashSet::new(); + let mut removed_output_paths = AHashSet::new(); + for (page_path, new_page) in new_dag_pages.iter() { + match old_dag_pages.get(page_path) { + // If the page has been modified, its index is noted. + Some(old_page) => { + if !new_page.is_equivalent(old_page) { + added_or_modified.insert(new_pages[page_path]); + } + } + // If the page is new, its index is noted. 
+ None => { + added_or_modified.insert(new_pages[page_path]); + } + } + } + // The ancestors of modified or added layouts are themselves modified or added. + for (layout_path, new_layout_indices) in new_layouts { + let new_layout = new_layout_indices + .iter() + .last() + .and_then(|x| new_dag.node_weight(*x)); + match old_layouts.get(layout_path) { + Some(old_layout_indices) => { + let old_layout = old_layout_indices + .iter() + .last() + .and_then(|x| old_dag.node_weight(*x)); + // Layout has been modified. + if !matches!((new_layout, old_layout), (Some(new_layout), Some(old_layout)) if new_layout.is_equivalent(old_layout)) + { + for new_layout_index in new_layout_indices { + let ancestors = + Build::get_non_layout_ancestors(new_dag, *new_layout_index)?; + for ancestor in ancestors { + added_or_modified.insert(ancestor); + } + } + } + } + None => { + // Layout is new. + for new_layout_index in new_layout_indices { + let ancestors = + Build::get_non_layout_ancestors(new_dag, *new_layout_index)?; + for ancestor in ancestors { + added_or_modified.insert(ancestor); + } + } + } + } + } + // The ancestors of removed layouts are modified. + for (layout_path, old_layout_indices) in old_layouts { + if new_layouts.get(layout_path).is_none() { + for old_layout_index in old_layout_indices { + let ancestors = Build::get_non_layout_ancestors(old_dag, *old_layout_index)?; + let ancestor_paths = ancestors + .iter() + .map(|ancestor| { + PathBuf::from(old_dag.node_weight(*ancestor).unwrap().to_path_string()) + }) + .collect::>(); + for ancestor_path in ancestor_paths { + if let Some(ancestor_index) = new_pages.get(&ancestor_path) { + added_or_modified.insert(*ancestor_index); + } + } + } + } + } + for (page_path, _old_page) in old_dag_pages.iter() { + if new_dag_pages.get(*page_path).is_none() { + // If the page has been removed, its index is noted. 
+                removed.insert(old_pages[*page_path]);
+                if let Some(old_page) = old_dag_pages.get(page_path) {
+                    let output_path = if old_page.url.is_empty() {
+                        let layout_url = Self::get_layout_url(&old_pages[*page_path], old_dag);
+                        layout_url.map(|layout_url| format!("output/{}", layout_url))
+                    } else if !old_page.url.is_empty() {
+                        Some(format!("output/{}", old_page.url))
+                    } else {
+                        None
+                    };
+                    match output_path {
+                        None => {
+                            warn!("Page has no URL: {:#?} … ", old_page.to_path_string());
+                            continue;
+                        }
+                        Some(output_path) => {
+                            removed_output_paths.insert(PathBuf::from(output_path));
+                        }
+                    }
+                }
+            }
+        }
+        Ok((added_or_modified, removed, removed_output_paths))
+    }
+
+    /// First stage of the rendering pipeline.
+    ///
+    /// Constructing a DAG.
+    ///
+    /// # Returns
+    ///
+    /// The new DAG, a mapping of paths to DAG indices, and a mapping of paths to a set of DAG indices.
+    #[allow(clippy::type_complexity)]
+    fn generate_dag() -> miette::Result<(
+        StableDag<Page, EdgeType>,
+        AHashMap<PathBuf, NodeIndex>,
+        AHashMap<PathBuf, Vec<NodeIndex>>,
+    )> {
+        let global = Self::get_global_context()?;
+        let mut dag = StableDag::new();
+        let mut pages: AHashMap<PathBuf, NodeIndex> = AHashMap::new();
+        let mut layouts: AHashMap<PathBuf, Vec<NodeIndex>> = AHashMap::new();
+        let mut collection_dependents: AHashMap<String, Vec<NodeIndex>> = AHashMap::new();
+        let mut collection_members: AHashMap<String, Vec<NodeIndex>> = AHashMap::new();
+
+        // DAG construction.
+        debug!("Constructing DAG … ");
+        // In the event that a layout has collection parents, we do not want it duplicated, so we avoid inserting it at first.
+        for entry in Self::list_vox_files()?
+            .into_iter()
+            .filter(|x| !Page::is_layout_path(x))
+        {
+            Self::insert_or_update_page(
+                entry,
+                None,
+                &mut dag,
+                &mut pages,
+                &mut layouts,
+                &mut collection_dependents,
+                &mut collection_members,
+                global.1,
+            )?;
+        }
+        // We update the layouts with their parents and children once all other pages have been inserted.
+        for (layout_path, layout_indices) in layouts.clone() {
+            for layout_index in layout_indices {
+                Self::insert_or_update_page(
+                    layout_path.clone(),
+                    Some(layout_index),
+                    &mut dag,
+                    &mut pages,
+                    &mut layouts,
+                    &mut collection_dependents,
+                    &mut collection_members,
+                    global.1,
+                )?;
+            }
+        }
+        // We construct edges between collection members and dependents.
+        for (collection, members) in collection_members {
+            if let Some(dependents) = collection_dependents.get(&collection) {
+                for member in members {
+                    for dependent in dependents {
+                        dag.add_edge(member, *dependent, EdgeType::Collection)
+                            .into_diagnostic()?;
+                    }
+                }
+            }
+        }
+        Ok((dag, pages, layouts))
+    }
+}
+
+#[derive(Debug, Clone)]
+/// A source of Liquid partials.
+///
+/// Composed of a Vox provider and a list of snippets.
+pub struct PartialSource<T: VoxProvider>(T, Vec<String>);
+impl<T: VoxProvider> PartialSource<&'_ T> {
+    /// Refresh the internal list of snippets.
+    pub fn update_list(&mut self) {
+        self.1 = match T::list_snippets() {
+            Ok(snippets) => snippets
+                .iter()
+                .filter_map(|x| x.file_name())
+                .map(|x| x.to_string_lossy().to_string())
+                .collect(),
+            Err(e) => {
+                error!("{}", e);
+                Vec::new()
+            }
+        }
+    }
+}
+impl<T: VoxProvider> liquid::partials::PartialSource for PartialSource<&'_ T> {
+    fn contains(&self, name: &str) -> bool {
+        self.1.contains(&name.to_owned())
+    }
+    fn names(&self) -> Vec<&str> {
+        self.1.iter().map(|s| s.as_str()).collect()
+    }
+    fn try_get<'a>(&'a self, name: &str) -> Option<Cow<'a, str>> {
+        T::read_to_string(format!("snippets/{}", name))
+            .ok()
+            .map(|x| x.into())
+    }
+}
diff --git a/src/templates.rs b/src/templates.rs
deleted file mode 100644
index 127ceb5..0000000
--- a/src/templates.rs
+++ /dev/null
@@ -1,88 +0,0 @@
-use crate::{markdown_block::MarkdownBlock, math_block::MathBlock};
-use glob::glob;
-use miette::IntoDiagnostic;
-use std::{
-    borrow::Cow,
-    path::{Path, PathBuf},
-};
-
-#[derive(Eq, PartialEq, PartialOrd, Clone, Default, Debug)]
-/// A source of snippets for Liquid templates.
-pub struct SnippetSource {
-    names: Vec<String>,
-}
-
-impl SnippetSource {
-    /// Create a new `SnippetSource`.
-    ///
-    /// # Returns
-    ///
-    /// A new `SnippetSource` with an empty list of snippets.
-    pub fn new() -> Self {
-        Self { names: Vec::new() }
-    }
-
-    /// Update the list of snippets.
-    pub fn update_list(&mut self) {
-        self.names.clear();
-        if let Ok(snippets_directory) = std::fs::read_dir("snippets") {
-            for entry in snippets_directory.flatten() {
-                if entry.file_type().unwrap().is_file() {
-                    let name = entry
-                        .path()
-                        .file_name()
-                        .unwrap()
-                        .to_string_lossy()
-                        .to_string();
-                    self.names.push(name);
-                }
-            }
-        }
-    }
-}
-
-impl liquid::partials::PartialSource for SnippetSource {
-    fn contains(&self, name: &str) -> bool {
-        Path::new(&format!("snippets/{}", name)).exists()
-            || glob(&format!("snippets/{}.*", name))
-                .unwrap()
-                .next()
-                .is_some()
-    }
-
-    fn names(&self) -> Vec<&str> {
-        self.names.iter().map(|s| s.as_str()).collect()
-    }
-
-    fn try_get(&self, name: &str) -> Option<Cow<'_, str>> {
-        let path = PathBuf::from(format!("snippets/{}", name));
-        Some(std::fs::read_to_string(path).unwrap().into())
-    }
-}
-
-/// Create a Liquid parser with custom tags and filters.
-///
-/// # Returns
-///
-/// A Liquid parser with custom tags and filters.
-pub fn create_liquid_parser() -> miette::Result<liquid::Parser> {
-    let mut partials = SnippetSource::new();
-    partials.update_list();
-    let partial_compiler = liquid::partials::EagerCompiler::new(partials);
-    liquid::ParserBuilder::with_stdlib()
-        .tag(liquid_lib::jekyll::IncludeTag)
-        .filter(liquid_lib::jekyll::ArrayToSentenceString)
-        .filter(liquid_lib::jekyll::Pop)
-        .filter(liquid_lib::jekyll::Push)
-        .filter(liquid_lib::jekyll::Shift)
-        .filter(liquid_lib::jekyll::Slugify)
-        .filter(liquid_lib::jekyll::Unshift)
-        .filter(liquid_lib::jekyll::Sort)
-        .filter(liquid_lib::shopify::Pluralize)
-        .filter(liquid_lib::extra::DateInTz)
-        .block(MathBlock)
-        .block(MarkdownBlock)
-        .partials(partial_compiler)
-        .build()
-        .into_diagnostic()
-}