From 733afdc98facfdcb655b62430ea178063d376cc0 Mon Sep 17 00:00:00 2001
From: Emil Sayahi <97276123+emmyoh@users.noreply.github.com>
Date: Thu, 30 May 2024 17:00:14 -0400
Subject: [PATCH] feat: Watching Bugs & Meta
---
.github/workflows/rust.yml | 18 ++--
site/assets/style/_variables.scss | 2 +-
site/diary/dag_watching.vox | 20 ++--
site/diary/watching_bugs.vox | 58 +++++++++++
site/guide/cli.vox | 40 +++++++-
site/guide/pipeline.vox | 2 +-
site/layouts/default.vox | 2 +-
src/builds.rs | 38 ++++---
src/main.rs | 158 ++++++++++++++++++++++--------
src/page.rs | 7 +-
10 files changed, 264 insertions(+), 81 deletions(-)
create mode 100644 site/diary/watching_bugs.vox
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 339cb81..9d2cbad 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -17,32 +17,30 @@ jobs:
# target: x86_64-unknown-linux-gnu
# default: true
# profile: default
- - name: Get dependency information
+ - name: Checkout codebase
+ uses: actions/checkout@v4
+ - name: Check the current Grass version
run: |
- gh api /repos/emmyoh/vox/commits/master --jq '.sha' > vox_rev
curl https://crates.io/api/v1/crates/grass > grass_rev
- name: Restore Cargo cache
id: cache-cargo
uses: actions/cache@v1
with:
path: ~/.cargo
- key: ${{ runner.os }}-cargo-${{ hashFiles('vox_rev', 'grass_rev') }}
+ key: ${{ runner.os }}-cargo-${{ hashFiles('src/**/*', '.cargo/**/*', 'Cargo.toml', 'rust-toolchain.toml', 'grass_rev') }}
restore-keys: |
- ${{ runner.os }}-cargo-${{ hashFiles('vox_rev', 'grass_rev') }}
+ ${{ runner.os }}-cargo-${{ hashFiles('src/**/*', '.cargo/**/*', 'Cargo.toml', 'rust-toolchain.toml', 'grass_rev') }}
- if: ${{ steps.cache-cargo.outputs.cache-hit != 'true' }}
- name: Install Grass and Vox
+ name: Install Vox and Grass
run: |
- rm vox_rev
- rm grass_rev
rustup update nightly && rustup default nightly
+ time cargo install --path . --features="cli"
time cargo install grass
- time cargo install --git https://github.com/emmyoh/vox --features="cli"
- - name: Checkout codebase
- uses: actions/checkout@v4
- name: Generate documentation
run: time cargo doc --no-deps -Zrustdoc-map --release --quiet
- name: Build site
run: |
+ rm grass_rev
mkdir -p site/output
cp -r target/doc/* site/output/
cd site
diff --git a/site/assets/style/_variables.scss b/site/assets/style/_variables.scss
index 7d443cc..8c85275 100644
--- a/site/assets/style/_variables.scss
+++ b/site/assets/style/_variables.scss
@@ -52,7 +52,7 @@
@media (prefers-color-scheme: dark) {
:root {
--body-color: var(--gray-300);
- --body-bg: var(--gray-800);
+ --body-bg: var(--gray-900);
--heading-color: #fff;
diff --git a/site/diary/dag_watching.vox b/site/diary/dag_watching.vox
index 7a7a4df..8cceec6 100644
--- a/site/diary/dag_watching.vox
+++ b/site/diary/dag_watching.vox
@@ -7,17 +7,17 @@ permalink = "date"
{% markdown %}
-# Notes
+## Notes
I've been using two particular terminal invocations frequently.
-## Linting & formatting
+### Linting & formatting
```sh
cargo fix --edition --edition-idioms --allow-dirty; cargo clippy --fix -Z unstable-options --allow-dirty; cargo fix --edition --edition-idioms --bin vox --features="cli" --allow-dirty; cargo clippy --fix -Z unstable-options --bin vox --features="cli" --allow-dirty; cargo fmt
```
These commands lint Vox, check it for errors, and then format its code.
-## Installing the local copy of Vox
+### Installing the local copy of Vox
```sh
cargo install --path . --features="cli"
```
@@ -26,7 +26,7 @@ This command allows me to use the `vox` CLI built with my latest local changes.
---
-# Goals
+## Goals
Today, I'm concerned with:
- Colouring pages differently when visualising the DAG; currently, the same colour is used for all pages.
@@ -35,17 +35,17 @@ Today, I'm concerned with:
- Putting together this development diary.
- Adding a `meta` templating context.
-## DAG Visualisation
+### DAG Visualisation
Colouring should be done based on the node's label.
To me, beige (ie, a light orange) is the colour of layouts, and a light blue complement is appropriate for collection-less pages.
- If the page is a layout page, set its colour to beige ().
- If the page is a page in a collection, set its colour to light green ().
- If the page is a page not in a collection, set its colour to light blue ().
-## CLI Path Parameter
+### CLI Path Parameter
The CLI should take an `Option`. If this path is `None`, do nothing. Otherwise, use this path to set the current environment path.
-## Watching
+### Watching
If changes are made, wait until a certain period (eg, five seconds) has elapsed where no further changes have been made.
When such a period has elapsed, do the following:
@@ -65,7 +65,7 @@ When such a period has elapsed, do the following:
- In the new DAG, replace all pages not needing rendering with their rendered counterparts from the old DAG.
5. Render & output the appropriate pages.
-## Development Diary
+### Development Diary
Maintaining a development diary is important for three primary reasons:
1. It conveys to others that the project is being developed.
2. It aids me in returning to the project when my attention temporarily turned away from in-progress work.
@@ -79,7 +79,7 @@ To build this development diary, I'll need to perform the following tasks:
To publish this development diary, I'll use a GitHub workflow similar to the one I wrote for [`vox-basic`](https://github.com/emmyoh/vox-basic).
-## `meta` Context
+### `meta` Context
The `meta` context comprises the following:
- `meta.date`, being the current date-time of the build.
- `meta.builder`, being the name of the software building the site ('Vox').
@@ -92,7 +92,7 @@ This makes Vox (and the abstract 'standard' it describes) very opinionated; it s
---
-# Future Goals
+## Future Goals
In the future, this tool needs:
1. A blog pertaining to the project, very similar in appearance to the development diary, but different in scope.
diff --git a/site/diary/watching_bugs.vox b/site/diary/watching_bugs.vox
new file mode 100644
index 0000000..5203616
--- /dev/null
+++ b/site/diary/watching_bugs.vox
@@ -0,0 +1,58 @@
+---
+title = "Watching Bugs & Meta"
+date = 2024-05-30T00:00:00+00:00
+layout = "post"
+permalink = "date"
+---
+
+{% markdown %}
+
+## Goals
+
+Today's agenda:
+- Fixing how page removal is handled while watching for changes.
+- Investigating a bug during watching where only the most recently rendered collection page will be rendered to its children.
+- Adding a `meta` context.
+- Improving the GitHub workflow for publishing this site.
+
+### Page Removal
+
+When computing which pages need rendering, we need to consider the following: if the removed page is a parent, the pages that once depended on it need to be re-rendered.
+Additionally, removed pages should have their outputs deleted.
+
+### Collection Page Rendering Lost
+
+This bug arose from improperly merging DAGs.
+
+### `meta` Context
+
+The `meta` context comprises the following:
+- `meta.date`, being the current date-time of the build.
+- `meta.builder`, being the name of the software building the site ('Vox').
+- `meta.version`, being the current version number of Vox.
+
+This was added when retrieving the {% raw %}{{ global }}{% endraw %} context.
+
+### GitHub Workflow
+
+Since this site exists in the same Git repository as Vox itself, it is not useful to pin the latest installation of Vox to the latest Git commit.
+Instead, Vox should be considered out-of-date when any of the following are changed:
+- `src` folder
+- `.cargo` folder
+- `Cargo.toml` file
+- `rust-toolchain.toml` file
+
+---
+
+## Future Goals
+- Re-rendering when layouts are modified during watching.
+- Including the contexts of all parent layouts when rendering.
+- Finishing the user guide.
+- Pointing site pages to indices, setting appropriate HTML titles.
+- Implementing partial date-times.
+- Parallelising both rendering and writing to disk.
+- Documenting the CLI code.
+- Creating a logo for Vox.
+- Updating all dependent crates.
+
+{% endmarkdown %}
\ No newline at end of file
diff --git a/site/guide/cli.vox b/site/guide/cli.vox
index 612a4ba..79f23f6 100644
--- a/site/guide/cli.vox
+++ b/site/guide/cli.vox
@@ -2,4 +2,42 @@
title = "Command-Line Interface"
layout = "page"
permalink = "none"
----
\ No newline at end of file
+---
+
+{% markdown %}
+Vox has two subcommands: `vox build` and `vox serve`.
+
+## Building
+
+To build a site, invoke `vox build`. This command takes an optional path argument, if not building a site in the current working directory.\
+This command takes the following options:
+* `-w` or `--watch`: will watch for changes.
+* `-v` or `--verbosity`: sets the maximum level of logging output.
+ - `-v`: recoverable errors
+ - `-vv`: warnings
+ - `-vvv`: information
+ - `-vvvv`: debugging information
+ - `-vvvvv`: trace information
+* `-d` or `--visualise-dag`: will output a visualisation of the DAG to `dag.svg`.
+
+### Example
+
+To build from the current working directory while watching, visualising the DAG, and logging everything:
+```sh
+vox build -w -d -vvvvv
+```
+
+## Serving
+
+When `vox serve` is invoked, the site is first built, then served locally.\
+This command takes the same arguments and flags as `vox build`, as well as the following additional options:
+* `-p <PORT>` or `--port <PORT>`: the port to serve on (defaults to `80`).
+
+### Example
+
+To serve from `./site` on port `8080` while watching, visualising the DAG, and logging errors & warnings:
+```sh
+vox serve -p 8080 -w -d -vv ./site
+```
+
+{% endmarkdown %}
\ No newline at end of file
diff --git a/site/guide/pipeline.vox b/site/guide/pipeline.vox
index abca91e..ed255af 100644
--- a/site/guide/pipeline.vox
+++ b/site/guide/pipeline.vox
@@ -26,7 +26,7 @@ When changes are made, rebuilding can be done selectively:
- Their descendants in the new DAG also need to be rendered.
- All pages that were added need to be rendered.
- Their descendants in the new DAG also need to be rendered.
- - All pages that were removed need their descendants in the old DAG rendered.
+ - All pages that were removed need their descendants in the new DAG rendered.
4. The DAGs are merged.
- In the new DAG, all pages not needing rendering are replaced with their rendered counterparts from the old DAG.
5. Pages are rendered.
diff --git a/site/layouts/default.vox b/site/layouts/default.vox
index 9fce31b..9ae6412 100644
--- a/site/layouts/default.vox
+++ b/site/layouts/default.vox
@@ -16,7 +16,7 @@
diff --git a/src/builds.rs b/src/builds.rs
index e9495fd..8829792 100644
--- a/src/builds.rs
+++ b/src/builds.rs
@@ -1,5 +1,6 @@
use crate::page::Page;
use ahash::AHashMap;
+use chrono::Locale;
use daggy::{
petgraph::{
dot::{Config, Dot},
@@ -25,7 +26,7 @@ pub struct Build {
/// The Liquid contexts necessary to render templates in pages.
pub contexts: Object,
/// The locale information of the build, primarily used to render dates and times.
- pub locale: String,
+ pub locale: Locale,
/// A directed acyclic graph (DAG) populated with pages and their children.
pub dag: StableDag<Page, EdgeType>,
}
@@ -47,7 +48,14 @@ impl Build {
dag_graph,
&[Config::NodeNoLabel, Config::EdgeNoLabel],
&|_graph, edge| format!("label = \"{:?}\"", edge.weight()),
- &|_graph, node| format!("label = \"{}\"", node.1.to_path_string()),
+ &|_graph, node| {
+ let path = PathBuf::from(node.1.to_path_string());
+ let relative_path = path
+ .strip_prefix(fs::canonicalize(env::current_dir().unwrap()).unwrap())
+ .unwrap();
+ let label = relative_path.to_string_lossy().to_string();
+ format!("label = \"{}\"", label)
+ },
);
debug!("DAG: {:#?}", dag_graphviz);
let mut parser = DotParser::new(&format!("{:?}", dag_graphviz));
@@ -203,20 +211,22 @@ impl Build {
}
// If the parent page is in a collection this page depends on, make note of it.
EdgeType::Collection => {
- let parent_path =
- fs::canonicalize(PathBuf::from(parent_page.directory.clone()))
- .into_diagnostic()?;
- let parent_path_difference = parent_path
- .strip_prefix(&current_directory)
- .into_diagnostic()?;
- let path_components: Vec<String> = parent_path_difference
- .components()
- .map(|c| c.as_os_str().to_string_lossy().to_string())
- .collect();
- let collection_name = path_components[0].clone();
+ // let parent_path =
+ // fs::canonicalize(PathBuf::from(parent_page.directory.clone()))
+ // .into_diagnostic()?;
+ let parent_path = parent_page.to_path_string();
+ let collection_name = parent_page.get_collection_name()?.unwrap();
+ // let parent_path_difference = parent_path
+ // .strip_prefix(&current_directory)
+ // .into_diagnostic()?;
+ // let path_components: Vec<String> = parent_path_difference
+ // .components()
+ // .map(|c| c.as_os_str().to_string_lossy().to_string())
+ // .collect();
+ // let collection_name = path_components[0].clone();
info!(
"Parent page ({:?}) is in collection: {:?}",
- parent_path_difference, collection_name
+ parent_path, collection_name
);
if collection_pages.contains_key(&collection_name) {
collection_pages
diff --git a/src/main.rs b/src/main.rs
index 602f34a..01f8c74 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -2,6 +2,7 @@ use actix_files::NamedFile;
use actix_web::dev::{ServiceRequest, ServiceResponse};
use actix_web::{App, HttpServer};
use ahash::{AHashMap, AHashSet, HashSet, HashSetExt};
+use chrono::{Locale, Utc};
use clap::{arg, crate_version};
use clap::{Parser, Subcommand};
use daggy::Walker;
@@ -19,11 +20,11 @@ use std::path::Path;
use std::sync::mpsc::channel;
use std::{fs, path::PathBuf, time::Duration};
use ticky::Stopwatch;
-use tokio::time::{sleep, Instant};
+use tokio::time::sleep;
use toml::Table;
use tracing::{debug, error, info, trace, warn, Level};
use vox::builds::EdgeType;
-use vox::date::{self};
+use vox::date::{self, Date};
use vox::{builds::Build, page::Page, templates::create_liquid_parser};
#[global_allocator]
@@ -253,12 +254,12 @@ fn insert_or_update_page(
pages: &mut AHashMap<PathBuf, NodeIndex>,
layouts: &mut AHashMap<PathBuf, Vec<NodeIndex>>,
collection_dependents: &mut AHashMap<String, Vec<NodeIndex>>,
- locale: String,
+ locale: Locale,
) -> miette::Result<()> {
let entry = fs::canonicalize(entry).into_diagnostic()?;
let (page, index) = if !Page::is_layout_path(&entry)? {
info!("Inserting or updating page: {:?} … ", entry);
- let page = path_to_page(entry.clone(), locale.clone())?;
+ let page = path_to_page(entry.clone(), locale)?;
debug!("{:#?}", page);
// If the page already exists in the DAG, update it. Otherwise, insert it.
let index = if pages.contains_key(&entry) {
@@ -305,7 +306,7 @@ fn insert_or_update_page(
dag.remove_node(old_layout.1);
}
info!("Inserting layout: {:?} … ", layout_path);
- let layout_page = path_to_page(layout_path.clone(), locale.clone())?;
+ let layout_page = path_to_page(layout_path.clone(), locale)?;
debug!("{:#?}", layout_page);
let layout_index = dag.add_child(index, EdgeType::Layout, layout_page);
if let Some(layouts) = layouts.get_mut(&layout_path) {
@@ -349,7 +350,7 @@ fn insert_or_update_page(
let entry = fs::canonicalize(entry.into_diagnostic()?).into_diagnostic()?;
if pages.get(&entry).is_none() {
info!("Inserting collection page: {:?} … ", entry);
- let collection_page = path_to_page(entry.clone(), locale.clone())?;
+ let collection_page = path_to_page(entry.clone(), locale)?;
debug!("{:#?}", collection_page);
let collection_page_index =
dag.add_parent(index, EdgeType::Collection, collection_page);
@@ -395,7 +396,7 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
&mut pages,
&mut layouts,
&mut collection_dependents,
- global.1.clone(),
+ global.1,
)?;
}
// We update the layouts with their parents and children once all other pages have been inserted.
@@ -409,7 +410,7 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
&mut pages,
&mut layouts,
&mut collection_dependents,
- global.1.clone(),
+ global.1,
)?;
}
}
@@ -419,7 +420,7 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
let (_updated_pages, updated_dag) = generate_site(
parser.clone(),
global.0.clone(),
- global.1.clone(),
+ global.1,
dag,
visualise_dag,
)
@@ -447,7 +448,7 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
// .unwatch(&output_path)
// .into_diagnostic()?;
- let mut changed_times = Vec::new();
+ // let mut changed_times = Vec::new();
loop {
if let Ok(events) = receiver.recv().into_diagnostic()? {
// Changes to the output directory or version control are irrelevant.
@@ -460,14 +461,17 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
continue;
}
debug!("Changes detected: {:#?} … ", events);
- changed_times.push(Instant::now());
- }
- if changed_times.len() > 1 {
- let first = changed_times.remove(0);
- if first.elapsed() < Duration::from_secs(1) {
- continue;
- }
+ // changed_times.push(Instant::now());
}
+ // If changes are made, wait until a certain period (eg, one second) has elapsed where no further changes have been made.
+ // if changed_times.len() > 0 {
+ // let last_change = changed_times.last().unwrap();
+ // if last_change.elapsed() >= Duration::from_secs(1) {
+ // info!("Changes stabilised. Rebuilding … ");
+ // } else {
+ // continue;
+ // }
+ // }
// 1. Build a new DAG.
let parser = create_liquid_parser()?;
@@ -492,7 +496,7 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
&mut new_pages,
&mut new_layouts,
&mut new_collection_dependents,
- global.1.clone(),
+ global.1,
)?;
}
for (layout_path, layout_indices) in new_layouts.clone() {
@@ -504,7 +508,7 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
&mut new_pages,
&mut new_layouts,
&mut new_collection_dependents,
- global.1.clone(),
+ global.1,
)?;
}
}
@@ -533,20 +537,48 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
let page = new_dag.node_weight(*page_index).unwrap();
new_dag_pages.insert(page_path.clone(), page);
}
- let mut updated_pages = AHashSet::new();
- for (page_path, new_page) in new_dag_pages {
- if let Some(old_page) = old_dag_pages.get(&page_path) {
+ let mut added_or_modified = AHashSet::new();
+ let mut removed = AHashSet::new();
+ let mut removed_output_paths = AHashSet::new();
+ for (page_path, new_page) in new_dag_pages.iter() {
+ if let Some(old_page) = old_dag_pages.get(page_path) {
// If the page has been modified, its index is noted.
if !new_page.is_equivalent(old_page) {
- updated_pages.insert(new_pages[&page_path]);
+ added_or_modified.insert(new_pages[page_path]);
}
} else {
// If the page is new, its index is noted.
- updated_pages.insert(new_pages[&page_path]);
+ added_or_modified.insert(new_pages[page_path]);
}
}
- // No need to continue if no pages were added or changed.
- if updated_pages.is_empty() {
+ for (page_path, _old_page) in old_dag_pages.iter() {
+ if new_dag_pages.get(page_path).is_none() {
+ // If the page has been removed, its index is noted.
+ removed.insert(pages[page_path]);
+ if let Some(old_page) = old_dag_pages.get(page_path) {
+ let output_path = if old_page.url.is_empty() {
+ let layout_url = get_layout_url(&pages[page_path], &dag);
+ layout_url.map(|layout_url| format!("output/{}", layout_url))
+ } else if !old_page.url.is_empty() {
+ Some(format!("output/{}", old_page.url))
+ } else {
+ None
+ };
+ if output_path.is_none() {
+ warn!("Page has no URL: {:#?} … ", old_page.to_path_string());
+ continue;
+ }
+ let output_path = output_path.unwrap();
+ removed_output_paths
+ .insert(fs::canonicalize(output_path.clone()).into_diagnostic()?);
+ }
+ }
+ }
+ info!("Removed pages: {:#?} … ", removed_output_paths);
+ // No need to continue if nothing changed.
+ if added_or_modified.is_empty() && removed.is_empty() && removed_output_paths.is_empty()
+ {
+ info!("Nothing changed. Aborting rebuild … ");
continue;
}
@@ -555,15 +587,44 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
// - Their descendants in the new DAG also need to be rendered.
// - All pages that were added need to be rendered.
// - Their descendants in the new DAG also need to be rendered.
- // - Nothing is done with the pages that were removed. Necessary changes are covered by the two cases above.
+ // - All pages that were removed need their descendants in the new DAG rendered.
+ // - Their old output also needs to be deleted.
- let mut pages_to_render = updated_pages.clone();
- for updated_page_index in updated_pages.clone() {
- let descendants = Build::get_descendants(&new_dag, updated_page_index);
+ let mut pages_to_render = added_or_modified.clone();
+ for page_index in added_or_modified.clone() {
+ let descendants = Build::get_descendants(&new_dag, page_index);
for descendant in descendants {
pages_to_render.insert(descendant);
}
}
+ for page_index in removed.clone() {
+ let descendants = Build::get_descendants(&dag, page_index);
+ let descendant_page_paths = descendants
+ .iter()
+ .map(|descendant| {
+ PathBuf::from(dag.node_weight(*descendant).unwrap().to_path_string())
+ })
+ .collect::<Vec<_>>();
+ for descendant_page_path in descendant_page_paths {
+ if let Some(descendant_page_index) = new_pages.get(&descendant_page_path) {
+ pages_to_render.insert(*descendant_page_index);
+ }
+ }
+ }
+ // Only the root pages need to be passed to the rendering code, as it will recursively render their descendants.
+ let mut root_pages_to_render = added_or_modified.clone();
+ for page_index in removed.clone() {
+ let children = dag.children(page_index).iter(&dag).collect::<Vec<_>>();
+ let child_page_paths = children
+ .iter()
+ .map(|child| PathBuf::from(dag.node_weight(child.1).unwrap().to_path_string()))
+ .collect::<Vec<_>>();
+ for child_page_path in child_page_paths {
+ if let Some(child_page_index) = new_pages.get(&child_page_path) {
+ root_pages_to_render.insert(*child_page_index);
+ }
+ }
+ }
// 4. Merge the DAGs.
// - In the new DAG, replace all pages not needing rendering with their rendered counterparts from the old DAG.
@@ -572,12 +633,14 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
if !pages_to_render.contains(page_index) {
// Pages may be added, so it is necessary to check if the page already exists in the old DAG.
if let Some(old_page) = dag.node_weight(pages[page_path]) {
- let mut _new_page = new_dag.node_weight_mut(*page_index).unwrap();
- _new_page = &mut old_page.clone();
+ let new_page = new_dag.node_weight_mut(*page_index).unwrap();
+ new_page.url = old_page.url.clone();
+ new_page.rendered = old_page.rendered.clone();
}
}
}
dag = new_dag;
+ trace!("Merged DAGs … ");
// 5. Render & output the appropriate pages.
info!("Rebuilding … ");
@@ -588,12 +651,20 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
locale: global.1,
dag,
};
+
+ // Delete the output of removed pages.
+ for removed_output_path in removed_output_paths {
+ info!("Removing {:?} … ", removed_output_path);
+ tokio::fs::remove_file(removed_output_path)
+ .await
+ .into_diagnostic()?;
+ }
+
let mut rendered_pages = Vec::new();
- for page in updated_pages.iter() {
+ for page in root_pages_to_render.iter() {
build.render_recursively(*page, &mut rendered_pages)?;
}
- info!("{} pages were rendered … ", rendered_pages.len());
for updated_page_index in rendered_pages.iter() {
let updated_page = &build.dag.graph()[*updated_page_index];
let output_path = if updated_page.url.is_empty() {
@@ -609,7 +680,7 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
continue;
}
let output_path = output_path.unwrap();
- info!("Writing to {} … ", output_path);
+ info!("Writing to `{}` … ", output_path);
tokio::fs::create_dir_all(
Path::new(&output_path)
.parent()
@@ -624,7 +695,7 @@ async fn build(watch: bool, visualise_dag: bool) -> miette::Result<()> {
timer.stop();
println!(
"Generated {} pages in {:.2} seconds … ",
- updated_pages.len(),
+ rendered_pages.len(),
timer.elapsed_s()
);
dag = build.dag;
@@ -664,7 +735,7 @@ fn get_layout_url(
async fn generate_site(
template_parser: liquid::Parser,
contexts: liquid::Object,
- locale: String,
+ locale: Locale,
dag: StableDag<Page, EdgeType>,
visualise_dag: bool,
) -> miette::Result<(Vec<NodeIndex>, StableDag<Page, EdgeType>)> {
@@ -731,7 +802,7 @@ async fn generate_site(
Ok((updated_pages, build.dag))
}
-fn path_to_page(path: PathBuf, locale: String) -> miette::Result<Page> {
+fn path_to_page(path: PathBuf, locale: Locale) -> miette::Result<Page> {
Page::new(
fs::read_to_string(path.clone()).into_diagnostic()?,
path,
@@ -739,7 +810,7 @@ fn path_to_page(path: PathBuf, locale: String) -> miette::Result {
)
}
-fn get_global_context() -> miette::Result<(Object, String)> {
+fn get_global_context() -> miette::Result<(Object, Locale)> {
let global_context = match fs::read_to_string("global.toml") {
Ok(global_file) => global_file.parse::().into_diagnostic()?,
Err(_) => format!("locale = '{}'", date::default_locale_string())
@@ -752,9 +823,16 @@ fn get_global_context() -> miette::Result<(Object, String)> {
.as_str()
.unwrap_or(&date::default_locale_string())
.to_string();
+ let locale = date::locale_string_to_locale(locale.clone());
+ let current_date = Date::chrono_to_date(Utc::now(), locale);
Ok((
object!({
- "global": global_context
+ "global": global_context,
+ "meta": {
+ "builder": "Vox",
+ "version": crate_version!(),
+ "date": current_date,
+ }
}),
locale,
))
diff --git a/src/page.rs b/src/page.rs
index 945260c..b4e31f6 100644
--- a/src/page.rs
+++ b/src/page.rs
@@ -1,7 +1,8 @@
use crate::{
- date::{locale_string_to_locale, Date},
+ date::Date,
error::{DateNotValid, FrontmatterNotFound, InvalidCollectionsProperty},
};
+use chrono::Locale;
use liquid::{Object, Parser};
use miette::IntoDiagnostic;
use miette::NamedSource;
@@ -298,7 +299,7 @@ impl Page {
/// # Returns
///
/// An instance of a page.
- pub fn new(contents: String, path: PathBuf, locale: String) -> miette::Result<Page> {
+ pub fn new(contents: String, path: PathBuf, locale: Locale) -> miette::Result<Page> {
let path = fs::canonicalize(path).into_diagnostic()?;
let (frontmatter, body) = Self::get_frontmatter_and_body(contents.clone(), path.clone())?;
let frontmatter_data = frontmatter.parse::().into_diagnostic()?;
@@ -314,7 +315,7 @@ impl Page {
// src: NamedSource::new(path.to_string_lossy(), frontmatter.clone()),
// })
// .into_diagnostic()?;
- let locale = locale_string_to_locale(locale);
+ // let locale = locale_string_to_locale(locale);
let date = if let Some(date) = frontmatter_data.get("date") {
// if date.as_str().is_none() {
// return Err(DateNotFound {