create site.pages with globs #186

Merged (4 commits, Dec 5, 2023)
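In short: this change lets a template opt into a scoped site.pages index by declaring glob patterns in TOML front matter (a new read_pages_glob key), instead of always indexing every file under the content directory. Templates without front matter keep the existing all-pages behavior. Two new dependencies support this: glob for pattern expansion and walkdir for scanning the template directory.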
12 changes: 10 additions & 2 deletions Cargo.lock


2 changes: 2 additions & 0 deletions Cargo.toml
@@ -24,6 +24,8 @@ spin-sdk = { git = "https://github.com/fermyon/spin", rev = "139c40967a75dbdd5d4
toml = "0.5.9"
wit-bindgen-rust = { git = "https://github.com/bytecodealliance/wit-bindgen", rev = "cb871cfa1ee460b51eb1d144b175b9aab9c50aba", default-features = false }
regex = "1"
walkdir = "2.4.0"
glob = "0.3.1"

[workspace]
members = ["bart"]
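The two new dependencies back the two halves of the change: glob expands the read_pages_glob patterns in src/content.rs, and walkdir drives the template-directory scan in src/template.rs that extracts front matter before registering each template.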
138 changes: 88 additions & 50 deletions src/content.rs
@@ -1,3 +1,5 @@
use anyhow::bail;
use glob::glob;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
@@ -167,6 +169,19 @@ pub fn all_pages(
}
}

pub fn get_pages_by_glob(
    dir: PathBuf,
    globs: &Vec<String>,
    show_unpublished: bool,
) -> anyhow::Result<BTreeMap<String, PageValues>> {
    let index_cache: IndexCache = pages_by_glob_load(dir, globs, show_unpublished)?;
    Ok(index_cache.contents)
}
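For orientation, a minimal sketch of how a caller inside this module might use get_pages_by_glob; the content directory and patterns here are hypothetical, and patterns are appended to the directory path (see pages_by_glob_load below):

    use std::path::PathBuf;

    fn example() -> anyhow::Result<()> {
        // Hypothetical patterns: index only blog posts and docs pages.
        let patterns = vec!["/blog/*.md".to_string(), "/docs/**/*.md".to_string()];
        let pages = get_pages_by_glob(PathBuf::from("content"), &patterns, false)?;
        for (path, _values) in &pages {
            println!("indexed {path}");
        }
        Ok(())
    }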

pub fn all_pages_load(dir: PathBuf, show_unpublished: bool) -> anyhow::Result<IndexCache> {
    let files = all_files(dir)?;
    get_index_cache_from_files(files, show_unpublished)
}
pub struct IndexCache {
    contents: BTreeMap<String, PageValues>,
    cache_expiration: Option<DateTime<Utc>>,
@@ -176,60 +191,25 @@ pub struct IndexCache {
///
/// If show_unpublished is `true`, this will include pages that Bartholomew has determined are
/// unpublished.
pub fn pages_by_glob_load(
    dir: PathBuf,
    glob_patterns: &Vec<String>,
    show_unpublished: bool,
) -> anyhow::Result<IndexCache> {
    let mut files: Vec<PathBuf> = Vec::new();
    for glob_pattern in glob_patterns {
        let full_pattern = format!("{}{}", dir.to_string_lossy(), glob_pattern);
        let full_path = PathBuf::from(&full_pattern);
        for entry in glob(full_path.to_str().unwrap_or_default())? {
            match entry {
                Ok(path) => files.push(path),
                Err(e) => {
                    bail!("Failed to read file glob \"{glob_pattern}\": {e}")
                }
            }
        }
    }
    get_index_cache_from_files(files, show_unpublished)
}
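Note that the full pattern is built by plain string concatenation of the content dir and the glob, so a pattern needs a leading path separator to resolve as intended. A tiny sketch, assuming a content dir named "content":

    fn main() {
        let dir = std::path::PathBuf::from("content");
        // "content" + "/blog/*.md" -> "content/blog/*.md" (matches as intended)
        // "content" + "blog/*.md"  -> "contentblog/*.md"  (matches nothing)
        let full_pattern = format!("{}{}", dir.to_string_lossy(), "/blog/*.md");
        assert_eq!(full_pattern, "content/blog/*.md");
    }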

/// Fetch a list of paths to every file in the directory
@@ -434,6 +414,64 @@ impl FromStr for Content {
}
}

fn get_index_cache_from_files(
    files: Vec<PathBuf>,
    show_unpublished: bool,
) -> Result<IndexCache, anyhow::Error> {
    let mut contents = BTreeMap::new();
    let mut contains_unpublished: bool = false;
    let mut earliest_unpublished: Option<DateTime<Utc>> = None;
    for f in files {
        // Dotfiles should not be loaded.
        if f.file_name()
            .map(|f| f.to_string_lossy().starts_with('.'))
            .unwrap_or(false)
        {
            eprintln!("Skipping dotfile {f:?}");
            continue;
        }
        let raw_data = std::fs::read_to_string(&f)
            .map_err(|e| anyhow::anyhow!("File is not string data: {:?}: {}", &f, e))?;
        match raw_data.parse::<Content>() {
            Ok(content) => {
                if show_unpublished || content.published {
                    contents.insert(f.to_string_lossy().to_string(), content.into());
                } else {
                    // find earliest unpublished article to save timestamp to refresh cache
                    let article_date = content.head.date;
                    match contains_unpublished {
                        true => {
                            if match earliest_unpublished {
                                Some(val) => article_date.map(|d| d <= val).unwrap_or(true),
                                _ => false,
                            } {
                                earliest_unpublished = article_date;
                            }
                        }
                        false => {
                            if let Some(val) = article_date {
                                if val > Utc::now() {
                                    earliest_unpublished = article_date;
                                    contains_unpublished = true;
                                }
                            };
                        }
                    }
                }
            }
            Err(e) => {
                // If a parse fails, don't take down the entire site. Just skip this piece of content.
                eprintln!("File {:?}: {}", &f, e);
                continue;
            }
        }
    }
    Ok(IndexCache {
        contents,
        cache_expiration: earliest_unpublished,
    })
}
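The cache_expiration bookkeeping above tracks the earliest future-dated unpublished article, so the index cache can be refreshed exactly when the next post goes live. A minimal sketch of that intent (not the exact control flow above), with hypothetical timestamps:

    use chrono::{DateTime, Duration, Utc};

    // Keep the smallest article date that is still in the future.
    fn earliest_future(dates: &[DateTime<Utc>]) -> Option<DateTime<Utc>> {
        dates.iter().copied().filter(|d| *d > Utc::now()).min()
    }

    fn main() {
        let soon = Utc::now() + Duration::hours(1);
        let later = Utc::now() + Duration::days(2);
        // With two scheduled posts, the cache expires when the first one publishes.
        assert_eq!(earliest_future(&[later, soon]), Some(soon));
    }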

#[cfg(test)]
mod test {
use super::*;
97 changes: 90 additions & 7 deletions src/template.rs
@@ -4,10 +4,14 @@ use {
    std::path::PathBuf,
};

use std::{collections::HashMap, fs::File, io::Read};

use crate::rhai_engine::custom_rhai_engine_init;

use super::content::{Content, Head};
use anyhow::bail;
use serde::{Deserialize, Serialize};
use walkdir::WalkDir;

/// The name of the default template.
/// This will be resolved to $TEMPLATE_DIR/$DEFAULT_TEMPLATE.hbs
@@ -91,11 +95,17 @@ impl From<Content> for PageValues {
}
}
}
#[cfg(feature = "server")]
#[derive(Serialize, Deserialize, Default, Debug)]
pub struct TemplateMeta {
    read_pages_glob: Option<Vec<String>>,
}
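TemplateMeta is deserialized from optional TOML front matter at the top of a .hbs file (see parse_hbs_template at the end of this file). A sketch of what a template opting into a scoped index might look like; the pattern and the markup are illustrative, not taken from this PR:

    ---
    read_pages_glob = ["/blog/**/*.md"]
    ---
    <ul>
      {{#each site.pages}}
        <li>{{this.head.title}}</li>
      {{/each}}
    </ul>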

/// Renderer can execute a handlebars template and render the results into HTML.
#[cfg(feature = "server")]
pub struct Renderer<'a> {
    pub template_dir: PathBuf,
    pub template_meta: HashMap<String, TemplateMeta>,
    pub theme_dir: Option<PathBuf>,
    pub script_dir: PathBuf,
    pub content_dir: PathBuf,
@@ -117,9 +127,11 @@ impl<'a> Renderer<'a> {
        // Create custom rhai engine and assign to handlebars
        let rhai_engine = custom_rhai_engine_init();
        handlebars.set_engine(rhai_engine);
        let template_meta = HashMap::new();

        Renderer {
            template_dir,
            template_meta,
            theme_dir,
            script_dir,
            content_dir,
@@ -147,8 +159,26 @@
            self.handlebars
                .register_templates_directory(".hbs", templates)?;
        }
        for entry in WalkDir::new(&self.template_dir)
            .into_iter()
            .filter_map(|e| e.ok())
        {
            if entry.file_type().is_file() && entry.path().extension().map_or(false, |e| e == "hbs")
            {
                let filename = entry.file_name().to_str().unwrap().replace(".hbs", "");
                let res = parse_hbs_template(entry);
                match res {
                    Ok((template_meta, body)) => {
                        self.template_meta
                            .insert(filename.to_owned(), template_meta);
                        self.handlebars.register_template_string(&filename, body)?;
                    }
                    Err(err) => {
                        eprintln!("Error reading template {}: {}", filename, err);
                    }
                }
            }
        }
        Ok(())
    }
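Note that each template is now registered under its file stem (blog.hbs becomes blog), which is the same key used for the template_meta lookup during rendering, and parse_hbs_template strips the front matter before the body reaches Handlebars, so the TOML never shows up in rendered output.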

@@ -220,11 +250,14 @@
            pages: match &info.index_site_pages {
                Some(templates) => {
                    if templates.contains(&tpl) {
                        let mut glob_pattern: Option<&Vec<String>> = None;
                        let default_meta = TemplateMeta::default();
                        let template_meta =
                            self.template_meta.get(&tpl).unwrap_or(&default_meta);
                        if let Some(val) = &template_meta.read_pages_glob {
                            glob_pattern = Some(val);
                        }
                        self.create_site_pages_index(glob_pattern, &tpl)?
                    } else {
                        BTreeMap::new()
                    }
@@ -249,6 +282,32 @@
    fn register_helpers(&mut self) {
        handlebars_sprig::addhelpers(&mut self.handlebars)
    }
    fn create_site_pages_index(
        &self,
        glob_pattern: Option<&Vec<String>>,
        tpl: &str,
    ) -> anyhow::Result<BTreeMap<String, PageValues>> {
        match glob_pattern {
            Some(pattern) => {
                let pages = crate::content::get_pages_by_glob(
                    self.content_dir.clone(),
                    pattern,
                    self.show_unpublished,
                );
                match pages {
                    Ok(val) => Ok(val),
                    Err(err) => {
                        bail!("Error parsing glob in template \"{tpl}\": {err}")
                    }
                }
            }
            None => crate::content::all_pages(
                self.content_dir.clone(),
                self.show_unpublished,
                self.disable_cache,
            ),
        }
    }
}
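Design note: pages resolved through a glob bypass the all_pages cache entirely and are re-read via get_pages_by_glob, while templates without read_pages_glob keep the existing cached all_pages path (including the disable_cache flag).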

/*
@@ -295,3 +354,27 @@ pub fn error_values(title: &str, msg: &str) -> PageValues {
        published: true,
    }
}

fn read_file(file_path: &str) -> anyhow::Result<String> {
    let mut file = File::open(file_path)?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;
    Ok(contents)
}

#[cfg(feature = "server")]
fn parse_hbs_template(entry: walkdir::DirEntry) -> anyhow::Result<(TemplateMeta, String)> {
    let path = entry.path();
    match read_file(path.to_str().unwrap()) {
        Ok(contents) => {
            let doc = contents.replace("\r\n", "\n");
            let (toml_text, body) = doc.split_once("\n---\n").unwrap_or(("", &doc));
            let toml_text = toml_text.trim_start_matches("---").trim();
            let template_meta = toml::from_str(toml_text)?;
            Ok((template_meta, body.to_owned()))
        }
        Err(err) => {
            bail!("Failed to read hbs template {path:?}: {err}")
        }
    }
}
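A worked sketch of the front-matter split above: the leading "---" fence is trimmed off and everything after the closing "\n---\n" becomes the template body, while a file with no front matter falls back to ("", whole document), which toml::from_str turns into a default TemplateMeta.

    fn main() {
        let doc = "---\nread_pages_glob = [\"/blog/*.md\"]\n---\n<h1>{{head.title}}</h1>\n";
        let (toml_text, body) = doc.split_once("\n---\n").unwrap_or(("", doc));
        let toml_text = toml_text.trim_start_matches("---").trim();
        assert_eq!(toml_text, "read_pages_glob = [\"/blog/*.md\"]");
        assert_eq!(body, "<h1>{{head.title}}</h1>\n");
    }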