create site.pages with globs
Signed-off-by: karthik2804 <karthik.ganeshram@fermyon.com>
karthik2804 committed Dec 4, 2023
1 parent 462797d commit 99178cc
Showing 4 changed files with 164 additions and 11 deletions.
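In short, this change lets an individual Handlebars template narrow the site.pages index it receives. A template may begin with a TOML front-matter block, delimited by --- lines, that sets a read_pages_glob key; when that template is rendered, only content files matching the glob (the pattern is appended directly to the content directory path) are loaded into site.pages, instead of every file under the content directory. A minimal sketch of such a template follows; the glob value, the blog layout, and the head.title field are illustrative assumptions, not part of this commit:

---
read_pages_glob = "/blog/**/*.md"
---
<ul>
  {{#each site.pages}}
  <li>{{ this.head.title }}</li>
  {{/each}}
</ul>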
12 changes: 10 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions Cargo.toml
@@ -24,6 +24,8 @@ spin-sdk = { git = "https://github.com/fermyon/spin", rev = "139c40967a75dbdd5d4
toml = "0.5.9"
wit-bindgen-rust = { git = "https://github.com/bytecodealliance/wit-bindgen", rev = "cb871cfa1ee460b51eb1d144b175b9aab9c50aba", default-features = false }
regex = "1"
walkdir = "2.4.0"
glob = "0.3.1"

[workspace]
members = ["bart"]
84 changes: 82 additions & 2 deletions src/content.rs
@@ -1,3 +1,5 @@
use anyhow::bail;
use glob::glob;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
@@ -167,6 +169,70 @@ pub fn all_pages(
}
}

pub fn get_pages_by_glob(
dir: PathBuf,
glob: String,
show_unpublished: bool,
) -> anyhow::Result<BTreeMap<String, PageValues>> {
let index_cache: IndexCache = pages_by_glob_load(dir, glob, show_unpublished)?;
return Ok(index_cache.contents);
}

pub fn all_pages_load(dir: PathBuf, show_unpublished: bool) -> anyhow::Result<IndexCache> {
let files = all_files(dir)?;
let mut contents = BTreeMap::new();
let mut contains_unpublished: bool = false;
let mut earliest_unpublished: Option<DateTime<Utc>> = None;
for f in files {
// Dotfiles should not be loaded.
if f.file_name()
.map(|f| f.to_string_lossy().starts_with('.'))
.unwrap_or(false)
{
eprintln!("Skipping dotfile {f:?}");
continue;
}
let raw_data = std::fs::read_to_string(&f)
.map_err(|e| anyhow::anyhow!("File is not string data: {:?}: {}", &f, e))?;
match raw_data.parse::<Content>() {
Ok(content) => {
if show_unpublished || content.published {
contents.insert(f.to_string_lossy().to_string(), content.into());
} else {
// Track the earliest unpublished article so its timestamp can be used to refresh the cache.
let article_date = content.head.date;
match contains_unpublished {
true => {
if match earliest_unpublished {
Some(val) => article_date.map(|d| d <= val).unwrap_or(true),
_ => false,
} {
earliest_unpublished = article_date;
}
}
false => {
if let Some(val) = article_date {
if val > Utc::now() {
earliest_unpublished = article_date;
contains_unpublished = true;
}
};
}
}
}
}
Err(e) => {
// If a parse fails, don't take down the entire site. Just skip this piece of content.
eprintln!("File {:?}: {}", &f, e);
continue;
}
}
}
Ok(IndexCache {
contents,
cache_expiration: earliest_unpublished,
})
}
pub struct IndexCache {
contents: BTreeMap<String, PageValues>,
cache_expiration: Option<DateTime<Utc>>,
@@ -176,8 +242,22 @@ pub struct IndexCache
///
/// If show_unpublished is `true`, this will include pages that Bartholomew has determined are
/// unpublished.
pub fn all_pages_load(dir: PathBuf, show_unpublished: bool) -> anyhow::Result<IndexCache> {
let files = all_files(dir)?;
pub fn pages_by_glob_load(
dir: PathBuf,
glob_pattern: String,
show_unpublished: bool,
) -> anyhow::Result<IndexCache> {
let mut files: Vec<PathBuf> = Vec::new();
let full_pattern = format!("{}{}", dir.to_string_lossy(), glob_pattern);
let full_path = PathBuf::from(&full_pattern);
for entry in glob(&full_path.to_str().unwrap_or_default())? {
match entry {
Ok(path) => files.push(path),
Err(e) => {
bail!("Failed to read file glob: {e}")
}
}
}
let mut contents = BTreeMap::new();
let mut contains_unpublished: bool = false;
let mut earliest_unpublished: Option<DateTime<Utc>> = None;
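For context, the new get_pages_by_glob entry point is used the same way all_pages is elsewhere in the renderer. A minimal calling sketch, assuming a hypothetical content layout (the "content" directory and the blog pattern are illustrative); note that pages_by_glob_load concatenates the directory and the pattern as plain strings, so the pattern should start with a path separator unless the directory already ends with one:

use std::path::PathBuf;

fn list_blog_pages() -> anyhow::Result<()> {
    // "content" + "/blog/**/*.md" is globbed as one concatenated pattern.
    let pages = crate::content::get_pages_by_glob(
        PathBuf::from("content"),
        "/blog/**/*.md".to_string(),
        false, // exclude unpublished pages
    )?;
    for (path, _values) in &pages {
        println!("matched: {path}");
    }
    Ok(())
}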
77 changes: 70 additions & 7 deletions src/template.rs
@@ -4,10 +4,13 @@ use {
std::path::PathBuf,
};

use std::{collections::HashMap, fs::File, io::Read};

use crate::rhai_engine::custom_rhai_engine_init;

use super::content::{Content, Head};
use serde::{Deserialize, Serialize};
use walkdir::WalkDir;

/// The name of the default template.
/// This will be resolved to $TEMPLATE_DIR/$DEFAULT_TEMPLATE.hbs
@@ -91,11 +94,17 @@ impl From<Content> for PageValues {
}
}
}
#[cfg(feature = "server")]
#[derive(Serialize, Deserialize, Default, Debug)]
pub struct TemplateMeta {
read_pages_glob: Option<String>,
}

/// Renderer can execute a handlebars template and render the results into HTML.
#[cfg(feature = "server")]
pub struct Renderer<'a> {
pub template_dir: PathBuf,
pub template_meta: HashMap<String, TemplateMeta>,
pub theme_dir: Option<PathBuf>,
pub script_dir: PathBuf,
pub content_dir: PathBuf,
@@ -117,9 +126,11 @@ impl<'a> Renderer<'a> {
// Create custom rhai engine and assign to handlebars
let rhai_engine = custom_rhai_engine_init();
handlebars.set_engine(rhai_engine);
let template_meta = HashMap::new();

Renderer {
template_dir,
template_meta,
theme_dir,
script_dir,
content_dir,
@@ -147,8 +158,38 @@ impl<'a> Renderer<'a> {
self.handlebars
.register_templates_directory(".hbs", templates)?;
}
self.handlebars
.register_templates_directory(".hbs", &self.template_dir)?;
for entry in WalkDir::new(&self.template_dir)
.into_iter()
.filter_map(|e| e.ok())
{
if entry.file_type().is_file() && entry.path().extension().map_or(false, |e| e == "hbs")
{
let path = entry.path();
match read_file(path.to_str().unwrap()) {
Ok(contents) => {
// Split the optional TOML front matter from the template body.
let doc = contents.replace("\r\n", "\n");
let (toml_text, body) = doc.split_once("\n---\n").unwrap_or(("", &doc));
let toml_text = toml_text.trim_start_matches("---").trim();
let template_meta = toml::from_str(toml_text)?;
let filename = path
.file_name()
.unwrap_or_default()
.to_str()
.unwrap()
.replace(".hbs", "");
self.template_meta
.insert(filename.to_owned(), template_meta);
self.handlebars.register_template_string(&filename, body)?;
}
Err(err) => {
eprintln!("Error reading template {}: {}", entry.path().display(), err);
}
}
}
}
// self.handlebars
// .register_templates_directory(".hbs", &self.template_dir)?;
Ok(())
}

@@ -220,11 +261,26 @@ impl<'a> Renderer<'a> {
pages: match &info.index_site_pages {
Some(templates) => {
if templates.contains(&tpl) {
crate::content::all_pages(
self.content_dir.clone(),
self.show_unpublished,
self.disable_cache,
)?
let mut glob_pattern: Option<String> = None;
let template_meta = self.template_meta.get(&tpl);
if let Some(val) = template_meta {
if val.read_pages_glob.is_some() {
glob_pattern =
Some(val.read_pages_glob.as_ref().unwrap().to_owned());
}
}
match glob_pattern {
Some(pattern) => crate::content::get_pages_by_glob(
self.content_dir.clone(),
pattern,
self.show_unpublished,
)?,
None => crate::content::all_pages(
self.content_dir.clone(),
self.show_unpublished,
self.disable_cache,
)?,
}
} else {
BTreeMap::new()
}
@@ -295,3 +351,10 @@ pub fn error_values(title: &str, msg: &str) -> PageValues {
published: true,
}
}

fn read_file(file_path: &str) -> anyhow::Result<String> {
let mut file = File::open(file_path)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
Ok(contents)
}
