Skip to content

Commit

Permalink
More refactoring and a clearer function name
Browse files Browse the repository at this point in the history
Signed-off-by: karthik2804 <karthik.ganeshram@fermyon.com>
  • Loading branch information
karthik2804 committed Dec 5, 2023
1 parent 7b1e06d commit 8909f5b
Show file tree
Hide file tree
Showing 2 changed files with 66 additions and 59 deletions.
29 changes: 12 additions & 17 deletions src/content.rs
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ pub fn all_pages(

pub fn get_pages_by_glob(
dir: PathBuf,
globs: Vec<String>,
globs: &Vec<String>,
show_unpublished: bool,
) -> anyhow::Result<BTreeMap<String, PageValues>> {
let index_cache: IndexCache = pages_by_glob_load(dir, globs, show_unpublished)?;
Expand All @@ -180,11 +180,8 @@ pub fn get_pages_by_glob(

pub fn all_pages_load(dir: PathBuf, show_unpublished: bool) -> anyhow::Result<IndexCache> {
let files = all_files(dir)?;
let (contents, earliest_unpublished) = process_files(files, show_unpublished)?;
Ok(IndexCache {
contents,
cache_expiration: earliest_unpublished,
})
let index_cache = get_index_cache_from_files(files, show_unpublished)?;
Ok(index_cache)
}
pub struct IndexCache {
contents: BTreeMap<String, PageValues>,
Expand All @@ -197,7 +194,7 @@ pub struct IndexCache {
/// unpublished.
pub fn pages_by_glob_load(
dir: PathBuf,
glob_patterns: Vec<String>,
glob_patterns: &Vec<String>,
show_unpublished: bool,
) -> anyhow::Result<IndexCache> {
let mut files: Vec<PathBuf> = Vec::new();
Expand All @@ -213,11 +210,8 @@ pub fn pages_by_glob_load(
}
}
}
let (contents, earliest_unpublished) = process_files(files, show_unpublished)?;
Ok(IndexCache {
contents,
cache_expiration: earliest_unpublished,
})
let index_cache = get_index_cache_from_files(files, show_unpublished)?;
Ok(index_cache)
}

/// Fetch a list of paths to every file in the directory
Expand Down Expand Up @@ -422,12 +416,10 @@ impl FromStr for Content {
}
}

type ProcessFileReturn = (BTreeMap<String, PageValues>, Option<DateTime<Utc>>);

fn process_files(
fn get_index_cache_from_files(
files: Vec<PathBuf>,
show_unpublished: bool,
) -> Result<ProcessFileReturn, anyhow::Error> {
) -> Result<IndexCache, anyhow::Error> {
let mut contents = BTreeMap::new();
let mut contains_unpublished: bool = false;
let mut earliest_unpublished: Option<DateTime<Utc>> = None;
Expand Down Expand Up @@ -476,7 +468,10 @@ fn process_files(
}
}
}
Ok((contents, earliest_unpublished))
Ok(IndexCache {
contents,
cache_expiration: earliest_unpublished,
})
}

#[cfg(test)]
Expand Down
96 changes: 54 additions & 42 deletions src/template.rs
Original file line number Diff line number Diff line change
Expand Up @@ -165,26 +165,16 @@ impl<'a> Renderer<'a> {
{
if entry.file_type().is_file() && entry.path().extension().map_or(false, |e| e == "hbs")
{
let path = entry.path();
match read_file(path.to_str().unwrap()) {
Ok(contents) => {
// Do something with the file contents
let doc = contents.replace("\r\n", "\n");
let (toml_text, body) = doc.split_once("\n---\n").unwrap_or(("", &doc));
let toml_text = toml_text.trim_start_matches("---").trim();
let template_meta = toml::from_str(toml_text)?;
let filename = path
.file_name()
.unwrap_or_default()
.to_str()
.unwrap()
.replace(".hbs", "");
let filename = entry.file_name().to_str().unwrap().replace(".hbs", "");
let res = parse_hbs_template(entry);
match res {
Ok((template_meta, body)) => {
self.template_meta
.insert(filename.to_owned(), template_meta);
self.handlebars.register_template_string(&filename, body)?;
}
Err(err) => {
eprintln!("Error reading template {}: {}", entry.path().display(), err);
eprintln!("Error reading template {}: {}", filename, err);
}
}
}
Expand Down Expand Up @@ -262,34 +252,14 @@ impl<'a> Renderer<'a> {
pages: match &info.index_site_pages {
Some(templates) => {
if templates.contains(&tpl) {
let mut glob_pattern: Option<Vec<String>> = None;
let template_meta = self.template_meta.get(&tpl);
if let Some(val) = template_meta {
if val.read_pages_glob.is_some() {
glob_pattern =
Some(val.read_pages_glob.as_ref().unwrap().to_owned());
}
}
match glob_pattern {
Some(pattern) => {
let pages = crate::content::get_pages_by_glob(
self.content_dir.clone(),
pattern,
self.show_unpublished,
);
match pages {
Ok(val) => val,
Err(err) => {
bail!("Error parsing glob in template \"{tpl}\": {err}")
}
}
}
None => crate::content::all_pages(
self.content_dir.clone(),
self.show_unpublished,
self.disable_cache,
)?,
let mut glob_pattern: Option<&Vec<String>> = None;
let default_meta = TemplateMeta::default();
let template_meta =
self.template_meta.get(&tpl).unwrap_or(&default_meta);
if let Some(val) = &template_meta.read_pages_glob {
glob_pattern = Some(val);
}
self.create_site_pages_index(glob_pattern, &tpl)?
} else {
BTreeMap::new()
}
Expand All @@ -314,6 +284,32 @@ impl<'a> Renderer<'a> {
fn register_helpers(&mut self) {
handlebars_sprig::addhelpers(&mut self.handlebars)
}
fn create_site_pages_index(
&self,
glob_pattern: Option<&Vec<String>>,
tpl: &str,
) -> anyhow::Result<BTreeMap<String, PageValues>> {
match glob_pattern {
Some(pattern) => {
let pages = crate::content::get_pages_by_glob(
self.content_dir.clone(),
pattern,
self.show_unpublished,
);
match pages {
Ok(val) => Ok(val),
Err(err) => {
bail!("Error parsing glob in template \"{tpl}\": {err}")
}
}
}
None => crate::content::all_pages(
self.content_dir.clone(),
self.show_unpublished,
self.disable_cache,
),
}
}
}

/*
Expand Down Expand Up @@ -367,3 +363,19 @@ fn read_file(file_path: &str) -> anyhow::Result<String> {
file.read_to_string(&mut contents)?;
Ok(contents)
}

/// Parse a Handlebars template file into its front-matter metadata and body.
///
/// A template may begin with a TOML front-matter section delimited by `---`
/// markers; everything after the closing `\n---\n` is the template body.
/// When no delimiter is present, the entire file is treated as the body and
/// the metadata is parsed from an empty string (yielding defaults).
///
/// # Errors
/// Fails if the file cannot be read or the front matter is not valid TOML
/// for `TemplateMeta`.
fn parse_hbs_template(entry: walkdir::DirEntry) -> anyhow::Result<(TemplateMeta, String)> {
    let path = entry.path();
    // NOTE(review): `to_str().unwrap()` panics on non-UTF-8 paths — kept as-is
    // to preserve the existing interface, but worth hardening.
    match read_file(path.to_str().unwrap()) {
        Ok(contents) => {
            // Normalize Windows line endings so the `\n---\n` delimiter matches.
            let doc = contents.replace("\r\n", "\n");
            let (toml_text, body) = doc.split_once("\n---\n").unwrap_or(("", &doc));
            let toml_text = toml_text.trim_start_matches("---").trim();
            let template_meta = toml::from_str(toml_text)?;
            Ok((template_meta, body.to_owned()))
        }
        Err(err) => {
            // Bug fix: the original printed the literal word "entry" instead
            // of interpolating the failing template's path.
            bail!(
                "Failed to parse hbs template \"{}\": {err}",
                path.display()
            )
        }
    }
}

0 comments on commit 8909f5b

Please sign in to comment.