From bd08b37b8bd429e0e5c5817fb912fbc22566dcbf Mon Sep 17 00:00:00 2001
From: Lars Wirzenius
Date: Mon, 25 Jul 2022 08:13:52 +0300
Subject: feat: "riki list" to list source files for a site

Sponsored-by: author
---
 riki.md         | 24 ++++++++++++++++++++++++
 src/bin/riki.rs | 21 +++++++++++++++++++++
 src/site.rs     |  7 +++++++
 3 files changed, 52 insertions(+)

diff --git a/riki.md b/riki.md
index fa982b2..00637dc 100644
--- a/riki.md
+++ b/riki.md
@@ -445,3 +445,27 @@ then file output/index.html contains "
diff --git a/src/bin/riki.rs b/src/bin/riki.rs
--- a/src/bin/riki.rs
+++ b/src/bin/riki.rs
@@ ... @@ fn main() -> Result<(), SiteError> {
     let args = Args::parse();
     match args.command {
         Command::Build(cmd) => cmd.run()?,
+        Command::List(cmd) => cmd.run()?,
     }
     info!("riki ends OK");
@@ -47,6 +48,7 @@ struct Args {
 #[derive(Parser)]
 enum Command {
     Build(Build),
+    List(List)
 }
 
 #[derive(Parser)]
@@ -92,3 +94,22 @@ impl Build {
         Ok(())
     }
 }
+
+#[derive(Parser)]
+struct List {
+    srcdir: PathBuf,
+}
+
+impl List {
+    fn run(&self) -> Result<(), SiteError> {
+        let srcdir = canonicalize(&self.srcdir)?;
+        let mut site = Site::new(&srcdir, &srcdir);
+        site.scan()?;
+        let mut filenames = site.included_files().to_vec();
+        filenames.sort_unstable();
+        for filename in filenames {
+            println!("{}", filename.display());
+        }
+        Ok(())
+    }
+}
diff --git a/src/site.rs b/src/site.rs
index 1a58801..1d1de60 100644
--- a/src/site.rs
+++ b/src/site.rs
@@ -13,6 +13,7 @@ pub struct Site {
     unprocessed_pages: Vec<UnprocessedPage>,
     markdown_pages: Vec<MarkdownPage>,
     files: Vec<PathBuf>,
+    included_files: Vec<PathBuf>,
     patterns: TokenPatterns,
     srcdir: PathBuf,
     destdir: PathBuf,
@@ -29,6 +30,7 @@ impl Site {
             unprocessed_pages: vec![],
             markdown_pages: vec![],
             files: vec![],
+            included_files: vec![],
             patterns: TokenPatterns::default(),
             srcdir: srcdir.as_ref().into(),
             destdir: destdir.as_ref().into(),
@@ -38,6 +40,7 @@ impl Site {
 
     pub fn scan(&mut self) -> Result<(), SiteError> {
         for filename in Self::all_files(&self.srcdir)? {
+            self.included_files.push(make_path_relative_to(&self.srcdir, &filename));
             if Self::is_markdown(&filename) {
                 let page = WikitextPage::read(&self.srcdir, &filename)?;
                 self.add_wikitextpage(page);
@@ -104,6 +107,10 @@ impl Site {
         &self.files
     }
 
+    pub fn included_files(&self) -> &[PathBuf] {
+        &self.included_files
+    }
+
     pub fn input_filename(&self, filename: &Path) -> Result<PathBuf, SiteError> {
         Ok(join_subpath(&self.srcdir, filename))
     }
-- 
cgit v1.2.1


From 8fac4f3e6420a8854ec22a006fddcc98af29ba83 Mon Sep 17 00:00:00 2001
From: Lars Wirzenius
Date: Mon, 25 Jul 2022 08:45:10 +0300
Subject: feat: exclude unwanted files from site

Sponsored-by: author
---
 riki.md     | 11 +++++++++++
 src/site.rs | 63 +++++++++++++++++++++++++++++++++++++++++++++------------------------
 2 files changed, 50 insertions(+), 24 deletions(-)

diff --git a/riki.md b/riki.md
index 00637dc..62093ff 100644
--- a/riki.md
+++ b/riki.md
@@ -468,4 +468,15 @@ the site content should be excluded._
 
 ~~~scenario
 given an installed riki
+given file site/index.mdwn from empty
+given file site/img.jpg from empty
+given file site/.git/HEAD from empty
+given file site/index.mdwn~ from empty
+given file site/#index.mdwn# from empty
+when I run riki list site
+then stdout contains "img.jpg"
+then stdout contains "index.mdwn"
+then stdout doesn't contain ".git"
+then stdout doesn't contain "index.mdwn~"
+then stdout doesn't contain "#index.mdwn#"
 ~~~
diff --git a/src/site.rs b/src/site.rs
index 1d1de60..48de58d 100644
--- a/src/site.rs
+++ b/src/site.rs
@@ -2,7 +2,7 @@ use crate::error::SiteError;
 use crate::page::{MarkdownPage, UnprocessedPage, WikitextPage};
 use crate::parser::WikitextParser;
 use crate::token::TokenPatterns;
-use crate::util::{make_path_absolute, make_path_relative_to, make_relative_link, join_subpath};
+use crate::util::{join_subpath, make_path_absolute, make_path_relative_to, make_relative_link};
 use log::{debug, info, trace};
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
@@ -21,6 +21,9 @@ pub struct Site {
 }
 
 impl Site {
+    const EXCLUDE_SUBSTRINGS: &'static [&'static str] = &[".git"];
+    const EXCLUDE_ENDS: &'static [&'static str] = &[".git", "~", "#"];
+
     pub fn new<P>(srcdir: P, destdir: P) -> Self
     where
         P: AsRef<Path>,
@@ -40,7 +43,8 @@ impl Site {
 
     pub fn scan(&mut self) -> Result<(), SiteError> {
         for filename in Self::all_files(&self.srcdir)? {
-            self.included_files.push(make_path_relative_to(&self.srcdir, &filename));
+            self.included_files
+                .push(make_path_relative_to(&self.srcdir, &filename));
             if Self::is_markdown(&filename) {
                 let page = WikitextPage::read(&self.srcdir, &filename)?;
                 self.add_wikitextpage(page);
@@ -121,14 +125,35 @@ impl Site {
 
     fn all_files(root: &Path) -> Result<Vec<PathBuf>, SiteError> {
         let mut files = vec![];
-        for e in WalkDir::new(root) {
+        for e in WalkDir::new(root)
+        {
             let e = e.map_err(|err| SiteError::WalkDir(root.to_path_buf(), err))?;
             let path = e.path();
-            files.push(path.to_path_buf());
+            if Self::is_excluded(path) {
+                debug!("exclude {}", path.display());
+            } else {
+                debug!("include {}", path.display());
+                files.push(path.to_path_buf());
+            }
         }
         Ok(files)
     }
 
+    fn is_excluded(path: &Path) -> bool {
+        let path = path.to_string_lossy();
+        for pat in Self::EXCLUDE_ENDS {
+            if path.ends_with(pat) {
+                return true;
+            }
+        }
+        for pat in Self::EXCLUDE_SUBSTRINGS {
+            if path.contains(pat) {
+                return true;
+            }
+        }
+        false
+    }
+
     fn is_markdown(path: &Path) -> bool {
         if let Some(ext) = path.extension() {
             ext == "mdwn"
@@ -178,7 +203,11 @@ impl Site {
         // Does a sibling page or file exist?
         if let Some(parent) = page.parent() {
             let path = parent.join(target);
-            trace!("checking for sibling in {}: {}", parent.display(), path.display());
+            trace!(
+                "checking for sibling in {}: {}",
+                parent.display(),
+                path.display()
+            );
             if let Some(path) = self.pages.get(path.as_path()) {
                 trace!("sibling page exists: {}", path.display());
                 return Ok(path.into());
@@ -222,9 +251,7 @@ impl PageSet {
     }
 
     fn normalize(path: &Path) -> String {
-        path.to_str()
-            .expect("path is UTF8")
-            .to_lowercase()
+        path.to_str().expect("path is UTF8").to_lowercase()
     }
 }
 
@@ -272,10 +299,7 @@ mod test {
         let mut site = Site::new(".", ".");
         site.add_wikitextpage(page("/foo/yo"));
         site.process().unwrap();
-        assert_eq!(
-            site.resolve("/foo/bar", "yo").unwrap(),
-            Path::new("yo")
-        );
+        assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("yo"));
     }
 
     #[test]
@@ -283,10 +307,7 @@ mod test {
         let mut site = Site::new(".", ".");
         site.add_wikitextpage(page("/foo/yo"));
         site.process().unwrap();
-        assert_eq!(
-            site.resolve("/foo/bar", "YO").unwrap(),
-            Path::new("yo")
-        );
+        assert_eq!(site.resolve("/foo/bar", "YO").unwrap(), Path::new("yo"));
     }
 
     #[test]
@@ -294,10 +315,7 @@ mod test {
         let mut site = Site::new(".", ".");
         site.add_wikitextpage(page("/foo/bar/yo"));
         site.process().unwrap();
-        assert_eq!(
-            site.resolve("/foo/bar", "yo").unwrap(),
-            Path::new("bar/yo")
-        );
+        assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("bar/yo"));
     }
 
     #[test]
@@ -306,10 +324,7 @@ mod test {
         let mut site = Site::new(".", ".");
         site.add_wikitextpage(page("/foo/bar/yo"));
         site.add_wikitextpage(page("/foo/yo"));
         site.process().unwrap();
-        assert_eq!(
-            site.resolve("/foo/bar", "yo").unwrap(),
-            Path::new("bar/yo")
-        );
+        assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("bar/yo"));
     }
 
     #[test]
-- 
cgit v1.2.1
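
Editor's note: the exclusion rule added by the second patch is small enough to try out on its own. The sketch below is not part of riki; the constants and the loop logic mirror `Site::is_excluded` from the patch above, while the free function, the file list, and the printed labels are only for this illustration.

~~~rust
use std::path::Path;

// Exclusion lists copied from the patch: suffixes and substrings that
// mark a path as not being site content.
const EXCLUDE_SUBSTRINGS: &[&str] = &[".git"];
const EXCLUDE_ENDS: &[&str] = &[".git", "~", "#"];

// Standalone mirror of the logic in Site::is_excluded above.
fn is_excluded(path: &Path) -> bool {
    let path = path.to_string_lossy();
    for pat in EXCLUDE_ENDS {
        if path.ends_with(pat) {
            return true;
        }
    }
    for pat in EXCLUDE_SUBSTRINGS {
        if path.contains(pat) {
            return true;
        }
    }
    false
}

fn main() {
    // The file names from the riki.md scenario in the second patch.
    let names = [
        "site/index.mdwn",
        "site/img.jpg",
        "site/.git/HEAD",
        "site/index.mdwn~",
        "site/#index.mdwn#",
    ];
    for name in names {
        let verdict = if is_excluded(Path::new(name)) {
            "exclude"
        } else {
            "include"
        };
        println!("{} {}", verdict, name);
    }
}
~~~

Run as is, it prints "include" for index.mdwn and img.jpg and "exclude" for the .git, backup (~), and autosave (#) files, which matches the "then stdout" expectations in the scenario.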
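The same expectations could also be checked as in-crate unit tests rather than only through the subplot scenario. The following is a guess at what that might look like, not something from the patches: the module and test names are invented, and the sketch assumes it lives in src/site.rs next to the existing `mod test`, so that the private `Site::is_excluded` is reachable from a child module.

~~~rust
#[cfg(test)]
mod exclude_tests {
    use super::Site;
    use std::path::Path;

    // Paths taken from the riki.md scenario in the second patch.
    #[test]
    fn excludes_git_backup_and_autosave_files() {
        assert!(Site::is_excluded(Path::new("site/.git/HEAD")));
        assert!(Site::is_excluded(Path::new("site/index.mdwn~")));
        assert!(Site::is_excluded(Path::new("site/#index.mdwn#")));
    }

    #[test]
    fn keeps_page_sources_and_other_files() {
        assert!(!Site::is_excluded(Path::new("site/index.mdwn")));
        assert!(!Site::is_excluded(Path::new("site/img.jpg")));
    }
}
~~~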