summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author: Lars Wirzenius <liw@liw.fi> 2022-07-25 05:46:21 +0000
committer: Lars Wirzenius <liw@liw.fi> 2022-07-25 05:46:21 +0000
commita3e3f6b8056a6a9f985969b24253f30a934ca197 (patch)
tree75f0161612556a6635eb2c5f674fe90b03d239e8
parentb08f50789bec1f9f0d7b4aa45177838b3bd2d24c (diff)
parent8fac4f3e6420a8854ec22a006fddcc98af29ba83 (diff)
downloadriki-a3e3f6b8056a6a9f985969b24253f30a934ca197.tar.gz
Merge branch 'ignore' into 'main'
Exclude files from site. See merge request larswirzenius/riki!25
-rw-r--r--riki.md35
-rw-r--r--src/bin/riki.rs21
-rw-r--r--src/site.rs68
3 files changed, 101 insertions, 23 deletions
diff --git a/riki.md b/riki.md
index fa982b2..62093ff 100644
--- a/riki.md
+++ b/riki.md
@@ -445,3 +445,38 @@ then file output/index.html contains "<img src="img.jpg""
~~~{#jpeg .file}
This is a dummy JPEG image.
~~~
+
+## Source file tree
+
+### Listing source files
+
+_Requirement: source files can be listed._
+
+~~~scenario
+given an installed riki
+given file site/index.mdwn from empty
+given file site/img.jpg from empty
+when I run riki list site
+then stdout contains "img.jpg"
+then stdout contains "index.mdwn"
+~~~
+
+### Exclude unusual files
+
+_Requirement: files and directories that aren't meant to be part of
+the site content should be excluded._
+
+~~~scenario
+given an installed riki
+given file site/index.mdwn from empty
+given file site/img.jpg from empty
+given file site/.git/HEAD from empty
+given file site/index.mdwn~ from empty
+given file site/#index.mdwn# from empty
+when I run riki list site
+then stdout contains "img.jpg"
+then stdout contains "index.mdwn"
+then stdout doesn't contain ".git"
+then stdout doesn't contain "index.mdwn~"
+then stdout doesn't contain "#index.mdwn#"
+~~~
diff --git a/src/bin/riki.rs b/src/bin/riki.rs
index 9749e3b..1455b45 100644
--- a/src/bin/riki.rs
+++ b/src/bin/riki.rs
@@ -32,6 +32,7 @@ fn real_main() -> Result<(), SiteError> {
let args = Args::parse();
match args.command {
Command::Build(cmd) => cmd.run()?,
+ Command::List(cmd) => cmd.run()?,
}
info!("riki ends OK");
@@ -47,6 +48,7 @@ struct Args {
#[derive(Parser)]
enum Command {
Build(Build),
+ List(List)
}
#[derive(Parser)]
@@ -92,3 +94,22 @@ impl Build {
Ok(())
}
}
+
+#[derive(Parser)]
+struct List {
+ srcdir: PathBuf,
+}
+
+impl List {
+ fn run(&self) -> Result<(), SiteError> {
+ let srcdir = canonicalize(&self.srcdir)?;
+ let mut site = Site::new(&srcdir, &srcdir);
+ site.scan()?;
+ let mut filenames = site.included_files().to_vec();
+ filenames.sort_unstable();
+ for filename in filenames {
+ println!("{}", filename.display());
+ }
+ Ok(())
+ }
+}
diff --git a/src/site.rs b/src/site.rs
index 1a58801..48de58d 100644
--- a/src/site.rs
+++ b/src/site.rs
@@ -2,7 +2,7 @@ use crate::error::SiteError;
use crate::page::{MarkdownPage, UnprocessedPage, WikitextPage};
use crate::parser::WikitextParser;
use crate::token::TokenPatterns;
-use crate::util::{make_path_absolute, make_path_relative_to, make_relative_link, join_subpath};
+use crate::util::{join_subpath, make_path_absolute, make_path_relative_to, make_relative_link};
use log::{debug, info, trace};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
@@ -13,6 +13,7 @@ pub struct Site {
unprocessed_pages: Vec<UnprocessedPage>,
markdown_pages: Vec<MarkdownPage>,
files: Vec<PathBuf>,
+ included_files: Vec<PathBuf>,
patterns: TokenPatterns,
srcdir: PathBuf,
destdir: PathBuf,
@@ -20,6 +21,9 @@ pub struct Site {
}
impl Site {
+ const EXCLUDE_SUBSTRINGS: &'static [&'static str] = &[".git"];
+ const EXCLUDE_ENDS: &'static [&'static str] = &[".git", "~", "#"];
+
pub fn new<P>(srcdir: P, destdir: P) -> Self
where
P: AsRef<Path>,
@@ -29,6 +33,7 @@ impl Site {
unprocessed_pages: vec![],
markdown_pages: vec![],
files: vec![],
+ included_files: vec![],
patterns: TokenPatterns::default(),
srcdir: srcdir.as_ref().into(),
destdir: destdir.as_ref().into(),
@@ -38,6 +43,8 @@ impl Site {
pub fn scan(&mut self) -> Result<(), SiteError> {
for filename in Self::all_files(&self.srcdir)? {
+ self.included_files
+ .push(make_path_relative_to(&self.srcdir, &filename));
if Self::is_markdown(&filename) {
let page = WikitextPage::read(&self.srcdir, &filename)?;
self.add_wikitextpage(page);
@@ -104,6 +111,10 @@ impl Site {
&self.files
}
+ pub fn included_files(&self) -> &[PathBuf] {
+ &self.included_files
+ }
+
pub fn input_filename(&self, filename: &Path) -> Result<PathBuf, SiteError> {
Ok(join_subpath(&self.srcdir, filename))
}
@@ -114,14 +125,35 @@ impl Site {
fn all_files(root: &Path) -> Result<Vec<PathBuf>, SiteError> {
let mut files = vec![];
- for e in WalkDir::new(root) {
+ for e in WalkDir::new(root)
+ {
let e = e.map_err(|err| SiteError::WalkDir(root.to_path_buf(), err))?;
let path = e.path();
- files.push(path.to_path_buf());
+ if Self::is_excluded(path) {
+ debug!("exclude {}", path.display());
+ } else {
+ debug!("include {}", path.display());
+ files.push(path.to_path_buf());
+ }
}
Ok(files)
}
+ fn is_excluded(path: &Path) -> bool {
+ let path = path.to_string_lossy();
+ for pat in Self::EXCLUDE_ENDS {
+ if path.ends_with(pat) {
+ return true;
+ }
+ }
+ for pat in Self::EXCLUDE_SUBSTRINGS {
+ if path.contains(pat) {
+ return true;
+ }
+ }
+ false
+ }
+
fn is_markdown(path: &Path) -> bool {
if let Some(ext) = path.extension() {
ext == "mdwn"
@@ -171,7 +203,11 @@ impl Site {
// Does a sibling page or file exist?
if let Some(parent) = page.parent() {
let path = parent.join(target);
- trace!("checking for sibling in {}: {}", parent.display(), path.display());
+ trace!(
+ "checking for sibling in {}: {}",
+ parent.display(),
+ path.display()
+ );
if let Some(path) = self.pages.get(path.as_path()) {
trace!("sibling page exists: {}", path.display());
return Ok(path.into());
@@ -215,9 +251,7 @@ impl PageSet {
}
fn normalize(path: &Path) -> String {
- path.to_str()
- .expect("path is UTF8")
- .to_lowercase()
+ path.to_str().expect("path is UTF8").to_lowercase()
}
}
@@ -265,10 +299,7 @@ mod test {
let mut site = Site::new(".", ".");
site.add_wikitextpage(page("/foo/yo"));
site.process().unwrap();
- assert_eq!(
- site.resolve("/foo/bar", "yo").unwrap(),
- Path::new("yo")
- );
+ assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("yo"));
}
#[test]
@@ -276,10 +307,7 @@ mod test {
let mut site = Site::new(".", ".");
site.add_wikitextpage(page("/foo/yo"));
site.process().unwrap();
- assert_eq!(
- site.resolve("/foo/bar", "YO").unwrap(),
- Path::new("yo")
- );
+ assert_eq!(site.resolve("/foo/bar", "YO").unwrap(), Path::new("yo"));
}
#[test]
@@ -287,10 +315,7 @@ mod test {
let mut site = Site::new(".", ".");
site.add_wikitextpage(page("/foo/bar/yo"));
site.process().unwrap();
- assert_eq!(
- site.resolve("/foo/bar", "yo").unwrap(),
- Path::new("bar/yo")
- );
+ assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("bar/yo"));
}
#[test]
@@ -299,10 +324,7 @@ mod test {
site.add_wikitextpage(page("/foo/bar/yo"));
site.add_wikitextpage(page("/foo/yo"));
site.process().unwrap();
- assert_eq!(
- site.resolve("/foo/bar", "yo").unwrap(),
- Path::new("bar/yo")
- );
+ assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("bar/yo"));
}
#[test]