summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorLars Wirzenius <liw@liw.fi>2022-08-05 11:17:11 +0000
committerLars Wirzenius <liw@liw.fi>2022-08-05 11:17:11 +0000
commitc0b2e9b7d3caa27865e8c0f5dcf4ed7e9223f245 (patch)
treeb5fc9f97cee99cf2f8f17397988ee42af0398772
parent4abc61a4aeef01fc918957b48c4b618ad7109889 (diff)
parentc7b37b35c4530dc9ee79a4b79a46041ed682b183 (diff)
downloadriki-c0b2e9b7d3caa27865e8c0f5dcf4ed7e9223f245.tar.gz
Merge branch 'names' into 'main'
refactor: drop unused method PageMeta::name. See merge request larswirzenius/riki!33
-rw-r--r--riki.md12
-rw-r--r--src/bin/riki.rs17
-rw-r--r--src/lib.rs1
-rw-r--r--src/name.rs198
-rw-r--r--src/page.rs68
-rw-r--r--src/site.rs181
6 files changed, 347 insertions, 130 deletions
diff --git a/riki.md b/riki.md
index 7adb3ff..c5dc4c0 100644
--- a/riki.md
+++ b/riki.md
@@ -532,6 +532,18 @@ then stdout doesn't contain "#index.mdwn#"
## Output directory tree
+### No markdown files in output tree
+
+_Requirement: Markdown files are not copied to the output tree._
+
+~~~scenario
+given an installed riki
+given file site/index.mdwn from empty
+when I run riki build site output
+then file output/index.html exists
+then file output/index.mdwn does not exist
+~~~
+
### Output files have source file modification times
_Requirement: Files in the output directory have the same time stamp
diff --git a/src/bin/riki.rs b/src/bin/riki.rs
index 83d1a9c..70bf71c 100644
--- a/src/bin/riki.rs
+++ b/src/bin/riki.rs
@@ -2,6 +2,7 @@ use clap::{CommandFactory, FromArgMatches, Parser};
use git_testament::{git_testament, render_testament, GitModification};
use log::{debug, error, info};
use riki::error::SiteError;
+use riki::name::Name;
use riki::site::Site;
use riki::util::{canonicalize, copy_file_from_source, mkdir, set_mtime};
use std::error::Error;
@@ -145,16 +146,14 @@ impl Build {
} else {
page.to_html()?
};
- let output = page.meta().destination_filename(&destdir);
+ let output = page.meta().destination_filename();
debug!("writing: {}", output.display());
htmlpage.write(&output)?;
set_mtime(&output, page.meta().mtime())?;
}
- for file in site.files() {
- let input = site.input_filename(file)?;
- let output = site.output_filename(file)?;
- copy_file_from_source(&input, &output)?;
+ for file in site.files_only() {
+ copy_file_from_source(file.source_path(), file.destination_path())?;
}
Ok(())
@@ -173,10 +172,10 @@ impl List {
let srcdir = canonicalize(&self.srcdir)?;
let mut site = Site::new(&srcdir, &srcdir);
site.scan()?;
- let mut filenames = site.included_files().to_vec();
- filenames.sort_unstable();
- for filename in filenames {
- println!("{}", filename.display());
+ let mut names: Vec<&Name> = site.pages_and_files().collect();
+ names.sort_by_cached_key(|name| name.page_path());
+ for name in names {
+ println!("{}", name);
}
Ok(())
}
diff --git a/src/lib.rs b/src/lib.rs
index 2caefeb..b78fda2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,6 +7,7 @@
//! little slow. This crate implements a subset of the functionality of
//! ikiwiki in Rust, for speed.
+pub mod name;
pub mod directive;
pub mod error;
pub mod html;
diff --git a/src/name.rs b/src/name.rs
new file mode 100644
index 0000000..9cfca6b
--- /dev/null
+++ b/src/name.rs
@@ -0,0 +1,198 @@
+use crate::util::{join_subpath, make_path_absolute, make_path_relative_to};
+use std::fmt;
+use std::path::{Path, PathBuf};
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Name {
+ is_wikitext: bool,
+ src: PathBuf,
+ dest: PathBuf,
+ page: PathBuf,
+ page_name: String,
+}
+
+impl Name {
+ fn new(is_wikitext: bool, src: PathBuf, dest: PathBuf, page: PathBuf) -> Self {
+ let page_name = page
+ .file_name()
+ .unwrap_or_else(|| panic!("get filename from {}", page.display()))
+ .to_string_lossy()
+ .to_string();
+ Self {
+ is_wikitext,
+ src,
+ dest,
+ page,
+ page_name,
+ }
+ }
+
+ pub fn is_wikitext_page(&self) -> bool {
+ self.is_wikitext
+ }
+
+ pub fn source_path(&self) -> &Path {
+ &self.src
+ }
+
+ pub fn destination_path(&self) -> &Path {
+ &self.dest
+ }
+
+ pub fn page_path(&self) -> &Path {
+ &self.page
+ }
+
+ pub fn page_name(&self) -> &str {
+ &self.page_name
+ }
+}
+
+impl PartialEq<Name> for &Name {
+ fn eq(&self, other: &Name) -> bool {
+ self.src == other.src
+ }
+}
+
+impl fmt::Display for Name {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+ write!(f, "{}", self.src.display())
+ }
+}
+
+pub struct NameBuilder {
+ srcdir: PathBuf,
+ destdir: PathBuf,
+}
+
+impl NameBuilder {
+ pub fn new(srcdir: &Path, destdir: &Path) -> Self {
+ Self {
+ srcdir: srcdir.into(),
+ destdir: destdir.into(),
+ }
+ }
+
+ pub fn srcdir(&self) -> &Path {
+ &self.srcdir
+ }
+
+ pub fn destdir(&self) -> &Path {
+ &self.destdir
+ }
+
+ fn name(&self, path: &Path, ext: Option<&str>) -> Name {
+ assert!(path.starts_with(&self.srcdir));
+ let src = path.into();
+ let relative = make_path_relative_to(&self.srcdir, path);
+ let dest = join_subpath(&self.destdir, &relative);
+ let page = make_path_absolute(&relative);
+ if let Some(ext) = ext {
+ Name::new(true, src, dest.with_extension(ext), page.with_extension(""))
+ } else {
+ Name::new(false, src, dest, page)
+ }
+ }
+
+ pub fn page(&self, path: &Path) -> Name {
+ self.name(path, Some("html"))
+ }
+
+ pub fn file(&self, path: &Path) -> Name {
+ self.name(path, None)
+ }
+}
+
+#[derive(Default, Debug)]
+pub struct Names {
+ names: Vec<Name>,
+}
+
+impl Names {
+ pub fn insert(&mut self, name: Name) {
+ self.names.push(name);
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = &Name> {
+ self.names.iter()
+ }
+
+ pub fn source_paths(&self) -> impl Iterator<Item = &PathBuf> {
+ self.names.iter().map(|name| &name.src)
+ }
+
+ pub fn pages(&self) -> impl Iterator<Item = &Name> {
+ self.names.iter().filter(|name| name.is_wikitext_page())
+ }
+
+ pub fn files(&self) -> impl Iterator<Item = &Name> {
+ self.names.iter().filter(|name| !name.is_wikitext_page())
+ }
+
+ pub fn get_source_path(&self, path: &Path) -> Option<&Name> {
+ self.names.iter().find(|name| name.src == path)
+ }
+
+ pub fn get_page_path(&self, path: &Path) -> Option<&Name> {
+ self.names.iter().find(|name| name.page == path)
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::{Name, NameBuilder, Names};
+ use std::path::Path;
+
+ fn builder() -> NameBuilder {
+ NameBuilder::new(Path::new("/src"), Path::new("/dest"))
+ }
+
+ #[test]
+ fn builds_page_name() {
+ let name = builder().page(Path::new("/src/foo/bar.mdwn"));
+ assert_eq!(name.source_path(), Path::new("/src/foo/bar.mdwn"));
+ assert_eq!(name.destination_path(), Path::new("/dest/foo/bar.html"));
+ assert_eq!(name.page_path(), Path::new("/foo/bar"));
+ assert_eq!(name.page_name(), "bar");
+ }
+
+ #[test]
+ fn builds_file_name() {
+ let name = builder().file(Path::new("/src/foo/bar.jpg"));
+ assert_eq!(name.source_path(), Path::new("/src/foo/bar.jpg"));
+ assert_eq!(name.destination_path(), Path::new("/dest/foo/bar.jpg"));
+ assert_eq!(name.page_path(), Path::new("/foo/bar.jpg"));
+ assert_eq!(name.page_name(), "bar.jpg");
+ }
+
+ #[test]
+ fn names_is_empty_by_default() {
+ let names = Names::default();
+ assert!(names.names.is_empty());
+ }
+
+ #[test]
+ fn names_remembers_inserted() {
+ let mut names = Names::default();
+ let name = builder().page(Path::new("/src/foo/bar.mdwn"));
+ names.insert(name.clone());
+ assert_eq!(
+ names.get_source_path(Path::new("/src/foo/bar.mdwn")),
+ Some(&name)
+ );
+ assert_eq!(names.get_page_path(Path::new("/foo/bar")), Some(&name));
+ }
+
+ #[test]
+ fn names_remembers_inserted_pages_and_files() {
+ let mut names = Names::default();
+ let page = builder().page(Path::new("/src/foo/bar.mdwn"));
+ let file = builder().file(Path::new("/src/foo/bar.jpg"));
+ names.insert(page.clone());
+ names.insert(file.clone());
+ let pages: Vec<&Name> = names.pages().collect();
+ let files: Vec<&Name> = names.files().collect();
+ assert_eq!(pages, vec![page],);
+ assert_eq!(files, vec![file],);
+ }
+}
diff --git a/src/page.rs b/src/page.rs
index 41a8584..d3bc8e7 100644
--- a/src/page.rs
+++ b/src/page.rs
@@ -1,8 +1,9 @@
use crate::error::SiteError;
use crate::html::{parse, Content, Element, ElementTag, HtmlPage};
+use crate::name::Name;
use crate::parser::WikitextParser;
use crate::site::Site;
-use crate::util::{get_mtime, join_subpath, make_path_relative_to};
+use crate::util::get_mtime;
use crate::wikitext::Snippet;
use log::{info, trace};
use std::path::{Path, PathBuf};
@@ -19,21 +20,16 @@ impl WikitextPage {
Self { meta, wikitext }
}
- pub fn read(srcdir: &Path, filename: &Path) -> Result<Self, SiteError> {
- info!("input file: {}", filename.display());
- let relative = make_path_relative_to(srcdir, filename).with_extension("");
- let absolute = Path::new("/").join(&relative);
- let name = relative
- .file_name()
- .unwrap_or_else(|| panic!("get filename from {}", relative.display()))
- .to_string_lossy()
- .to_string();
- let data = std::fs::read(filename).map_err(|e| SiteError::FileRead(filename.into(), e))?;
- let wikitext = String::from_utf8(data).map_err(|e| SiteError::Utf8(filename.into(), e))?;
- let mtime = get_mtime(filename)?;
+ pub fn read(name: &Name) -> Result<Self, SiteError> {
+ info!("input file: {}", name);
+
+ let src = name.source_path();
+ let data = std::fs::read(&src).map_err(|e| SiteError::FileRead(src.into(), e))?;
+ let wikitext = String::from_utf8(data).map_err(|e| SiteError::Utf8(src.into(), e))?;
+ let mtime = get_mtime(src)?;
+
let meta = MetaBuilder::default()
- .name(name)
- .path(absolute)
+ .name(name.clone())
.mtime(mtime)
.build();
Ok(Self::new(meta, wikitext))
@@ -125,35 +121,24 @@ impl MarkdownPage {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct PageMeta {
- name: String,
+ name: Name,
title: Option<String>,
- path: PathBuf,
mtime: SystemTime,
}
impl PageMeta {
- fn new(name: String, title: Option<String>, path: PathBuf, mtime: SystemTime) -> Self {
+ fn new(name: Name, title: Option<String>, mtime: SystemTime) -> Self {
trace!(
- "PageMeta: name={:?} title={:?} path={:?} mtime={:?}",
+ "PageMeta: name={:?} title={:?} mtime={:?}",
name,
title,
- path,
mtime,
);
- Self {
- name,
- title,
- path,
- mtime,
- }
+ Self { name, title, mtime }
}
- pub fn destination_filename(&self, destdir: &Path) -> PathBuf {
- join_subpath(destdir, &self.path).with_extension("html")
- }
-
- pub fn name(&self) -> &str {
- &self.name
+ pub fn destination_filename(&self) -> PathBuf {
+ self.name.destination_path().into()
}
pub fn set_title(&mut self, title: String) {
@@ -165,12 +150,12 @@ impl PageMeta {
if let Some(title) = &self.title {
title
} else {
- &self.name
+ self.name.page_name()
}
}
pub fn path(&self) -> &Path {
- &self.path
+ self.name.page_path()
}
pub fn mtime(&self) -> SystemTime {
@@ -180,24 +165,22 @@ impl PageMeta {
#[derive(Debug, Default)]
pub struct MetaBuilder {
- name: String,
+ name: Option<Name>,
title: Option<String>,
- path: Option<PathBuf>,
mtime: Option<SystemTime>,
}
impl MetaBuilder {
pub fn build(self) -> PageMeta {
PageMeta::new(
- self.name,
+ self.name.expect("name set on MetaBuilder"),
self.title,
- self.path.expect("path set on MetaBuilder"),
self.mtime.expect("mtime set on MetaBuilder"),
)
}
- pub fn name(mut self, name: String) -> Self {
- self.name = name;
+ pub fn name(mut self, name: Name) -> Self {
+ self.name = Some(name);
self
}
@@ -206,11 +189,6 @@ impl MetaBuilder {
self
}
- pub fn path(mut self, path: PathBuf) -> Self {
- self.path = Some(path);
- self
- }
-
pub fn mtime(mut self, mtime: SystemTime) -> Self {
self.mtime = Some(mtime);
self
diff --git a/src/site.rs b/src/site.rs
index fd28a2b..2d1368d 100644
--- a/src/site.rs
+++ b/src/site.rs
@@ -1,23 +1,23 @@
use crate::error::SiteError;
+use crate::name::{Name, NameBuilder, Names};
use crate::page::{MarkdownPage, UnprocessedPage, WikitextPage};
use crate::parser::WikitextParser;
use crate::token::TokenPatterns;
-use crate::util::{join_subpath, make_path_absolute, make_path_relative_to, make_relative_link};
+use crate::util::make_relative_link;
use log::{debug, info, trace};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
pub struct Site {
+ builder: NameBuilder,
wikitext_pages: Vec<WikitextPage>,
unprocessed_pages: Vec<UnprocessedPage>,
markdown_pages: Vec<MarkdownPage>,
- files: Vec<PathBuf>,
- included_files: Vec<PathBuf>,
+ files: Names,
patterns: TokenPatterns,
- srcdir: PathBuf,
- destdir: PathBuf,
- pages: PageSet,
+ name_queue: Vec<Name>,
+ page_queue: PageSet,
}
impl Site {
@@ -29,55 +29,73 @@ impl Site {
P: AsRef<Path>,
{
Self {
+ builder: NameBuilder::new(srcdir.as_ref(), destdir.as_ref()),
wikitext_pages: vec![],
unprocessed_pages: vec![],
markdown_pages: vec![],
- files: vec![],
- included_files: vec![],
+ files: Names::default(),
patterns: TokenPatterns::default(),
- srcdir: srcdir.as_ref().into(),
- destdir: destdir.as_ref().into(),
- pages: PageSet::default(),
+ name_queue: vec![],
+ page_queue: PageSet::default(),
}
}
pub fn scan(&mut self) -> Result<(), SiteError> {
- for filename in Self::all_files(&self.srcdir)? {
- self.included_files
- .push(make_path_relative_to(&self.srcdir, &filename));
- if Self::is_markdown(&filename) {
- let page = WikitextPage::read(&self.srcdir, &filename)?;
- self.add_wikitextpage(page);
- } else if filename.is_file() || filename.is_symlink() {
- self.add_other_file(filename);
+ for name in self.all_files()? {
+ trace!("scan: name={}", name);
+ if name.is_wikitext_page() {
+ trace!("scan: it's a page");
+ self.name_queue.push(name);
+ } else {
+ trace!("scan: it's a non-page file");
+ let filename = name.source_path();
+ if filename.is_file() || filename.is_symlink() {
+ self.add_other_file(name);
+ }
}
}
Ok(())
}
- pub fn add_wikitextpage(&mut self, page: WikitextPage) {
+ fn add_wikitextpage(&mut self, page: WikitextPage) {
info!("add wikitext page {}", page.meta().path().display());
- self.pages.insert(&page);
+ self.page_queue.insert(&page);
self.wikitext_pages.push(page);
}
- pub fn add_other_file(&mut self, filename: PathBuf) {
- info!("add other file {}", filename.display());
- let filename = make_path_relative_to(&self.srcdir, &filename);
- let filename = make_path_absolute(&filename);
- self.files.push(filename);
+ fn add_other_file(&mut self, name: Name) {
+ info!("add other file {}", name);
+ self.files.insert(name);
}
pub fn process(&mut self) -> Result<(), SiteError> {
+ trace!("processing queues");
loop {
- if !self.process_wikipage()? && !self.process_unrocessed_page()? {
+ if !self.process_name()?
+ && !self.process_wikipage()?
+ && !self.process_unrocessed_page()?
+ {
+ trace!("processing queues done");
break;
}
}
Ok(())
}
- pub fn process_wikipage(&mut self) -> Result<bool, SiteError> {
+ fn process_name(&mut self) -> Result<bool, SiteError> {
+ if let Some(name) = self.name_queue.pop() {
+ debug!("loading wikitext page {}", name.source_path().display());
+ let page = WikitextPage::read(&name)?;
+ self.files.insert(name);
+ self.add_wikitextpage(page);
+ Ok(true)
+ } else {
+ trace!("name_queue was empty");
+ Ok(false)
+ }
+ }
+
+ fn process_wikipage(&mut self) -> Result<bool, SiteError> {
if let Some(page) = self.wikitext_pages.pop() {
debug!("processing wikitext page {}", page.meta().path().display());
let mut parser = WikitextParser::new(page.wikitext(), &self.patterns);
@@ -85,11 +103,12 @@ impl Site {
self.unprocessed_pages.push(page);
Ok(true)
} else {
+ trace!("wikitext_ages was empty");
Ok(false)
}
}
- pub fn process_unrocessed_page(&mut self) -> Result<bool, SiteError> {
+ fn process_unrocessed_page(&mut self) -> Result<bool, SiteError> {
if let Some(page) = self.unprocessed_pages.pop() {
debug!(
"processing unprocessed page {}",
@@ -99,6 +118,7 @@ impl Site {
self.markdown_pages.push(page);
Ok(true)
} else {
+ trace!("unprocessed_ages was empty");
Ok(false)
}
}
@@ -107,35 +127,38 @@ impl Site {
&self.markdown_pages
}
- pub fn files(&self) -> &[PathBuf] {
- &self.files
- }
-
- pub fn included_files(&self) -> &[PathBuf] {
- &self.included_files
- }
-
- pub fn input_filename(&self, filename: &Path) -> Result<PathBuf, SiteError> {
- Ok(join_subpath(&self.srcdir, filename))
+ pub fn files_only(&self) -> impl Iterator<Item = &Name> {
+ self.files.files()
}
- pub fn output_filename(&self, filename: &Path) -> Result<PathBuf, SiteError> {
- Ok(join_subpath(&self.destdir, filename))
+ pub fn pages_and_files(&self) -> impl Iterator<Item = &Name> {
+ self.files.iter().chain(self.name_queue.iter())
}
- fn all_files(root: &Path) -> Result<Vec<PathBuf>, SiteError> {
- let mut files = vec![];
+ fn all_files(&self) -> Result<Vec<Name>, SiteError> {
+ let mut names = vec![];
+ let root = self.builder.srcdir();
+ trace!("all_files: root={}", root.display());
for e in WalkDir::new(root) {
let e = e.map_err(|err| SiteError::WalkDir(root.to_path_buf(), err))?;
let path = e.path();
+ trace!("all_files: path={}", path.display());
if Self::is_excluded(path) {
debug!("exclude {}", path.display());
} else {
debug!("include {}", path.display());
- files.push(path.to_path_buf());
+ if Self::is_markdown(path) {
+ trace!("it's markdown");
+ names.push(self.builder.page(path));
+ } else if path.is_file() {
+ trace!("it's not markdown");
+ names.push(self.builder.file(path));
+ } else {
+ trace!("it's not a file");
+ }
}
}
- Ok(files)
+ Ok(names)
}
fn is_excluded(path: &Path) -> bool {
@@ -182,7 +205,7 @@ impl Site {
// Is target absolute?
if target.starts_with("/") {
- if let Some(path) = self.pages.get(target) {
+ if let Some(path) = self.page_queue.get(target) {
trace!("absolute target exists");
return Ok(path.into());
} else {
@@ -194,7 +217,7 @@ impl Site {
// Does a sub-page exist?
let path = page.join(target);
trace!("checking for subpage {}", path.display());
- if let Some(path) = self.pages.get(&path) {
+ if let Some(path) = self.page_queue.get(&path) {
trace!("subpage exists: {}", path.display());
return Ok(path.into());
}
@@ -207,7 +230,7 @@ impl Site {
parent.display(),
path.display()
);
- if let Some(path) = self.pages.get(path.as_path()) {
+ if let Some(path) = self.page_queue.get(path.as_path()) {
trace!("sibling page exists: {}", path.display());
return Ok(path.into());
}
@@ -224,8 +247,8 @@ impl Site {
}
fn file_exists(&self, filename: &Path) -> bool {
- for existing in self.files.iter() {
- if filename == existing {
+ for existing in self.files.files() {
+ if filename == existing.page_path() {
return true;
}
}
@@ -256,32 +279,37 @@ impl PageSet {
#[cfg(test)]
mod test {
- use super::{Site, SiteError, WikitextPage};
+ use super::{NameBuilder, Site, SiteError, WikitextPage};
use crate::page::MetaBuilder;
use std::{
path::{Path, PathBuf},
time::SystemTime,
};
+ fn site() -> Site {
+ Site::new("/src", "/dest")
+ }
+
+ fn builder() -> NameBuilder {
+ NameBuilder::new(Path::new("/src"), Path::new("/dest"))
+ }
+
fn page(path: &str) -> WikitextPage {
+ let name = builder().page(Path::new(path));
let mtime = SystemTime::now();
- let meta = MetaBuilder::default()
- .path(PathBuf::from(path))
- .mtime(mtime)
- .build();
+ let meta = MetaBuilder::default().name(name).mtime(mtime).build();
WikitextPage::new(meta, "".into())
}
#[test]
fn has_no_pages_initially() {
- let site = Site::new(".", ".");
- assert_eq!(site.markdown_pages().to_vec(), vec![]);
+ assert_eq!(site().markdown_pages().to_vec(), vec![]);
}
#[test]
fn absolute_link_resolves_to_link_relative_root_of_site() {
- let mut site = Site::new(".", ".");
- site.add_wikitextpage(page("/yo/yoyo"));
+ let mut site = site();
+ site.add_wikitextpage(page("/src/yo/yoyo"));
assert_eq!(
site.resolve("/foo/bar", "/yo/yoyo").unwrap(),
Path::new("../yo/yoyo")
@@ -290,11 +318,11 @@ mod test {
#[test]
fn link_to_missing_is_an_error() {
- let site = Site::new(".", ".");
- match site.resolve("/foo/bar", "yo") {
+ let site = site();
+ match site.resolve("/src/foo/bar", "yo") {
Err(SiteError::PageMissing(page, target)) => {
assert_eq!(target, PathBuf::from("yo"));
- assert_eq!(page, PathBuf::from("/foo/bar"));
+ assert_eq!(page, PathBuf::from("/src/foo/bar"));
}
_ => panic!("unexpected success"),
}
@@ -302,40 +330,40 @@ mod test {
#[test]
fn link_to_sibling_resolves_to_it() {
- let mut site = Site::new(".", ".");
- site.add_wikitextpage(page("/foo/yo"));
+ let mut site = site();
+ site.add_wikitextpage(page("/src/foo/yo"));
site.process().unwrap();
assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("yo"));
}
#[test]
fn link_using_other_casing_is_resolved() {
- let mut site = Site::new(".", ".");
- site.add_wikitextpage(page("/foo/yo"));
+ let mut site = site();
+ site.add_wikitextpage(page("/src/foo/yo"));
site.process().unwrap();
assert_eq!(site.resolve("/foo/bar", "YO").unwrap(), Path::new("yo"));
}
#[test]
fn link_to_sublpage_resolves_to_it() {
- let mut site = Site::new(".", ".");
- site.add_wikitextpage(page("/foo/bar/yo"));
+ let mut site = site();
+ site.add_wikitextpage(page("/src/foo/bar/yo"));
site.process().unwrap();
assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("bar/yo"));
}
#[test]
fn link_to_sublpage_resolves_to_it_and_not_sibling() {
- let mut site = Site::new(".", ".");
- site.add_wikitextpage(page("/foo/bar/yo"));
- site.add_wikitextpage(page("/foo/yo"));
+ let mut site = site();
+ site.add_wikitextpage(page("/src/foo/bar/yo"));
+ site.add_wikitextpage(page("/src/foo/yo"));
site.process().unwrap();
assert_eq!(site.resolve("/foo/bar", "yo").unwrap(), Path::new("bar/yo"));
}
#[test]
fn link_to_unrelated_subpage_is_an_error() {
- let mut site = Site::new(".", ".");
+ let mut site = site();
site.process().unwrap();
match site.resolve("/foo/bar", "yo/yoyo") {
Err(SiteError::PageMissing(page, target)) => {
@@ -348,8 +376,8 @@ mod test {
#[test]
fn link_to_subsubpage_resolves_to_it() {
- let mut site = Site::new(".", ".");
- site.add_wikitextpage(page("/foo/bar/yo/yoyo"));
+ let mut site = site();
+ site.add_wikitextpage(page("/src/foo/bar/yo/yoyo"));
site.process().unwrap();
assert_eq!(
site.resolve("/foo/bar", "yo/yoyo").unwrap(),
@@ -359,8 +387,9 @@ mod test {
#[test]
fn link_to_sibling_file_resolves_to_it() {
- let mut site = Site::new("/src", "/dest");
- site.add_other_file(PathBuf::from("/src/foo/bar.jpg"));
+ let mut site = site();
+ let name = builder().file(Path::new("/src/foo/bar.jpg"));
+ site.add_other_file(name);
site.process().unwrap();
assert_eq!(
site.resolve("/foo/bar", "bar.jpg").unwrap(),