author    Lars Wirzenius <liw@liw.fi>  2022-07-17 11:49:29 +0300
committer Lars Wirzenius <liw@liw.fi>  2022-07-17 14:08:53 +0300
commit    9095e13c0828a9469ce1727e89a53bff95e6062c (patch)
tree      77d4873ddb4e57d4cc3c2cf81a4630b6a73f52b8
parent    13ef8ece3f7a85c488be1604ee63fec486a07b8a (diff)
download  riki-9095e13c0828a9469ce1727e89a53bff95e6062c.tar.gz
feat: resolve links to other pages
This is meant to implement the rules described on
https://ikiwiki.info/ikiwiki/subpage/linkingrules/.

Sponsored-by: author
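The linking rules being implemented, in short: an absolute target (leading `/`) is taken as-is; otherwise a subpage of the linking page wins, then a sibling, and finally the target falls back to an absolute path. A minimal standalone sketch of that order (the real implementation is `Site::resolve_helper` in src/site.rs below; the free function and the `known` page set here are illustrative):

~~~rust
use std::collections::HashSet;
use std::path::{Path, PathBuf};

// Resolution order from ikiwiki's subpage linking rules; `known`
// stands in for the set of pages the site has scanned.
fn resolve(known: &HashSet<PathBuf>, page: &Path, target: &Path) -> PathBuf {
    // 1. An absolute target is used unchanged.
    if target.starts_with("/") {
        return target.to_path_buf();
    }
    // 2. Prefer a subpage of the linking page...
    let subpage = page.join(target);
    if known.contains(&subpage) {
        return subpage;
    }
    // 3. ...then a sibling (a subpage of the linking page's parent).
    if let Some(parent) = page.parent() {
        let sibling = parent.join(target);
        if known.contains(&sibling) {
            return sibling;
        }
    }
    // 4. Nothing matched: treat the target as absolute.
    PathBuf::from("/").join(target)
}
~~~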
-rw-r--r--  riki.md          28
-rw-r--r--  src/bin/riki.rs   2
-rw-r--r--  src/page.rs      43
-rw-r--r--  src/site.rs     141
-rw-r--r--  src/wikitext.rs  14
5 files changed, 194 insertions(+), 34 deletions(-)
diff --git a/riki.md b/riki.md
index 23f430e..40ea5e7 100644
--- a/riki.md
+++ b/riki.md
@@ -363,3 +363,31 @@ when I run riki build site output
then AST of site/foo/page.mdwn matches that of output/foo/page.html
then files site/bar//image.jpg and output/bar/image.jpg match
~~~
+
+## Wiki links to other pages on the site
+
+_Requirement: Pages can link to other pages on the site, the same
+way ikiwiki does, including subpages._
+
+~~~scenario
+given an installed riki
+given file site/dir/foo.mdwn from foo
+given file site/absolute.mdwn from empty
+given file site/dir/sibling.mdwn from empty
+given file site/dir/foo/child.mdwn from empty
+given file site/dir/foo/child/grandchild.mdwn from empty
+when I run riki build site output
+then file output/dir/foo.html contains "href="/absolute""
+then file output/dir/foo.html contains "href="dir/sibling""
+then file output/dir/foo.html contains "href="dir/foo/child""
+then file output/dir/foo.html contains "href="dir/foo/child/grandchild""
+then file output/dir/foo.html contains "href="/missing""
+~~~
+
+~~~{#foo .file .markdown}
+[[/absolute]]
+[[sibling]]
+[[child]]
+[[child/grandchild]]
+[[missing]]
+~~~
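In terms of the new `Site::resolve` API added in src/site.rs below, the sibling case of this scenario corresponds to roughly the following (a sketch reusing the `page` test helper defined in the src/site.rs test module):

~~~rust
// "dir/foo" links to "sibling"; since "dir/sibling" exists, the link
// resolves to the sibling rather than falling back to "/sibling".
let mut site = Site::new("site", "output");
site.add_wikitextpage(page("dir/sibling"));
site.process().unwrap();
assert_eq!(site.resolve("dir/foo", "sibling"), Path::new("dir/sibling"));
~~~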
diff --git a/src/bin/riki.rs b/src/bin/riki.rs
index afc9fbd..ea3b3dc 100644
--- a/src/bin/riki.rs
+++ b/src/bin/riki.rs
@@ -64,7 +64,7 @@ impl Build {
let destdir = canonicalize(&self.destdir)?;
debug!("destdir={}", destdir.display());
- let mut site = Site::new(&srcdir, &destdir)?;
+ let mut site = Site::new(&srcdir, &destdir);
site.scan()?;
site.process()?;
debug!("markdown file count: {}", site.markdown_pages().len());
diff --git a/src/page.rs b/src/page.rs
index 1de439b..5c379ba 100644
--- a/src/page.rs
+++ b/src/page.rs
@@ -1,36 +1,36 @@
use crate::error::SiteError;
use crate::html::{parse, Element, ElementTag, HtmlPage};
+use crate::site::Site;
use crate::wikitext::{Snippet, WikitextParser};
-use log::info;
+use log::{info, trace};
use std::path::{Path, PathBuf};
-#[derive(Debug)]
+#[derive(Debug, Eq, PartialEq)]
pub struct WikitextPage {
meta: PageMeta,
wikitext: String,
}
impl WikitextPage {
+ pub fn new(meta: PageMeta, wikitext: String) -> Self {
+ Self { meta, wikitext }
+ }
+
pub fn read(srcdir: &Path, filename: &Path) -> Result<Self, SiteError> {
info!("input file: {}", filename.display());
let relative = filename
.strip_prefix(&srcdir)
.unwrap_or_else(|_| panic!("get stem from {}", filename.display()))
.with_extension("");
- let name = relative.file_name()
+ let name = relative
+ .file_name()
.unwrap_or_else(|| panic!("get filename from {}", relative.display()))
.to_string_lossy()
.to_string();
let data = std::fs::read(filename).map_err(|e| SiteError::FileRead(filename.into(), e))?;
- let data = String::from_utf8(data).map_err(|e| SiteError::Utf8(filename.into(), e))?;
- let meta = MetaBuilder::default()
- .name(name)
- .path(relative)
- .build();
- Ok(Self {
- meta,
- wikitext: data,
- })
+ let wikitext = String::from_utf8(data).map_err(|e| SiteError::Utf8(filename.into(), e))?;
+ let meta = MetaBuilder::default().name(name).path(relative).build();
+ Ok(Self::new(meta, wikitext))
}
pub fn meta(&self) -> &PageMeta {
@@ -70,17 +70,17 @@ impl UnprocessedPage {
snippets
}
- pub fn process(&self) -> MarkdownPage {
+ pub fn process(&self, site: &Site) -> MarkdownPage {
let mut meta = self.meta.clone();
let mut m = String::new();
for snippet in self.snippets.iter() {
- m.push_str(&snippet.process(&mut meta));
+ m.push_str(&snippet.process(site, &mut meta));
}
MarkdownPage::new(m, meta)
}
}
-#[derive(Debug)]
+#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MarkdownPage {
meta: PageMeta,
markdown: String,
@@ -106,7 +106,7 @@ impl MarkdownPage {
}
}
-#[derive(Debug, Default, Clone)]
+#[derive(Debug, Clone, Eq, PartialEq)]
pub struct PageMeta {
name: String,
title: Option<String>,
@@ -114,6 +114,11 @@ pub struct PageMeta {
}
impl PageMeta {
+ fn new(name: String, title: Option<String>, path: PathBuf) -> Self {
+ trace!("PageMeta: name={:?} title={:?} path={:?}", name, title, path);
+ Self { name, title, path }
+ }
+
pub fn destination_filename(&self, destdir: &Path) -> PathBuf {
destdir.join(&self.path).with_extension("html")
}
@@ -148,11 +153,7 @@ pub struct MetaBuilder {
impl MetaBuilder {
pub fn build(self) -> PageMeta {
- PageMeta {
- name: self.name,
- title: self.title,
- path: self.path.expect("path set on MetaBuilder"),
- }
+ PageMeta::new(self.name, self.title, self.path.expect("path set on MetaBuilder"))
}
pub fn name(mut self, name: String) -> Self {
diff --git a/src/site.rs b/src/site.rs
index a2257d9..377b87c 100644
--- a/src/site.rs
+++ b/src/site.rs
@@ -1,7 +1,8 @@
use crate::error::SiteError;
use crate::page::{MarkdownPage, UnprocessedPage, WikitextPage};
use crate::wikitext::WikitextParser;
-use log::{debug, trace};
+use log::{info, trace};
+use std::collections::HashSet;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
@@ -13,14 +14,15 @@ pub struct Site {
parser: WikitextParser,
srcdir: PathBuf,
destdir: PathBuf,
+ pages: HashSet<PathBuf>,
}
impl Site {
- pub fn new<P>(srcdir: P, destdir: P) -> Result<Self, SiteError>
+ pub fn new<P>(srcdir: P, destdir: P) -> Self
where
P: AsRef<Path>,
{
- Ok(Self {
+ Self {
wikitext_pages: vec![],
unprocessed_pages: vec![],
markdown_pages: vec![],
@@ -28,11 +30,11 @@ impl Site {
parser: WikitextParser::default(),
srcdir: srcdir.as_ref().into(),
destdir: destdir.as_ref().into(),
- })
+ pages: HashSet::new(),
+ }
}
- pub fn scan(&mut self) -> Result<(), SiteError>
- {
+ pub fn scan(&mut self) -> Result<(), SiteError> {
for filename in Self::all_files(&self.srcdir)? {
if Self::is_markdown(&filename) {
let page = WikitextPage::read(&self.srcdir, &filename)?;
@@ -45,10 +47,13 @@ impl Site {
}
pub fn add_wikitextpage(&mut self, page: WikitextPage) {
+ info!("add wikitext page {}", page.meta().path().display());
+ self.pages.insert(page.meta().path().to_path_buf());
self.wikitext_pages.push(page);
}
pub fn add_other_file(&mut self, filename: PathBuf) {
+ info!("add other file {}", filename.display());
self.files.push(filename);
}
@@ -63,7 +68,7 @@ impl Site {
pub fn process_wikipage(&mut self) -> Result<bool, SiteError> {
if let Some(page) = self.wikitext_pages.pop() {
- trace!("processing wikitext page {}", page.meta().name());
+ trace!("processing wikitext page {}", page.meta().path().display());
let page = UnprocessedPage::new(page.meta().clone(), page.wikitext(), &self.parser);
self.unprocessed_pages.push(page);
Ok(true)
@@ -74,8 +79,11 @@ impl Site {
pub fn process_unrocessed_page(&mut self) -> Result<bool, SiteError> {
if let Some(page) = self.unprocessed_pages.pop() {
- trace!("processing unprocessed page {}", page.meta().name());
- let page = page.process();
+ trace!(
+ "processing unprocessed page {}",
+ page.meta().path().display()
+ );
+ let page = page.process(self);
self.markdown_pages.push(page);
Ok(true)
} else {
@@ -104,7 +112,6 @@ impl Site {
for e in WalkDir::new(root) {
let e = e.map_err(|err| SiteError::WalkDir(root.to_path_buf(), err))?;
let path = e.path();
- debug!("found file: {}", path.display());
files.push(path.to_path_buf());
}
Ok(files)
@@ -117,4 +124,118 @@ impl Site {
false
}
}
+
+ pub fn resolve<P: AsRef<Path>>(&self, page: P, target: P) -> PathBuf {
+ let page = page.as_ref();
+ let target = target.as_ref();
+ let resolved = self.resolve_helper(page, target);
+ trace!(
+ "resolve: page={}, target={} -> {}",
+ page.display(),
+ target.display(),
+ resolved.display()
+ );
+ resolved
+ }
+
+ fn resolve_helper(&self, page: &Path, target: &Path) -> PathBuf {
+ // Is target absolute?
+ if target.starts_with("/") {
+ return target.to_path_buf();
+ }
+
+ // Does a sub-page exist?
+ let path = page.join(target);
+ if self.have_page(path.as_path()) {
+ return path;
+ }
+
+ // Does a sibling exist?
+ if let Some(parent) = page.parent() {
+ let path = parent.join(target);
+ if self.have_page(path.as_path()) {
+ return path;
+ }
+ }
+
+ // Nothing else worked, so make the target absolute.
+ PathBuf::from("/").join(target)
+ }
+
+ fn have_page(&self, path: &Path) -> bool {
+ self.pages.contains(path)
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::{Site, WikitextPage};
+ use crate::page::MetaBuilder;
+ use std::path::{Path, PathBuf};
+
+ fn page(path: &str) -> WikitextPage {
+ let meta = MetaBuilder::default().path(PathBuf::from(path)).build();
+ WikitextPage::new(meta, "".into())
+ }
+
+ #[test]
+ fn has_no_pages_initially() {
+ let site = Site::new(".", ".");
+ assert_eq!(site.markdown_pages().to_vec(), vec![]);
+ }
+
+ #[test]
+ fn absolute_link_resolves_to_absolute() {
+ let site = Site::new(".", ".");
+ assert_eq!(site.resolve("/foo/bar", "/yo/yoyo"), Path::new("/yo/yoyo"));
+ }
+
+ #[test]
+ fn link_to_missing_resolves_to_absolute() {
+ let site = Site::new(".", ".");
+ assert_eq!(site.resolve("/foo/bar", "yo"), Path::new("/yo"));
+ }
+
+ #[test]
+ fn link_to_sibling_resolves_to_it() {
+ let mut site = Site::new(".", ".");
+ site.add_wikitextpage(page("/foo/yo"));
+ site.process().unwrap();
+ assert_eq!(site.resolve("/foo/bar", "yo"), Path::new("/foo/yo"));
+ }
+
+ #[test]
+ fn link_to_subpage_resolves_to_it() {
+ let mut site = Site::new(".", ".");
+ site.add_wikitextpage(page("/foo/bar/yo"));
+ site.process().unwrap();
+ assert_eq!(site.resolve("/foo/bar", "yo"), Path::new("/foo/bar/yo"));
+ }
+
+ #[test]
+ fn link_to_subpage_resolves_to_it_and_not_sibling() {
+ let mut site = Site::new(".", ".");
+ site.add_wikitextpage(page("/foo/bar/yo"));
+ site.add_wikitextpage(page("/foo/yo"));
+ site.process().unwrap();
+ assert_eq!(site.resolve("/foo/bar", "yo"), Path::new("/foo/bar/yo"));
+ }
+
+ #[test]
+ fn link_to_unrelated_subpage_resolves_absolute() {
+ let mut site = Site::new(".", ".");
+ site.process().unwrap();
+ assert_eq!(site.resolve("/foo/bar", "yo/yoyo"), Path::new("/yo/yoyo"));
+ }
+
+ #[test]
+ fn link_to_subsubpage_resolves_to_it() {
+ let mut site = Site::new(".", ".");
+ site.add_wikitextpage(page("/foo/bar/yo/yoyo"));
+ site.process().unwrap();
+ assert_eq!(
+ site.resolve("/foo/bar", "yo/yoyo"),
+ Path::new("/foo/bar/yo/yoyo")
+ );
+ }
}
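The src/wikitext.rs changes below thread the `Site` into `Snippet::process` so that a wiki link's target is resolved against the linking page's path before being emitted as a Markdown link. For instance, with the scenario site above, `[[sibling]]` on page `dir/foo` renders as `[sibling](dir/sibling)`, while the unresolvable `[[missing]]` falls back to `[missing](/missing)`.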
diff --git a/src/wikitext.rs b/src/wikitext.rs
index c7136cc..b7e0b72 100644
--- a/src/wikitext.rs
+++ b/src/wikitext.rs
@@ -1,7 +1,10 @@
use crate::directive::{self, Directive};
use crate::page::PageMeta;
+use crate::site::Site;
+use log::trace;
use regex::Regex;
use std::collections::HashMap;
+use std::path::Path;
#[derive(Debug)]
pub struct WikitextParser {
@@ -48,6 +51,7 @@ impl WikitextParser {
if m.start() > 0 {
continue;
}
+ trace!("WikitextParser: m={:?}", m);
let t = m.as_str();
let c = pat.captures(t).unwrap();
let token = if pat.as_str() == self.directive_args.as_str() {
@@ -78,6 +82,7 @@ impl WikitextParser {
unreachable!("need to handle pattern: {}", pat.as_str());
};
let rest = text.get(m.end()..).unwrap();
+ trace!("WikitextParser: token={:?}", token);
return (token, rest);
}
}
@@ -146,10 +151,15 @@ pub enum Snippet {
}
impl Snippet {
- pub fn process(&self, meta: &mut PageMeta) -> String {
+ pub fn process(&self, site: &Site, meta: &mut PageMeta) -> String {
+ trace!("Snippet::process: self={:?}", self);
match self {
Snippet::Markdown(text) => text.into(),
- Snippet::WikiLink(w) => format!("[{}]({})", w.link_text(), w.target()),
+ Snippet::WikiLink(w) => {
+ let resolved = site.resolve(meta.path(), Path::new(w.target()));
+ trace!("resolved {} to {}", w.target(), resolved.display());
+ format!("[{}]({})", w.link_text(), resolved.display())
+ }
Snippet::Directive(d) => {
match d.name() {
"meta" => {