summary | refs | log | tree | commit | diff
diff options: context, space, mode
-rw-r--r--  src/bin/riki.rs             2
-rw-r--r--  src/directive/inline.rs     1
-rw-r--r--  src/directive/map.rs        1
-rw-r--r--  src/directive/pagestats.rs  1
-rw-r--r--  src/lib.rs                  2
-rw-r--r--  src/page.rs                 4
-rw-r--r--  src/site.rs               104
-rw-r--r--  src/wikitext.rs             6
8 files changed, 74 insertions(+), 47 deletions(-)
diff --git a/src/bin/riki.rs b/src/bin/riki.rs
index d219803..946051f 100644
--- a/src/bin/riki.rs
+++ b/src/bin/riki.rs
@@ -109,7 +109,7 @@ impl Build {
let mut site = Site::new(&srcdir, &destdir);
site.scan()?;
site.process()?;
- debug!("markdown file count: {}", site.markdown_pages().len());
+ debug!("markdown file count: {}", site.markdown_pages().count());
for page in site.markdown_pages() {
let htmlpage = if self.plain_body {
diff --git a/src/directive/inline.rs b/src/directive/inline.rs
index 973890b..7c0114b 100644
--- a/src/directive/inline.rs
+++ b/src/directive/inline.rs
@@ -41,7 +41,6 @@ impl DirectiveImplementation for Inline {
let pagespec = PageSpec::new(meta.path(), &self.pages).map_err(DirectiveError::PageSpec)?;
let matches: Vec<String> = site
.markdown_pages()
- .iter()
.filter(|page| pagespec.matches(site, page.meta().path()))
.map(|page| format!("* {}\n", Self::link(meta.path(), page.meta())))
.collect();
diff --git a/src/directive/map.rs b/src/directive/map.rs
index a2f43a6..a7ee3b2 100644
--- a/src/directive/map.rs
+++ b/src/directive/map.rs
@@ -26,7 +26,6 @@ impl DirectiveImplementation for Map {
let pagespec = PageSpec::new(meta.path(), &self.pages).map_err(DirectiveError::PageSpec)?;
let matches: Vec<String> = site
.markdown_pages()
- .iter()
.filter(|page| pagespec.matches(site, page.meta().path()))
.map(|page| format!("* {}\n", Self::link(meta.path(), page.meta())))
.collect();
diff --git a/src/directive/pagestats.rs b/src/directive/pagestats.rs
index 8c7feec..0d34a77 100644
--- a/src/directive/pagestats.rs
+++ b/src/directive/pagestats.rs
@@ -26,7 +26,6 @@ impl DirectiveImplementation for PageStats {
let pagespec = PageSpec::new(meta.path(), &self.pages).map_err(DirectiveError::PageSpec)?;
let matches: Vec<String> = site
.markdown_pages()
- .iter()
.filter(|page| pagespec.matches(site, page.meta().path()))
.map(|page| format!("* {}\n", Self::link(meta.path(), page.meta())))
.collect();
diff --git a/src/lib.rs b/src/lib.rs
index be6e4a9..bca0992 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -20,5 +20,5 @@ pub mod srcdir;
pub mod time;
pub mod token;
pub mod util;
-pub mod wikitext;
pub mod version;
+pub mod wikitext;
diff --git a/src/page.rs b/src/page.rs
index 485632d..77a06e6 100644
--- a/src/page.rs
+++ b/src/page.rs
@@ -31,7 +31,7 @@ pub enum PageError {
Parser(#[from] crate::parser::ParserError),
}
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct Page {
meta: PageMeta,
unprocessed: UnprocessedPage,
@@ -90,7 +90,7 @@ impl WikitextPage {
}
}
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct UnprocessedPage {
meta: PageMeta,
snippets: Vec<Snippet>,
diff --git a/src/site.rs b/src/site.rs
index cfd5017..5196b5c 100644
--- a/src/site.rs
+++ b/src/site.rs
@@ -44,9 +44,8 @@ pub struct Site {
patterns: TokenPatterns,
shortcuts: HashMap<String, Shortcut>,
name_builder: NameBuilder,
- pages: Vec<Page>,
- markdown_pages: Vec<MarkdownPage>,
- pages_that_will_exist: PageSet,
+ unprocessed_pages: PageSet<Page>,
+ markdown_pages: PageSet<MarkdownPage>,
files: Names,
}
@@ -60,11 +59,10 @@ impl Site {
{
Self {
name_builder: NameBuilder::new(srcdir.as_ref(), destdir.as_ref()),
- pages: vec![],
- markdown_pages: vec![],
+ unprocessed_pages: PageSet::default(),
+ markdown_pages: PageSet::default(),
files: Names::default(),
patterns: TokenPatterns::default(),
- pages_that_will_exist: PageSet::default(),
shortcuts: HashMap::new(),
}
}
@@ -91,7 +89,6 @@ impl Site {
fn add_wikitextpage(&mut self, page: WikitextPage) -> Result<(), SiteError> {
info!("add wikitext page {}", page.meta().path().display());
- self.pages_that_will_exist.insert(&page);
trace!("parsing wikitext page {}", page.meta().path().display());
let mut parser = WikitextParser::new(page.wikitext(), &self.patterns);
@@ -99,7 +96,8 @@ impl Site {
page.prepare(self)?;
let page = Page::new(page.meta().clone(), page);
- self.pages.push(page);
+ self.unprocessed_pages
+ .insert(page.meta().path(), page.clone());
Ok(())
}
@@ -121,13 +119,13 @@ impl Site {
}
fn process_page(&mut self) -> Result<bool, SiteError> {
- if let Some(page) = self.pages.pop() {
+ if let Some(page) = self.unprocessed_pages.remove_random_page() {
debug!(
"processing unprocessed page {}",
page.meta().path().display()
);
let page = page.markdown(self)?;
- self.markdown_pages.push(page);
+ self.markdown_pages.insert(page.meta().path(), page.clone());
Ok(true)
} else {
trace!("no pages to process");
@@ -135,8 +133,8 @@ impl Site {
}
}
- pub fn markdown_pages(&self) -> &[MarkdownPage] {
- &self.markdown_pages
+ pub fn markdown_pages(&self) -> impl Iterator<Item = &MarkdownPage> {
+ self.markdown_pages.pages()
}
pub fn files_only(&self) -> impl Iterator<Item = &Name> {
@@ -148,13 +146,11 @@ impl Site {
}
pub fn is_page(&self, path: &Path) -> bool {
- self.pages_that_will_exist.get_path(path).is_some()
+ self.unprocessed_pages.contains(path) || self.markdown_pages.contains(path)
}
pub fn page(&self, path: &Path) -> Option<&MarkdownPage> {
- self.markdown_pages
- .iter()
- .find(|&page| page.meta().path() == path)
+ self.markdown_pages.get_page(path)
}
fn all_files(&self) -> Result<Vec<Name>, SiteError> {
@@ -213,9 +209,9 @@ impl Site {
// Is target absolute?
if target.starts_with("/") {
- if let Some(path) = self.pages_that_will_exist.get_path(target) {
+ if self.is_page(target) {
trace!("absolute target exists");
- return Ok(path.into());
+ return Ok(target.into());
} else {
trace!("absolute target does not exist");
return Err(SiteError::PageMissing(page.into(), target.into()));
@@ -225,9 +221,9 @@ impl Site {
// Does a sub-page or file exist?
let wanted = page.join(target);
trace!("checking for subpage or file {}", wanted.display());
- if let Some(path) = self.pages_that_will_exist.get_path(&wanted) {
- trace!("subpage exists: {}", path.display());
- return Ok(path.into());
+ if self.is_page(&wanted) {
+ trace!("subpage exists: {}", wanted.display());
+ return Ok(wanted);
} else if self.file_exists(&wanted) {
trace!("subpage file exists: {}", wanted.display());
return Ok(wanted);
@@ -241,9 +237,13 @@ impl Site {
parent.display(),
path.display()
);
- if let Some(path) = self.pages_that_will_exist.get_path(path.as_path()) {
- trace!("sibling page exists: {}", path.display());
- return Ok(path.into());
+ if let Some(actual) = self.unprocessed_pages.get_path(&path) {
+ trace!("sibling page exists: {}", actual.display());
+ return Ok(actual.into());
+ }
+ if let Some(actual) = self.markdown_pages.get_path(&path) {
+ trace!("sibling page exists: {}", actual.display());
+ return Ok(actual.into());
}
// trace!("consider files: {:?}", self.files);
if self.file_exists(&path) {
@@ -255,9 +255,9 @@ impl Site {
// Does target exist relative to root?
let wanted = Path::new("/").join(target);
trace!("checking for absolute path {}", wanted.display());
- if let Some(path) = self.pages_that_will_exist.get_path(&wanted) {
- trace!("page at absolute path exists: {}", path.display());
- return Ok(path.into());
+ if self.is_page(&wanted) {
+ trace!("page at absolute path exists: {}", wanted.display());
+ return Ok(wanted);
} else if self.file_exists(&wanted) {
trace!("file at absolute path exists: {}", wanted.display());
return Ok(wanted);
@@ -290,20 +290,42 @@ impl Site {
}
}
-#[derive(Default, Debug)]
-struct PageSet {
- map: HashMap<String, PathBuf>,
+#[derive(Debug)]
+struct PageSet<T> {
+ map: HashMap<String, (PathBuf, T)>,
}
-impl PageSet {
- fn insert(&mut self, page: &WikitextPage) {
- let path = page.meta().path();
+impl<T> PageSet<T> {
+ fn insert(&mut self, path: &Path, page: T) {
let key = Self::normalize(path);
- self.map.insert(key, path.into());
+ self.map.insert(key, (path.into(), page));
+ }
+
+ fn contains(&self, path: &Path) -> bool {
+ self.map.contains_key(&Self::normalize(path))
+ }
+
+ fn get_page(&self, path: &Path) -> Option<&T> {
+ self.map.get(&Self::normalize(path)).map(|(_, page)| page)
+ }
+
+ fn get_path(&self, wanted: &Path) -> Option<&Path> {
+ self.map
+ .get(&Self::normalize(wanted))
+ .map(|(path, _)| path.as_path())
}
- fn get_path(&self, path: &Path) -> Option<&Path> {
- self.map.get(&Self::normalize(path)).map(|x| x.as_ref())
+ fn pages(&self) -> impl Iterator<Item = &T> {
+ self.map.values().map(|(_, page)| page)
+ }
+
+ fn remove_random_page(&mut self) -> Option<T> {
+ let mut keys: Vec<String> = self.map.keys().take(1).map(|k| k.into()).collect();
+ if let Some(key) = keys.pop() {
+ self.map.remove(&key).map(|(_, page)| page)
+ } else {
+ None
+ }
}
fn normalize(path: &Path) -> String {
@@ -311,6 +333,14 @@ impl PageSet {
}
}
+impl<T> Default for PageSet<T> {
+ fn default() -> Self {
+ Self {
+ map: HashMap::default(),
+ }
+ }
+}
+
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Shortcut {
name: String,
@@ -371,7 +401,7 @@ mod test {
#[test]
fn has_no_pages_initially() {
- assert_eq!(site().markdown_pages().to_vec(), vec![]);
+ assert_eq!(site().markdown_pages().count(), 0);
}
#[test]
diff --git a/src/wikitext.rs b/src/wikitext.rs
index 57dff88..4192c85 100644
--- a/src/wikitext.rs
+++ b/src/wikitext.rs
@@ -14,7 +14,7 @@ pub enum WikitextError {
Site(#[from] crate::site::SiteError),
}
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Snippet {
Markdown(String),
WikiLink(WikiLink),
@@ -69,7 +69,7 @@ impl Snippet {
}
}
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq)]
pub struct WikiLink {
link_text: String,
target: String,
@@ -92,7 +92,7 @@ impl WikiLink {
}
}
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ParsedDirective {
name: String,
args: HashMap<String, String>,