author     Lars Wirzenius <liw@liw.fi>  2022-07-31 08:24:24 +0000
committer  Lars Wirzenius <liw@liw.fi>  2022-07-31 08:24:24 +0000
commit     65c453b9b8499eb4a273bfd6f013608b9af23844 (patch)
tree       ca7932482c933447f96004673e284c94ee86f07d
parent     bc9493725346d909c305fcaacd06c448e303cd05 (diff)
parent     ec41f376c51ad990c67015829d24daba65cd2538 (diff)
download   riki-65c453b9b8499eb4a273bfd6f013608b9af23844.tar.gz
Merge branch 'definition-lists' into 'main'
detect definition lists, fail if found

Closes #5

See merge request larswirzenius/riki!29
-rw-r--r--  build.rs               3
-rw-r--r--  riki.md               46
-rw-r--r--  src/bin/riki.rs        6
-rw-r--r--  src/directive/img.rs   4
-rw-r--r--  src/directive/tag.rs   4
-rw-r--r--  src/error.rs           3
-rw-r--r--  src/html.rs           19
-rw-r--r--  src/lib.rs            12
-rw-r--r--  src/page.rs           25
-rw-r--r--  src/parser.rs         14
-rw-r--r--  src/site.rs            3
-rw-r--r--  src/token.rs          27
-rw-r--r--  src/util.rs           10
-rw-r--r--  src/wikitext.rs        8
14 files changed, 122 insertions, 62 deletions
diff --git a/build.rs b/build.rs
index da983ec..cc39391 100644
--- a/build.rs
+++ b/build.rs
@@ -1,4 +1,3 @@
fn main() {
- subplot_build::codegen("riki.md")
- .expect("failed to generate code with Subplot");
+ subplot_build::codegen("riki.md").expect("failed to generate code with Subplot");
}
diff --git a/riki.md b/riki.md
index 6fc0d53..e208baa 100644
--- a/riki.md
+++ b/riki.md
@@ -334,6 +334,52 @@ then AST of site/page.mdwn matches that of output/page.html
* [x] done
~~~
+## Definition list
+
+_Requirement: Markup indicating use of a definition list should be
+flagged as an error._
+
+Justification: Neither the CommonMark specification nor GitHub
+Flavored Markdown supports definition lists, even though some
+Markdown variants do. The Markdown parser Riki uses doesn't support
+them either.
+
+~~~scenario
+given an installed riki
+
+given file site/page.mdwn from dl-1
+when I try to run riki build --plain-body site output
+then command fails
+then stderr contains "definition list"
+
+given file site/page.mdwn from dl-2
+when I try to run riki build --plain-body site output
+then command fails
+then stderr contains "definition list"
+
+given file site/page.mdwn from dl-3
+when I run riki build --plain-body site output
+then file output/page.html contains ": bar"
+~~~
+
+~~~{#dl-1 .file}
+foo
+: bar
+~~~
+
+~~~{#dl-2 .file}
+foo
+
+: bar
+~~~
+
+~~~{#dl-3 .file}
+foo
+
+<!-- no colon at beginning of line here -->: bar
+~~~
+
+
## Input files other than Markdown
_Requirement: Input files that aren't Markdown files must be copied
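The justification above says the Markdown parser Riki uses has no definition-list support. Judging by the `Options`, `Event`, and `Tag` names in the src/html.rs hunk further down, that parser is pulldown-cmark; the following standalone sketch (not part of the commit, pre-0.10 API assumed) shows how input shaped like the dl-1 file comes back as ordinary paragraph text rather than a definition list, which is why the check has to happen after parsing:
~~~rust
// Not part of the commit: a quick demonstration that the parser sees dl-1
// style input as one plain paragraph. Assumes pulldown-cmark's pre-0.10
// API, which matches the Event::End(Tag::...) usage in src/html.rs.
use pulldown_cmark::{Event, Options, Parser};

fn main() {
    let markdown = "foo\n: bar\n"; // same shape as the dl-1 file above
    let mut text = String::new();
    for event in Parser::new_ext(markdown, Options::empty()) {
        match event {
            Event::Text(s) => text.push_str(&s),
            // Join soft breaks with '\n', mirroring the SoftBreak change
            // made in src/html.rs below.
            Event::SoftBreak => text.push('\n'),
            _ => {}
        }
    }
    // No definition list in the event stream, just paragraph text.
    assert_eq!(text, "foo\n: bar");
}
~~~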
diff --git a/src/bin/riki.rs b/src/bin/riki.rs
index 1455b45..afd43ef 100644
--- a/src/bin/riki.rs
+++ b/src/bin/riki.rs
@@ -48,7 +48,7 @@ struct Args {
#[derive(Parser)]
enum Command {
Build(Build),
- List(List)
+ List(List),
}
#[derive(Parser)]
@@ -76,9 +76,9 @@ impl Build {
for page in site.markdown_pages() {
let htmlpage = if self.plain_body {
- page.body_to_html()
+ page.body_to_html()?
} else {
- page.to_html()
+ page.to_html()?
};
let output = page.meta().destination_filename(&destdir);
debug!("writing: {}", output.display());
diff --git a/src/directive/img.rs b/src/directive/img.rs
index f81f8a9..e10851e 100644
--- a/src/directive/img.rs
+++ b/src/directive/img.rs
@@ -16,9 +16,7 @@ impl Img {
pub const ALLOW_ANY_UNNAMED: bool = true;
pub fn new(src: String) -> Self {
- Self {
- src,
- }
+ Self { src }
}
pub fn process(&self, site: &Site, meta: &mut PageMeta) -> Result<String, SiteError> {
diff --git a/src/directive/tag.rs b/src/directive/tag.rs
index 9486a7b..baf4561 100644
--- a/src/directive/tag.rs
+++ b/src/directive/tag.rs
@@ -14,9 +14,7 @@ impl Tag {
pub const ALLOW_ANY_UNNAMED: bool = true;
pub fn new(tags: Vec<String>) -> Self {
- Self {
- tags,
- }
+ Self { tags }
}
pub fn process(&self, _site: &Site, _meta: &mut PageMeta) -> Result<String, SiteError> {
diff --git a/src/error.rs b/src/error.rs
index a5e65e1..756d7f0 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -49,4 +49,7 @@ pub enum SiteError {
#[error("link to missing page {1} on {0}")]
PageMissing(PathBuf, PathBuf),
+
+ #[error("attempt to use definition lists in Markdown: {0:?}")]
+ DefinitionList(String),
}
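The scenarios in riki.md only check that stderr contains "definition list", and that text comes from the Display form of this new variant. A minimal sketch of that rendering (not from the commit; it assumes src/error.rs derives `thiserror::Error`, as the `#[error(...)]` attribute suggests):
~~~rust
// Not from the commit: a standalone sketch of the new variant's Display
// output, assuming src/error.rs derives thiserror::Error (the #[error]
// attribute above suggests it does).
use thiserror::Error;

#[derive(Debug, Error)]
enum SiteError {
    #[error("attempt to use definition lists in Markdown: {0:?}")]
    DefinitionList(String),
}

fn main() {
    let err = SiteError::DefinitionList("foo\n: bar".into());
    // Prints: attempt to use definition lists in Markdown: "foo\n: bar"
    eprintln!("{}", err);
    // This substring is what the riki.md scenarios look for on stderr.
    assert!(err.to_string().contains("definition list"));
}
~~~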
diff --git a/src/html.rs b/src/html.rs
index b4e4c45..9d048a5 100644
--- a/src/html.rs
+++ b/src/html.rs
@@ -60,7 +60,7 @@ impl HtmlPage {
}
}
-pub fn parse(markdown: &str) -> Element {
+pub fn parse(markdown: &str) -> Result<Element, SiteError> {
let mut options = Options::empty();
options.insert(Options::ENABLE_HEADING_ATTRIBUTES);
options.insert(Options::ENABLE_STRIKETHROUGH);
@@ -134,8 +134,17 @@ pub fn parse(markdown: &str) -> Element {
}
},
Event::End(tag) => match &tag {
- Tag::Paragraph
- | Tag::Heading(_, _, _)
+ Tag::Paragraph => {
+ trace!("at end of paragraph, looking for definition list use");
+ let e = stack.pop();
+ let s = as_plain_text(e.children());
+ trace!("paragraph text: {:?}", s);
+ if s.starts_with(": ") || s.contains("\n: ") {
+ return Err(SiteError::DefinitionList(s));
+ }
+ stack.append_child(Content::Elt(e));
+ }
+ Tag::Heading(_, _, _)
| Tag::List(_)
| Tag::Item
| Tag::Link(_, _, _)
@@ -162,7 +171,7 @@ pub fn parse(markdown: &str) -> Element {
}
Event::Html(s) => stack.append_child(Content::Html(s.to_string())),
Event::FootnoteReference(s) => trace!("footnote ref {:?}", s),
- Event::SoftBreak => stack.append_str(" "),
+ Event::SoftBreak => stack.append_str("\n"),
Event::HardBreak => stack.append_element(Element::new(ElementTag::Br)),
Event::Rule => stack.append_element(Element::new(ElementTag::Hr)),
Event::TaskListMarker(done) => {
@@ -179,7 +188,7 @@ pub fn parse(markdown: &str) -> Element {
let mut body = stack.pop();
assert!(stack.is_empty());
body.fix_up_img_alt();
- body
+ Ok(body)
}
fn as_plain_text(content: &[Content]) -> String {
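Stripped of the `Element`/`Content` plumbing, the paragraph-level check added above boils down to two string tests, and the second one only works because `SoftBreak` now appends a newline instead of a space. A simplified sketch of that logic, with the dl-1 and dl-2 cases from riki.md as tests (an illustration, not the commit's exact code):
~~~rust
// Simplified sketch, not the commit's exact code: the paragraph-level check
// from src/html.rs with the Element/Content plumbing stripped away. The
// paragraph text is assumed to have been joined with '\n' at soft breaks,
// which is what the SoftBreak change above provides.
fn looks_like_definition_list(paragraph: &str) -> bool {
    // A ": " at the start of the paragraph, or at the start of any later
    // line inside it, is treated as attempted definition-list markup.
    paragraph.starts_with(": ") || paragraph.contains("\n: ")
}

#[cfg(test)]
mod tests {
    use super::looks_like_definition_list;

    #[test]
    fn flags_leading_colon_lines_only() {
        assert!(looks_like_definition_list("foo\n: bar")); // dl-1: same paragraph
        assert!(looks_like_definition_list(": bar")); // dl-2: paragraph starts with ": "
        assert!(!looks_like_definition_list("foo: bar")); // colon not at line start
        assert!(!looks_like_definition_list("foo")); // plain text
    }
}
~~~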
diff --git a/src/lib.rs b/src/lib.rs
index 781b999..2caefeb 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,12 +7,12 @@
//! little slow. This crate implements a subset of the functionality of
//! ikiwiki in Rust, for speed.
-pub mod site;
-pub mod page;
-pub mod error;
-pub mod token;
-pub mod parser;
-pub mod wikitext;
pub mod directive;
+pub mod error;
pub mod html;
+pub mod page;
+pub mod parser;
+pub mod site;
+pub mod token;
pub mod util;
+pub mod wikitext;
diff --git a/src/page.rs b/src/page.rs
index d0ba861..d2f144a 100644
--- a/src/page.rs
+++ b/src/page.rs
@@ -97,23 +97,23 @@ impl MarkdownPage {
&self.meta
}
- pub fn body_to_html(&self) -> HtmlPage {
+ pub fn body_to_html(&self) -> Result<HtmlPage, SiteError> {
let head = Element::new(ElementTag::Head);
- let body = parse(self.markdown());
- HtmlPage::new(head, body)
+ let body = parse(self.markdown())?;
+ Ok(HtmlPage::new(head, body))
}
- pub fn to_html(&self) -> HtmlPage {
+ pub fn to_html(&self) -> Result<HtmlPage, SiteError> {
let mut title = Element::new(ElementTag::Title);
title.push_child(Content::Text(self.meta.title().into()));
let mut head = Element::new(ElementTag::Head);
head.push_child(Content::Elt(title));
- let body = parse(self.markdown());
+ let body = parse(self.markdown())?;
trace!("MarkdownPage::to_html: head={:?}", head);
- HtmlPage::new(head, body)
+ Ok(HtmlPage::new(head, body))
}
}
@@ -126,7 +126,12 @@ pub struct PageMeta {
impl PageMeta {
fn new(name: String, title: Option<String>, path: PathBuf) -> Self {
- trace!("PageMeta: name={:?} title={:?} path={:?}", name, title, path);
+ trace!(
+ "PageMeta: name={:?} title={:?} path={:?}",
+ name,
+ title,
+ path
+ );
Self { name, title, path }
}
@@ -165,7 +170,11 @@ pub struct MetaBuilder {
impl MetaBuilder {
pub fn build(self) -> PageMeta {
- PageMeta::new(self.name, self.title, self.path.expect("path set on MetaBuilder"))
+ PageMeta::new(
+ self.name,
+ self.title,
+ self.path.expect("path set on MetaBuilder"),
+ )
}
pub fn name(mut self, name: String) -> Self {
diff --git a/src/parser.rs b/src/parser.rs
index 14f46c4..ed205e4 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -120,7 +120,8 @@ impl WikitextParser {
link_text.push(' ');
self.tokens.drain(..1);
}
- [Token::ClosedBracket, Token::OpenParens, Token::Word(word), Token::ClosedParens, ..] => {
+ [Token::ClosedBracket, Token::OpenParens, Token::Word(word), Token::ClosedParens, ..] =>
+ {
target = Some(word.to_string());
self.tokens.drain(..4);
break;
@@ -314,7 +315,7 @@ mod test {
let mut p = WikitextParser::new(
r#"[[!multilinearg yo="""foo
bar"""]]"#,
- &patterns
+ &patterns,
);
assert_eq!(
p.parse().unwrap(),
@@ -326,13 +327,12 @@ mod test {
#[test]
fn directive_multiple_args() {
let patterns = TokenPatterns::default();
- let mut p = WikitextParser::new(
- r#"[[!img foo.jpg class=image]]"#,
- &patterns
- );
+ let mut p = WikitextParser::new(r#"[[!img foo.jpg class=image]]"#, &patterns);
assert_eq!(
p.parse().unwrap(),
- Some(Snippet::Directive(Directive::Img(Img::new("foo.jpg".into()))))
+ Some(Snippet::Directive(Directive::Img(Img::new(
+ "foo.jpg".into()
+ ))))
);
assert_eq!(p.parse().unwrap(), None);
}
diff --git a/src/site.rs b/src/site.rs
index 48de58d..d48d94c 100644
--- a/src/site.rs
+++ b/src/site.rs
@@ -125,8 +125,7 @@ impl Site {
fn all_files(root: &Path) -> Result<Vec<PathBuf>, SiteError> {
let mut files = vec![];
- for e in WalkDir::new(root)
- {
+ for e in WalkDir::new(root) {
let e = e.map_err(|err| SiteError::WalkDir(root.to_path_buf(), err))?;
let path = e.path();
if Self::is_excluded(path) {
diff --git a/src/token.rs b/src/token.rs
index f5696e7..76cd7b4 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -49,10 +49,7 @@ pub struct TokenParser<'a> {
impl<'a> TokenParser<'a> {
pub fn new(input: &'a str, patterns: &'a TokenPatterns) -> Self {
- Self {
- input,
- patterns,
- }
+ Self { input, patterns }
}
pub fn parse(&mut self) -> Token {
@@ -76,7 +73,7 @@ impl<'a> TokenParser<'a> {
Token::Pipe
} else if self.literal("=") {
Token::Equals
- } else if let Some(m) = self.regex(&self.patterns.spaces.clone()) {
+ } else if let Some(m) = self.regex(&self.patterns.spaces.clone()) {
Token::Spaces(m.as_str().into())
} else if let Some(m) = self.regex(&self.patterns.triple_quoted.clone()) {
Token::QuotedValue(m.as_str().into())
@@ -160,7 +157,6 @@ mod test {
assert_eq!(p.parse(), Token::End);
}
-
#[test]
fn single_open_bracket() {
let patterns = TokenPatterns::default();
@@ -265,10 +261,7 @@ mod test {
#[test]
fn simple_directive() {
let patterns = TokenPatterns::default();
- let mut p = parser(
- r#"[[!if test="enabled(sidebar)"]]"#,
- &patterns
- );
+ let mut p = parser(r#"[[!if test="enabled(sidebar)"]]"#, &patterns);
assert_eq!(p.parse(), Token::OpenBrackets);
assert_eq!(p.parse(), Token::Bang);
assert_eq!(p.parse(), Token::Word("if".into()));
@@ -291,7 +284,7 @@ mod test {
""" else="""
[[!inline pages=sidebar raw=yes]]
"""]]"#,
- &patterns
+ &patterns,
);
assert_eq!(p.parse(), Token::OpenBrackets);
assert_eq!(p.parse(), Token::Bang);
@@ -310,7 +303,10 @@ mod test {
assert_eq!(p.parse(), Token::Spaces(" ".into()));
assert_eq!(p.parse(), Token::Word("else".into()));
assert_eq!(p.parse(), Token::Equals);
- assert_eq!(p.parse(), Token::QuotedValue("\n[[!inline pages=sidebar raw=yes]]\n".into()));
+ assert_eq!(
+ p.parse(),
+ Token::QuotedValue("\n[[!inline pages=sidebar raw=yes]]\n".into())
+ );
assert_eq!(p.parse(), Token::CloseBrackets);
assert_eq!(p.parse(), Token::End);
@@ -325,7 +321,7 @@ mod test {
""" else="""
[[!inline pages=sidebar raw=yes]]
"""]]"#,
- &patterns
+ &patterns,
);
let mut p = orig.clone();
assert_eq!(p.parse(), Token::OpenBrackets);
@@ -345,7 +341,10 @@ mod test {
assert_eq!(p.parse(), Token::Spaces(" ".into()));
assert_eq!(p.parse(), Token::Word("else".into()));
assert_eq!(p.parse(), Token::Equals);
- assert_eq!(p.parse(), Token::QuotedValue("\n[[!inline pages=sidebar raw=yes]]\n".into()));
+ assert_eq!(
+ p.parse(),
+ Token::QuotedValue("\n[[!inline pages=sidebar raw=yes]]\n".into())
+ );
assert_eq!(p.parse(), Token::CloseBrackets);
assert_eq!(p.parse(), Token::End);
diff --git a/src/util.rs b/src/util.rs
index fbf81f6..e1f82b3 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -84,12 +84,18 @@ mod test {
#[test]
fn joins_relative() {
- assert_eq!(join_subpath(Path::new("foo"), Path::new("bar")), PathBuf::from("foo/bar"));
+ assert_eq!(
+ join_subpath(Path::new("foo"), Path::new("bar")),
+ PathBuf::from("foo/bar")
+ );
}
#[test]
fn joins_absolute() {
- assert_eq!(join_subpath(Path::new("foo"), Path::new("/bar")), PathBuf::from("foo/bar"));
+ assert_eq!(
+ join_subpath(Path::new("foo"), Path::new("/bar")),
+ PathBuf::from("foo/bar")
+ );
}
#[test]
diff --git a/src/wikitext.rs b/src/wikitext.rs
index 54570fa..b425021 100644
--- a/src/wikitext.rs
+++ b/src/wikitext.rs
@@ -80,13 +80,7 @@ impl ParsedDirective {
pub fn unnamed_args(&self) -> Vec<&str> {
self.args
.iter()
- .filter_map(|(k, v)| {
- if v.is_empty() {
- Some(k.as_str())
- } else {
- None
- }
- })
+ .filter_map(|(k, v)| if v.is_empty() { Some(k.as_str()) } else { None })
.collect()
}
}