diff options
author | Lars Wirzenius <liw@liw.fi> | 2022-07-17 19:38:30 +0300 |
---|---|---|
committer | Lars Wirzenius <liw@liw.fi> | 2022-07-17 20:12:31 +0300 |
commit | 16bde7a48eca66114f9adaa0f87e7fa43ff4211e (patch) | |
tree | 1c39b14a843f2532787e006b85ab8450958be7ac | |
parent | 7c02d67dcd4d23ab760c070b23472b621a149077 (diff) | |
download | riki-16bde7a48eca66114f9adaa0f87e7fa43ff4211e.tar.gz |
feat: "meta title" directive
Add some infrastructure for directives, and implement the meta title
directive.
Sponsored-by: author
-rw-r--r-- | riki.md | 55 | ||||
-rw-r--r-- | src/bin/riki.rs | 9 | ||||
-rw-r--r-- | src/directive.rs | 27 | ||||
-rw-r--r-- | src/directive/meta.rs | 36 | ||||
-rw-r--r-- | src/directive/mod.rs | 88 | ||||
-rw-r--r-- | src/error.rs | 9 | ||||
-rw-r--r-- | src/html.rs | 4 | ||||
-rw-r--r-- | src/page.rs | 38 | ||||
-rw-r--r-- | src/site.rs | 4 | ||||
-rw-r--r-- | src/wikitext.rs | 111 |
10 files changed, 260 insertions, 121 deletions
@@ -38,7 +38,7 @@ be an empty HTML file._ ~~~scenario given an installed riki given file site/empty.mdwn from empty -when I run riki build site output +when I run riki build --plain-body site output then AST of site/empty.mdwn matches that of output/empty.html ~~~ @@ -54,7 +54,7 @@ be an HTML file with the same text, without extra elements._ ~~~scenario given an installed riki given file site/page.mdwn from para -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -74,7 +74,7 @@ output must have a blockquote element. ~~~scenario given an installed riki given file site/page.mdwn from blockquote -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -92,7 +92,7 @@ output must have a pre element. ~~~scenario given an installed riki given file site/page.mdwn from indented-code -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -110,7 +110,7 @@ output must have a pre element. ~~~scenario given an installed riki given file site/page.mdwn from fenced-code -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -131,7 +131,7 @@ must have an img element. 
~~~scenario given an installed riki given file site/page.mdwn from image-link -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -148,7 +148,7 @@ in HTML output._ ~~~scenario given an installed riki given file site/page.mdwn from emph -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -165,7 +165,7 @@ strong element in HTML output._ ~~~scenario given an installed riki given file site/page.mdwn from strong -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -182,7 +182,7 @@ element in HTML output._ ~~~scenario given an installed riki given file site/page.mdwn from strike -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -201,7 +201,7 @@ supported._ ~~~scenario given an installed riki given file site/page.mdwn from headings -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -223,7 +223,7 @@ element in HTML output._ ~~~scenario given an installed riki given file site/page.mdwn from backticks -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -242,7 +242,7 @@ output._ ~~~ given an installed riki given file site/page.mdwn from table -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -261,7 +261,7 @@ HTML output._ ~~~scenario given an installed riki given file site/page.mdwn from rule -when I run riki build site output +when I run riki build --plain-body site output then AST of 
site/page.mdwn matches that of output/page.html ~~~ @@ -282,7 +282,7 @@ in HTML output._ ~~~scenario given an installed riki given file site/page.mdwn from ul -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -303,7 +303,7 @@ the same as the Markdown.*** ~~~ given an installed riki given file site/page.mdwn from ol -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -321,7 +321,7 @@ in HTML output._ ~~~scenario given an installed riki given file site/page.mdwn from tasklist -when I run riki build site output +when I run riki build --plain-body site output then AST of site/page.mdwn matches that of output/page.html ~~~ @@ -339,7 +339,7 @@ into the destination directory as-is._ ~~~scenario given an installed riki given file site/image.jpg from image -when I run riki build site output +when I run riki build --plain-body site output then files site/image.jpg and output/image.jpg match ~~~ @@ -359,7 +359,7 @@ directory._ given an installed riki given file site/foo/page.mdwn from image given file site/bar/image.jpg from para -when I run riki build site output +when I run riki build --plain-body site output then AST of site/foo/page.mdwn matches that of output/foo/page.html then files site/bar//image.jpg and output/bar/image.jpg match ~~~ @@ -376,7 +376,7 @@ given file site/absolute.mdwn from empty given file site/dir/sibling.mdwn from empty given file site/dir/foo/child.mdwn from empty given file site/dir/foo/child/grandchild.mdwn from empty -when I run riki build site output +when I run riki build --plain-body site output then file output/dir/foo.html contains "href="/absolute"" then file output/dir/foo.html contains "href="dir/sibling"" then file output/dir/foo.html contains "href="dir/foo/child"" @@ -391,3 +391,20 @@ then file output/dir/foo.html contains "href="/missing" 
[[child/grandchild]] [[missing]] ~~~ + +## Directives + +### `meta title` + +_Requirement: the `meta title` directive sets page title._ + +~~~scenario +given an installed riki +given file site/index.mdwn from meta +when I run riki build site output +then file output/index.html contains "<title>Yo</title>" +~~~ + +~~~{#meta .file .markdown} +[[!meta title=Yo]] +~~~ diff --git a/src/bin/riki.rs b/src/bin/riki.rs index ea3b3dc..4ef30b0 100644 --- a/src/bin/riki.rs +++ b/src/bin/riki.rs @@ -51,6 +51,9 @@ enum Command { #[derive(Parser)] struct Build { + #[clap(long)] + plain_body: bool, + srcdir: PathBuf, destdir: PathBuf, } @@ -70,7 +73,11 @@ impl Build { debug!("markdown file count: {}", site.markdown_pages().len()); for page in site.markdown_pages() { - let htmlpage = page.to_html(); + let htmlpage = if self.plain_body { + page.body_to_html() + } else { + page.to_html() + }; let output = page.meta().destination_filename(&destdir); debug!("writing: {}", output.display()); htmlpage.write(&output)?; diff --git a/src/directive.rs b/src/directive.rs deleted file mode 100644 index fde0565..0000000 --- a/src/directive.rs +++ /dev/null @@ -1,27 +0,0 @@ -use crate::page::PageMeta; -use crate::wikitext::ParsedDirective; - -pub trait Directive { - fn process(&self, d: &ParsedDirective, meta: &mut PageMeta) -> String; -} - -#[derive(Default)] -pub struct Meta {} - -impl Directive for Meta { - fn process(&self, d: &ParsedDirective, meta: &mut PageMeta) -> String { - if let Some(title) = d.args().get("title") { - meta.set_title(title.into()); - } - "".into() - } -} - -#[derive(Default)] -pub struct Nop {} - -impl Directive for Nop { - fn process(&self, _d: &ParsedDirective, _meta: &mut PageMeta) -> String { - "".into() - } -} diff --git a/src/directive/meta.rs b/src/directive/meta.rs new file mode 100644 index 0000000..e8a4c02 --- /dev/null +++ b/src/directive/meta.rs @@ -0,0 +1,36 @@ +use crate::error::SiteError; +use crate::page::PageMeta; +use crate::site::Site; +use
crate::wikitext::ParsedDirective; + +#[derive(Default, Debug, Eq, PartialEq)] +pub struct Meta { + title: Option<String>, +} + +impl Meta { + pub const REQUIRED: &'static [&'static str] = &[]; + pub const ALLOWED: &'static [&'static str] = &["date", "link", "title"]; + + fn set_title(&mut self, title: &str) { + self.title = Some(title.into()); + } + + pub fn process(&self, _site: &Site, meta: &mut PageMeta) -> Result<String, SiteError> { + if let Some(title) = &self.title { + meta.set_title(title.into()); + } + Ok("".into()) + } +} + +impl From<ParsedDirective> for Meta { + fn from(p: ParsedDirective) -> Self { + let mut meta = Meta::default(); + let args = p.args(); + if let Some(title) = args.get("title") { + meta.set_title(title); + } + meta + } +} diff --git a/src/directive/mod.rs b/src/directive/mod.rs new file mode 100644 index 0000000..5baa4ec --- /dev/null +++ b/src/directive/mod.rs @@ -0,0 +1,88 @@ +use crate::error::SiteError; +use crate::page::PageMeta; +use crate::site::Site; +use crate::wikitext::ParsedDirective; +use std::collections::HashSet; + +#[derive(Debug, Eq, PartialEq)] +pub enum Directive { + Simple, + SimpleArg, + QuotedArg, + MultilineArg, + + Meta(Meta), +} + +impl TryFrom<ParsedDirective> for Directive { + type Error = SiteError; + + fn try_from(p: ParsedDirective) -> Result<Self, Self::Error> { + let d = match p.name() { + "simple" => { + Self::check_args(&p, &[], &[])?; + Directive::Simple + } + "simplearg" => { + Self::check_args(&p, &["foo"], &[])?; + Directive::SimpleArg + } + "quotedarg" => { + Self::check_args(&p, &["bar"], &[])?; + Directive::QuotedArg + } + "multilinearg" => { + Self::check_args(&p, &["yo"], &[])?; + Directive::MultilineArg + } + "meta" => { + Self::check_args(&p, Meta::REQUIRED, Meta::ALLOWED)?; + Directive::Meta(Meta::from(p)) + } + _ => return Err(SiteError::UnknownDirective(p.name().into())), + }; + Ok(d) + } +} + +impl Directive { + fn check_args( + p: &ParsedDirective, + required: &[&str], + allowed: 
&[&str], + ) -> Result<(), SiteError> { + let args = p.args(); + for arg in required.iter() { + if !args.contains_key(arg) { + return Err(SiteError::DirectiveMissingArg( + p.name().into(), + arg.to_string(), + )); + } + } + let required: HashSet<String> = required.iter().map(|arg| arg.to_string()).collect(); + let allowed: HashSet<String> = allowed.iter().map(|arg| arg.to_string()).collect(); + let allowed: HashSet<String> = required.union(&allowed).cloned().collect(); + for arg in args.keys() { + if !allowed.contains(*arg) { + return Err(SiteError::DirectiveUnknownArg( + p.name().into(), + arg.to_string(), + )); + } + } + Ok(()) + } + + pub fn process(&self, site: &mut Site, meta: &mut PageMeta) -> Result<String, SiteError> { + match self { + Self::Simple | Self::SimpleArg | Self::QuotedArg | Self::MultilineArg => { + panic!("directive {:?} may only be used in parsing tests", self) + } + Self::Meta(x) => x.process(site, meta), + } + } +} + +mod meta; +use meta::Meta; diff --git a/src/error.rs b/src/error.rs index c47110a..faaaaa8 100644 --- a/src/error.rs +++ b/src/error.rs @@ -34,4 +34,13 @@ pub enum SiteError { #[error("failed to create directory {0}")] CreateDir(PathBuf, #[source] std::io::Error), + + #[error("unknown directive {0}")] + UnknownDirective(String), + + #[error("directive {0} is missing required argument {1}")] + DirectiveMissingArg(String, String), + + #[error("directive {0} has unknown argument {1}")] + DirectiveUnknownArg(String, String), } diff --git a/src/html.rs b/src/html.rs index 499c2de..b4e4c45 100644 --- a/src/html.rs +++ b/src/html.rs @@ -212,7 +212,7 @@ impl Element { self.attrs.push(attr); } - fn push_child(&mut self, child: Content) { + pub fn push_child(&mut self, child: Content) { self.children.push(child); } @@ -309,6 +309,7 @@ pub enum ElementTag { A, Img, Table, + Title, Th, Tr, Td, @@ -342,6 +343,7 @@ impl ElementTag { Self::Img => "img", Self::Table => "table", Self::Th => "th", + Self::Title => "title", Self::Tr =>
"tr", Self::Td => "td", Self::Br => "br", diff --git a/src/page.rs b/src/page.rs index 5c379ba..8f7c775 100644 --- a/src/page.rs +++ b/src/page.rs @@ -1,5 +1,5 @@ use crate::error::SiteError; -use crate::html::{parse, Element, ElementTag, HtmlPage}; +use crate::html::{parse, Content, Element, ElementTag, HtmlPage}; use crate::site::Site; use crate::wikitext::{Snippet, WikitextParser}; use log::{info, trace}; @@ -49,34 +49,34 @@ pub struct UnprocessedPage { } impl UnprocessedPage { - pub fn new(meta: PageMeta, data: &str, parser: &WikitextParser) -> Self { - Self { + pub fn new(meta: PageMeta, data: &str, parser: &WikitextParser) -> Result<Self, SiteError> { + Ok(Self { meta, - snippets: Self::snippets(data, parser), - } + snippets: Self::snippets(data, parser)?, + }) } pub fn meta(&self) -> &PageMeta { &self.meta } - fn snippets(mut data: &str, parser: &WikitextParser) -> Vec<Snippet> { + fn snippets(mut data: &str, parser: &WikitextParser) -> Result<Vec<Snippet>, SiteError> { let mut snippets = vec![]; while !data.is_empty() { - let (snippet, rest) = parser.parse(data); + let (snippet, rest) = parser.parse(data)?; snippets.push(snippet); data = rest; } - snippets + Ok(snippets) } - pub fn process(&self, site: &Site) -> MarkdownPage { + pub fn process(&self, site: &mut Site) -> Result<MarkdownPage, SiteError> { let mut meta = self.meta.clone(); let mut m = String::new(); for snippet in self.snippets.iter() { - m.push_str(&snippet.process(site, &mut meta)); + m.push_str(&snippet.process(site, &mut meta)?); } - MarkdownPage::new(m, meta) + Ok(MarkdownPage::new(m, meta)) } } @@ -99,11 +99,24 @@ impl MarkdownPage { &self.meta } - pub fn to_html(&self) -> HtmlPage { + pub fn body_to_html(&self) -> HtmlPage { let head = Element::new(ElementTag::Head); let body = parse(self.markdown()); HtmlPage::new(head, body) } + + pub fn to_html(&self) -> HtmlPage { + let mut title = Element::new(ElementTag::Title); + title.push_child(Content::Text(self.meta.title().into())); + + let 
mut head = Element::new(ElementTag::Head); + head.push_child(Content::Elt(title)); + + let body = parse(self.markdown()); + + trace!("MarkdownPage::to_html: head={:?}", head); + HtmlPage::new(head, body) + } } #[derive(Debug, Clone, Eq, PartialEq)] @@ -128,6 +141,7 @@ impl PageMeta { } pub fn set_title(&mut self, title: String) { + trace!("PageMeta::set_title: title={:?}", title); self.title = Some(title); } diff --git a/src/site.rs b/src/site.rs index 377b87c..088a5a3 100644 --- a/src/site.rs +++ b/src/site.rs @@ -69,7 +69,7 @@ impl Site { pub fn process_wikipage(&mut self) -> Result<bool, SiteError> { if let Some(page) = self.wikitext_pages.pop() { trace!("processing wikitext page {}", page.meta().path().display()); - let page = UnprocessedPage::new(page.meta().clone(), page.wikitext(), &self.parser); + let page = UnprocessedPage::new(page.meta().clone(), page.wikitext(), &self.parser)?; self.unprocessed_pages.push(page); Ok(true) } else { @@ -83,7 +83,7 @@ impl Site { "processing unprocessed page {}", page.meta().path().display() ); - let page = page.process(self); + let page = page.process(self)?; self.markdown_pages.push(page); Ok(true) } else { diff --git a/src/wikitext.rs b/src/wikitext.rs index b7e0b72..8c9a20e 100644 --- a/src/wikitext.rs +++ b/src/wikitext.rs @@ -1,9 +1,11 @@ -use crate::directive::{self, Directive}; +use crate::directive::Directive; +use crate::error::SiteError; use crate::page::PageMeta; use crate::site::Site; use log::trace; use regex::Regex; use std::collections::HashMap; +use std::convert::TryFrom; use std::path::Path; #[derive(Debug)] @@ -37,7 +39,7 @@ pub struct WikitextParser { } impl WikitextParser { - pub fn parse<'a>(&self, text: &'a str) -> (Snippet, &'a str) { + pub fn parse<'a>(&self, text: &'a str) -> Result<(Snippet, &'a str), SiteError> { let patterns = vec![ &self.directive_args, &self.directive_no_args, @@ -59,11 +61,13 @@ impl WikitextParser { let args = c.name("args").unwrap().as_str(); let args = 
self.parse_args(args); let d = ParsedDirective::new(name, args); + let d = Directive::try_from(d)?; Snippet::Directive(d) } else if pat.as_str() == self.directive_no_args.as_str() { let name = c.name("name").unwrap().as_str(); let args = self.parse_args(""); let d = ParsedDirective::new(name, args); + let d = Directive::try_from(d)?; Snippet::Directive(d) } else if pat.as_str() == self.wikilink_bare.as_str() { let s = c.name("linktext").unwrap().as_str(); @@ -83,10 +87,10 @@ impl WikitextParser { }; let rest = text.get(m.end()..).unwrap(); trace!("WikitextParser: token={:?}", token); - return (token, rest); + return Ok((token, rest)); } } - (Snippet::Markdown(text.into()), "") + Ok((Snippet::Markdown(text.into()), "")) } fn parse_args(&self, mut args: &str) -> HashMap<String, String> { @@ -131,8 +135,10 @@ impl Default for WikitextParser { no_bracket: Regex::new(r"^[^\[]+").unwrap(), bracket: Regex::new(r"^\[").unwrap(), wikilink_bare: Regex::new(r"^\[\[\s*(?P<linktext>(\w|[-/_])+)\s*\]\]").unwrap(), - wikilink_complex: Regex::new(r"\[\[\s*(?P<linktext>.*)\|(?P<target>(\w|[-/_])+)\s*\]\]") - .unwrap(), + wikilink_complex: Regex::new( + r"\[\[\s*(?P<linktext>.*)\|(?P<target>(\w|[-/_])+)\s*\]\]", + ) + .unwrap(), directive_no_args: Regex::new(r"^\[\[!(?P<name>\w+)\s*\]\]").unwrap(), directive_args: Regex::new(r#"\[\[!(?P<name>\w+)\s+(?P<args>[^]]*?)\s*\]\]"#).unwrap(), plain: Regex::new(r"(?P<key>\w+)").unwrap(), @@ -147,32 +153,22 @@ impl Default for WikitextParser { pub enum Snippet { Markdown(String), WikiLink(WikiLink), - Directive(ParsedDirective), + Directive(Directive), } impl Snippet { - pub fn process(&self, site: &Site, meta: &mut PageMeta) -> String { + pub fn process(&self, site: &mut Site, meta: &mut PageMeta) -> Result<String, SiteError> { trace!("Snippet::process: self={:?}", self); - match self { + let s = match self { Snippet::Markdown(text) => text.into(), Snippet::WikiLink(w) => { let resolved = site.resolve(meta.path(), Path::new(w.target())); 
trace!("resolved {} to {}", w.target(), resolved.display()); format!("[{}]({})", w.link_text(), resolved.display()) } - Snippet::Directive(d) => { - match d.name() { - "meta" => { - let directive = directive::Meta::default(); - directive.process(d, meta) - } - _ => { - let directive = directive::Nop::default(); - directive.process(d, meta) - } // _ => unreachable!("unknown directive {}", d.name()), - } - } - } + Snippet::Directive(d) => d.process(site, meta)?, + }; + Ok(s) } } @@ -217,8 +213,11 @@ impl ParsedDirective { &self.name } - pub fn args(&self) -> &HashMap<String, String> { - &self.args + pub fn args(&self) -> HashMap<&str, &str> { + self.args + .iter() + .map(|(k, v)| (k.as_str(), v.as_str())) + .collect() } } @@ -227,12 +226,12 @@ pub enum WikiError {} #[cfg(test)] mod test { - use super::{HashMap, ParsedDirective, Snippet, WikiLink, WikitextParser}; + use super::{Directive, Snippet, WikiLink, WikitextParser}; #[test] fn plain_markdown() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse("hello, world"); + let (snippet, rest) = p.parse("hello, world").unwrap(); assert_eq!(snippet, Snippet::Markdown("hello, world".into())); assert_eq!(rest, ""); } @@ -241,12 +240,15 @@ mod test { fn simple_wikilink() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse("hello, [[planet-earth]]"); + let (snippet, rest) = p.parse("hello, [[planet-earth]]").unwrap(); assert_eq!(snippet, Snippet::Markdown("hello, ".into())); assert_eq!(rest, "[[planet-earth]]"); - let (snippet, rest) = p.parse(rest); - assert_eq!(snippet, Snippet::WikiLink(WikiLink::new("planet-earth", "planet-earth"))); + let (snippet, rest) = p.parse(rest).unwrap(); + assert_eq!( + snippet, + Snippet::WikiLink(WikiLink::new("planet-earth", "planet-earth")) + ); assert_eq!(rest, ""); } @@ -254,12 +256,15 @@ mod test { fn simple_wikilink_to_subpage() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse("hello, [[planets/earth]]"); + let (snippet, rest) = 
p.parse("hello, [[planets/earth]]").unwrap(); assert_eq!(snippet, Snippet::Markdown("hello, ".into())); assert_eq!(rest, "[[planets/earth]]"); - let (snippet, rest) = p.parse(rest); - assert_eq!(snippet, Snippet::WikiLink(WikiLink::new("planets/earth", "planets/earth"))); + let (snippet, rest) = p.parse(rest).unwrap(); + assert_eq!( + snippet, + Snippet::WikiLink(WikiLink::new("planets/earth", "planets/earth")) + ); assert_eq!(rest, ""); } @@ -267,11 +272,11 @@ mod test { fn complex_wikilink() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse("hello, [[whomever we greet|planet-earth]]"); + let (snippet, rest) = p.parse("hello, [[whomever we greet|planet-earth]]").unwrap(); assert_eq!(snippet, Snippet::Markdown("hello, ".into())); assert_eq!(rest, "[[whomever we greet|planet-earth]]"); - let (snippet, rest) = p.parse(rest); + let (snippet, rest) = p.parse(rest).unwrap(); assert_eq!( snippet, Snippet::WikiLink(WikiLink::new("whomever we greet", "planet-earth")) @@ -283,11 +288,11 @@ mod test { fn complex_wikilink_to_subpage() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse("hello, [[whomever we greet|planets/earth]]"); + let (snippet, rest) = p.parse("hello, [[whomever we greet|planets/earth]]").unwrap(); assert_eq!(snippet, Snippet::Markdown("hello, ".into())); assert_eq!(rest, "[[whomever we greet|planets/earth]]"); - let (snippet, rest) = p.parse(rest); + let (snippet, rest) = p.parse(rest).unwrap(); assert_eq!( snippet, Snippet::WikiLink(WikiLink::new("whomever we greet", "planets/earth")) @@ -299,11 +304,11 @@ mod test { fn bracket() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse("[world"); + let (snippet, rest) = p.parse("[world").unwrap(); assert_eq!(snippet, Snippet::Markdown("[".into())); assert_eq!(rest, "world"); - let (snippet, rest) = p.parse(rest); + let (snippet, rest) = p.parse(rest).unwrap(); assert_eq!(snippet, Snippet::Markdown("world".into())); assert_eq!(rest, ""); } @@ -311,13 +316,10 
@@ mod test { #[test] fn simple_directive() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse("[[!foo]]"); + let (snippet, rest) = p.parse("[[!simple]]").unwrap(); assert_eq!( snippet, - Snippet::Directive(ParsedDirective { - name: "foo".into(), - args: HashMap::new() - }) + Snippet::Directive(Directive::Simple) ); assert_eq!(rest, ""); } @@ -325,13 +327,10 @@ mod test { #[test] fn directive_simple_arg() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse("[[!foo bar]]"); + let (snippet, rest) = p.parse("[[!simplearg foo]]").unwrap(); assert_eq!( snippet, - Snippet::Directive(ParsedDirective { - name: "foo".into(), - args: HashMap::from([("bar".into(), "".into())]) - }) + Snippet::Directive(Directive::SimpleArg) ); assert_eq!(rest, ""); } @@ -339,13 +338,10 @@ mod test { #[test] fn directive_quoted_arg() { let p = WikitextParser::default(); - let (snippet, rest) = p.parse(r#"[[!foo bar="foobar"]]"#); + let (snippet, rest) = p.parse(r#"[[!quotedarg bar="foobar"]]"#).unwrap(); assert_eq!( snippet, - Snippet::Directive(ParsedDirective { - name: "foo".into(), - args: HashMap::from([("bar".into(), "foobar".into())]) - }) + Snippet::Directive(Directive::QuotedArg) ); assert_eq!(rest, ""); } @@ -354,15 +350,12 @@ mod test { fn directive_multiline_arg() { let p = WikitextParser::default(); let (snippet, rest) = p.parse( - r#"[[!foo bar="""foo + r#"[[!multilinearg yo="""foo bar"""]]"#, - ); + ).unwrap(); assert_eq!( snippet, - Snippet::Directive(ParsedDirective { - name: "foo".into(), - args: HashMap::from([("bar".into(), "foo\nbar".into())]) - }) + Snippet::Directive(Directive::MultilineArg) ); assert_eq!(rest, ""); } |