summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorLars Wirzenius <liw@liw.fi>2022-10-22 10:25:16 +0300
committerLars Wirzenius <liw@liw.fi>2022-10-22 11:10:08 +0300
commitdeae40fdcd83b8f583caa79029b3215668dfcd16 (patch)
tree39c5000e3fb726540eeb41fb5f63b6d0ffca583f
parentcb8a66f409f5ae56cbc274a38d3ff2a5f2c877ff (diff)
downloadriki-deae40fdcd83b8f583caa79029b3215668dfcd16.tar.gz
feat: give location in source for errors
Sponsored-by: author
-rw-r--r--Cargo.lock7
-rw-r--r--Cargo.toml1
-rw-r--r--src/error.rs4
-rw-r--r--src/html.rs9
-rw-r--r--src/parser.rs103
-rw-r--r--src/token.rs249
6 files changed, 212 insertions, 161 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 2890524..c9d2409 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -660,6 +660,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
[[package]]
+name = "line-col"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e69cdf6b85b5c8dce514f694089a2cf8b1a702f6cd28607bcb3cf296c9778db"
+
+[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1133,6 +1139,7 @@ dependencies = [
"lalrpop",
"lalrpop-util",
"libc",
+ "line-col",
"log",
"pandoc",
"pandoc_ast",
diff --git a/Cargo.toml b/Cargo.toml
index e0cf3f2..bcb118f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,6 +13,7 @@ html-escape = "0.2.11"
lalrpop = "0.19.8"
lalrpop-util = "0.19.8"
libc = "0.2.126"
+line-col = "0.2.1"
log = "0.4.17"
pulldown-cmark = "0.9.0"
regex = "1.5.6"
diff --git a/src/error.rs b/src/error.rs
index 428c32d..2a7d373 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -53,8 +53,8 @@ pub enum SiteError {
#[error("link to missing page {1} on {0}")]
PageMissing(PathBuf, PathBuf),
- #[error("attempt to use definition lists in Markdown: {0:?}")]
- DefinitionList(String),
+ #[error("attempt to use definition lists in Markdown: line {0}, column {1}")]
+ DefinitionList(usize, usize),
#[error("failed to get file metadata: {0}")]
FileMetadata(PathBuf, #[source] std::io::Error),
diff --git a/src/html.rs b/src/html.rs
index 9d048a5..abbe71d 100644
--- a/src/html.rs
+++ b/src/html.rs
@@ -1,6 +1,7 @@
use crate::error::SiteError;
use crate::util::mkdir;
use html_escape::{encode_double_quoted_attribute, encode_text};
+use line_col::LineColLookup;
use log::trace;
use pulldown_cmark::{Event, HeadingLevel, Options, Parser, Tag};
use std::fmt::Write as _;
@@ -66,10 +67,11 @@ pub fn parse(markdown: &str) -> Result<Element, SiteError> {
options.insert(Options::ENABLE_STRIKETHROUGH);
options.insert(Options::ENABLE_TABLES);
options.insert(Options::ENABLE_TASKLISTS);
- let p = Parser::new_ext(markdown, options);
+ let p = Parser::new_ext(markdown, options).into_offset_iter();
+ let linecol = LineColLookup::new(markdown);
let mut stack = Stack::new();
stack.push(Element::new(ElementTag::Body));
- for event in p {
+ for (event, loc) in p {
trace!("event {:?}", event);
match event {
Event::Start(tag) => match tag {
@@ -140,7 +142,8 @@ pub fn parse(markdown: &str) -> Result<Element, SiteError> {
let s = as_plain_text(e.children());
trace!("paragraph text: {:?}", s);
if s.starts_with(": ") || s.contains("\n: ") {
- return Err(SiteError::DefinitionList(s));
+ let (line, col) = linecol.get(loc.start);
+ return Err(SiteError::DefinitionList(line, col));
}
stack.append_child(Content::Elt(e));
}
diff --git a/src/parser.rs b/src/parser.rs
index 8058aeb..973d0b2 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -1,24 +1,28 @@
use crate::error::SiteError;
-use crate::token::{Token, TokenParser, TokenPatterns};
+use crate::token::{TokenKind, TokenParser, TokenPatterns};
use crate::wikitext::{ParsedDirective, Snippet, WikiLink};
-use log::trace;
+use line_col::LineColLookup;
+use log::{debug, trace};
use std::collections::HashMap;
#[derive(Debug)]
pub struct WikitextParser {
- tokens: Vec<Token>,
+ tokens: Vec<(TokenKind, usize, usize)>,
}
impl WikitextParser {
pub fn new(input: &str, patterns: &TokenPatterns) -> Self {
+ let linecol = LineColLookup::new(input);
let mut p = TokenParser::new(input, patterns);
let mut tokens = vec![];
loop {
let token = p.parse();
- if token == Token::End {
+ debug!("token {:?}", token);
+ if token.token == TokenKind::End {
break;
}
- tokens.push(token);
+ let (line, col) = linecol.get(token.pos);
+ tokens.push((token.token, line, col));
}
Self { tokens }
@@ -29,58 +33,67 @@ impl WikitextParser {
return Ok(None);
}
+ let (_, line, col) = self.tokens[0];
+ debug!("token at {}:{}", line, col);
let snippet = match &self.tokens[..] {
- [Token::OpenBrackets, Token::Word(target), Token::CloseBrackets, ..] => {
+ [(TokenKind::OpenBrackets, _, _), (TokenKind::Word(target), _, _), (TokenKind::CloseBrackets, _, _), ..] => {
let wikilink = WikiLink::new(target, target);
let snippet = Snippet::WikiLink(wikilink);
self.tokens.drain(..3);
snippet
}
- [Token::OpenBrackets, Token::Word(word), ..] => {
+ [(TokenKind::OpenBrackets, _, _), (TokenKind::Word(word), _, _), ..] => {
trace!("match [[{:?}", word);
let mut link_text = word.to_string();
let mut target = None;
self.tokens.drain(..2);
loop {
+ let (_, line, col) = self.tokens[0];
match &self.tokens[..] {
- [Token::Spaces(_), ..] => {
+ [(TokenKind::Spaces(_), _, _), ..] => {
trace!("match space");
self.tokens.drain(..1);
link_text.push(' ');
}
- [Token::Markdown(s), ..] => {
+ [(TokenKind::Markdown(s), _, _), ..] => {
trace!("match markdown {:?}", s);
link_text.push_str(s);
self.tokens.drain(..1);
}
- [Token::OpenParens, Token::Word(word), ..] => {
+ [(TokenKind::OpenParens, _, _), (TokenKind::Word(word), _, _), ..] => {
trace!("match ({:?}", word);
link_text.push('(');
link_text.push_str(word);
self.tokens.drain(..2);
}
- [Token::Word(word), ..] => {
+ [(TokenKind::Word(word), _, _), ..] => {
trace!("match {:?}", word);
link_text.push_str(word);
self.tokens.drain(..1);
}
- [Token::ClosedParens, ..] => {
+ [(TokenKind::ClosedParens, _, _), ..] => {
trace!("match )");
link_text.push(')');
self.tokens.drain(..1);
}
- [Token::CloseBrackets, ..] => {
+ [(TokenKind::CloseBrackets, _, _), ..] => {
trace!("match ]]");
self.tokens.drain(..1);
break;
}
- [Token::Pipe, Token::Word(word), Token::CloseBrackets, ..] => {
+ [(TokenKind::Pipe, _, _), (TokenKind::Word(word), _, _), (TokenKind::CloseBrackets, _, _), ..] => {
trace!("match |{:?}]]", word);
target = Some(word.to_string());
self.tokens.drain(..3);
break;
}
- _ => panic!("can't parse: {:?}", &self.tokens[..5]),
+ [(TokenKind::Pipe, _, _), (TokenKind::Spaces(_), _, _), (TokenKind::Word(word), _, _), (TokenKind::CloseBrackets, _, _), ..] => {
+ trace!("match |{:?}]]", word);
+ target = Some(word.to_string());
+ self.tokens.drain(..4);
+ break;
+ }
+ _ => panic!("a can't parse line {} column {}: {:?}", line, col, &self.tokens[..5]),
}
}
if target.is_none() {
@@ -89,141 +102,143 @@ impl WikitextParser {
let wikilink = WikiLink::new(&link_text, &target.unwrap());
Snippet::WikiLink(wikilink)
}
- [Token::OpenBrackets, Token::Bang, Token::Word(name), ..] => {
+ [(TokenKind::OpenBrackets, _, _), (TokenKind::Bang, _, _), (TokenKind::Word(name), _, _), ..] => {
trace!("match [[!{:?}", name);
let name = name.to_string();
let mut args = HashMap::new();
self.tokens.drain(..3);
loop {
+ let (_, line, col) = self.tokens[0];
match &self.tokens[..] {
- [Token::Spaces(_), ..] => {
+ [(TokenKind::Spaces(_), _, _), ..] => {
trace!("match spaces");
self.tokens.drain(..1);
}
- [Token::CloseBrackets, ..] => {
+ [(TokenKind::CloseBrackets, _, _), ..] => {
trace!("match ]]");
self.tokens.drain(..1);
break;
}
- [Token::Word(word), Token::Spaces(_), ..] => {
+ [(TokenKind::Word(word), _, _), (TokenKind::Spaces(_), _, _), ..] => {
trace!("match {:?} spaces", word);
args.insert(word.to_string(), "".to_string());
self.tokens.drain(..2);
}
- [Token::Word(word), Token::CloseBrackets, ..] => {
+ [(TokenKind::Word(word), _, _), (TokenKind::CloseBrackets, _, _), ..] => {
trace!("match {:?}]]", word);
args.insert(word.to_string(), "".to_string());
self.tokens.drain(..2);
break;
}
- [Token::Word(name), Token::Equals, Token::Word(value), ..] => {
+ [(TokenKind::Word(name), _, _), (TokenKind::Equals, _, _), (TokenKind::Word(value), _, _), ..] => {
trace!("match {:?}={:?}", name, value);
args.insert(name.to_string(), value.to_string());
self.tokens.drain(..3);
}
- [Token::Word(name), Token::Equals, Token::QuotedValue(value), ..] => {
+ [(TokenKind::Word(name), _, _), (TokenKind::Equals, _, _), (TokenKind::QuotedValue(value), _, _), ..] => {
trace!("match {:?}={:?}", name, value);
args.insert(name.to_string(), value.to_string());
self.tokens.drain(..3);
}
- [Token::QuotedValue(value), ..] => {
+ [(TokenKind::QuotedValue(value), _, _), ..] => {
trace!("match {:?}", value);
args.insert(value.to_string(), "".to_string());
self.tokens.drain(..1);
}
- _ => panic!("can't parse: {:?}", &self.tokens[..5]),
+ _ => panic!("b can't parse line {} column {}: {:?}", line, col, &self.tokens[..5]),
}
}
Snippet::Directive(ParsedDirective::new(&name, args)?)
}
- [Token::Bang, Token::OpenBracket, ..] => {
+ [(TokenKind::Bang, _, _), (TokenKind::OpenBracket, _, _), ..] => {
let mut link_text = String::new();
#[allow(unused_assignments)]
let mut target = None;
self.tokens.drain(..2);
loop {
+ let (_, line, col) = self.tokens[0];
match &self.tokens[..] {
- [Token::Word(word), ..] => {
+ [(TokenKind::Word(word), _, _), ..] => {
link_text.push_str(word);
self.tokens.drain(..1);
}
- [Token::Spaces(_), ..] => {
+ [(TokenKind::Spaces(_), _, _), ..] => {
link_text.push(' ');
self.tokens.drain(..1);
}
- [Token::ClosedBracket, Token::OpenParens, Token::Word(word), Token::ClosedParens, ..] =>
+ [(TokenKind::ClosedBracket, _, _), (TokenKind::OpenParens, _, _), (TokenKind::Word(word), _, _), (TokenKind::ClosedParens, _, _), ..] =>
{
target = Some(word.to_string());
self.tokens.drain(..4);
break;
}
- _ => panic!("can't parse: {:?}", &self.tokens[..5]),
+ _ => panic!("c can't parse line {} column {}: {:?}", line, col, &self.tokens[..5]),
}
}
Snippet::Markdown(format!("![{}]({})", link_text, target.unwrap()))
}
- [Token::Markdown(text), ..] => {
+ [(TokenKind::Markdown(text), _, _), ..] => {
let snippet = Snippet::Markdown(text.to_string());
self.tokens.drain(..1);
snippet
}
- [Token::Spaces(s), ..] => {
+ [(TokenKind::Spaces(s), _, _), ..] => {
let snippet = Snippet::Markdown(s.to_string());
self.tokens.drain(..1);
snippet
}
- [Token::Word(text), ..] => {
+ [(TokenKind::Word(text), _, _), ..] => {
let snippet = Snippet::Markdown(text.to_string());
self.tokens.drain(..1);
snippet
}
- [Token::Equals, ..] => {
+ [(TokenKind::Equals, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown("=".into())
}
- [Token::Bang, ..] => {
+ [(TokenKind::Bang, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown("!".into())
}
- [Token::Pipe, ..] => {
+ [(TokenKind::Pipe, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown("|".into())
}
- [Token::PageName(s), ..] => {
+ [(TokenKind::PageName(s), _, _), ..] => {
let snippet = Snippet::Markdown(s.to_string());
self.tokens.drain(..1);
snippet
}
- [Token::QuotedValue(s), ..] => {
+ [(TokenKind::QuotedValue(s), _, _), ..] => {
let snippet = Snippet::Markdown(format!("\"{}\"", s));
self.tokens.drain(..1);
snippet
}
- [Token::OpenParens, ..] => {
+ [(TokenKind::OpenParens, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown("(".into())
}
- [Token::ClosedParens, ..] => {
+ [(TokenKind::ClosedParens, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown(")".into())
}
- [Token::OpenBracket, ..] => {
+ [(TokenKind::OpenBracket, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown("[".into())
}
- [Token::ClosedBracket, ..] => {
+ [(TokenKind::ClosedBracket, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown("]".into())
}
- [Token::OpenBrackets, ..] => {
+ [(TokenKind::OpenBrackets, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown("[[".into())
}
- [Token::CloseBrackets, ..] => {
+ [(TokenKind::CloseBrackets, _, _), ..] => {
self.tokens.drain(..1);
Snippet::Markdown("]]".into())
}
- _ => panic!("eeek: {:?}", self.tokens),
+ _ => panic!("d can't parse line {} column {}: {:?}", line, col, self.tokens),
};
Ok(Some(snippet))
}
diff --git a/src/token.rs b/src/token.rs
index f6fb92c..7af018a 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -1,8 +1,24 @@
-//use log::trace;
+use log::debug;
use regex::Regex;
#[derive(Debug, Clone, Eq, PartialEq)]
-pub enum Token {
+pub struct Token {
+ pub token: TokenKind,
+ pub pos: usize,
+}
+
+impl Token {
+ fn new(token: TokenKind, pos: usize) -> Self {
+ debug!("Token: token={:?} pos={}", token, pos);
+ Self {
+ token,
+ pos,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum TokenKind {
End,
Markdown(String),
OpenParens,
@@ -47,57 +63,65 @@ impl Default for TokenPatterns {
#[derive(Debug, Clone)]
pub struct TokenParser<'a> {
+ pos: usize,
input: &'a str,
patterns: &'a TokenPatterns,
}
impl<'a> TokenParser<'a> {
pub fn new(input: &'a str, patterns: &'a TokenPatterns) -> Self {
- Self { input, patterns }
+ Self {
+ pos: 0,
+ input,
+ patterns,
+ }
}
pub fn parse(&mut self) -> Token {
- if self.input.is_empty() {
- Token::End
+ let pos = self.pos;
+ let token = if self.input.is_empty() {
+ TokenKind::End
} else if self.literal("(") {
- Token::OpenParens
+ TokenKind::OpenParens
} else if self.literal(")") {
- Token::ClosedParens
+ TokenKind::ClosedParens
} else if self.literal("[[") {
- Token::OpenBrackets
+ TokenKind::OpenBrackets
} else if self.literal("]]") {
- Token::CloseBrackets
+ TokenKind::CloseBrackets
} else if self.literal("[") {
- Token::OpenBracket
+ TokenKind::OpenBracket
} else if self.literal("]") {
- Token::ClosedBracket
+ TokenKind::ClosedBracket
} else if self.literal("!") {
- Token::Bang
+ TokenKind::Bang
} else if self.literal("|") {
- Token::Pipe
+ TokenKind::Pipe
} else if self.literal("=") {
- Token::Equals
+ TokenKind::Equals
} else if let Some(m) = self.regex(&self.patterns.spaces.clone()) {
- Token::Spaces(m.as_str().into())
+ TokenKind::Spaces(m.as_str().into())
} else if let Some(m) = self.regex(&self.patterns.triple_quoted.clone()) {
- Token::QuotedValue(m.as_str().into())
+ TokenKind::QuotedValue(m.as_str().into())
} else if let Some(m) = self.regex(&self.patterns.triple_quoted2.clone()) {
- Token::QuotedValue(m.as_str().into())
+ TokenKind::QuotedValue(m.as_str().into())
} else if let Some(m) = self.regex(&self.patterns.single_quoted.clone()) {
- Token::QuotedValue(m.as_str().into())
+ TokenKind::QuotedValue(m.as_str().into())
} else if let Some(m) = self.regex(&self.patterns.single_quoted2.clone()) {
- Token::QuotedValue(m.as_str().into())
+ TokenKind::QuotedValue(m.as_str().into())
} else if let Some(m) = self.regex(&self.patterns.word.clone()) {
- Token::Word(m.as_str().into())
+ TokenKind::Word(m.as_str().into())
} else if let Some(m) = self.regex(&self.patterns.plain.clone()) {
- Token::Markdown(m.as_str().into())
+ TokenKind::Markdown(m.as_str().into())
} else {
panic!("can't handle input: {:?}", self.input);
- }
+ };
+ Token::new(token, pos)
}
fn literal(&mut self, pattern: &str) -> bool {
if let Some(rest) = self.input.strip_prefix(pattern) {
+ self.pos += pattern.len();
self.input = rest;
true
} else {
@@ -118,6 +142,7 @@ impl<'a> TokenParser<'a> {
self.input = &self.input[m.end()..];
captures.get(0).unwrap()
};
+ self.pos += m.end();
return Some(m.as_str().to_string());
}
}
@@ -128,7 +153,7 @@ impl<'a> TokenParser<'a> {
#[cfg(test)]
mod test {
- use super::{Token, TokenParser, TokenPatterns};
+ use super::{Token, TokenKind, TokenParser, TokenPatterns};
fn parser<'a>(input: &'a str, patterns: &'a TokenPatterns) -> TokenParser<'a> {
TokenParser::new(input, patterns)
@@ -138,157 +163,157 @@ mod test {
fn empty_string() {
let patterns = TokenPatterns::default();
let mut p = parser("", &patterns);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn plain_markdown() {
let patterns = TokenPatterns::default();
let mut p = parser("** hello, world", &patterns);
- assert_eq!(p.parse(), Token::Markdown("** hello, world".into()));
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::Markdown("** hello, world".into()));
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn single_open_parens() {
let patterns = TokenPatterns::default();
let mut p = parser("(", &patterns);
- assert_eq!(p.parse(), Token::OpenParens);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::OpenParens);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn single_close_parens() {
let patterns = TokenPatterns::default();
let mut p = parser(")", &patterns);
- assert_eq!(p.parse(), Token::ClosedParens);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::ClosedParens);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn single_open_bracket() {
let patterns = TokenPatterns::default();
let mut p = parser("[", &patterns);
- assert_eq!(p.parse(), Token::OpenBracket);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::OpenBracket);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn single_close_bracket() {
let patterns = TokenPatterns::default();
let mut p = parser("]", &patterns);
- assert_eq!(p.parse(), Token::ClosedBracket);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::ClosedBracket);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn double_open_bracket() {
let patterns = TokenPatterns::default();
let mut p = parser("[[", &patterns);
- assert_eq!(p.parse(), Token::OpenBrackets);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::OpenBrackets);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn double_close_bracket() {
let patterns = TokenPatterns::default();
let mut p = parser("]]", &patterns);
- assert_eq!(p.parse(), Token::CloseBrackets);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::CloseBrackets);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn bang() {
let patterns = TokenPatterns::default();
let mut p = parser("!", &patterns);
- assert_eq!(p.parse(), Token::Bang);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::Bang);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn pipe() {
let patterns = TokenPatterns::default();
let mut p = parser("|", &patterns);
- assert_eq!(p.parse(), Token::Pipe);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::Pipe);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn equals() {
let patterns = TokenPatterns::default();
let mut p = parser("=", &patterns);
- assert_eq!(p.parse(), Token::Equals);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::Equals);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn simple_word() {
let patterns = TokenPatterns::default();
let mut p = parser("foo bar", &patterns);
- assert_eq!(p.parse(), Token::Word("foo".into()));
- assert_eq!(p.parse(), Token::Spaces(" ".into()));
- assert_eq!(p.parse(), Token::Word("bar".into()));
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::Word("foo".into()));
+ assert_eq!(p.parse().token, TokenKind::Spaces(" ".into()));
+ assert_eq!(p.parse().token, TokenKind::Word("bar".into()));
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn number_word() {
let patterns = TokenPatterns::default();
let mut p = parser("123", &patterns);
- assert_eq!(p.parse(), Token::Word("123".into()));
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::Word("123".into()));
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn complex_word() {
let patterns = TokenPatterns::default();
let mut p = parser("foo-1.2_3[[bar/subpage]]", &patterns);
- assert_eq!(p.parse(), Token::Word("foo-1.2_3".into()));
- assert_eq!(p.parse(), Token::OpenBrackets);
- assert_eq!(p.parse(), Token::Word("bar/subpage".into()));
- assert_eq!(p.parse(), Token::CloseBrackets);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::Word("foo-1.2_3".into()));
+ assert_eq!(p.parse().token, TokenKind::OpenBrackets);
+ assert_eq!(p.parse().token, TokenKind::Word("bar/subpage".into()));
+ assert_eq!(p.parse().token, TokenKind::CloseBrackets);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn spaces() {
let patterns = TokenPatterns::default();
let mut p = parser("\n", &patterns);
- assert_eq!(p.parse(), Token::Spaces("\n".into()));
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::Spaces("\n".into()));
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn single_quoted() {
let patterns = TokenPatterns::default();
let mut p = parser(r#""hello there""#, &patterns);
- assert_eq!(p.parse(), Token::QuotedValue(r#"hello there"#.into()));
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::QuotedValue(r#"hello there"#.into()));
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn triple_quoted() {
let patterns = TokenPatterns::default();
let mut p = parser(r#""""hello\nthere""""#, &patterns);
- assert_eq!(p.parse(), Token::QuotedValue(r#"hello\nthere"#.into()));
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::QuotedValue(r#"hello\nthere"#.into()));
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
fn simple_directive() {
let patterns = TokenPatterns::default();
let mut p = parser(r#"[[!if test="enabled(sidebar)"]]"#, &patterns);
- assert_eq!(p.parse(), Token::OpenBrackets);
- assert_eq!(p.parse(), Token::Bang);
- assert_eq!(p.parse(), Token::Word("if".into()));
- assert_eq!(p.parse(), Token::Spaces(" ".into()));
+ assert_eq!(p.parse().token, TokenKind::OpenBrackets);
+ assert_eq!(p.parse().token, TokenKind::Bang);
+ assert_eq!(p.parse().token, TokenKind::Word("if".into()));
+ assert_eq!(p.parse().token, TokenKind::Spaces(" ".into()));
- assert_eq!(p.parse(), Token::Word("test".into()));
- assert_eq!(p.parse(), Token::Equals);
- assert_eq!(p.parse(), Token::QuotedValue(r#"enabled(sidebar)"#.into()));
+ assert_eq!(p.parse().token, TokenKind::Word("test".into()));
+ assert_eq!(p.parse().token, TokenKind::Equals);
+ assert_eq!(p.parse().token, TokenKind::QuotedValue(r#"enabled(sidebar)"#.into()));
- assert_eq!(p.parse(), Token::CloseBrackets);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::CloseBrackets);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
@@ -302,30 +327,30 @@ mod test {
"""]]"#,
&patterns,
);
- assert_eq!(p.parse(), Token::OpenBrackets);
- assert_eq!(p.parse(), Token::Bang);
- assert_eq!(p.parse(), Token::Word("if".into()));
-
- assert_eq!(p.parse(), Token::Spaces(" ".into()));
- assert_eq!(p.parse(), Token::Word("test".into()));
- assert_eq!(p.parse(), Token::Equals);
- assert_eq!(p.parse(), Token::QuotedValue(r#"enabled(sidebar)"#.into()));
-
- assert_eq!(p.parse(), Token::Spaces(" ".into()));
- assert_eq!(p.parse(), Token::Word("then".into()));
- assert_eq!(p.parse(), Token::Equals);
- assert_eq!(p.parse(), Token::QuotedValue("\n[[!sidebar]]\n".into()));
-
- assert_eq!(p.parse(), Token::Spaces(" ".into()));
- assert_eq!(p.parse(), Token::Word("else".into()));
- assert_eq!(p.parse(), Token::Equals);
+ assert_eq!(p.parse().token, TokenKind::OpenBrackets);
+ assert_eq!(p.parse().token, TokenKind::Bang);
+ assert_eq!(p.parse().token, TokenKind::Word("if".into()));
+
+ assert_eq!(p.parse().token, TokenKind::Spaces(" ".into()));
+ assert_eq!(p.parse().token, TokenKind::Word("test".into()));
+ assert_eq!(p.parse().token, TokenKind::Equals);
+ assert_eq!(p.parse().token, TokenKind::QuotedValue(r#"enabled(sidebar)"#.into()));
+
+ assert_eq!(p.parse().token, TokenKind::Spaces(" ".into()));
+ assert_eq!(p.parse().token, TokenKind::Word("then".into()));
+ assert_eq!(p.parse().token, TokenKind::Equals);
+ assert_eq!(p.parse().token, TokenKind::QuotedValue("\n[[!sidebar]]\n".into()));
+
+ assert_eq!(p.parse().token, TokenKind::Spaces(" ".into()));
+ assert_eq!(p.parse().token, TokenKind::Word("else".into()));
+ assert_eq!(p.parse().token, TokenKind::Equals);
assert_eq!(
- p.parse(),
- Token::QuotedValue("\n[[!inline pages=sidebar raw=yes]]\n".into())
+ p.parse().token,
+ TokenKind::QuotedValue("\n[[!inline pages=sidebar raw=yes]]\n".into())
);
- assert_eq!(p.parse(), Token::CloseBrackets);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::CloseBrackets);
+ assert_eq!(p.parse().token, TokenKind::End);
}
#[test]
@@ -340,29 +365,29 @@ mod test {
&patterns,
);
let mut p = orig.clone();
- assert_eq!(p.parse(), Token::OpenBrackets);
- assert_eq!(p.parse(), Token::Bang);
- assert_eq!(p.parse(), Token::Word("if".into()));
-
- assert_eq!(p.parse(), Token::Spaces(" ".into()));
- assert_eq!(p.parse(), Token::Word("test".into()));
- assert_eq!(p.parse(), Token::Equals);
- assert_eq!(p.parse(), Token::QuotedValue(r#"enabled(sidebar)"#.into()));
-
- assert_eq!(p.parse(), Token::Spaces(" ".into()));
- assert_eq!(p.parse(), Token::Word("then".into()));
- assert_eq!(p.parse(), Token::Equals);
- assert_eq!(p.parse(), Token::QuotedValue("\n[[!sidebar]]\n".into()));
-
- assert_eq!(p.parse(), Token::Spaces(" ".into()));
- assert_eq!(p.parse(), Token::Word("else".into()));
- assert_eq!(p.parse(), Token::Equals);
+ assert_eq!(p.parse().token, TokenKind::OpenBrackets);
+ assert_eq!(p.parse().token, TokenKind::Bang);
+ assert_eq!(p.parse().token, TokenKind::Word("if".into()));
+
+ assert_eq!(p.parse().token, TokenKind::Spaces(" ".into()));
+ assert_eq!(p.parse().token, TokenKind::Word("test".into()));
+ assert_eq!(p.parse().token, TokenKind::Equals);
+ assert_eq!(p.parse().token, TokenKind::QuotedValue(r#"enabled(sidebar)"#.into()));
+
+ assert_eq!(p.parse().token, TokenKind::Spaces(" ".into()));
+ assert_eq!(p.parse().token, TokenKind::Word("then".into()));
+ assert_eq!(p.parse().token, TokenKind::Equals);
+ assert_eq!(p.parse().token, TokenKind::QuotedValue("\n[[!sidebar]]\n".into()));
+
+ assert_eq!(p.parse().token, TokenKind::Spaces(" ".into()));
+ assert_eq!(p.parse().token, TokenKind::Word("else".into()));
+ assert_eq!(p.parse().token, TokenKind::Equals);
assert_eq!(
- p.parse(),
- Token::QuotedValue("\n[[!inline pages=sidebar raw=yes]]\n".into())
+ p.parse().token,
+ TokenKind::QuotedValue("\n[[!inline pages=sidebar raw=yes]]\n".into())
);
- assert_eq!(p.parse(), Token::CloseBrackets);
- assert_eq!(p.parse(), Token::End);
+ assert_eq!(p.parse().token, TokenKind::CloseBrackets);
+ assert_eq!(p.parse().token, TokenKind::End);
}
}