Port tests 🚁

Laurenz 2020-02-03 15:05:34 +01:00
parent 3150fd5643
commit ec60795575
15 changed files with 473 additions and 566 deletions

View File

@@ -3,7 +3,7 @@ name = "typstc"
version = "0.1.0"
authors = ["Laurenz Mädje <laurmaedje@gmail.com>"]
edition = "2018"
build = "build.rs"
# build = "build.rs"
[dependencies]
toddle = { path = "../toddle", features = ["query"], default-features = false }
@@ -22,15 +22,10 @@ fs-provider = ["toddle/fs-provider"]
[[bin]]
name = "typst"
path = "src/bin/main.rs"
required-features = ["futures-executor"]
required-features = ["fs-provider", "futures-executor"]
[[test]]
name = "layouter"
path = "tests/src/layouter.rs"
harness = false
required-features = ["futures-executor"]
[[test]]
name = "parser"
path = "tests/src/parser.rs"
name = "typeset"
path = "tests/src/typeset.rs"
harness = false
required-features = ["fs-provider", "futures-executor"]
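The two separate `layouter` and `parser` test targets are replaced by a single `typeset` target with `harness = false`, so its required features must be enabled explicitly when invoking it. A plausible invocation (an assumption, not part of this commit) would be:

    cargo test --test typeset --features fs-provider,futures-executor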

View File

@@ -1,51 +0,0 @@
use std::fs::{self, create_dir_all, read_dir, read_to_string};
use std::ffi::OsStr;
fn main() -> Result<(), Box<dyn std::error::Error>> {
create_dir_all("tests/cache")?;
// Make sure the script reruns if this file changes or files are
// added/deleted in the parsing folder.
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rerun-if-changed=tests/cache/parser-tests.rs");
println!("cargo:rerun-if-changed=tests/parser");
// Compile all parser tests into a single giant vector.
let mut code = "vec![".to_string();
for entry in read_dir("tests/parser")? {
let path = entry?.path();
if path.extension() != Some(OsStr::new("rs")) {
continue;
}
let name = path
.file_stem().ok_or("expected file stem")?
.to_string_lossy();
// Make sure this also reruns if the contents of a file in parsing
// change. This is not ensured by rerunning only on the folder.
println!("cargo:rerun-if-changed=tests/parser/{}.rs", name);
code.push_str(&format!("(\"{}\", tokens!{{", name));
// Replace the `=>` arrows with a double arrow indicating the line
// number in the middle, such that the tester can tell which line number
// a test originated from.
let file = read_to_string(&path)?;
for (index, line) in file.lines().enumerate() {
let line = line.replace("=>", &format!("=>({})=>", index + 1));
code.push_str(&line);
code.push('\n');
}
code.push_str("}),");
}
code.push(']');
fs::write("tests/cache/parser-tests.rs", code)?;
Ok(())
}
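For reference, the deleted build script inlined every `tests/parser/*.rs` file into one generated vector, tagging each `=>` with the line it came from. For a hypothetical `tests/parser/tokens.rs` whose third line read `t " " => [S(0)]`, the generated `tests/cache/parser-tests.rs` would have looked roughly like this (illustrative sketch, not actual repository contents):

    vec![("tokens", tokens!{
        t " " =>(3)=> [S(0)]
        // ...every other line of the file, rewritten the same way...
    }),]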

View File

@@ -11,11 +11,13 @@ use self::span::{Spanned, SpanVec};
pub mod expr;
pub mod func;
pub mod span;
pub_use_mod!(scope);
pub_use_mod!(parsing);
pub_use_mod!(tokens);
#[cfg(test)]
mod test;
/// Represents a parsed piece of source that can be layouted and in the future
/// also be queried for information used for refactorings, autocomplete, etc.

View File

@@ -397,3 +397,193 @@ impl<'s> FuncParser<'s> {
.unwrap_or_else(|| self.tokens.pos())
}
}
#[cfg(test)]
#[allow(non_snake_case)]
mod tests {
use crate::size::Size;
use super::super::test::{DebugFn, SpanlessEq};
use super::*;
use Node::{
Space as S, Newline as N,
ToggleItalic as Italic, ToggleBolder as Bold, ToggleMonospace as Mono,
};
pub use Expr::{Number as Num, Bool};
pub fn Id(text: &str) -> Expr { Expr::Ident(Ident(text.to_string())) }
pub fn Str(text: &str) -> Expr { Expr::Str(text.to_string()) }
fn T(text: &str) -> Node { Node::Text(text.to_string()) }
/// Test whether the given string parses into the given node list.
macro_rules! p {
($s:expr => [$($b:tt)*]) => {
let ctx = ParseContext { scope: &scope() };
let model = parse(Position::ZERO, $s, ctx).output;
let (expected, cmp) = model!([$($b)*]);
if !cmp(&model, &expected) {
fail($s, model, expected);
}
};
}
/// Test whether the given string yields the given parse errors.
macro_rules! e {
($s:expr => [$(($sl:tt:$sc:tt, $el:tt:$ec:tt, $e:expr)),* $(,)?]) => {
let ctx = ParseContext { scope: &scope() };
let errors = parse(Position::ZERO, $s, ctx).errors
.into_iter()
.map(|s| s.map(|e| e.message))
.collect::<Vec<_>>();
let expected = vec![
$(Spanned {
v: $e.to_string(),
span: Span {
start: Position { line: $sl, column: $sc },
end: Position { line: $el, column: $ec },
},
}),*
];
if errors != expected {
fail($s, errors, expected);
}
};
}
fn scope() -> Scope {
let mut scope = Scope::new::<DebugFn>();
scope.add::<DebugFn>("f");
scope.add::<DebugFn>("box");
scope
}
fn fail(src: &str, found: impl Debug, expected: impl Debug) {
eprintln!("source: {:?}", src);
eprintln!("found: {:#?}", found);
eprintln!("expected: {:#?}", expected);
panic!("test failed");
}
/// Parse a list of optionally spanned nodes into a syntax model.
macro_rules! model {
([$(($sl:tt:$sc:tt, $el:tt:$ec:tt, $n:expr)),* $(,)?]) => ((SyntaxModel {
nodes: vec![
$(Spanned { v: $n, span: Span {
start: Position { line: $sl, column: $sc },
end: Position { line: $el, column: $ec },
}}),*
]
}, <SyntaxModel as PartialEq>::eq));
([$($e:tt)*]) => ((SyntaxModel {
nodes: vec![$($e)*].into_iter().map(zspan).collect::<Vec<_>>()
}, <SyntaxModel as SpanlessEq>::spanless_eq));
}
/// Build a `DebugFn` function model.
macro_rules! func {
($name:expr
$(,pos: [$($item:expr),* $(,)?])?
$(,key: [$($key:expr => $value:expr),* $(,)?])?;
$($b:tt)*) => ({
#![allow(unused_mut, unused_assignments)]
let mut pos = Tuple::new();
let mut key = Object::new();
$(pos = Tuple { items: vec![$(zspan($item)),*] };)?
$(key = Object {
pairs: vec![$(Pair {
key: zspan(Ident($key.to_string())),
value: zspan($value),
}),*]
};)?
Node::Model(Box::new(DebugFn {
header: FuncHeader {
name: zspan(Ident($name.to_string())),
args: FuncArgs {
pos,
key,
},
},
body: func!(@body $($b)*),
}))
});
(@body Some([$($b:tt)*])) => (Some(model!([$($b)*]).0));
(@body None) => (None);
}
/// Span an element with a zero span.
fn zspan<T>(v: T) -> Spanned<T> {
Spanned { v, span: Span::ZERO }
}
#[test]
fn parse_flat_nodes() {
p!("" => []);
p!("hi" => [T("hi")]);
p!("*hi" => [Bold, T("hi")]);
p!("hi_" => [T("hi"), Italic]);
p!("`py`" => [Mono, T("py"), Mono]);
p!("hi you" => [T("hi"), S, T("you")]);
p!("💜\n\n 🌍" => [T("💜"), N, T("🌍")]);
}
#[test]
fn parse_functions() {
p!("[func]" => [func!("func"; None)]);
p!("[tree][hi *you*]" => [func!("tree"; Some([T("hi"), S, Bold, T("you"), Bold]))]);
p!("[f: , hi, * \"du\"]" => [func!("f", pos: [Id("hi"), Str("du")]; None)]);
p!("from [align: left] to" => [
T("from"), S, func!("align", pos: [Id("left")]; None), S, T("to")
]);
p!("[f: left, 12pt, false]" => [
func!("f", pos: [Id("left"), Expr::Size(Size::pt(12.0)), Bool(false)]; None)
]);
p!("[box: x=1.2pt, false][a b c] bye" => [
func!(
"box",
pos: [Bool(false)],
key: ["x" => Expr::Size(Size::pt(1.2))];
Some([T("a"), S, T("b"), S, T("c")])
),
S, T("bye"),
]);
}
#[test]
fn parse_spanned() {
p!("hi you" => [(0:0, 0:2, T("hi")), (0:2, 0:3, S), (0:3, 0:6, T("you"))]);
}
#[test]
fn parse_errors() {
e!("[f: , hi, * \"du\"]" => [
(0:4, 0:5, "expected value, found comma"),
(0:10, 0:11, "expected value, found invalid token"),
]);
e!("[f:, , ,]" => [
(0:3, 0:4, "expected value, found comma"),
(0:5, 0:6, "expected value, found comma"),
(0:7, 0:8, "expected value, found comma"),
]);
e!("[f:" => [(0:3, 0:3, "expected closing bracket")]);
e!("[f: hi" => [(0:6, 0:6, "expected closing bracket")]);
e!("[f: hey 12pt]" => [(0:7, 0:7, "expected comma")]);
e!("[box: x=, false z=y=4" => [
(0:8, 0:9, "expected value, found comma"),
(0:15, 0:15, "expected comma"),
(0:19, 0:19, "expected comma"),
(0:19, 0:20, "expected value, found equals sign"),
(0:21, 0:21, "expected closing bracket"),
]);
}
}

View File

@@ -116,7 +116,7 @@ impl Debug for Span {
}
/// A value with the span it corresponds to in the source code.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize)]
#[derive(Copy, Clone, Eq, PartialEq, Hash, Serialize)]
pub struct Spanned<T> {
/// The value.
pub v: T,
@@ -147,6 +147,14 @@ impl<T> Spanned<T> {
}
}
impl<T: Debug> Debug for Spanned<T> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
self.span.fmt(f)?;
f.write_str(": ")?;
self.v.fmt(f)
}
}
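With this impl, the span now leads the debug output of any spanned value. Assuming `Span`'s own `Debug` (shown only partially above) prints its start and end in the `line:column` form the tests use, a spanned token would render roughly as `(0:0, 0:5): Text("hello")`; the exact span formatting is an assumption here.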
/// A vector of spanned things.
pub type SpanVec<T> = Vec<Spanned<T>>;

src/syntax/test.rs Normal file (88 lines)
View File

@@ -0,0 +1,88 @@
use super::func::FuncHeader;
use super::expr::{Expr, Tuple, Object};
use super::*;
function! {
/// Most functions in the tests are parsed into the debug function for easy
/// inspection of arguments and body.
#[derive(Debug, Clone, PartialEq)]
pub struct DebugFn {
pub header: FuncHeader,
pub body: Option<SyntaxModel>,
}
parse(header, body, ctx, errors, decos) {
let cloned = header.clone();
header.args.pos.items.clear();
header.args.key.pairs.clear();
DebugFn {
header: cloned,
body: body!(opt: body, ctx, errors, decos),
}
}
layout(self, ctx, errors) { vec![] }
}
/// Compares elements by only looking at values and ignoring spans.
pub trait SpanlessEq<Rhs=Self> {
fn spanless_eq(&self, other: &Rhs) -> bool;
}
impl SpanlessEq for Vec<Spanned<Token<'_>>> {
fn spanless_eq(&self, other: &Vec<Spanned<Token>>) -> bool {
self.len() == other.len()
&& self.iter().zip(other).all(|(x, y)| x.v == y.v)
}
}
impl SpanlessEq for SyntaxModel {
fn spanless_eq(&self, other: &SyntaxModel) -> bool {
fn downcast<'a>(func: &'a (dyn Model + 'static)) -> &'a DebugFn {
func.downcast::<DebugFn>().expect("not a debug fn")
}
self.nodes.len() == other.nodes.len()
&& self.nodes.iter().zip(&other.nodes).all(|(x, y)| match (&x.v, &y.v) {
(Node::Model(a), Node::Model(b)) => {
downcast(a.as_ref()).spanless_eq(downcast(b.as_ref()))
}
(a, b) => a == b,
})
}
}
impl SpanlessEq for DebugFn {
fn spanless_eq(&self, other: &DebugFn) -> bool {
self.header.name.v == other.header.name.v
&& self.header.args.pos.spanless_eq(&other.header.args.pos)
&& self.header.args.key.spanless_eq(&other.header.args.key)
}
}
impl SpanlessEq for Expr {
fn spanless_eq(&self, other: &Expr) -> bool {
match (self, other) {
(Expr::Tuple(a), Expr::Tuple(b)) => a.spanless_eq(b),
(Expr::Object(a), Expr::Object(b)) => a.spanless_eq(b),
(a, b) => a == b,
}
}
}
impl SpanlessEq for Tuple {
fn spanless_eq(&self, other: &Tuple) -> bool {
self.items.len() == other.items.len()
&& self.items.iter().zip(&other.items)
.all(|(x, y)| x.v.spanless_eq(&y.v))
}
}
impl SpanlessEq for Object {
fn spanless_eq(&self, other: &Object) -> bool {
self.pairs.len() == other.pairs.len()
&& self.pairs.iter().zip(&other.pairs)
.all(|(x, y)| x.key.v == y.key.v && x.value.v.spanless_eq(&y.value.v))
}
}
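These impls compare only the `v` fields and ignore all span information. A minimal, self-contained sketch of the idea, using simplified stand-in types rather than the crate's real `Spanned` and `Token`:

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Span { start: usize, end: usize }

    #[derive(Debug, Clone, PartialEq)]
    struct Spanned<T> { v: T, span: Span }

    trait SpanlessEq<Rhs = Self> {
        fn spanless_eq(&self, other: &Rhs) -> bool;
    }

    impl<T: PartialEq> SpanlessEq for Vec<Spanned<T>> {
        fn spanless_eq(&self, other: &Self) -> bool {
            self.len() == other.len()
                && self.iter().zip(other).all(|(x, y)| x.v == y.v)
        }
    }

    fn main() {
        let a = vec![Spanned { v: "hi", span: Span { start: 0, end: 2 } }];
        let b = vec![Spanned { v: "hi", span: Span { start: 5, end: 7 } }];
        assert!(a != b);            // normal equality also compares spans
        assert!(a.spanless_eq(&b)); // spanless comparison looks only at values
    }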

View File

@@ -341,12 +341,17 @@ impl<'s> Tokens<'s> {
}
let c = self.peek().unwrap_or('n');
if self.mode == Body && is_escapable(c) {
let string = if is_escapable(c) {
let index = self.index();
self.eat();
Text(&self.src[index .. index + c.len_utf8()])
&self.src[index .. index + c.len_utf8()]
} else {
Text("\\")
"\\"
};
match self.mode {
Header => Invalid(string),
Body => Text(string),
}
}
@@ -456,3 +461,155 @@ pub fn is_identifier(string: &str) -> bool {
true
}
#[cfg(test)]
mod tests {
use super::*;
use Token::{
Space as S,
LineComment as LC, BlockComment as BC,
LeftParen as LP, RightParen as RP,
LeftBrace as LB, RightBrace as RB,
ExprIdent as Id, ExprNumber as Num, ExprBool as Bool,
Text as T,
};
#[allow(non_snake_case)]
fn Str(string: &'static str, terminated: bool) -> Token<'static> {
Token::ExprStr { string, terminated }
}
/// Test whether the given string tokenizes into the given list of tokens.
macro_rules! t {
($m:expr, $s:expr => [$(($sl:tt:$sc:tt, $el:tt:$ec:tt, $t:expr)),* $(,)?]) => {
let tokens = Tokens::new(Position::ZERO, $s, $m).collect::<Vec<_>>();
assert_eq!(tokens, vec![$(Spanned {
span: Span::new(Position::new($sl, $sc), Position::new($el, $ec)),
v: $t
}),*]);
};
($m:expr, $s:expr => [$($t:expr),* $(,)?]) => {
let tokens = Tokens::new(Position::ZERO, $s, $m)
.map(Spanned::value)
.collect::<Vec<_>>();
assert_eq!(tokens, vec![$($t),*]);
};
}
/// Parse a function token.
macro_rules! func {
($header:expr, Some(($sl:tt:$sc:tt, $el:tt:$ec:tt, $body:expr)), $terminated:expr) => {
Function {
header: $header,
body: Some(Spanned {
span: Span::new(Position::new($sl, $sc), Position::new($el, $ec)),
v: $body,
}),
terminated: $terminated,
}
};
($header:expr, None, $terminated:expr) => {
Function { header: $header, body: None, terminated: $terminated }
}
}
#[test]
fn tokenize_whitespace() {
t!(Body, "" => []);
t!(Body, " " => [S(0)]);
t!(Body, " " => [S(0)]);
t!(Body, "\t" => [S(0)]);
t!(Body, " \t" => [S(0)]);
t!(Body, "\n" => [S(1)]);
t!(Body, "\n " => [S(1)]);
t!(Body, " \n" => [S(1)]);
t!(Body, " \n " => [S(1)]);
t!(Body, "\r\n" => [S(1)]);
t!(Body, " \n\t \n " => [S(2)]);
t!(Body, "\n\r" => [S(2)]);
t!(Body, " \r\r\n \x0D" => [S(3)]);
}
#[test]
fn tokenize_comments() {
t!(Body, "a // bc\n " => [T("a"), S(0), LC(" bc"), S(1)]);
t!(Body, "a //a//b\n " => [T("a"), S(0), LC("a//b"), S(1)]);
t!(Body, "a //a//b\r\n" => [T("a"), S(0), LC("a//b"), S(1)]);
t!(Body, "a //a//b\n\nhello" => [T("a"), S(0), LC("a//b"), S(2), T("hello")]);
t!(Body, "/**/" => [BC("")]);
t!(Body, "_/*_/*a*/*/" => [Underscore, BC("_/*a*/")]);
t!(Body, "/*/*/" => [BC("/*/")]);
t!(Body, "abc*/" => [T("abc"), Invalid("*/")]);
}
#[test]
fn tokenize_header_only_tokens() {
t!(Body, "\"hi\"" => [T("\"hi"), T("\"")]);
t!(Body, "a: b" => [T("a"), T(":"), S(0), T("b")]);
t!(Body, "c=d, " => [T("c"), T("=d"), T(","), S(0)]);
t!(Header, "[" => [func!("", None, false)]);
t!(Header, "]" => [Invalid("]")]);
t!(Header, "(){}:=," => [LP, RP, LB, RB, Colon, Equals, Comma]);
t!(Header, "a:b" => [Id("a"), Colon, Id("b")]);
t!(Header, "=" => [Equals]);
t!(Header, "," => [Comma]);
t!(Header, r#""hello\"world""# => [Str(r#"hello\"world"#, true)]);
t!(Header, r#""hi", 12pt"# => [Str("hi", true), Comma, S(0), ExprSize(Size::pt(12.0))]);
t!(Header, "a: true, x=1" => [Id("a"), Colon, S(0), Bool(true), Comma, S(0), Id("x"), Equals, Num(1.0)]);
t!(Header, "120%" => [Num(1.2)]);
t!(Header, "🌓, 🌍," => [Invalid("🌓"), Comma, S(0), Invalid("🌍"), Comma]);
}
#[test]
fn tokenize_body_only_tokens() {
t!(Body, "_*`" => [Underscore, Star, Backtick]);
t!(Body, "[func]*bold*" => [func!("func", None, true), Star, T("bold"), Star]);
t!(Body, "hi_you_ there" => [T("hi"), Underscore, T("you"), Underscore, S(0), T("there")]);
t!(Header, "_*`" => [Invalid("_"), Invalid("*"), Invalid("`")]);
}
#[test]
fn tokenize_nested_functions() {
t!(Body, "[f: [=][*]]" => [func!("f: [=][*]", None, true)]);
t!(Body, "[_][[,],]," => [func!("_", Some((0:3, 0:9, "[,],")), true), T(",")]);
t!(Body, "[=][=][=]" => [func!("=", Some((0:3, 0:6, "=")), true), func!("=", None, true)]);
t!(Body, "[=][[=][=][=]]" => [func!("=", Some((0:3, 0:14, "[=][=][=]")), true)]);
}
#[test]
fn tokenize_escaped_symbols() {
t!(Body, r"\\" => [T(r"\")]);
t!(Body, r"\[" => [T("[")]);
t!(Body, r"\]" => [T("]")]);
t!(Body, r"\*" => [T("*")]);
t!(Body, r"\_" => [T("_")]);
t!(Body, r"\`" => [T("`")]);
t!(Body, r"\/" => [T("/")]);
}
#[test]
fn tokenize_unescapable_symbols() {
t!(Body, r"\a" => [T("\\"), T("a")]);
t!(Body, r"\:" => [T(r"\"), T(":")]);
t!(Body, r"\=" => [T(r"\"), T("=")]);
t!(Header, r"\\\\" => [Invalid("\\"), Invalid("\\")]);
t!(Header, r"\a" => [Invalid("\\"), Id("a")]);
t!(Header, r"\:" => [Invalid(r"\"), Colon]);
t!(Header, r"\=" => [Invalid(r"\"), Equals]);
t!(Header, r"\," => [Invalid(r"\"), Comma]);
}
#[test]
fn tokenize_with_spans() {
t!(Body, "hello" => [(0:0, 0:5, T("hello"))]);
t!(Body, "ab\r\nc" => [(0:0, 0:2, T("ab")), (0:2, 1:0, S(1)), (1:0, 1:1, T("c"))]);
t!(Body, "[x = \"(1)\"]*" => [(0:0, 0:11, func!("x = \"(1)\"", None, true)), (0:11, 0:12, Star)]);
t!(Body, "// ab\r\n\nf" => [(0:0, 0:5, LC(" ab")), (0:5, 2:0, S(2)), (2:0, 2:1, T("f"))]);
t!(Body, "/*b*/_" => [(0:0, 0:5, BC("b")), (0:5, 0:6, Underscore)]);
t!(Header, "a=10" => [(0:0, 0:1, Id("a")), (0:1, 0:2, Equals), (0:2, 0:4, Num(10.0))]);
}
}

View File

@@ -1,77 +0,0 @@
// Whitespace.
t "" => []
t " " => [S(0)]
t " " => [S(0)]
t "\t" => [S(0)]
t " \t" => [S(0)]
t "\n" => [S(1)]
t "\n " => [S(1)]
t " \n" => [S(1)]
t " \n " => [S(1)]
t " \n\t \n " => [S(2)]
t "\r\n" => [S(1)]
t " \r\r\n \x0D" => [S(3)]
t "\n\r" => [S(2)]
// Comments.
t "a // bc\n " => [T("a"), S(0), LC(" bc"), S(1)]
t "a //a//b\n " => [T("a"), S(0), LC("a//b"), S(1)]
t "a //a//b\r\n" => [T("a"), S(0), LC("a//b"), S(1)]
t "a //a//b\n\nhello" => [T("a"), S(0), LC("a//b"), S(2), T("hello")]
t "/**/" => [BC("")]
t "_/*_/*a*/*/" => [Underscore, BC("_/*a*/")]
t "/*/*/" => [BC("/*/")]
t "abc*/" => [T("abc"), Invalid("*/")]
// Header only tokens.
th "[" => [Func("", None, false)]
th "]" => [Invalid("]")]
th "(){}:=," => [LP, RP, LB, RB, Colon, Equals, Comma]
th "a:b" => [Id("a"), Colon, Id("b")]
th "=" => [Equals]
th "," => [Comma]
th r#""hello\"world""# => [Str(r#"hello\"world"#)]
th r#""hi", 12pt"# => [Str("hi"), Comma, S(0), Size(12.0)]
th "\"hi\"" => [T("\"hi"), T("\"")]
th "a: true, x=1" => [Id("a"), Colon, S(0), Bool(true), Comma, S(0),
Id("x"), Equals, Num(1.0)]
th "120%" => [Num(1.2)]
th "🌓, 🌍," => [T("🌓"), Comma, S(0), T("🌍"), Comma]
tb "a: b" => [T("a"), T(":"), S(0), T("b")]
tb "c=d, " => [T("c"), T("=d"), T(","), S(0)]
// Body only tokens.
tb "_*`" => [Underscore, Star, Backtick]
tb "[func]*bold*" => [Func("func", None, true), Star, T("bold"), Star]
tb "hi_you_ there" => [T("hi"), Underscore, T("you"), Underscore, S(0), T("there")]
th "_*`" => [Invalid("_"), Invalid("*"), Invalid("`")]
// Nested functions.
tb "[f: [=][*]]" => [Func("f: [=][*]", None, true)]
tb "[_][[,],]," => [Func("_", Some("[,],"), true), T(",")]
tb "[=][=][=]" => [Func("=", Some("="), true), Func("=", None, true)]
tb "[=][[=][=][=]]" => [Func("=", Some("[=][=][=]")), true]
// Escapes.
tb r"\[" => [T("[")]
tb r"\]" => [T("]")]
tb r"\\" => [T(r"\")]
tb r"\/" => [T("/")]
tb r"\*" => [T("*")]
tb r"\_" => [T("_")]
tb r"\`" => [T("`")]
// Unescapable special symbols.
th r"\:" => [T(r"\"), T(":")]
th r"\=" => [T(r"\"), T("=")]
th r"\:" => [T(r"\"), Colon]
th r"\=" => [T(r"\"), Equals]
th r"\," => [T(r"\"), Comma]
// Spans.
tbs "hello" => [(0:0, 0:5, T("hello"))]
tbs "ab\r\nc" => [(0:0, 0:2, T("ab")), (0:2, 1:0, S(1)), (1:0, 1:1, T("c"))]
tbs "[x = \"(1)\"]*" => [(0:0, 0:11, Func("x = \"(1)\"", None, true)), (0:11, 0:12, Star)]
tbs "// ab\r\n\nf" => [(0:0, 0:5, LC(" ab")), (0:5, 2:0, S(2)), (2:0, 2:1, T("f"))]
tbs "/*b*/_" => [(0:0, 0:5, BC("b")), (0:5, 0:6, Underscore)]
ths "a=10" => [(0:0, 0:1, Id("a")), (0:1, 0:2, Equals), (0:2, 0:4, Num(10.0))]

View File

@@ -1,46 +0,0 @@
// Basics.
p "" => []
p "hi" => [T("hi")]
p "hi you" => [T("hi"), S, T("you")]
p "\n\n 🌍" => [T(""), N, T("🌍")]
// Functions.
p "[func]" => [func!("func"; None)]
p "[tree][hi *you*]" => [func!("tree"; Some([T("hi"), S, B, T("you"), B]))]
p "from [align: left] to" => [
T("from"), S, func!("align", pos: [ID("left")]; None), S, T("to"),
]
p "[f: left, 12pt, false]" => [
func!("f", pos: [ID("left"), SIZE(Size::pt(12.0)), BOOL(false)]; None)
]
p "[f: , hi, * \"du\"]" => [func!("f", pos: [ID("hi"), STR("du")]; None)]
p "[box: x=1.2pt, false][a b c] bye" => [
func!(
"box",
pos: [BOOL(false)],
key: ["x" => SIZE(Size::pt(1.2))];
Some([T("a"), S, T("b"), S, T("c")])
),
S, T("bye"),
]
// Errors.
e "[f: , hi, * \"du\"]" => [
(0:4, 0:5, "expected value, found comma"),
(0:10, 0:11, "expected value, found invalid identifier"),
]
e "[f:, , ,]" => [
(0:3, 0:4, "expected value, found comma"),
(0:5, 0:6, "expected value, found comma"),
(0:7, 0:8, "expected value, found comma"),
]
e "[f:" => [(0:3, 0:3, "expected closing bracket")]
e "[f: hi" => [(0:6, 0:6, "expected closing bracket")]
e "[f: hey 12pt]" => [(0:7, 0:7, "expected comma")]
e "[box: x=, false y=z=4" => [
(0:8, 0:9, "expected value, found comma"),
(0:15, 0:15, "expected comma"),
(0:19, 0:19, "expected comma"),
(0:19, 0:20, "expected value, found equals sign"),
(0:21, 0:21, "expected closing bracket"),
]

View File

@@ -1,297 +0,0 @@
use std::fmt::Debug;
use typstc::func::Scope;
use typstc::size::Size;
use typstc::syntax::*;
use typstc::{function, parse};
mod spanless;
use spanless::SpanlessEq;
/// The result of a single test case.
enum Case {
Okay,
Failed {
line: usize,
src: &'static str,
expected: String,
found: String,
}
}
/// Test all tests.
fn test(tests: Vec<(&str, Vec<Case>)>) {
println!();
let mut errors = false;
let len = tests.len();
println!("Running {} test{}", len, if len > 1 { "s" } else { "" });
for (file, cases) in tests {
print!("Testing: {}. ", file);
let mut okay = 0;
let mut failed = 0;
for case in cases {
match case {
Case::Okay => okay += 1,
Case::Failed { line, src, expected, found } => {
println!();
println!(" ❌ Case failed in file {}.rs in line {}.", file, line);
println!(" - Source: {:?}", src);
println!(" - Expected: {}", expected);
println!(" - Found: {}", found);
failed += 1;
}
}
}
// Print a small summary.
print!("{} okay, {} failed.", okay, failed);
if failed == 0 {
print!("")
} else {
errors = true;
}
println!();
}
println!();
if errors {
std::process::exit(-1);
}
}
/// The main test macro.
macro_rules! tokens {
($($task:ident $src:expr =>($line:expr)=> [$($e:tt)*])*) => ({
vec![$({
let (okay, expected, found) = case!($task $src, [$($e)*]);
if okay {
Case::Okay
} else {
Case::Failed {
line: $line,
src: $src,
expected: format(expected),
found: format(found),
}
}
}),*]
});
}
/// Indented formatting for failed cases.
fn format(thing: impl Debug) -> String {
format!("{:#?}", thing).replace('\n', "\n ")
}
/// Evaluates a single test.
macro_rules! case {
(t $($rest:tt)*) => (case!(@tokenize SpanlessEq::spanless_eq, $($rest)*));
(ts $($rest:tt)*) => (case!(@tokenize PartialEq::eq, $($rest)*));
(@tokenize $cmp:expr, $src:expr, [$($e:tt)*]) => ({
let expected = list!(tokens [$($e)*]);
let found = tokenize($src).collect::<Vec<_>>();
($cmp(&found, &expected), expected, found)
});
(p $($rest:tt)*) => (case!(@parse SpanlessEq::spanless_eq, $($rest)*));
(ps $($rest:tt)*) => (case!(@parse PartialEq::eq, $($rest)*));
(@parse $cmp:expr, $src:expr, [$($e:tt)*]) => ({
let expected = SyntaxModel { nodes: list!(nodes [$($e)*]) };
let found = parse($src, ParseContext { scope: &scope() }).0;
($cmp(&found, &expected), expected, found)
});
(c $src:expr, [$($e:tt)*]) => ({
let expected = Colorization { tokens: list!(decorations [$($e)*]) };
let found = parse($src, ParseContext { scope: &scope() }).1;
(expected == found, expected, found)
});
(e $src:expr, [$($e:tt)*]) => ({
let expected = list!([$($e)*]).into_iter()
.map(|s| s.map(|m| m.to_string()))
.collect();
let found = parse($src, ParseContext { scope: &scope() }).2;
(expected == found, expected, found)
});
}
/// A scope containing the `DebugFn` as a fallback.
fn scope() -> Scope {
Scope::with_fallback::<DebugFn>()
}
/// Parses possibly-spanned lists of token or node expressions.
macro_rules! list {
(expr [$($item:expr),* $(,)?]) => ({
#[allow(unused_imports)]
use cuts::expr::*;
Tuple { items: vec![$(zspan($item)),*] }
});
(expr [$($key:expr =>($_:expr)=> $value:expr),* $(,)?]) => ({
#[allow(unused_imports)]
use cuts::expr::*;
Object {
pairs: vec![$(Pair {
key: zspan(Ident($key.to_string())),
value: zspan($value),
}),*]
}
});
($cut:ident [$($e:tt)*]) => ({
#[allow(unused_imports)]
use cuts::$cut::*;
list!([$($e)*])
});
([$(($sl:tt:$sc:tt, $el:tt:$ec:tt, $v:expr)),* $(,)?]) => ({
vec![
$(Spanned { v: $v, span: Span {
start: Position { line: $sl, column: $sc },
end: Position { line: $el, column: $ec },
}}),*
]
});
([$($e:tt)*]) => (vec![$($e)*].into_iter().map(zspan).collect::<Vec<_>>());
}
/// Composes a function expression.
macro_rules! func {
($name:expr $(,pos: [$($p:tt)*])? $(,key: [$($k:tt)*])?; $($b:tt)*) => ({
#![allow(unused_mut, unused_assignments)]
let mut positional = Tuple::new();
let mut keyword = Object::new();
$(positional = list!(expr [$($p)*]);)?
$(keyword = list!(expr [$($k)*]);)?
Node::Model(Box::new(DebugFn {
header: FuncHeader {
name: zspan(Ident($name.to_string())),
args: FuncArgs {
positional,
keyword,
},
},
body: func!(@body $($b)*),
}))
});
(@body Some($($b:tt)*)) => (Some(SyntaxModel{ nodes: list!(nodes $($b)*) }));
(@body None) => (None);
}
function! {
/// Most functions in the tests are parsed into the debug function for easy
/// inspection of arguments and body.
#[derive(Debug, PartialEq)]
pub struct DebugFn {
header: FuncHeader,
body: Option<SyntaxTree>,
}
parse(header, body, ctx) {
let cloned = header.clone();
header.args.clear();
DebugFn {
header: cloned,
body: parse!(optional: body, ctx),
}
}
layout() { vec![] }
}
/// Span an element with a zero span.
fn zspan<T>(v: T) -> Spanned<T> {
Spanned { v, span: Span::ZERO }
}
/// Abbreviations for tokens, nodes, colors and expressions.
#[allow(non_snake_case, dead_code)]
mod cuts {
pub mod tokens {
pub use typstc::syntax::Token::{
Whitespace as W,
LineComment as LC,
BlockComment as BC,
StarSlash as SS,
LeftBracket as LB,
RightBracket as RB,
LeftParen as LP,
RightParen as RP,
LeftBrace as LBR,
RightBrace as RBR,
Colon as CL,
Comma as CM,
Equals as EQ,
ExprIdent as ID,
ExprStr as STR,
ExprSize as SIZE,
ExprNumber as NUM,
ExprBool as BOOL,
Star as S,
Underscore as U,
Backtick as B,
Text as T,
};
}
pub mod nodes {
use typstc::syntax::Node;
pub use Node::{
Space as S,
Newline as N,
ToggleItalic as I,
ToggleBolder as B,
ToggleMonospace as M,
};
pub fn T(text: &str) -> Node {
Node::Text(text.to_string())
}
}
pub mod decorations {
pub use typstc::syntax::Decoration::*;
}
pub mod expr {
use typstc::syntax::{Expression, Ident};
pub use Expression::{
Number as NUM,
Size as SIZE,
Bool as BOOL,
};
pub fn ID(text: &str) -> Expression {
Expression::Ident(Ident(text.to_string()))
}
pub fn STR(text: &str) -> Expression {
Expression::Str(text.to_string())
}
}
}
fn main() {
test(include!("../cache/parser-tests.rs"))
}

View File

@@ -8,15 +8,13 @@ from PIL import Image, ImageDraw, ImageFont
BASE = os.path.dirname(__file__)
CACHE = os.path.join(BASE, '../cache/')
SERIAL = os.path.join(CACHE, 'serial/')
RENDER = os.path.join(CACHE, 'render/')
def main():
assert len(sys.argv) == 2, 'usage: python render.py <name>'
name = sys.argv[1]
filename = os.path.join(SERIAL, name)
filename = os.path.join(CACHE, f'{name}.serialized')
with open(filename, encoding='utf-8') as file:
lines = [line[:-1] for line in file.readlines()]
@@ -24,8 +22,7 @@ def main():
renderer.render()
image = renderer.export()
pathlib.Path(RENDER).mkdir(parents=True, exist_ok=True)
image.save(os.path.join(RENDER, f'{name}.png'))
image.save(os.path.join(CACHE, f'{name}.png'))
class MultiboxRenderer:

View File

@@ -1,64 +0,0 @@
use super::*;
/// Compares elements by only looking at values and ignoring spans.
pub trait SpanlessEq<T> {
fn spanless_eq(&self, other: &T) -> bool;
}
impl SpanlessEq<Vec<Spanned<Token<'_>>>> for Vec<Spanned<Token<'_>>> {
fn spanless_eq(&self, other: &Vec<Spanned<Token>>) -> bool {
self.len() == other.len()
&& self.iter().zip(other).all(|(x, y)| x.v == y.v)
}
}
impl SpanlessEq<SyntaxModel> for SyntaxModel {
fn spanless_eq(&self, other: &SyntaxModel) -> bool {
fn downcast(func: &dyn Model) -> &DebugFn {
func.downcast::<DebugFn>().expect("not a debug fn")
}
self.nodes.len() == other.nodes.len()
&& self.nodes.iter().zip(&other.nodes).all(|(x, y)| match (&x.v, &y.v) {
(Node::Model(a), Node::Model(b)) => {
downcast(a.as_ref()).spanless_eq(downcast(b.as_ref()))
}
(a, b) => a == b,
})
}
}
impl SpanlessEq<DebugFn> for DebugFn {
fn spanless_eq(&self, other: &DebugFn) -> bool {
self.header.name.v == other.header.name.v
&& self.header.args.positional.spanless_eq(&other.header.args.positional)
&& self.header.args.keyword.spanless_eq(&other.header.args.keyword)
}
}
impl SpanlessEq<Expression> for Expression {
fn spanless_eq(&self, other: &Expression) -> bool {
match (self, other) {
(Expression::Tuple(a), Expression::Tuple(b)) => a.spanless_eq(b),
(Expression::Object(a), Expression::Object(b)) => a.spanless_eq(b),
(a, b) => a == b,
}
}
}
impl SpanlessEq<Tuple> for Tuple {
fn spanless_eq(&self, other: &Tuple) -> bool {
self.items.len() == other.items.len()
&& self.items.iter().zip(&other.items)
.all(|(x, y)| x.v.spanless_eq(&y.v))
}
}
impl SpanlessEq<Object> for Object {
fn spanless_eq(&self, other: &Object) -> bool {
self.pairs.len() == other.pairs.len()
&& self.pairs.iter().zip(&other.pairs)
.all(|(x, y)| x.key.v == y.key.v && x.value.v.spanless_eq(&y.value.v))
}
}

View File

@@ -21,11 +21,9 @@ type DynResult<T> = Result<T, Box<dyn Error>>;
fn main() -> DynResult<()> {
let opts = Options::parse();
create_dir_all("tests/cache/serial")?;
create_dir_all("tests/cache/render")?;
create_dir_all("tests/cache/pdf")?;
create_dir_all("tests/cache")?;
let tests: Vec<_> = read_dir("tests/layouter/")?.collect();
let tests: Vec<_> = read_dir("tests/")?.collect();
let mut filtered = Vec::new();
for entry in tests {
@@ -79,6 +77,11 @@ fn test(name: &str, src: &str) -> DynResult<()> {
let layouts = compile(&typesetter, src);
// Write the PDF file.
let path = format!("tests/cache/{}.pdf", name);
let file = BufWriter::new(File::create(path)?);
pdf::export(&layouts, typesetter.loader(), file)?;
// Compute the font's paths.
let mut fonts = HashMap::new();
let loader = typesetter.loader().borrow();
@@ -91,27 +94,29 @@
drop(loader);
// Write the serialized layout file.
let path = format!("tests/cache/serial/{}", name);
let mut file = BufWriter::new(File::create(path)?);
let path = format!("tests/cache/{}.serialized", name);
let mut file = BufWriter::new(File::create(&path)?);
// Write the font mapping into the serialization file.
writeln!(file, "{}", fonts.len())?;
for (index, path) in fonts.iter() {
writeln!(file, "{} {} {}", index.id, index.variant, path)?;
}
layouts.serialize(&mut file)?;
file.flush()?;
drop(file);
// Render the layout into a PNG.
Command::new("python")
.arg("tests/src/render.py")
.arg(name)
.spawn()
.expect("failed to run python renderer");
.expect("failed to run python renderer")
.wait()
.expect("command did not run");
// Write the PDF file.
let path = format!("tests/cache/pdf/{}.pdf", name);
let file = BufWriter::new(File::create(path)?);
pdf::export(&layouts, typesetter.loader(), file)?;
std::fs::remove_file(path)?;
Ok(())
}
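Judging from the `writeln!` calls above, the `.serialized` file handed to `render.py` starts with the number of fonts, continues with one `id variant path` line per font, and ends with the serialized layouts. An illustrative excerpt (paths and indices are made up) might look like:

    2
    0 0 ../fonts/NotoSansRegular.ttf
    1 2 ../fonts/NotoSansBold.ttf
    ...serialized layouts...

The real values come from the typesetter's font loader, and the file is removed again once the Python renderer has produced the PNG.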