;;;; web/panettone/test/inline-markdown_test.lisp
(in-package :panettone.tests)
(declaim (optimize (safety 3)))

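;; Defines a test asserting that RENDER-INLINE-MARKDOWN turns INPUT into
;; exactly EXPECTED. The renderer prints to *STANDARD-OUTPUT*, so its
;; output is captured via WITH-OUTPUT-TO-STRING.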
(defmacro inline-markdown-unit-test (name input expected)
  `(test ,name
     (is (equal
           ,expected
           (with-output-to-string (*standard-output*)
             (render-inline-markdown ,input))))))

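;; Typical input: _…_ becomes <em>, ~~…~~ becomes <del>, `…` becomes <code>.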
(inline-markdown-unit-test
  inline-markdown-typical-test
  "hello _world_, here is ~~no~~ `code`!"
  "hello <em>world</em>, here is <del>no</del> <code>code</code>!")

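;; Underscore and asterisk emphasis both render as <em>.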
(inline-markdown-unit-test
  inline-markdown-two-emphasize-types-test
  "_stress_ *this*"
  "<em>stress</em> <em>this</em>")

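;; HTML-special and non-ASCII characters are escaped as entities.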
(inline-markdown-unit-test
  inline-markdown-html-escaping-test
  "<tag>öäü"
  "&lt;tag&gt;&#246;&#228;&#252;")

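;; Markup is left uninterpreted inside code spans and inside ~~…~~.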
(inline-markdown-unit-test
  inline-markdown-nesting-test
  "`inside code *anything* goes`, but also ~~*here*~~"
  "<code>inside code *anything* goes</code>, but also <del>*here*</del>")

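;; Backslash escapes: \\ yields a literal backslash; \*, \_, \` and \~~
;; suppress the corresponding markup.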
(inline-markdown-unit-test
  inline-markdown-escaping-test
  "A backslash \\\\ shows: \\*, \\_, \\` and \\~~"
  "A backslash \\ shows: *, _, ` and ~~")

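;; Inside a code span, \` escapes a backtick without ending the span,
;; while a trailing \\ still yields a single backslash.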
(inline-markdown-unit-test
  inline-markdown-nested-escaping-test
  "`prevent \\`code\\` from ending, but never stand alone \\\\`"
  "<code>prevent `code` from ending, but never stand alone \\</code>")

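;; A backslash before an ordinary character is passed through verbatim.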
(inline-markdown-unit-test
  inline-markdown-escape-normal-tokens-test
  "\\Normal tokens \\escaped?"
  "\\Normal tokens \\escaped?")

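;; Markup left open at the end of the input is closed implicitly.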
(inline-markdown-unit-test
  inline-markdown-no-unclosed-tags-test
  "A tag, once opened, _must be closed"
  "A tag, once opened, <em>must be closed</em>")

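;; Multi-codepoint emoji (ZWJ sequences) are escaped codepoint by
;; codepoint without confusing the tokenizer.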
(inline-markdown-unit-test
  inline-markdown-unicode-safe
  "Does Unicode 👨‍👨‍👧‍👦 break \\👩🏾‍🦰 tokenization?"
  "Does Unicode &#128104;&#8205;&#128104;&#8205;&#128103;&#8205;&#128102; break \\&#128105;&#127998;&#8205;&#129456; tokenization?")