2023-02-28 14:51:54 -05:00
|
|
|
|
module Tests.Lexer
|
|
|
|
|
|
|
|
|
|
import Quox.Name
|
|
|
|
|
import Quox.Lexer
|
|
|
|
|
import TAP
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
||| Ways a single lexer test can fail:
||| * `LexError`:           the lexer itself reported an error
||| * `WrongLex want got`:  lexing succeeded but produced the wrong tokens
||| * `ExpectedFail got`:   lexing succeeded where failure was expected
private
data Failure = LexError Lexer.Error
             | WrongLex (List Token) (List Token)
             | ExpectedFail (List Token)
|
|
|
|
|
|
|
|
|
|
||| Render a `Failure` as key/value diagnostic pairs for the TAP output,
||| tagging each with its constructor name under "type".
private
ToInfo Failure where
  toInfo (LexError e) =
    [("type", "LexError"), ("info", show e)]
  toInfo (WrongLex expected actual) =
    [("type", "WrongLex"), ("want", show expected), ("got", show actual)]
  toInfo (ExpectedFail actual) =
    [("type", "ExpectedFail"), ("got", show actual)]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
||| Replace newlines with a visible placeholder so multi-line test inputs
||| print as a single line inside the 「…」 test names.
|||
||| NOTE(review): in the source as received, the replacement was the empty
||| character literal `''`, which is not valid Idris — almost certainly a
||| non-ASCII character lost in transit. Using U+2424 SYMBOL FOR NEWLINE
||| ('␤') as the placeholder; confirm against the original file.
private
denewline : String -> String
denewline = pack . map (\case '\n' => '␤'; c => c) . unpack
|
|
|
|
|
|
|
|
|
|
||| Test that lexing `str` succeeds and yields exactly `toks`
||| (token positions/bounds are discarded via `val` before comparing).
private
lexes : String -> List Token -> Test
lexes str toks = test "「\{denewline str}」" $ do
  actual <- bimap LexError (map val) $ lex str
  when (actual /= toks) $ throwError $ WrongLex toks actual
|
2023-02-28 14:51:54 -05:00
|
|
|
|
|
|
|
|
|
||| Test that lexing `str` fails; if it unexpectedly succeeds, report the
||| tokens it produced.
private
lexFail : String -> Test
lexFail str = test "「\{denewline str}」 # fails" $
  case lex str of
    Left _    => Right ()
    Right got => Left $ ExpectedFail $ map val got
|
2023-02-28 14:51:54 -05:00
|
|
|
|
|
|
|
|
|
||| The lexer test tree: one group per token category
||| (comments, names/keywords, punctuation, tags, strings, universes).
export
tests : Test
tests = "lexer" :- [
  -- line and (nestable) block comments produce no tokens
  "comments" :- [
    lexes "" [],
    lexes " " [],
    lexes "-- line comment" [],
    lexes "name -- line comment" [I "name"],
    lexes "-- line comment\nnameBetween -- and another" [I "nameBetween"],
    lexes "{- block comment -}" [],
    lexes "{- {- nested -} block comment -}" []
  ],

  -- I = identifier token, R = reserved word/symbol,
  -- MakeName builds a dotted qualified name (snoc-list of modules + base)
  "identifiers & keywords" :- [
    lexes "abc" [I "abc"],
    lexes "abc def" [I "abc", I "def"],
    lexes "abc_def" [I "abc_def"],
    lexes "abc-def" [I "abc-def"],
    lexes "abc{-comment-}def" [I "abc", I "def"],
    -- ASCII spellings of the reserved unicode keywords
    lexes "λ" [R "λ"],
    lexes "fun" [R "λ"],
    lexes "δ" [R "δ"],
    lexes "dfun" [R "δ"],
    -- keyword prefixes of longer words are still plain identifiers
    lexes "funky" [I "funky"],
    lexes "δελτα" [I "δελτα"],
    lexes "★★" [I "★★"],
    lexes "Types" [I "Types"],
    lexes "a.b.c.d.e" [I $ MakeName [< "a","b","c","d"] "e"],
    lexes "normalïse" [I "normalïse"],
    -- ↑ replace i + combining ¨ with precomposed ï
    lexes "map#" [I "map#"],
    lexes "write!" [I "write!"],
    lexes "uhh??!?!?!?" [I "uhh??!?!?!?"],

    todo "check for reserved words in a qname",
    -- lexes "abc.fun.def" [I "abc", R ".", R "λ", R ".", I "def"],

    -- operator-like names; "**" alone is the reserved product symbol
    lexes "+" [I "+"],
    lexes "*" [I "*"],
    lexes "**" [R "×"],
    lexes "***" [I "***"],
    lexes "+**" [I "+**"],
    lexes "+#" [I "+#"],
    lexes "+.+.+" [I $ MakeName [< "+", "+"] "+"],
    lexes "a.+" [I $ MakeName [< "a"] "+"],
    lexes "+.a" [I $ MakeName [< "+"] "a"],

    lexes "+a" [I "+", I "a"],

    -- a trailing/leading dot is the reserved "·", not part of a qname
    lexes "x." [I "x", R "·"],
    lexes "&." [I "&", R "·"],
    lexes ".x" [R "·", I "x"],
    lexes "a.b.c." [I $ MakeName [< "a", "b"] "c", R "·"],

    -- "case" keyword family; only the exact spellings are reserved
    lexes "case" [R "case"],
    lexes "caseω" [R "caseω"],
    lexes "case#" [R "caseω"],
    lexes "case1" [R "case1"],
    lexes "case0" [I "case0"],
    lexes "case##" [I "case##"],

    lexes "_" [R "_"],
    lexes "_a" [I "_a"],
    lexes "a_" [I "a_"],

    lexes "a'" [I "a'"],
    lexes "+'" [I "+'"]
  ],

  "syntax characters" :- [
    lexes "()" [R "(", R ")"],
    lexes "(a)" [R "(", I "a", R ")"],
    lexes "(^)" [R "(", I "^", R ")"],
    lexes "{a,b}" [R "{", I "a", R ",", I "b", R "}"],
    lexes "{+,-}" [R "{", I "+", R ",", I "-", R "}"]
  ],

  -- T = tag token: 'name, or '"…" for tags with spaces; a bare ' fails
  "tags" :- [
    lexes #" 'a "# [T "a"],
    lexes #" 'abc "# [T "abc"],
    lexes #" '+ "# [T "+"],
    lexes #" '$$$ "# [T "$$$"],
    lexes #" 'tag.with.dots "# [T "tag.with.dots"],
    lexes #" '"multi word tag" "# [T "multi word tag"],
    lexes #" '"" "# [T ""],
    lexFail #" ' "#,
    lexFail #" '' "#
  ],

  -- S = string literal token, with backslash escapes
  "strings" :- [
    lexes #" "" "# [S ""],
    lexes #" "abc" "# [S "abc"],
    lexes #" "\"" "# [S "\""],
    lexes #" "\\" "# [S "\\"],
    lexes #" "\\\"" "# [S "\\\""],
    todo "other escapes"
  ],

  -- TYPE n = universe literal; ASCII digits or subscripts, "Type" or "★"
  "universes" :- [
    lexes "Type0" [TYPE 0],
    lexes "Type₀" [TYPE 0],
    lexes "Type9999999" [TYPE 9999999],
    lexes "★₀" [TYPE 0],
    lexes "★₆₉" [TYPE 69],
    lexes "★4" [TYPE 4],
    lexes "Type" [R "★"],
    lexes "★" [R "★"]
  ]
]
|