quox/tests/Tests/Lexer.idr

module Tests.Lexer

import Quox.Name
import Quox.Parser.Lexer
import TAP

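-- ways a lexer test can fail: the lexer itself errors, it produces the
-- wrong token list, or it succeeds where a failure was expected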
private
data Failure = LexError Lexer.Error
             | WrongLex (List Token) (List Token)
             | ExpectedFail (List Token)

private
ToInfo Failure where
  toInfo (LexError err) =
    [("type", "LexError"),
     ("info", show err)]
  toInfo (WrongLex want got) =
    [("type", "WrongLex"),
     ("want", show want),
     ("got", show got)]
  toInfo (ExpectedFail got) =
    [("type", "ExpectedFail"),
     ("got", show got)]
private
denewline : String -> String
denewline = pack . map (\case '\n' => '␤'; c => c) . unpack
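
-- assert that the string lexes successfully to exactly the given token values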
private
lexes : String -> List Token -> Test
lexes str toks = test "「\{denewline str}」" $ do
  res <- bimap LexError (map val) $ lex str
  unless (toks == res) $ throwError $ WrongLex toks res
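
-- assert that lexing the string fails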
private
lexFail : String -> Test
lexFail str = test "「\{denewline str}」 # fails" $
  either (const $ Right ()) (Left . ExpectedFail . map val) $ lex str

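-- the full lexer test tree, grouped by syntactic category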
export
tests : Test
tests = "lexer" :- [
"comments" :- [
lexes "" [],
lexes " " [],
lexes "-- line comment" [],
lexes "name -- line comment" [Name "name"],
lexes "-- line comment\nnameBetween -- and another" [Name "nameBetween"],
2023-02-28 14:51:54 -05:00
lexes "{- block comment -}" [],
lexes "{- {- nested -} block comment -}" []
],
"identifiers & keywords" :- [
lexes "abc" [Name "abc"],
lexes "abc def" [Name "abc", Reserved "def"],
lexes "abc_def" [Name "abc_def"],
lexes "abc-def" [Name "abc-def"],
lexes "abc{-comment-}def" [Name "abc", Reserved "def"],
lexes "λ" [Reserved "λ"],
lexes "fun" [Reserved "λ"],
lexes "δ" [Reserved "δ"],
lexes "dfun" [Reserved "δ"],
lexes "funky" [Name "funky"],
lexes "δελτα" [Name "δελτα"],
lexes "★★" [Name "★★"],
lexes "Types" [Name "Types"],
2023-03-17 16:51:28 -04:00
lexes "a.b.c.d.e" [Name $ MakePName [< "a","b","c","d"] "e"],
lexes "normalïse" [Name "normalïse"],
2023-02-28 14:51:54 -05:00
-- ↑ replace i + combining ¨ with precomposed ï
lexes "map#" [Name "map#"],
lexes "write!" [Name "write!"],
lexes "uhh??!?!?!?" [Name "uhh??!?!?!?"],
2023-02-28 14:51:54 -05:00
todo "check for reserved words in a qname",
2023-05-21 14:09:34 -04:00
skip $
lexes "abc.fun.def"
[Name "abc", Reserved ".", Reserved "λ", Reserved ".", Name "def"],
lexes "+" [Name "+"],
lexes "*" [Name "*"],
lexes "**" [Reserved "×"],
lexes "***" [Name "***"],
lexes "+**" [Name "+**"],
lexes "+#" [Name "+#"],
2023-03-17 16:51:28 -04:00
lexes "+.+.+" [Name $ MakePName [< "+", "+"] "+"],
lexes "a.+" [Name $ MakePName [< "a"] "+"],
lexes "+.a" [Name $ MakePName [< "+"] "a"],
lexes "+a" [Name "+", Name "a"],
lexes "x." [Name "x", Reserved "."],
lexes "&." [Name "&", Reserved "."],
lexes ".x" [Reserved ".", Name "x"],
lexes "a.b.c." [Name $ MakePName [< "a", "b"] "c", Reserved "."],
lexes "case" [Reserved "case"],
lexes "caseω" [Reserved "caseω"],
lexes "case#" [Reserved "caseω"],
lexes "case1" [Reserved "case1"],
lexes "case0" [Reserved "case0"],
lexes "case##" [Name "case##"],
lexes "_" [Reserved "_"],
lexes "_a" [Name "_a"],
lexes "a_" [Name "a_"],
lexes "a'" [Name "a'"],
lexes "+'" [Name "+'"],
2023-05-21 14:09:34 -04:00
lexes "a₁" [Name "a₁"],
lexes "a⁰" [Name "a", Sup 0],
lexes "a^0" [Name "a", Sup 0],
lexes "0.x" [Nat 0, Reserved ".", Name "x"],
lexes "1.x" [Nat 1, Reserved ".", Name "x"],
lexes "ω.x" [Reserved "ω", Reserved ".", Name "x"],
lexes "#.x" [Reserved "ω", Reserved ".", Name "x"]
2023-02-28 14:51:54 -05:00
],
"syntax characters" :- [
lexes "()" [Reserved "(", Reserved ")"],
lexes "(a)" [Reserved "(", Name "a", Reserved ")"],
2023-05-21 14:09:34 -04:00
lexFail "(^)",
lexes "{a,b}"
[Reserved "{", Name "a", Reserved ",", Name "b", Reserved "}"],
lexes "{+,-}"
[Reserved "{", Name "+", Reserved ",", Name "-", Reserved "}"]
2023-02-28 14:51:54 -05:00
],
"tags" :- [
lexes #" 'a "# [Tag "a"],
lexes #" 'abc "# [Tag "abc"],
lexes #" '+ "# [Tag "+"],
lexes #" '$$$ "# [Tag "$$$"],
lexes #" 'tag.with.dots "# [Tag "tag.with.dots"],
lexes #" '"multi word tag" "# [Tag "multi word tag"],
lexes #" '"" "# [Tag ""],
2023-03-04 15:02:51 -05:00
lexFail #" ' "#,
lexFail #" '' "#
2023-02-28 14:51:54 -05:00
],
"strings" :- [
lexes #" "" "# [Str ""],
lexes #" "abc" "# [Str "abc"],
lexes #" "\"" "# [Str "\""],
lexes #" "\\" "# [Str "\\"],
lexes #" "\\\"" "# [Str "\\\""],
2023-02-28 14:51:54 -05:00
todo "other escapes"
],
todo "naturals",
2023-02-28 14:51:54 -05:00
"universes" :- [
lexes "Type0" [TYPE 0],
2023-05-21 14:09:34 -04:00
lexes "Type⁰" [Reserved "", Sup 0],
2023-02-28 14:51:54 -05:00
lexes "Type9999999" [TYPE 9999999],
2023-05-21 14:09:34 -04:00
lexes "★⁰" [Reserved "", Sup 0],
lexes "★⁶⁹" [Reserved "", Sup 69],
2023-02-28 14:51:54 -05:00
lexes "★4" [TYPE 4],
lexes "Type" [Reserved ""],
lexes "" [Reserved ""]
  ]
]