module Tests.Lexer

import Quox.Name
import Quox.Lexer
import TAP


||| Ways a single lexer test can fail.
private
data Failure =
    ||| the lexer itself reported an error
    LexError Lexer.Error
    ||| lexing succeeded but produced the wrong tokens (expected, actual)
  | WrongLex (List Token) (List Token)
    ||| lexing succeeded where failure was expected
  | ExpectedFail (List Token)

private
ToInfo Failure where
  toInfo (LexError err) =
    [("type", "LexError"), ("info", show err)]
  toInfo (WrongLex want got) =
    [("type", "WrongLex"), ("want", show want), ("got", show got)]
  toInfo (ExpectedFail got) =
    [("type", "ExpectedFail"), ("got", show got)]

||| Replace newlines with a visible symbol (␤) so multi-line inputs
||| still render as a single-line test label.
private
denewline : String -> String
denewline = pack . map (\case '\n' => '␤'; c => c) . unpack

||| Test that `str` lexes to exactly the token values `toks`
||| (source positions are discarded via `val`).
private
lexes : String -> List Token -> Test
lexes str toks = test "「\{denewline str}」" $ do
  res <- bimap LexError (map val) $ lex str
  unless (toks == res) $ throwError $ WrongLex toks res

||| Test that lexing `str` fails.
private
lexFail : String -> Test
lexFail str = test "「\{denewline str}」 # fails" $
  either (const $ Right ()) (Left . ExpectedFail . map val) $ lex str

export
tests : Test
tests = "lexer" :- [
  "comments" :- [
    lexes "" [],
    lexes " " [],
    lexes "-- line comment" [],
    lexes "name -- line comment" [I "name"],
    lexes "-- line comment\nnameBetween -- and another" [I "nameBetween"],
    lexes "{- block comment -}" [],
    lexes "{- {- nested -} block comment -}" []
  ],

  "identifiers & keywords" :- [
    lexes "abc" [I "abc"],
    lexes "abc def" [I "abc", I "def"],
    lexes "abc_def" [I "abc_def"],
    lexes "abc-def" [I "abc-def"],
    lexes "abc{-comment-}def" [I "abc", I "def"],
    lexes "λ" [R "λ"],
    lexes "fun" [R "λ"],
    lexes "δ" [R "δ"],
    lexes "dfun" [R "δ"],
    lexes "funky" [I "funky"],
    lexes "δελτα" [I "δελτα"],
    lexes "★★" [I "★★"],
    lexes "Types" [I "Types"],
    lexes "a.b.c.d.e" [I $ MakeName [< "a","b","c","d"] "e"],
    lexes "normalïse" [I "normalïse"],
    lexes "map#" [I "map#"],
    lexes "write!" [I "write!"],
    lexes "uhh??!?!?!?" [I "uhh??!?!?!?"],
    -- ↑ replace i + combining ¨ with precomposed ï
    todo "check for reserved words in a qname",
    -- lexes "abc.fun.def" [I "abc", R ".", R "λ", R ".", I "def"],
    lexes "+" [I "+"],
    lexes "*" [I "*"],
    lexes "**" [R "×"],
    lexes "***" [I "***"],
    lexes "+**" [I "+**"],
    lexes "+#" [I "+#"],
    lexes "+.+.+" [I $ MakeName [< "+", "+"] "+"],
    lexes "a.+" [I $ MakeName [< "a"] "+"],
    lexes "+.a" [I $ MakeName [< "+"] "a"],
    lexes "+a" [I "+", I "a"],
    lexes "x." [I "x", R "·"],
    lexes "&." [I "&", R "·"],
    lexes ".x" [R "·", I "x"],
    lexes "a.b.c." [I $ MakeName [< "a", "b"] "c", R "·"],
    lexes "case" [R "case"],
    lexes "caseω" [R "caseω"],
    lexes "case#" [R "caseω"],
    lexes "case1" [R "case1"],
    lexes "case0" [I "case0"],
    lexes "case##" [I "case##"],
    lexes "_" [R "_"],
    lexes "_a" [I "_a"],
    lexes "a_" [I "a_"],
    lexes "a'" [I "a'"],
    lexes "+'" [I "+'"]
  ],

  "syntax characters" :- [
    lexes "()" [R "(", R ")"],
    lexes "(a)" [R "(", I "a", R ")"],
    lexes "(^)" [R "(", I "^", R ")"],
    lexes "{a,b}" [R "{", I "a", R ",", I "b", R "}"],
    lexes "{+,-}" [R "{", I "+", R ",", I "-", R "}"]
  ],

  "tags" :- [
    lexes #" 'a "# [T "a"],
    lexes #" 'abc "# [T "abc"],
    lexes #" '+ "# [T "+"],
    lexes #" '$$$ "# [T "$$$"],
    lexes #" 'tag.with.dots "# [T "tag.with.dots"],
    lexes #" '"multi word tag" "# [T "multi word tag"],
    lexes #" '"" "# [T ""],
    lexFail #" ' "#,
    lexFail #" '' "#
  ],

  "strings" :- [
    lexes #" "" "# [S ""],
    lexes #" "abc" "# [S "abc"],
    lexes #" "\"" "# [S "\""],
    lexes #" "\\" "# [S "\\"],
    lexes #" "\\\"" "# [S "\\\""],
    todo "other escapes"
  ],

  "universes" :- [
    lexes "Type0" [TYPE 0],
    lexes "Type₀" [TYPE 0],
    lexes "Type9999999" [TYPE 9999999],
    lexes "★₀" [TYPE 0],
    lexes "★₆₉" [TYPE 69],
    lexes "★4" [TYPE 4],
    lexes "Type" [R "★"],
    lexes "★" [R "★"]
  ]
]