#[fail] #30
9 changed files with 293 additions and 127 deletions
examples/fail.quox (new file, +10 lines)
@@ -0,0 +1,10 @@
+#[fail "but cases for"]
+def missing-b : {a, b} → {a} =
+  λ x ⇒ case x return {a} of { 'a ⇒ 'a }
+
+#[fail "duplicate arms"]
+def repeat-enum-case : {a} → {a} =
+  λ x ⇒ case x return {a} of { 'a ⇒ 'a; 'a ⇒ 'a }
+
+#[fail "duplicate tags"]
+def repeat-enum-type : {a, a} = 'a

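Each block in this new test file is expected to fail elaboration; the string given to #[fail] only needs to occur somewhere in the rendered error text (the matching is a plain substring test, implemented further down in FromParser). A rough sketch of that relationship, with a hypothetical helper name and assuming Data.String's isInfixOf:

    -- illustrative only: `expected` is the string from #[fail "..."],
    -- `rendered` is the error pretty-printed on one line
    failMatches : (expected : String) -> (rendered : String) -> Bool
    failMatches expected rendered = expected `isInfixOf` rendered
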
@@ -3,6 +3,7 @@ module Quox.Parser.FromParser
 
 import public Quox.Parser.FromParser.Error as Quox.Parser.FromParser
 
+import Quox.Pretty
 import Quox.Parser.Syntax
 import Quox.Parser.Parser
 import public Quox.Parser.LoadFile
@@ -42,6 +43,35 @@ FromParserIO : List (Type -> Type)
 FromParserIO = LoadFile :: FromParserPure
 
 
+export
+fromParserPure : NameSuf -> Definitions ->
+                 Eff FromParserPure a ->
+                 Either Error (a, NameSuf, Definitions)
+fromParserPure suf defs act = runSTErr $ do
+  suf <- liftST $ newSTRef suf
+  defs <- liftST $ newSTRef defs
+  res <- runEff act $ with Union.(::)
+    [handleExcept (\e => stLeft e),
+     handleStateSTRef defs,
+     handleStateSTRef !(liftST $ newSTRef [<]),
+     handleStateSTRef suf]
+  pure (res, !(liftST $ readSTRef suf), !(liftST $ readSTRef defs))
+
+
+export covering
+fromParserIO : (MonadRec io, HasIO io) =>
+               IncludePath -> IORef SeenSet ->
+               IORef NameSuf -> IORef Definitions ->
+               Eff FromParserIO a -> io (Either Error a)
+fromParserIO inc seen suf defs act = liftIO $ fromIOErr $ do
+  runEff act $ with Union.(::)
+    [handleLoadFileIOE LoadError seen inc,
+     handleExcept (\e => ioLeft e),
+     handleStateIORef defs,
+     handleStateIORef !(newIORef [<]),
+     handleStateIORef suf]
+
+
 parameters {auto _ : Functor m} (b : Var n -> m a) (f : PName -> m a)
            (xs : Context' PatVar n)
   private
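fromParserPure and fromParserIO are moved up here essentially unchanged (their old copies are deleted at the end of the file, further down in this diff). A minimal usage sketch of the pure runner, where elabOne, defs and decl are hypothetical names introduced only for illustration:

    -- run one declaration's elaboration purely, starting from name suffix 0;
    -- the result threads back the updated suffix and definition table
    elabOne : Definitions -> PDecl -> Either Error (List NDefinition, NameSuf, Definitions)
    elabOne defs decl = fromParserPure 0 defs $ fromPDecl decl
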
@@ -151,7 +181,7 @@ mutual
         map E $ CaseEnum (fromPQty pi)
           <$> fromPTermElim ds ns tag
           <*> fromPTermTScope ds ns [< r] ret
-          <*> assert_total fromPTermEnumArms ds ns arms
+          <*> assert_total fromPTermEnumArms loc ds ns arms
           <*> pure loc
 
       Nat loc => pure $ Nat loc
@@ -166,12 +196,11 @@ mutual
           <*> fromPTermTScope ds ns [< s, ih] suc
           <*> pure loc
 
-      Enum strs loc =>
-        let set = SortedSet.fromList strs in
-        if length strs == length (SortedSet.toList set) then
+      Enum strs loc => do
+        let set = SortedSet.fromList strs
+        unless (length strs == length (SortedSet.toList set)) $
+          throw $ DuplicatesInEnumType loc strs
        pure $ Enum set loc
-        else
-          throw $ DuplicatesInEnum loc strs
 
       Tag str loc => pure $ Tag str loc
 
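The length comparison against the deduplicated SortedSet is what detects repeated tags. A small standalone sketch of the same check (hasDuplicates is a hypothetical helper, not part of the change):

    import Data.SortedSet

    -- True when a tag list such as ["a", "a"] (from the type {a, a}) repeats an element
    hasDuplicates : List String -> Bool
    hasDuplicates strs =
      length strs /= length (SortedSet.toList $ SortedSet.fromList strs)
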
@@ -229,12 +258,15 @@ mutual
         <*> pure loc
 
   private
-  fromPTermEnumArms : Context' PatVar d -> Context' PatVar n ->
+  fromPTermEnumArms : Loc -> Context' PatVar d -> Context' PatVar n ->
                       List (PTagVal, PTerm) ->
                       Eff FromParserPure (CaseEnumArms d n)
-  fromPTermEnumArms ds ns =
-    map SortedMap.fromList .
-    traverse (bitraverse (pure . fromPTagVal) (fromPTermWith ds ns))
+  fromPTermEnumArms loc ds ns arms = do
+    res <- SortedMap.fromList <$>
+      traverse (bitraverse (pure . fromPTagVal) (fromPTermWith ds ns)) arms
+    unless (length (keys res) == length arms) $
+      throw $ DuplicatesInEnumCase loc (map (fromPTagVal . fst) arms)
+    pure res
 
   private
   fromPTermElim : Context' PatVar d -> Context' PatVar n ->
@@ -318,12 +350,43 @@ fromPDef (MkPDef qty pname ptype pterm defLoc) = do
   res <- liftTC $ inferC empty sqty elim
   addDef name gqty res.type term defLoc
 
+
+public export
+data HasFail = NoFail | AnyFail | FailWith String
+
+export
+hasFail : List PDeclMod -> HasFail
+hasFail [] = NoFail
+hasFail (PFail str _ :: _) = maybe AnyFail FailWith str
+
 export covering
 fromPDecl : PDecl -> Eff FromParserPure (List NDefinition)
-fromPDecl (PDef def) = singleton <$> fromPDef def
-fromPDecl (PNs ns) =
+
+export covering
+fromPDeclBody : PDeclBody -> Eff FromParserPure (List NDefinition)
+fromPDeclBody (PDef def) = singleton <$> fromPDef def
+fromPDeclBody (PNs ns) =
   localAt NS (<+> ns.name) $ concat <$> traverse fromPDecl ns.decls
+
+export covering
+expectFail : PDeclBody -> Eff FromParserPure Error
+expectFail body =
+  case fromParserPure !(getAt GEN) !(getAt DEFS) $ fromPDeclBody body of
+    Left err => pure err
+    Right _ => throw $ ExpectedFail body.loc
+
+
+fromPDecl (MkPDecl mods decl loc) = case hasFail mods of
+  NoFail => fromPDeclBody decl
+  AnyFail => expectFail decl $> []
+  FailWith str => do
+    err <- expectFail decl
+    let msg = runPretty $ prettyError False err {opts = Opts 10_000} -- w/e
+    if str `isInfixOf` renderInfinite msg
+      then pure []
+      else throw $ WrongFail str err loc
+
 
 mutual
   export covering
   loadProcessFile : Loc -> String -> Eff FromParserIO (List NDefinition)
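Taken together, hasFail, expectFail and the new fromPDecl clause give the pragma its meaning: no modifier elaborates normally, a bare #[fail] only requires the body to fail with some error, and #[fail "..."] additionally requires the rendered error to contain the given string. A rough illustration; the helper and the example values in the comments are hypothetical, not part of the change:

    -- wrap a single modifier and read back the resulting mode
    failMode : Maybe String -> Loc -> HasFail
    failMode str loc = hasFail [PFail str loc]

    -- failMode Nothing      someLoc  ==  AnyFail
    -- failMode (Just "dup") someLoc  ==  FailWith "dup"   -- error text must contain "dup"
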
@@ -339,32 +402,3 @@ mutual
   fromPTopLevel : PTopLevel -> Eff FromParserIO (List NDefinition)
   fromPTopLevel (PD decl) = lift $ fromPDecl decl
   fromPTopLevel (PLoad file loc) = loadProcessFile loc file
-
-
-export
-fromParserPure : NameSuf -> Definitions ->
-                 Eff FromParserPure a ->
-                 Either Error (a, NameSuf, Definitions)
-fromParserPure suf defs act = runSTErr $ do
-  suf <- liftST $ newSTRef suf
-  defs <- liftST $ newSTRef defs
-  res <- runEff act $ with Union.(::)
-    [handleExcept (\e => stLeft e),
-     handleStateSTRef defs,
-     handleStateSTRef !(liftST $ newSTRef [<]),
-     handleStateSTRef suf]
-  pure (res, !(liftST $ readSTRef suf), !(liftST $ readSTRef defs))
-
-
-export covering
-fromParserIO : (MonadRec io, HasIO io) =>
-               IncludePath -> IORef SeenSet ->
-               IORef NameSuf -> IORef Definitions ->
-               Eff FromParserIO a -> io (Either Error a)
-fromParserIO inc seen suf defs act = liftIO $ fromIOErr $ do
-  runEff act $ with Union.(::)
-    [handleLoadFileIOE LoadError seen inc,
-     handleExcept (\e => ioLeft e),
-     handleStateIORef defs,
-     handleStateIORef !(newIORef [<]),
-     handleStateIORef suf]

@@ -22,7 +22,8 @@ ParseError = Parser.Error
 public export
 data Error =
   AnnotationNeeded Loc (NameContexts d n) (Term d n)
-  | DuplicatesInEnum Loc (List TagVal)
+  | DuplicatesInEnumType Loc (List TagVal)
+  | DuplicatesInEnumCase Loc (List TagVal)
   | TermNotInScope Loc Name
   | DimNotInScope Loc PBaseName
   | QtyNotGlobal Loc Qty
@@ -30,18 +31,22 @@ data Error =
   | DisplacedBoundVar Loc PName
   | WrapTypeError TypeError
   | LoadError Loc FilePath FileError
+  | ExpectedFail Loc
+  | WrongFail String Error Loc
   | WrapParseError String ParseError
 
 
 export
 prettyLexError : {opts : _} -> String -> LexError -> Eff Pretty (Doc opts)
 prettyLexError file (Err reason line col char) = do
-  let loc = makeLoc file (MkBounds line col line col)
   reason <- case reason of
-    EndInput => pure "unexpected end of input"
-    NoRuleApply => pure $ text "unrecognised character: \{show char}"
+    Other msg => pure $ text msg
+    NoRuleApply => case char of
+      Just char => pure $ text "unrecognised character: \{show char}"
+      Nothing => pure $ text "unexpected end of input"
     ComposeNotClosing (sl, sc) (el, ec) => pure $
       hsep ["unterminated token at", !(prettyBounds (MkBounds sl sc el ec))]
+  let loc = makeLoc file (MkBounds line col line col)
   pure $ vappend !(prettyLoc loc) reason
 
 export
@@ -62,19 +67,23 @@ prettyParseError file (ParseError errs) =
     traverse (map ("-" <++>) . prettyParseError1 file) (toList errs)
 
 
-parameters (showContext : Bool)
+parameters {opts : LayoutOpts} (showContext : Bool)
   export
-  prettyError : {opts : _} -> Error -> Eff Pretty (Doc opts)
+  prettyError : Error -> Eff Pretty (Doc opts)
   prettyError (AnnotationNeeded loc ctx tm) =
     [|vappend (prettyLoc loc)
       (hangD "type annotation needed on"
         !(prettyTerm ctx.dnames ctx.tnames tm))|]
     -- [todo] print the original PTerm instead
 
-  prettyError (DuplicatesInEnum loc tags) =
+  prettyError (DuplicatesInEnumType loc tags) =
     [|vappend (prettyLoc loc)
       (hangD "duplicate tags in enum type" !(prettyEnum tags))|]
 
+  prettyError (DuplicatesInEnumCase loc tags) =
+    [|vappend (prettyLoc loc)
+      (hangD "duplicate arms in enum case" !(prettyEnum tags))|]
+
   prettyError (DimNotInScope loc i) =
     [|vappend (prettyLoc loc)
       (pure $ hsep ["dimension", !(hl DVar $ text i), "not in scope"])|]
@@ -106,5 +115,13 @@ parameters (showContext : Bool)
          "couldn't load file" <++> text file,
          text $ show err]
 
+  prettyError (ExpectedFail loc) = pure $
+    sep [!(prettyLoc loc), "expected error"]
+
+  prettyError (WrongFail str err loc) = pure $
+    sep [!(prettyLoc loc),
+         "wrong error, expected to match", !(hl Tag $ text "\"\{str}\""),
+         "but got", !(prettyError err)]
+
   prettyError (WrapParseError file err) =
     prettyParseError file err

@@ -34,16 +34,27 @@ data Token =
   | Sup Nat
 %runElab derive "Token" [Eq, Ord, Show]
 
--- token or whitespace
+||| token or whitespace
+||| @ Skip whitespace, comments, etc
+||| @ Invalid a token which failed a post-lexer check
+|||   (e.g. a qualified name containing a keyword)
+||| @ T a well formed token
 public export
-0 TokenW : Type
-TokenW = Maybe Token
+data ExtToken = Skip | Invalid String String | T Token
+%runElab derive "ExtToken" [Eq, Ord, Show]
 
 
+public export
+data ErrorReason =
+    NoRuleApply
+  | ComposeNotClosing (Int, Int) (Int, Int)
+  | Other String
+%runElab derive "ErrorReason" [Eq, Ord, Show]
+
 public export
 record Error where
   constructor Err
-  reason : StopReason
+  reason : ErrorReason
   line, col : Int
+  ||| `Nothing` if the error is at the end of the input
   char : Maybe Char
@@ -52,19 +63,14 @@ record Error where
 
 
 private
-skip : Lexer -> Tokenizer TokenW
-skip t = match t $ const Nothing
+skip : Lexer -> Tokenizer ExtToken
+skip t = match t $ const Skip
 
 private
-match : Lexer -> (String -> Token) -> Tokenizer TokenW
-match t f = Tokenizer.match t (Just . f)
+%hide Tokenizer.match
+tmatch : Lexer -> (String -> Token) -> Tokenizer ExtToken
+tmatch t f = match t (T . f)
 
-
-private
-name : Tokenizer TokenW
-name = match name $ Name . fromListP . split (== '.') . normalizeNfc
-
 ||| [todo] escapes other than `\"` and (accidentally) `\\`
 export
 fromStringLit : String -> String
@@ -76,17 +82,17 @@ fromStringLit = pack . go . unpack . drop 1 . dropLast 1 where
   go (c :: cs) = c :: go cs
 
 private
-string : Tokenizer TokenW
-string = match stringLit (Str . fromStringLit)
+string : Tokenizer ExtToken
+string = tmatch stringLit (Str . fromStringLit)
 
 private
-nat : Tokenizer TokenW
-nat = match (some (range '0' '9')) (Nat . cast)
+nat : Tokenizer ExtToken
+nat = tmatch (some (range '0' '9')) (Nat . cast)
 
 private
-tag : Tokenizer TokenW
-tag = match (is '\'' <+> name) (Tag . drop 1)
-  <|> match (is '\'' <+> stringLit) (Tag . fromStringLit . drop 1)
+tag : Tokenizer ExtToken
+tag = tmatch (is '\'' <+> name) (Tag . drop 1)
+  <|> tmatch (is '\'' <+> stringLit) (Tag . fromStringLit . drop 1)
 
 
 
@@ -112,17 +118,17 @@ supToNat = cast . pack . map fromSup . unpack
 
 -- ★0, Type0. base ★/Type is a Reserved
 private
-universe : Tokenizer TokenW
+universe : Tokenizer ExtToken
 universe = universeWith "★" <|> universeWith "Type" where
-  universeWith : String -> Tokenizer TokenW
+  universeWith : String -> Tokenizer ExtToken
   universeWith pfx =
     let len = length pfx in
-    match (exact pfx <+> digits) (TYPE . cast . drop len)
+    tmatch (exact pfx <+> digits) (TYPE . cast . drop len)
 
 private
-sup : Tokenizer TokenW
-sup = match (some $ pred isSupDigit) (Sup . supToNat)
-  <|> match (is '^' <+> digits) (Sup . cast . drop 1)
+sup : Tokenizer ExtToken
+sup = tmatch (some $ pred isSupDigit) (Sup . supToNat)
+  <|> tmatch (is '^' <+> digits) (Sup . cast . drop 1)
 
 
 private %inline
@@ -134,9 +140,11 @@ namespace Reserved
   ||| description of a reserved symbol
   ||| @ Word a reserved word (must not be followed by letters, digits, etc)
   ||| @ Sym a reserved symbol (must not be followed by symbolic chars)
-  ||| @ Punc a character that doesn't show up in names (brackets, etc)
+  ||| @ Punc a character that doesn't show up in names (brackets, etc);
+  |||   also a sequence ending in one of those, like `#[`, since the
+  |||   difference relates to lookahead
   public export
-  data Reserved1 = Word String | Sym String | Punc Char
+  data Reserved1 = Word String | Sym String | Punc String
   %runElab derive "Reserved1" [Eq, Ord, Show]
 
   ||| description of a token that might have unicode & ascii-only aliases
@@ -145,17 +153,14 @@ namespace Reserved
   %runElab derive "Reserved" [Eq, Ord, Show]
 
   public export
-  Sym1, Word1 : String -> Reserved
+  Sym1, Word1, Punc1 : String -> Reserved
   Sym1 = Only . Sym
   Word1 = Only . Word
-
-  public export
-  Punc1 : Char -> Reserved
   Punc1 = Only . Punc
 
   public export
   resString1 : Reserved1 -> String
-  resString1 (Punc x) = singleton x
+  resString1 (Punc x) = x
   resString1 (Word w) = w
   resString1 (Sym s) = s
 
@@ -166,17 +171,23 @@ resString : Reserved -> String
 resString (Only r) = resString1 r
 resString (r `Or` _) = resString1 r
 
+||| return both representative strings for a token description
+public export
+resString2 : Reserved -> List String
+resString2 (Only r) = [resString1 r]
+resString2 (r `Or` s) = [resString1 r, resString1 s]
+
 private
-resTokenizer1 : Reserved1 -> String -> Tokenizer TokenW
+resTokenizer1 : Reserved1 -> String -> Tokenizer ExtToken
 resTokenizer1 r str =
   let res : String -> Token := const $ Reserved str in
-  case r of Word w => match (exact w <+> reject idContEnd) res
-            Sym s => match (exact s <+> reject symCont) res
-            Punc x => match (is x) res
+  case r of Word w => tmatch (exact w <+> reject idContEnd) res
+            Sym s => tmatch (exact s <+> reject symCont) res
+            Punc x => tmatch (exact x) res
 
 ||| match a reserved token
 export
-resTokenizer : Reserved -> Tokenizer TokenW
+resTokenizer : Reserved -> Tokenizer ExtToken
 resTokenizer (Only r) = resTokenizer1 r (resString1 r)
 resTokenizer (r `Or` s) =
   resTokenizer1 r (resString1 r) <|> resTokenizer1 s (resString1 r)
@@ -188,8 +199,8 @@ resTokenizer (r `Or` s) =
 public export
 reserved : List Reserved
 reserved =
-  [Punc1 '(', Punc1 ')', Punc1 '[', Punc1 ']', Punc1 '{', Punc1 '}',
-   Punc1 ',', Punc1 ';',
+  [Punc1 "(", Punc1 ")", Punc1 "[", Punc1 "]", Punc1 "{", Punc1 "}",
+   Punc1 ",", Punc1 ";", Punc1 "#[",
   Sym1 "@",
   Sym1 ":",
   Sym "⇒" `Or` Sym "=>",
@@ -197,7 +208,7 @@ reserved =
   Sym "×" `Or` Sym "**",
   Sym "≡" `Or` Sym "==",
   Sym "∷" `Or` Sym "::",
-  Punc1 '.',
+  Punc1 ".",
   Word1 "case",
   Word1 "case0", Word1 "case1",
   Word "caseω" `Or` Word "case#",
@@ -220,14 +231,31 @@ reserved =
   Word1 "load",
   Word1 "namespace"]
 
+public export
+reservedStrings : List String
+reservedStrings = map resString reserved
+
+public export
+allReservedStrings : List String
+allReservedStrings = foldMap resString2 reserved
+
 ||| `IsReserved str` is true if `Reserved str` might actually show up in
 ||| the token stream
 public export
 IsReserved : String -> Type
-IsReserved str = str `Elem` map resString reserved
+IsReserved str = So (str `elem` allReservedStrings)
+
+private
+name : Tokenizer ExtToken
+name =
+  match name $ \str =>
+    let parts = split (== '.') $ normalizeNfc str in
+    case find (`elem` allReservedStrings) (toList parts) of
+      Nothing => T $ Name $ fromListP parts
+      Just w => Invalid "reserved word '\{w}' inside name \{str}" str
 
 export
-tokens : Tokenizer TokenW
+tokens : Tokenizer ExtToken
 tokens = choice $
   map skip [pred isWhitespace,
             lineComment (exact "--" <+> reject symCont),
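With allReservedStrings and the replacement name tokenizer, a qualified name is rejected as soon as any of its components is a reserved word; the rejection is recorded as an Invalid token and only becomes a real error later, in check. The expected outcomes sketched below are illustrative, not literal output from the change:

    -- "abc.def"      lexes to  T (Name ...)    (no component is reserved)
    -- "abc.fun.ghi"  lexes to  Invalid "reserved word 'fun' inside name abc.fun.ghi" "abc.fun.ghi"
    --                which check later turns into  Left (Err (Other msg) line col ...)
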
@@ -236,10 +264,24 @@ tokens = choice $
   map resTokenizer reserved <+>
   [sup, nat, string, tag, name]
 
+export
+check : Alternative f =>
+        WithBounds ExtToken -> Either Error (f (WithBounds Token))
+check (MkBounded val irr bounds@(MkBounds line col _ _)) = case val of
+  Skip => Right empty
+  T tok => Right $ pure $ MkBounded tok irr bounds
+  Invalid msg tok => Left $ Err (Other msg) line col (index 0 tok)
+
+export
+toErrorReason : StopReason -> Maybe ErrorReason
+toErrorReason EndInput = Nothing
+toErrorReason NoRuleApply = Just NoRuleApply
+toErrorReason (ComposeNotClosing s e) = Just $ ComposeNotClosing s e
+
 export
 lex : String -> Either Error (List (WithBounds Token))
 lex str =
   let (res, reason, line, col, str) = lex tokens str in
-  case reason of
-    EndInput => Right $ mapMaybe sequence res
-    _ => Left $ Err {reason, line, col, char = index 0 str}
+  case toErrorReason reason of
+    Nothing => concatMap check res @{MonoidApplicative}
+    Just e => Left $ Err {reason = e, line, col, char = index 0 str}

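The lexer now produces ExtToken values, and check is the bridge back to a plain Token stream: Skip is dropped, T is kept, and Invalid becomes an error. A minimal caller, assuming the lex defined above is in scope (tokenValues is a hypothetical helper):

    -- lex a source string and keep only the token values, discarding bounds
    tokenValues : String -> Either Error (List Token)
    tokenValues src = map (map (.val)) $ lex src
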
@@ -149,6 +149,12 @@ export
 qty : FileName -> Grammar True PQty
 qty fname = withLoc fname [|PQ qtyVal|]
 
+export
+exactName : String -> Grammar True ()
+exactName name = terminal "expected '\{name}'" $ \case
+  Name (MakePName [<] x) => guard $ x == name
+  _ => Nothing
+
 
 ||| pattern var (unqualified name or _)
 export
@@ -576,6 +582,15 @@ term fname = lamTerm fname
        <|> sigmaTerm fname
 
 
+export
+pragma : Grammar True a -> Grammar True a
+pragma p = resC "#[" *> p <* mustWork (resC "]")
+
+export
+declMod : FileName -> Grammar True PDeclMod
+declMod fname = withLoc fname $ pragma $
+  exactName "fail" *> [|PFail $ optional strLit|]
+
 export
 decl : FileName -> Grammar True PDecl
 
@@ -610,7 +625,11 @@ where
   nsInner = [] <$ resC "}"
         <|> [|(assert_total decl fname <* commit) :: assert_total nsInner|]
 
-decl fname = [|PDef $ definition fname|] <|> [|PNs $ namespace_ fname|]
+export
+declBody : FileName -> Grammar True PDeclBody
+declBody fname = [|PDef $ definition fname|] <|> [|PNs $ namespace_ fname|]
+
+decl fname = withLoc fname [|MkPDecl (many $ declMod fname) (declBody fname)|]
 
 export
 load : FileName -> Grammar True PTopLevel

@@ -153,6 +153,12 @@ record PDefinition where
 
 export Located PDefinition where def.loc = def.loc_
 
+public export
+data PDeclMod =
+  PFail (Maybe String) Loc
+%name PDeclMod mod
+%runElab derive "PDeclMod" [Eq, Ord, Show]
+
 mutual
   public export
   record PNamespace where
@@ -163,16 +169,25 @@ mutual
   %name PNamespace ns
 
   public export
-  data PDecl =
+  record PDecl where
+    constructor MkPDecl
+    mods : List PDeclMod
+    decl : PDeclBody
+    loc_ : Loc
+
+  public export
+  data PDeclBody =
     PDef PDefinition
     | PNs PNamespace
-  %name PDecl decl
-  %runElab deriveMutual ["PNamespace", "PDecl"] [Eq, Ord, Show]
+  %name PDeclBody decl
+  %runElab deriveMutual ["PNamespace", "PDecl", "PDeclBody"] [Eq, Ord, Show]
 
 export Located PNamespace where ns.loc = ns.loc_
 
+export Located PDecl where decl.loc = decl.loc_
+
 export
-Located PDecl where
+Located PDeclBody where
   (PDef def).loc = def.loc
   (PNs ns).loc = ns.loc

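Since PDecl is now a record wrapping a PDeclBody together with its modifiers and location, every declaration in the parse tree carries an explicit (possibly empty) modifier list; the updated parser tests below expect this shape. A rough sketch, assuming a body and location are at hand (both helper names are hypothetical):

    -- a plain definition carries no modifiers
    plainDecl : PDeclBody -> Loc -> PDecl
    plainDecl body loc = MkPDecl [] body loc

    -- a declaration under  #[fail "duplicate"]  carries one PFail entry
    failingDecl : PDeclBody -> Loc -> PDecl
    failingDecl body loc = MkPDecl [PFail (Just "duplicate") loc] body loc
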
@@ -5,6 +5,7 @@ module Text.PrettyPrint.Bernardy.Core.Decorate
 
 import public Text.PrettyPrint.Bernardy.Core
 import Data.DPair
+import Data.String
 
 
 public export
@@ -43,3 +44,12 @@ decorateLayout h l@(MkLayout content stats) =
 export
 decorate : {opts : _} -> Highlight -> Doc opts -> Doc opts
 decorate h doc = doc >>= \l => pure (decorateLayout h l)
+
+
+||| render a doc with no line breaks at all
+export
+renderInfinite : Doc opts -> String
+renderInfinite (MkDoc (MkLayout content _) _) = unwords content where
+  unwords : SnocList String -> String
+  unwords [<] = ""
+  unwords (xs :< x) = foldMap (++ " ") xs ++ x

@@ -47,7 +47,12 @@ tests = "lexer" :- [
     lexes " " [],
     lexes "-- line comment" [],
     lexes "name -- line comment" [Name "name"],
-    lexes "-- line comment\nnameBetween -- and another" [Name "nameBetween"],
+    lexes
+      """
+      -- line comment
+      nameBetween -- and another
+      """
+      [Name "nameBetween"],
     lexes "{- block comment -}" [],
     lexes "{- {- nested -} block comment -}" []
   ],
@@ -70,13 +75,14 @@ tests = "lexer" :- [
     lexes "normalïse" [Name "normalïse"],
     -- ↑ replace i + combining ¨ with precomposed ï
     lexes "map#" [Name "map#"],
-    lexes "map#[" [Name "map#", Reserved "["], -- don't actually do this
+    lexes "map #[" [Name "map", Reserved "#["],
     lexes "write!" [Name "write!"],
     lexes "uhh??!?!?!?" [Name "uhh??!?!?!?"],
 
     todo "check for reserved words in a qname",
-    skip $
-    lexes "abc.fun.def"
-      [Name "abc", Reserved ".", Reserved "λ", Reserved ".", Name "def"],
+    lexFail "abc.fun.ghi",
+    lexFail "abc.λ.ghi",
+    lexFail "abc.ω.ghi",
+
     lexes "+" [Name "+"],
     lexes "*" [Name "*"],

@@ -424,33 +424,46 @@ tests = "parser" :- [
 
   "top level" :- [
     parseMatch input "def0 A : ★⁰ = {}; def0 B : ★¹ = A;"
-      `([PD $ PDef $ MkPDef (PQ Zero _) "A" (Just $ TYPE 0 _) (Enum [] _) _,
-         PD $ PDef $ MkPDef (PQ Zero _) "B" (Just $ TYPE 1 _) (V "A" {}) _]),
+      `([PD $ MkPDecl []
+           (PDef $ MkPDef (PQ Zero _) "A" (Just $ TYPE 0 _) (Enum [] _) _) _,
+         PD $ MkPDecl []
+           (PDef $ MkPDef (PQ Zero _) "B" (Just $ TYPE 1 _) (V "A" {}) _) _]),
    parseMatch input "def0 A : ★⁰ = {} def0 B : ★¹ = A" $
-      `([PD $ PDef $ MkPDef (PQ Zero _) "A" (Just $ TYPE 0 _) (Enum [] _) _,
-         PD $ PDef $ MkPDef (PQ Zero _) "B" (Just $ TYPE 1 _) (V "A" {}) _]),
+      `([PD $ MkPDecl []
+           (PDef $ MkPDef (PQ Zero _) "A" (Just $ TYPE 0 _) (Enum [] _) _) _,
+         PD $ MkPDecl []
+           (PDef $ MkPDef (PQ Zero _) "B" (Just $ TYPE 1 _) (V "A" {}) _) _]),
    note "empty input",
    parsesAs input "" [],
    parseFails input ";;;;;;;;;;;;;;;;;;;;;;;;;;",
    parseMatch input "namespace a {}"
-      `([PD $ PNs $ MkPNamespace [< "a"] [] _]),
+      `([PD $ MkPDecl [] (PNs $ MkPNamespace [< "a"] [] _) _]),
    parseMatch input "namespace a.b.c {}"
-      `([PD $ PNs $ MkPNamespace [< "a", "b", "c"] [] _]),
+      `([PD $ MkPDecl []
+           (PNs $ MkPNamespace [< "a", "b", "c"] [] _) _]),
    parseMatch input "namespace a {namespace b {}}"
-      `([PD $ PNs $ MkPNamespace [< "a"] [PNs $ MkPNamespace [< "b"] [] _] _]),
+      `([PD (MkPDecl []
+           (PNs $ MkPNamespace [< "a"]
+              [MkPDecl [] (PNs $ MkPNamespace [< "b"] [] _) _] _) _)]),
    parseMatch input "namespace a {def x = 't ∷ {t}}"
-      `([PD $ PNs $ MkPNamespace [< "a"]
-          [PDef $ MkPDef (PQ Any _) "x" Nothing
-             (Ann (Tag "t" _) (Enum ["t"] _) _) _] _]),
+      `([PD (MkPDecl []
+           (PNs $ MkPNamespace [< "a"]
+              [MkPDecl []
+                 (PDef $ MkPDef (PQ Any _) "x" Nothing
+                    (Ann (Tag "t" _) (Enum ["t"] _) _) _) _] _) _)]),
    parseMatch input "namespace a {def x = 't ∷ {t}} def y = a.x"
-      `([PD $ PNs $ MkPNamespace [< "a"]
-          [PDef $ MkPDef (PQ Any _) "x" Nothing
-             (Ann (Tag "t" _) (Enum ["t"] _) _) _] _,
-         PD $ PDef $ MkPDef (PQ Any _) "y" Nothing
-            (V (MakePName [< "a"] "x") {}) _]),
+      `([PD (MkPDecl []
+           (PNs $ MkPNamespace [< "a"]
+              [MkPDecl []
+                 (PDef $ MkPDef (PQ Any _) "x" Nothing
+                    (Ann (Tag "t" _) (Enum ["t"] _) _) _) _] _) _),
+         PD (MkPDecl []
+           (PDef $ MkPDef (PQ Any _) "y" Nothing
+              (V (MakePName [< "a"] "x") Nothing _) _) _)]),
    parseMatch input #" load "a.quox"; def b = a.b "#
      `([PLoad "a.quox" _,
-         PD $ PDef $ MkPDef (PQ Any _) "b" Nothing
-            (V (MakePName [< "a"] "b") {}) _])
+         PD (MkPDecl []
+           (PDef $ MkPDef (PQ Any _) "b" Nothing
+              (V (MakePName [< "a"] "b") Nothing _) _) _)])
  ]
 ]