#[fail] #30

Merged
rhi merged 6 commits from 💣 into 🐉 2023-09-24 11:39:29 -04:00
9 changed files with 293 additions and 127 deletions

examples/fail.quox (new file, +10)
View file

@@ -0,0 +1,10 @@
#[fail "but cases for"]
def missing-b : {a, b} → {a} =
  λ x ⇒ case x return {a} of { 'a ⇒ 'a }

#[fail "duplicate arms"]
def repeat-enum-case : {a} → {a} =
  λ x ⇒ case x return {a} of { 'a ⇒ 'a; 'a ⇒ 'a }

#[fail "duplicate tags"]
def repeat-enum-type : {a, a} = 'a
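
This example file exercises the new `#[fail]` pragma: a bare `#[fail]` means the declaration must produce some error, and `#[fail "…"]` additionally requires the quoted string to occur somewhere in the rendered error (see the `FailWith` branch in FromParser further down, which uses `isInfixOf` on the `renderInfinite` output). A minimal standalone sketch of that acceptance rule; `FailSpec` and `satisfies` are illustrative names, not part of this patch:

import Data.String

-- hypothetical mirror of the new HasFail type: what a #[fail] pragma
-- demands of the declaration it is attached to
data FailSpec = MustSucceed | MustFail | MustFailMatching String

-- check an optional rendered error message against the spec;
-- #[fail "substr"] passes when the message merely contains the substring
satisfies : FailSpec -> Maybe String -> Bool
satisfies MustSucceed            Nothing    = True
satisfies MustFail               (Just _)   = True
satisfies (MustFailMatching str) (Just msg) = str `isInfixOf` msg
satisfies _                      _          = False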

View file

@@ -3,6 +3,7 @@ module Quox.Parser.FromParser

import public Quox.Parser.FromParser.Error as Quox.Parser.FromParser
+import Quox.Pretty
import Quox.Parser.Syntax
import Quox.Parser.Parser
import public Quox.Parser.LoadFile

@@ -42,6 +43,35 @@ FromParserIO : List (Type -> Type)
FromParserIO = LoadFile :: FromParserPure

+export
+fromParserPure : NameSuf -> Definitions ->
+                 Eff FromParserPure a ->
+                 Either Error (a, NameSuf, Definitions)
+fromParserPure suf defs act = runSTErr $ do
+  suf  <- liftST $ newSTRef suf
+  defs <- liftST $ newSTRef defs
+  res  <- runEff act $ with Union.(::)
+    [handleExcept (\e => stLeft e),
+     handleStateSTRef defs,
+     handleStateSTRef !(liftST $ newSTRef [<]),
+     handleStateSTRef suf]
+  pure (res, !(liftST $ readSTRef suf), !(liftST $ readSTRef defs))
+
+export covering
+fromParserIO : (MonadRec io, HasIO io) =>
+               IncludePath -> IORef SeenSet ->
+               IORef NameSuf -> IORef Definitions ->
+               Eff FromParserIO a -> io (Either Error a)
+fromParserIO inc seen suf defs act = liftIO $ fromIOErr $ do
+  runEff act $ with Union.(::)
+    [handleLoadFileIOE LoadError seen inc,
+     handleExcept (\e => ioLeft e),
+     handleStateIORef defs,
+     handleStateIORef !(newIORef [<]),
+     handleStateIORef suf]
+
parameters {auto _ : Functor m} (b : Var n -> m a) (f : PName -> m a)
           (xs : Context' PatVar n)
  private

@@ -151,7 +181,7 @@ mutual
        map E $ CaseEnum (fromPQty pi)
          <$> fromPTermElim ds ns tag
          <*> fromPTermTScope ds ns [< r] ret
-         <*> assert_total fromPTermEnumArms ds ns arms
+         <*> assert_total fromPTermEnumArms loc ds ns arms
          <*> pure loc

      Nat loc => pure $ Nat loc

@@ -166,12 +196,11 @@ mutual
          <*> fromPTermTScope ds ns [< s, ih] suc
          <*> pure loc

-     Enum strs loc =>
-       let set = SortedSet.fromList strs in
-       if length strs == length (SortedSet.toList set) then
-         pure $ Enum set loc
-       else
-         throw $ DuplicatesInEnum loc strs
+     Enum strs loc => do
+       let set = SortedSet.fromList strs
+       unless (length strs == length (SortedSet.toList set)) $
+         throw $ DuplicatesInEnumType loc strs
+       pure $ Enum set loc

      Tag str loc => pure $ Tag str loc

@@ -229,12 +258,15 @@ mutual
          <*> pure loc

  private
-  fromPTermEnumArms : Context' PatVar d -> Context' PatVar n ->
+  fromPTermEnumArms : Loc -> Context' PatVar d -> Context' PatVar n ->
                       List (PTagVal, PTerm) ->
                       Eff FromParserPure (CaseEnumArms d n)
-  fromPTermEnumArms ds ns =
-    map SortedMap.fromList .
-    traverse (bitraverse (pure . fromPTagVal) (fromPTermWith ds ns))
+  fromPTermEnumArms loc ds ns arms = do
+    res <- SortedMap.fromList <$>
+      traverse (bitraverse (pure . fromPTagVal) (fromPTermWith ds ns)) arms
+    unless (length (keys res) == length arms) $
+      throw $ DuplicatesInEnumCase loc (map (fromPTagVal . fst) arms)
+    pure res

  private
  fromPTermElim : Context' PatVar d -> Context' PatVar n ->
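
Both new duplicate checks (tags in an enum type, arms in an enum case) rely on the same size comparison: build a `SortedSet`/`SortedMap` from the list and see whether anything collapsed. A standalone sketch of the idea; the `hasDuplicates` helper is hypothetical, not code from this patch:

import Data.SortedSet

-- the deduplicated set is shorter than the input exactly when the
-- input contains a repeated element
hasDuplicates : Ord a => List a -> Bool
hasDuplicates xs =
  length xs /= length (SortedSet.toList (SortedSet.fromList xs))

-- hasDuplicates ["a", "a"] == True, matching the repeat-enum-type example
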
@@ -318,12 +350,43 @@ fromPDef (MkPDef qty pname ptype pterm defLoc) = do
      res <- liftTC $ inferC empty sqty elim
      addDef name gqty res.type term defLoc

+public export
+data HasFail = NoFail | AnyFail | FailWith String
+
+export
+hasFail : List PDeclMod -> HasFail
+hasFail [] = NoFail
+hasFail (PFail str _ :: _) = maybe AnyFail FailWith str
+
export covering
fromPDecl : PDecl -> Eff FromParserPure (List NDefinition)
-fromPDecl (PDef def) = singleton <$> fromPDef def
-fromPDecl (PNs ns) =
+
+export covering
+fromPDeclBody : PDeclBody -> Eff FromParserPure (List NDefinition)
+fromPDeclBody (PDef def) = singleton <$> fromPDef def
+fromPDeclBody (PNs ns) =
  localAt NS (<+> ns.name) $ concat <$> traverse fromPDecl ns.decls
+
+export covering
+expectFail : PDeclBody -> Eff FromParserPure Error
+expectFail body =
+  case fromParserPure !(getAt GEN) !(getAt DEFS) $ fromPDeclBody body of
+    Left err => pure err
+    Right _  => throw $ ExpectedFail body.loc
+
+fromPDecl (MkPDecl mods decl loc) = case hasFail mods of
+  NoFail       => fromPDeclBody decl
+  AnyFail      => expectFail decl $> []
+  FailWith str => do
+    err <- expectFail decl
+    let msg = runPretty $ prettyError False err {opts = Opts 10_000} -- w/e
+    if str `isInfixOf` renderInfinite msg
+      then pure []
+      else throw $ WrongFail str err loc

mutual
  export covering
  loadProcessFile : Loc -> String -> Eff FromParserIO (List NDefinition)

@@ -339,32 +402,3 @@ mutual
  fromPTopLevel : PTopLevel -> Eff FromParserIO (List NDefinition)
  fromPTopLevel (PD decl) = lift $ fromPDecl decl
  fromPTopLevel (PLoad file loc) = loadProcessFile loc file
-
-export
-fromParserPure : NameSuf -> Definitions ->
-                 Eff FromParserPure a ->
-                 Either Error (a, NameSuf, Definitions)
-fromParserPure suf defs act = runSTErr $ do
-  suf  <- liftST $ newSTRef suf
-  defs <- liftST $ newSTRef defs
-  res  <- runEff act $ with Union.(::)
-    [handleExcept (\e => stLeft e),
-     handleStateSTRef defs,
-     handleStateSTRef !(liftST $ newSTRef [<]),
-     handleStateSTRef suf]
-  pure (res, !(liftST $ readSTRef suf), !(liftST $ readSTRef defs))
-
-export covering
-fromParserIO : (MonadRec io, HasIO io) =>
-               IncludePath -> IORef SeenSet ->
-               IORef NameSuf -> IORef Definitions ->
-               Eff FromParserIO a -> io (Either Error a)
-fromParserIO inc seen suf defs act = liftIO $ fromIOErr $ do
-  runEff act $ with Union.(::)
-    [handleLoadFileIOE LoadError seen inc,
-     handleExcept (\e => ioLeft e),
-     handleStateIORef defs,
-     handleStateIORef !(newIORef [<]),
-     handleStateIORef suf]
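
`fromParserPure` moves above the declaration code, presumably so that `expectFail` can re-run a declaration body as a pure sub-elaboration with the current state and capture its error instead of letting it propagate. A toy model of that inversion, with a hypothetical `expectError` helper:

-- toy model: run a sub-computation that may fail, and succeed only if it did
expectError : Either err a -> Either String err
expectError (Left e)  = Right e
expectError (Right _) = Left "expected an error, but the declaration succeeded"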

View file

@@ -22,7 +22,8 @@ ParseError = Parser.Error
public export
data Error =
    AnnotationNeeded Loc (NameContexts d n) (Term d n)
- | DuplicatesInEnum Loc (List TagVal)
+ | DuplicatesInEnumType Loc (List TagVal)
+ | DuplicatesInEnumCase Loc (List TagVal)
  | TermNotInScope Loc Name
  | DimNotInScope Loc PBaseName
  | QtyNotGlobal Loc Qty

@@ -30,18 +31,22 @@ data Error =
  | DisplacedBoundVar Loc PName
  | WrapTypeError TypeError
  | LoadError Loc FilePath FileError
+ | ExpectedFail Loc
+ | WrongFail String Error Loc
  | WrapParseError String ParseError

export
prettyLexError : {opts : _} -> String -> LexError -> Eff Pretty (Doc opts)
prettyLexError file (Err reason line col char) = do
+ let loc = makeLoc file (MkBounds line col line col)
  reason <- case reason of
-   EndInput => pure "unexpected end of input"
-   NoRuleApply => pure $ text "unrecognised character: \{show char}"
+   Other msg => pure $ text msg
+   NoRuleApply => case char of
+     Just char => pure $ text "unrecognised character: \{show char}"
+     Nothing => pure $ text "unexpected end of input"
    ComposeNotClosing (sl, sc) (el, ec) => pure $
      hsep ["unterminated token at", !(prettyBounds (MkBounds sl sc el ec))]
- let loc = makeLoc file (MkBounds line col line col)
  pure $ vappend !(prettyLoc loc) reason

export

@@ -62,19 +67,23 @@ prettyParseError file (ParseError errs) =
    traverse (map ("-" <++>) . prettyParseError1 file) (toList errs)

-parameters (showContext : Bool)
+parameters {opts : LayoutOpts} (showContext : Bool)
  export
- prettyError : {opts : _} -> Error -> Eff Pretty (Doc opts)
+ prettyError : Error -> Eff Pretty (Doc opts)
  prettyError (AnnotationNeeded loc ctx tm) =
    [|vappend (prettyLoc loc)
        (hangD "type annotation needed on"
          !(prettyTerm ctx.dnames ctx.tnames tm))|]
    -- [todo] print the original PTerm instead

- prettyError (DuplicatesInEnum loc tags) =
+ prettyError (DuplicatesInEnumType loc tags) =
    [|vappend (prettyLoc loc)
        (hangD "duplicate tags in enum type" !(prettyEnum tags))|]

+ prettyError (DuplicatesInEnumCase loc tags) =
+   [|vappend (prettyLoc loc)
+       (hangD "duplicate arms in enum case" !(prettyEnum tags))|]
+
  prettyError (DimNotInScope loc i) =
    [|vappend (prettyLoc loc)
        (pure $ hsep ["dimension", !(hl DVar $ text i), "not in scope"])|]

@@ -106,5 +115,13 @@ parameters (showContext : Bool)
           "couldn't load file" <++> text file,
           text $ show err]

+ prettyError (ExpectedFail loc) = pure $
+   sep [!(prettyLoc loc), "expected error"]
+
+ prettyError (WrongFail str err loc) = pure $
+   sep [!(prettyLoc loc),
+        "wrong error, expected to match", !(hl Tag $ text "\"\{str}\""),
+        "but got", !(prettyError err)]
+
  prettyError (WrapParseError file err) =
    prettyParseError file err

View file

@@ -34,16 +34,27 @@ data Token =
  | Sup Nat
%runElab derive "Token" [Eq, Ord, Show]

--- token or whitespace
+||| token or whitespace
+||| @ Skip whitespace, comments, etc
+||| @ Invalid a token which failed a post-lexer check
+|||   (e.g. a qualified name containing a keyword)
+||| @ T a well formed token
public export
-0 TokenW : Type
-TokenW = Maybe Token
+data ExtToken = Skip | Invalid String String | T Token
+%runElab derive "ExtToken" [Eq, Ord, Show]
+
+public export
+data ErrorReason =
+    NoRuleApply
+  | ComposeNotClosing (Int, Int) (Int, Int)
+  | Other String
+%runElab derive "ErrorReason" [Eq, Ord, Show]

public export
record Error where
  constructor Err
- reason : StopReason
+ reason : ErrorReason
  line, col : Int
  ||| `Nothing` if the error is at the end of the input
  char : Maybe Char

@@ -52,19 +63,14 @@ record Error where

private
-skip : Lexer -> Tokenizer TokenW
-skip t = match t $ const Nothing
+skip : Lexer -> Tokenizer ExtToken
+skip t = match t $ const Skip

private
-match : Lexer -> (String -> Token) -> Tokenizer TokenW
-match t f = Tokenizer.match t (Just . f)
-%hide Tokenizer.match
-
-private
-name : Tokenizer TokenW
-name = match name $ Name . fromListP . split (== '.') . normalizeNfc
+tmatch : Lexer -> (String -> Token) -> Tokenizer ExtToken
+tmatch t f = match t (T . f)

||| [todo] escapes other than `\"` and (accidentally) `\\`
export
fromStringLit : String -> String

@@ -76,17 +82,17 @@ fromStringLit = pack . go . unpack . drop 1 . dropLast 1 where
  go (c :: cs) = c :: go cs

private
-string : Tokenizer TokenW
-string = match stringLit (Str . fromStringLit)
+string : Tokenizer ExtToken
+string = tmatch stringLit (Str . fromStringLit)

private
-nat : Tokenizer TokenW
-nat = match (some (range '0' '9')) (Nat . cast)
+nat : Tokenizer ExtToken
+nat = tmatch (some (range '0' '9')) (Nat . cast)

private
-tag : Tokenizer TokenW
-tag = match (is '\'' <+> name) (Tag . drop 1)
-  <|> match (is '\'' <+> stringLit) (Tag . fromStringLit . drop 1)
+tag : Tokenizer ExtToken
+tag = tmatch (is '\'' <+> name) (Tag . drop 1)
+  <|> tmatch (is '\'' <+> stringLit) (Tag . fromStringLit . drop 1)

@@ -112,17 +118,17 @@ supToNat = cast . pack . map fromSup . unpack

-- ★0, Type0. base ★/Type is a Reserved
private
-universe : Tokenizer TokenW
+universe : Tokenizer ExtToken
universe = universeWith "" <|> universeWith "Type" where
- universeWith : String -> Tokenizer TokenW
+ universeWith : String -> Tokenizer ExtToken
  universeWith pfx =
    let len = length pfx in
-   match (exact pfx <+> digits) (TYPE . cast . drop len)
+   tmatch (exact pfx <+> digits) (TYPE . cast . drop len)

private
-sup : Tokenizer TokenW
-sup = match (some $ pred isSupDigit) (Sup . supToNat)
-  <|> match (is '^' <+> digits) (Sup . cast . drop 1)
+sup : Tokenizer ExtToken
+sup = tmatch (some $ pred isSupDigit) (Sup . supToNat)
+  <|> tmatch (is '^' <+> digits) (Sup . cast . drop 1)

private %inline

@@ -134,9 +140,11 @@ namespace Reserved
  ||| description of a reserved symbol
  ||| @ Word a reserved word (must not be followed by letters, digits, etc)
  ||| @ Sym a reserved symbol (must not be followed by symbolic chars)
- ||| @ Punc a character that doesn't show up in names (brackets, etc)
+ ||| @ Punc a character that doesn't show up in names (brackets, etc);
+ |||   also a sequence ending in one of those, like `#[`, since the
+ |||   difference relates to lookahead
  public export
- data Reserved1 = Word String | Sym String | Punc Char
+ data Reserved1 = Word String | Sym String | Punc String
  %runElab derive "Reserved1" [Eq, Ord, Show]

  ||| description of a token that might have unicode & ascii-only aliases

@@ -145,17 +153,14 @@ namespace Reserved
  %runElab derive "Reserved" [Eq, Ord, Show]

  public export
- Sym1, Word1 : String -> Reserved
+ Sym1, Word1, Punc1 : String -> Reserved
  Sym1 = Only . Sym
  Word1 = Only . Word
-
- public export
- Punc1 : Char -> Reserved
  Punc1 = Only . Punc

  public export
  resString1 : Reserved1 -> String
- resString1 (Punc x) = singleton x
+ resString1 (Punc x) = x
  resString1 (Word w) = w
  resString1 (Sym s) = s

@@ -166,17 +171,23 @@ resString : Reserved -> String
resString (Only r) = resString1 r
resString (r `Or` _) = resString1 r

+||| return both representative strings for a token description
+public export
+resString2 : Reserved -> List String
+resString2 (Only r) = [resString1 r]
+resString2 (r `Or` s) = [resString1 r, resString1 s]
+
private
-resTokenizer1 : Reserved1 -> String -> Tokenizer TokenW
+resTokenizer1 : Reserved1 -> String -> Tokenizer ExtToken
resTokenizer1 r str =
  let res : String -> Token := const $ Reserved str in
- case r of Word w => match (exact w <+> reject idContEnd) res
-           Sym s  => match (exact s <+> reject symCont) res
-           Punc x => match (is x) res
+ case r of Word w => tmatch (exact w <+> reject idContEnd) res
+           Sym s  => tmatch (exact s <+> reject symCont) res
+           Punc x => tmatch (exact x) res

||| match a reserved token
export
-resTokenizer : Reserved -> Tokenizer TokenW
+resTokenizer : Reserved -> Tokenizer ExtToken
resTokenizer (Only r) = resTokenizer1 r (resString1 r)
resTokenizer (r `Or` s) =
  resTokenizer1 r (resString1 r) <|> resTokenizer1 s (resString1 r)

@@ -188,8 +199,8 @@ resTokenizer (r `Or` s) =
public export
reserved : List Reserved
reserved =
- [Punc1 '(', Punc1 ')', Punc1 '[', Punc1 ']', Punc1 '{', Punc1 '}',
-  Punc1 ',', Punc1 ';',
+ [Punc1 "(", Punc1 ")", Punc1 "[", Punc1 "]", Punc1 "{", Punc1 "}",
+  Punc1 ",", Punc1 ";", Punc1 "#[",
  Sym1 "@",
  Sym1 ":",
  Sym "⇒" `Or` Sym "=>",

@@ -197,7 +208,7 @@ reserved =
  Sym "×" `Or` Sym "**",
  Sym "≡" `Or` Sym "==",
  Sym "∷" `Or` Sym "::",
- Punc1 '.',
+ Punc1 ".",
  Word1 "case",
  Word1 "case0", Word1 "case1",
  Word "caseω" `Or` Word "case#",

@@ -220,14 +231,31 @@ reserved =
  Word1 "load",
  Word1 "namespace"]

+public export
+reservedStrings : List String
+reservedStrings = map resString reserved
+
+public export
+allReservedStrings : List String
+allReservedStrings = foldMap resString2 reserved
+
||| `IsReserved str` is true if `Reserved str` might actually show up in
||| the token stream
public export
IsReserved : String -> Type
-IsReserved str = str `Elem` map resString reserved
+IsReserved str = So (str `elem` allReservedStrings)
+
+private
+name : Tokenizer ExtToken
+name =
+  match name $ \str =>
+    let parts = split (== '.') $ normalizeNfc str in
+    case find (`elem` allReservedStrings) (toList parts) of
+      Nothing => T $ Name $ fromListP parts
+      Just w  => Invalid "reserved word '\{w}' inside name \{str}" str

export
-tokens : Tokenizer TokenW
+tokens : Tokenizer ExtToken
tokens = choice $
  map skip [pred isWhitespace,
            lineComment (exact "--" <+> reject symCont),

@@ -236,10 +264,24 @@ tokens = choice $
  map resTokenizer reserved <+>
  [sup, nat, string, tag, name]

+export
+check : Alternative f =>
+        WithBounds ExtToken -> Either Error (f (WithBounds Token))
+check (MkBounded val irr bounds@(MkBounds line col _ _)) = case val of
+  Skip            => Right empty
+  T tok           => Right $ pure $ MkBounded tok irr bounds
+  Invalid msg tok => Left $ Err (Other msg) line col (index 0 tok)
+
+export
+toErrorReason : StopReason -> Maybe ErrorReason
+toErrorReason EndInput = Nothing
+toErrorReason NoRuleApply = Just NoRuleApply
+toErrorReason (ComposeNotClosing s e) = Just $ ComposeNotClosing s e
+
export
lex : String -> Either Error (List (WithBounds Token))
lex str =
  let (res, reason, line, col, str) = lex tokens str in
- case reason of
-   EndInput => Right $ mapMaybe sequence res
-   _        => Left $ Err {reason, line, col, char = index 0 str}
+ case toErrorReason reason of
+   Nothing => concatMap check res @{MonoidApplicative}
+   Just e  => Left $ Err {reason = e, line, col, char = index 0 str}
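
The new `check`/`lex` pair replaces the old `Maybe Token` filtering: each `ExtToken` contributes nothing (`Skip`), one bounded token (`T`), or aborts the whole lex (`Invalid`), and `concatMap check res @{MonoidApplicative}` folds that over the token stream in the `Either Error` applicative. A simplified, self-contained model of that fold; `Raw`, `check1`, and `checkAll` are illustrative names only:

-- simplified model: each raw item yields zero values, one value, or an error
data Raw a = Drop | Keep a | Bad String

check1 : Raw a -> Either String (List a)
check1 Drop     = Right []
check1 (Keep x) = Right [x]
check1 (Bad e)  = Left e

-- combining per-item results in the Either applicative keeps every good
-- value and stops at the first Bad, like the patch's `concatMap check`
checkAll : List (Raw a) -> Either String (List a)
checkAll = foldr (\r, acc => [| check1 r ++ acc |]) (Right [])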

View file

@@ -149,6 +149,12 @@ export
qty : FileName -> Grammar True PQty
qty fname = withLoc fname [|PQ qtyVal|]

+export
+exactName : String -> Grammar True ()
+exactName name = terminal "expected '\{name}'" $ \case
+  Name (MakePName [<] x) => guard $ x == name
+  _ => Nothing
+
||| pattern var (unqualified name or _)
export

@@ -576,6 +582,15 @@ term fname = lamTerm fname
        <|> sigmaTerm fname

+export
+pragma : Grammar True a -> Grammar True a
+pragma p = resC "#[" *> p <* mustWork (resC "]")
+
+export
+declMod : FileName -> Grammar True PDeclMod
+declMod fname = withLoc fname $ pragma $
+  exactName "fail" *> [|PFail $ optional strLit|]
+
export
decl : FileName -> Grammar True PDecl

@@ -610,7 +625,11 @@ where
  nsInner = [] <$ resC "}"
        <|> [|(assert_total decl fname <* commit) :: assert_total nsInner|]

-decl fname = [|PDef $ definition fname|] <|> [|PNs $ namespace_ fname|]
+export
+declBody : FileName -> Grammar True PDeclBody
+declBody fname = [|PDef $ definition fname|] <|> [|PNs $ namespace_ fname|]
+
+decl fname = withLoc fname [|MkPDecl (many $ declMod fname) (declBody fname)|]

export
load : FileName -> Grammar True PTopLevel

View file

@@ -153,6 +153,12 @@ record PDefinition where

export Located PDefinition where def.loc = def.loc_

+public export
+data PDeclMod =
+  PFail (Maybe String) Loc
+%name PDeclMod mod
+%runElab derive "PDeclMod" [Eq, Ord, Show]
+
mutual
  public export
  record PNamespace where

@@ -163,16 +169,25 @@ mutual
  %name PNamespace ns

  public export
- data PDecl =
+ record PDecl where
+   constructor MkPDecl
+   mods : List PDeclMod
+   decl : PDeclBody
+   loc_ : Loc
+
+ public export
+ data PDeclBody =
    PDef PDefinition
  | PNs PNamespace
- %name PDecl decl
-%runElab deriveMutual ["PNamespace", "PDecl"] [Eq, Ord, Show]
+ %name PDeclBody decl
+%runElab deriveMutual ["PNamespace", "PDecl", "PDeclBody"] [Eq, Ord, Show]

export Located PNamespace where ns.loc = ns.loc_
+export Located PDecl where decl.loc = decl.loc_

export
-Located PDecl where
+Located PDeclBody where
  (PDef def).loc = def.loc
  (PNs ns).loc = ns.loc

View file

@@ -5,6 +5,7 @@ module Text.PrettyPrint.Bernardy.Core.Decorate
import public Text.PrettyPrint.Bernardy.Core
import Data.DPair
+import Data.String

public export

@@ -43,3 +44,12 @@ decorateLayout h l@(MkLayout content stats) =
export
decorate : {opts : _} -> Highlight -> Doc opts -> Doc opts
decorate h doc = doc >>= \l => pure (decorateLayout h l)
+
+||| render a doc with no line breaks at all
+export
+renderInfinite : Doc opts -> String
+renderInfinite (MkDoc (MkLayout content _) _) = unwords content where
+  unwords : SnocList String -> String
+  unwords [<] = ""
+  unwords (xs :< x) = foldMap (++ " ") xs ++ x
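
`renderInfinite` exists so that the `#[fail "…"]` substring check is not defeated by line wrapping: the whole error message is flattened onto a single line before matching. A small stand-alone illustration of the same join (the `joinLines` name is a stand-in, not the patch's):

-- stand-in for the local unwords in renderInfinite: glue the stored layout
-- lines together with single spaces instead of newlines
joinLines : SnocList String -> String
joinLines [<]       = ""
joinLines (xs :< x) = foldMap (++ " ") xs ++ x

-- joinLines [< "wrong error, expected to match", "\"duplicate tags\""]
--   == "wrong error, expected to match \"duplicate tags\""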

View file

@@ -47,7 +47,12 @@ tests = "lexer" :- [
    lexes " " [],
    lexes "-- line comment" [],
    lexes "name -- line comment" [Name "name"],
-   lexes "-- line comment\nnameBetween -- and another" [Name "nameBetween"],
+   lexes
+     """
+     -- line comment
+     nameBetween -- and another
+     """
+     [Name "nameBetween"],
    lexes "{- block comment -}" [],
    lexes "{- {- nested -} block comment -}" []
  ],

@@ -70,13 +75,14 @@ tests = "lexer" :- [
    lexes "normalïse" [Name "normalïse"],
    -- ↑ replace i + combining ¨ with precomposed ï
    lexes "map#" [Name "map#"],
+   lexes "map#[" [Name "map#", Reserved "["], -- don't actually do this
+   lexes "map #[" [Name "map", Reserved "#["],
    lexes "write!" [Name "write!"],
    lexes "uhh??!?!?!?" [Name "uhh??!?!?!?"],
-   todo "check for reserved words in a qname",
-   lexes "abc.fun.def"
-     [Name "abc", Reserved ".", Reserved "λ", Reserved ".", Name "def"],
+   lexFail "abc.fun.ghi",
+   skip $ lexFail "abc.λ.ghi",
+   lexFail "abc.ω.ghi",
    lexes "+" [Name "+"],
    lexes "*" [Name "*"],

View file

@@ -424,33 +424,46 @@ tests = "parser" :- [
  "top level" :- [
    parseMatch input "def0 A : ★⁰ = {}; def0 B : ★¹ = A;"
-     `([PD $ PDef $ MkPDef (PQ Zero _) "A" (Just $ TYPE 0 _) (Enum [] _) _,
-        PD $ PDef $ MkPDef (PQ Zero _) "B" (Just $ TYPE 1 _) (V "A" {}) _]),
+     `([PD $ MkPDecl []
+          (PDef $ MkPDef (PQ Zero _) "A" (Just $ TYPE 0 _) (Enum [] _) _) _,
+        PD $ MkPDecl []
+          (PDef $ MkPDef (PQ Zero _) "B" (Just $ TYPE 1 _) (V "A" {}) _) _]),
    parseMatch input "def0 A : ★⁰ = {} def0 B : ★¹ = A" $
-     `([PD $ PDef $ MkPDef (PQ Zero _) "A" (Just $ TYPE 0 _) (Enum [] _) _,
-        PD $ PDef $ MkPDef (PQ Zero _) "B" (Just $ TYPE 1 _) (V "A" {}) _]),
+     `([PD $ MkPDecl []
+          (PDef $ MkPDef (PQ Zero _) "A" (Just $ TYPE 0 _) (Enum [] _) _) _,
+        PD $ MkPDecl []
+          (PDef $ MkPDef (PQ Zero _) "B" (Just $ TYPE 1 _) (V "A" {}) _) _]),
    note "empty input",
    parsesAs input "" [],
    parseFails input ";;;;;;;;;;;;;;;;;;;;;;;;;;",
    parseMatch input "namespace a {}"
-     `([PD $ PNs $ MkPNamespace [< "a"] [] _]),
+     `([PD $ MkPDecl [] (PNs $ MkPNamespace [< "a"] [] _) _]),
    parseMatch input "namespace a.b.c {}"
-     `([PD $ PNs $ MkPNamespace [< "a", "b", "c"] [] _]),
+     `([PD $ MkPDecl []
+          (PNs $ MkPNamespace [< "a", "b", "c"] [] _) _]),
    parseMatch input "namespace a {namespace b {}}"
-     `([PD $ PNs $ MkPNamespace [< "a"] [PNs $ MkPNamespace [< "b"] [] _] _]),
+     `([PD (MkPDecl []
+          (PNs $ MkPNamespace [< "a"]
+            [MkPDecl [] (PNs $ MkPNamespace [< "b"] [] _) _] _) _)]),
    parseMatch input "namespace a {def x = 't ∷ {t}}"
-     `([PD $ PNs $ MkPNamespace [< "a"]
-         [PDef $ MkPDef (PQ Any _) "x" Nothing
-           (Ann (Tag "t" _) (Enum ["t"] _) _) _] _]),
+     `([PD (MkPDecl []
+          (PNs $ MkPNamespace [< "a"]
+            [MkPDecl []
+              (PDef $ MkPDef (PQ Any _) "x" Nothing
+                (Ann (Tag "t" _) (Enum ["t"] _) _) _) _] _) _)]),
    parseMatch input "namespace a {def x = 't ∷ {t}} def y = a.x"
-     `([PD $ PNs $ MkPNamespace [< "a"]
-         [PDef $ MkPDef (PQ Any _) "x" Nothing
-           (Ann (Tag "t" _) (Enum ["t"] _) _) _] _,
-        PD $ PDef $ MkPDef (PQ Any _) "y" Nothing
-          (V (MakePName [< "a"] "x") {}) _]),
+     `([PD (MkPDecl []
+          (PNs $ MkPNamespace [< "a"]
+            [MkPDecl []
+              (PDef $ MkPDef (PQ Any _) "x" Nothing
+                (Ann (Tag "t" _) (Enum ["t"] _) _) _) _] _) _),
+        PD (MkPDecl []
+          (PDef $ MkPDef (PQ Any _) "y" Nothing
+            (V (MakePName [< "a"] "x") Nothing _) _) _)]),
    parseMatch input #" load "a.quox"; def b = a.b "#
      `([PLoad "a.quox" _,
-        PD $ PDef $ MkPDef (PQ Any _) "b" Nothing
-          (V (MakePName [< "a"] "b") {}) _])
+        PD (MkPDecl []
+          (PDef $ MkPDef (PQ Any _) "b" Nothing
+            (V (MakePName [< "a"] "b") Nothing _) _) _)])
  ]
]