code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Exception
(
module Control.Exception,
module Exception
)
where
import Control.Applicative as A
import Control.Exception
import Control.Monad.IO.Class
-- | 'Control.Exception.catch' specialised to 'IOException', so the
-- handler's argument type fixes which exceptions are caught.
catchIO :: IO a -> (IOException -> IO a) -> IO a
catchIO action handler = Control.Exception.catch action handler
-- | Flipped variant of 'catchIO': handler first, action second.
handleIO :: (IOException -> IO a) -> IO a -> IO a
handleIO handler action = catchIO action handler
-- | 'Control.Exception.try' specialised to 'IOException'.
tryIO :: IO a -> IO (Either IOException a)
tryIO action = try action
-- | A monad that can catch exceptions. A minimal definition
-- requires a definition of 'gcatch'.
--
-- Implementations on top of 'IO' should implement 'gmask' to
-- eventually call the primitive 'Control.Exception.mask'.
-- These are used for
-- implementations that support asynchronous exceptions. The default
-- implementations of 'gbracket' and 'gfinally' use 'gmask'
-- thus rarely require overriding.
--
class (A.Applicative m, MonadIO m) => ExceptionMonad m where
  -- | Generalised version of 'Control.Exception.catch', allowing an arbitrary
  -- exception handling monad instead of just 'IO'.
  gcatch :: Exception e => m a -> (e -> m a) -> m a
  -- | Generalised version of 'Control.Exception.mask_', allowing an arbitrary
  -- exception handling monad instead of just 'IO'.
  gmask :: ((m a -> m a) -> m b) -> m b
  -- | Generalised version of 'Control.Exception.bracket', allowing an arbitrary
  -- exception handling monad instead of just 'IO'.
  gbracket :: m a -> (a -> m b) -> (a -> m c) -> m c
  -- | Generalised version of 'Control.Exception.finally', allowing an arbitrary
  -- exception handling monad instead of just 'IO'.
  gfinally :: m a -> m b -> m a
  -- Default: acquire under mask, run the body with exceptions restored,
  -- and run the release action on both the success and the failure path.
  gbracket before after thing =
    gmask $ \restore -> do
      a <- before
      r <- restore (thing a) `gonException` after a
      _ <- after a
      return r
  -- Default: like 'gbracket' but with no resource threaded through.
  a `gfinally` sequel =
    gmask $ \restore -> do
      r <- restore a `gonException` sequel
      _ <- sequel
      return r
instance ExceptionMonad IO where
  gcatch = Control.Exception.catch
  -- The eta-expansion instantiates 'mask''s rank-2 restore argument at
  -- the monomorphic type 'gmask' expects.
  gmask f = mask (\x -> f x)
-- | Generalised version of 'Control.Exception.try', allowing an arbitrary
-- exception handling monad instead of just 'IO'.
gtry :: (ExceptionMonad m, Exception e) => m a -> m (Either e a)
gtry action = fmap Right action `gcatch` (return . Left)
-- | Generalised version of 'Control.Exception.handle', allowing an arbitrary
-- exception handling monad instead of just 'IO'.
ghandle :: (ExceptionMonad m, Exception e) => (e -> m a) -> m a -> m a
ghandle handler action = action `gcatch` handler
-- | Always executes the first argument. If this throws an exception the
-- second argument is executed and the exception is raised again.
gonException :: (ExceptionMonad m) => m a -> m b -> m a
gonException action cleanup = action `gcatch` handler
  where
    -- Run the cleanup, then rethrow the original exception unchanged.
    handler e = do
      _ <- cleanup
      liftIO $ throwIO (e :: SomeException)
| AlexanderPankiv/ghc | compiler/utils/Exception.hs | bsd-3-clause | 2,756 | 0 | 15 | 647 | 696 | 362 | 334 | 42 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Accepting arguments to be passed through to a sub-process.
module Options.Applicative.Args
(argsArgument
,argsOption
,parseArgsFromString)
where
import Data.Attoparsec.Args
import qualified Data.Attoparsec.Text as P
import qualified Data.Text as T
import qualified Options.Applicative as O
-- | An argument which accepts a list of arguments e.g. @--ghc-options="-X P.hs \"this\""@.
argsArgument :: O.Mod O.ArgumentFields [String] -> O.Parser [String]
argsArgument =
  -- Read the raw argument string, then fail the reader if it cannot be
  -- split into sub-process arguments.
  O.argument
    (O.str >>= either O.readerError return . parseArgsFromString)
-- | An option which accepts a list of arguments e.g. @--ghc-options="-X P.hs \"this\""@.
argsOption :: O.Mod O.OptionFields [String] -> O.Parser [String]
argsOption =
  -- Same reader as 'argsArgument', but for a named option.
  O.option
    (O.str >>= either O.readerError return . parseArgsFromString)
-- | Parse from a string.
-- | Split a shell-style argument string (honouring escapes and quotes)
-- into individual arguments.
parseArgsFromString :: String -> Either String [String]
parseArgsFromString input = P.parseOnly (argsParser Escaping) (T.pack input)
| wskplho/stack | src/Options/Applicative/Args.hs | bsd-3-clause | 1,077 | 0 | 11 | 206 | 231 | 128 | 103 | 21 | 1 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign
-- Copyright : (c) The FFI task force 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : ffi@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- A collection of data types, classes, and functions for interfacing
-- with another programming language.
--
-----------------------------------------------------------------------------
module Foreign
( module Data.Bits
, module Data.Int
, module Data.Word
, module Foreign.Ptr
, module Foreign.ForeignPtr
, module Foreign.StablePtr
, module Foreign.Storable
, module Foreign.Marshal
-- * Unsafe Functions
-- | 'unsafePerformIO' is exported here for backwards
-- compatibility reasons only. For doing local marshalling in
-- the FFI, use 'unsafeLocalState'. For other uses, see
-- 'System.IO.Unsafe.unsafePerformIO'.
, unsafePerformIO
) where
import Data.Bits
import Data.Int
import Data.Word
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.StablePtr
import Foreign.Storable
import Foreign.Marshal
import GHC.IO (IO)
import qualified GHC.IO (unsafePerformIO)
{-# DEPRECATED unsafePerformIO "Use System.IO.Unsafe.unsafePerformIO instead; This function will be removed in the next release" #-}
{-# INLINE unsafePerformIO #-}
-- | Deprecated re-export of 'GHC.IO.unsafePerformIO', kept only for
-- backwards compatibility (see the export-list comment above).
unsafePerformIO :: IO a -> a
unsafePerformIO = GHC.IO.unsafePerformIO
| beni55/haste-compiler | libraries/ghc-7.8/base/Foreign.hs | bsd-3-clause | 1,617 | 0 | 6 | 330 | 161 | 109 | 52 | 26 | 1 |
{-# LANGUAGE TemplateHaskell, DataKinds, PolyKinds, TypeFamilies #-}
module T9081 where
import Data.Proxy
-- Template Haskell splice exercising a kind-polymorphic class with an
-- equality superclass inside a declaration quote (GHC regression test).
$( [d|
  class kproxy ~ 'KProxy => C (kproxy :: KProxy a) where
    type TF (x :: a) :: Bool
 |])
| urbanslug/ghc | testsuite/tests/th/T9081.hs | bsd-3-clause | 208 | 0 | 6 | 44 | 21 | 14 | 7 | 6 | 0 |
module Parser where
import Control.Applicative
-- | Location update: folds a consumed token into the current location.
type Loc a l = a -> l -> l
-- | A parse failure: accumulated error messages plus the failure location.
type Err l = ([String], l)
-- | A parse success: remaining input, location, locator, and result.
type Suc a l b = ([a], l, Loc a l, b)
-- | Overall parser outcome.
type Ret a l b = Either (Err l) (Suc a l b)
-- | Token predicate.
type Pred a = a -> Bool
-- | A parser over tokens @a@ with locations @l@ producing a @b@.
newtype Parser a l b = Parser ([a] -> l -> Loc a l -> Ret a l b)
-- | Run a parser on input, an initial location, and a location updater.
parse :: Parser a l b -> [a] -> l -> Loc a l -> Ret a l b
parse (Parser run) = run
-- | Run a parser but drop the locator from the success triple.
prettyParse :: Parser a l b -> [a] -> l -> Loc a l -> Either (Err l) ([a], l, b)
prettyParse (Parser run) input l0 upd =
  case run input l0 upd of
    Right (rest, l1, _, out) -> Right (rest, l1, out)
    Left failure             -> Left failure
instance Functor (Parser a l) where
  -- Map over the result; note that the ORIGINAL locator @u@ is restored
  -- in the success tuple, not whatever locator @p@ produced.
  fmap f (Parser p) = Parser $ \i l u -> case p i l u of
    Right (i_, l_, _, o) -> Right (i_, l_, u, f o)
    Left x -> Left x
instance Applicative (Parser a l) where
  pure v = Parser $ \i l u -> Right (i, l, u, v)
  -- Sequence the function parser, then the argument parser on the
  -- remaining input; the outer locator @u@ is threaded through.
  (Parser p1) <*> p2 = Parser $ \i l u -> case p1 i l u of
    Right (i_, l_, _, f) -> parse (fmap f p2) i_ l_ u
    Left x -> Left x
instance Alternative (Parser a l) where
  empty = Parser $ \_ l _ -> Left (["<Alternative.empty>"], l)
  -- Try @p1@; on failure, retry @p2@ on the SAME input.  If both fail,
  -- the second branch's messages come first (e__ ++ e_) and the error is
  -- reported at the original location @l@.
  (Parser p1) <|> (Parser p2) = Parser $ \i l u -> case p1 i l u of
    Left (e_, _) -> case p2 i l u of
      Left (e__, _) -> Left (e__ ++ e_, l)
      Right x -> Right x
    Right x -> Right x
instance Monad (Parser a l) where
  return = pure
  -- Run @p@, then feed its result to @f@ on the remaining input,
  -- restoring the outer locator @u@ (as in the 'Applicative' instance).
  (Parser p) >>= f = Parser $ \i l u -> case p i l u of
    Right (i_, l_, _, o) -> parse (f o) i_ l_ u
    Left x -> Left x
-- | Produce the current location without consuming any input.
loc :: Parser a l l
loc = Parser $ \input here upd -> Right (input, here, upd, here)
-- | Fail with message @e@ — at the supplied location when given one,
-- otherwise at the current location.
err :: Maybe l -> String -> Parser a l b
err Nothing  e = Parser $ \_ here _ -> Left ([e], here)
err (Just l) e = Parser $ \_ _ _ -> Left ([e], l)
-- | Succeed with 'True' iff the input is exhausted; consumes nothing.
eof :: Parser a l Bool
eof = Parser $ \input here upd -> Right (input, here, upd, null input)
-- | Render an unexpected-EOF message; the flag says whether the
-- expectation was mandatory.
errEOF :: Bool -> String -> String
errEOF mandatory expected
  | mandatory = unwords ["| Unexpected EOF, expecting", expected]
  | otherwise = unwords ["| Unexpected EOF, optionally expecting", expected]
-- | Look at the next token without consuming it; fail on end of input.
peek :: String -> Parser a l a
peek expected = Parser $ \input here upd ->
  case input of
    []      -> Left ([errEOF False expected], here)
    (t : _) -> Right (input, here, upd, t)
-- | Consume one token matching the predicate, updating the location with
-- the consumed token; fail at end of input or on a mismatch.
satisfy :: Pred a -> String -> Parser a l a
satisfy ok expected = Parser run
  where
    run [] here _ = Left ([errEOF True expected], here)
    run (t : ts) here upd
      | ok t = Right (ts, upd t here, upd, t)
      | otherwise =
          Left ([unwords ["| Predicate not satisfied, expecting", expected]], here)
-- | Consume any single token.
item :: Parser a l a
item = satisfy (const True) "<Parser.item>"
| fredmorcos/attic | projects/pet/archive/pet_haskell_cli/cli/Parser.hs | isc | 2,367 | 0 | 16 | 675 | 1,395 | 735 | 660 | 57 | 3 |
*Main > :t cf
cf :: Functor f => (CardFacts -> f CardFacts) -> KB -> f KB
*Main > :t (^.)
(^.) :: s -> Getting a s a -> a
*Main > :t foo
foo :: KB
*Main > :t foo^.cf
foo^.cf :: CardFacts
-- view cf foo is exactly equal to foo^.cf
*Main > :t view
view :: MonadReader s m => Getting a s a -> m a
-- this is legal
*Main > foo & cf .~ initCardFacts
-- but this isn't
-- foo^.cf .~ initCardFacts
*Main > Map.fromList [("hello",12)] ^.at "hello"
Just 12
*Main > at 10 .~ Just "hello" $ Map.empty
fromList [(10,"hello")]
------------------------------------------------------------------------------
*Main > let bar = foo^.pc
*Main > at 1 .~ Just initCardSet $ bar
fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),(5,fromList []),(6,fromList [])]
-- so we might try something like this:
foo^.pc^.at 1 .~ Just initCardSet
-- but that doesn't work
-- this however does:
-- *Main> foo^.pc^.at 1
Just (fromList [])
-- *Main> foo^.pc^.at 8
Nothing
-- *Main> ix 3 %~ (map toUpper) $ Map.fromList [(2, "Earth"), (3, "Mars")]
fromList [(2,"Earth"),(3,"MARS")]
-- *Main> ix 3 .~ "foo" $ Map.fromList [(2, "Earth"), (3, "Mars")]
fromList [(2,"Earth"),(3,"foo")]
------------------------------------------------------------------------------
-- working from the simple example this is legit
*Main> db^.fs & at 3 ?~ 'x'
fromList [(1,'a'),(2,'b'),(3,'x'),(4,'d'),(5,'e')]
-- and so this works
*Main> db^.cf & at 'a' ?~ Noone
fromList [('a',Noone),('b',Unknown),('c',Unknown),('d',Unknown)]
*Main > db^.cf & at 'a' ?~ ID 5
fromList [('a',ID 5),('b',Unknown),('c',Unknown),('d',Unknown)]
-- here's an example of a nested map insertion
*Main > Map.empty & at "hello" . non Map.empty . at "world" ?~ "!!!"
fromList [("hello",fromList [("world","!!!")])]
------------------------------------------------------------------------------
*Main > foo^.um.at 1
Nothing
*Main > foo^.um & at 1 ?~ 's'
fromList [(1,'s')]
-- but
*Main > foo^.um.at 1 ?~ 's'
-- <bombs>
*Main > at 1 ?~ 'j' $ foo^.um
*Main > foo^.myMap & at 1 ?~ 'j'
fromList [(1,'j'),(2,'b')]
*Main > foo^.myList & ix 1 .~ 5
[1,5,3,4,5,6,7,8,9,10]
------------------------------------------------------------------------------
>>> let m = M.fromList [('a',1), ('b',2), ('c',3)]
>>> let k = S.fromList ['b','c','e']
>>> m ^.. foldMap at k
[Just 2,Just 3,Nothing]
>>> m ^.. foldMap ix k
[2,3]
------------------------------------------------------------------------------
*Main > fes foo (cf.at MS .= (Just Noone))
KB {_cf = fromList [(MS,Noone),(CM,Unknown),(MW,Unknown),(RG,Unknown),(MP,Unknown),(PP,Unknown),(Ck,Unknown),(Dg,Unknown),(Lp,Unknown),(Rp,Unknown),
(Sp,Unknown),(Kn,Unknown),(Bl,Unknown),(Cn,Unknown),(Dr,Unknown),(Br,Unknown),(Lb,Unknown),(Lg,Unknown),(Hl,Unknown),(Sy,Unknown)],
_pc = fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),(5,fromList []),(6,fromList [])],
_pcl = fromList [(1,[]),(2,[]),(3,[]),(4,[]),(5,[]),(6,[])], _pc' = fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),
(5,fromList []),(6,fromList [])],
_pm = fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),(5,fromList []),(6,fromList [])],
_pm' = fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),(5,fromList []),(6,fromList [])]}
*Main > fes foo (cf.at MS .= (Just (ID 1)))
KB {_cf = fromList [(MS,ID 1),(CM,Unknown),(MW,Unknown),(RG,Unknown),(MP,Unknown),(PP,Unknown),(Ck,Unknown),(Dg,Unknown),(Lp,Unknown),(Rp,Unknown),
(Sp,Unknown),(Kn,Unknown),(Bl,Unknown),(Cn,Unknown),(Dr,Unknown),(Br,Unknown),(Lb,Unknown),(Lg,Unknown),(Hl,Unknown),(Sy,Unknown)],
_pc = fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),(5,fromList []),(6,fromList [])],
_pcl = fromList [(1,[]),(2,[]),(3,[]),(4,[]),(5,[]),(6,[])], _pc' = fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),
(5,fromList []),(6,fromList [])],
_pm = fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),(5,fromList []),(6,fromList [])],
_pm' = fromList [(1,fromList []),(2,fromList []),(3,fromList []),(4,fromList []),(5,fromList []),(6,fromList [])]}
-- add2Map using state transformer
add2MapT :: Int -> Char -> StateT DB IO ()
add2MapT i c = myMap.at i .= (Just c)
-- i worked this out from the types, i did!
*Main> foo
fromList [(1,'a'),(2,'b'),(3,'c'),(4,'d'),(5,'e')]
*Main> bar
fromList [(4,'n'),(5,'o'),(6,'p'),(7,'q'),(8,'r'),(9,'s')]
*Main> over myMap (Map.union bar) db
DB {_myMap = fromList [(1,'a'),(2,'b'),(3,'c'),(4,'n'),(5,'o'),(6,'p'),(7,'q'),(8,'r'),(9,'s')]}
| afcondon/cluedoHS | notes not executable.hs | mit | 4,584 | 72 | 24 | 579 | 2,719 | 1,553 | 1,166 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Text.Marquee.Parser.HTML (
info
, comment
, cdata
, special
, preFormated
, simpleTag
, tag
, ctag) where
import Control.Applicative
import Control.Monad
import Data.Char (isLetter, isDigit)
import Data.Text(Text())
import qualified Data.Text as T
import Data.Attoparsec.Text as Atto
import Data.Attoparsec.Combinator
import Text.Marquee.Parser.Common
-- | A processing instruction, e.g. @<?xml ... ?>@, returned verbatim
-- including the delimiters.
info :: Parser Text
info = do
  open <- string "<?"
  body <- T.concat <$> manyTill chunk (lookAhead close)
  fin <- close
  return $ T.concat [open, body, fin]
  where
    close = string "?>"
    chunk = Atto.takeWhile1 (/= '?') <|> Atto.take 2
-- | An HTML comment, @<!-- ... -->@, returned verbatim.
comment :: Parser Text
comment = do
  open <- string "<!--"
  body <- T.concat <$> manyTill chunk (lookAhead close)
  fin <- close
  return $ T.concat [open, body, fin]
  where
    close = string "-->"
    chunk = Atto.takeWhile1 (/= '-') <|> Atto.take 3
-- | A CDATA section, @<![CDATA[ ... ]]>@, returned verbatim.
cdata :: Parser Text
cdata = do
  open <- string "<![CDATA["
  body <- T.concat <$> manyTill chunk (lookAhead close)
  fin <- close
  return $ T.concat [open, body, fin]
  where
    close = string "]]>"
    chunk = Atto.takeWhile1 (/= ']') <|> Atto.take 3
-- | A declaration such as @<!DOCTYPE ...>@, returned verbatim.
special :: Parser Text
special = do
  open <- string "<!"
  body <- T.concat <$> manyTill chunk (lookAhead close)
  fin <- close
  return $ T.concat [open, body, fin]
  where
    close = string ">"
    chunk = Atto.takeWhile1 (/= '>') <|> Atto.take 1
-- | Raw-text elements (@script@ \/ @pre@ \/ @style@), case-insensitive,
-- captured verbatim up to and including the matching close tag.
preFormated :: Parser Text
preFormated = do
  open <- opening
  body <- T.concat <$> manyTill chunk (lookAhead closing)
  fin <- closing
  return $ T.concat [open, body, fin]
  where
    opening = stringCI "<script" <|> stringCI "<pre" <|> stringCI "<style"
    closing = stringCI "</script>" <|> stringCI "</pre>" <|> stringCI "</style>"
    chunk = Atto.takeWhile1 (/= '<') <|> Atto.take 6
-- | An opening or closing tag whose name is one of the well-known tags
-- in 'tagCache', e.g. @<div>@ or @</div>@.
--
-- Fix: try @"</"@ BEFORE @"<"@.  @string "<"@ succeeds on a closing tag
-- too (consuming just the @<@), and once an attoparsec alternative has
-- succeeded there is no re-entry into it when a later parser fails, so
-- with the old order the @"</"@ branch was unreachable and closing tags
-- were never recognised by this parser.
simpleTag :: Parser Text
simpleTag = do
  start <- startP
  tname <- choice $ map stringCI tagCache
  end <- endP
  return $ T.concat [start, tname, end]
  where startP = string "</" <|> string "<"
        endP = T.singleton <$> whitespace <|> string ">" <|> string "/>"
-- Any valid tag
-- | Any well-formed opening (or self-closing) tag, returned verbatim.
tag :: Parser Text
tag = do
  open <- string "<"
  name <- tagName
  attrs <- T.concat <$> many tagAttribute
  trailing <- Atto.takeWhile isWhitespace
  close <- string "/>" <|> string ">"
  return $ T.concat [open, name, attrs, trailing, close]
-- | A closing tag, e.g. @</div>@, returned verbatim.
ctag :: Parser Text
ctag = do
  open <- string "</"
  name <- tagName
  trailing <- Atto.takeWhile isWhitespace
  close <- string ">"
  return $ T.concat [open, name, trailing, close]
-- | Tag name: a letter followed by letters, digits or hyphens.
tagName :: Parser Text
tagName = do
  first <- satisfy isLetter
  rest <- Atto.takeWhile (\c -> isLetter c || isDigit c || c == '-')
  return $ T.cons first rest
-- | One attribute, including its mandatory leading whitespace and an
-- optional @=value@ part, returned verbatim.
tagAttribute :: Parser Text
tagAttribute = do
  lead <- Atto.takeWhile1 isWhitespace
  name <- attributeName
  value <- option "" attributeValueSpec
  return $ T.concat [lead, name, value]
-- | Attribute name: a letter, @_@ or @:@, then letters, digits or @_:.-@.
attributeName :: Parser Text
attributeName = do
  first <- satisfy (\c -> isLetter c || c `elem` ("_:" :: String))
  rest <- Atto.takeWhile (\c -> isLetter c || isDigit c || c `elem` ("_:.-" :: String))
  return $ T.cons first rest
-- | The @=value@ part of an attribute, keeping surrounding whitespace.
attributeValueSpec :: Parser Text
attributeValueSpec = do
  before <- Atto.takeWhile isWhitespace
  eq <- string "="
  after <- Atto.takeWhile isWhitespace
  value <- attributeValue
  return $ T.concat [before, eq, after, value]
-- | Attribute value: unquoted, single-quoted or double-quoted (quotes
-- are kept in the returned text).
attributeValue :: Parser Text
attributeValue = unquoted <|> quoted '\'' <|> quoted '"'
  where
    unquoted = Atto.takeWhile1 (`notElem` ("\"'=<>`" :: String))
    quoted q = do
      open <- T.singleton <$> char q
      body <- Atto.takeWhile (/= q)
      close <- T.singleton <$> char q
      return $ T.concat [open, body, close]
-- Tag cache
-- | Names of the well-known tags recognised by 'simpleTag'.
--
-- Fix: 'simpleTag' matches these with @choice . map stringCI@, and once
-- an alternative succeeds attoparsec does not re-enter the choice when a
-- later parser fails — so any name listed AFTER one of its own prefixes
-- could never match ("header" after "head", "link" after "li", "track"
-- after "tr", "thead" after "th", "colgroup" after "col", "basefont"
-- after "base", "frameset" after "frame", "menuitem" after "menu").
-- Longer names are now listed before their prefixes.
tagCache :: [Text]
tagCache = ["address"
           , "article"
           , "aside"
           , "basefont"
           , "base"
           , "blockquote"
           , "body"
           , "caption"
           , "center"
           , "colgroup"
           , "col"
           , "dd"
           , "details"
           , "dialog"
           , "dir"
           , "div"
           , "dl"
           , "dt"
           , "fieldset"
           , "figcaption"
           , "figure"
           , "footer"
           , "form"
           , "frameset"
           , "frame"
           , "h1"
           , "header"
           , "head"
           , "hr"
           , "html"
           , "iframe"
           , "legend"
           , "link"
           , "li"
           , "main"
           , "menuitem"
           , "menu"
           , "meta"
           , "nav"
           , "noframes"
           , "ol"
           , "optgroup"
           , "option"
           , "p"
           , "param"
           , "section"
           , "source"
           , "summary"
           , "table"
           , "tbody"
           , "td"
           , "tfoot"
           , "thead"
           , "th"
           , "title"
           , "track"
           , "tr"
           , "ul"]
| DanielRS/marquee | src/Text/Marquee/Parser/HTML.hs | mit | 4,820 | 0 | 14 | 1,427 | 1,614 | 839 | 775 | 171 | 1 |
{-|
Module : Main
Description : Entry point for parsebnf
Copyright : (c) chop-lang, 2015
License : MIT
Maintainer : carterhinsley@gmail.com
-}
module Main where
import qualified CLI
import qualified EBNFParser as EBNF
-- | Read CLI arguments, load the EBNF file they name, and print the
-- parse result.
main = do
    argResults <- CLI.processArgs
    -- NOTE(review): 'head' makes this partial — it crashes on an empty
    -- result list; presumably 'CLI.processArgs' guarantees at least one
    -- entry.  Confirm against the CLI module.
    ebnfFile <- CLI.ebnfFile . head $ argResults
    print . EBNF.parse $ ebnfFile
| chop-lang/parsebnf | src/Main.hs | mit | 360 | 0 | 10 | 78 | 59 | 33 | 26 | 7 | 1 |
-- | Greatest common divisor ("największy wspólny dzielnik").
-- Always non-negative; @nwd 0 0 == 0@, and the sign of either argument
-- is ignored — exactly as the original subtraction-based version.
--
-- Rewritten from subtraction-based to modulo-based Euclid: identical
-- results, but O(log min |x| |y|) steps instead of up to O(|x| / |y|).
nwd :: Integer -> Integer -> Integer
nwd x 0 = abs x
nwd x y = nwd (abs y) (abs x `mod` abs y)
| x < y = nwd y x | RAFIRAF/HASKELL | nwd3euler.hs | mit | 226 | 1 | 9 | 81 | 200 | 93 | 107 | 9 | 2 |
module TreeSpec
( main
, spec
) where
import Data.Binary
import MLUtil
import Test.Hspec
{-
leaf :: LeafLabel b => String -> Tree a b
leaf = Leaf . C
node :: ArrowLabel a => String -> [Arrow a b] -> Tree a b
node = Node . L
-}
-- | Placeholder spec: the 'Data.Binary' round-trip properties below are
-- currently disabled (the @leaf@/@node@ helpers they rely on are also
-- commented out at the top of this module).
spec :: Spec
spec = do
    {-
    describe "encode" $ do
        it "should roundtrip single leaf" $
            let tree = leaf "c0"
                bs = encode tree
                tree' = decode bs
            in tree' `shouldBe` tree
        it "should roundtrip tree" $
            let tree = node "l0"
                        [ A (leaf "c0") "f0"
                        , A (node "l1"
                            [ A (leaf "c1") "f1"
                            , A (leaf "c2") "f2"
                            ]) "f3"]
                bs = encode tree
                tree' = decode bs
            in tree' `shouldBe` tree
    -}
    return ()
-- | Run this spec standalone.
main :: IO ()
main = hspec spec
| rcook/mlutil | mlutil/spec/TreeSpec.hs | mit | 971 | 0 | 8 | 474 | 63 | 36 | 27 | 11 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# OPTIONS_GHC -Wall #-}
module Generator where
import Control.Monad
import Control.Arrow (first)
import Control.Monad.Trans.Class
import Data.Void
-- | A stream producer over base monad @m@.
data Generator o m r
  = Yield (Generator o m r) o   -- ^ emit an @o@, then continue
  | Done r                      -- ^ finished with result @r@
  | M (m (Generator o m r))     -- ^ run an effect to obtain the next step
  | Check (Generator o m r) (Generator o m r)
    -- ^ branch point: first field continues normally, second is the
    -- shutdown path taken when the consumer is done
instance Monad m => Monad (Generator o m) where
  return = Done
  -- Bind grafts @f@ onto every 'Done' leaf, pushing it through yields,
  -- effects and checkpoints.
  Done r >>= f = f r
  Yield next o >>= f = Yield (next >>= f) o
  M m >>= f = M (liftM (>>= f) m)
  Check next done >>= f = Check (next >>= f) (done >>= f)
instance MonadTrans (Generator o) where
  lift = M . liftM Done
-- | Pull one element: the rest of the generator plus 'Just' the yielded
-- value, or 'Nothing' once the generator is done.  'Check' nodes take
-- the keep-going branch.
draw :: Monad m
     => Generator o m r
     -> m (Generator o m r, Maybe o)
draw (Yield rest o) = return (rest, Just o)
draw (Done r) = return (Done r, Nothing)
draw (M act) = act >>= draw
draw (Check next _) = draw next
-- | Shut a generator down: take the shutdown branch of every 'Check',
-- discard remaining yields, and run effects to reach the final result.
closeGen :: Monad m => Generator o m r -> m r
closeGen =
  shut
  where
    shut (Yield rest _) = shut rest
    shut (Done r) = return r
    shut (M act) = act >>= shut
    shut (Check _ done) = shut done
-- | A conduit transforms an upstream generator of @i@ values into a
-- downstream generator of @o@ values, returning the (possibly advanced)
-- upstream generator alongside its result.
newtype ConduitM i o m r = ConduitM
  { unConduitM
      :: forall u. Generator i m u -> Generator o m (Generator i m u, r)
  }
-- | Run a complete pipeline: feed it a trivial upstream, close the
-- downstream for its result, then close the leftover upstream.
runConduitM :: Monad m => ConduitM () Void m r -> m r
runConduitM (ConduitM pipe) = do
  (upstream, r) <- closeGen (pipe (Done ()))
  closeGen upstream
  return r
instance Monad m => Monad (ConduitM i o m) where
  return x = ConduitM $ \up -> Done (up, x)
  -- Thread the upstream through the first stage, then hand the advanced
  -- upstream to the continuation.
  ConduitM f >>= g = ConduitM $ \up -> do
    (up', a) <- f up
    unConduitM (g a) up'
instance MonadTrans (ConduitM i o) where
  lift m = ConduitM $ \up -> M $ do
    x <- m
    return $ Done (up, x)
-- | Emit one value downstream.
yield :: Monad m => o -> ConduitM i o m ()
yield o = ConduitM $ \up -> Yield (Done (up, ())) o
-- | Pull one value from upstream ('Nothing' at end of input).
await :: Monad m => ConduitM i o m (Maybe i)
await = ConduitM $ M . liftM Done . draw
-- | Ask whether downstream is still interested: 'True' to keep going,
-- 'False' when we are being shut down.
check :: Monad m => ConduitM i o m Bool
check = ConduitM $ \up -> Check (Done (up, True)) (Done (up, False))
-- | Push a value back onto the upstream so the next 'await' sees it.
leftover :: Monad m => i -> ConduitM i o m ()
leftover i = ConduitM $ \up -> Done (Yield up i, ())
-- | Compose two conduits: the first stage's output feeds the second
-- stage's input; the leftover middle generator is 'drain'ed to recover
-- the original upstream.
fuse :: Monad m
     => ConduitM a b m ()
     -> ConduitM b c m r
     -> ConduitM a c m r
fuse (ConduitM up) (ConduitM down) = ConduitM $ liftM (first drain) . down . up
-- | Recover the inner upstream from a finished middle stage, discarding
-- any values it still yields and taking shutdown branches.
drain :: Monad m
      => Generator b m (Generator a m u, ())
      -> Generator a m u
drain (Yield rest _b) = drain rest
drain (M act) = M (liftM drain act)
drain (Done (upstream, ())) = upstream
drain (Check _ done) = drain done
---
-- | Demo wiring: a source of 1..10, a conduit passing 5 of them through,
-- and a summing sink.
-- NOTE(review): as captured here the 'do' block ends with the 'let', so
-- nothing is ever run — the final
-- @runConduitM (src `fuse` conduit `fuse` sink) >>= print@ call appears
-- to have been truncated.  Confirm against the original file.
main :: IO ()
main = do
    let src = do
            let go i = say ("yielding: " ++ show i) >> yield i
            mapM_ go [1..10 :: Int]
            say "Finalize src"
        conduit =
            loop (5 :: Int)
          where
            loop 0 = say "finishing conduit"
            loop i = await >>= maybe (say "conduit: early term") (\x -> yield x >> loop (i - 1))
        sink =
            loop 0
          where
            loop r = await >>= maybe (say "Finalize sink" >> return r) (\i -> loop $! r + i)
-- | Print a line from inside a conduit.
say :: String -> ConduitM i o IO ()
say = lift . putStrLn
runConduitM (src `fuse` conduit `fuse` sink) >>= print | snoyberg/nonterminating-pipes | src/Generator.hs | mit | 3,097 | 0 | 19 | 988 | 1,531 | 767 | 764 | 88 | 4 |
module Problem34 where
{--
Task description:
145 is a curious number, as 1! + 4! + 5! = 1 + 24 + 120 = 145.
Find the sum of all numbers which are equal to the sum of the factorial of their digits.
Note: as 1! = 1 and 2! = 2 are not sums they are not included.
--}
import Data.Set (Set)
import qualified Data.Set as Set
type N = Int
-- | Factorial.  Not defined for negative input.
fac :: N -> N
fac n
  | n == 0 = 1
  | otherwise = n * fac (n - 1)
facs = fmap fac [0..9]
-- | Decimal digits of a non-negative number, most significant first.
digits :: N -> [N]
digits n = [read [c] | c <- show n]
-- | Sum of the factorials of a number's digits.
facSum :: N -> N
facSum = sum . fmap (facs !!) . digits
-- | "Curious" numbers: equal to the sum of the factorials of their digits.
predicate :: N -> Bool
predicate n = facSum n == n
-- | All sums of between 2 and (n + 2) digit factorials.  Any candidate
-- number is by definition such a sum, so this bounds the search space.
combinations :: N -> Set N
combinations 0 = Set.fromList $ do
  x <- facs
  y <- facs
  return $ x+y
combinations n =
  let s1 = combinations $ n - 1
      s2 = Set.fromList $ do
        x <- facs
        y <- Set.toList s1
        return $ x+y
  in Set.union s1 s2
{-
An upper range is given by:
* 9999999 as fac 9 * 7 < 9999999
…but I should opt to construct the searchspace
in a manner of combining 2-7 different facs so that
the resulting searchspace would be smaller.
-}
-- Sums of up to 8 factorials; drop single-digit values (1! and 2! are
-- not sums, per the problem statement).
searchSpace = filter (>= 10) . Set.toList $ combinations 6
main = print . sum $ filter predicate searchSpace
| runjak/projectEuler | src/Problem34.hs | mit | 1,198 | 0 | 14 | 330 | 337 | 175 | 162 | 28 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, BangPatterns #-}
module Control.OperationalTransformation.ClientServerTests
( tests
) where
import Control.OperationalTransformation
import Control.OperationalTransformation.Client
import qualified Control.OperationalTransformation.JSON ()
import qualified Control.OperationalTransformation.JSON.Gen as JSONGen
import Control.OperationalTransformation.Server
-- import qualified Control.OperationalTransformation.Text.Gen as TextGen
import qualified Control.OperationalTransformation.Text0.Gen as Text0Gen
import Data.Maybe (fromJust)
import Test.QuickCheck hiding (reason)
import Test.QuickCheck.Property
import Test.Tasty
import Test.Tasty.QuickCheck hiding (reason)
type Queue a = [a]
-- | The empty FIFO queue.
emptyQueue :: Queue a
emptyQueue = []
-- | Enqueue at the back.  O(n), which is fine for these small queues.
appendQueue :: a -> Queue a -> Queue a
appendQueue a q = q ++ [a]
type ClientId = Int
-- | A simulated client: its view of the server revision, queues of
-- in-flight messages in both directions ('Nothing' in the receive queue
-- is an acknowledgement of the client's own operation), its local
-- document, and the OT client state machine.
data ExtendedClient doc op = ExtendedClient { clientId :: !ClientId
                                            , clientRevision :: !Revision
                                            , clientSendQueue :: Queue (Revision, op)
                                            , clientReceiveQueue :: Queue (Maybe op)
                                            , clientDoc :: !doc
                                            , clientState :: !(ClientState op)
                                            } deriving (Show)
-- | Property: after randomly interleaving client edits, sends and
-- receives (plus a drain phase once the action budget is spent), all
-- clients are synchronized and every client document equals the server
-- document (OT convergence).
prop_client_server :: (Eq doc, Arbitrary doc, OTSystem doc op, OTComposableOperation op)
                   => (doc -> Gen op) -> Property
prop_client_server genOp = property $ do
  doc <- arbitrary
  let server = initialServerState doc
      clients = createClients doc $ take numClients [1..]
  (server', clients') <- simulate numActions server clients
  return $ if not (all isSynchronized clients')
    then property $ failed { reason = "some clients are not synchronized" }
    else let ServerState _ doc' _ = server'
         in if all ((== doc') . clientDoc) clients'
              then property True
              else property $ failed { reason = "client documents did not converge" }
  where
    numClients, numActions :: Int
    numClients = 2
    numActions = 100
    firstRevision = 0
    createClients doc = map $ \n ->
      ExtendedClient { clientId = n
                     , clientRevision = firstRevision
                     , clientSendQueue = emptyQueue
                     , clientReceiveQueue = emptyQueue
                     , clientDoc = doc
                     , clientState = initialClientState
                     }
    -- One simulation step: pick a random client and a random action
    -- (receive / send / edit); once n drops below zero only pending
    -- sends and receives are processed until all queues empty.
    simulate !n !server !clients = do
      clientN <- choose (0, length clients - 1)
      actionN <- choose (0, 2) :: Gen Int
      let client = clients !! clientN
      (server', clients') <- case actionN of
        0 | canReceive client -> do
          let client' = receiveClient client
          return (server, replace clientN client' clients)
        1 | canSend client -> do
          let ((rev, op), client') = sendClient client
              Right (op', (), server') = applyOperation server rev op ()
              clients' = replace clientN client' clients
              clients'' = broadcast (clientId client) op' clients'
          return (server', clients'')
        _ | n < 0 -> return (server, clients)
          | otherwise -> do
            client' <- editClient client
            return (server, replace clientN client' clients)
      if n > 0 || any (\c -> canReceive c || canSend c) clients'
        then simulate (n-1) server' clients'
        else return (server', clients')
    replace 0 e (_:xs) = e:xs
    replace n e (x:xs) = x:(replace (n-1) e xs)
    replace _ _ [] = error "replacing empty list"
    canReceive = not . null . clientReceiveQueue
    canSend = not . null . clientSendQueue
    -- Deliver the next queued server message to a client: 'Nothing' is
    -- an ack of the client's own operation, 'Just op' a remote edit.
    receiveClient client = case clientReceiveQueue client of
      [] -> error "empty receive queue"
      msg:ops ->
        let
          client' = client { clientReceiveQueue = ops
                           , clientRevision = clientRevision client + 1
                           }
        in case msg of
          Nothing -> case fromJust $ serverAck (clientState client') of
            (Just op, clientState') -> client'
              { clientState = clientState'
              , clientSendQueue = appendQueue (clientRevision client', op) (clientSendQueue client')
              }
            (Nothing, clientState') -> client' { clientState = clientState' }
          Just op -> case applyServer (clientState client) op of
            Left err -> error $ "should not happen: " ++ err
            Right (op', clientState') -> case apply op' (clientDoc client') of
              Left err -> error $ "apply failed: " ++ err
              Right doc' -> client' { clientState = clientState', clientDoc = doc' }
    sendClient client = case clientSendQueue client of
      [] -> error "empty send queue"
      op:ops -> (op, client { clientSendQueue = ops })
    -- Apply a freshly generated operation locally, queueing it for the
    -- server only when the client state machine says to send.
    editClient client = do
      op <- genOp $ clientDoc client
      let doc' = fromRight $ apply op $ clientDoc client
          (shouldSend, state') = fromRight $ applyClient (clientState client) op
          client' = client { clientState = state', clientDoc = doc' }
      return $ if shouldSend
        then client' { clientSendQueue = appendQueue (clientRevision client', op) (clientSendQueue client) }
        else client'
    fromRight (Right a) = a
    fromRight (Left err) = error err
    broadcast creator op = map $ \client ->
      let msg = if creator == clientId client then Nothing else Just op
      in client { clientReceiveQueue = appendQueue msg (clientReceiveQueue client) }
    isSynchronized client = case clientState client of
      ClientSynchronized -> True
      _ -> False
-- | Convergence properties instantiated for the Text0 OT system.
text0Tests = testGroup "Control.OperationalTransformation.ClientServerTests.Text0" [
    testProperty "prop_client_server_single_ops" $ prop_client_server Text0Gen.genOperation,
    testProperty "prop_client_server_multi_ops" $ prop_client_server Text0Gen.genMultiOperation
  ]
-- | Convergence properties instantiated for the JSON OT system.
jsonTests = testGroup "Control.OperationalTransformation.ClientServerTests.JSON" [
    testProperty "prop_client_server_single_ops" $ prop_client_server JSONGen.genOperation,
    testProperty "prop_client_server_multi_ops" $ prop_client_server JSONGen.genMultiOperation
  ]
-- textTests = testGroup "Control.OperationalTransformation.ClientServerTests.Text" [
-- testProperty "prop_client_server" $ prop_client_server TextGen.genOperation
-- ]
tests :: TestTree
tests = testGroup "Control.OperationalTransformation.ClientServerTests" [
jsonTests,
text0Tests
]
-- Entry points: the full suite plus per-system suites for focused runs.
main, jsonMain, textMain :: IO ()
main = defaultMain tests
jsonMain = defaultMain jsonTests
textMain = defaultMain text0Tests
| thomasjm/ot.hs | test/Control/OperationalTransformation/ClientServerTests.hs | mit | 6,709 | 0 | 22 | 1,880 | 1,757 | 922 | 835 | 135 | 18 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Web.Spock
import Web.Spock.Config
import Control.Monad.Trans
import Data.Aeson hiding (json)
import Data.Monoid ((<>))
import Data.Text (Text, pack)
import GHC.Generics
import Lib
import Data.IORef
-- | JSON request body for the transcript-processing endpoint.
data Transcript = Transcript
  { t :: String
  } deriving (Generic, Show)
-- | JSON request body for registering a new replacement rule.
data NewReplaceCommand = NewReplaceCommand
  { pat :: String,
    replacement :: String
  } deriving (Generic, Show)
instance ToJSON Transcript
instance FromJSON Transcript
instance ToJSON NewReplaceCommand
instance FromJSON NewReplaceCommand
-- | Application state: a mutable list of replacement commands.
data AppState = DummyAppState (IORef [Command])
type Api = SpockM () () AppState ()
type ApiAction a = SpockAction () () AppState a
-- | Start the server on port 8080, seeding the shared state with the
-- predefined command set.
main :: IO ()
main =
  do ref <- newIORef predefinedCommands
     spockCfg <- defaultSpockCfg () PCNoDatabase (DummyAppState ref)
     runSpock 8080 (spock spockCfg app)
-- | Route definitions.
--
-- Fixes: the read-only path previously abused 'atomicModifyIORef'' as a
-- read (@\cs -> (cs, cs)@) — a plain 'readIORef' suffices; and the add
-- path computed the updated command list twice inside the atomic
-- function — it is now computed once via a shared 'let'.
app :: Api
app =
  do
    -- POST /: apply the current replacement commands to the transcript.
    post root $ do
      (DummyAppState ref) <- getState
      commands <- liftIO $ readIORef ref
      theTranscript <- jsonBody' :: ApiAction Transcript
      text (pack $ processTranscript commands (t theTranscript))
    -- POST /add: register a new replacement command.
    post "add" $ do
      theCommand <- jsonBody' :: ApiAction NewReplaceCommand
      (DummyAppState ref) <- getState
      _ <- liftIO $ atomicModifyIORef' ref $ \commands ->
        let commands' = addCustomReplaceCommand commands (pat theCommand, replacement theCommand)
        in (commands', commands')
      text "ok"
| muchas/voice-dot | backend/app/Main.hs | mit | 1,675 | 0 | 16 | 401 | 491 | 255 | 236 | 45 | 1 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.YarnServerCommonProtos.NodeActionProto (NodeActionProto(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
-- | Action the resource manager asks of a node.  (Generated by hprotoc;
-- regenerate from the .proto rather than editing by hand.)
data NodeActionProto = NORMAL
                     | RESYNC
                     | SHUTDOWN
                     deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
-- Protocol-buffers plumbing (generated).
instance P'.Mergeable NodeActionProto
instance Prelude'.Bounded NodeActionProto where
  minBound = NORMAL
  maxBound = SHUTDOWN
instance P'.Default NodeActionProto where
  defaultValue = NORMAL
-- | Total decoding of the wire tag; 'Prelude'.Nothing' for unknown tags.
toMaybe'Enum :: Prelude'.Int -> P'.Maybe NodeActionProto
toMaybe'Enum 0 = Prelude'.Just NORMAL
toMaybe'Enum 1 = Prelude'.Just RESYNC
toMaybe'Enum 2 = Prelude'.Just SHUTDOWN
toMaybe'Enum _ = Prelude'.Nothing
-- toEnum/succ/pred are partial by design in hprotoc-generated enums.
instance Prelude'.Enum NodeActionProto where
  fromEnum NORMAL = 0
  fromEnum RESYNC = 1
  fromEnum SHUTDOWN = 2
  toEnum
   = P'.fromMaybe
      (Prelude'.error "hprotoc generated code: toEnum failure for type Hadoop.Protos.YarnServerCommonProtos.NodeActionProto")
      . toMaybe'Enum
  succ NORMAL = RESYNC
  succ RESYNC = SHUTDOWN
  succ _ = Prelude'.error "hprotoc generated code: succ failure for type Hadoop.Protos.YarnServerCommonProtos.NodeActionProto"
  pred RESYNC = NORMAL
  pred SHUTDOWN = RESYNC
  pred _ = Prelude'.error "hprotoc generated code: pred failure for type Hadoop.Protos.YarnServerCommonProtos.NodeActionProto"
-- Wire encoding delegates to the Int representation; 14 is the protobuf
-- field type for enums.
instance P'.Wire NodeActionProto where
  wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
  wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
  wireGet 14 = P'.wireGetEnum toMaybe'Enum
  wireGet ft' = P'.wireGetErr ft'
  wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
  wireGetPacked ft' = P'.wireGetErr ft'
-- Remaining protocol-buffers reflection plumbing (generated).
instance P'.GPB NodeActionProto
instance P'.MessageAPI msg' (msg' -> NodeActionProto) NodeActionProto where
  getVal m' f' = f' m'
instance P'.ReflectEnum NodeActionProto where
  reflectEnum = [(0, "NORMAL", NORMAL), (1, "RESYNC", RESYNC), (2, "SHUTDOWN", SHUTDOWN)]
  reflectEnumInfo _
   = P'.EnumInfo
      (P'.makePNF (P'.pack ".hadoop.yarn.NodeActionProto") ["Hadoop", "Protos"] ["YarnServerCommonProtos"] "NodeActionProto")
      ["Hadoop", "Protos", "YarnServerCommonProtos", "NodeActionProto.hs"]
      [(0, "NORMAL"), (1, "RESYNC"), (2, "SHUTDOWN")]
instance P'.TextType NodeActionProto where
  tellT = P'.tellShow
  getT = P'.getRead
import Data.List.Split
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Either
-- | Compass headings, listed in clockwise order.
data Direction
  = N
  | E
  | S
  | W
  deriving (Show, Ord, Eq)

-- | Lookup table mapping (current heading, turn character) to the new
-- heading: 'L' rotates counter-clockwise, 'R' clockwise.
turnMap =
  Map.fromList
    [ entry
    | (d, left, right) <- [(N, W, E), (E, N, S), (S, E, W), (W, S, N)]
    , entry <- [((d, 'L'), left), ((d, 'R'), right)]
    ]
-- | Turn first, then walk @numSteps@ grid cells; returns the new
-- heading together with every intermediate point stepped on (in order),
-- so the caller can detect the first revisited cell mid-leg.
--
-- Coordinate convention: N/S change the x component and E/W the y
-- component. The guards have no catch-all, which is safe because
-- turnMap only ever yields one of N/E/S/W.
move facing x y turn numSteps
  | facing' == N =
      ( facing'
      , [ (x + i, y)
        | i <- [1 .. numSteps] ])
  | facing' == S =
      ( facing'
      , [ (x - i, y)
        | i <- [1 .. numSteps] ])
  | facing' == E =
      ( facing'
      , [ (x, y + i)
        | i <- [1 .. numSteps] ])
  | facing' == W =
      ( facing'
      , [ (x, y - i)
        | i <- [1 .. numSteps] ])
  where
    facing' = turnMap Map.! (facing, turn)

-- | Execute the instruction list, threading heading, position and the
-- set of visited points; returns the first position visited twice.
--
-- NOTE(review): partial -- there is no clause for an empty instruction
-- list, so if the walk never revisits a point this fails with a
-- pattern-match error. Acceptable here: the puzzle input guarantees a
-- revisit.
followMap facing (x, y) visited ((turn, numSteps):ss) = respond updated
  where
    (facing', path) = move facing x y turn numSteps
    updated = updateVisited visited path
    -- Left: no repeat on this leg, continue from the leg's endpoint.
    respond (Left visited') = followMap facing' (last path) visited' ss
    -- Right: found the first repeated position, we are done.
    respond (Right pos) = pos
-- | Fold a path into the visited set one point at a time.
-- Short-circuits with @Right point@ at the first point that was already
-- present; otherwise returns @Left@ with the fully extended set.
updateVisited seen [] = Left seen
updateVisited seen (point:rest)
  | point `Set.member` seen = Right point
  | otherwise = updateVisited (point `Set.insert` seen) rest
-- | Parse one instruction such as \"R2\" or \"L13\" into the turn
-- character and the step count.
--
-- An empty instruction now raises a descriptive error instead of an
-- anonymous pattern-match failure (the signature cannot express Maybe
-- without changing every caller).
parseStep :: String -> (Char, Integer)
parseStep (turn:numSteps) = (turn, read numSteps)
parseStep [] = error "parseStep: empty instruction"
-- | Advent of Code 2016, day 1 part 2: read the comma-separated
-- instruction line from stdin, walk it from the origin facing north,
-- and print the taxicab distance of the first location visited twice.
main = do
  line <- getLine
  let steps = map parseStep $ splitOn ", " line
  -- The origin itself counts as visited from the start.
  let (x, y) = followMap N (0, 0) (Set.singleton (0, 0)) steps
  print $ abs x + abs y
| lzlarryli/advent_of_code_2016 | day1/part2.hs | mit | 1,487 | 8 | 14 | 415 | 749 | 403 | 346 | 51 | 2 |
module Actions where
import Graphics.SpriteKit
import GameState
-- | The bump used to lift Lambda on key press.
--
-- Zeroes the current velocity before queueing the upward impulse, so
-- repeated taps always give the same kick regardless of prior motion.
-- Nodes without a physics body are returned unchanged.
bumpAction :: LambdaNode -> TimeInterval -> LambdaNode
bumpAction sprite@Sprite{ nodePhysicsBody = Just body } _dt
  = sprite
    { nodePhysicsBody
        = Just body
            { bodyVelocity = vectorZero
            -- Impulse of 20 units straight up; Nothing = apply at the
            -- body's centre of mass (no torque).
            , bodyForcesAndImpulses = [ApplyImpulse (Vector 0 20) Nothing]
            }
    }
bumpAction node _dt = node
-- | Tilt Lambda in dependence on its vertical velocity vector.
--
-- Rotation is clamped to the interval [-1, 0.5] radians; falling
-- (dY < 0) tilts three times as fast as rising (factor 0.003 vs 0.001
-- per velocity unit). Nodes without a physics body pass through
-- unchanged.
tiltAction :: LambdaNode -> TimeInterval -> LambdaNode
tiltAction sprite@Sprite{ nodePhysicsBody = Just body } _dt
  = sprite
    -- max/min sandwich clamps zRotation into [-1, 0.5].
    { nodeZRotation = (-1) `max` zRotation `min` 0.5 }
  where
    zRotation = dY * (if dY < 0 then 0.003 else 0.001 )
    dY = vectorDy . bodyVelocity $ body
tiltAction node _dt = node
| mchakravarty/lazy-lambda | LazyLambda.hsproj/Actions.hs | mit | 853 | 0 | 13 | 225 | 217 | 123 | 94 | 18 | 2 |
{-# LANGUAGE KindSignatures #-}
module Graphics.Solidhs where
import Data.VectorSpace
-- Scalar and small fixed-size tuple aliases used by the CSG tree below.
type R = Float            -- scalar
type R3 = (R, R, R)       -- 3-component vector / point
type R4 = (R, R, R, R)    -- 4-component vector (used for Color)
type N = Int              -- integral count
type N3 = (Int, Int, Int) -- triangle as three point indices
-- | Abstract syntax of an OpenSCAD-style constructive-solid-geometry
-- tree. The commented-out alternatives mirror OpenSCAD features that
-- are not modelled (yet).
data CSG = Union [CSG]
         | Diff [CSG]
         | Intersection [CSG]
         | Sphere R R3 -- size, center
         | Cube R R3 -- size, center
         | Cylinder R R R R R3 N -- r, h, r1, r2, center, segments
         | Polyhedron [R3] [N3] N -- points, triangles, convexity
         -- | Hole', 'args': [], 'kwargs': []} ,
         -- | Part', 'args': [], 'kwargs': []} ,
         | Translate R3 [CSG] -- vector, block
         | Scale R3 [CSG] -- vector, block
         | Rotate R R3 [CSG] -- angle, vector, block
         | Mirror R3 [CSG] -- vector, block
         -- | Multmatrix', 'args': ['m'], 'kwargs': []},
         | Color R4
         | Minkowski [CSG]
         | Hull [CSG]
         | Render R [CSG]
         | LinearExtrude R R3 N -- height, center, convexity, twist, slices
         -- | RotateExtrude', 'args': [], 'kwargs': ['convexity', 'segments']} ,
         -- | Dxf_linear_extrude', 'args': ['file'], 'kwargs': ['layer', 'height', 'center', 'convexity', 'twist', 'slices']} ,
         -- | Projection', 'args': [], 'kwargs': ['cut']} ,
         | Surface String R3 N -- file, center, convexity
         | ECSG -- empty / sentinel node
         deriving Show
-- | Pairs the CSG nodes accumulated so far with a result value.
data CsgM a = CsgM {lst :: [CSG], val :: a}

-- | Project the final value out of a CsgM computation.
runCM :: CsgM a -> a
runCM = val
-- | Sequencing appends the newly produced value to the node list.
--
-- NOTE(review): this instance will not compile on GHC >= 7.10 without
-- accompanying Functor/Applicative instances, and bind discards the
-- list produced by @f@ (only its value is kept), so the monad laws do
-- not obviously hold -- TODO confirm the intended semantics.
instance Monad (CsgM) where
  csgm >>= f = CsgM (lst csgm ++ [evl]) evl
    where evl = (val.f.val) csgm
  return x = CsgM [] x
-- \x -> a ++ [f a]
-- CSG combinators spelled as arithmetic operators: (+) union,
-- (-) difference, (/) intersection. Operands that are already the same
-- node kind are flattened into one list.
--
-- NOTE(review): these shadow Prelude's (+), (-) and (/) while the
-- module neither hides the Prelude nor declares an export list, so any
-- numeric use of these operators in importing code becomes ambiguous
-- -- TODO confirm the module is meant to be imported qualified.
(+) (Union a) (Union b) = Union (b ++ a)
(+) (Union a) b = Union (b : a)
(+) a (Union b) = Union (a : b)
(+) a b = Union [a, b]
(-) (Diff a) (Diff b) = Diff (b ++ a)
(-) (Diff a) b = Diff (b : a)
(-) a (Diff b) = Diff (a : b)
(-) a b = Diff [a, b]
(/) (Intersection a) (Intersection b) = Intersection (b ++ a)
(/) (Intersection a) b = Intersection (b : a)
(/) a (Intersection b) = Intersection (a : b)
(/) a b = Intersection [a, b]
-- Convenience wrappers around the CSG constructors.
--
-- NOTE(review): sphere/cylinder/cube apply their constructors with the
-- wrong arity ('Sphere' expects size and center but is given @r3 csg@,
-- dropping @r@; 'Cylinder' needs six arguments). As written these
-- cannot type-check -- TODO confirm the intended CsgM-based design
-- (cf. the commented-out main below).
translate r3 = \csg -> Translate r3 csg
sphere r r3 = \csg -> Sphere r3 csg
cylinder r r3 = \csg -> Cylinder r3 csg
cube r r3 = \csg -> Cube r3 csg
--main = do
-- translate (1.0, 2.3, 0.0) $ runCM $ sphere 1.0 (0.1,0.3,0.5) >>= cube 0.4 (-4, 0.0, 2.4)
-- translate (1.0, 2.3, 0.0) $ do
-- sphere 1.0 (0.1,0.3,0.5)
-- cube 0.4 (-4, 0.0, 2.4)
--
| hacxman/solidhs | src/Graphics/Solidhs.hs | mit | 2,612 | 0 | 11 | 922 | 795 | 453 | 342 | 50 | 1 |
import Control.Exception
import Data.Maybe (fromMaybe)
import Landing.App (app)
import Landing.Cache (makeCache)
import Network.Wai.Handler.Warp (run)
import System.Environment (getEnv, lookupEnv)
import Text.Read (readMaybe)
-- | Read the listening port from the PORT environment variable,
-- defaulting to 3000 when it is unset or not a valid number.
--
-- The previous version ran lazy 'read' inside 'catch'; a malformed
-- PORT value only blew up later, when the Int was forced *outside* the
-- handler. 'lookupEnv' + 'readMaybe' is total and cannot throw.
getPort :: IO Int
getPort = do
  mPort <- lookupEnv "PORT"
  return $ fromMaybe 3000 (mPort >>= readMaybe)
main :: IO ()
main = do
  -- Fail fast at startup if the GitHub token is missing: getEnv throws
  -- when the variable is unset, and the result itself is not needed.
  _ <- getEnv "GITHUB_TOKEN"
  p <- getPort
  putStrLn $ "Listening on port " ++ show p
  cache <- makeCache
  run p $ app cache
| dennis84/landing-haskell | Main.hs | mit | 417 | 2 | 9 | 75 | 170 | 85 | 85 | 14 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Coinbase.Exchange.Types.Private where
import Control.Applicative
import Control.DeepSeq
import Control.Monad
import Data.Aeson.Casing
import Data.Aeson.Types
import Data.Char
import Data.Data
import Data.Hashable
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import Data.UUID
import Data.Word
import GHC.Generics
import Coinbase.Exchange.Types
import Coinbase.Exchange.Types.Core
-- Accounts

-- | Exchange account identifier (a UUID).
newtype AccountId = AccountId { unAccountId :: UUID }
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic, NFData, Hashable, FromJSON, ToJSON)

-- | Balance snapshot of a single-currency account.
data Account
    = Account
        { accId        :: AccountId
        , accBalance   :: CoinScientific
        , accHold      :: CoinScientific
        , accAvailable :: CoinScientific
        , accCurrency  :: CurrencyId
        }
    deriving (Show, Eq, Data, Typeable, Generic)

instance NFData Account
instance ToJSON Account where
    toJSON = genericToJSON coinbaseAesonOptions
instance FromJSON Account where
    parseJSON = genericParseJSON coinbaseAesonOptions

--

-- | Ledger entry identifier (a Word64).
newtype EntryId = EntryId { unEntryId :: Word64 }
    deriving (Eq, Ord, Num, Show, Read, Data, Typeable, Generic, NFData, Hashable, FromJSON, ToJSON)

-- | One row of an account's ledger.
data Entry
    = Entry
        { entryId        :: EntryId
        , entryCreatedAt :: UTCTime
        , entryAmount    :: CoinScientific
        , entryBalance   :: CoinScientific
        , entryType      :: EntryType
        , entryDetails   :: EntryDetails
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData Entry

-- Hand-written JSON instances mapping fields to snake_case keys.
instance ToJSON Entry where
    toJSON Entry{..} = object [ "id"         .= entryId
                              , "created_at" .= entryCreatedAt
                              , "amount"     .= entryAmount
                              , "balance"    .= entryBalance
                              , "type"       .= entryType
                              , "details"    .= entryDetails
                              ]

instance FromJSON Entry where
    parseJSON (Object m) = Entry
        <$> m .: "id"
        <*> m .: "created_at"
        <*> m .: "amount"
        <*> m .: "balance"
        <*> m .: "type"
        <*> m .: "details"
    parseJSON _ = mzero
-- | Kind of ledger entry; serialized as the lowercase constructor name
-- ("match" / "fee" / "transfer").
data EntryType
    = Match
    | Fee
    | Transfer
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

instance NFData EntryType
instance Hashable EntryType

instance ToJSON EntryType where
    toJSON = genericToJSON defaultOptions { constructorTagModifier = map toLower }
instance FromJSON EntryType where
    parseJSON = genericParseJSON defaultOptions { constructorTagModifier = map toLower }

-- | Optional cross-references from a ledger entry to its originating
-- order, trade and product.
data EntryDetails
    = EntryDetails
        { detailOrderId   :: Maybe OrderId
        , detailTradeId   :: Maybe TradeId
        , detailProductId :: Maybe ProductId
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData EntryDetails
instance ToJSON EntryDetails where
    toJSON = genericToJSON coinbaseAesonOptions
instance FromJSON EntryDetails where
    parseJSON = genericParseJSON coinbaseAesonOptions
--

-- | Hold identifier (a UUID).
newtype HoldId = HoldId { unHoldId :: UUID }
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic, NFData, Hashable, FromJSON, ToJSON)

-- | Funds placed on hold, referencing either the open order or the
-- pending transfer that caused the hold. The two constructors share
-- every field except the final reference.
data Hold
    = OrderHold
        { holdId        :: HoldId
        , holdAccountId :: AccountId
        , holdCreatedAt :: UTCTime
        , holdUpdatedAt :: UTCTime
        , holdAmount    :: CoinScientific
        , holdOrderRef  :: OrderId
        }
    | TransferHold
        { holdId          :: HoldId
        , holdAccountId   :: AccountId
        , holdCreatedAt   :: UTCTime
        , holdUpdatedAt   :: UTCTime
        , holdAmount      :: CoinScientific
        , holdTransferRef :: TransferId
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData Hold
instance ToJSON Hold where
    toJSON = genericToJSON coinbaseAesonOptions
instance FromJSON Hold where
    parseJSON = genericParseJSON coinbaseAesonOptions
-- Orders

-- | Time-in-force policy for an order. ("Contigency" [sic] is the
-- established spelling in this API; renaming would break callers.)
data OrderContigency
    = GoodTillCanceled
    | GoodTillTime
    | ImmediateOrCancel
    | FillOrKill
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

instance NFData OrderContigency
instance Hashable OrderContigency

-- Serialized as the exchange's abbreviations GTC/GTT/IOC/FOK.
instance ToJSON OrderContigency where
    toJSON GoodTillCanceled  = String "GTC"
    toJSON GoodTillTime      = String "GTT"
    toJSON ImmediateOrCancel = String "IOC"
    toJSON FillOrKill        = String "FOK"

instance FromJSON OrderContigency where
    parseJSON (String "GTC") = return GoodTillCanceled
    parseJSON (String "GTT") = return GoodTillTime
    parseJSON (String "IOC") = return ImmediateOrCancel
    parseJSON (String "FOK") = return FillOrKill
    parseJSON _ = mzero

-- | Unit for a GTT order's cancel-after horizon.
data OrderCancelAfter
    = Min
    | Hour
    | Day
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

instance NFData OrderCancelAfter
instance Hashable OrderCancelAfter

instance ToJSON OrderCancelAfter where
    toJSON Min  = String "min"
    toJSON Hour = String "hour"
    toJSON Day  = String "day"

instance FromJSON OrderCancelAfter where
    parseJSON (String "min")  = return Min
    parseJSON (String "hour") = return Hour
    parseJSON (String "day")  = return Day
    parseJSON _ = mzero

-- | Self-trade prevention policy; serialized as two-letter codes.
data SelfTrade
    = DecrementAndCancel
    | CancelOldest
    | CancelNewest
    | CancelBoth
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

instance NFData SelfTrade
instance Hashable SelfTrade

instance ToJSON SelfTrade where
    toJSON DecrementAndCancel = String "dc"
    toJSON CancelOldest       = String "co"
    toJSON CancelNewest       = String "cn"
    toJSON CancelBoth         = String "cb"

instance FromJSON SelfTrade where
    parseJSON (String "dc") = return DecrementAndCancel
    parseJSON (String "co") = return CancelOldest
    parseJSON (String "cn") = return CancelNewest
    parseJSON (String "cb") = return CancelBoth
    parseJSON _ = mzero
-- | Client-side order request: limit, market or stop flavour. All
-- three constructors share a common prefix (product, side, self-trade
-- policy, optional client order id).
data NewOrder
    = NewLimitOrder
        { noProductId :: ProductId
        , noSide      :: Side
        , noSelfTrade :: SelfTrade
        , noClientOid :: Maybe ClientOrderId
        ---
        , noPrice       :: Price
        , noSize        :: Size
        , noTimeInForce :: OrderContigency
        , noCancelAfter :: Maybe OrderCancelAfter
        , noPostOnly    :: Bool
        }
    | NewMarketOrder
        { noProductId :: ProductId
        , noSide      :: Side
        , noSelfTrade :: SelfTrade
        , noClientOid :: Maybe ClientOrderId
        ---
        -- Either an exact size, or (optional size, funds to spend).
        , noSizeAndOrFunds :: Either Size (Maybe Size, Cost)
        }
    | NewStopOrder
        { noProductId :: ProductId
        , noSide      :: Side
        , noSelfTrade :: SelfTrade
        , noClientOid :: Maybe ClientOrderId
        ---
        , noPrice          :: Price
        , noSizeAndOrFunds :: Either Size (Maybe Size, Cost)
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData NewOrder

-- Hand-rolled serialization: optional pieces (client id, cancel-after,
-- size/funds) are appended only when present.
instance ToJSON NewOrder where
    toJSON NewLimitOrder{..} = object
        ([ "type"          .= ("limit" :: Text)
         , "product_id"    .= noProductId
         , "side"          .= noSide
         , "stp"           .= noSelfTrade
         , "price"         .= noPrice
         , "size"          .= noSize
         , "time_in_force" .= noTimeInForce
         , "post_only"     .= noPostOnly
         ] ++ clientID ++ cancelAfter )
        where
            clientID = case noClientOid of
                Just cid -> [ "client_oid" .= cid ]
                Nothing  -> []
            cancelAfter = case noCancelAfter of
                Just time -> [ "cancel_after" .= time ]
                Nothing   -> []
    toJSON NewMarketOrder{..} = object
        ([ "type"       .= ("market" :: Text)
         , "product_id" .= noProductId
         , "side"       .= noSide
         , "stp"        .= noSelfTrade
         ] ++ clientID ++ size ++ funds )
        where
            clientID = case noClientOid of
                Just cid -> [ "client_oid" .= cid ]
                Nothing  -> []
            (size,funds) = case noSizeAndOrFunds of
                Left s -> (["size" .= s],[])
                Right (ms,f) -> case ms of
                    Nothing -> ( [] , ["funds" .= f] )
                    Just s' -> ( ["size" .= s'], ["funds" .= f] )
    toJSON NewStopOrder{..} = object
        ([ "type"       .= ("stop" :: Text)
         , "product_id" .= noProductId
         , "side"       .= noSide
         , "stp"        .= noSelfTrade
         , "price"      .= noPrice
         ] ++ clientID ++ size ++ funds )
        where
            clientID = case noClientOid of
                Just cid -> [ "client_oid" .= cid ]
                Nothing  -> []
            (size,funds) = case noSizeAndOrFunds of
                Left s -> (["size" .= s],[])
                Right (ms,f) -> case ms of
                    Nothing -> ( [] , ["funds" .= f] )
                    Just s' -> ( ["size" .= s'], ["funds" .= f] )

-- | Acknowledgement returned when a new order is accepted.
data OrderConfirmation
    = OrderConfirmation
        { ocId :: OrderId
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData OrderConfirmation
instance ToJSON OrderConfirmation where
    toJSON = genericToJSON coinbaseAesonOptions
instance FromJSON OrderConfirmation where
    parseJSON = genericParseJSON coinbaseAesonOptions
-- | Server-side view of an order. Mirrors 'NewOrder' but carries the
-- exchange-assigned id, status and fill/settlement bookkeeping.
data Order
    = LimitOrder
        { orderId          :: OrderId
        , orderProductId   :: ProductId
        , orderStatus      :: OrderStatus
        , orderSelfTrade   :: SelfTrade
        , orderSettled     :: Bool
        , orderSide        :: Side
        , orderCreatedAt   :: UTCTime
        , orderFilledSize  :: Maybe Size
        , orderFilledFees  :: Maybe Price
        , orderDoneAt      :: Maybe UTCTime
        , orderDoneReason  :: Maybe Reason
        , orderPrice       :: Price
        , orderSize        :: Size
        , orderTimeInForce :: OrderContigency
        , orderCancelAfter :: Maybe OrderCancelAfter
        , orderPostOnly    :: Bool
        }
    | MarketOrder
        { orderId         :: OrderId
        , orderProductId  :: ProductId
        , orderStatus     :: OrderStatus
        , orderSelfTrade  :: SelfTrade
        , orderSettled    :: Bool
        , orderSide       :: Side
        , orderCreatedAt  :: UTCTime
        , orderFilledSize :: Maybe Size
        , orderFilledFees :: Maybe Price
        , orderDoneAt     :: Maybe UTCTime
        , orderDoneReason :: Maybe Reason
        -- Either an exact size, or (optional size, funds).
        , orderSizeAndOrFunds :: Either Size (Maybe Size, Cost)
        }
    | StopOrder
        { orderId         :: OrderId
        , orderProductId  :: ProductId
        , orderStatus     :: OrderStatus
        , orderSelfTrade  :: SelfTrade
        , orderSettled    :: Bool
        , orderSide       :: Side
        , orderCreatedAt  :: UTCTime
        , orderFilledSize :: Maybe Size
        , orderFilledFees :: Maybe Price
        , orderDoneAt     :: Maybe UTCTime
        , orderDoneReason :: Maybe Reason
        , orderPrice      :: Price
        , orderSizeAndOrFunds :: Either Size (Maybe Size, Cost)
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData Order
-- | JSON serialization of 'Order'.
--
-- The @type@ tag must agree with what the 'FromJSON' instance parses:
-- \"limit\", \"market\" and \"stop\". The stop branch previously
-- emitted \"market\", which made stop orders re-parse as market orders
-- and silently drop their price on a round-trip.
instance ToJSON Order where
    toJSON LimitOrder{..} = object
        [ "type" .= ("limit" :: Text)
        , "id" .= orderId
        , "product_id" .= orderProductId
        , "status" .= orderStatus
        , "stp" .= orderSelfTrade
        , "settled" .= orderSettled
        , "side" .= orderSide
        , "created_at" .= orderCreatedAt
        , "filled_size" .= orderFilledSize
        , "filled_fees" .= orderFilledFees
        , "done_at" .= orderDoneAt
        , "done_reason" .= orderDoneReason
        , "price" .= orderPrice
        , "size" .= orderSize
        , "time_in_force" .= orderTimeInForce
        , "cancel_after" .= orderCancelAfter
        , "post_only" .= orderPostOnly
        ]
    toJSON MarketOrder{..} = object
        ([ "type" .= ("market" :: Text)
         , "id" .= orderId
         , "product_id" .= orderProductId
         , "status" .= orderStatus
         , "stp" .= orderSelfTrade
         , "settled" .= orderSettled
         , "side" .= orderSide
         , "created_at" .= orderCreatedAt
         , "filled_size" .= orderFilledSize
         , "filled_fees" .= orderFilledFees
         , "done_at" .= orderDoneAt
         , "done_reason" .= orderDoneReason
         ] ++ size ++ funds )
        where (size,funds) = case orderSizeAndOrFunds of
                Left s -> (["size" .= s],[])
                Right (ms,f) -> case ms of
                    Nothing -> ( [] , ["funds" .= f] )
                    Just s' -> ( ["size" .= s'], ["funds" .= f] )
    toJSON StopOrder{..} = object
        -- Was ("market" :: Text): encode/decode round-trip bug.
        ([ "type" .= ("stop" :: Text)
         , "id" .= orderId
         , "product_id" .= orderProductId
         , "status" .= orderStatus
         , "stp" .= orderSelfTrade
         , "settled" .= orderSettled
         , "side" .= orderSide
         , "created_at" .= orderCreatedAt
         , "filled_size" .= orderFilledSize
         , "filled_fees" .= orderFilledFees
         , "done_at" .= orderDoneAt
         , "done_reason" .= orderDoneReason
         , "price" .= orderPrice
         ] ++ size ++ funds )
        where (size,funds) = case orderSizeAndOrFunds of
                Left s -> (["size" .= s],[])
                Right (ms,f) -> case ms of
                    Nothing -> ( [] , ["funds" .= f] )
                    Just s' -> ( ["size" .= s'], ["funds" .= f] )
-- | Dispatch on the "type" tag. Note the limit branch tolerates a
-- missing "stp" (defaults to DecrementAndCancel) and a missing
-- "time_in_force" (defaults to GoodTillCanceled, since older orders
-- lack the field), whereas the market and stop branches require "stp".
instance FromJSON Order where
    parseJSON (Object m) = do
        ordertype <- m .: "type"
        case (ordertype :: String) of
            "limit" -> LimitOrder
                <$> m .:  "id"
                <*> m .:  "product_id"
                <*> m .:  "status"
                <*> m .:? "stp" .!= DecrementAndCancel
                <*> m .:  "settled"
                <*> m .:  "side"
                <*> m .:  "created_at"
                <*> m .:? "filled_size"
                <*> m .:? "filled_fees"
                <*> m .:? "done_at"
                <*> m .:? "done_reason"
                <*> m .:  "price"
                <*> m .:  "size"
                <*> m .:? "time_in_force" .!= GoodTillCanceled -- older orders don't seem to have this field
                <*> m .:? "cancel_after"
                <*> m .:  "post_only"
            "market" -> MarketOrder
                <$> m .:  "id"
                <*> m .:  "product_id"
                <*> m .:  "status"
                <*> m .:  "stp"
                <*> m .:  "settled"
                <*> m .:  "side"
                <*> m .:  "created_at"
                <*> m .:? "filled_size"
                <*> m .:? "filled_fees"
                <*> m .:? "done_at"
                <*> m .:? "done_reason"
                -- size/funds: at least one must be present.
                <*> (do
                        ms <- m .:? "size"
                        mf <- m .:? "funds"
                        case (ms,mf) of
                            (Nothing, Nothing) -> mzero
                            (Just s , Nothing) -> return $ Left s
                            (Nothing, Just f ) -> return $ Right (Nothing, f)
                            (Just s , Just f ) -> return $ Right (Just s , f)
                    )
            "stop" -> StopOrder
                <$> m .:  "id"
                <*> m .:  "product_id"
                <*> m .:  "status"
                <*> m .:  "stp"
                <*> m .:  "settled"
                <*> m .:  "side"
                <*> m .:  "created_at"
                <*> m .:? "filled_size"
                <*> m .:? "filled_fees"
                <*> m .:? "done_at"
                <*> m .:? "done_reason"
                <*> m .:  "price"
                -- size/funds: at least one must be present.
                <*> (do
                        ms <- m .:? "size"
                        mf <- m .:? "funds"
                        case (ms,mf) of
                            (Nothing, Nothing) -> mzero
                            (Just s , Nothing) -> return $ Left s
                            (Nothing, Just f ) -> return $ Right (Nothing, f)
                            (Just s , Just f ) -> return $ Right (Just s , f)
                    )
            _ -> mzero
    parseJSON _ = mzero
-- Fills

-- | Whether a fill added liquidity (maker) or removed it (taker);
-- serialized as "M" / "T".
data Liquidity
    = Maker
    | Taker
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

instance NFData Liquidity
instance Hashable Liquidity

instance ToJSON Liquidity where
    toJSON Maker = String "M"
    toJSON Taker = String "T"

instance FromJSON Liquidity where
    parseJSON (String "M") = return Maker
    parseJSON (String "T") = return Taker
    parseJSON _ = mzero

-- | One execution (partial or complete) of an order.
data Fill
    = Fill
        { fillTradeId   :: TradeId
        , fillProductId :: ProductId
        , fillPrice     :: Price
        , fillSize      :: Size
        , fillOrderId   :: OrderId
        , fillCreatedAt :: UTCTime
        , fillLiquidity :: Liquidity
        , fillFee       :: Price
        , fillSettled   :: Bool
        , fillSide      :: Side
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData Fill

-- Hand-written JSON instances mapping fields to snake_case keys.
instance ToJSON Fill where
    toJSON Fill{..} = object
        [ "trade_id"   .= fillTradeId
        , "product_id" .= fillProductId
        , "price"      .= fillPrice
        , "size"       .= fillSize
        , "order_id"   .= fillOrderId
        , "created_at" .= fillCreatedAt
        , "liquidity"  .= fillLiquidity
        , "fee"        .= fillFee
        , "settled"    .= fillSettled
        , "side"       .= fillSide
        ]

instance FromJSON Fill where
    parseJSON (Object m) = Fill
        <$> m .: "trade_id"
        <*> m .: "product_id"
        <*> m .: "price"
        <*> m .: "size"
        <*> m .: "order_id"
        <*> m .: "created_at"
        <*> m .: "liquidity"
        <*> m .: "fee"
        <*> m .: "settled"
        <*> m .: "side"
    parseJSON _ = mzero
-- Transfers

-- | Transfer identifier (a UUID).
newtype TransferId = TransferId { unTransferId :: UUID }
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic, NFData, FromJSON, ToJSON)

-- | Identifier of a (legacy) Coinbase wallet account (a UUID).
newtype CoinbaseAccountId = CoinbaseAccountId { unCoinbaseAccountId :: UUID }
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic, NFData, FromJSON, ToJSON)

-- | Move funds between the exchange and a Coinbase wallet, in either
-- direction. Both constructors carry identical payloads.
data TransferToCoinbase
    = Deposit
        { trAmount            :: Size
        , trCoinbaseAccountId :: CoinbaseAccountId
        }
    | Withdraw
        { trAmount            :: Size
        , trCoinbaseAccountId :: CoinbaseAccountId
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData TransferToCoinbase
instance ToJSON TransferToCoinbase where
    toJSON = genericToJSON coinbaseAesonOptions

-- | Withdrawal of crypto funds to an external wallet address.
data CryptoWithdrawal
    = Withdrawal
        { wdAmount        :: Size
        , wdCurrency      :: CurrencyId
        , wdCryptoAddress :: CryptoWallet
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData CryptoWithdrawal
instance ToJSON CryptoWithdrawal where
    toJSON = genericToJSON coinbaseAesonOptions
instance FromJSON CryptoWithdrawal where
    parseJSON = genericParseJSON coinbaseAesonOptions

---------------------------

-- | Response to a 'TransferToCoinbase' request.
data TransferToCoinbaseResponse
    = TransferResponse
        { trId :: TransferId
        -- FIX ME! and other stuff I'm going to ignore.
        } deriving (Eq, Show, Generic, Typeable)

instance NFData TransferToCoinbaseResponse
instance FromJSON TransferToCoinbaseResponse where
    parseJSON (Object m) = TransferResponse
        <$> m .: "id"
    parseJSON _ = mzero

-- | Response to a 'CryptoWithdrawal' request.
data CryptoWithdrawalResp
    = WithdrawalResp
        { wdrId       :: TransferId
        , wdrAmount   :: Size
        , wdrCurrency :: CurrencyId
        } deriving (Eq, Show, Generic, Typeable)

instance NFData CryptoWithdrawalResp
instance ToJSON CryptoWithdrawalResp where
    toJSON = genericToJSON coinbaseAesonOptions
instance FromJSON CryptoWithdrawalResp where
    parseJSON = genericParseJSON coinbaseAesonOptions
---------------------------

-- | External wallet address for crypto withdrawals. Currently only
-- bitcoin is modelled.
data CryptoWallet
    = BTCWallet BitcoinWallet deriving (Show, Data, Typeable, Generic)
-- | To Do: add other...
-- | ... possibilities here later

instance NFData CryptoWallet
instance ToJSON CryptoWallet where
    toJSON = genericToJSON coinbaseAesonOptions
instance FromJSON CryptoWallet where
    parseJSON = genericParseJSON coinbaseAesonOptions

-- | A raw bitcoin address string (no checksum validation is done here).
newtype BitcoinWallet = FromBTCAddress { btcAddress :: String }
    deriving (Show, Data, Typeable, Generic, ToJSON, FromJSON)

instance NFData BitcoinWallet

-- | Request to send bitcoin from a Coinbase wallet to an address.
data BTCTransferReq
    = SendBitcoin
        { sendAmount    :: Size
        , bitcoinWallet :: BitcoinWallet
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData BTCTransferReq

-- Serialized in the shape the Coinbase wallet "send money" endpoint
-- expects; currency is fixed to BTC.
instance ToJSON BTCTransferReq where
    toJSON SendBitcoin {..} = object
        [ "type"     .= ("send" :: Text)
        , "currency" .= ("BTC" :: Text)
        , "to"       .= btcAddress bitcoinWallet
        , "amount"   .= sendAmount
        ]

---------------------------

-- | Identifier of a wallet "send" transaction (a UUID).
newtype BTCTransferId = BTCTransferId { getBtcTransferId :: UUID }
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic, NFData, FromJSON, ToJSON)

-- | Response to a 'BTCTransferReq'.
data BTCTransferResponse = BTCTransferResponse
    { sendId :: BTCTransferId
    -- FIX ME! and other stuff I'm going to ignore.
    } deriving (Eq, Data, Show, Generic, Typeable)

instance NFData BTCTransferResponse

-- The wallet API wraps its payload in a "data" envelope; unwrap before
-- reading the id.
instance FromJSON BTCTransferResponse where
    parseJSON (Object m) = do
        transferData <- m .:? "data" -- FIX ME! I should factor this out of all responses from Coinbase
        case transferData of
            Nothing -> mzero
            Just da -> BTCTransferResponse <$> da .: "id"
    parseJSON _ = mzero
---------------------------

-- | A (legacy) Coinbase wallet account, as reported by the wallet API.
data CoinbaseAccount =
    CoinbaseAccount
        { cbAccID       :: CoinbaseAccountId
        , resourcePath  :: String
        , primary       :: Bool
        , name          :: String
        , btcBalance    :: Size
        }
    deriving (Show, Data, Typeable, Generic)

-- The wallet API wraps its payload in a "data" envelope, and nests the
-- balance as an object whose "amount" field is the size we keep.
instance FromJSON CoinbaseAccount where
    parseJSON (Object m) = do
        transferData <- m .:? "data" -- FIX ME! I should factor this out of all responses from Coinbase
        case transferData of
            Nothing -> mzero
            Just da -> CoinbaseAccount
                <$> da .: "id"
                <*> da .: "resource_path"
                <*> da .: "primary"
                <*> da .: "name"
                <*> (do
                        btcBalance <- da .: "balance"
                        case btcBalance of
                            Object b -> b .: "amount"
                            _        -> mzero
                    )
    parseJSON _ = mzero
-- Reports

-- | Report identifier (a UUID).
newtype ReportId = ReportId { unReportId :: UUID }
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic, NFData, FromJSON, ToJSON)

-- | What the report covers; serialized as "fills" / "account".
data ReportType
    = FillsReport
    | AccountReport
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

instance NFData ReportType
instance Hashable ReportType

instance ToJSON ReportType where
    toJSON FillsReport   = String "fills"
    toJSON AccountReport = String "account"

instance FromJSON ReportType where
    parseJSON (String "fills")   = return FillsReport
    parseJSON (String "account") = return AccountReport
    parseJSON _ = mzero

-- | Output format; serialized as "pdf" / "csv".
data ReportFormat
    = PDF
    | CSV
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

instance NFData ReportFormat
instance Hashable ReportFormat

instance ToJSON ReportFormat where
    toJSON PDF = String "pdf"
    toJSON CSV = String "csv"

instance FromJSON ReportFormat where
    parseJSON (String "pdf") = return PDF
    parseJSON (String "csv") = return CSV
    parseJSON _ = mzero

-- | Request body for generating a report (analgous to Transfer or
-- NewOrder elsewhere in this module).
data ReportRequest
    = ReportRequest
        { rrqType      :: ReportType
        , rrqStartDate :: UTCTime
        , rrqEndDate   :: UTCTime
        , rrqProductId :: ProductId
        , rrqAccountId :: AccountId
        , rrqFormat    :: ReportFormat
        , rrqEmail     :: Maybe String
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData ReportRequest
instance ToJSON ReportRequest where
    toJSON = genericToJSON coinbaseAesonOptions
instance FromJSON ReportRequest where
    parseJSON = genericParseJSON coinbaseAesonOptions

-- | The date window a report covers (echoed back by the API).
data ReportParams
    = ReportParams
        { reportStartDate :: UTCTime
        , reportEndDate   :: UTCTime
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData ReportParams

instance ToJSON ReportParams where
    toJSON ReportParams{..} = object
        ([ "start_date" .= reportStartDate
         , "end_date"   .= reportEndDate
         ])

instance FromJSON ReportParams where
    parseJSON (Object m) = ReportParams
        <$> m .: "start_date"
        <*> m .: "end_date"
    parseJSON _ = mzero
-- | Lifecycle state of a report; serialized as
-- "pending" / "creating" / "ready".
data ReportStatus
    = ReportPending
    | ReportCreating
    | ReportReady
    deriving (Eq, Ord, Show, Read, Data, Typeable, Generic)

instance NFData ReportStatus
instance Hashable ReportStatus

instance ToJSON ReportStatus where
    toJSON ReportPending  = String "pending"
    toJSON ReportCreating = String "creating"
    toJSON ReportReady    = String "ready"

instance FromJSON ReportStatus where
    parseJSON (String "pending")  = return ReportPending
    parseJSON (String "creating") = return ReportCreating
    parseJSON (String "ready")    = return ReportReady
    parseJSON _ = mzero

-- | Status record for a requested report; timestamps and the download
-- url only appear once the corresponding lifecycle stage is reached,
-- hence the Maybes.
data ReportInfo
    = ReportInfo
        { reportId        :: ReportId
        , reportType      :: ReportType
        , reportStatus    :: ReportStatus
        , reportCreated   :: Maybe UTCTime
        , reportCompleted :: Maybe UTCTime
        , reportExpires   :: Maybe UTCTime
        , reportUrl       :: Maybe String
        , reportParams    :: Maybe ReportParams
        }
    deriving (Show, Data, Typeable, Generic)

instance NFData ReportInfo

instance ToJSON ReportInfo where
    toJSON ReportInfo{..} = object
        ([ "id"           .= reportId
         , "type"         .= reportType
         , "status"       .= reportStatus
         , "created_at"   .= reportCreated
         , "completed_at" .= reportCompleted
         , "expires_at"   .= reportExpires
         , "file_url"     .= reportUrl
         , "params"       .= reportParams
         ])

instance FromJSON ReportInfo where
    parseJSON (Object m) = ReportInfo
        <$> m .:  "id"
        <*> m .:  "type"
        <*> m .:  "status"
        <*> m .:? "created_at"
        <*> m .:? "completed_at"
        <*> m .:? "expires_at"
        <*> m .:? "file_url"
        <*> m .:? "params"
    parseJSON _ = mzero
| dimitri-xyz/coinbase | src/Coinbase/Exchange/Types/Private.hs | mit | 27,748 | 0 | 44 | 10,175 | 6,546 | 3,521 | 3,025 | 678 | 0 |
{-# htermination keysFM_LE :: FiniteMap Float b -> Float -> [Float] #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_keysFM_LE_6.hs | mit | 89 | 0 | 3 | 14 | 5 | 3 | 2 | 1 | 0 |
import Closure
import Permutation
-- | Print, one per line in cycle notation, every permutation generated
-- by the transpositions (1 i) for i in [2..5] under composition.
-- NOTE(review): 'closure2A', 'compose', 'transposition' and
-- 'showCycles' come from the local Closure/Permutation modules --
-- presumably closure2A takes the closure under a binary operation;
-- confirm against those modules.
main :: IO ()
main = mapM_ (putStrLn . showCycles) $ closure2A compose
                                     $ map (transposition 1) [2..5]
| jwodder/groups | haskell/tests/symm.hs | mit | 138 | 3 | 8 | 24 | 64 | 30 | 34 | 5 | 1 |
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, DeriveGeneric, ForeignFunctionInterface #-}
-- |
-- Module : Crypto.Saltine.Internal.AEAD.ChaCha20Poly1305
-- Copyright : (c) Max Amanshauser 2021
-- License : MIT
--
-- Maintainer : max@lambdalifting.org
-- Stability : experimental
-- Portability : non-portable
--
module Crypto.Saltine.Internal.AEAD.ChaCha20Poly1305 (
aead_chacha20poly1305_keybytes
, aead_chacha20poly1305_npubbytes
, aead_chacha20poly1305_abytes
, c_aead
, c_aead_open
, c_aead_detached
, c_aead_open_detached
, Key(..)
, Nonce(..)
) where
import Control.DeepSeq
import Crypto.Saltine.Class
import Crypto.Saltine.Core.Hash (shorthash)
import Crypto.Saltine.Internal.Hash (nullShKey)
import Crypto.Saltine.Internal.Util as U
import Data.ByteString (ByteString)
import Data.Data (Data, Typeable)
import Data.Hashable (Hashable)
import Data.Monoid
import Foreign.C
import Foreign.Ptr
import GHC.Generics (Generic)
import qualified Data.ByteString as S
-- | An opaque 'ChaCha20Poly1305' cryptographic key.
newtype Key = Key { unKey :: ByteString } deriving (Ord, Hashable, Data, Typeable, Generic, NFData)

-- Equality goes through U.compare rather than bytestring (==) --
-- presumably saltine's timing-safe comparison; see
-- Crypto.Saltine.Internal.Util.
instance Eq Key where
    Key a == Key b = U.compare a b

-- Never reveals the key material, only a short hash of it. The format
-- string previously closed the quote after the brace, yielding
-- mismatched output like {hashesTo = "xxxx}" -- fixed to "\"}".
instance Show Key where
    show k = "AEAD.ChaCha20Poly1305.Key {hashesTo = \"" <> (bin2hex . shorthash nullShKey $ encode k) <> "\"}"

-- Length-checked conversion to/from raw bytes.
instance IsEncoding Key where
  decode v = if S.length v == aead_chacha20poly1305_keybytes
           then Just (Key v)
           else Nothing
  {-# INLINE decode #-}
  encode (Key v) = v
  {-# INLINE encode #-}
-- | An opaque 'ChaCha20Poly1305' nonce.
newtype Nonce = Nonce { unNonce :: ByteString } deriving (Eq, Ord, Hashable, Data, Typeable, Generic, NFData)

-- Nonces are not secret, so plain hex output is acceptable here
-- (unlike Key's hashed Show).
instance Show Nonce where
    show k = "AEAD.ChaCha20Poly1305.Nonce " <> bin2hex (encode k)

-- Length-checked conversion to/from raw bytes.
instance IsEncoding Nonce where
  decode v = if S.length v == aead_chacha20poly1305_npubbytes
           then Just (Nonce v)
           else Nothing
  {-# INLINE decode #-}
  encode (Nonce v) = v
  {-# INLINE encode #-}

-- 'zero' is the all-zero nonce; 'nudge' derives the next nonce in a
-- sequence by perturbing the bytes via nudgeBS (see Internal.Util).
instance IsNonce Nonce where
  zero            = Nonce (S.replicate aead_chacha20poly1305_npubbytes 0)
  nudge (Nonce n) = Nonce (nudgeBS n)
aead_chacha20poly1305_keybytes, aead_chacha20poly1305_abytes, aead_chacha20poly1305_npubbytes :: Int
-- | Size of a ChaCha20-Poly1305 key
aead_chacha20poly1305_keybytes  = fromIntegral c_crypto_aead_chacha20poly1305_keybytes
-- | Size of a ChaCha20-Poly1305 nonce
aead_chacha20poly1305_npubbytes = fromIntegral c_crypto_aead_chacha20poly1305_npubbytes
-- | Size of a ChaCha20-Poly1305 authentication tag
aead_chacha20poly1305_abytes    = fromIntegral c_crypto_aead_chacha20poly1305_abytes

-- libsodium sources for these bindings:
-- src/libsodium/crypto_aead/chacha20poly1305/sodium/aead_chacha20poly1305.c
-- src/libsodium/include/sodium/crypto_aead_chacha20poly1305.h
foreign import ccall "crypto_aead_chacha20poly1305_keybytes"
  c_crypto_aead_chacha20poly1305_keybytes :: CSize
foreign import ccall "crypto_aead_chacha20poly1305_npubbytes"
  c_crypto_aead_chacha20poly1305_npubbytes:: CSize
foreign import ccall "crypto_aead_chacha20poly1305_abytes"
  c_crypto_aead_chacha20poly1305_abytes :: CSize
-- | The aead C API uses C strings. Always returns 0.
-- Combined-mode encryption: ciphertext and tag are written into one
-- output buffer (message length + abytes).
foreign import ccall "crypto_aead_chacha20poly1305_encrypt"
  c_aead
    :: Ptr CChar
    -- ^ Cipher output buffer
    -> Ptr CULLong
    -- ^ Cipher output bytes used
    -> Ptr CChar
    -- ^ Constant message input buffer
    -> CULLong
    -- ^ Length of message input buffer
    -> Ptr CChar
    -- ^ Constant aad input buffer
    -> CULLong
    -- ^ Length of aad input buffer
    -> Ptr CChar
    -- ^ Unused 'nsec' value (must be NULL)
    -> Ptr CChar
    -- ^ Constant nonce buffer
    -> Ptr CChar
    -- ^ Constant key buffer
    -> IO CInt

-- | The aead open C API uses C strings. Returns 0 if successful.
-- Combined-mode decryption; a nonzero result means verification failed
-- and the output buffer must be discarded.
foreign import ccall "crypto_aead_chacha20poly1305_decrypt"
  c_aead_open
    :: Ptr CChar
    -- ^ Message output buffer
    -> Ptr CULLong
    -- ^ Message output bytes used
    -> Ptr CChar
    -- ^ Unused 'nsec' value (must be NULL)
    -> Ptr CChar
    -- ^ Constant ciphertext input buffer
    -> CULLong
    -- ^ Length of ciphertext input buffer
    -> Ptr CChar
    -- ^ Constant aad input buffer
    -> CULLong
    -- ^ Length of aad input buffer
    -> Ptr CChar
    -- ^ Constant nonce buffer
    -> Ptr CChar
    -- ^ Constant key buffer
    -> IO CInt
-- | The aead C API uses C strings. Always returns 0.
-- Detached-mode encryption: the authentication tag is written to its
-- own buffer instead of being appended to the ciphertext.
foreign import ccall "crypto_aead_chacha20poly1305_encrypt_detached"
  c_aead_detached
    :: Ptr CChar
    -- ^ Cipher output buffer
    -> Ptr CChar
    -- ^ Tag output buffer
    -> Ptr CULLong
    -- ^ Tag bytes used
    -> Ptr CChar
    -- ^ Constant message input buffer
    -> CULLong
    -- ^ Length of message input buffer
    -> Ptr CChar
    -- ^ Constant aad input buffer
    -> CULLong
    -- ^ Length of aad input buffer
    -> Ptr CChar
    -- ^ Unused 'nsec' value (must be NULL)
    -> Ptr CChar
    -- ^ Constant nonce buffer
    -> Ptr CChar
    -- ^ Constant key buffer
    -> IO CInt

-- | The aead open C API uses C strings. Returns 0 if successful.
-- Detached-mode decryption: the tag is supplied separately; a nonzero
-- result means verification failed.
foreign import ccall "crypto_aead_chacha20poly1305_decrypt_detached"
  c_aead_open_detached
    :: Ptr CChar
    -- ^ Message output buffer
    -> Ptr CChar
    -- ^ Unused 'nsec' value (must be NULL)
    -> Ptr CChar
    -- ^ Constant ciphertext input buffer
    -> CULLong
    -- ^ Length of ciphertext input buffer
    -> Ptr CChar
    -- ^ Constant tag input buffer
    -> Ptr CChar
    -- ^ Constant aad input buffer
    -> CULLong
    -- ^ Length of aad input buffer
    -> Ptr CChar
    -- ^ Constant nonce buffer
    -> Ptr CChar
    -- ^ Constant key buffer
    -> IO CInt
| tel/saltine | src/Crypto/Saltine/Internal/AEAD/ChaCha20Poly1305.hs | mit | 5,779 | 0 | 16 | 1,286 | 929 | 516 | 413 | 108 | 1 |
-- Copyright (c) 2015-16 Nicola Bonelli <nicola@pfq.io>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
module Lang.FDescr
(
compile
) where
import Network.PFQ.Lang as Q
import Options
-- | Serialise a pfq-lang computation (starting at index 0) and render the
-- resulting function descriptor as a 'String'.
compile :: (Monad m) => Q.Function (Qbuff -> Action Qbuff) -> OptionT m String
compile comp =
    let (descr, _) = Q.serialize comp 0
    in  return (show descr)
| pfq/PFQ | user/pfq-lang/src/Lang/FDescr.hs | gpl-2.0 | 999 | 0 | 10 | 181 | 105 | 64 | 41 | 7 | 1 |
{-# language TemplateHaskell #-}
{-# language OverloadedStrings #-}
{-# language QuasiQuotes #-}
{-# language TypeFamilies #-}
import Yesod
import Text.Blaze.Renderer.String (renderHtml)
import Text.Blaze
import Text.Blaze.Html4.Strict hiding (map)
import Network.Wai
data This = This
mkYesod "This" [parseRoutes|
/ TopR GET
|]
instance Yesod This
-- | Echo handler: renders the raw WAI request's method and headers in the
-- default layout.
getTopR :: Handler Html
getTopR = do
  -- Grab the underlying WAI request for this Yesod handler.
  req <- waiRequest
  defaultLayout [whamlet|
    requestMethod = #{show $ requestMethod req}
    requestHeaders = #{show $ requestHeaders req}
    |]
main = warpDebug 3000 This
| marcellussiegburg/autotool | shib/echo.hs | gpl-2.0 | 588 | 0 | 7 | 114 | 109 | 64 | 45 | 17 | 1 |
{-
This file is part of Fronsduk.
Fronsduk is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Fronsduk is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Fronsduk. If not, see <http://www.gnu.org/licenses/>.
-}
module Assembler where
import Machine
import Bytecode
import Text.ParserCombinators.Parsec
import qualified Text.ParserCombinators.Parsec.Token as P
import Text.ParserCombinators.Parsec.Language (emptyDef)
import qualified Data.ByteString.Lazy as BL
import Data.Binary.Put
-- | Token parser for the assembly syntax: @;@ starts a line comment and
-- reserved names are matched case-insensitively.
lexer :: P.TokenParser ()
lexer = P.makeTokenParser
        (emptyDef { P.commentLine = ";"
                  , P.caseSensitive = False
                  })
-- Lexeme parsers derived from 'lexer'; each consumes trailing whitespace
-- and comments after the token it matches.
whiteSpace = P.whiteSpace lexer
natural = P.natural lexer
parens = P.parens lexer
-- reserved = P.reserved lexer
identifier = P.identifier lexer
-- | A program is one or more expressions (see 'expr'): numeric literals,
-- operator mnemonics, or parenthesised sub-programs.
control :: Parser Control
control = many1 expr
-- | Map an assembly mnemonic to its machine 'Operator'.
--
-- Matching is exact on the spellings below.  Any other identifier is an
-- error in the assembly source; it aborts with a message naming the bad
-- mnemonic instead of an opaque non-exhaustive-pattern crash.
getOperator :: String -> Operator
getOperator "Ld" = Ld
getOperator "Ldc" = Ldc
getOperator "Ldf" = Ldf
getOperator "Nil" = Nil
getOperator "Dum" = Dum
getOperator "Ap" = Ap
getOperator "Rap" = Rap
getOperator "Rtn" = Rtn
getOperator "Cons" = Cons
getOperator "Car" = Car
getOperator "Cdr" = Cdr
getOperator "Sel" = Sel
getOperator "Join" = Join
getOperator "Plus" = Plus
getOperator "Minus" = Minus
getOperator "Times" = Times
getOperator "Divide" = Divide
getOperator "Eq" = Eq
getOperator "Cmp" = Cmp
getOperator "And" = And
getOperator "Or" = Or
getOperator "Not" = Not
getOperator "Print" = Print
getOperator "Read" = Read
getOperator other =
  error $ "Assembler.getOperator: unknown mnemonic " ++ show other
-- | Parse a single value: a numeric literal, an operator mnemonic, or a
-- parenthesised sub-program.  Leading whitespace is skipped first, and the
-- three alternatives are tried in that order.
expr :: Parser Value
expr = do
  whiteSpace
  numberLit <|> operatorName <|> subProgram
  where
    numberLit = do
      i <- natural
      return $ NumberValue $ fromIntegral i
    operatorName = do
      s <- identifier
      return $ OperatorValue $ getOperator s
    subProgram = do
      s <- parens control
      return $ ListValue $ s
-- | Read assembly text from stdin, parse it, and write the serialised
-- bytecode to stdout.  A parse failure aborts the program with the parser's
-- error message.
main :: IO ()
main = do
  input <- getContents
  let c = case (parse control "" input) of
        Left err -> error $ "Parsing failed : " ++ show err
        Right x -> x
    in BL.putStr $ runPut $ serializeControl c
| stevenremot/fronsduk | src/Assembler.hs | gpl-3.0 | 2,525 | 0 | 15 | 619 | 561 | 296 | 265 | 60 | 2 |
import Control.Monad.CryptoRandom (crandomRs)
import Crypto.Random.DRBG (CtrDRBG, newGenIO)
import Control.Monad (guard, liftM)
import Data.ByteString.Char8 hiding (map, putStrLn)
import Data.Maybe
import Data.Serialize
import Generator
import Network.Haskoin.Constants
import Network.Haskoin.Crypto
import Network.Haskoin.Transaction
import Network.Haskoin.Util
-- testnet address:moCvBdctTGGBwquWx647GvQWAsr4XBQBXh
-- run with `stack runhaskell demo-fairexchange-txs.hs`
-- use https://testnet.blockexplorer.com/tx/send to broadcast
-- | Build and print the demo transactions on Bitcoin testnet.
-- Workflow: run with the commit makers enabled first, broadcast those
-- transactions, then fill in the claim inputs below and re-run.
main = do
  setTestnet
  --makeBobCommitTx
  --makeAliceCommitTx
  -- Once you have the hex-encoded redeem scripts from the above commit txs as well as the
  -- tx info, fill in the claim tx inputs below and re-run this script
  makeBobClaimTx
  makeAliceClaimTx
-- | Build Bob's commit transaction from a hard-coded testnet UTXO and print
-- it together with the hex-encoded redeem script (needed later to build the
-- claim transactions) and the script's hash160.
makeBobCommitTx = do
  let txhash = "TxHash \"6eee3902841753298e98a556f02425ea2b75c390cb4ff7e3d25bf9ebdab77db8\""
      n = 1
      value = 100000
      -- hex scriptPubKey
      script = "76a9145457c1cbd45710c749b7aba24f9d9e97382893d588ac"
      -- wif prvkey
      p = "PrvKey \"cQrgind4kVZbZpfAVfZq8Nw6HcPBZyT2pktrn2t5dSS7H9aeNFmx\""
      secrets = [42, 56] :: [Integer]
  let utxo = toUTXO txhash n value script
      prv = read p
      pub = derivePubKey prv
      hashes = map (doubleHash256 . encode) secrets
  -- NOTE(review): 'either undefined id' crashes opaquely if makeBobCommit
  -- fails; fine for a throwaway demo script.
  let (tx, redeem) = either undefined id $ makeBobCommit [utxo] [prv] pub pub hashes
  putStrLn "Bob commit tx:"
  putStrLn $ show tx
  putStrLn "Redeem script for Bob's commit:"
  putStrLn $ unpack $ encodeHex $ encode redeem
  putStrLn $ show $ hash160 $ getHash256 $ hash256 $ encode redeem
-- | Build Alice's commit transaction from a hard-coded testnet UTXO and
-- print it, its redeem script (hex) and the script's hash160.
makeAliceCommitTx = do
  let txhash = "TxHash \"c58f3a355b1bece578d757b5597c25a42a0b4ec43d8c8a4af2048319fb9543da\""
      n = 1
      value = 100000
      -- hex scriptPubKey
      script = "76a9145457c1cbd45710c749b7aba24f9d9e97382893d588ac"
      -- wif prvkey
      p = "PrvKey \"cQrgind4kVZbZpfAVfZq8Nw6HcPBZyT2pktrn2t5dSS7H9aeNFmx\""
      secrets = [56, 123] :: [Integer]
  let utxo = toUTXO txhash n value script
      prv = read p
      pub = derivePubKey prv
      hashes = map (doubleHash256 . encode) secrets
  let (tx, redeem) = either undefined id $ makeAliceCommit [utxo] [prv] pub pub hashes
  putStrLn "Alice commit tx:"
  putStrLn $ show tx
  putStrLn "Redeem script for Alice's commit:"
  putStrLn $ unpack $ encodeHex $ encode redeem
  putStrLn $ show $ hash160 $ getHash256 $ hash256 $ encode redeem
-- | Build Bob's claim transaction, spending the commit output via the
-- pasted-in redeem script and both secrets, and print it.
makeBobClaimTx = do
  let txhash = "TxHash \"69f72c3105704a421584cc5eb27dcf4fce20cc6820012eaa22a67cb2fc9ac352\""
      n = 0
      value = 90000
      -- hex scriptPubKey
      script = "a914d7029e2efa92ea7257159337250e9ffa09fb633187"
      -- wif prvkey
      p = "PrvKey \"cQrgind4kVZbZpfAVfZq8Nw6HcPBZyT2pktrn2t5dSS7H9aeNFmx\""
      secrets = [56, 123] :: [Integer]
      -- redeem script copied from the commit run's output
      redeemHex = "630500000186a0b1752103f7653b7d6db6dbb4139ef05484a92e620312a822105c526c4f91b65e76f5ad15ac672103f7653b7d6db6dbb4139ef05484a92e620312a822105c526c4f91b65e76f5ad15adaa200880e9d991ac4931bf64bb2809100af425ab761e081b349d54cfb8f2ec1dbd2788aa2032115336ddd57f8423974294d75418a27128aca19c71162287e457aab85f31658768"
  let utxo = toUTXO txhash n value script
      prv = read p
      pub = derivePubKey prv
      -- TODO maybe the other generators should take [Integer] rather than bytestrings
      --hashes = map encode secrets
      redeem = either undefined id . decode . fromMaybe undefined . decodeHex $ pack redeemHex
  let tx = either undefined id $ makeBobClaim [utxo] [prv] redeem secrets pub
  putStrLn "Bob claim tx:"
  putStrLn $ show tx
-- | Build Alice's claim transaction, spending the commit output via the
-- pasted-in redeem script and the first secret only, and print it.
makeAliceClaimTx = do
  let txhash = "TxHash \"ab562be2be6255acbf5e595f29b3da730b7f5e5961bc89472c965aa9184168ed\""
      n = 0
      value = 90000
      -- hex scriptPubKey
      script = "a91449cfe9e4fe08a9ddf5859aca9bb84d916cf1a5ae87"
      -- wif prvkey
      p = "PrvKey \"cQrgind4kVZbZpfAVfZq8Nw6HcPBZyT2pktrn2t5dSS7H9aeNFmx\""
      secrets = [56, 123] :: [Integer]
      -- redeem script copied from the commit run's output
      redeemHex = "630500000186a0b1752103f7653b7d6db6dbb4139ef05484a92e620312a822105c526c4f91b65e76f5ad15ac672103f7653b7d6db6dbb4139ef05484a92e620312a822105c526c4f91b65e76f5ad15adaa7620da149891917e58f3808d3c0fbbe5385cfa33f2cc93e4bfd234d2817fdb82da0e877c200880e9d991ac4931bf64bb2809100af425ab761e081b349d54cfb8f2ec1dbd27879b68"
  let utxo = toUTXO txhash n value script
      prv = read p
      pub = derivePubKey prv
      --hashes = map encode secrets
      redeem = either undefined id . decode . fromMaybe undefined . decodeHex $ pack redeemHex
  let tx = either undefined id $ makeAliceClaim [utxo] [prv] redeem (Prelude.head secrets) pub
  putStrLn "Alice claim tx"
  putStrLn $ show tx
-- | Assemble a 'UTXO' from its textual pieces: a showable tx hash, an
-- output index, a value in satoshis, and a hex-encoded scriptPubKey.
toUTXO txhash n value script = UTXO { _outPoint = outpoint, _txOut = txout }
  where
    outpoint = OutPoint { outPointHash = read txhash, outPointIndex = n }
    txout = TxOut { outValue = value, scriptOutput = decodedScript }
    -- Crashes (undefined) on malformed hex; acceptable for this demo.
    decodedScript = fromMaybe undefined . decodeHex $ pack script
| hudon/refraction-hs | demo-fairexchange.hs | gpl-3.0 | 5,046 | 0 | 14 | 1,060 | 994 | 522 | 472 | 84 | 1 |
module Language.Core.Test.TestSyntax(tests) where
import Language.Core.Syntax
import Test.HUnit
import qualified Language.Haskell.Exts as LHE
testEquality = [equalityTest (Free "var"),
equalityTest (Lambda "x" (Bound 0)),
equalityTest (Con "ConsTransformer" [Free "x", Con "NilTransformer" []]),
equalityTest (Apply (Fun "f") (Free "x")),
equalityTest (Fun "functionCall"),
equalityTest (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)])),
equalityTest (Let "x" (Free "var") (Bound 0)),
equalityTest (Where (Fun "x") [("x", Free "x")]),
equalityTest (Lambda "x" (Lambda "y" (Tuple [Bound 0, Bound 1]))),
equalityTest (TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 0))]
testInequality = [inequalityTest (Free "var") (Free "var'"),
inequalityTest (Lambda "x" (Bound 0)) (Lambda "x'" (Free "v")),
inequalityTest (Con "ConsTransformer" [Free "x", Con "NilTransformer" []]) (Con "NilTransformer" []),
inequalityTest (Apply (Fun "f") (Free "x")) (Apply (Lambda "x" (Bound 0)) (Free "x")),
inequalityTest (Fun "f") (Fun "f'"),
inequalityTest (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)])) (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 1)])),
inequalityTest (Let "x" (Free "var") (Bound 0)) (Let "x" (Free "var'") (Bound 0)),
inequalityTest (Where (Fun "x") [("x", Free "x")]) (Where (Fun "x") [("x", Free "x"), ("x'", Free "x'")]),
inequalityTest (Lambda "x" (Lambda "y" (Tuple [Bound 0, Bound 1]))) (Lambda "x" (Lambda "y" (Tuple [Bound 0, Bound 0]))),
inequalityTest (TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 0)) (TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 2))]
-- | HUnit test asserting that a value compares equal to itself.
equalityTest :: (Eq a, Show a) => a -> Test
equalityTest t =
    TestCase (assertBool message (t == t))
  where
    message = "Equality test for " ++ show t ++ " failed"
-- | HUnit test asserting that two values compare unequal.
inequalityTest :: (Eq a, Show a) => a -> a -> Test
inequalityTest t t' =
    TestCase (assertBool message (t /= t'))
  where
    message = "Inequality test for " ++ show t ++ " and " ++ show t' ++ " failed"
testRebuildQualConDecl = [(LHE.QualConDecl (LHE.SrcLoc "" 0 0) [] [] (LHE.ConDecl (LHE.Ident "List") [LHE.UnBangedTy (LHE.TyVar (LHE.Ident "name")), (LHE.UnBangedTy (LHE.TyApp (LHE.TyCon (LHE.UnQual (LHE.Ident "name"))) (LHE.TyVar (LHE.Ident "var")))), (LHE.UnBangedTy (LHE.TyApp (LHE.TyApp (LHE.TyCon (LHE.UnQual (LHE.Ident "name"))) (LHE.TyVar (LHE.Ident "var"))) (LHE.TyVar (LHE.Ident "var'"))))])) ~=? (rebuildConDecl ("List", [makeBangDataType "name" [], makeBangDataType "name" ["var"], makeBangDataType "name" ["var", "var'"]]))]
testRebuildBangType = [(LHE.UnBangedTy (LHE.TyVar (LHE.Ident "name"))) ~=? (rebuildBangType (makeBangDataType "name" [])),
(LHE.UnBangedTy (LHE.TyApp (LHE.TyCon (LHE.UnQual (LHE.Ident "name"))) (LHE.TyVar (LHE.Ident "var")))) ~=? (rebuildBangType (makeBangDataType "name" ["var"])),
(LHE.UnBangedTy (LHE.TyApp (LHE.TyApp (LHE.TyCon (LHE.UnQual (LHE.Ident "name"))) (LHE.TyVar (LHE.Ident "var"))) (LHE.TyVar (LHE.Ident "var'")))) ~=? (rebuildBangType (makeBangDataType "name" ["var", "var'"]))]
makeBangDataType name vars = DataType name vars [] Nothing []
testRebuildDecl = [(makeDecl "f" (LHE.Var (LHE.UnQual (LHE.Ident "v")))) ~=? (rebuildDecl ("f", Free "v")),
(makeDecl "f'" (LHE.Case (LHE.Var (LHE.UnQual (LHE.Ident "x"))) [(makeAlt (LHE.PList []) (LHE.Var (LHE.UnQual (LHE.Ident "v")))), (makeAlt (LHE.PParen (LHE.PInfixApp (LHE.PVar (LHE.Ident "x")) (LHE.Special LHE.Cons) (LHE.PList []))) (LHE.Var (LHE.UnQual (LHE.Ident "x"))))])) ~=? (rebuildDecl ("f'", (Case (Free "x") [Branch "NilTransformer" [] (Free "v"), Branch "ConsTransformer" ["x"] (Bound 0)])))]
makeDecl n e = (LHE.FunBind [LHE.Match (LHE.SrcLoc "" 0 0) (LHE.Ident n) [] Nothing (LHE.UnGuardedRhs e) (LHE.BDecls [])])
testRebuildExp = [(LHE.Var (LHE.UnQual (LHE.Ident "v"))) ~=? (rebuildExp (Free "v")),
(LHE.Lambda (LHE.SrcLoc "" 0 0) [LHE.PVar (LHE.Ident "v")] (LHE.Var (LHE.UnQual (LHE.Ident "v")))) ~=? (rebuildExp (Lambda "v" (Bound 0))),
(LHE.Let (LHE.BDecls [LHE.FunBind [LHE.Match (LHE.SrcLoc "" 0 0) (LHE.Ident "v") [] Nothing (LHE.UnGuardedRhs (LHE.Var (LHE.UnQual (LHE.Ident "v'")))) (LHE.BDecls [])]]) (LHE.Var (LHE.UnQual (LHE.Ident "v")))) ~=? (rebuildExp (Let "v" (Free "v'") (Bound 0))),
(LHE.Var (LHE.UnQual (LHE.Ident "funName"))) ~=? (rebuildExp (Fun "funName")),
(LHE.Lit (LHE.Int 0)) ~=? (rebuildExp (Con "Z" [])),
(LHE.Lit (LHE.Int 1)) ~=? (rebuildExp (Con "S" [Con "Z" []])),
(LHE.Lit (LHE.Int 2)) ~=? (rebuildExp (Con "S" [Con "S" [Con "Z" []]])),
(LHE.Lit (LHE.String "a")) ~=? (rebuildExp (Con "StringTransformer" [Con "a" []])),
(LHE.Lit (LHE.String "abc")) ~=? (rebuildExp (Con "StringTransformer" [Con "a" [Con "b" [Con "c" []]]])),
(LHE.Lit (LHE.Char 'c')) ~=? (rebuildExp (Con "CharTransformer" [Con "c" []])),
(LHE.Con (LHE.Special LHE.ListCon)) ~=? (rebuildExp (Con "NilTransformer" [])),
(LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "v"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Con (LHE.Special LHE.ListCon)))) ~=? (rebuildExp (Con "ConsTransformer" (Free "v":[]))),
(LHE.Paren (LHE.InfixApp (LHE.Con (LHE.Special LHE.ListCon)) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Con (LHE.Special LHE.ListCon)))) ~=? (rebuildExp (Con "ConsTransformer" (Con "NilTransformer" []:[]))),
(LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "x"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Var (LHE.UnQual (LHE.Ident "y"))))) ~=? (rebuildExp (Con "ConsTransformer" [Free "x", Free "y"])),
(LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "x"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "y"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Var (LHE.UnQual (LHE.Ident "z"))))))) ~=? (rebuildExp (Con "ConsTransformer" [Free "x", Free "y", Free "z"])),
(LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "x"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "y"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "z"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Con (LHE.Special LHE.ListCon)))))))) ~=? (rebuildExp (Con "ConsTransformer" [Free "x", Free "y", Free "z", Con "NilTransformer" []])),
(LHE.App (LHE.App (LHE.Con (LHE.UnQual (LHE.Ident "Just"))) (LHE.Var (LHE.UnQual (LHE.Ident "x")))) (LHE.Var (LHE.UnQual (LHE.Ident "y")))) ~=? (rebuildExp (Con "Just" [Free "x", Free "y"])),
(LHE.InfixApp (LHE.Paren (LHE.Var (LHE.UnQual (LHE.Ident "x")))) (LHE.QVarOp (LHE.UnQual (LHE.Ident "pseq"))) (LHE.Paren (LHE.Var (LHE.UnQual (LHE.Ident "y"))))) ~=? (rebuildExp (Apply (Apply (Fun "pseq") (Free "x")) (Free "y"))),
(LHE.InfixApp (LHE.Paren (LHE.Var (LHE.UnQual (LHE.Ident "x")))) (LHE.QVarOp (LHE.UnQual (LHE.Ident "par"))) (LHE.Paren (LHE.Var (LHE.UnQual (LHE.Ident "y"))))) ~=? (rebuildExp (Apply (Apply (Fun "par") (Free "x")) (Free "y"))),
(LHE.App (LHE.Var (LHE.UnQual (LHE.Ident "x"))) (LHE.Lit (LHE.String "abc"))) ~=? (rebuildExp (Apply (Free "x") (Con "StringTransformer" [Con "a" [Con "b" [Con "c" []]]]))),
(LHE.Case (LHE.Var (LHE.UnQual (LHE.Ident "x"))) [(makeAlt (LHE.PList []) (LHE.Var (LHE.UnQual (LHE.Ident "v")))), (makeAlt (LHE.PParen (LHE.PInfixApp (LHE.PVar (LHE.Ident "x")) (LHE.Special LHE.Cons) (LHE.PList []))) (LHE.Var (LHE.UnQual (LHE.Ident "x"))))]) ~=? (rebuildExp (Case (Free "x") [Branch "NilTransformer" [] (Free "v"), Branch "ConsTransformer" ["x"] (Bound 0)])),
(LHE.Tuple [(LHE.Var (LHE.UnQual (LHE.Ident "v"))), (LHE.Lit (LHE.Int 2)), (LHE.Lit (LHE.String "abc"))]) ~=? (rebuildExp (Tuple [Free "v", Con "S" [Con "S" [Con "Z" []]], Con "StringTransformer" [Con "a" [Con "b" [Con "c" []]]]])),
(LHE.Let (LHE.BDecls [LHE.PatBind (LHE.SrcLoc "" 0 0) (LHE.PTuple [LHE.PVar (LHE.Ident "x"), LHE.PVar (LHE.Ident "y")]) Nothing (LHE.UnGuardedRhs (LHE.App (LHE.Var (LHE.UnQual (LHE.Ident "f"))) (LHE.Var (LHE.UnQual (LHE.Ident "g"))))) (LHE.BDecls [])]) (LHE.App (LHE.Var (LHE.UnQual (LHE.Ident "y"))) (LHE.Var (LHE.UnQual (LHE.Ident "x"))))) ~=? (rebuildExp (TupleLet ["x", "y"] (Apply (Free "f") (Free "g")) (Apply (Bound 0) (Bound 1))))]
-- | Peano-encoded naturals (Z / S) rebuild to integer literals.
testRebuildInt = [(LHE.Lit (LHE.Int 0)) ~=? (rebuildInt (Con "Z" [])),
                  (LHE.Lit (LHE.Int 1)) ~=? (rebuildInt (Con "S" [Con "Z" []])),
                  (LHE.Lit (LHE.Int 2)) ~=? (rebuildInt (Con "S" [Con "S" [Con "Z" []]]))]
-- | Nested one-character constructors rebuild to the concatenated string.
testRebuildString = ["a" ~=? (rebuildString (Con "a" [])),
                     "abc" ~=? (rebuildString (Con "a" [Con "b" [Con "c" []]]))]
testRebuildAlt = [(makeAlt (LHE.PList []) (LHE.Var (LHE.UnQual (LHE.Ident "v")))) ~=? (rebuildAlt (Branch "NilTransformer" [] (Free "v"))),
(makeAlt (LHE.PParen (LHE.PInfixApp (LHE.PVar (LHE.Ident "x")) (LHE.Special LHE.Cons) (LHE.PList []))) (LHE.Var (LHE.UnQual (LHE.Ident "x")))) ~=? (rebuildAlt (Branch "ConsTransformer" ["x"] (Bound 0))),
(makeAlt (LHE.PParen (LHE.PInfixApp (LHE.PVar (LHE.Ident "x")) (LHE.Special LHE.Cons) (LHE.PVar (LHE.Ident "xs")))) (LHE.App (LHE.Var (LHE.UnQual (LHE.Ident "sumList"))) (LHE.Var (LHE.UnQual (LHE.Ident "xs"))))) ~=? (rebuildAlt (Branch "ConsTransformer" ["x","xs"] (Apply (Fun "sumList") (Bound 0)))),
(makeAlt (LHE.PApp (LHE.UnQual (LHE.Ident "JoinList")) [(LHE.PVar (LHE.Ident "x")), (LHE.PVar (LHE.Ident "y"))]) (LHE.Var (LHE.UnQual (LHE.Ident "y")))) ~=? (rebuildAlt (Branch "JoinList" ["x", "y"] (Bound 0)))]
makeAlt pat expr = LHE.Alt (LHE.SrcLoc "" 0 0) pat (LHE.UnGuardedAlt expr) (LHE.BDecls [])
testRebuildCon = [(LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "v"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Con (LHE.Special LHE.ListCon)))) ~=? (rebuildCon (Free "v":[])),
(LHE.Paren (LHE.InfixApp (LHE.Con (LHE.Special LHE.ListCon)) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Con (LHE.Special LHE.ListCon)))) ~=? (rebuildCon (Con "NilTransformer" []:[])),
(LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "x"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Var (LHE.UnQual (LHE.Ident "y"))))) ~=? (rebuildCon ([Free "x", Free "y"])),
(LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "x"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "y"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Var (LHE.UnQual (LHE.Ident "z"))))))) ~=? (rebuildCon ([Free "x", Free "y", Free "z"])),
(LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "x"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "y"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Paren (LHE.InfixApp (LHE.Var (LHE.UnQual (LHE.Ident "z"))) (LHE.QConOp (LHE.Special LHE.Cons)) (LHE.Con (LHE.Special LHE.ListCon)))))))) ~=? (rebuildCon ([Free "x", Free "y", Free "z", Con "NilTransformer" []]))]
testMatch = [True ~=? (match (Free "x") (Free "x")),
False ~=? (match (Free "x") (Free "y")),
True ~=? (match (Bound 0) (Bound 0)),
False ~=? (match (Bound 1) (Bound 90)),
False ~=? (match (Lambda "x" (Bound 0)) (Free "y")),
True ~=? (match (Lambda "x" (Bound 0)) (Lambda "x" (Bound 1))),
True ~=? (match (Con "c" [Bound 1, Free "x"]) (Con "c" [Bound 1, Free "x"])),
False ~=? (match (Con "c" []) (Con "d" [])),
False ~=? (match (Con "c" [Bound 1, Free "x"]) (Con "c" [Free "x"])),
True ~=? (match (Apply (Free "x") (Bound 0)) (Apply (Free "x") (Bound 0))),
True ~=? (match (Apply (Free "x") (Bound 0)) (Apply (Free "x") (Bound 1))),
False ~=? (match (Apply (Free "x") (Bound 0)) (Apply (Bound 0) (Bound 1))),
True ~=? (match (Fun "f") (Fun "f")),
False ~=? (match (Fun "f") (Fun "g")),
True ~=? (match (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]) (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)])),
False ~=? (match (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]) (Case (Bound 0) [Branch "ConsTransformer" ["x"] (Bound 0)])),
False ~=? (match (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]) (Case (Bound 0) [Branch "ConsTransformer'" ["x", "xs"] (Bound 0)])),
False ~=? (match (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]) (Case (Bound 0) [Branch "ConsTransformer'" ["x"] (Bound 0)])),
True ~=? (match (Let "x" (Free "x") (Bound 0)) (Let "x" (Free "x") (Bound 0))),
True ~=? (match (Where (Free "x") [("f", Bound 0), ("g", Bound 1)]) (Where (Free "x") [("f", Bound 0), ("g", Bound 1)])),
False ~=? (match (Where (Free "x") [("f", Bound 0), ("g", Bound 1)]) (Where (Free "x") [("f", Bound 0)])),
True ~=? (match (Tuple [Free "x", Bound 0]) (Tuple [Free "x", Bound 0])),
False ~=? (match (Tuple [Free "x", Bound 0]) (Tuple [Bound 0, Free "x"])),
False ~=? (match (Tuple [Free "x"]) (Tuple [Free "x", Bound 0])),
True ~=? (match (TupleLet ["a", "b", "c"] (Bound 0) (Bound 1)) (TupleLet ["a", "b", "c"] (Bound 0) (Bound 1))),
False ~=? (match (TupleLet ["a", "b", "c"] (Bound 0) (Bound 1)) (TupleLet ["a", "b"] (Bound 0) (Bound 1))),
False ~=? (match (Free "x") (Bound 0))]
testFree = [["x"] ~=? (free (Free "x")),
[] ~=? (free (Bound 0)),
["x"] ~=? (free (Lambda "x" (Free "x"))),
[] ~=? (free (Lambda "x" (Fun "fun"))),
[] ~=? (free (Lambda "x" (Apply (Fun "fun") (Fun "fun'")))),
["x"] ~=? (free (Con "ConsTransformer" [Free "x", Con "NilTransformer" []])),
["fun", "fun'"] ~=? (free (Con "ConsTransformer" [Free "fun", Free "fun'"])),
["x"] ~=? (free (Apply (Fun "f") (Free "x"))),
[] ~=? (free (Fun "f")),
[] ~=? (free (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]))),
["x", "y"] ~=? (free (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Con "ConsTransformer" [Free "x", Free "y"])]))),
[] ~=? (free (Let "x" (Fun "var") (Bound 0))),
["var"] ~=? (free (Let "x" (Free "var") (Fun "x"))),
["x"] ~=? (free (Where (Fun "x") [("x", Free "x")])),
[] ~=? (free (Where (Fun "x") [("x", Fun "x")])),
["x", "y"] ~=? (free (Where (Free "x") [("x", Free "y")])),
[] ~=? (free (Lambda "x" (Lambda "y" (Tuple [Bound 0, Bound 1])))),
["x", "y"] ~=? (free (Lambda "x" (Lambda "y" (Tuple [Free "x", Free "y"])))),
["a", "b", "c"] ~=? (free (TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 0))),
[] ~=? (free (TupleLet ["x","y","z"] (Tuple [Fun "a", Fun "b", Fun "c"]) (Bound 0)))]
testBound = [[] ~=? (bound (Free "x")),
[0] ~=? (bound (Bound 0)),
[] ~=? (bound (Lambda "x" (Free "x"))),
[] ~=? (bound (Lambda "x" (Bound 0))),
[0] ~=? (bound (Lambda "x" (Apply (Bound 0) (Bound 1)))),
[] ~=? (bound (Con "ConsTransformer" [Free "x", Con "NilTransformer" []])),
[0, 1] ~=? (bound (Con "ConsTransformer" [Bound 0, Bound 1])),
[1] ~=? (bound (Apply (Bound 1) (Free "x"))),
[0] ~=? (bound (Bound 0)),
[] ~=? (bound (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]))),
[] ~=? (bound (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Con "ConsTransformer" [Fun "x", Fun "y"])]))),
[] ~=? (bound (Let "x" (Free "var") (Bound 0))),
[] ~=? (bound (Let "x" (Free "var") (Fun "x"))),
[0] ~=? (bound (Where (Fun "x") [("x", Bound 0)])),
[0, 1] ~=? (bound (Where (Bound 0) [("x", Bound 1)])),
[] ~=? (bound (Where (Fun "x") [("x", Fun "y")])),
[] ~=? (bound (Lambda "x" (Lambda "y" (Tuple [Bound 0, Bound 1])))),
[] ~=? (bound (Lambda "x" (Lambda "y" (Tuple [Fun "x", Fun "y"])))),
[] ~=? (bound (TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 0))),
[3, 2, 1] ~=? (bound (TupleLet ["x","y","z"] (Tuple [Bound 3, Bound 2, Bound 1]) (Bound 0)))]
testFuns = [[] ~=? (funs (Free "x")),
[] ~=? (funs (Bound 0)),
[] ~=? (funs (Lambda "x" (Free "x"))),
["fun"] ~=? (funs (Lambda "x" (Fun "fun"))),
["fun", "fun'"] ~=? (funs (Lambda "x" (Apply (Fun "fun") (Fun "fun'")))),
[] ~=? (funs (Con "ConsTransformer" [Free "x", Con "NilTransformer" []])),
["fun", "fun'"] ~=? (funs (Con "ConsTransformer" [Fun "fun", Fun "fun'"])),
["f"] ~=? (funs (Apply (Fun "f") (Free "x"))),
["f"] ~=? (funs (Fun "f")),
[] ~=? (funs (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]))),
["x", "y"] ~=? (funs (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Con "ConsTransformer" [Fun "x", Fun "y"])]))),
[] ~=? (funs (Let "x" (Free "var") (Bound 0))),
["x"] ~=? (funs (Let "x" (Free "var") (Fun "x"))),
["x"] ~=? (funs (Where (Fun "x") [("x", Free "x")])),
["x", "x"] ~=? (funs (Where (Fun "x") [("x", Fun "x")])),
["x", "y"] ~=? (funs (Where (Fun "x") [("x", Fun "y")])),
[] ~=? (funs (Lambda "x" (Lambda "y" (Tuple [Bound 0, Bound 1])))),
["x", "y"] ~=? (funs (Lambda "x" (Lambda "y" (Tuple [Fun "x", Fun "y"])))),
[] ~=? (funs (TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 0))),
["a", "b", "c"] ~=? (funs (TupleLet ["x","y","z"] (Tuple [Fun "a", Fun "b", Fun "c"]) (Bound 0)))]
testShift = [(Free "x") ~=? (shift 0 1 (Free "x")),
(Free "a") ~=? (shift 1 1 (Free "a")),
(Bound 2) ~=? (shift 1 1 (Bound 1)),
(Bound 12) ~=? (shift 10 2 (Bound 2)),
(Bound 1) ~=? (shift 10 2 (Bound 1)),
(Lambda "x" (Bound 3)) ~=? (shift 2 0 (Lambda "x" (Bound 1))),
(Con "ConsTransformer" [Bound 10, Con "NilTransformer" []]) ~=? (shift 5 5 (Con "ConsTransformer" [Bound 5, Con "NilTransformer" []])),
(Apply (Lambda "x" (Bound 0)) (Free "x")) ~=? (shift 4 0 (Apply (Lambda "x" (Bound 0)) (Free "x"))),
(Apply (Lambda "x" (Bound 5)) (Free "x")) ~=? (shift 4 0 (Apply (Lambda "x" (Bound 1)) (Free "x"))),
(Fun "f") ~=? (shift 0 0 (Fun "f")),
(Lambda "x" (Case (Bound 3) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)])) ~=? (shift 2 0 (Lambda "x" (Case (Bound 1) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]))),
(Let "x" (Free "var") (Bound 5)) ~=? (shift 4 0 (Let "x" (Free "var") (Bound 1))),
(Let "x" (Free "var") (Bound 0)) ~=? (shift 4 0 (Let "x" (Free "var") (Bound 0))),
(Let "x" (Bound 4) (Bound 5)) ~=? (shift 4 0 (Let "x" (Bound 0) (Bound 1))),
(Where (Fun "x") [("x", Free "x")]) ~=? (shift 4 0 (Where (Fun "x") [("x", Free "x")])),
(Lambda "x" (Lambda "y" (Tuple [Bound 0, Bound 1]))) ~=? (shift 4 0 (Lambda "x" (Lambda "y" (Tuple [Bound 0, Bound 1])))),
(Lambda "x" (Lambda "y" (Tuple [Bound 6, Bound 7]))) ~=? (shift 4 0 (Lambda "x" (Lambda "y" (Tuple [Bound 2, Bound 3])))),
(TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 0)) ~=? (shift 4 0 (TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 0))),
(TupleLet ["x","y","z"] (Tuple [Bound 3, Bound 4, Bound 5]) (Bound 0)) ~=? (shift 3 0 (TupleLet ["x","y","z"] (Tuple [Bound 0, Bound 1, Bound 2]) (Bound 0)))]
testSubst = [(Free "x") ~=? (subst 0 (Free "y") (Free "x")),
(Free "y") ~=? (subst 0 (Free "y") (Bound 0)),
(Bound 1) ~=? (subst 0 (Bound 1) (Bound 0)),
(Bound 1) ~=? (subst 1 (Bound 0) (Bound 1)),
(Lambda "x" (Free "y")) ~=? (subst 0 (Free "y") (Lambda "x" (Bound 1))),
(Con "ConsTransformer" [Free "x", Con "NilTransformer" []]) ~=? (subst 0 (Free "x") (Con "ConsTransformer" [Bound 0, Con "NilTransformer" []])),
(Apply (Fun "f") (Free "x")) ~=? (subst 0 (Fun "f") (Apply (Bound 0) (Free "x"))),
(Fun "f") ~=? (subst 0 (Free "x") (Fun "f")),
(Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)])) ~=? (subst 0 (Free "x") (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]))),
(Case (Free "x") [Branch "ConsTransformer" ["x", "xs"] (Bound 0)]) ~=? (subst 0 (Free "x") (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 0)])),
(Case (Free "x") [Branch "ConsTransformer" ["x", "xs"] (Free "x")]) ~=? (subst 0 (Free "x") (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 2)])),
(Let "x" (Free "x") (Bound 0)) ~=? (subst 0 (Free "x") (Let "x" (Bound 0) (Bound 0))),
(Where (Fun "x") [("x", Free "x")]) ~=? (subst 0 (Fun "x") (Where (Bound 0) [("x", Free "x")])),
(Lambda "x" (Lambda "y" (Tuple [Free "a", Free "b"]))) ~=? (subst 0 (Free "b") (subst 0 (Free "a") (Lambda "x" (Lambda "y" (Tuple [Bound 2, Bound 3]))))),
(TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Free "a")) ~=? (subst 0 (Free "c") (subst 0 (Free "b") (subst 0 (Free "a") (TupleLet ["x","y","z"] (Tuple [Bound 0, Bound 1, Bound 2]) (Bound 3)))))]
testAbstract = [(Bound 0) ~=? (abstract 0 "x" (Free "x")),
(Free "y") ~=? (abstract 0 "x" (Free "y")),
(Lambda "x" (Apply (Bound 1) (Bound 0))) ~=? (abstract 0 "y" (Lambda "x" (Apply (Free "y") (Bound 0)))),
(Lambda "x" (Apply (Free "z") (Bound 0))) ~=? (abstract 0 "y" (Lambda "x" (Apply (Free "z") (Bound 0)))),
(Con "ConsTransformer" [Bound 2, Free "z"]) ~=? (abstract 2 "y" (Con "ConsTransformer" [Free "y", Free "z"])),
(Apply (Bound 0) (Free "x")) ~=? (abstract 0 "y" (Apply (Free "y") (Free "x"))),
(Fun "f") ~=? (abstract 0 "x" (Fun "f")),
(Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Bound 3)])) ~=? (abstract 0 "z" (Lambda "x" (Case (Bound 0) [Branch "ConsTransformer" ["x", "xs"] (Free "z")]))),
(Let "x" (Bound 0) (Bound 0)) ~=? (abstract 0 "z" (Let "x" (Free "z") (Bound 0))),
(Where (Fun "x") [("x", Bound 0)]) ~=? (abstract 0 "y" (Where (Fun "x") [("x", Free "y")])),
(TupleLet ["x","y","z"] (Tuple [Bound 1, Bound 0, Free "c"]) (Bound 0)) ~=? abstract 0 "b" (abstract 0 "a" (TupleLet ["x","y","z"] (Tuple [Free "a", Free "b", Free "c"]) (Bound 0)))]
-- | 'rename' appends primes until the name is absent from the exclusion
-- list, and leaves already-fresh names untouched.
testRename = ["x''" ~=? (rename ["x", "x'"] "x"),
              "x" ~=? (rename ["y", "z"] "x"),
              "x" ~=? (rename [] "x"),
              "x'" ~=? (rename ["x","x''"] "x")]
-- | Every test group in this module, concatenated in declaration order.
tests = concat
    [ testEquality
    , testInequality
    , testRebuildQualConDecl
    , testRebuildBangType
    , testRebuildDecl
    , testRebuildExp
    , testRebuildInt
    , testRebuildString
    , testRebuildAlt
    , testRebuildCon
    , testMatch
    , testFree
    , testBound
    , testFuns
    , testShift
    , testSubst
    , testAbstract
    , testRename
    ]
-- | Re-exports everything from "Reexport3" alongside this module's own
-- definitions.
module Reexport2(module Reexport2, module Reexport3) where
import Reexport3
-- Local definition, exported via the @module Reexport2@ export item.
q = 4
| Helium4Haskell/helium | test/correct/Reexport2.hs | gpl-3.0 | 84 | 0 | 4 | 13 | 22 | 15 | 7 | 3 | 1 |
-- Copyright 2016, 2017 Robin Raymond
--
-- This file is part of Purple Muon
--
-- Purple Muon is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- Purple Muon is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Purple Muon. If not, see <http://www.gnu.org/licenses/>.
module Client.Video.Sprite
( renderSprite
, noFlip
, renderGameObject
, updateRenderInfo
, relToAbs
) where
import Protolude
import qualified Control.Lens as CLE
import qualified Data.IntMap.Strict as DIS
import qualified Foreign.C.Types as FCT
import qualified SDL
import qualified Client.Assets.Generic as CAG
import qualified Client.Assets.Sprite as CAS
import qualified Client.Video.Types as CVT
import qualified PurpleMuon.Game.Types as PGT
import qualified PurpleMuon.Physics.Types as PPT
import qualified PurpleMuon.Types as PTY
-- | Draw sprite without flipping: the two 'Bool's are the axis flip
-- flags passed to 'SDL.copyEx', both disabled.
noFlip :: SDL.V2 Bool
noFlip = SDL.V2 False False
-- | Scale a rectangle given in relative screen coordinates (fractions
-- of the full resolution) to absolute pixel coordinates.
relToAbs :: CVT.Resolution -> SDL.Rectangle PTY.FlType -> SDL.Rectangle FCT.CInt
relToAbs (CVT.Resolution (SDL.V2 resX resY))
         (SDL.Rectangle (SDL.P (SDL.V2 posX posY)) (SDL.V2 sizeX sizeY)) =
    SDL.Rectangle (SDL.P (SDL.V2 (scaleX posX) (scaleY posY)))
                  (SDL.V2 (scaleX sizeX) (scaleY sizeY))
  where
    -- Multiply a relative coordinate by the resolution and truncate to
    -- whole pixels.
    scaleX f = FCT.CInt (truncate (fromIntegral resX * f))
    scaleY f = FCT.CInt (truncate (fromIntegral resY * f))
-- | Render a sprite
--
-- Looks the sprite up in the loader, fetches its backing texture from
-- the loader's texture store, and blits it with 'SDL.copyEx'.
--
-- TODO: The renderer as an argument is unnecessary if we take it out of the
-- Textureloader
renderSprite :: (MonadIO m, MonadError Text m)
             => SDL.Renderer
             -> CAS.SpriteLoaderType
             -> CAS.SpriteID                   -- ^ which sprite to draw
             -> Maybe (SDL.Rectangle FCT.CInt) -- ^ destination rectangle
             -> FCT.CDouble                    -- ^ rotation angle
             -> SDL.V2 Bool                    -- ^ axis flip flags
             -> m ()
renderSprite ren sl id mr phi flips = do
  -- t: texture asset id, r: source rectangle, c: presumably the sprite's
  -- rotation centre (passed as the centre argument of copyEx) -- confirm
  -- against 'CAS.Sprite'.
  CAS.Sprite t r c <- CAG.getAsset sl id
  tex <- CAG.getAsset (CAG.extData sl) t
  SDL.copyEx ren tex r mr phi (Just c) flips
-- | Render a single 'PGT.GameObject'.  Objects that carry no
-- 'PGT.RenderInfo' are silently skipped.
renderGameObject :: (MonadIO m, MonadError Text m)
                 => SDL.Renderer
                 -> CAS.SpriteLoaderType
                 -> CVT.Resolution
                 -> PGT.GameObject
                 -> m ()
renderGameObject ren sl (CVT.Resolution res) (PGT.GameObject _ _ _ mri) =
    case mri of
      Nothing -> return ()
      Just (PGT.RenderInfo p a si sp) ->
        -- Scale the relative position/size by the resolution and
        -- truncate to whole pixels.
        let pixelPos  = fmap truncate (fmap fromIntegral res * PTY.unPosition p)
            pixelSize = fmap truncate (fmap fromIntegral res * si)
            angle     = FCT.CDouble (float2Double a)
            target    = SDL.Rectangle (SDL.P pixelPos) pixelSize
        in renderSprite ren sl sp (Just target) angle (SDL.V2 False False)
-- | Update the sprite position with the position of the physical Object
--
-- Only objects that have both a physics key and render info are
-- touched; any other object -- and any object whose key is absent from
-- the 'PPT.PhysicalObjects' map -- is returned unchanged.
updateRenderInfo :: PPT.PhysicalObjects -> PGT.GameObject -> PGT.GameObject
updateRenderInfo pos go@(PGT.GameObject _ _ (Just po) (Just ri)) =
  case newp of
    -- Copy the physical object's position into the render info.
    (Just np) -> go { PGT._mReInfo = Just $ CLE.set PGT.pos npos ri }
      where
        npos = CLE.view PPT.pos np
    Nothing -> go
  where
    newp = DIS.lookup (PTY.unKey po) pos
updateRenderInfo _ go = go
| r-raymond/purple-muon | src/Client/Video/Sprite.hs | gpl-3.0 | 3,726 | 0 | 13 | 1,062 | 951 | 510 | 441 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
-- {-# OPTIONS_GHC -fno-state-hack #-}
----------------------------------------------------------------------
-- |
-- Module : FRP.Reactive.Internal.IVar
-- Copyright : (c) Conal Elliott 2008
-- License : GNU AGPLv3 (see COPYING)
--
-- Maintainer : conal@conal.net
-- Stability : experimental
--
-- Write-once variables.
----------------------------------------------------------------------
module FRP.Reactive.Internal.IVar
( IVar, newIVar, readIVar, tryReadIVar, writeIVar
) where
import Control.Concurrent.MVar
import Control.Applicative ((<$>))
import System.IO.Unsafe (unsafePerformIO)
-- | A write-once variable, represented as an 'MVar' that is filled at
-- most once.
newtype IVar a = IVar (MVar a)

-- | Create a fresh, empty 'IVar'.
newIVar :: IO (IVar a)
newIVar = fmap IVar newEmptyMVar

-- | The value held by the 'IVar'.  Evaluating the result blocks until
-- the variable has been written.
readIVar :: IVar a -> a
readIVar (IVar v) = unsafePerformIO (readMVar v)

-- | Non-blocking read: 'Nothing' while the 'IVar' is still empty,
-- otherwise 'Just' its value.
tryReadIVar :: IVar a -> IO (Maybe a)
tryReadIVar (IVar v) = do
  stillEmpty <- isEmptyMVar v
  if stillEmpty
    then return Nothing
    else Just <$> readMVar v

-- | Fill the 'IVar'.  Blocks forever if it already holds a value.
writeIVar :: IVar a -> a -> IO ()
writeIVar (IVar v) = putMVar v
{-
-- From: Bertram Felgenhauer <int-e@gmx.de>
-- to: conal@conal.net
-- date: Mon, Nov 10, 2008 at 1:02 PM
-- subject: About IVars
-- Interestingly, the code triggers a bug in ghc; you have to compile
-- it with -fno-state-hack if you enable optimization. (Though Simon
-- Marlow says that it's not the state hack's fault. See
-- http://hackage.haskell.org/trac/ghc/ticket/2756)
-- Hm: ghc balks at {-# OPTIONS_GHC -fno-state-hack #-}
-- with a few tweaks by conal
import Control.Concurrent.MVar
import System.IO.Unsafe (unsafePerformIO)
-- an IVar consists of
-- a) A lock for the writers. (This avoids the bug explained above.)
-- b) An MVar to put the value into
-- c) The value of the IVar. This is the main difference between
-- our implementations.
data IVar a = IVar (MVar ()) (MVar a) a
-- Creating an IVar creates two MVars and sets up a suspended
-- takeMVar for reading the value.
-- It relies on unsafePerformIO to execute its body at most once;
-- As far as I know this is true since ghc 6.6.1 -- see
-- http://hackage.haskell.org/trac/ghc/ticket/986
newIVar :: IO (IVar a)
newIVar = do
lock <- newMVar ()
trans <- newEmptyMVar
let {-# NOINLINE value #-}
value = unsafePerformIO $ takeMVar trans
return (IVar lock trans value)
-- Reading an IVar just returns its value.
readIVar :: IVar a -> a
readIVar (IVar _ _ value) = value
-- Writing an IVar takes the writer's lock and writes the value.
-- (To match your interface, use takeMVar instead of tryTakeMVar)
writeIVar :: IVar a -> a -> IO ()
writeIVar (IVar lock trans _) value = do
a <- tryTakeMVar lock
case a of
Just () -> putMVar trans value
Nothing -> error "writeIVar: already written"
-- writeIVar :: IVar a -> a -> IO Bool
-- writeIVar (IVar lock trans _) value = do
-- a <- tryTakeMVar lock
-- case a of
-- Just _ -> putMVar trans value >> return True
-- Nothing -> return False
-- I didn't originally support tryReadIVar, but it's easily implemented,
-- too.
tryReadIVar :: IVar a -> IO (Maybe a)
tryReadIVar (IVar lock _ value) = fmap f (isEmptyMVar lock)
where
f True = Just value
f False = Nothing
-- tryReadIVar (IVar lock _ value) = do
-- empty <- isEmptyMVar lock
-- if empty then return (Just value) else return Nothing
-}
| ekmett/reactive | src/FRP/Reactive/Internal/IVar.hs | agpl-3.0 | 3,675 | 0 | 9 | 786 | 257 | 146 | 111 | 20 | 2 |
{- |
Module: Postmaster.Prelude
Copyright: (C) 2004-2019 Peter Simons
License: GNU AFFERO GPL v3 or later
Maintainer: simons@cryp.to
Stability: experimental
Portability: non-portable
-}
module Postmaster.Prelude
( module Control.Lens
, module Control.Monad.Extra
, module Control.Monad.Fail
, module Control.Monad.IO.Class
, module Control.Monad.IO.Unlift
, module Control.Monad.Reader
, module Control.Monad.State.Strict
, module Data.ByteString.Builder, display
, module Data.Either
, module Data.Default.Class
, module Data.Maybe
, module Data.String
, module Data.Word
, module System.Directory
, module System.FilePath
, module System.IO
, module UnliftIO.Async
, module UnliftIO.Concurrent
, module UnliftIO.Exception
, Text, packText, unpackText, encodeUtf8Text, decodeUtf8Text
, LazyText, packLazyText, unpackLazyText, encodeUtf8LazyText, decodeUtf8LazyText, BSL.toStrict, BSL.fromStrict
, ByteString, packBS, packBS8, unpackBS, unpackBS8, unsafeUseAsCStringLen
, LazyByteString, packLBS, unpackLBS
) where
import Prelude hiding ( fail )
import Control.Lens hiding ( Context, (<.>) )
import Control.Monad.Extra hiding ( fail )
import Control.Monad.Fail
import Control.Monad.IO.Class
import Control.Monad.IO.Unlift
import Control.Monad.Reader hiding ( fail )
import Control.Monad.State.Strict hiding ( fail )
import qualified Data.ByteString as BS
import Data.ByteString.Builder ( Builder, char8, charUtf8, string8, stringUtf8, toLazyByteString )
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy as BSL
import Data.ByteString.Unsafe ( unsafeUseAsCStringLen )
import Data.Default.Class
import Data.Either
import Data.Maybe
import Data.String
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import qualified Data.Text.Lazy as LText
import qualified Data.Text.Lazy.Encoding as LText
import Data.Word
import System.Directory
import System.FilePath
import System.IO hiding ( char8 )
import UnliftIO.Async
import UnliftIO.Concurrent
import UnliftIO.Exception
-- | Render any 'Show'-able value as a UTF-8 encoded 'Builder'.
display :: Show a => a -> Builder
display x = stringUtf8 (show x)
-- | Strict byte string under a shorter, unqualified name.
type ByteString = BS.ByteString

-- | Build a strict byte string from raw bytes.
packBS :: [Word8] -> ByteString
packBS ws = BS.pack ws

-- | Build a strict byte string from a 'String', one byte per character
-- (via 'BS8.pack').
packBS8 :: String -> ByteString
packBS8 s = BS8.pack s

-- | The raw bytes of a strict byte string.
unpackBS :: ByteString -> [Word8]
unpackBS bs = BS.unpack bs

-- | View a strict byte string as a 'String', one 'Char' per byte.
unpackBS8 :: ByteString -> String
unpackBS8 bs = BS8.unpack bs
-- | Lazy byte string under a shorter, unqualified name.
type LazyByteString = BSL.ByteString

-- | Build a lazy byte string from raw bytes.
packLBS :: [Word8] -> LazyByteString
packLBS = BSL.pack

-- | The raw bytes of a lazy byte string.
--
-- NOTE: this previously had the type @[Word8] -> LazyByteString@ with
-- body 'BSL.pack', i.e. it was an exact duplicate of 'packLBS'.  It now
-- mirrors 'unpackBS' for strict byte strings, as the name intends.
unpackLBS :: LazyByteString -> [Word8]
unpackLBS = BSL.unpack
-- | Strict text under a shorter, unqualified name.
type Text = Text.Text

-- | Convert a 'String' to strict 'Text'.
packText :: String -> Text
packText s = Text.pack s

-- | Convert strict 'Text' back to a 'String'.
unpackText :: Text -> String
unpackText t = Text.unpack t

-- | UTF-8 encode strict 'Text'.
encodeUtf8Text :: Text -> ByteString
encodeUtf8Text t = Text.encodeUtf8 t

-- | Decode a UTF-8 byte string to strict 'Text'.
decodeUtf8Text :: ByteString -> Text
decodeUtf8Text bs = Text.decodeUtf8 bs
-- | Lazy text under a shorter, unqualified name.
type LazyText = LText.Text

-- | Convert a 'String' to lazy 'Text'.
packLazyText :: String -> LazyText
packLazyText s = LText.pack s

-- | Convert lazy 'Text' back to a 'String'.
unpackLazyText :: LazyText -> String
unpackLazyText t = LText.unpack t

-- | UTF-8 encode lazy 'Text'.
encodeUtf8LazyText :: LazyText -> LazyByteString
encodeUtf8LazyText t = LText.encodeUtf8 t

-- | Decode a UTF-8 lazy byte string to lazy 'Text'.
decodeUtf8LazyText :: LazyByteString -> LazyText
decodeUtf8LazyText bs = LText.decodeUtf8 bs
| peti/postmaster | src/Postmaster/Prelude.hs | agpl-3.0 | 3,183 | 0 | 6 | 469 | 740 | 472 | 268 | 86 | 1 |
{-# OPTIONS -fno-warn-orphans #-}
-- |
-- Module: SwiftNav.SBP.Encoding
-- Copyright: Copyright (C) 2015 Swift Navigation, Inc.
-- License: LGPL-3
-- Maintainer: Mark Fine <dev@swiftnav.com>
-- Stability: experimental
-- Portability: portable
--
-- Encoding utilities.
module SwiftNav.SBP.Encoding where
import BasicPrelude
import Data.Aeson
import qualified Data.ByteString as B
import Data.ByteString.Base64 as Base64
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error
-- ByteString doesn't have Aeson instances defined for it since
-- arbitrary ByteString's aren't really valid JSON. This defines
-- orphaned instances for ByteStrings that are expected to be valid
-- text.
-- Base64-encode first so arbitrary binary payloads survive the trip
-- through JSON text; 'ignore' drops any bytes that still fail to decode
-- as UTF-8.
instance ToJSON B.ByteString where
  toJSON = toJSON . decodeUtf8With ignore . Base64.encode

-- Accept any JSON string; 'decodeLenient' tolerates malformed Base64
-- instead of failing the parse.
instance FromJSON B.ByteString where
  parseJSON = withText "ByteString" (pure . Base64.decodeLenient . encodeUtf8)
| swift-nav/libsbp | haskell/src/SwiftNav/SBP/Encoding.hs | lgpl-3.0 | 996 | 0 | 10 | 203 | 124 | 79 | 45 | 12 | 0 |
module ProductAbsorption where
import Product
import Products
-- Absorption law, left-hand side: (i >< j) . <g, h>
pal :: (d -> a) -> (e -> b) -> (c -> d) -> (c -> e) -> c -> (a, b)
pal i j g h = Product.product i j . pair g h

-- Absorption law, right-hand side: <i . g, j . h>
par :: (d -> a) -> (e -> b) -> (c -> d) -> (c -> e) -> c -> (a, b)
par i j g h = pair (i . g) (j . h)

-- Evaluating both sides at the same arguments should give equal results:
pal0,par0 :: (String,Int)
pal0 = pal show read (*2) show 4
par0 = par show read (*2) show 4
-- ("8",4)
-- End
| haroldcarr/learn-haskell-coq-ml-etc | haskell/book/2019-Program_Design_by_Calculation-Oliveira/2015-05-LambdaConf/ProductAbsorption.hs | unlicense | 453 | 0 | 10 | 154 | 248 | 139 | 109 | 10 | 1 |
module Lycopene.Database.Datapath
( tempDatapath
) where
-- | Path of a throw-away database.  \":memory:\" is presumably the
-- SQLite convention for an in-memory database -- confirm against the
-- connection code.
tempDatapath :: FilePath
tempDatapath = ":memory:"
| utky/lycopene | src/Lycopene/Database/Datapath.hs | apache-2.0 | 144 | 0 | 4 | 48 | 22 | 14 | 8 | 4 | 1 |
-- Douglas Peucker using Shortest Distance
module Data.Geometry.Simplify.DouglasPeucker
( distance
, shortestDistance
, splitAtMaxDistance
, douglasPeucker
) where
import qualified Data.Geospatial as Geospatial
import qualified Data.Sequence as Sequence
import qualified Data.Geometry.Types.Geography as TypesGeography
import Prelude hiding (last, tail)
type Distance = Double
type Index = Int
-- https://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm
-- | Simplify a polyline: drop every point whose distance to the
-- first/last chord is at most @epsilon@, recursing at the farthest
-- point otherwise.
douglasPeucker :: Double -> Sequence.Seq Geospatial.PointXY -> Sequence.Seq Geospatial.PointXY
douglasPeucker epsilon points
    | points == Sequence.empty = Sequence.empty
    | dMax > epsilon = douglasPeucker epsilon left Sequence.>< tail (douglasPeucker epsilon right)
    | otherwise = firstAndLastPoint points
    where
      -- The split point is shared: 'left' ends with it ('take index')
      -- and 'right' starts with it ('drop (index - 1)'); 'tail' on the
      -- right-hand result avoids emitting it twice.
      (left, right) = (Sequence.take index points, Sequence.drop (index - 1) points)
      (dMax, index) = splitAtMaxDistance points
-- | The greatest distance of any point from the chord between the first
-- and last points, together with the index 'douglasPeucker' splits at
-- (one past the maximising element; defaults to the sequence length).
splitAtMaxDistance :: Sequence.Seq Geospatial.PointXY -> (Distance, Index)
splitAtMaxDistance points@((first Sequence.:<| _) Sequence.:|> last) =
    Sequence.foldlWithIndex step (0.0, Sequence.length points) points
    where
      lineSegment = TypesGeography.GeoStorableLine first last
      -- Bind the distance once; the original recomputed
      -- 'shortestDistance' a second time whenever a new maximum was found.
      step (accMax, index) i point =
        let d = shortestDistance point lineSegment
        in if d > accMax then (d, i + 1) else (accMax, index)
splitAtMaxDistance _ = (0.0, 0)
-- http://paulbourke.net/geometry/pointlineplane/DistancePoint.java
-- Distance from a point to a finite line segment.  @u@ is the
-- normalised projection of the point onto the infinite line; when it
-- falls outside [0, 1] the nearest endpoint is used instead, and a
-- degenerate segment (a == b) degrades to plain point distance.
shortestDistance :: Geospatial.PointXY -> TypesGeography.GeoStorableLine -> Distance
shortestDistance p@(Geospatial.PointXY pX pY) (TypesGeography.GeoStorableLine a@(Geospatial.PointXY aX aY) b@(Geospatial.PointXY bX bY))
  | a == b = distance p a
  | u < 0 = distance p a
  | u > 1 = distance p b
  | otherwise = distance p (Geospatial.PointXY (aX + u * deltaX) (aY + u * deltaY))
  where
    (deltaX, deltaY) = (bX - aX, bY - aY)
    u = ((pX - aX) * deltaX + (pY - aY) * deltaY) / (deltaX * deltaX + deltaY * deltaY)
-- | Euclidean distance between two points.
distance :: Geospatial.PointXY -> Geospatial.PointXY -> Distance
distance (Geospatial.PointXY ax ay) (Geospatial.PointXY bx by) =
    let dx = ax - bx
        dy = ay - by
    in sqrt (dx ** 2 + dy ** 2)
-- | The first and last point of a sequence, in order.  Sequences with
-- fewer than two elements cannot match both the ':<|' and ':|>'
-- patterns at once and fall through to the empty sequence.
firstAndLastPoint :: Sequence.Seq Geospatial.PointXY -> Sequence.Seq Geospatial.PointXY
firstAndLastPoint ((first Sequence.:<| _) Sequence.:|> last) = Sequence.fromList [first, last]
firstAndLastPoint _ = Sequence.empty
-- | Drop the first element of a sequence; unlike 'Prelude.tail' this is
-- total: the empty sequence is returned unchanged.
tail :: Sequence.Seq a -> Sequence.Seq a
tail = Sequence.drop 1
| sitewisely/zellige | src/Data/Geometry/Simplify/DouglasPeucker.hs | apache-2.0 | 2,632 | 0 | 13 | 495 | 854 | 455 | 399 | 45 | 2 |
-----------------------------------------------------------------------------
-- Copyright 2019, Ideas project team. This file is distributed under the
-- terms of the Apache License 2.0. For more information, see the files
-- "LICENSE.txt" and "NOTICE.txt", which are included in the distribution.
-----------------------------------------------------------------------------
-- |
-- Maintainer : bastiaan.heeren@ou.nl
-- Stability : provisional
-- Portability : portable (depends on ghc)
--
-----------------------------------------------------------------------------
module Ideas.Common.Traversal.Tests
( testIterator, testNavigator, tests
, uniGen, listGen
) where
import Control.Monad
import Data.Maybe
import Data.Semigroup as Sem
import Ideas.Common.Traversal.Iterator
import Ideas.Common.Traversal.Navigator
import Ideas.Common.Traversal.Utils
import Ideas.Utils.TestSuite
import Ideas.Utils.Uniplate
import Test.QuickCheck hiding ((===))
-- | Law-style test suite for an 'Iterator' instance: 'previous'/'next'
-- are mutually inverse where defined, 'first'/'final' are the fixed
-- points of stepping, and 'position' tracks every primitive.
testIterator :: (Show a, Eq a, Iterator a) => String -> Gen a -> TestSuite
testIterator s gen = suite (s ++ " Iterator")
   [ suite "previous/next"
     [ prop gen "previous; next" $ hasPrevious ==>> previous >=> next ==! id
     , prop gen "next; previous" $ hasNext ==>> next >=> previous ==! id
     ]
   , suite "next/final"
     [ prop gen "isFinal" $ isFinal . final
     , prop gen "next to final" $ fixp next === final
     ]
   , suite "previous/first"
     [ prop gen "isFirst" $ isFirst . first
     , prop gen "previous to first" $ fixp previous === first
     ]
   , suite "position"
     [ prop gen "pos previous" $
          hasPrevious ==>> fmap position . previous ==! pred . position
     , prop gen "pos next" $
          hasNext ==>> fmap position . next ==! succ . position
     , prop gen "pos first" $
          (==0) . position . first
     , prop gen "pos final" $
          position . final === position . fixp next
     ]
   ]
-- | Law-style test suite for a 'Navigator' instance: vertical moves
-- ('up'/'down'/'downLast'), horizontal moves ('left'/'right'), and the
-- correspondence between moves and 'location'.
testNavigator :: (Show a, Eq a, Navigator a) => String -> Gen a -> TestSuite
testNavigator s gen = suite (s ++ " Navigator")
   [ suite "up/down"
     [ prop gen "down; up" $ hasDown ==>> down >=> up ==! id
     , prop gen "up; down" $ hasUp ==>> up >=> down ==! leftMost
     , prop gen "up; downLast" $ hasUp ==>> up >=> downLast ==! rightMost
     ]
   , suite "left/right"
     [ prop gen "right; left" $ hasRight ==>> right >=> left ==! id
     , prop gen "left; right" $ hasLeft ==>> left >=> right ==! id
     ]
   , suite "up/left+right"
     [ prop gen "left; up" $ hasLeft ==>> left >=> up === up
     , prop gen "right; up" $ hasRight ==>> right >=> up === up
     ]
   , suite "down/downLast"
     [ prop gen "down; rightMost" $ fmap rightMost . down === downLast
     , prop gen "downLast; leftMost" $ fmap leftMost . downLast === down
     , prop gen "down is leftMost" $ isNothing . (down >=> left)
     , prop gen "downLast is rightMost" $ isNothing . (downLast >=> right)
     ]
   , suite "location"
     [ prop gen "loc up" $ hasUp ==>>
          fmap locationList . up ==! init . locationList
     , prop gen "loc down" $ hasDown ==>>
          fmap locationList . down ==! (++[0]) . locationList
     , prop gen "loc downLast" $ hasDown ==>>
          fmap locationList . downLast ==! (\a -> locationList a ++ [arity a-1])
     , prop gen "loc left" $ hasLeft ==>>
          fmap locationList . left ==! changeLast pred . locationList
     , prop gen "loc right" $ hasRight ==>>
          fmap locationList . right ==! changeLast succ . locationList
     , prop gen "childnr" $
          childnr === fromMaybe 0 . listToMaybe . reverse . locationList
     ]
   ]
-- | The location of the focus as a plain list of child indices.
locationList :: Navigator a => a -> [Int]
locationList a = fromLocation (location a)
-------------------------------------------------------------------------
-- tests
-- | All iterator and navigator law suites combined.
tests :: TestSuite
tests =
   suite "Iterators"
      [ testIterator "List" listGen
      , testIterator "Mirror" $ makeMirror <$> listGen
      , testIterator "Leafs" $ makeLeafs <$> uniGen
      , testIterator "PreOrder" $ makePreOrder <$> uniGen
      , testIterator "PostOrder" $ makePostOrder <$> uniGen
      , testIterator "Horizontal" $ makeHorizontal <$> uniGen
      , testIterator "LevelOrder" $ makeLevelOrder <$> uniGen
      ] <>
   suite "Navigators"
      [ testNavigator "Uniplate" uniGen
      , testNavigator "Mirror" $ makeMirror <$> uniGen
      ]

-- | Convenience runner for interactive use; the leading underscore
-- keeps unused-binding warnings quiet.
_go :: IO ()
_go = runTestSuiteResult True tests >>= print
-------------------------------------------------------------------------
-- test utils
infixr 0 ===, ==!

-- | Pointwise equality of two functions at a single test input.
(===) :: Eq b => (a -> b) -> (a -> b) -> a -> Bool
(===) f g = \a -> f a == g a

-- | Like '===', but the left function is partial: it must return
-- 'Just' whatever the right function produces.
(==!) :: Eq b => (a -> Maybe b) -> (a -> b) -> a -> Bool
(==!) f g = f === (Just . g)
infixr 0 ==>>

-- | Guarded check: only inputs satisfying the predicate are tested
-- (via QuickCheck's '==>').
(==>>) :: Testable prop => (a -> Bool) -> (a -> prop) -> a -> Property
(p ==>> f) a = p a ==> f a

-- | Named QuickCheck property drawn from a custom generator.
prop :: (Testable prop, Show a) => Gen a -> String -> (a -> prop) -> TestSuite
prop gen s = useProperty s . forAll gen
-- | Apply a function to the last element of a list, if there is one.
changeLast :: (a -> a) -> [a] -> [a]
changeLast f = go
  where
    go []       = []
    go [x]      = [f x]
    go (x : xs) = x : go xs
-- | Rose tree used as the test subject for the Uniplate-based
-- iterators and navigators.
data T a = T a [T a] deriving (Show, Eq)

instance Uniplate (T a) where
   uniplate (T a xs) = plate (T a) ||* xs

instance Arbitrary a => Arbitrary (T a) where
   arbitrary = sized genT
    where
      genT n = do
         a <- arbitrary
         -- Up to five children per node; the size budget halves at each
         -- level so generation terminates.
         i <- if n==0 then return 0 else choose (0, 5)
         xs <- vectorOf i (genT (n `div` 2))
         return (T a xs)
-- | Generator for list iterators over 'Int'.
listGen :: Gen (ListIterator Int)
listGen = arbitrary
uniGen :: Gen (UniplateNavigator (T Int))
uniGen = arbitrary | ideas-edu/ideas | src/Ideas/Common/Traversal/Tests.hs | apache-2.0 | 5,940 | 0 | 15 | 1,764 | 1,784 | 921 | 863 | 108 | 1 |
{-# LANGUAGE TypeFamilies, FlexibleInstances, FlexibleContexts #-}
module HEP.Automation.MadGraph.Model.ZpH where
import Text.Printf
import Text.Parsec
import Control.Monad.Identity
import Text.StringTemplate
import Text.StringTemplate.Helpers
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Model.Common
import System.FilePath ((</>))
-- | The Z' \"horizontal\" model (MadGraph 4 backend).
data ZpH = ZpH
         deriving Show

instance Model ZpH where
  -- Model parameters: the Z' mass and its right-handed coupling.
  data ModelParam ZpH = ZpHParam { massZp :: Double, gRZp :: Double }
                      deriving Show
  briefShow ZpH = "Zp"
  madgraphVersion _ = MadGraph4
  modelName ZpH = "zHorizontal_MG"
  modelFromString str = case str of
                          "zHorizontal_MG" -> Just ZpH
                          _ -> Nothing
  paramCard4Model ZpH = "param_card_zHorizontal.dat"
  -- Fill the param-card template with the mass, the coupling divided by
  -- sqrt 2, and the derived width.
  paramCardSetup tpath ZpH (ZpHParam m g) = do
    templates <- directoryGroup tpath
    return $ ( renderTemplateGroup
                 templates
                 [ ("masszp" , (printf "%.4e" m :: String))
                 , ("gRoverSqrtTwo" , (printf "%.4e" (g / (sqrt 2.0)) :: String))
                 , ("widthzp" , (printf "%.4e" (gammaWpZp m g) :: String)) ]
                 (paramCard4Model ZpH) ) ++ "\n\n\n"
  briefParamShow (ZpHParam m g) = "M"++show m++"G"++show g
  -- Inverse of 'briefParamShow'; errors on malformed input.
  interpreteParam str = let r = parse zphparse "" str
                        in case r of
                             Right param -> param
                             Left err -> error (show err)
-- | Parse a parameter string of the form \"M&lt;mass&gt;G&lt;coupling&gt;\"
-- (as produced by 'briefParamShow').
zphparse :: ParsecT String () Identity (ModelParam ZpH)
zphparse = do
  let number = many1 (oneOf "+-0123456789.")
  _ <- char 'M'
  massstr <- number
  _ <- char 'G'
  gstr <- number
  return (ZpHParam (read massstr) (read gstr))
gammaWpZp :: Double -> Double -> Double
gammaWpZp mass coup =
let r = mtop^(2 :: Int)/ mass^(2 :: Int)
in coup^(2 :: Int) / (16.0 * pi) *mass*( 1.0 - 1.5 * r + 0.5 * r^(3 :: Int))
| wavewave/madgraph-auto-model | src/HEP/Automation/MadGraph/Model/FQ8C1V.hs | bsd-2-clause | 1,934 | 0 | 19 | 578 | 611 | 322 | 289 | 46 | 1 |
{-# LANGUAGE BangPatterns, CPP, GeneralizedNewtypeDeriving #-}
-- |
-- Module : Data.Text.Foreign
-- Copyright : (c) 2009, 2010 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : bos@serpentine.com
-- Portability : GHC
--
-- Support for using 'Text' data with native code via the Haskell
-- foreign function interface.
module Data.Text.Foreign
(
-- * Interoperability with native code
-- $interop
I16
-- * Safe conversion functions
, fromPtr
, useAsPtr
, asForeignPtr
-- ** Encoding as UTF-8
, peekCStringLen
, withCStringLen
-- * Unsafe conversion code
, lengthWord16
, unsafeCopyToPtr
-- * Low-level manipulation
-- $lowlevel
, dropWord16
, takeWord16
) where
#if defined(ASSERTS)
import Control.Exception (assert)
#endif
#if MIN_VERSION_base(4,4,0)
import Control.Monad.ST.Unsafe (unsafeIOToST)
#else
import Control.Monad.ST (unsafeIOToST)
#endif
import Data.ByteString.Unsafe (unsafePackCStringLen, unsafeUseAsCStringLen)
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
import Data.Text.Internal (Text(..), empty)
import Data.Text.Unsafe (lengthWord16)
import Data.Word (Word16)
import Foreign.C.String (CStringLen)
import Foreign.ForeignPtr (ForeignPtr, mallocForeignPtrArray, withForeignPtr)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (Ptr, castPtr, plusPtr)
import Foreign.Storable (peek, poke)
import qualified Data.Text.Array as A
-- $interop
--
-- The 'Text' type is implemented using arrays that are not guaranteed
-- to have a fixed address in the Haskell heap. All communication with
-- native code must thus occur by copying data back and forth.
--
-- The 'Text' type's internal representation is UTF-16, using the
-- platform's native endianness. This makes copied data suitable for
-- use with native libraries that use a similar representation, such
-- as ICU. To interoperate with native libraries that use different
-- internal representations, such as UTF-8 or UTF-32, consider using
-- the functions in the 'Data.Text.Encoding' module.
-- | A type representing a number of UTF-16 code units.
--
-- A plain 'Int' under the hood; the newtype keeps code-unit counts
-- from being confused with 'Char' counts at API boundaries.
newtype I16 = I16 Int
    deriving (Bounded, Enum, Eq, Integral, Num, Ord, Read, Real, Show)
-- | /O(n)/ Create a new 'Text' from a 'Ptr' 'Word16' by copying the
-- contents of the array.
fromPtr :: Ptr Word16 -- ^ source array
        -> I16 -- ^ length of source array (in 'Word16' units)
        -> IO Text
fromPtr _ (I16 0) = return empty
fromPtr ptr (I16 len) =
#if defined(ASSERTS)
    assert (len > 0) $
#endif
    return $! Text arr 0 len
  where
    -- Copy the foreign buffer into a fresh mutable array (advancing the
    -- pointer two bytes per 'Word16'); 'A.run' freezes the result.
    arr = A.run (A.new len >>= copy)
    copy marr = loop ptr 0
      where
        loop !p !i | i == len = return marr
                   | otherwise = do
                       A.unsafeWrite marr i =<< unsafeIOToST (peek p)
                       loop (p `plusPtr` 2) (i + 1)
-- $lowlevel
--
-- Foreign functions that use UTF-16 internally may return indices in
-- units of 'Word16' instead of characters. These functions may
-- safely be used with such indices, as they will adjust offsets if
-- necessary to preserve the validity of a Unicode string.
-- | /O(1)/ Return the prefix of the 'Text' of @n@ 'Word16' units in
-- length.
--
-- If @n@ would cause the 'Text' to end inside a surrogate pair, the
-- end of the prefix will be advanced by one additional 'Word16' unit
-- to maintain its validity.
takeWord16 :: I16 -> Text -> Text
takeWord16 (I16 n) t@(Text arr off len)
    | n <= 0               = empty
    | n >= len || m >= len = t
    | otherwise            = Text arr off m
  where
    -- If unit n-1 is a lead surrogate (0xD800-0xDBFF) the cut would
    -- split a surrogate pair, so extend the prefix by one extra unit.
    m | w < 0xD800 || w > 0xDBFF = n
      | otherwise                = n+1
    w = A.unsafeIndex arr (off+n-1)
-- | /O(1)/ Return the suffix of the 'Text', with @n@ 'Word16' units
-- dropped from its beginning.
--
-- If @n@ would cause the 'Text' to begin inside a surrogate pair, the
-- beginning of the suffix will be advanced by one additional 'Word16'
-- unit to maintain its validity.
dropWord16 :: I16 -> Text -> Text
dropWord16 (I16 n) t@(Text arr off len)
    | n <= 0               = t
    | n >= len || m >= len = empty
    | otherwise            = Text arr (off+m) (len-m)
  where
    -- If unit n-1 is a lead surrogate (0xD800-0xDBFF) the suffix would
    -- start mid-pair, so advance its start by one extra unit.
    m | w < 0xD800 || w > 0xDBFF = n
      | otherwise                = n+1
    w = A.unsafeIndex arr (off+n-1)
-- | /O(n)/ Copy a 'Text' to an array. The array is assumed to be big
-- enough to hold the contents of the entire 'Text'.
unsafeCopyToPtr :: Text -> Ptr Word16 -> IO ()
unsafeCopyToPtr (Text arr off len) ptr = loop ptr off
  where
    end = off + len
    -- Write one 'Word16' (two bytes) per step.
    loop !p !i | i == end = return ()
               | otherwise = do
                   poke p (A.unsafeIndex arr i)
                   loop (p `plusPtr` 2) (i + 1)
-- | /O(n)/ Perform an action on a temporary, mutable copy of a
-- 'Text'. The copy is freed as soon as the action returns.
useAsPtr :: Text -> (Ptr Word16 -> I16 -> IO a) -> IO a
useAsPtr t@(Text _arr _off len) action =
    -- Two bytes per code unit; 'allocaBytes' frees the buffer when the
    -- action finishes (normally or by exception).
    allocaBytes (len * 2) $ \buf -> do
      unsafeCopyToPtr t buf
      action (castPtr buf) (fromIntegral len)
-- | /O(n)/ Make a mutable copy of a 'Text', returning the buffer and
-- its length in 'Word16' units.
asForeignPtr :: Text -> IO (ForeignPtr Word16, I16)
asForeignPtr t@(Text _arr _off len) = do
  fp <- mallocForeignPtrArray len
  withForeignPtr fp $ unsafeCopyToPtr t
  return (fp, I16 len)
-- | /O(n)/ Decode a C string with explicit length, which is assumed
-- to have been encoded as UTF-8. If decoding fails, a
-- 'UnicodeException' is thrown.
--
-- @since 1.0.0.0
peekCStringLen :: CStringLen -> IO Text
peekCStringLen cs = do
  -- 'unsafePackCStringLen' aliases the caller's buffer without copying;
  -- the strict return presumably forces the decode before the buffer
  -- can be invalidated -- confirm decodeUtf8's strictness.
  bs <- unsafePackCStringLen cs
  return $! decodeUtf8 bs
-- | Marshal a 'Text' into a C string encoded as UTF-8 in temporary
-- storage, with explicit length information. The encoded string may
-- contain NUL bytes, and is not followed by a trailing NUL byte.
--
-- The temporary storage is freed when the subcomputation terminates
-- (either normally or via an exception), so the pointer to the
-- temporary storage must /not/ be used after this function returns.
--
-- @since 1.0.0.0
withCStringLen :: Text -> (CStringLen -> IO a) -> IO a
withCStringLen t act = unsafeUseAsCStringLen (encodeUtf8 t) act
| bgamari/text | src/Data/Text/Foreign.hs | bsd-2-clause | 6,123 | 0 | 15 | 1,412 | 1,248 | 677 | 571 | 78 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
module Spec.Graph where
import Spec.Spec
import Spec.Bitmask
import Spec.Command as Command
import Spec.Constant as Constant
import Spec.Enum
import Spec.Type hiding ( AnInclude, ADefine, ABaseType, APlatformType
, ABitmaskType, AHandleType, AnEnumType
, AFuncPointerType, AStructType, AUnionType )
import qualified Spec.Type as T
import Data.HashMap.Lazy as M
import Data.HashSet as S
import Data.Maybe(maybeToList, catMaybes)
import Language.C.Types
import Control.Arrow((&&&))
import Prelude hiding (Enum)
import Write.Utils
-- | Info is a more useful representation of the specification
data SpecGraph = SpecGraph{ gVertices :: [Vertex] -- ^ every named entity
                          , gNameVertexMap :: M.HashMap String Vertex -- ^ lookup by name
                          }

-- | A named spec entity together with direct links to the entities it
-- depends on.
data Vertex = Vertex{ vName :: String
                    , vDependencies :: [Vertex]
                    , vSourceEntity :: SourceEntity
                    }

-- | The spec entity a vertex was built from; mirrors the declaration
-- kinds of the parsed specification.
data SourceEntity = AnInclude Include
                  | ADefine Define
                  | ABaseType BaseType
                  | APlatformType PlatformType
                  | ABitmaskType BitmaskType
                  | AHandleType HandleType
                  | AnEnumType EnumType
                  | AFuncPointerType FuncPointerType
                  | AStructType StructType
                  | AUnionType UnionType
                  | ACommand Command
                  | AnEnum Enum
                  | ABitmask Bitmask
                  | AConstant Constant
  deriving (Show)
-- | Look up the name in the graph, error if it's not there
requiredLookup :: SpecGraph -> String -> Vertex
requiredLookup graph name =
  maybe (error ("Failed to find required name in graph: " ++ name))
        id
        (M.lookup name (gNameVertexMap graph))
-- | Names of every vertex reachable from the given root names
-- (roots included).  Calls 'error' if a root is not in the graph.
allReachableFromNames :: SpecGraph -> [String] -> S.HashSet String
allReachableFromNames graph names = allReachable vertices
  where vertices = getVertex <$> names
        getVertex name =
          case M.lookup name (gNameVertexMap graph) of
            Nothing ->
              error ("allReachableFromNames given name not in graph: " ++
                     name)
            Just v -> v
-- | Names of every vertex reachable from the given roots (roots
-- included): a simple worklist traversal over 'vDependencies'.
allReachable :: [Vertex] -> S.HashSet String
allReachable vs = go (S.fromList (vName <$> vs)) (concatMap vDependencies vs)
  where go s [] = s
        go s (x:xs) = if S.member (vName x) s
                        then go s xs
                        else go (S.insert (vName x) s) (xs ++ vDependencies x)
--------------------------------------------------------------------------------
-- Converting a spec
--------------------------------------------------------------------------------
-- | Build the dependency graph for a whole spec.
--
-- NOTE: 'graph' is defined recursively ("tying the knot"): the
-- *ToVertex helpers receive the finished graph so vertices can link to
-- each other directly.  This relies on laziness.
getSpecGraph :: Spec -> SpecGraph
getSpecGraph spec = graph
  where gVertices = (typeDeclToVertex graph <$> sTypes spec) ++
                    (constantToVertex graph <$> sConstants spec) ++
                    (enumToVertex graph <$> sEnums spec) ++
                    (bitmaskToVertex graph <$> sBitmasks spec) ++
                    (commandToVertex graph <$> sCommands spec)
        gNameVertexMap = M.fromList ((vName &&& id) <$> gVertices)
        graph = SpecGraph{..}
--------------------------------------------------------------------------------
-- Boring boilerplate conversions
--------------------------------------------------------------------------------
-- | Turn a type declaration into a graph vertex, resolving dependency
-- names against the (lazily knot-tied) graph.  Missing mandatory
-- dependencies are an 'error'; optional ones use 'lookupNameMay'.
typeDeclToVertex :: SpecGraph -> TypeDecl -> Vertex
typeDeclToVertex graph td =
  let lookupNameMay name = M.lookup name (gNameVertexMap graph)
      lookupName name =
        case lookupNameMay name of
          Nothing -> error ("Depended upon name not in spec: " ++ name)
          Just v -> v
  in case td of
       T.AnInclude i ->
         Vertex{ vName = iName i
               , vDependencies = []
               , vSourceEntity = AnInclude i
               }
       T.ADefine d ->
         Vertex{ vName = dName d
               , vDependencies = []
               , vSourceEntity = ADefine d
               }
       T.ABaseType bt ->
         Vertex{ vName = btName bt
               , vDependencies = lookupName <$>
                                 cTypeDependencyNames (btCType bt)
               , vSourceEntity = ABaseType bt
               }
       T.APlatformType pt ->
         Vertex{ vName = ptName pt
               , vDependencies = [lookupName (ptRequires pt)]
               , vSourceEntity = APlatformType pt
               }
       T.ABitmaskType bmt ->
         Vertex{ vName = bmtName bmt
                 -- Besides its C type and explicit requirement, a
                 -- "...Flags" bitmask also depends on its matching
                 -- "...FlagBits" vertex when one exists.
               , vDependencies = (fmap lookupName $
                                   (cTypeDependencyNames (bmtCType bmt) ++
                                    maybeToList (bmtRequires bmt)))
                                 ++ maybeToList
                                    (lookupNameMay =<< swapSuffix "Flags" "FlagBits" (bmtName bmt))
               , vSourceEntity = ABitmaskType bmt
               }
       T.AHandleType ht ->
         Vertex{ vName = htName ht
                 -- Handle C-type dependencies may be absent from the
                 -- graph, hence the lenient catMaybes lookup.
               , vDependencies = catMaybes . fmap lookupNameMay $
                                 cTypeDependencyNames (htCType ht)
               , vSourceEntity = AHandleType ht
               }
       T.AnEnumType et ->
         Vertex{ vName = etName et
               , vDependencies = []
               , vSourceEntity = AnEnumType et
               }
       T.AFuncPointerType fpt ->
         Vertex{ vName = fptName fpt
               , vDependencies = lookupName <$> cTypeDependencyNames (fptCType fpt)
               , vSourceEntity = AFuncPointerType fpt
               }
       T.AStructType st ->
         Vertex{ vName = stName st
               , vDependencies = lookupName <$>
                                 concatMap memberDependencyNames (stMembers st)
               , vSourceEntity = AStructType st
               }
       T.AUnionType ut ->
         Vertex{ vName = utName ut
               , vDependencies = lookupName <$>
                                 concatMap memberDependencyNames (utMembers ut)
               , vSourceEntity = AUnionType ut
               }
-- | Turn a command into a graph vertex.  A command depends on its
-- return type and on every parameter type; all must be in the graph.
commandToVertex :: SpecGraph -> Command -> Vertex
commandToVertex graph command =
  let lookupName name =
        case M.lookup name (gNameVertexMap graph) of
          Nothing -> error ("Depended upon name not in spec: " ++ name)
          Just v -> v
  in Vertex{ vName = Command.cName command
           , vDependencies = let parameterTypes = pType <$> cParameters command
                                 allTypes = cReturnType command : parameterTypes
                             in lookupName <$>
                                concatMap cTypeDependencyNames allTypes
           , vSourceEntity = ACommand command
           }
-- | Turn an enumeration into a graph vertex.  Enums depend on nothing,
-- so the graph argument is unused (the previous version bound an
-- unused @lookupNameMay@ helper; removed to silence -Wall and match
-- 'bitmaskToVertex'/'constantToVertex').
enumToVertex :: SpecGraph -> Enum -> Vertex
enumToVertex _ enum =
  Vertex{ vName = eName enum
        , vDependencies = []
        , vSourceEntity = AnEnum enum
        }
-- | A raw 'Bitmask' is a leaf in the dependency graph: no dependencies.
bitmaskToVertex :: SpecGraph -> Bitmask -> Vertex
bitmaskToVertex _ bitmask = Vertex
  { vName         = bmName bitmask
  , vDependencies = []
  , vSourceEntity = ABitmask bitmask
  }
-- | A 'Constant' is likewise a leaf: it depends on nothing.
constantToVertex :: SpecGraph -> Constant -> Vertex
constantToVertex _ constant = Vertex
  { vName         = Constant.cName constant
  , vDependencies = []
  , vSourceEntity = AConstant constant
  }
--
-- Converting from a graph
--
-- | Project the 'Bitmask' out of a vertex, if that is what it wraps.
vertexToBitmask :: Vertex -> Maybe Bitmask
vertexToBitmask v
  | ABitmask bm <- vSourceEntity v = Just bm
  | otherwise                      = Nothing
-- | Project the 'Constant' out of a vertex, if that is what it wraps.
vertexToConstant :: Vertex -> Maybe Constant
vertexToConstant v
  | AConstant c <- vSourceEntity v = Just c
  | otherwise                      = Nothing
-- | All constants stored anywhere in the graph, in vertex order.
getGraphConstants :: SpecGraph -> [Constant]
getGraphConstants = catMaybes . map vertexToConstant . gVertices
-- | The underlying C type of a vertex, for the entity kinds that carry one.
vertexCType :: Vertex -> Maybe CType
vertexCType v = case vSourceEntity v of
  ABaseType bt         -> Just (btCType bt)
  ABitmaskType bmt     -> Just (bmtCType bmt)
  AHandleType ht       -> Just (htCType ht)
  AFuncPointerType fpt -> Just (fptCType fpt)
  _                    -> Nothing
-- | Every @(name, C type)@ pair present in the graph, in vertex order.
getGraphCTypes :: SpecGraph -> [(String, CType)]
getGraphCTypes graph = catMaybes (map named (gVertices graph))
  where
    named v = fmap (\t -> (vName v, t)) (vertexCType v)
-- | Project the 'UnionType' out of a vertex, if that is what it wraps.
vertexToUnionType :: Vertex -> Maybe UnionType
vertexToUnionType v
  | AUnionType u <- vSourceEntity v = Just u
  | otherwise                       = Nothing
-- | All union types in the graph, in vertex order.
getGraphUnionTypes :: SpecGraph -> [UnionType]
getGraphUnionTypes = catMaybes . map vertexToUnionType . gVertices
-- | Project the 'StructType' out of a vertex, if that is what it wraps.
vertexToStructType :: Vertex -> Maybe StructType
vertexToStructType v
  | AStructType s <- vSourceEntity v = Just s
  | otherwise                        = Nothing
-- | All struct types in the graph, in vertex order.
getGraphStructTypes :: SpecGraph -> [StructType]
getGraphStructTypes = catMaybes . map vertexToStructType . gVertices
-- | Project the 'EnumType' out of a vertex, if that is what it wraps.
vertexToEnumType :: Vertex -> Maybe EnumType
vertexToEnumType v
  | AnEnumType et <- vSourceEntity v = Just et
  | otherwise                        = Nothing
-- | All enum types in the graph, in vertex order.
getGraphEnumTypes :: SpecGraph -> [EnumType]
getGraphEnumTypes = catMaybes . map vertexToEnumType . gVertices
------------------------------------------------------------------------------
-- predicates
------------------------------------------------------------------------------
-- | Is this vertex an include entity?
isIncludeVertex :: Vertex -> Bool
isIncludeVertex vertex = case vSourceEntity vertex of
  AnInclude _ -> True
  _           -> False
-- | Does this vertex's entity give rise to a type constructor when the
-- spec is rendered?  The case is intentionally exhaustive (no wildcard)
-- so that adding a new entity constructor forces a decision here.
isTypeConstructor :: Vertex -> Bool
isTypeConstructor v =
  case vSourceEntity v of
    AnInclude _ -> False
    ADefine _ -> False
    ABaseType _ -> True
    APlatformType _ -> False
    ABitmaskType _ -> True
    AHandleType _ -> True
    AnEnumType _ -> True
    AFuncPointerType _ -> False
    AStructType _ -> True
    AUnionType _ -> True
    ACommand _ -> False
    AnEnum _ -> True
    ABitmask _ -> True
    AConstant _ -> False
------------------------------------------------------------------------------
-- Dependency utils
------------------------------------------------------------------------------
-- | The names a C type depends on: primitive specifiers map to their C
-- spelling, named and struct specifiers to their identifier, and
-- pointer\/array\/prototype types recurse into their components.
--
-- NOTE(review): this is partial — any type specifier not listed below
-- (e.g. sized integer specifiers) hits the 'error' catch-all.
cTypeDependencyNames :: CType -> [String]
cTypeDependencyNames cType =
  case cType of
    TypeSpecifier _ Void
      -> ["void"]
    TypeSpecifier _ (Char Nothing)
      -> ["char"]
    TypeSpecifier _ Float
      -> ["float"]
    TypeSpecifier _ (TypeName t)
      -> [unCIdentifier t]
    TypeSpecifier _ (Struct t)
      -> [unCIdentifier t]
    Ptr _ t
      -> cTypeDependencyNames t
    Array s t
      -- An array depends on its element type and on any identifier used
      -- as its size expression.
      -> arraySizeDependencyNames s ++ cTypeDependencyNames t
    Proto ret ps
      -- A function prototype depends on its return and parameter types.
      -> cTypeDependencyNames ret ++ concatMap parameterTypeNames ps
    _ -> error ("Failed to get depended on names for C type:\n" ++ show cType)
-- | Names an array size expression depends on: only a size given by an
-- identifier introduces a dependency; literal, unsized, and variably
-- sized arrays depend on nothing.
arraySizeDependencyNames :: ArrayType CIdentifier -> [String]
arraySizeDependencyNames arraySize =
  case arraySize of
    VariablySized -> []
    Unsized -> []
    SizedByInteger _ -> []  -- a literal size names nothing (binding was unused)
    SizedByIdentifier i -> [unCIdentifier i]
-- | Names a function parameter's declared type depends on.
parameterTypeNames :: ParameterDeclaration CIdentifier -> [String]
parameterTypeNames (ParameterDeclaration _ t) = cTypeDependencyNames t
-- | Names a struct member's type depends on.
memberDependencyNames :: StructMember -> [String]
memberDependencyNames = cTypeDependencyNames . smCType
| oldmanmike/vulkan | generate/src/Spec/Graph.hs | bsd-3-clause | 11,364 | 0 | 19 | 3,620 | 2,714 | 1,398 | 1,316 | 242 | 14 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Web.RTBBidder.Types.Request.User (User(..)) where
import qualified Data.Aeson as AESON
import Data.Aeson ((.=), (.:), (.:?), (.!=))
import qualified Data.Text as TX
import Web.RTBBidder.Types.Request.Data (Data(..))
import Web.RTBBidder.Types.Request.Geo (Geo(..))
-- | An OpenRTB bid-request @user@ object.  Field-to-JSON-key mapping is
-- defined by the 'AESON.FromJSON'\/'AESON.ToJSON' instances below; every
-- field is optional except 'userData', which decodes to @[]@ when the
-- @"data"@ key is absent.
data User = User
  { userId :: Maybe TX.Text          -- JSON key @"id"@
  , userBuyeruid :: Maybe TX.Text    -- JSON key @"buyeruid"@
  , userYob :: Maybe Int             -- JSON key @"yob"@ (year of birth per OpenRTB — confirm)
  , userGender :: Maybe TX.Text      -- JSON key @"gender"@
  , userKeywords :: Maybe TX.Text    -- JSON key @"keywords"@
  , userCustomdata :: Maybe TX.Text  -- JSON key @"customdata"@
  , userGeo :: Maybe Geo             -- JSON key @"geo"@
  , userData :: [Data]               -- JSON key @"data"@, defaults to []
  , userExt :: Maybe AESON.Value     -- JSON key @"ext"@, opaque extension blob
  } deriving (Show, Eq)
-- | Decode a @user@ object.  All keys are optional; @"data"@ falls back
-- to the empty list when missing.  The applicative chain follows the
-- record's field order exactly.
instance AESON.FromJSON User where
  parseJSON = AESON.withObject "user" $ \o ->
    User
      <$> o .:? "id"
      <*> o .:? "buyeruid"
      <*> o .:? "yob"
      <*> o .:? "gender"
      <*> o .:? "keywords"
      <*> o .:? "customdata"
      <*> o .:? "geo"
      <*> o .:? "data" .!= []
      <*> o .:? "ext"
-- | Encode a @user@ object.  Note that absent ('Nothing') fields are
-- serialized as JSON @null@ rather than being omitted from the object.
instance AESON.ToJSON User where
  toJSON User{..} = AESON.object
    [ "id" .= userId
    , "buyeruid" .= userBuyeruid
    , "yob" .= userYob
    , "gender" .= userGender
    , "keywords" .= userKeywords
    , "customdata" .= userCustomdata
    , "geo" .= userGeo
    , "data" .= userData
    , "ext" .= userExt
    ]
| hiratara/hs-rtb-bidder | src/Web/RTBBidder/Types/Request/User.hs | bsd-3-clause | 1,372 | 0 | 12 | 304 | 447 | 252 | 195 | 42 | 0 |
{-# LANGUAGE DeriveDataTypeable, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, FlexibleContexts #-}
import XMonad
import XMonad.ManageHook
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Layout.NoBorders
import XMonad.Layout.Maximize
import XMonad.Util.Run (spawnPipe)
import Codec.Binary.UTF8.String
import System.Posix.Process
import System.IO
import System.Posix.IO
import DBus
import DBus.Client
import qualified XMonad.Util.ExtensibleState as XS
import Control.Monad
import Data.Monoid
import Graphics.X11.ExtraTypes.XF86 -- For media keys.
import qualified Data.Map as M
import Control.Applicative
import System.IO (hPutStrLn)
import System.Directory (getHomeDirectory)
import System.FilePath ((</>))
import System.Process
import System.Posix.Types
-- | Entry point: assemble and run the xmonad configuration.
-- 'alterKeys' must wrap the config before 'xmonad' consumes it so the
-- custom bindings take precedence over the defaults.
--
-- NOTE(review): 'restartXmobar2'' (used in @startupHook@) is only
-- defined inside the commented-out \"Variant\" sections below — confirm
-- this module compiles as committed.
main :: IO ()
main = do
    -- FIXME: Spawn process directly, not through shell.
    --runFehBg
    --_ <- runTrayer
    --xmPipe <- spawnPipe ("xmobar ~/.xmobarrc")
    xmonad
      . alterKeys myKeys
      $ defaultConfig
          { manageHook = manageDocks <+> testVlc <+> manageHook defaultConfig
          , layoutHook = smartBorders . avoidStruts $ myLayout
          --, layoutHook = lessBorders OtherIndicated $ avoidStruts $ myLayout
          --, layoutHook = avoidStruts $ myLayout
          , logHook = traceXS >> logToXmobar
          --, startupHook = runP xmobarP >> return ()
          , startupHook = traceXS >> restartXmobar2'
          , modMask = mod4Mask
          --, handleEventHook = toggleDocksHook 30 xK_v <+> myDocksEventHook
          , handleEventHook = myDocksEventHook
          --, modMask = controlMask
          , focusFollowsMouse = False
          , terminal = "xterm -fg black -bg white"
          --, layoutHook = smartBorders $ layoutHook xfceConfig
          }
-- | Like @XMonad.Util.Run.spawnPipe@, but executes the file directly
-- instead of going through a shell: the first list element is the
-- executable path (PATH is /not/ searched, per 'executeFile' with a
-- 'False' search flag), the rest are its arguments.  Returns a
-- line-buffered handle connected to the child's stdin and the child's
-- process id.
--
-- The original was non-exhaustive and crashed with an opaque pattern
-- failure on @[]@; we now fail with a descriptive message.
spawnPipe' :: [String] -> X (Handle, ProcessID)
spawnPipe' [] = error "spawnPipe': empty command line"
spawnPipe' (x : xs) = io $ do
    (rd, wr) <- createPipe
    -- Don't leak the write end into unrelated children spawned later.
    setFdOption wr CloseOnExec True
    h <- fdToHandle wr
    hSetBuffering h LineBuffering
    p <- xfork $ do
        _ <- dupTo rd stdInput
        executeFile x False xs Nothing
    -- Parent keeps only the write end.
    closeFd rd
    return (h, p)
-- | Push the current workspace\/title status line into the xmobar pipe
-- stored in extensible state, if one exists.
--
-- NOTE(review): 'XmobarHandle' is only defined inside the commented-out
-- \"Variant\" sections below — confirm this module compiles as committed.
logToXmobar :: X ()
logToXmobar = do
    (XmobarHandle m) <- XS.get
    whenJust m $ \xmPipe ->
      dynamicLogWithPP xmobarPP
        { ppOutput = hPutStrLn xmPipe
        , ppTitle = xmobarColor "green" ""
                      . shorten 50
        }
-- | Debug helper: dump a marker plus the stored xmobar pid (if any) to
-- xmonad's trace output.
--
-- NOTE(review): 'XmobarPID2' and the 'view' lens method are only defined
-- inside the commented-out \"Variant\" sections below — confirm this
-- module compiles as committed.
traceXS :: X ()
traceXS = do
    trace "Abc"
    mp <- XS.get
    whenJust (view (mp :: XmobarPID2)) $ trace . show
{-
-- Variant 1: Data type and (Maybe a) instance.
traceXS :: X ()
traceXS = do
trace "Abc"
mp <- XS.get
whenJust (mp :: Maybe XmobarPID) $ trace . show
data Restartable a = Restartable
{ killP :: a -> X ()
, runP :: X a
}
restartP :: (ExtensionClass (Maybe a)) => Restartable a -> X ()
restartP r = do
mp <- XS.get
whenJust mp (killP r)
p' <- runP r
XS.put (Just p')
instance (Show a, Read a, Typeable a) => ExtensionClass (Maybe a) where
initialValue = Nothing
extensionType = PersistentExtension
-- For data type..
newtype XmobarPID = XmobarPID ProcessID
deriving (Show, Read, Typeable)
newtype XmobarHandle = XmobarHandle (Maybe Handle)
deriving (Typeable)
instance ExtensionClass XmobarHandle where
initialValue = XmobarHandle Nothing
xmobarP :: Restartable XmobarPID
xmobarP = Restartable killXmobar runXmobar
where
killXmobar :: XmobarPID -> X ()
killXmobar (XmobarPID p) = io $ spawn ("kill " ++ show p)
runXmobar :: X XmobarPID
runXmobar = do
(h, p) <- spawnPipe' ["/usr/bin/xmobar", "/home/dmitriym/.xmobarrc"]
XS.put (XmobarHandle (Just h))
return (XmobarPID p)
restartXmobar :: X ()
restartXmobar = restartP xmobarP
-}
{-
-- Variant 2. Class and (Maybe a) instance.
traceXS :: X ()
traceXS = do
trace "Abc"
mp <- XS.get
whenJust (mp :: Maybe XmobarPID) $ trace . show
class RestartClass a where
killP' :: a -> X ()
runP' :: X a
restartP' :: (ExtensionClass (Maybe a), RestartClass a) => X a
restartP' = do
mp <- XS.get
whenJust mp killP'
p' <- runP'
XS.put (Just p' `asTypeOf` mp)
return p'
instance (Show a, Read a, Typeable a) => ExtensionClass (Maybe a) where
initialValue = Nothing
extensionType = PersistentExtension
-- For data type..
newtype XmobarPID = XmobarPID ProcessID
deriving (Show, Read, Typeable)
newtype XmobarHandle = XmobarHandle (Maybe Handle)
deriving (Typeable)
instance ExtensionClass XmobarHandle where
initialValue = XmobarHandle Nothing
instance RestartClass XmobarPID where
killP' (XmobarPID p) = io $ spawn ("kill " ++ show p)
runP' = do
(h, p) <- spawnPipe' ["/usr/bin/xmobar", "/home/dmitriym/.xmobarrc"]
XS.put (XmobarHandle (Just h))
return (XmobarPID p)
restartXmobar' :: X ()
restartXmobar' = do
p <- restartP'
let _ = p `asTypeOf` XmobarPID undefined
return ()
-}
{-
-- Variant 3. Data type with Maybe and Lens a (Maybe ..).
traceXS :: X ()
traceXS = do
trace "Abc"
mp <- XS.get
whenJust (view (mp :: XmobarPID2)) $ trace . show
data Restartable a = Restartable
{ killP :: a -> X ()
, runP :: X a
}
newtype XmobarPID = XmobarPID ProcessID
deriving (Show, Read, Typeable)
newtype XmobarHandle = XmobarHandle (Maybe Handle)
deriving (Typeable)
instance ExtensionClass XmobarHandle where
initialValue = XmobarHandle Nothing
instance (Show a, Read a, Typeable a) => ExtensionClass (Maybe a) where
initialValue = Nothing
extensionType = PersistentExtension
-- For data type..
xmobarP :: Restartable XmobarPID
xmobarP = Restartable killXmobar runXmobar
where
killXmobar :: XmobarPID -> X ()
killXmobar (XmobarPID p) = io $ spawn ("kill " ++ show p)
runXmobar :: X XmobarPID
runXmobar = do
(h, p) <- spawnPipe' ["/usr/bin/xmobar", "/home/dmitriym/.xmobarrc"]
XS.put (XmobarHandle (Just h))
return (XmobarPID p)
newtype XmobarPID2 = XmobarPID2 (Maybe ProcessID)
deriving (Typeable, Show, Read)
instance ExtensionClass XmobarPID2 where
initialValue = XmobarPID2 Nothing
extensionType = PersistentExtension
class Lens a b | a -> b where
view :: a -> b
set :: b -> a -> a
instance Lens XmobarPID2 (Maybe XmobarPID) where
view (XmobarPID2 x) = fmap XmobarPID x
set (Just (XmobarPID x)) _ = XmobarPID2 (Just x)
set Nothing z = z
restartP2 :: (ExtensionClass a, Lens a (Maybe b)) => Restartable b -> X a
restartP2 r = do
mp <- XS.get
whenJust (view mp) (killP r)
p' <- runP r
let mp' = set (Just p') mp
XS.put mp'
return mp'
restartXmobar2 :: X ()
restartXmobar2 = do
p <- restartP2 xmobarP
let _ = p `asTypeOf` XmobarPID2 undefined
return ()
-}
{-
-- Variant 4. RestartClass with Lens.
traceXS :: X ()
traceXS = do
trace "Abc"
mp <- XS.get
whenJust (view (mp :: XmobarPID2)) $ trace . show
class RestartClass a where
killP' :: a -> X ()
runP' :: X a
newtype XmobarPID = XmobarPID ProcessID
deriving (Show, Read, Typeable)
newtype XmobarHandle = XmobarHandle (Maybe Handle)
deriving (Typeable)
instance ExtensionClass XmobarHandle where
initialValue = XmobarHandle Nothing
-- For type-class..
instance RestartClass XmobarPID where
killP' (XmobarPID p) = io $ spawn ("kill " ++ show p)
runP' = do
(h, p) <- spawnPipe' ["/usr/bin/xmobar", "/home/dmitriym/.xmobarrc"]
XS.put (XmobarHandle (Just h))
return (XmobarPID p)
newtype XmobarPID2 = XmobarPID2 (Maybe ProcessID)
deriving (Typeable, Show, Read)
instance ExtensionClass XmobarPID2 where
initialValue = XmobarPID2 Nothing
extensionType = PersistentExtension
class Lens a b | a -> b where
view :: a -> b
set :: b -> a -> a
instance Lens XmobarPID2 (Maybe XmobarPID) where
view (XmobarPID2 x) = fmap XmobarPID x
set (Just (XmobarPID x)) _ = XmobarPID2 (Just x)
set Nothing z = z
restartP2' :: (ExtensionClass a, Lens a (Maybe b), RestartClass b) => X a
restartP2' = do
mp <- XS.get
whenJust (view mp) killP'
p' <- runP'
let mp' = set (Just p') mp
XS.put mp'
return mp'
restartXmobar2' :: X ()
restartXmobar2' = do
p <- restartP2'
let _ = p `asTypeOf` XmobarPID2 undefined
return ()
-}
-- Layouts definition from defaultConfig, extended with a maximizable
-- master-stack tile and a borderless fullscreen layout.
myLayout = maximize tiled ||| Mirror tiled ||| noBorders Full
  where
    -- default tiling algorithm partitions the screen into two panes
    tiled = Tall nmaster delta ratio
    -- The default number of windows in the master pane
    nmaster = 1
    -- Default proportion of screen occupied by master pane
    ratio = 1/2
    -- Percent of screen to increment by when resizing panes
    delta = 3/100
-- | Send any window whose WM_CLASS is "Vlc" to workspace "7".
testVlc :: ManageHook
testVlc = className =? "Vlc" --> doShift "7"
-- | Refresh struts when a dock window (that xmonad does not manage)
-- is mapped or unmapped; always lets the event propagate ('All True').
myDocksEventHook :: Event -> X All
myDocksEventHook e = do
    when (et == mapNotify || et == unmapNotify) $
        whenX ((not `fmap` (isClient w)) <&&> runQuery checkDock w) refresh
    return (All True)
    where w = ev_window e
          et = ev_event_type e
-- | Timestamp of the last dock-toggle key press, kept in extensible state
-- so press and release events can be correlated.
data DockToggleTime = DTT { lastTime :: Time } deriving (Eq, Show, Typeable)
instance ExtensionClass DockToggleTime where
    initialValue = DTT 0
-- | Event hook: while the given key is held, reveal xmobar via its DBus
-- control interface; on release, hide it again after @to@ (only when the
-- press was short, < 400ms).  Always lets the event propagate.
toggleDocksHook :: Int -> KeySym -> Event -> X All
toggleDocksHook to ks ( KeyEvent { ev_event_display = d
                                 , ev_event_type    = et
                                 , ev_keycode       = ekc
                                 , ev_time          = etime
                                 } ) =
        io (keysymToKeycode d ks) >>= toggleDocks >> return (All True)
    where
        toggleDocks kc
            | ekc == kc && et == keyPress = do
                -- Key went down: show the bar and remember when.
                safeSendSignal ["Reveal 0", "TogglePersistent"]
                XS.put ( DTT etime )
            | ekc == kc && et == keyRelease = do
                -- Key came up: hide after a delay only for short taps.
                gap <- XS.gets ( (-) etime . lastTime )
                safeSendSignal [ "TogglePersistent"
                               , "Hide " ++ show (if gap < 400 then to else 0)
                               ]
            | otherwise = return ()
        -- DBus can fail (no session bus); swallow errors so xmonad survives.
        safeSendSignal s = catchX (io $ sendSignal s) (return ())
        sendSignal    = withSession . callSignal
        withSession mc = connectSession >>= \c -> callNoReply c mc >> disconnect c
        -- Build the SendSignal call on xmobar's org.Xmobar.Control interface.
        callSignal :: [String] -> MethodCall
        callSignal s = ( methodCall
                         ( objectPath_    "/org/Xmobar/Control" )
                         ( interfaceName_ "org.Xmobar.Control"  )
                         ( memberName_    "SendSignal"          )
                       ) { methodCallDestination = Just $ busName_ "org.Xmobar.Control"
                         , methodCallBody        = map toVariant s
                         }
-- Non-key events are ignored.
toggleDocksHook _ _ _ = return (All True)
-- | Launch a trayer system-tray instance docked to the top-right of the
-- screen and hand back its 'ProcessHandle' so the caller can manage it.
runTrayer :: IO ProcessHandle
runTrayer = do
    (_, _, _, ph) <- createProcess (proc "trayer" trayerArgs)
    return ph
  where
    trayerArgs =
        [ "--edge", "top", "--align", "right"
        , "--SetDockType", "true", "--SetPartialStrut", "true"
        , "--expand", "true", "--width", "10"
        , "--transparent", "true" , "--tint", "0x191970"
        , "--height", "12"
        ]
-- Set the root window background.  NOTE(review): despite the name and
-- the old comment mentioning feh, this just runs @xsetroot -blue@ —
-- and @-blue@ is not a standard xsetroot flag (expected @-solid blue@);
-- verify the intended command.
runFehBg :: IO ()
runFehBg = spawn "xsetroot -blue"
-- Union my keys config with the current one in the ((->) (XConfig Layout))
-- applicative functor.  'M.union' is left-biased, so my keys must be the
-- left argument for them to win on duplicate bindings.
alterKeys :: (XConfig Layout -> M.Map (ButtonMask, KeySym) (X ()))
          -> XConfig l -> XConfig l
alterKeys myKs cf@(XConfig {keys = ks}) = cf {keys = M.union <$> myKs <*> ks}
-- My key bindings (screen lock, screenshots, window maximize, strut
-- toggle, audio volume).  They are intended for use with alterKeys only:
-- on their own they would replace, not extend, the default map.
myKeys :: XConfig l -> M.Map (ButtonMask, KeySym) (X ())
myKeys (XConfig {modMask = m}) =
    M.fromList
      [
      --((m .|. shiftMask, xK_p), spawn "exec gmrun")
        ((m .|. shiftMask, xK_z), spawn "xscreensaver-command -lock")
      -- Interactive region screenshot; sleep lets the key release first.
      , ((controlMask, xK_Print), spawn "sleep 0.2; scrot -s")
      , ((m , xK_m), withFocused (sendMessage . maximizeRestore))
      , ((m , xK_b), sendMessage (ToggleStrut U))
      -- Reserved: intentionally bound to a no-op.
      , ((m , xK_v), return ())
      --, ((m , xK_n), withWindowSet (\wset -> ...))
      , ((0, xK_Print), spawn "scrot")
      -- Audio keys.
      , ((0, xF86XK_AudioLowerVolume), spawn "amixer set Master 1311-")
      -- FIXME: Not exactly what I want: if sound is muted, one VolUp press
      -- should only unmute; otherwise it should just raise the volume.
      , ((0, xF86XK_AudioRaiseVolume), spawn $ "amixer set Master unmute; "
                                               ++ "amixer set Master 1311+")
      , ((0, xF86XK_AudioMute ), spawn "amixer set Master mute")
      ]
| sgf-dma/sgf-xmonad-config | src/variants-xmonad.hs | bsd-3-clause | 13,146 | 2 | 16 | 3,769 | 1,664 | 909 | 755 | 135 | 2 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE QuantifiedConstraints #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
module Data.Type.BitRecords.Builder.LazyByteStringBuilder
( BitBuilder,
execBitBuilder,
writeBits,
appendBitBuffer64,
appendStrictByteString,
BuilderWithSize (..),
getByteStringBuilder,
toBuilderWithSizeConstructor,
toLazyByteStringConstructor,
)
where
import Control.Category
import Data.Bits
import qualified Data.ByteString as SB
import qualified Data.ByteString.Builder as SB
import qualified Data.ByteString.Lazy as LB
import Data.FunctionBuilder
import Data.Monoid
import Data.Type.BitRecords.Builder.BitBuffer64
import Data.Word
import Prelude hiding (id, (.))
-- | A wrapper around a builder derived from a 'BitBuilderState'.  The
-- 'Word64' is the number of bytes the builder emits (see the size
-- arithmetic in @flushBitBuilder@ below).
data BuilderWithSize where
  MkBuilderWithSize :: !Word64 -> !SB.Builder -> BuilderWithSize
-- | Drop the size and keep just the bytestring builder.
getByteStringBuilder :: BuilderWithSize -> SB.Builder
getByteStringBuilder (MkBuilderWithSize _ !b) = b
-- Sizes add and builders concatenate, left-to-right.
instance Semigroup BuilderWithSize where
  (MkBuilderWithSize !ls !lb) <> (MkBuilderWithSize !rs !rb) =
    MkBuilderWithSize (ls + rs) (lb <> rb)
instance Monoid BuilderWithSize where
  mempty = MkBuilderWithSize 0 mempty
-- * Low-level interface to building 'BitRecord's and other things
-- | The running state of a bit-level builder: the finished bytes so far,
-- the partially filled 64-bit word, and the byte count emitted so far.
data BitBuilderState where
  BitBuilderState ::
    !SB.Builder ->
    !BitBuffer64 ->
    !Word64 ->
    BitBuilderState
-- | A bit-level builder as a state transition.  The 'Dual' around 'Endo'
-- flips composition so that @a <> b@ applies @a@'s transition first,
-- i.e. '<>' appends left-to-right as expected.
newtype BitBuilder = BitBuilder {unBitBuilder :: Dual (Endo BitBuilderState)}
  deriving (Monoid, Semigroup)
-- | Run a 'BitBuilder' from the empty state, flush the ragged tail, and
-- return the resulting builder together with its byte size.
execBitBuilder :: BitBuilder -> BuilderWithSize
execBitBuilder !w =
  case flushBitBuilder (appEndo (getDual (unBitBuilder w)) initialBitBuilderState) of
    (BitBuilderState !builder _ !wsize) -> MkBuilderWithSize wsize builder
  where
    initialBitBuilderState =
      BitBuilderState mempty emptyBitBuffer64 0
-- | Run a 'BitBuilder' and render the result as a lazy 'LB.ByteString'.
-- NOTE(review): duplicates 'execBitBuilder' except for the final
-- projection; could share a common runner.
writeBits :: BitBuilder -> LB.ByteString
writeBits !w =
  case flushBitBuilder (appEndo (getDual (unBitBuilder w)) initialBitBuilderState) of
    (BitBuilderState !builder _ _) -> SB.toLazyByteString builder
  where
    initialBitBuilderState =
      BitBuilderState mempty emptyBitBuffer64 0
-- | Write the partial buffer contents using any number of 'SB.word8'
-- writes.  The unwritten parts of the bit buffer are at the top (the most
-- significant end):
--
-- > 63 ... (63-off-1)(63-off) ... 0
-- >  ^^^^^^^^^^^^^^^^^^^
-- >  Relevant bits start at the top!
flushBitBuilder ::
  BitBuilderState -> BitBuilderState
flushBitBuilder (BitBuilderState !bldr !buff !totalSize) =
  BitBuilderState
    (writeRestBytes bldr 0)
    emptyBitBuffer64
    totalSize'
  where
    !off = bitBuffer64Length buff
    off_ :: Word64
    !off_ = fromIntegral off
    -- Whole bytes plus one extra byte when there is a ragged bit tail.
    !totalSize' = totalSize + signum (off_ `rem` 8) + (off_ `div` 8)
    !part = bitBuffer64Content buff
    -- write bytes from msb to lsb until the offset is reached
    -- > 63 ... (63-off-1)(63-off) ... 0
    -- >  ^^^^^^^^^^^^^^^^^^^
    -- >  AAAAAAAABBBBBBBBCCC00000
    -- >  |byte A| byte B| byte C|
    writeRestBytes !bldr' !flushOffset =
      if off <= flushOffset
        then bldr'
        else
          let !flushOffset' = flushOffset + 8
              -- Shift the next byte down to the low 8 bits and emit it.
              !bldr'' =
                bldr'
                  <> SB.word8
                    ( fromIntegral
                        ( ( part
                              `unsafeShiftR` (bitBuffer64MaxLength - flushOffset')
                          )
                            .&. 0xFF
                        )
                    )
           in writeRestBytes bldr'' flushOffset'
-- | Write all the bits, in chunks, filling and writing the 'BitBuffer64'
-- in the 'BitBuilderState' as often as necessary.
appendBitBuffer64 :: BitBuffer64 -> BitBuilder
appendBitBuffer64 !x' =
  BitBuilder . Dual . Endo $
    \(BitBuilderState !builder !buff !totalSizeIn) -> go x' builder buff totalSizeIn
  where
    -- Feed bits into the partial word; whenever it fills completely,
    -- emit it as a big-endian 64-bit word and continue with the rest.
    go !x !builder !buff !totalSize
      | bitBuffer64Length x == 0 = BitBuilderState builder buff totalSize
      | otherwise =
        let (!rest, !buff') = bufferBits x buff
         in if bitBuffer64SpaceLeft buff' > 0
              then BitBuilderState builder buff' totalSize
              else
                let !nextBuilder =
                      builder
                        <> SB.word64BE (bitBuffer64Content buff')
                    !totalSize' = totalSize + bitBuffer64MaxLengthBytes
                 in go rest nextBuilder emptyBitBuffer64 totalSize'
-- | Append every byte of a strict 'SB.ByteString' as an 8-bit chunk into
-- the 'BitBuilderState'; this works even when the builder is not
-- currently byte aligned (general-purpose, non-byte-aligned building).
appendStrictByteString :: SB.ByteString -> BitBuilder
appendStrictByteString !sb =
  foldMap (appendBitBuffer64 . bitBuffer64 8 . fromIntegral) (SB.unpack sb)
-- A 'BitBuffer64' can be appended directly as an immediate argument.
instance HasFunctionBuilder BitBuilder BitBuffer64 where
  toFunctionBuilder = immediate . appendBitBuffer64
-- | Create a 'SB.Builder' and store it in a 'BuilderWithSize'
toBuilderWithSizeConstructor ::
  HasFunctionBuilder BitBuilder a =>
  a ->
  ToFunction BitBuilder a BuilderWithSize
toBuilderWithSizeConstructor = toFunction . mapAccumulator execBitBuilder . toFunctionBuilder
-- | Like 'toBuilderWithSizeConstructor', but render the accumulated bits
-- to a lazy 'LB.ByteString' instead.  (The old haddock wrongly said
-- 'BuilderWithSize'.)
toLazyByteStringConstructor ::
  HasFunctionBuilder BitBuilder a =>
  a ->
  ToFunction BitBuilder a LB.ByteString
toLazyByteStringConstructor = toFunction . mapAccumulator writeBits . toFunctionBuilder
| sheyll/isobmff-builder | src/Data/Type/BitRecords/Builder/LazyByteStringBuilder.hs | bsd-3-clause | 5,430 | 0 | 21 | 1,247 | 1,049 | 543 | 506 | -1 | -1 |
--------------------------------------------------------------------------------
-- Copyright © 2011 National Institute of Aerospace / Galois, Inc.
--------------------------------------------------------------------------------
-- | Main import module for the front-end language.
module Copilot.Library.Libraries (
module Copilot.Library.Clocks
, module Copilot.Library.LTL
, module Copilot.Library.PTLTL
, module Copilot.Library.Statistics
, module Copilot.Library.RegExp
, module Copilot.Library.Utils
, module Copilot.Library.Voting
, module Copilot.Library.Stacks
) where
import Copilot.Library.Clocks
import Copilot.Library.LTL
import Copilot.Library.PTLTL
import Copilot.Library.Statistics
import Copilot.Library.RegExp
import Copilot.Library.Utils
import Copilot.Library.Voting
import Copilot.Library.Stacks
| seni/copilot-libraries | src/Copilot/Library/Libraries.hs | bsd-3-clause | 841 | 0 | 5 | 90 | 117 | 82 | 35 | 17 | 0 |
module Types where
import EmbeddedLootLanguage (BlockType, Category, Style)
-- Abstract syntax tree of a filter script: a flat list of expressions.
type CatName = String
type StyleName = String
type Source = String
newtype AST = AST [Expr]
-- | A top-level expression: either a definition or a rule.
data Expr = Def Definition
          | GeneralRuleExpr RuleExpr
-- | Definitions bind names: an import (with its parsed sub-AST), a
-- category, or a style.
data Definition = ImportExpr FilePath AST
                | CatExpr CatName IntCat
                | StyleExpr StyleName IntStyle
-- | Rules apply a style to a category, either singly or grouped under a
-- "Global" block with nested rules.
data RuleExpr = SimpleRuleExpr BlockType CatName StyleName
              | GlobalRuleExpr [(CatName, StyleName)] [RuleExpr]
-- Intermediate representations: categories as a boolean expression tree
-- over names/properties, styles as a combination tree.
data IntCat = IdCat CatName | PropCat Category | OpCat Op IntCat IntCat
data Op = Or | And
  deriving (Eq, Show)
data IntStyle = IdStyle StyleName | PropStyle Style | CombStyle IntStyle IntStyle
-- Show instances: these are pretty-printers for human consumption, not
-- Read-compatible derivations.
instance Show AST where
  show (AST xs) = "AST: \n\n" ++ unlines (map show xs)
instance Show Expr where
  show (Def d) = show d
  show (GeneralRuleExpr r) = show r
instance Show Definition where
  -- The indent offset 8 accounts for the "Set name = " prefix width.
  show (CatExpr name x) = pPrint ["Set", name, "=", indentBelowLine 1 (show x) (length name + 8)]
  show (StyleExpr name x) = pPrint ["Style", name, "=", show x, "\n"]
  show (ImportExpr x _) = pPrint ["Import", show x ,"\n"]
instance Show RuleExpr where
  show (SimpleRuleExpr t x y) = pPrint [show t, x, y,"\n"]
  show (GlobalRuleExpr grs rs) = pPrint ["Global", show grs, show rs ,"\n"]
instance Show IntStyle where
  show (IdStyle name) = name
  show (PropStyle x) = show x
  show (CombStyle x y) = pPrint [show x, "+", show y]
instance Show IntCat where
  show = showCat
-- | Render a category expression; operator nodes become ASCII trees.
showCat :: IntCat -> String
showCat (IdCat name) = name
showCat (PropCat x) = show x
showCat (OpCat op x y) = combineShows (show op) (showCat x) (showCat y)
-- | Render a binary operator applied to two pretty-printed operands as a
-- small ASCII tree, e.g. @combineShows "Or" "x" "y"@ gives
--
-- > Or--x
-- > |--y
--
-- The first line of each operand attaches to a connector; any remaining
-- lines are indented underneath it.
--
-- Fixes: the original crashed on an empty operand (partial match on
-- 'lines'); @map (const ' ') [1..n]@ replaced by 'replicate'.
combineShows :: String -> String -> String -> String
combineShows op xs ys = unlines $
     [topFront ++ x']
  ++ [midFront ++ x | x <- xs']
  ++ [botFront ++ y']
  ++ [restFront ++ y | y <- ys']
  where
    (x', xs') = splitFirst xs
    (y', ys') = splitFirst ys
    -- Total split into first line and rest; empty input yields "".
    splitFirst s = case lines s of
      []       -> ("", [])
      (l : ls) -> (l, ls)
    topFront  = op ++ "--"
    midFront  = "| " ++ blanks
    botFront  = "|--"
    restFront = "    "
    blanks    = replicate (length op) ' '
-- | Join a list of strings with single spaces; the empty list yields "".
-- The original hand-rolled exactly Prelude's 'unwords'.
pPrint :: [String] -> String
pPrint = unwords
-- | Indent every line of a block of text by @n@ spaces.
indent :: String -> Int -> String
indent = indentBelowLine 0
-- | Indent every line of @xs@ after the first @i@ lines by @n@ spaces.
-- The result always ends in a newline (via 'unlines').
--
-- Fixes: 'lines' was computed twice (now shared via 'splitAt');
-- @map (const ' ') [1..n]@ replaced by 'replicate'.
indentBelowLine :: Int -> String -> Int -> String
indentBelowLine i xs n = unlines (kept ++ map (blanks ++) shifted)
  where
    (kept, shifted) = splitAt i (lines xs)
    blanks = replicate n ' '
| owestphal/PoE-FilterScript | src/haskell/Types.hs | bsd-3-clause | 2,585 | 0 | 11 | 664 | 991 | 525 | 466 | 62 | 1 |
{-# LANGUAGE BangPatterns, TemplateHaskell #-}
module App.Benchmarks where
import Criterion.Main as Crit
import Data.Maybe
import BoolExpr.BoolExpr as BE
import BoolExpr.Env as Env
import BoolExpr.ROBDD as ROBDD
-- | Benchmark three evaluation strategies (direct AST eval, ROBDD eval,
-- and a precompiled function) over every possible environment for the
-- expression's variables.
--
-- NOTE(review): @robdd `seq`@ is redundant — the bang pattern on the
-- argument already forces it to WHNF.
runBenchmarks :: BoolExpr -> ROBDD -> (Env -> Bool) -> IO ()
runBenchmarks !expr !robdd !fn =
    robdd `seq` Crit.defaultMain $ map mkBenchGroup envs
    -- maximumVar is Nothing for variable-free expressions, hence the -1
    -- fallback producing a single empty environment.
  where envs = mkEnvs $ 1 + (fromMaybe (-1) $ BE.maximumVar expr)
        mkBenchGroup (n, e) = Crit.bgroup n [ evalBenchmark expr e
                                            , robddBenchmark robdd e
                                            , thBenchmark fn e
                                            ]
-- | All 2^m boolean assignments over m variables, each labelled with its
-- position index.  WARNING: exponential in m; also note @bs@ recomputes
-- its recursive call twice per level.
mkEnvs :: Int -> [(String, Env)]
mkEnvs m = zip (map show ([0..] :: [Int])) (map Env.mkEnv $ bs m)
  where bs 0 = [[]]
        bs i = map (False:) (bs (i-1)) ++ map (True:) (bs (i-1))
-- | Benchmark direct AST evaluation of @expr@ in environment @e@.
evalBenchmark :: BoolExpr -> Env -> Benchmark
evalBenchmark !expr !e = Crit.bench "eval" $ whnf (BE.eval expr) e
-- | Benchmark ROBDD-based evaluation in environment @e@.
robddBenchmark :: ROBDD -> Env -> Benchmark
robddBenchmark !robdd !e = Crit.bench "robdd" $ whnf (ROBDD.eval robdd) e
-- | Benchmark the Template-Haskell-compiled function in environment @e@.
thBenchmark :: (Env -> Bool) -> Env -> Benchmark
thBenchmark !fn !e = Crit.bench "th" $ whnf fn e
| robrene/robdd-with-template-hs | app/App/Benchmarks.hs | bsd-3-clause | 1,194 | 0 | 12 | 336 | 458 | 239 | 219 | 24 | 2 |
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TupleSections #-}
module Main where
-- The basic imports for working with rationals and ratios
import Data.Ratio
-- memoize important computations to prevent repetition
import Data.Function.Memoize
-- Add qualified instance so that we can use other definitions as needed
import Prelude hiding (min,max,(<*>))
import qualified Prelude as P
import qualified Math.Combinatorics.Exact.Binomial as C
import Math.Combinatorics.Exact.Factorial
import Data.Bits (bit)
import Crypto.Number.Basic (log2)
import Data.Maybe
import Data.Complex
import Data.Bits
import Data.List (group,sort)
import Data.Aeson hiding (Array)
import Data.Aeson.Encode.Pretty
import GHC.Generics
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Text.Show.Pretty
import Foreign.C.Types
import Data.Array.Repa.FFTW
import Data.Array.Repa.Index
import Data.Array.Repa.Repr.ForeignPtr
import Data.Array.Repa hiding (map,zipWith,(++))
import qualified Data.Array.Repa as A
import Data.Array.Repa.Eval (fromList)
-- import Numeric.FFT
import qualified Data.ByteString.Lazy as BS
import Debug.Trace
import Data.IORef
import Data.Foldable (foldl')
import System.Directory (createDirectoryIfMissing)
import Control.Monad (when)
-- | Sort a list and drop duplicates; the result is strictly ascending.
rmdups :: (Ord a) => [a] -> [a]
rmdups = dedup . sort
  where
    -- Collapse runs of equal neighbours in the sorted list.
    dedup (a : rest@(b : _))
      | a == b    = dedup rest
      | otherwise = a : dedup rest
    dedup short = short
-- | This is the type we use for various sorts of distributions we're working
-- with.  Invariant: 'pdf' and 'cdf' agree, and the arrays mirror the
-- functions sampled over @[min..max]@ (as 'Complex' 'Double' so they can
-- be fed straight to the repa FFT routines).
data Dist prob dom where
  Dist :: (Ord dom,Enum dom, Real prob) => {
    -- | The cumulative distribution function over the space in which we're
    -- working, we assume that the domain is an enumerable type with a useful
    -- predecessor and successor function.
    cdf :: dom -> prob
    -- | The PDF of the function we're working with, it should always be
    -- identical to `(\ x -> cdf x - cdf (pred x))`
    , pdf :: dom -> prob
    -- | The minimum value of a range outside of which the PDF is always 0
    , min :: !dom
    -- | The maximum value of a range outside of which the PDF is always 0
    , max :: !dom
    -- | The list of CDF values such that
    -- `cLst == map cdf [min..max]`
    -- with whatever intermediate coersion that implies
    , cLst :: Array F DIM1 (Complex Double)
    -- | The list of PDF values such that
    -- `pLst == map pdf [min..max]`
    -- with whatever intermediate coersion that implies
    , pLst :: Array F DIM1 (Complex Double)
    } -> Dist prob dom
-- | Debug rendering of a 'Dist': its bounds and the CDF sampled over the
-- whole domain, with probabilities coerced to 'Double' for readability.
printDist :: (Integral d,Enum d,Real p) => Dist p d -> String
printDist Dist{..} = ppShow (
  toInteger min,
  toInteger max,
  map (\ p -> (toInteger p,fromRational @Double . toRational $ cdf p)) [min..max])
-- | Same rendering for the lightweight 'CDF' form.
printCDF :: (Integral d,Enum d,Real p) => CDF p d -> String
printCDF CDF{..} = ppShow (
  toInteger min,
  toInteger max,
  map (\ p -> (toInteger p,fromRational @Double . toRational $ cdf p)) [min..max])
-- This is the temporary CDF constructor that we use to construct a Dist as
-- needed, we generally work with things in their CDF form since it's much
-- more convenient and efficient than the PDF form.
data CDF prob dom where
  CDF :: (Ord dom, Fractional prob) => {
    cdf :: dom -> prob
    , min :: !dom
    , max :: !dom
    } -> CDF prob dom
-- Typeclass to convert specific distribution types into Dist values that we
-- can use quickly.
class ToDist a prob dom where
  toDist :: a prob dom -> Dist prob dom
-- | Get the index of the last zero in a CDF's distribution, useful for
-- shrinking the domain of Dist when things look like a normal distribution.
-- Binary search for the largest point whose CDF is still numerically zero
-- (below 'thresh').
--
-- TODO :: Good lord, i think I've managed to fuck up writing an elegant
-- binary search.
getLastZero :: (Ord dom, Integral dom, RealFrac prob) => CDF prob dom -> dom
getLastZero c@CDF{..} | cdf min > 0 = min
                      | otherwise = {- trace ("cdf: " ++ printCDF c) $ -} search (min,max)
  where
    -- Invariant (after the guard above): cdf min <= thresh.
    search (min,max) | min + 1 == max && cdf max <= thresh = max
                     | min + 1 == max && cdf min <= thresh = min
                     | min >= max && cdf max <= thresh = max
                     | cdf min <= thresh && cdf mid > thresh = search (min,mid)
                     | cdf mid <= thresh && cdf max > thresh = search (mid,max)
                     | otherwise = error $ "getLastZero is broken:" ++ printCDF c
      where
        -- Cutoff below which the CDF counts as "zero".
        thresh = 0.000001
        mid = {- trace ("glz: " ++ show (
          (toInteger min,fromRational @Double . toRational $ cdf min)
          ,(toInteger m' ,fromRational @Double . toRational $ cdf m')
          ,(toInteger max,fromRational @Double . toRational $ cdf max))) $ -} m'
        m' = (min + max) `div` 2
-- | Get the index of the first 1 in a CDF's distribution, useful for
-- shrinking the domain of Dist when things look like a normal distribution.
-- Binary search for the smallest point whose CDF is numerically one
-- (at or above 'thresh').
getFirstOne :: (Ord dom, Integral dom, RealFrac prob) => CDF prob dom -> dom
getFirstOne CDF{..} | cdf max < 1 = max
                    | otherwise = search (min,max)
  where
    search (min,max) | min + 1 == max && cdf min >= thresh = min
                     | min + 1 == max && cdf max >= thresh = max
                     | min >= max && cdf min >= thresh = min
                     | cdf mid < thresh && cdf max >= thresh = search (mid,max)
                     | cdf min < thresh && cdf mid >= thresh = search (min,mid)
                     | otherwise = error "GetFirstOne is Broken"
      where
        -- Cutoff above which the CDF counts as "one".
        thresh = 0.999999
        mid = {- trace ("gfo: " ++ show (toInteger min,toInteger m',toInteger max)) $ -} m'
        m' = (min + max) `div` 2
-- | Get the index of the last zero in a CDF's distribution, useful for
-- shrinking the domain of Dist when things look like a normal distribution.
-- Identical search to 'getLastZero', but operating on a fully built 'Dist'.
--
-- TODO :: Good lord, i think I've managed to fuck up writing an elegant
-- binary search.
getLastZero' :: (Ord dom, Integral dom, RealFrac prob) => Dist prob dom -> dom
getLastZero' Dist{..} | cdf min > 0 = min
                      | otherwise = search (min,max)
  where
    search (min,max) | min + 1 == max && cdf max <= thresh = max
                     | min + 1 == max && cdf min <= thresh = min
                     | min >= max && cdf max <= thresh = max
                     | cdf min <= thresh && cdf mid > thresh = search (min,mid)
                     | cdf mid <= thresh && cdf max > thresh = search (mid,max)
                     | otherwise = error $ "getLastZero' is broken"
      where
        -- Cutoff below which the CDF counts as "zero".
        thresh = 0.000001
        mid = {- trace ("glz: " ++ show (toInteger min,toInteger m',toInteger max)) $ -} m'
        m' = (min + max) `div` 2
-- | Get the index of the first 1 in a CDF's distribution, useful for
-- shrinking the domain of Dist when things look like a normal distribution.
-- Identical search to 'getFirstOne', but operating on a fully built 'Dist'.
getFirstOne' :: (Ord dom, Integral dom, RealFrac prob) => Dist prob dom -> dom
getFirstOne' Dist{..} | cdf max < 1 = max
                      | otherwise = search (min,max)
  where
    search (min,max) | min + 1 == max && cdf min >= thresh = min
                     | min + 1 == max && cdf max >= thresh = max
                     | min >= max && cdf min >= thresh = min
                     | cdf mid < thresh && cdf max >= thresh = search (mid,max)
                     | cdf min < thresh && cdf mid >= thresh = search (min,mid)
                     | otherwise = error "GetFirstOne' is Broken"
      where
        -- Cutoff above which the CDF counts as "one".
        thresh = 0.999999
        mid = {- trace ("gfo: " ++ show (toInteger min,toInteger m',toInteger max)) $ -} m'
        m' = (min + max) `div` 2
-- | Trim the portions of the CDF that have shrunk to be basically 0 or 1,
-- minimizing work for convolutions and similar operations that work with
-- the list of relevant values.
shrinkCDF :: forall prob dom .(Ord dom, Integral dom, RealFrac prob) => CDF prob dom -> CDF prob dom
shrinkCDF c = {- to . tf $ -} c'
  where
    -- Narrow the bounds to the region where the CDF actually changes.
    c' = (c{min = getLastZero c, max = getFirstOne c} :: CDF prob dom)
    -- Debug tracers, only active when the commented pipeline is restored.
    to = trace $ "oldC:" ++ printCDF c
    tf = trace $ "newC:" ++ printCDF c'
-- This sort of wrapping allows us to make sure things are well memoized, in
-- general the CDF function will be the one which gets wrapped in a memoize,
-- with the rest of the instance just backed by that as needed.
instance (Ord dom, Integral dom, RealFrac prob) => ToDist CDF prob dom where
  toDist = assembleDist . shrinkCDF
    where
      -- Build the full 'Dist' from an already-shrunk CDF: bounded cdf/pdf
      -- plus the repa sample arrays consumed by the FFT convolution.
      assembleDist CDF{..} = Dist {
          cdf = boundCDF (min,max) cdf
        , pdf = boundPDF (min,max) pdf
        , min = min
        , max = max
        , cLst = fromList (Z :. lLen) . map (toCD . cdf) $ lKeys
        , pLst = fromList (Z :. lLen) . map (toCD . pdf) $ lKeys
        }
        where
          lKeys = [min..max]
          lLen = length lKeys
          -- The PDF is derived from the CDF by first differences.
          pdf :: dom -> prob
          pdf x = cdf x - cdf (pred x)
          -- Coerce any Real probability into the Complex Double the FFT needs.
          toCD :: Real p => p -> Complex Double
          toCD = (:+ 0) . fromRational . toRational
-- | Clamp a CDF to a working range so that weird functions don't end up
-- doing silly things: below the range the CDF is 0, at or above the top
-- it is 1, and in between the wrapped function is consulted.
boundCDF :: (Ord dom,Real prob) => (dom,dom) -> (dom -> prob) -> dom -> prob
boundCDF (lo,hi) f x
  | x < lo = 0
  | x >= hi = 1
  | otherwise = f x
-- | Clamp a PDF to a working range so that weird functions don't end up
-- doing silly things: outside the closed range [lo,hi] the density is 0.
boundPDF :: (Ord dom,Real prob) => (dom,dom) -> (dom -> prob) -> dom -> prob
boundPDF (lo,hi) f x
  | x < lo || x > hi = 0
  | otherwise = f x
-- | Right padding a list: append copies of @e@ until the list is at least
-- @i@ elements long. Lists already that long are returned unchanged
-- ('replicate' with a non-positive count yields @[]@).
--
-- The argument is already an 'Integer', so the original
-- @fromInteger (toInteger i)@ round trip was redundant.
rPad :: Integer -> c -> [c] -> [c]
rPad !i !e !ls = ls ++ replicate (fromInteger i - length ls) e
-- | Left padding a list: prepend copies of @e@ until the list is at least
-- @i@ elements long. Lists already that long are returned unchanged.
--
-- The argument is already an 'Integer', so the original
-- @fromInteger (toInteger i)@ round trip was redundant.
lPad :: Integer -> c -> [c] -> [c]
lPad !i !e !ls = replicate (fromInteger i - length ls) e ++ ls
-- ldConv :: [Complex Double] -> [Complex Double] -> [Complex Double]
-- convolve a cLst and pLst to get a cLst for their sum distribution
--
-- basically, a PDF convolved with a CDF is the CDF of the sums of the random
-- variables involved.
ldConv :: Array F DIM1 (Complex Double) -> Array F DIM1 (Complex Double) -> Array F DIM1 Double
ldConv !c !p = {- t -} o
  where
    -- Debug tracer for inputs/output; re-enable by uncommenting above.
    t = trace ("c':" ++ ppShow c') . trace ("p':" ++ ppShow p') . trace ("o':" ++ ppShow (toList o))
    c' = toList c
    p' = toList p
    -- Length of a 1-D repa array.
    len :: Array F DIM1 (Complex Double) -> Int
    len a = let (Z :. l) = extent a in l
    cLen = len c
    pLen = len p
    -- The fft library we're using only works on lists that are a power of
    -- two long, so we take the smallest power of 2 strictly larger than the
    -- space we need.
    oLen = fromInteger . bit $ 2 + log2 (toInteger $ cLen + (2 * pLen) - 2)
    olExt _ = ix1 oLen
    -- You need to pad the CDF with pLen '1's otherwise it convolves with the
    -- 0s that are around the CDF and gets you odd results.
    -- cPad = fromList (Z :. (fromInteger oLen)) $! 0 : rPad (oLen - 1) 0 (rPad (pLen + cLen) 1 c)
    cPad = computeS $ A.traverse c olExt clGen
    -- Padded CDF: a leading 0, the CDF samples, a run of 1s, then 0s.
    clGen ol z@(Z :. i) | i == 0 = 0
                        | i > 0 && i <= cLen = {- trace (show (i,cLen,ol (ix1 0))) $ -} ol (ix1 $ i - 1)
                        | i > cLen && i <= (cLen + pLen) = 1 :+ 0
                        | otherwise = 0
    -- pPad = fromList (Z :. (fromInteger oLen)) $! rPad oLen 0 p
    pPad = computeS $ A.traverse p olExt plGen
    -- Padded PDF: the PDF samples followed by 0s out to oLen.
    plGen ol (Z :. i) | i >= 0 && i < pLen = ol (ix1 i)
                      | otherwise = 0
    -- Convolution theorem: multiply pointwise in frequency space.
    cFFT = fft cPad
    pFFT = fft pPad
    oFFT = computeS $ cFFT *^ pFFT
    o :: Array F DIM1 Double
    o = computeS $ A.traverse (ifft oFFT) newExt result
    -- Only the first cLen + pLen - 1 samples of the inverse FFT are output.
    oLen' = cLen + pLen - 1
    -- newExt = id
    result ol (Z :. i) = realPart $ ol (ix1 $ i + 1)
    newExt _ = ix1 oLen'
    -- result ol (Z :. ind) = realPart $ ol (ix1 $ oLen' - ind :: DIM1)
--main :: IO ()
--main = do
-- let cl = [1,1]
-- pl = [0.2,0.2,0.2,0.2,0.2]
-- ca = fromList (ix1 $ length cl) . map (\ x -> x :+ 0) $ cl
-- pa = fromList (ix1 $ length pl) . map (\ x -> x :+ 0) $ pl
-- o = ldConv ca pa
-- ol = zip @Int [0..] $ toList o
-- pPrint $ ol
-- let gen = (<*>) (1 `d` 3)
-- mapM_ (\ i -> putStrLn $ "output(" ++ show i ++ "):" ++ printDist (gen i :: Dist Double Integer)) [1..3]
-- | Arithmetic over distributions. '+' yields the distribution of the sum
-- of two independent random variables, computed by FFT-convolving one
-- distribution's CDF samples with the other's PDF samples ('ldConv').
-- '*' and 'abs' are deliberately left undefined.
instance (Ord d, Integral d, Memoizable d, RealFrac p) => Num (Dist p d) where
  (+) df@(Dist _ _ fMin fMax fcLst _) dg@(Dist _ _ gMin gMax _ gpLst)
    = toDist CDF{
        cdf = newFun
      , min = newMin
      , max = newMax
      }
    where
      -- Sampled CDF of the sum, from convolving f's cLst with g's pLst.
      newCL = ldConv fcLst gpLst
      newMin = fMin + gMin
      newMax = fMax + gMax
      -- Look up the convolved sample for x, clamping into [0,1] and
      -- returning 0/1 outside the computed index range.
      newFun x | newInd x < 0 = 0
               | newInd x >= (let (Z :. x) = extent newCL in x) = 1
               | otherwise = (P.max 0) . (P.min 1) . fromRational . toRational $ newCL ! (ix1 $ newInd x)
      newInd x = (fromIntegral $ fromIntegral x - newMin)
  (-) a b = a + (negate b)
  (*) = undefined
  -- Mirror the distribution around zero: P(-X <= x) = 1 - P(X <= -x).
  negate Dist{..} = toDist CDF{
      cdf = (\ x -> 1 - (cdf $ negate x))
    , min = -max
    , max = -min
    }
  -- Three-point distribution over {-1,0,1} from the sign of the variable.
  signum Dist{..} = toDist CDF{
      cdf = f
    , min = -1
    , max = 1
    }
    where
      f x | x < -1 = 0
          | x < 0 = cdf (pred 0)
          | x < 1 = cdf 0
          | otherwise = 1
  abs Dist{..} = undefined
  -- Point mass concentrated on the given integer.
  fromInteger i = toDist CDF{
      cdf = (\ x -> if x >= fromInteger i then 1 else 0)
    , min = fromInteger i
    , max = fromInteger i
    }
-- | The largest power of two less than or equal to the given number.
gpow2 :: Integer -> Integer
gpow2 n = bit (log2 n)
-- | this uses a scaled continuous approximation of the distribution for rolling
-- multiple dice.
--
-- The formula is from http://www.math.uah.edu/stat/special/IrwinHall.html
irwinHall :: forall num dom prob. (Integral num, Bits num, Real dom, Memoizable dom, Fractional prob)
  => num -- Number of Dice
  -> num -- Sides on each Dice
  -> CDF prob dom
irwinHall !n !s = CDF{cdf = sEmbed,min = min,max = max}
  where
    n' :: Integer
    n' = fromIntegral n
    s' :: Integer
    s' = fromIntegral s
    min :: dom
    min = fromIntegral $ n
    max :: dom
    -- NOTE(review): the `+ 2` pads the top of the range past the true
    -- maximum roll (s * n) -- presumably headroom for the continuous
    -- approximation; confirm before changing.
    max = fromIntegral $ s * n + 2
    -- sanity wrapper: clamp to 0 below the range and 1 at/above the top
    sEmbed :: dom -> prob
    sEmbed i | i < min = 0
             | i >= max = 1
             | otherwise = embed i
    -- embed the range and divide the output; memoized since the rational
    -- arithmetic below is expensive
    embed :: dom -> prob
    embed = memoize (\ i -> fromRational (iw (tfun $ toRational i) n'))
    -- Transform the normal input into the range of the irwinHall distribution
    tfun :: Rational -> Rational
    tfun !i = ((toRational n') * (i - (toRational $ n') + 1)) / (toRational $ (s' * n') - n' + 1)
    -- Standard irwin hall CDF function
    iw :: Rational -> Integer -> Rational
    iw !x !n = (1 % 2) + ((1 % (2 * (factorial . fromInteger $ n))) * (sumTerm x n))
    sumTerm :: Rational -> Integer -> Rational
    sumTerm !x !n = sum $! map (sq x n) [0..n]
    -- All the stuff in the summation term
    sq :: Rational -> Integer -> Integer -> Rational
    sq !x !n !k = (toRational $! ((-1) ^ k) * (n `C.choose` k))
      * (signum $! x - fromInteger k)
      * ((x - fromInteger k) ^ n)
-- This just gives us the nicer "100 `d` 100" style syntax for CDFs and
-- some memoization to prevent huge amounts of overwork.
d :: (Integral dom, Memoizable dom, RealFrac prob)
  => Integer -- Number of Dice
  -> Integer -- Sides on each Dice
  -> Dist prob dom
d !n !s = toDist $ irwinHall n s
-- The maximum of some CDF and a constant: all probability mass below n is
-- collapsed onto n, and the bounds are raised to at least n.
dMax :: (Integral d,RealFrac p) => d -> Dist p d -> Dist p d
dMax !n Dist{..} = toDist CDF{
    cdf = (\x -> if x < n then 0 else cdf x)
  , min = P.max n min
  , max = P.max n max
  }
-- transform a CDF by multiplying the output by some constant, this just
-- assumes the original CDF is a step function.
--
-- Yes, this means the PDF will be weird and spiky, just deal with it.
dMul :: (Integral d,Real a,RealFrac p) => a -> Dist p d -> Dist p d
dMul !n Dist{..} = toDist CDF{
    -- Map a scaled point back into the original domain before sampling.
    cdf = cdf . floor . (\x -> toRational x / toRational n)
  , min = floor (toRational min * toRational n)
  , max = floor (toRational max * toRational n)
  }
-- Add a constant to a CDF: shifts the whole distribution (and its bounds)
-- up by n.
dPlus :: (Integral d,RealFrac p) => d -> Dist p d -> Dist p d
dPlus !n Dist{..} = toDist CDF{
    cdf = cdf . (\ x -> x - n)
  , min = min + n
  , max = max + n
  }
-- Given a specific fraction between 0 and 1, will just run a binary search
-- on a distribution until it finds the point where the CDF goes over that
-- point
findPercentile :: (Integral d,Ord d) => p -> Dist p d -> d
findPercentile !t Dist{..} | t == 0 = min
                           | t == 1 = max
                           | otherwise = fpHelp (min,max)
  where
    -- Binary search for the first domain point whose CDF reaches t.
    fpHelp (min,max) | min == max = min
                     | (cdf min < t) && (cdf mid >= t) = fpHelp (min,mid - 1)
                     | (cdf mid < t) && (cdf max >= t) = fpHelp (mid + 1,max)
                     | (cdf min >= t) = min
                     | (cdf max < t) = max
                     | otherwise = error "This should never happen"
      where
        mid = {- trace ("fp:" ++ show (toInteger min,toInteger m',toInteger max))-} m'
        m' = fromInteger $ (toInteger min + toInteger max) `div` 2
-- | Iterated convolution over some CDF in order to find the location we care
-- about.
--
-- Returns a nice memoized function that you should keep handy where possible.
-- NOTE: this shadows the Applicative (<*>) locally; the Prelude version is
-- referenced elsewhere in this file as P.<*>.
(<*>) :: (Integral d, Memoizable d, RealFrac p) => Dist p d -> Integer -> Dist p d
(<*>) c = memoConv
  where
    memoConv = memoize conv
    conv 1 = c
    conv 2 = c + c
    -- Divide and conquer on powers of two, so an i-fold convolution costs
    -- O(log i) distribution additions, all shared through the memo table.
    conv !i | i <= 0 = error "can't take 0 or fewer convolutions of CDF"
            | i == gpow2 i = let n = memoConv (i `div` 2) in n + n
            | otherwise = memoConv (gpow2 i) + memoConv (i - gpow2 i)
-- | The number of research points one expects to get in a single day
--
-- n = # of Sealing Dice
-- t = Daily Sealing Roll Target
-- r = Cumulative Research target
-- c = Current research counter
-- a = number of days worth of progress
--
-- params n t
--
-- This function takes the number of sealing dice you have, and the daily
-- target and gives you the distribution for expected increase in C
--
-- Expected progress for `a` days is `singleDaysProgress n t <*> a`
singleDaysProgress :: Integer -> Integer -> Dist Double Integer
singleDaysProgress = sdp
  where
    -- Roll n d100s, subtract the daily target, clamp at 0, then scale by
    -- the diminishing-returns factor 1 / (n + t/50)^0.65.
    sdp !n !t = (1 / ((n' + (t'/50)) ** 0.65)) `dMul` (dMax 0 (dPlus (-t) (n `d` 100)))
      where
        n' = fromInteger n
        t' = fromInteger t
-- | Slightly more readable wrapper that pops out a nice memoized function that
-- we can pass to other things as needed
--
-- params = n t a
multipleDaysProgress :: Integer -> Integer -> Integer -> Dist Double Integer
multipleDaysProgress n t = (<*>) (singleDaysProgress n t)
-- The number of days needed to get a higher than X chance of completion given
-- a given n t and r. This does a binary search in the range of days that we're
-- looking at, but scales up with powers of two first.
--
-- x = percentage success chance you want to find the correct set of days for.
--
-- params are x n t r
daysToComplete :: Double -> Integer -> Integer -> Integer -> Maybe Integer
daysToComplete x n t r = dta 1
  where
    -- | function that scales up by 2 each time, looking for a range, since
    -- all the relevant intermediate products are memoized, this is just
    -- doing work that would have to be done anyway.
    dta a | progress > x && a == 1 = Just 1
          | progress > x = dtc (a `div` 2,a)
          | (a * 2) > maxDays = dtb a
          | otherwise = dta (a * 2)
      where
        -- Chance of having passed the research target r after `a` days.
        progress = (1 - (cdf (multipleDaysProgress n t a :: Dist Double Integer) r))
    -- | This one just keeps us from going through a huge pile of intermediate
    -- results in order to find that it takes more than our maximum. I figure
    -- this is going to be a very common outcome when generating a diagram
    -- or table.
    dtb a | progress < x = Nothing
          | otherwise = dtc (a,maxDays)
      where
        progress = (1 - (cdf (multipleDaysProgress n t maxDays :: Dist Double Integer) r))
    -- | This function does the binary search for the actual point of change,
    -- once we've got a range set up and should skip a lot of useless computing.
    dtc (min,max)
      | min == maxDays && progress < x = Nothing
      | min >= max = Just min
      | progress >= x = dtc (min,mid - 1)
      | progress < x = dtc (mid,max)
      | otherwise = error "should never reach here"
      where
        mid = (min + 1 + max) `div` 2
        progress = (1 - (cdf (multipleDaysProgress n t mid :: Dist Double Integer) r))
    -- 12 weeks is the limit we search, if a project takes more than 3 months
    -- our thresholds are too high. We're not going to find a 6 month stretch
    -- basically ever.
    maxDays = 7 * 12
-- The number of days needed to get a higher than X chance of completion given
-- a given n t and r. This does a binary search in the range of days that we're
-- looking at, but scales up with powers of two first.
--
-- but this time allowing you to cache more effectively
--
-- md = function that, when given the day, gives you the distribution for that
-- day.
-- x = percentage success chance you want to find the correct set of days for.
-- r = target threshold you're looking to pass
--
-- params are md x r
--
--
daysToComplete' :: (Integer -> Dist Double Integer) -> Double -> Integer -> Maybe Integer
daysToComplete' !md !x !r = {-(trace $ "days: " ++ show (x,r))-} dta 1
  where
    -- | function that scales up by 2 each time, looking for a range, since
    -- all the relevant intermediate products are memoized, this is just
    -- doing work that would have to be done anyway.
    dta !a | progress > x && a == 1 = {- t $ -} Just 1
           | progress > x = {- t $ -} dtc (a `div` 2,a)
           | (a * 2) >= maxDays = {- t $ -} dtb a
           | otherwise = {- t $ -} dta (a * 2)
      where
        -- Chance of having passed the target r after `a` days.
        progress = (1 - (cdf (md a :: Dist Double Integer) r))
        t = trace $ "dta: " ++ show (x,progress,a)
    -- | This one just keeps us from going through a huge pile of intermediate
    -- results in order to find that it takes more than our maximum. I figure
    -- this is going to be a very common outcome when generating a diagram
    -- or table.
    dtb !a | progress < x = {- t $ -} Nothing
           | otherwise = {- t $ -} dtc (a,maxDays)
      where
        progress = (1 - (cdf (md maxDays :: Dist Double Integer) r))
        t = trace $ "dtb: " ++ show (x,progress,a)
    -- | This function does the binary search for the actual point of change,
    -- once we've got a range set up and should skip a lot of useless computing.
    dtc !(min,max)
      | min + 1 == max && progress > x = {- t $ -} Just mid
      | min + 1 == max && progress < x = {- t $ -} Just max
      | min == maxDays && progress < x = {- t $ -} Nothing
      | mid >= max && progress > x = {- t $ -} Just mid
      | progress >= x = {- t $ -} dtc (min,mid)
      | progress < x = {- t $ -} dtc (mid,max)
      | otherwise = {- t $ -} error "DaysToComplete is broken"
      where
        mid = (min + 1 + max) `div` 2
        progress = (1 - (cdf (md mid :: Dist Double Integer) r))
        -- Extra probes below are only consumed by the debug tracer t.
        progressmin = (1 - (cdf (md min :: Dist Double Integer) r))
        progressmax = (1 - (cdf (md max :: Dist Double Integer) r))
        t = trace $ "dtc: " ++ show ((min,progressmin),(mid,progress),(max,progressmax))
-- | The probability of success thresholds we're looking for when given a
-- sealing dice x daily target x number of days (research target -> prob).
-- 41 evenly spaced probabilities from 0 to 1 in steps of 0.025.
psThresholds :: [Double]
psThresholds = [0,0.025..1]
-- | Datastructure for a probability of success threshold query, basically
-- just so we can derive nice Show and Read instances for this
data PS p d = PS {
    sealingDice :: !Integer
  , dailyTarget :: !Integer
  , numDays :: !Integer
    -- | (research target, probability of success) pairs for this day count.
  , probOfTarget :: ![(d,p)]
  } deriving (Show,Read,Generic,FromJSON,ToJSON)
-- | Type we're using for prettier JSON output
data PSJ p d = PSJ {
    numDays :: !Integer
  , dataPoints :: ![PSE p d]
  } deriving (Show,Read,Generic,FromJSON,ToJSON)
-- This is the type we're using to get prettier JSON output for the pairs of
-- researchTargets and Probability of success
data PSE p d = PSE {
    researchTarget :: !d
  , probabilityOfSuccess :: !p
  } deriving (Show,Read,Generic,FromJSON,ToJSON)
-- | Reshape a 'PS' query result into the JSON-friendly 'PSJ' form.
psToPSJ :: (Ord d,Ord p) => PS p d -> PSJ p d
psToPSJ !PS{..} = PSJ{
    numDays = numDays
  , dataPoints = map (uncurry PSE) probOfTarget
  }
-- | Given a bunch of information, generate a PS structure for a set of known
-- parameters.
calculatePS :: (Integer,Integer) -> Integer
  -> (Integer -> Dist Double Integer)
  -> PS Double Integer
calculatePS !(sealingDice,dailyTarget) !numDays !distGen
  = PS sealingDice dailyTarget numDays probOfTarget
  where
    -- The Distribution for the particular day
    dayDist :: Dist Double Integer
    dayDist = distGen numDays
    -- Research target whose CDF first reaches probability p.
    getRT p = findPercentile p dayDist
    -- Pair a research target with its actual success probability.
    getPair p = (rt,1 - tp)
      where
        -- | Research Target
        rt = getRT p
        -- | True Percent
        tp = cdf (dayDist :: Dist Double Integer) rt
    probOfTarget = rmdups $ map getPair psThresholds
-- | Smallest research target appearing in a PS query's data points.
minTarget :: Ord d => PS p d -> d
minTarget ps = minimum [rt | (rt,_) <- probOfTarget ps]
-- | Largest research target appearing in a PS query's data points.
maxTarget :: Ord d => PS p d -> d
maxTarget ps = maximum [rt | (rt,_) <- probOfTarget ps]
-- | Figure out an interesting set of research targets when given a list of
-- PS querys. first a bunch of things between the 0 and the minimum we
-- find, and then a whole pile of increments in the range that's more
-- interesting. This just gets us a full range of useful output.
getRTList :: (Show d, Show p, Ord d,Integral d) => [PS p d]-> [d]
getRTList !pl = {- t . -} rmdups $ [0,zDivs..min] ++ [min,min+inc..max] ++ [max]
  where
    -- Debug tracer; only active when the commented pipeline is restored.
    t = trace ("foo:" ++ show (pl, min, max, inc, zInc))
    min = minimum . map minTarget $ pl
    max = maximum . map maxTarget $ pl
    divs = 40
    inc = P.max 1 $ (max - min) `div` divs
    zDivs = 20
    -- NOTE(review): the low range above steps by zDivs, but zInc is
    -- computed and (outside the tracer) unused -- looks like the step was
    -- meant to be zInc; confirm intent before changing.
    zInc = P.max 1 $ min `div` zDivs
-- | Datastructure for DaysToComplete queries, this is just a convenient
-- way to get nice read and show instances for this type of information.
-- That way others can work with it too.
data DC p d = DC {
    sealingDice :: !Integer
  , dailyTarget :: !Integer
  , researchTarget :: !Integer
    -- | (number of days, probability of success) pairs for this target.
  , probOfNumDays :: ![(Integer,p)]
  } deriving (Show,Read,Generic,FromJSON,ToJSON)
-- | JSON-friendly reshaping of a 'DC' query result.
data DCJ p d = DCJ {
    researchTarget :: !Integer
  , dataPoints :: ![DCE p d]
  } deriving (Show,Read,Generic,FromJSON,ToJSON)
-- | One (days, probability) data point in a 'DCJ'.
data DCE p d = DCE {
    numDays :: !Integer
  , probabilityOfSuccess :: !p
  } deriving (Show,Read,Generic,FromJSON,ToJSON)
-- | Reshape a 'DC' query result into the JSON-friendly 'DCJ' form.
dcToDCJ :: (Ord d,Ord p) => DC p d -> DCJ p d
dcToDCJ !DC{..} = DCJ{
    researchTarget = researchTarget
  , dataPoints = map (uncurry DCE) probOfNumDays
  }
-- | Given a setting pair and a research target, generate the DC structure:
-- for each interesting success probability, the number of days needed and
-- the actual probability achieved at that day.
calculateDC :: (Integer,Integer) -> Integer
  -> (Integer -> Dist Double Integer)
  -> DC Double Integer
calculateDC !(sealingDice,dailyTarget) !researchTarget !distGen
  = DC sealingDice dailyTarget researchTarget probOfNumDays
  where
    -- Days needed to reach success probability p, Nothing past the cap.
    getDays :: Double -> Maybe Integer
    getDays p = daysToComplete' distGen p researchTarget
    -- just get the actual probability for a particular day
    getPair p = (prob <$>) $! dayOf
      where
        dayOf = getDays p
        prob d = (d,1 - cdf (distGen d :: Dist Double Integer) researchTarget)
    -- Go through each day and get research thresholds for it
    probOfNumDays = rmdups . catMaybes . map getPair $ [0.001] ++ [0.025,0.05..0.975] ++ [0.999]
-- -- Find the days to get 1/10th of 1% probability of success
-- minDay :: Maybe Integer
-- minDay = getDays 0.001
-- -- Find the days to get 1/10th of 1% probability of failure
-- maxDay :: Maybe Integer
-- maxDay = Just . fromMaybe maxDays $ getDays 0.999
-- -- list of days we're going to search
-- allDays :: [Maybe Integer]
-- allDays = map Just . fromMaybe [] $ (\ a b -> [a..b]) <$> minDay P.<*> maxDay
-- -- just get the actual probability for a particular day
-- getPair dayOf = prob <$> dayOf
-- where
-- prob d = (d,1 - cdf (distGen d :: Dist Double Integer) researchTarget)
-- -- Go through each day and get research thresholds for it
-- probOfNumDays = rmdups . catMaybes . map getPair $ allDays
-- | Get a list of interesting numbers of days to have researched: currently
-- every day up to the search cap.
researchDays :: [Integer]
researchDays = [1..maxDays] -- rmdups $ [1..7] ++ [10,15..maxDays] ++ [maxDays]
-- | Top level of the nested result store: number of sealing dice ->
-- per-dice results.
data GStore p d = GStore {dice :: Map Integer (GS1 p d)}
  deriving (Show,Read,Generic,Eq,Ord,FromJSON,ToJSON)
-- | Second level: daily difficulty threshold -> per-threshold results.
data GS1 p d = GS1 {difficulty :: Map Integer (GS2 p d)}
  deriving (Show,Read,Generic,Eq,Ord,FromJSON,ToJSON)
-- | Leaf level: DC results keyed by research target and PS results keyed
-- by number of days.
data GS2 p d = GS2 {target :: Map d [(Integer,p)],days :: Map Integer [(d,p)]}
  deriving (Show,Read,Generic,Eq,Ord,FromJSON,ToJSON)
-- | Mutable store threaded through a generation run.
type IOStore = IORef (GStore Double Integer)
-- | A store with no recorded results.
emptyGS :: GStore p d
emptyGS = GStore $ Map.empty
-- | Insert one PS result into the nested store, keyed by sealing dice,
-- then daily target, then number of days.
addPS :: GStore Double Integer -> PS Double Integer -> GStore Double Integer
addPS GStore{..} PS{..} = GStore $ Map.alter alt1 sealingDice dice
  where
    -- 'fromJust' is safe here: 'alt2 Nothing' always returns a 'Just'.
    alt1 Nothing = Just . GS1 $ Map.singleton dailyTarget (fromJust $ alt2 Nothing)
    alt1 (Just GS1{..}) = Just . GS1 $ Map.alter alt2 dailyTarget difficulty
    alt2 Nothing = Just $ GS2{
        target = Map.empty
      , days = Map.singleton numDays probOfTarget
      }
    alt2 (Just GS2{..}) = Just $ GS2 {
        target = target
      , days = Map.insert numDays probOfTarget days
      }
-- | Insert one DC result into the nested store, keyed by sealing dice,
-- then daily target, then research target. Mirrors 'addPS'.
addDC :: GStore Double Integer -> DC Double Integer -> GStore Double Integer
addDC GStore{..} DC{..} = GStore $ Map.alter alt1 sealingDice dice
  where
    -- 'fromJust' is safe here: 'alt2 Nothing' always returns a 'Just'.
    alt1 Nothing = Just . GS1 $ Map.singleton dailyTarget (fromJust $ alt2 Nothing)
    alt1 (Just GS1{..}) = Just . GS1 $ Map.alter alt2 dailyTarget difficulty
    alt2 Nothing = Just $ GS2{
        target = Map.singleton researchTarget probOfNumDays
      , days = Map.empty
      }
    alt2 (Just GS2{..}) = Just $ GS2 {
        target = Map.insert researchTarget probOfNumDays target
      , days = days
      }
-- | Fold a batch of PS results into the store.
addPSs :: [PS Double Integer] -> GStore Double Integer -> GStore Double Integer
addPSs pss gs = foldl' addPS gs pss
-- | Fold a batch of DC results into the store.
addDCs :: [DC Double Integer] -> GStore Double Integer -> GStore Double Integer
addDCs dcs gs = foldl' addDC gs dcs
-- | Fold both halves of a query batch into the store (DCs first, matching
-- the original composition order).
addPSDC :: ([PS Double Integer],[DC Double Integer]) -> GStore Double Integer -> GStore Double Integer
addPSDC (pss,dcs) gs = addPSs pss (addDCs dcs gs)
-- | Given a number of sealing dice and a daily threshold, generate a number
-- of interesting DC and PS queries, logging progress as we go, and (when a
-- store is provided) accumulate the results into it.
printPSDC :: Maybe IOStore -> (Integer,Integer) -> IO ([PS Double Integer],[DC Double Integer])
printPSDC gs !setPair@(numDice,dailyThresh) = do
  let !distGen = multipleDaysProgress numDice dailyThresh
  pss <- mapM (genPrintPS distGen) researchDays
  let !researchTargets = getRTList pss
  dcs <- mapM (genPrintDC distGen) researchTargets
  let psdc = (pss,dcs)
  -- Update the store if one was supplied, do nothing otherwise. The
  -- previous `sequence $ flip modifyIORef (addPSDC psdc) <$> gs` built and
  -- then discarded an `IO (Maybe ())`; 'mapM_' over the 'Maybe' expresses
  -- the same effect directly.
  mapM_ (\store -> modifyIORef store (addPSDC psdc)) gs
  return psdc
  where
    -- Compute one PS query, logging which setting/day it was for.
    genPrintPS !distGen !rd = do
      let ps = calculatePS setPair rd distGen
      --print dc
      putStrLn $ "ps: " ++ show (setPair,rd)
      return ps
    -- Compute one DC query, logging which setting/target it was for.
    genPrintDC !distGen !nd = do
      let dc = calculateDC setPair nd distGen
      -- when (probOfNumDays dc /= []) $ print dc
      putStrLn $ "dc: " ++ show (setPair,nd)
      return dc
-- | Write one setting pair's PS and DC results out as a pretty-printed
-- JSON file under out/. (The "dailyReseachThreshold" key spelling is part
-- of the emitted JSON schema, so it is left as-is.)
writePSDC :: (Integer,Integer)
  -> ([PS Double Integer],[DC Double Integer])
  -> IO ()
writePSDC !pair@(sealingDice,dailyThresh) !(pss,dcs) = do
  createDirectoryIfMissing False dir
  BS.writeFile file $ encodePretty jsonBlob
  where
    dir = "out/"
    file = dir ++ "sealingCalcs" ++ show pair ++ ".json"
    jsonBlob = object [
        "sealingDice" .= sealingDice
      , "dailyReseachThreshold" .= dailyThresh
      , "Probability of Success Calculations" .= map psToPSJ pss
      , "Days to Completion Calculations" .= map dcToDCJ dcs
      ]
-- Given a number of dice figure out a number of interesting daily thresholds
--
-- param "number of sealing dice"
--
getDailyThresholds :: Integer -> [Integer]
getDailyThresholds !nd = rmdups $ zList ++ [ndMin] ++ pList ++ [ndMax]
  where
    dist = nd `d` 100
    -- With nDice the probability of getting fewer than nMin is basically 0
    ndMin = getLastZero' (dist :: Dist Double Integer)
    -- With nDice the probability of getting more than nMax is basically 0
    ndMax = getFirstOne' (dist :: Dist Double Integer)
    -- number of daily thresholds we're going to be checking in the range
    -- [0..ndMin], where the probability of getting more is basically 100 %
    zDivs = 20
    -- increment in the z range
    zInc = P.max 1 $ ndMin `div` zDivs
    -- List of thresholds to check in the z range
    zList = [0,zInc..ndMin]
    -- for the p range, we're going to get numbers of dice that which have a
    -- p chance of failing outright and not progressing any research.
    pProbs = [0,1/40..1]
    pList = map (flip findPercentile dist) pProbs
-- For a given number of sealing dice, figure out a bunch of interesting
-- thresholds to test and then generate all the necessary distributions,
-- writing one JSON file per (dice, threshold) setting pair.
genForNumDice :: Maybe IOStore -> Integer -> IO ()
genForNumDice gs sealingDice = do
  let !thresholds = getDailyThresholds sealingDice
      !settingPairs = map (\ dt -> (sealingDice,dt)) thresholds
  mapM_ runPSDC settingPairs
  where
    -- Generate the queries for one setting pair and write them to disk.
    runPSDC settingPair = printPSDC gs settingPair >>= writePSDC settingPair
-- Generate all the PS and DC data we want for sealing levels 10..70, then
-- dump the accumulated store as both pretty and minified JSON.
genAndWritePSDC :: IO ()
genAndWritePSDC = do
  gs <- newIORef emptyGS
  mapM_ (genForNumDice $ Just gs) [10..70]
  finalStore <- readIORef gs
  BS.writeFile "out/allData.json" $ encodePretty finalStore
  BS.writeFile "out/allData.min.json" $ encode finalStore
-- | Data type for the storage of plotdata for "sealing lvl -> (D,R -> T)"
-- plots; the map key is the number of sealing dice.
type EstTimeMap = Map Integer EstTimePlot
data EstTimePlot = EstTimePlot {
    -- Y axis: project difficulties
    diffs :: ![Integer]
    -- x axis: research totals
  , totals :: ![Integer]
    -- Plotdata: one row per difficulty, one column per total
  , estDays :: ![[Integer]]
  , variance :: ![[Integer]]
  } deriving (Show,Read,Generic,Eq,Ord,FromJSON,ToJSON)
-- | For a given number of sealing dice, get the list of difficulties
-- we're looking for
getDiffs :: Integer -> [Integer]
getDiffs numDice = [0,zInc..ndMax]
  where
    dist = numDice `d` 100
    -- Cap at the 90th percentile of a single day's roll.
    ndMax = findPercentile 0.9 (dist :: Dist Double Integer)
    -- number of difficulty steps to sample across [0..ndMax]
    zDivs = 200
    -- increment in the sampled range
    zInc = P.max 1 $ ndMax `div` zDivs
-- | For a given number of dice get the list of reasonable research targets:
-- fine steps up to 200, then coarser steps out to the 60th percentile of
-- full-horizon progress.
getTotals :: Integer -> [Integer]
getTotals numDice = rmdups $ [1] ++ [2,4..200] ++ [200, 200 + rInc..rMax]
  where
    -- Progress distribution over the whole search horizon at threshold 0.
    dist = multipleDaysProgress numDice 0 maxDays
    rMax = findPercentile 0.6 (dist :: Dist Double Integer)
    rDivs = 200
    rInc = P.max 1 $ rMax `div` rDivs
-- | Build the full estimated-time plot for one sealing level: for every
-- (difficulty, total) cell, the median days-to-complete and a spread
-- derived from the 5th/95th percentiles.
getEstTimePlot :: Integer -> EstTimePlot
getEstTimePlot numDice = EstTimePlot{
    diffs = {- trace ("for diffs " ++ show diffs) -} diffs
  , totals = {- trace ("for totals " ++ show totals) -} totals
  , estDays = estDays
  , variance = variance
  }
  where
    diffs = getDiffs numDice
    totals = getTotals numDice
    -- get all rows
    (estDays,variance) = unzip $ map getSingDiff diffs
    -- Get the row for a single difficulty
    getSingDiff :: Integer -> ([Integer],[Integer])
    -- The trace is here so I can see progress, but it's an otherwise pure
    -- function, :V medium bad practice I suppose.
    getSingDiff diff = {- trace ("for difficulty " ++ show diff) $ -} (estList,varList)
      where
        distGen = multipleDaysProgress numDice diff
        (estList,varList) = unzip $ map tp totals
          where
            -- Median estimate plus the larger one-sided deviation from the
            -- 5th/95th percentile days; -1 marks "past the search cap".
            tp t = let e5 = ed 0.5 t in (e5,if (e5 == -1) then -1 else P.max (e5 - ed 0.05 t) (ed 0.95 t - e5))
            -- Number of days to some percentage with >max just set to -1
            ed p t = fromMaybe (-1) $ daysToComplete' distGen p t
-- Generate the full pile of estimated time plots as needed, for sealing
-- levels 10,15..70, and write both pretty and minified JSON dumps.
genAndWriteEstTimePlot :: IO ()
genAndWriteEstTimePlot = do
  etpL <- mapM (\ nd -> (nd,) <$> getSingleETP nd) [10,15..70]
  createDirectoryIfMissing False dir
  let ets = Map.fromList etpL
  BS.writeFile (dir ++ "estTimeData.json") $ encodePretty ets
  BS.writeFile (dir ++ "estTimeData.min.json") $ encode ets
  where
    dir = "out/"
    -- Compute one plot, logging progress since this is the slow part.
    getSingleETP :: Integer -> IO (EstTimePlot)
    getSingleETP numDice = do
      putStrLn $ "Beginning generating ETP for Sealing Lvl" ++ (show numDice)
      let !etp = getEstTimePlot numDice
      print etp
      return etp
-- | The code that's actually run when we execute the program: generate
-- every estimated-time plot and write them out as JSON.
main :: IO ()
main = genAndWriteEstTimePlot
-- So we've got the following variables :
--
-- -- | Research points: these are what we accumulate as we work.
-- type Points = Int
--
-- -- | Days of work on the project (1 is start, T is current)
-- type Day = Int
--
-- numDice :: Int
-- numDice := Level of sealing skill / Number of Sealing Dice
-- - Always Known
--
-- diff :: Int
-- diff := Project Difficulty
-- - Hidden Parameter
-- - Have priors P(diff)
--
-- total :: Points
-- total := Total research points needed to complete project
-- - Hidden Parameter
-- - Have priors P(total)
--
-- date :: Day
-- date := Time Spent on specific project (days)
-- - Always known
--
-- roll :: Day -> Int
-- roll d := Sealing roll on day d
-- roll d = sample (asSeed d) $ n `d` 100
-- - Possibly visible?
-- - Can easily get P(roll|numDice,diff)
--
-- effort :: Day -> Int
-- effort d := Effort put towards sealing research on day d
-- effort d = max(0, roll t - diff)
-- - Possibly visible?
-- - Can easily get P(effort|numDice,diff)
--
-- inc :: Day -> Points
-- inc d := amount the project counter is incremented by on day d
-- inc d = (effort t) / (numDice - diff/50)^0.65
-- - Possibly visible?
-- - Can easily get P(inc|numDice,diff)
--
-- progress :: Day -> Points
-- progress d := total progress on research project by end-of-day d
-- progress d = sum [inc d' | d' <- [1..d]]
-- - Possibly visible?
-- - Can easily get P(progress|d,numDice,diff)
--
-- finished :: Day -> Bool
-- finished d := Is the project done by end-of-day d
-- finished d = progress d >= total
-- - Always Known
-- - Can easily get P(finished d|numDice,diff,total)
--
-- Bayes Law:
--
-- P(A|B) = P(A)* P(B|A)/P(B)
--
-- Case 1: We don't get any additional information.
--
-- Known:
-- - numDice
-- - date
-- - (finished d == false) `forall` 1 <= d <= date
--
-- - P(diff,total@0) = P(diff) * P(total)
-- - P(diff,total@x) =
-- P(diff,total@x-1)
-- * P(finished x|diff,total@x-1)
-- / P(finished x@x-1)
--
-- - P(finished d@n) = forall diff
--
--
-- - P(diff,total|d=0) = P(diff,total)
-- - P(diff,total|d=x) = P(diff,total|d=x-1,finished x = false)
-- = P(diff,total|d=x-1) *
-- ( P(finished x = false|d=x-1,diff,total)
-- / P(finished x = false|d=x-1))
-- - P(finished 0) = 0
-- - P(finished x|d=n) = P(finished x|numDice,
--
-- (d = 0) = empty-set
-- (d = x) = d=x-1,finished x == false
--
-- Want:
-- - P(diff,total|d=date)
--
-- Proposal 1;
--
-- proxyRoll :: Day -> Int
-- proxyRoll d := Hidden die roll made for estimating time to completion
-- proxyRoll d = sample (asSeed d) $ n d 100
-- - Hidden Parameter
-- - Can easily get P(proxyRoll|numDice,diff)
--
-- proxyEffort :: Day -> Int
-- proxyEffort d := Estimated daily effort put towards sealing research on day d
-- proxyEffort d = max(0, proxyRoll t - diff)
-- - Hidden Parameter
-- - Can easily get P(proxyEffort|numDice,diff)
--
-- proxyInc :: Day -> Points
-- proxyInc d := Estimated amount the project counter is incremented by on day d
-- proxyInc d = (proxyEffort t) / (numDice - diff/50)^0.65
-- - Possibly visible?
-- - Can easily get P(proxyInc|numDice,diff)
--
-- estimated :: Day -> Either Day Infinity
-- estimated d := Estimated time to project completion using proxy roll
-- estimated d = (total - finished d) / (proxyInc d)
-- - Known
-- We can calculate
-- We always know:
--
-- - N = Number of sealing die
-- - T = Time spent on project
-- - S = Is the project complete or not?
--
--
-- We want to predict:
--
-- - D = Project Difficulty
-- - R = Research Threshold
--
-- We might also have access to:
--
-- - L := Each day's roll for Max(0, Nd100 - D)
-- - Only unknown variable is D.
-- - Cannot provide information on R
-- - Is a pretty easy distribution to work with in order to figure out D
-- - C := Each day's addition to the progress counter
-- - Only dependent on D
--   - Cannot provide information on R
-- - Can
--
-- Key questions:
--
-- - How does choice of D & R affect time to complete a seal?
-- - How does my ability to complete a project change as my level changes?
-- - How
--
--
-- Notation for plots:
-- (Fixed Variables) -> (Independent Variables) -> (Dependent Variables)
--
--
--
--- Leftover code from various tests of main :V
----let pair = (10,888) in printPSDC pair >>= writePSDC pair
-- let cl = [1]
-- pl = [0.2,0.2,0.2,0.2,0.2]
-- ca = fromList (ix1 $ length cl) . map (\ x -> x :+ 0) $ cl
-- pa = fromList (ix1 $ length pl) . map (\ x -> x :+ 0) $ pl
-- o = ldConv ca pa
-- ol = zip @Int [0..] $ toList o
-- pPrint $ ol
-- let gen = (<*>) (1 `d` 3)
-- mapM_ (\ i -> putStrLn $ "output(" ++ show i ++ "):" ++ printDist (gen i :: Dist Double Integer)) [80]
-- let dayGen = (multipleDaysProgress 10 2)
-- print $ calculateDC (10,2) 4732 dayGen
-- mapM_ (\ x -> print (x,cdf (dayGen x :: Dist Double Integer) 4732)) [32..64]
-- let gen = (<*>) (1 `d` 10)
-- mapM_ (\ i -> putStrLn $ "output(" ++ show i ++ "):" ++ printDist (gen i :: Dist Double Integer)) [1..3]
--- print $ min (singleDaysProgress 20 1000 :: Dist Double Integer)
-- print $ max (multipleDaysProg:w
-- ress 20 1000 :: Dist Double Integer)
-- print @[(Integer,Double)] $ map (\x -> (x,cdf (singleDaysProgress 20 1000 :: Dist Double Integer) x)) [-5..95]
--print @[(Integer,Double)] $ map (\x -> (x,cdf (multipleDaysProgress 20 1000 2 :: Dist Double Integer) x)) [-5,-3..200]
-- where
-- c = multipleDaysProgress 70 100 20
-- getData :: [T Double Integer]
-- getData = do
-- sd <- [10,15..60]
-- dt <- 1 : [20,40..80] ++ [0,100..6000]
-- nd <- 1 : [8,16..80]
-- ct <- [1..19] ++ [20,40..100] ++ [200,400..5000] ++ [5500,6000..10000]
-- let p = 1 - cdf (multipleDaysProgress sd dt nd :: Dist Double Integer) ct
-- return $ T sd dt nd ct p
| rohit507/mfd-experiments | app/Sealing.hs | bsd-3-clause | 43,463 | 0 | 18 | 11,659 | 10,579 | 5,686 | 4,893 | -1 | -1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{- |
Use this library to get an efficient, optimal, type-safe 'diff' and
'patch' function for your datatypes of choice by defining a simple GADT and
some class instances. The process is explained in the documentation of the
'Family' type class.
The result of 'diff' is an optimal 'EditScript' that contains the operations
that need to be performed to get from the /source/ value to the /destination/
value. The edit script can be used by 'patch', inspected with 'show' and used
for all kinds of interesting stuff /you/ come up with.
The library has been extracted from Eelco Lempsink's Master's Thesis
/Generic type-safe diff and patch for families of datatypes/ (available online:
<http://eelco.lempsink.nl/thesis.pdf>). See Appendix C for a large example.
Note that some types and functions have been renamed for the benefit of the API
(e.g., @diff@ to @diffL@, @diff1@ to @diff@, @Diff@ to @EditScriptL@).
-}
module Data.Generic.Diff (
-- * Diffing and patching
diff,
patch,
-- ** For multiple datatypes
diffL,
patchL,
-- ** Patch compression
compress,
-- * Patch datatype
EditScriptL(..),
EditScript,
-- * Type classes
Family(..),
Type(..),
-- ** Supporting datatypes
(:~:)(..),
Con(..),
Nil(..),
Cons(..),
-- ** Exports necessary to reimplement patch
List(..),
IsList(..),
Append,
append,
split,
isList
) where
import Data.Type.Equality ( (:~:)(..) )
-- | Edit script type for two single values: the singleton-list
-- specialisation of 'EditScriptL'.
type EditScript f x y = EditScriptL f (Cons x Nil) (Cons y Nil)
-- | Apply the edit script to a value, producing the destination value.
-- Wraps the input in a singleton heterogeneous list, runs 'patchL', and
-- unwraps the singleton result.
patch :: EditScript f x y -> x -> y
patch script value =
  case patchL script (CCons value CNil) of
    CCons result CNil -> result
-- | Calculate the difference between two values in the form of an edit
-- script.
--
-- See the documentation for 'Family' for how to make your own data types
-- work with this function.
diff :: (Type f x, Type f y) => x -> y -> EditScript f x y
diff source destination =
  diffL (CCons source CNil) (CCons destination CNil)
-- | Underlying implementation of 'patch': replays an edit script against a
-- (heterogeneous) list of values. Inserts rebuild constructors, deletes
-- contract them into their fields, copies do both.
patchL :: forall f txs tys . EditScriptL f txs tys -> txs -> tys
patchL (Ins c d) xs = insert c (patchL d xs)
patchL (Del c d) xs = patchL d (delete c xs)
patchL (Cpy c d) xs = insert c (patchL d (delete c xs))
patchL (CpyTree d) (CCons x xs) = CCons x (patchL d xs)
patchL End CNil = CNil
-- | Underlying implementation of 'diff': compares two (heterogeneous)
-- lists of values via the memoised diff table and extracts the optimal
-- edit script from it.
diffL :: (Family f, List f txs, List f tys) => txs -> tys -> EditScriptL f txs tys
diffL xs ys = getEditScriptL (diffLMemo xs ys)
-- | Datatype for type level lists, corresponding to '[]'. Use when creating
-- your 'Family' instance.
data Nil = CNil
-- | Datatype for type level lists, corresponding to '(:)'. Use when creating
-- your 'Family' instance.
data Cons x xs = CCons x xs
-- Right-associative so that `Cons`/`CCons` chains read like ordinary lists.
infixr 5 `Cons`
infixr 5 `CCons`
{- |
To use 'diff' and 'patch' on your datatypes, you must create an instance of
'Family'.
There are four steps to set up everything for 'diff' and 'patch'.
(1) Define your datatypes. (Presumably, you already have done this.)
(2) Create a family datatype, grouping your datatypes together.
(3) Make the family datatype an instance of 'Family'
(4) Create 'Type' instances for each member of the family.
Steps 1-3 are explained below, step 4 is explained in the documentation for
'Type'.
As a running example, we create a simple family of datatypes (step 1):
> data Expr = Min Expr Term
> data Term = Parens Expr
> | Number Int
The second step is creating the family datatype. Each constructor in the
datatypes above gets a constructor in a family GADT:
> data ExprTermFamily :: * -> * -> * where
> Min' :: ExprTermFamily Expr (Cons Expr (Cons Term Nil))
> Parens' :: ExprTermFamily Term (Cons Expr Nil)
> Number' :: ExprTermFamily Term (Cons Int Nil)
> Int' :: Int -> ExprTermFamily Int Nil
The first type argument of the datatype must be the resulting type of the
constructor. The second argument captures the types of the arguments the
constructor expects. Note how to use 'Cons' and 'Nil' to create type level
lists.
The @Int'@ constructor is special, in the sense that it captures the 'Int' type
abstractly (listing all 'Int''s constructors would be quite impossible).
/Caveat emptor/: polymorphic datatypes (like lists) are ill-supported and
require family constructors for each instance. It might require another master
thesis project to solve this.
Step 3 is to create the instance for the 'Family' class. For each function you
will have to implement four functions. It's straightforward albeit a bit
boring.
> instance Family ExprTermFamily where
> decEq Min' Min' = Just (Refl, Refl)
> decEq Parens' Parens' = Just (Refl, Refl)
> decEq Number' Number' = Just (Refl, Refl)
> decEq (Int' x) (Int' y) | x == y = Just (Refl, Refl)
> | otherwise = Nothing
> decEq _ _ = Nothing
>
> fields Min' (Min e t) = Just (CCons e (CCons t CNil))
> fields Parens' (Parens e) = Just (CCons e CNil)
> fields Number' (Number i) = Just (CCons i CNil)
> fields (Int' _) _ = Just CNil
> fields _ _ = Nothing
>
> apply Min' (CCons e (CCons t CNil)) = Min e t
> apply Parens' (CCons e CNil) = Parens e
> apply Number' (CCons i CNil) = Number i
> apply (Int' i) CNil = i
>
> string Min' = "Min"
> string Parens' = "Parens"
> string Number' = "Number"
> string (Int' i) = show i
-}
class Family f where
    -- | Return an instance of the equality GADT ('Refl') if the type and
    -- constructor arguments are equal, else return 'Nothing'.
    decEq :: f tx txs -> f ty tys -> Maybe (tx :~: ty, txs :~: tys)
    -- | When the constructor from the family matches the 'real' constructor,
    -- return the arguments in a heterogeneous list, else return 'Nothing'.
    fields :: f t ts -> t -> Maybe ts
    -- | When the constructor from the family and the heterogeneous list of
    -- arguments match, apply the 'real' constructor. For abstract
    -- constructors, the list of arguments should be 'CNil', but you project
    -- out the value saved with the family constructor.
    apply :: f t ts -> ts -> t
    -- | For 'show'ing the constructors from the family.
    string :: f t ts -> String
{- |
For each type in the family, you need to create an instance of 'Type', listing
all the members of the family GADT which belong to one type.
This is step 4 of the four steps needed to be able to use 'diff' and 'patch'.
Steps 1-3 are explained in the documentation for 'Family'.
Continuing the example from the 'Family' documentation, the instances for
'Type' are:
> instance Type ExprTermFamily Term where
> constructors = [Concr Number', Concr Parens']
>
> instance Type ExprTermFamily Expr where
> constructors = [Concr Min']
>
> instance Type ExprTermFamily Int where
> constructors = [Abstr Int']
-}
-- Per-type membership of the family; see the block comment above for the
-- full set-up instructions.
class (Family f) => Type f t where
    -- | List of constructors from the family GADT for one type in your family
    constructors :: [Con f t]
-- | 'Con' wraps both concrete and abstract constructors to a simple type so
-- constructors for a single type can be put together in a list, see 'Type' for
-- more information and an example.
--
-- Use 'Concr' for concrete constructors (e.g., for simple algebraic datatypes).
--
-- Use 'Abstr' for abstract constructors (e.g., for built-in types or types with many
-- (or infinite) constructors). An abstract constructor is a function from the
-- value itself, so the value can be stored inside the family constructor.
data Con :: (* -> * -> *) -> * -> * where
    Concr :: (List f ts) => f t ts -> Con f t
    Abstr :: (List f ts) => (t -> f t ts) -> Con f t
-- | Types with a runtime witness ('IsList') of their type-level list
-- structure; used to recover class dictionaries from type-level lists.
class List f ts where
    list :: IsList f ts
-- | Singleton witness for a type-level list built from 'Nil' and 'Cons'.
data IsList :: (* -> * -> *) -> * -> * where
    IsNil :: IsList f Nil
    IsCons :: (Type f t) => IsList f ts -> IsList f (Cons t ts)
instance List f Nil where
    list = IsNil
instance (Type f t, List f ts) => List f (Cons t ts) where
    list = IsCons list
{- |
The 'EditScriptL' datatype captures the result of 'diffL' and can be used as the input
to 'patchL' (and 'compress').
The 'diffL' function use a depth-first preorder traversal to traverse the
expression trees. The edit script it outputs contains the operation that must
be applied to the constructor at that point: either keeping it ('Cpy'),
removing it ('Del', which does not remove the complete subtree, but /contracts/
the edge of the removed node) or inserting a new constructor ('Ins', which can
only be done if the available subtrees at that point correspond to the types
the constructor expects). (The 'CpyTree' is only used when running 'compress'
over an existing edit script.)
For more information about this datatype, you're advised to look at Eelco
Lempsink's thesis at <http://eelco.lempsink.nl/thesis.pdf>.
-}
data EditScriptL :: (* -> * -> *) -> * -> * -> * where
    -- Insert a constructor, consuming its fields from the target index.
    Ins :: (Type f t, List f ts, List f tys) => f t ts ->
             EditScriptL f txs (Append ts tys) ->
             EditScriptL f txs (Cons t tys)
    -- Delete (contract) a constructor, exposing its fields in the source index.
    Del :: (Type f t, List f ts, List f txs) => f t ts ->
             EditScriptL f (Append ts txs) tys ->
             EditScriptL f (Cons t txs) tys
    -- Keep a constructor: combined delete-then-insert of the same constructor.
    Cpy :: (Type f t, List f ts, List f txs, List f tys) => f t ts ->
             EditScriptL f (Append ts txs) (Append ts tys) ->
             EditScriptL f (Cons t txs) (Cons t tys)
    -- Copy a whole subtree unchanged; only produced by 'compress'.
    CpyTree :: (Type f t, List f txs, List f tys) =>
                EditScriptL f txs tys ->
                EditScriptL f (Cons t txs) (Cons t tys)
    End :: EditScriptL f Nil Nil
-- | Type-level concatenation of 'Nil' / 'Cons' lists.
type family Append txs tys :: * where
    Append Nil tys = tys
    Append (Cons tx txs) tys = Cons tx (Append txs tys)
-- | Concatenate two 'IsList' witnesses, mirroring 'Append' at the value level.
appendList :: IsList f txs -> IsList f tys -> IsList f (Append txs tys)
appendList IsNil isys = isys
appendList (IsCons isxs) isys = IsCons (appendList isxs isys)
-- | Concatenate two heterogeneous lists, guided by their witnesses.
append :: IsList f txs -> IsList f tys -> txs -> tys -> Append txs tys
append IsNil _ CNil ys = ys
append (IsCons isxs) isys (CCons x xs) ys = CCons x (append isxs isys xs ys)
-- No (Family f) constraint is needed here: pattern matching on the GADT
-- constructors brings the 'Type f t' (and hence 'Family f') dictionary
-- into scope for 'string'.
instance Show (EditScriptL f txs tys) where
    show (Ins c d) = "Ins " ++ string c ++ " $ " ++ show d
    show (Del c d) = "Del " ++ string c ++ " $ " ++ show d
    show (Cpy c d) = "Cpy " ++ string c ++ " $ " ++ show d
    show (CpyTree d) = "CpyTree" ++ " $ " ++ show d
    show End = "End"
-- | Contract the head constructor of the first value, splicing its fields
-- onto the front of the remaining values.
--
-- Calls 'error' when the edit script's constructor does not match the value
-- being patched, i.e. the script was produced for a different source value.
delete :: (Type f t, List f ts, List f txs) => f t ts -> Cons t txs -> Append ts txs
delete c (CCons x xs) =
    case fields c x of
      -- Name the offending constructor so a failed patch is debuggable.
      Nothing -> error ("Patching failed: constructor mismatch at " ++ string c)
      Just ts -> append (isList c) list ts xs
-- | Project the 'IsList' witness belonging to a family constructor's fields.
isList :: (List f ts) => f t ts -> IsList f ts
isList _ = list
-- | Rebuild a value: take the constructor's fields off the front of the
-- list, apply the constructor, and cons the result onto the rest.
insert :: (Type f t, List f ts) => f t ts -> Append ts txs -> Cons t txs
insert c xs = CCons (apply c txs) tys
  where (txs, tys) = split (isList c) xs
-- | Split a concatenated heterogeneous list back into its two halves,
-- using the 'IsList' witness of the first half to know where to cut.
split :: IsList f txs -> Append txs tys -> (txs, tys)
split IsNil rest = (CNil, rest)
split (IsCons more) (CCons hd tl) = (CCons hd front, back)
  where (front, back) = split more tl
-- | Find which family constructor built the given value and hand it, its
-- field-list witness and its fields to the continuation (CPS because the
-- field-list type @ts@ differs per constructor and must stay existential).
matchConstructor :: (Type f t) => t ->
                    (forall ts. f t ts -> IsList f ts -> ts -> r) -> r
matchConstructor = tryEach constructors
  where
    -- Walk the declared constructor list until one matches.
    tryEach :: (Type f t) => [Con f t] -> t ->
               (forall ts. f t ts -> IsList f ts -> ts -> r) -> r
    tryEach (Concr c : cs) x k = matchOrRetry c cs x k
    tryEach (Abstr c : cs) x k = matchOrRetry (c x) cs x k
    -- Running out of candidates means the user's instances are inconsistent.
    tryEach [] _ _ = error "Incorrect Family or Type instance."
    matchOrRetry :: (List f ts, Type f t) => f t ts -> [Con f t] -> t ->
                    (forall ts'. f t ts' -> IsList f ts' -> ts' -> r) -> r
    matchOrRetry c cs x k = case fields c x of
                              Just ts -> k c (isList c) ts
                              Nothing -> tryEach cs x k
-- | Pick the shorter of two edit scripts, comparing their (lazily built)
-- step counts.
best :: EditScriptL f txs tys -> EditScriptL f txs tys -> EditScriptL f txs tys
best dx dy = bestSteps (steps dx) dx (steps dy) dy
-- | Lazy Peano naturals: lets 'bestSteps' compare script lengths without
-- fully evaluating both scripts.
data Nat = Zero | Succ Nat
  deriving (Eq, Show)
-- | Length of an edit script as a lazy 'Nat'.
steps :: EditScriptL f txs tys -> Nat
steps (Ins _ d) = Succ $ steps d
steps (Del _ d) = Succ $ steps d
steps (Cpy _ d) = Succ $ steps d
steps (CpyTree d) = Succ $ steps d -- we're not calling 'steps' on compressed paths; still no reason to crash
steps End = Zero
-- | Return the value whose step count runs out first (the shorter script);
-- biased towards the first argument on a tie.
bestSteps :: Nat -> d -> Nat -> d -> d
bestSteps (Succ nx) x (Succ ny) y = bestSteps nx x ny y
bestSteps Zero x _ _ = x
bestSteps _ _ _ y = y
-- | Reified 'List' dictionary: pattern matching on 'RList' brings the
-- 'List f ts' constraint back into scope.
data RListT :: (* -> * -> *) -> * -> * where
    RList :: List f ts => RListT f ts
-- | Recover a 'List' dictionary from a runtime 'IsList' witness.
reify :: IsList f ts -> RListT f ts
reify IsNil = RList
reify (IsCons ists) = case reify ists of
                        RList -> RList
-- Smart constructors: 'Ins'/'Del'/'Cpy' carry 'List' constraints, so we
-- first reify the witnesses to satisfy them.
ins :: (Type f t) => IsList f ts -> IsList f tys ->
       f t ts -> EditScriptL f txs (Append ts tys) -> EditScriptL f txs (Cons t tys)
ins ists isys =
    case (reify ists, reify isys) of
      (RList, RList) -> Ins
del :: (Type f t) => IsList f ts -> IsList f txs ->
       f t ts -> EditScriptL f (Append ts txs) tys -> EditScriptL f (Cons t txs) tys
del ists isxs =
    case (reify ists, reify isxs) of
      (RList, RList) -> Del
cpy :: (Type f t) => IsList f ts -> IsList f txs -> IsList f tys ->
       f t ts -> EditScriptL f (Append ts txs) (Append ts tys) ->
       EditScriptL f (Cons t txs) (Cons t tys)
cpy ists isxs isys =
    case (reify ists, reify isxs, reify isys) of
      (RList, RList, RList) -> Cpy
-- | Collapse runs of 'Cpy' covering a complete subtree into a single
-- 'CpyTree' node, shrinking the script without changing its meaning.
compress :: (Family f) => EditScriptL f txs tys -> EditScriptL f txs tys
compress End = End
compress (Ins c d) = Ins c (compress d)
compress (Del c d) = Del c (compress d)
compress (CpyTree d) = CpyTree (compress d)
compress (Cpy c d) = let d' = compress d in
                     case copied (isList c) d' of
                       Just d'' -> CpyTree d''
                       Nothing -> Cpy c d'
-- | Check whether the continuation copies every field of the constructor
-- (i.e. the whole subtree was copied); if so, return the remainder.
copied :: (Family f) => IsList f ts ->
          EditScriptL f (Append ts txs) (Append ts tys) -> Maybe (EditScriptL f txs tys)
copied IsNil d = Just d
copied (IsCons xs) (CpyTree d) = copied xs d
copied (IsCons _) _ = Nothing
-- | Memoisation table for 'diffL': each node caches the best edit script
-- for the current pair of lists plus the three sub-tables (insert, delete,
-- copy/replace) of the dynamic-programming recursion. The constructor name
-- encodes whether the source/target list is a Cons (C) or Nil (N).
data EditScriptLMemo :: (* -> * -> *) -> * -> * -> * where
    CC :: (Type f tx, Type f ty, List f txs', List f tys') =>
            f tx txs' -> f ty tys' ->
            EditScriptL f (Cons tx txs) (Cons ty tys) ->
            EditScriptLMemo f (Cons tx txs) (Append tys' tys) ->
            EditScriptLMemo f (Append txs' txs) (Cons ty tys) ->
            EditScriptLMemo f (Append txs' txs) (Append tys' tys) ->
            EditScriptLMemo f (Cons tx txs) (Cons ty tys)
    CN :: (Type f tx, List f txs') => f tx txs' ->
            EditScriptL f (Cons tx txs) Nil ->
            EditScriptLMemo f (Append txs' txs) Nil ->
            EditScriptLMemo f (Cons tx txs) Nil
    NC :: (Type f ty, List f tys') => f ty tys' ->
            EditScriptL f Nil (Cons ty tys) ->
            EditScriptLMemo f Nil (Append tys' tys) ->
            EditScriptLMemo f Nil (Cons ty tys)
    NN :: EditScriptL f Nil Nil ->
            EditScriptLMemo f Nil Nil
-- | Build the full memo table for two heterogeneous lists.
diffLMemo :: (Family f, List f txs, List f tys) => txs -> tys -> EditScriptLMemo f txs tys
diffLMemo = diffLMemo' list list
-- | Worker for 'diffLMemo': structural recursion on both lists, guided by
-- their 'IsList' witnesses. Empty/empty is 'End'; a one-sided Cons forces
-- a delete (or insert) of its head constructor; Cons/Cons ties the three
-- recursive sub-tables together lazily and lets 'bestEditScriptLMemo'
-- choose among insert, delete and copy.
diffLMemo' :: (Family f) => forall txs tys. IsList f txs -> IsList f tys ->
              txs -> tys -> EditScriptLMemo f txs tys
diffLMemo' IsNil IsNil CNil CNil =
    NN End
diffLMemo' (IsCons isxs) IsNil (CCons x xs) CNil =
    matchConstructor x
      (\ cx isxs' xs' ->
          let d = diffLMemo' (appendList isxs' isxs) IsNil
                             (append isxs' isxs xs' xs) CNil
          in cn isxs' isxs cx (del isxs' isxs cx (getEditScriptL d)) d)
diffLMemo' IsNil (IsCons isys) CNil (CCons y ys) =
    matchConstructor y
      (\ cy isys' ys' ->
          let i = diffLMemo' IsNil (appendList isys' isys)
                             CNil (append isys' isys ys' ys)
          in nc isys' isys cy (ins isys' isys cy (getEditScriptL i)) i)
diffLMemo' (IsCons isxs) (IsCons isys) (CCons x xs) (CCons y ys) =
    matchConstructor x
      (\ cx isxs' xs' ->
          matchConstructor y
            (\ cy isys' ys' ->
                -- c: both heads contracted; d/i: extend c back towards the
                -- delete-only / insert-only edges of the DP table.
                let c = diffLMemo' (appendList isxs' isxs) (appendList isys' isys)
                                   (append isxs' isxs xs' xs) (append isys' isys ys' ys)
                    d = extendd isys' isys cy c
                    i = extendi isxs' isxs cx c
                in cc isxs' isxs isys' isys cx cy
                     (bestEditScriptLMemo cx cy isxs' isxs isys' isys i d c) i d c))
-- | Project the cached optimal edit script out of a memo-table node.
getEditScriptL :: EditScriptLMemo f txs tys -> EditScriptL f txs tys
getEditScriptL memo = case memo of
    CC _ _ d _ _ _ -> d
    CN _ d _       -> d
    NC _ d _       -> d
    NN d           -> d
-- | Choose the best script for a Cons/Cons cell: when the two head
-- constructors are provably equal ('decEq'), copy; otherwise take the
-- cheaper of insert and delete via 'best'.
bestEditScriptLMemo :: (Type f tx, Type f ty) => f tx txs' -> f ty tys' ->
       IsList f txs' -> IsList f txs -> IsList f tys' -> IsList f tys ->
       EditScriptLMemo f (Cons tx txs) (Append tys' tys) ->
       EditScriptLMemo f (Append txs' txs) (Cons ty tys) ->
       EditScriptLMemo f (Append txs' txs) (Append tys' tys) ->
       EditScriptL f (Cons tx txs) (Cons ty tys)
bestEditScriptLMemo cx cy isxs' isxs isys' isys i d c = case decEq cx cy of
    Just (Refl, Refl) -> cpy isxs' isxs isys cx (getEditScriptL c)
    Nothing -> best (ins isys' isys cy (getEditScriptL i))
                    (del isxs' isxs cx (getEditScriptL d))
-- | Extend a memo table by one inserted target-head constructor, filling
-- in the new column of the DP table. Nil-source nodes are handled directly;
-- Cons-source nodes go through 'extendd''.
extendd :: (Type f ty) => IsList f tys' -> IsList f tys -> f ty tys' ->
           EditScriptLMemo f txs' (Append tys' tys) ->
           EditScriptLMemo f txs' (Cons ty tys)
extendd isys' isys cy dt@(NN d) = nc isys' isys cy (ins isys' isys cy d) dt
extendd isys' isys cy dt@(NC _ d _) = nc isys' isys cy (ins isys' isys cy d) dt
extendd isys' isys cy dt@(CN _ _ _) = extendd' isys' isys cy dt
extendd isys' isys cy dt@(CC _ _ _ _ _ _) = extendd' isys' isys cy dt
-- | 'extendd' for a Cons-source node: recover the source head via
-- 'extractd' and rebuild the cell with the three sub-tables in place.
extendd' :: (Type f ty, Type f tx) => IsList f tys' -> IsList f tys -> f ty tys' ->
            EditScriptLMemo f (Cons tx txs) (Append tys' tys) ->
            EditScriptLMemo f (Cons tx txs) (Cons ty tys)
extendd' isys' isys cy dt =
    extractd dt (\ isxs' isxs cx dt' ->
      let i = dt
          d = extendd isys' isys cy dt'
          c = dt'
      in cc isxs' isxs isys' isys cx cy (bestEditScriptLMemo cx cy isxs' isxs isys' isys i d c) i d c)
-- | Mirror image of 'extendd': extend a memo table by one deleted
-- source-head constructor, filling in the new row of the DP table.
extendi :: (Type f tx) => IsList f txs' -> IsList f txs -> f tx txs' ->
           EditScriptLMemo f (Append txs' txs) tys' ->
           EditScriptLMemo f (Cons tx txs) tys'
extendi isxs' isxs cx dt@(NN d) = cn isxs' isxs cx (del isxs' isxs cx d) dt
extendi isxs' isxs cx dt@(CN _ d _) = cn isxs' isxs cx (del isxs' isxs cx d) dt
extendi isxs' isxs cx dt@(NC _ _ _) = extendi' isxs' isxs cx dt
extendi isxs' isxs cx dt@(CC _ _ _ _ _ _) = extendi' isxs' isxs cx dt
-- | 'extendi' for a Cons-target node, via 'extracti'.
extendi' :: (Type f tx, Type f ty) => IsList f txs' -> IsList f txs -> f tx txs' ->
            EditScriptLMemo f (Append txs' txs) (Cons ty tys) ->
            EditScriptLMemo f (Cons tx txs) (Cons ty tys)
extendi' isxs' isxs cx dt =
    extracti dt (\ isys' isys cy dt' ->
      let i = extendi isxs' isxs cx dt'
          d = dt
          c = dt'
      in cc isxs' isxs isys' isys cx cy
           (bestEditScriptLMemo cx cy isxs' isxs isys' isys i d c)
           i d c)
-- | Hand the source head constructor, list witnesses and the delete
-- sub-table of a Cons-source memo node to a continuation. The NN/NC cases
-- are unreachable: the type index says the source list is a Cons.
extractd :: EditScriptLMemo f (Cons tx txs) tys' ->
            (forall txs'. IsList f txs' -> IsList f txs -> f tx txs' ->
             EditScriptLMemo f (Append txs' txs) tys' -> r) -> r
extractd (CC c _ d' _ d _) k = k (isList c) (sourceTail d') c d
extractd (CN c d' d) k = k (isList c) (sourceTail d') c d
-- | Witness for the tail of the source list, read off an edit script.
-- 'Ins' leaves the source untouched, so recurse; the other constructors
-- carry the needed 'List' constraint directly.
sourceTail :: EditScriptL f (Cons tx txs) tys -> IsList f txs
sourceTail (Ins _ d) = sourceTail d
sourceTail (Del _ _) = list
sourceTail (Cpy _ _) = list
sourceTail (CpyTree _) = list
-- | Mirror image of 'extractd' for Cons-target memo nodes; NN/CN are
-- unreachable for the same type-index reason.
extracti :: EditScriptLMemo f txs' (Cons ty tys) ->
            (forall tys'. IsList f tys' -> IsList f tys -> f ty tys' ->
             EditScriptLMemo f txs' (Append tys' tys) -> r) -> r
extracti (CC _ c d i _ _) k = k (isList c) (targetTail d) c i
extracti (NC c d i) k = k (isList c) (targetTail d) c i
-- | Witness for the tail of the target list; dual of 'sourceTail'.
targetTail :: EditScriptL f txs (Cons ty tys) -> IsList f tys
targetTail (Ins _ _) = list
targetTail (Del _ d) = targetTail d
targetTail (Cpy _ _) = list
targetTail (CpyTree _) = list
-- Smart constructors for the memo table, mirroring 'ins'/'del'/'cpy':
-- 'NC'/'CN'/'CC' carry 'List' constraints that we satisfy by reifying the
-- supplied 'IsList' witnesses first.
nc :: (Type f t) => IsList f ts -> IsList f tys ->
      f t ts -> EditScriptL f Nil (Cons t tys) ->
      EditScriptLMemo f Nil (Append ts tys) -> EditScriptLMemo f Nil (Cons t tys)
nc ists isys =
    case (reify ists, reify isys) of
      (RList, RList) -> NC
cn :: (Type f t) => IsList f ts -> IsList f txs ->
      f t ts -> EditScriptL f (Cons t txs) Nil ->
      EditScriptLMemo f (Append ts txs) Nil -> EditScriptLMemo f (Cons t txs) Nil
cn ists isxs =
    case (reify ists, reify isxs) of
      (RList, RList) -> CN
cc :: (Type f tx, Type f ty) =>
      IsList f txs' -> IsList f txs -> IsList f tys' -> IsList f tys ->
      f tx txs' -> f ty tys' -> EditScriptL f (Cons tx txs) (Cons ty tys) ->
      EditScriptLMemo f (Cons tx txs) (Append tys' tys) ->
      EditScriptLMemo f (Append txs' txs) (Cons ty tys) ->
      EditScriptLMemo f (Append txs' txs) (Append tys' tys) ->
      EditScriptLMemo f (Cons tx txs) (Cons ty tys)
cc isxs' isxs isys' isys =
    case (reify isxs', reify isxs, reify isys', reify isys) of
      (RList, RList, RList, RList) -> CC
| eelco/gdiff | src/Data/Generic/Diff.hs | bsd-3-clause | 22,149 | 0 | 18 | 6,678 | 6,829 | 3,431 | 3,398 | 308 | 4 |
{-# LANGUAGE CPP #-}
{- Copyright (c) 2008 David Roundy
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE. -}
{-# OPTIONS_GHC -fomit-interface-pragmas #-}
module Distribution.Franchise.Env
( setEnv, getEnv, unsetEnv, addToPath, extraPath,
getEnvironment, getPrivateEnvironment ) where
import Data.Maybe ( catMaybes )
import Control.Monad ( msum )
import qualified System.Environment as E ( getEnv,
#ifdef GETENVIRONMENTWORKS
getEnvironment
#endif
)
import Distribution.Franchise.ConfigureState
( C, getAllExtraData, getExtraData, (<<=), addExtra, getExtra, rmExtra,
io, catchC, amInWindows )
import Distribution.Franchise.ListUtils ( stripPrefix )
-- | Find the value of an environment variable, if present. Values set via
-- 'setEnv' (stored in the configure state) take precedence over the real
-- process environment; a missing variable yields 'Nothing'.
getEnv :: String -> C (Maybe String)
getEnv name =
    do cached <- getExtraData ("env-" ++ name)
       case cached of
         Just v  -> return (Just v)
         Nothing -> (Just `fmap` io (E.getEnv name))
                    `catchC` \_ -> return Nothing
-- | Set the value of an environment variable. Stored in the configure
-- state under an "env-" prefixed key, so it overrides the real process
-- environment in 'getEnv' / 'getEnvironment'.
setEnv :: String -> String -> C ()
setEnv e v = ("env-"++e) <<= v
-- | Remove a previously 'setEnv'-ed override from the configure state.
unsetEnv :: String -> C ()
unsetEnv e = rmExtra ("env-"++e)
-- | All environment overrides recorded with 'setEnv': every extra-data
-- entry whose key carries the "env-" prefix, with the prefix stripped.
getPrivateEnvironment :: C [(String, String)]
getPrivateEnvironment = fmap (catMaybes . map onlyEnv) getAllExtraData
    where onlyEnv (key, val) =
              fmap (\name -> (name, val)) (stripPrefix "env-" key)
-- | The effective environment: 'setEnv' overrides first, then the process
-- environment with any shadowed keys filtered out. When the platform's
-- getEnvironment is unreliable (CPP flag unset) we probe a fixed list of
-- commonly needed variables one by one instead.
getEnvironment :: C [(String, String)]
getEnvironment = do pe <- getPrivateEnvironment
#ifdef GETENVIRONMENTWORKS
                    e <- io E.getEnvironment
#else
                    let gete x = do v <- getEnv x
                                    return $ fmap (\y -> (x,y)) v
                    e <- catMaybes `fmap`
                         mapM gete ["HOME","PATH","PWD","PREFIX",
                                    "GHC_PACKAGE_PATH",
                                    "FRANCHISE_GHC_PACKAGE_CONF",
                                    "ALLUSERSPROFILE", "APPDATA", "ComSpec",
                                    "DISPLAY", "EDITOR", "NUMBER_OF_PROCESSORS",
                                    "OS", "USER", "USERNAME", "USERPROFILE",
                                    "windir", "winsysdir", "TEMP"]
#endif
                    return (pe ++ filter ((`notElem` (map fst pe)) . fst) e)
-- | 'addToPath' adds a directory to the executable search path in a
-- platform-independent way. See <../10-add-to-path.html> for a
-- demonstration of how 'addToPath' works.
--
-- On Windows the variable may be stored as PATH or Path (names are
-- case-insensitive there), so both are consulted, the "Path" override is
-- dropped, and the merged value is re-stored under "PATH" with the
-- Windows @;@ separator; elsewhere the POSIX @:@ separator is used.
-- The directory is also remembered in the "extra-path" list ('extraPath').
addToPath :: FilePath -> C ()
addToPath d = do amw <- amInWindows
                 if amw
                    then do -- environment variables are
                            -- case-insensitive on windows
                            Just oldpath <- msum `fmap`
                                            sequence [getEnv "PATH",
                                                      getEnv "Path",
                                                      return $ Just ""]
                            rmExtra "env-Path"
                            setEnv "PATH" $ d++';':oldpath
                    else do oldpath <- maybe "" id `fmap` getEnv "PATH"
                            setEnv "PATH" $ d++':':oldpath
                 ps <- extraPath
                 addExtra "extra-path" (d:ps)
-- | Directories previously appended to the search path via 'addToPath'.
extraPath :: C [FilePath]
extraPath = getExtra "extra-path"
| droundy/franchise | Distribution/Franchise/Env.hs | bsd-3-clause | 4,740 | 0 | 16 | 1,509 | 653 | 352 | 301 | 55 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main where
import Database.Redis (connect, defaultConnectInfo, connectHost)
import TicTacToe
import Web.Scotty
import Network.HTTP.Types.Status
import Data.Maybe
import Control.Monad
import Control.Monad.IO.Class
import System.Environment
import qualified Network.Wai.Handler.Warp as W
-- | Scotty options: first field is scotty's verbosity (1), and Warp is
-- configured to listen on all interfaces with its default settings.
opts :: Options
opts = Options 1 (W.setHost "0.0.0.0" W.defaultSettings)
-- Route-parameter parsing: only the literal "1" / "2" denote players.
instance Parsable Player where
  parseParam raw = case raw of
    "1" -> Right Player1
    "2" -> Right Player2
    _   -> Left "Illegal player id"
-- Any text is accepted verbatim as a game identifier.
instance Parsable GameId where
  parseParam raw = Right (GameId raw)
-- | Wire up Redis and serve the HTTP API. Routes:
--   GET  /                        -> redirect to the game history
--   GET  /test/:id                -> testing module for task :id with random moves
--   GET  /history[/:gid]          -> game top / single-game history
--   POST /game/:gid/player/:pid   -> record a player's move (body = board)
--   GET  /game/:gid/player/:pid   -> retrieve the opponent's move
main :: IO ()
main = do
  -- Redis host comes from the environment, defaulting to the "redis"
  -- hostname (e.g. a docker-compose service name).
  redisHost <- fromMaybe "redis" <$> lookupEnv "REDIS_HOST"
  conn <- connect defaultConnectInfo {connectHost = redisHost}
  scottyOpts opts $ do
    get "/" $ redirect "/history"
    get "/test/:id" $ do
      taskId <- param "id"
      -- Fall through (404) for task ids outside the known range.
      unless (taskId >= 1 && taskId <= taskQuantity) next
      moves <- liftIO randomMoves
      extra <- liftIO randomMoves
      text $ testingModule taskId moves extra
    get "/history" $ do
      result <- liftIO $ gameTop conn
      html result
    get "/history/:gid" $ do
      gameId <- param "gid"
      result <- liftIO $ gameHistory conn gameId
      text result
    post "/game/:gid/player/:pid" $ do
      gameId <- param "gid"
      playerId <- param "pid"
      -- Wire format of the board is negotiated via Content-Type.
      ct <- fmap (lookup "Content-Type") headers
      bd <- body
      (c, m, d) <-
        case readBoardFromWire ct bd of
          -- Malformed body: the parse error already carries status + payload.
          Left l -> return l
          Right moves -> liftIO $ record conn moves gameId playerId
      status $ Status c m
      raw d
    get "/game/:gid/player/:pid" $ do
      gameId <- param "gid"
      playerId <- param "pid"
      -- Response format is negotiated via the Accept header.
      acc <- fmap (lookup "Accept") headers
      (c, m, d, ct) <- liftIO $ retrieveMove conn acc gameId playerId
      status $ Status c m
      setHeader "Content-Type" ct
      raw d
| vipo/TicTacToe | app/Main.hs | bsd-3-clause | 1,929 | 0 | 18 | 486 | 614 | 289 | 325 | 58 | 2 |
module Main where
import Control.Applicative
import Data.Array
import Data.List (maximumBy)
import Data.List.Extras.Argmax
import qualified Data.Map as M
import Data.Maybe
import Data.Ord
import System.Directory (getCurrentDirectory,
getDirectoryContents)
import System.FilePath
import System.IO ()
import System.Random (newStdGen)
import qualified System.Random.Shuffle as Shuffler
import Text.XML.Light
------------------------------------------------------------------------
-- Constants
------------------------------------------------------------------------
-- Fraction of the corpus used for the model, the remainder held out for
-- testing. NOTE(review): not referenced in the visible portion of this
-- file — confirm usage further down.
modelTestRatio :: Double
modelTestRatio = 0.9
------------------------------------------------------------------------
-- types
------------------------------------------------------------------------
-- Domain aliases for the HMM part-of-speech tagger. Note that 'Word'
-- shadows Prelude's numeric 'Word' within this module.
type Word = String
type Tag = String
-- Probability value.
type Pr = Double
type TaggedWord = (Word, Tag)
type Sentence = [Word]
type TaggedSentence = [TaggedWord]
-- Ordered pair of adjacent tags (predecessor, successor).
type Bigram = (Tag, Tag)
type Frequencies = M.Map (Word, Word) Integer
-- P(tag_i | tag_{i-1}) transition table.
type TagTransitionPr = M.Map (Tag, Tag) Pr
-- P(word | tag) emission table.
type WordLikelihoodPr = M.Map (Word, Tag) Pr
type TagHistogram = M.Map Tag Int
type WordTagHistogram = M.Map (Word, Tag) Int
------------------------------------------------------------------------
-- Hidden Markov Model
------------------------------------------------------------------------
data HMM = HMM [Tag] TagTransitionPr WordLikelihoodPr deriving(Show)
------------------------------------------------------------------------
-- IO
------------------------------------------------------------------------
-- | Keep only plain @.xml@ corpus files, skipping the special "." and
-- ".." directory entries.
isRegularFile :: FilePath -> Bool
isRegularFile path
  | path == "." || path == ".." = False
  | otherwise                   = takeExtension path == ".xml"
-- | List the regular XML files inside @path@, resolved relative to the
-- current working directory.
readDir :: String -> IO [FilePath]
readDir path = do
  cwd <- getCurrentDirectory
  entries <- getDirectoryContents (cwd ++ "/" ++ path)
  return (filter isRegularFile entries)
-- | Parse a corpus XML document into tagged sentences.  Every
-- @<sentence>@ element contributes one sentence; each @<tok>@ child
-- becomes a (word, tag) pair, the tag coming from the token's @cat@
-- attribute.  NOTE(review): 'fromJust' makes a missing @cat@ attribute
-- a runtime crash -- confirm the corpus guarantees it.
-- A ("<s>", "BOS") sentinel is prepended to every sentence so the HMM
-- can model sentence-initial transitions.
parseXml :: String -> [TaggedSentence]
parseXml source =
  let contents = parseXML source
      sentenceValues = concatMap (findElements $ simpleName "sentence") (onlyElems contents)
      sentences = map (findElements $ simpleName "tok") sentenceValues
      nestedWords = map (map (\x -> (strContent x, fromJust $ findAttr (simpleName "cat") x))) sentences
      simpleName s = QName s Nothing Nothing
  in
    map (("<s>", "BOS"):) nestedWords
------------------------------------------------------------------------
-- Train
-- Calculating parameters of Hidden Markov Model
------------------------------------------------------------------------
-- | Estimate the HMM parameters from a tagged corpus via
-- maximum-likelihood counts:
--
--   * transition: count(current, previous) / count(previous)
--   * emission:   count(word, tag) / count(tag)
--
-- Bigram keys are (current, previous) -- see 'bigrams' -- so
-- 'tagMapFunc', which divides by the count of the key's /second/
-- component, always normalises by the conditioning tag.  The "BOS"
-- sentinel participates in transitions but is removed from the
-- emitted state list.
train :: [TaggedSentence] -> HMM
train taggedSentences = HMM (filter (/= "BOS") (M.keys tagHistogram)) transitionPr wordLikelihoodPr
  where
    taggedWords = concat taggedSentences
    tagHistogram = histogram $ map snd taggedWords
    tagBigramHistogram = histogram $ concatMap (bigrams . map snd) taggedSentences
    wordTagHistogram = histogram taggedWords
    -- Normalise a raw count by the frequency of the conditioning tag.
    tagMapFunc (_, tag) v = fromIntegral v / fromIntegral (M.findWithDefault 0 tag tagHistogram)
    transitionPr = M.mapWithKey tagMapFunc tagBigramHistogram
    wordLikelihoodPr = M.mapWithKey tagMapFunc wordTagHistogram
-- | Count the occurrences of each distinct element.
histogram :: (Ord a) => [a] -> M.Map a Int
histogram = foldr bump M.empty
  where
    bump key counts = M.insertWith (+) key 1 counts
-- | Adjacent tag pairs ordered (current, previous): for
-- [t0, t1, t2] the result is [(t1, t0), (t2, t1)].
bigrams :: [Tag] -> [(Tag, Tag)]
bigrams ts = zip (drop 1 ts) ts
------------------------------------------------------------------------
-- Viterbi
------------------------------------------------------------------------
-- | Find the most likely tag sequence for a sentence with the Viterbi
-- algorithm.  A lazily-evaluated 'Array' memoises, for each
-- (position, tag-index) cell, the best predecessor tag index together
-- with the path probability; 'traceback' then follows the stored
-- back-pointers from the best final cell.  Position 0 is the
-- "<s>"/BOS sentinel (fixed probability 1), so traceback stops there
-- without emitting a tag for it.
viterbi :: HMM -> Sentence -> [Tag]
viterbi (HMM tags transitionPr wordPr) sentence =
  traceback [] (maximumBy (comparing (\ti -> snd $ matrix!(sentLen-1, ti))) tagRange) (sentLen-1)
  where
    sentLen = length sentence
    tagLen = length tags
    tagRange = [0..tagLen-1]
    sentRange = [0..sentLen-1]
    -- Lazy array: cells are computed on demand, which makes the
    -- self-referential reads of 'matrix' below well-defined.
    matrix = listArray ((0, 0), (sentLen-1, tagLen-1)) [probability x y | x <- sentRange, y <- tagRange]

    -- Best (predecessor index, probability) for position si with tag
    -- index ti: predecessor maximum times the emission probability.
    probability :: Int -> Int -> (Int, Pr)
    probability 0 _ = (0, 1)
    probability si ti = (fst tagMax, snd tagMax * findPr (sentence!!si, tags!!ti) wordPr)
      where tagMax = tagmax si ti

    -- Maximise (transition * previous path probability) over all
    -- possible previous tag indices y.
    tagmax :: Int -> Int -> (Int, Pr)
    tagmax si ti = argmaxWithMax (\y -> findPr (tags!!ti, tags!!y) transitionPr * snd (matrix!(si-1, y))) tagRange

    -- Walk the back-pointers from the last position down to the
    -- sentinel at position 0, accumulating tags front-to-back.
    traceback :: [Tag] -> Int -> Int -> [Tag]
    traceback resultTags _ 0 = resultTags
    traceback resultTags ti si = traceback ((tags!!ti):resultTags) (fst (matrix!(si, ti))) (si-1)
-- | Look up a probability, falling back to a tiny smoothing constant
-- for events never seen during training.
findPr :: (Fractional v, Ord k) => k -> M.Map k v -> v
findPr key table = M.findWithDefault 0.00001 key table
------------------------------------------------------------------------
-- Evaluation
------------------------------------------------------------------------
-- | Fraction of correctly predicted tags over the held-out corpus
-- (the "BOS" sentinels are excluded from the gold tags, matching the
-- tagger's output, which never emits the sentinel).
precision :: [TaggedSentence] -> HMM -> Pr
precision testSentences hmm = hits / fromIntegral (length comparisons)
  where
    plainSentences = map (map fst) testSentences
    goldTags = filter (/= "BOS") $ concatMap (map snd) testSentences
    predicted = concatMap (viterbi hmm) plainSentences
    comparisons = zipWith (==) goldTags predicted
    hits = fromIntegral $ length $ filter id comparisons
------------------------------------------------------------------------
-- Train-Test model separation
------------------------------------------------------------------------
-- | Split a corpus into a training part and a test part according to
-- 'modelTestRatio'.
splitIntoModelAndTest :: [a] -> ([a], [a])
splitIntoModelAndTest xs = splitAt modelSize xs
  where
    modelSize = truncate (fromIntegral (length xs) * modelTestRatio)
-- | Shuffle a list using a freshly split random generator.
shuffle :: [a] -> IO [a]
shuffle xs = do
  gen <- newStdGen
  let n = length xs
  return (Shuffler.shuffle' xs n gen)
------------------------------------------------------------------------
-- main
------------------------------------------------------------------------
-- | Read the XML corpus, shuffle the files, split them into training
-- and test sets per 'modelTestRatio', train the HMM on the training
-- part and report tagging precision on the held-out part.
main :: IO ()
main = do
  let corpusPath = "corpus/"
  files <- readDir corpusPath
  filePaths <- shuffle $ map (corpusPath++) files
  contents <- mapM readFile filePaths
  putStrLn "Calculating..."
  let (model, test) = splitIntoModelAndTest contents
      modelSentences = concatMap parseXml model
      testModelSentences = concatMap parseXml test
      hiddenMarkovModel = train modelSentences
  putStr "Precision: "
  print $ precision testModelSentences hiddenMarkovModel
| vimster/part-of-speech-tagging | src/Main.hs | bsd-3-clause | 6,423 | 0 | 18 | 1,183 | 1,743 | 946 | 797 | 104 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Trombone.RoutePattern where
import Data.Text ( Text )
import qualified Data.Text as Text
-- | A string literal or a placeholder variable name.
data RouteSegment = Atom Text | Variable Text
  deriving (Show, Eq)

-- | A route pattern is a list of segments.
newtype RoutePattern = RoutePattern [RouteSegment]
  deriving (Show, Eq)

-- | The result from matching a uri with a route pattern.  'Params'
-- carries the (variable name, segment value) pairs captured by
-- 'Variable' segments; a literal or length mismatch gives 'NoMatch'.
data RouteMatch = NoMatch | Params [(Text, Text)]
  deriving (Show, Eq)
-- | Match a route pattern against a list of uri path segments and return
-- either a NoMatch or the list of key-value pairs with the extracted uri
-- parameter values.
-- | Match a route pattern against the path segments of a URI.  Fails
-- immediately when the segment counts differ; otherwise every 'Atom'
-- must equal its segment literally, and every 'Variable' captures its
-- segment as a (name, value) parameter.
match :: RoutePattern -> [Text] -> RouteMatch
match (RoutePattern segments) parts
  | length parts /= length segments = NoMatch
  | otherwise = foldr step (Params []) (zip segments parts)
  where
    step _ NoMatch = NoMatch
    step (Atom lit, part) found@(Params _)
      | lit == part = found
      | otherwise   = NoMatch
    step (Variable name, part) (Params ps) = Params ((name, part) : ps)
-- | Translate a URI into a route pattern: split on \'/\', drop blank
-- segments, and turn @:name@ segments into capture variables.
-- ('Text.head' is safe here because blank segments are filtered out.)
decompose :: Text -> RoutePattern
decompose uri = RoutePattern (map classify meaningful)
  where
    meaningful = filter blanks (Text.splitOn "/" uri)
    classify seg
      | Text.head seg == ':' = Variable (Text.tail seg)
      | otherwise            = Atom seg
-- | Predicate to filter out blank uri segments: empty segments and a
-- bare ":" are rejected, everything else is kept.
blanks :: Text -> Bool
{-# INLINE blanks #-}
blanks seg = not (Text.null seg) && seg /= Text.singleton ':'
| johanneshilden/trombone | Trombone/RoutePattern.hs | bsd-3-clause | 1,534 | 0 | 12 | 423 | 415 | 218 | 197 | 26 | 3 |
import Data.Hashable
import Data.Text hiding (map)
import Data.Time.Clock
import Data.Typeable
import Database.Datalog
import Data.EDN
-- | Placeholder entry point; the notes below sketch the Datomic-style
-- schema this executable is meant to experiment with.  A top-level
-- type signature is added so the module compiles cleanly under -Wall.
main :: IO ()
main = putStrLn "Hello World"
-- Datomic play
-- Schema
--
-- Each Datomic database has a schema that describes the set of
-- attributes that can be associated with entities. A schema only
-- defines the characteristics of the attributes themselves. It does
-- not define which attributes can be associated with which entities.
-- Every new attribute is described by three required attributes:
-- :db/ident
-- :db/valueType
-- :db/cardinality
-- optional attribute
-- :db/doc
-- :db/unique
-- and some others
| alanz/hdedalus | src/main.hs | bsd-3-clause | 671 | 0 | 7 | 121 | 64 | 43 | 21 | 8 | 1 |
{-# LANGUAGE RankNTypes #-}
{-|
@pipes-tar@ is a library for the @pipes@-ecosystem that provides the ability
to read from tar files in constant memory, and write tar files using as much
memory as the largest file.
-}
module Pipes.Tar
( -- * Reading
-- $reading
parseTarEntries
, iterTarArchive
-- * Writing
-- $writing
, writeTarArchive
-- * 'TarEntry'
, TarHeader(..)
, TarEntry, tarHeader, tarContent
, EntryType(..)
-- ** Constructing 'TarEntry's
, directoryEntry
, fileEntry
, TarArchive(..)
) where
--------------------------------------------------------------------------------
import Control.Applicative
import Control.Monad ((>=>), guard, join, unless, void)
import Control.Monad.Trans.Free (FreeT(..), FreeF(..), iterT, transFreeT, wrap)
import Control.Monad.Writer.Class (tell)
import Data.Char (digitToInt, intToDigit, isDigit, ord)
import Data.Digits (digitsRev, unDigits)
import Data.Monoid (Monoid(..), mconcat, (<>), Sum(..))
import Data.Time (UTCTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime, utcTimeToPOSIXSeconds)
import Data.Tuple (swap)
import Pipes ((>->))
import System.Posix.Types (CMode(..), FileMode)
--------------------------------------------------------------------------------
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as Char8
import qualified Data.Serialize.Get as Get
import qualified Pipes
import qualified Pipes.Lift as Pipes
import qualified Pipes.Prelude as Pipes
import qualified Pipes.ByteString as PBS
import qualified Pipes.Parse as Parse
--------------------------------------------------------------------------------
-- | A 'TarEntry' contains all the metadata about a single entry in a tar file.
-- | A 'TarHeader' contains all the metadata about a single entry in a tar file.
data TarHeader
    = TarHeader
      { entryPath :: !FilePath        -- ^ Entry name (truncated to 100 bytes by 'encodeTar').
      , entryMode :: !FileMode        -- ^ Permission bits.
      , entryUID :: !Int              -- ^ Owning user id.
      , entryGID :: !Int              -- ^ Owning group id.
      , entrySize :: !Int             -- ^ Content length in bytes.
      , entryLastModified :: !UTCTime -- ^ Modification time (stored as POSIX seconds on the wire).
      , entryType :: !EntryType       -- ^ File or directory.
      , entryLinkName :: !String      -- ^ Link target; written as NULs by 'encodeTar'.
      }
    deriving (Eq, Show)
--------------------------------------------------------------------------------
-- | The kind of tar entry.  Only regular files (typeflag \'0\'/NUL)
-- and directories (typeflag \'5\') are supported -- see 'decodeTar'.
data EntryType = File | Directory
  deriving (Eq, Show)
--------------------------------------------------------------------------------
-- | A single archive entry: its header plus a producer streaming its
-- content.  The producer's return value @r@ carries the continuation
-- of the stream (i.e. the rest of the archive).
data TarEntry m r
  = TarEntry { tarHeader :: TarHeader
             , tarContent :: Pipes.Producer BS.ByteString m r
             }

-- Mapping acts on the continuation carried by the content producer.
instance Monad m => Functor (TarEntry m) where
  fmap f (TarEntry header r) = TarEntry header (fmap f r)
----------------------------------------------------------------------------------
-- | A whole archive: a free monad of 'TarEntry' layers, terminated by
-- a producer of any unconsumed trailing bytes.
newtype TarArchive m = TarArchive (FreeT (TarEntry m) m
                                         (Pipes.Producer BS.ByteString m ()))

-- Archives concatenate by sequencing their entry streams.
instance Monad m => Monoid (TarArchive m) where
  (TarArchive a) `mappend` (TarArchive b) = TarArchive (a >> b)
  mempty = TarArchive (return (return ()))

--iterTarArchive
--  :: (Pipes.MonadCatch m, Monad m, Pipes.MonadIO m)
--  => (TarEntry m a -> m a)
--  -> TarArchive m -> m ()
-- | Consume an archive entry by entry with the supplied handler,
-- discarding the trailing-bytes producer.
-- NOTE(review): the commented-out signature above may no longer match
-- the implementation -- consider restoring an explicit signature.
iterTarArchive f (TarArchive free) = iterT f (void free)
----------------------------------------------------------------------------------
--{- $reading
-- tar files are read using 'parseTarEntries'. In short, 'parseTarEntries' is
-- used to read the format of a tar file, turning a stream of bytes into a
-- stream of 'TarEntry's. The stream is contained under a free monad, which
-- forces you to consume each tar entry in order (as streams can only be read
-- in the forward direction).
--
-- Here is an example of how to read a tar file, outputting just the names of
-- all entries in the archive:
--
-- > import Control.Monad (void)
-- > import Control.Monad.IO.Class (liftIO)
-- > import Pipes
-- > import Pipes.ByteString (fromHandle)
-- > import Pipes.Parse (wrap)
-- > import Pipes.Prelude (discard)
-- > import Pipes.Tar
-- > import System.Environment (getArgs)
-- > import System.IO (withFile, IOMode(ReadMode))
-- >
-- > main :: IO ()
-- > main = do
-- > (tarPath:_) <- getArgs
-- > withFile tarPath ReadMode $ \h ->
-- > iterTarArchive readEntry (parseTarEntry (fromHandle h))
-- >
-- > where
-- >
-- > readEntry header = do
-- > liftIO (putStrLn . entryPath $ header)
-- > forever await
--
-- We use 'System.IO.withFile' to open a handle to the tar archive we wish to
-- read, and then 'Pipes.ByteString.fromHandle' to stream the contents of this
-- handle into something that can work with @pipes@.
--
-- As mentioned before, 'parseTarEntries' yields a free monad, and we can
-- destruct this using 'iterT'. We pass 'iterT' a function to operate on each
-- element, and the whole tar entry will be consumed from the start. The
-- important thing to note here is that each element contains the next
-- element in the stream as the return type of its content producer. It is
-- this property that forces us to consume every entry in order.
--
-- Our function to 'iterT' simply prints the 'entryPath' of a 'TarHeader',
-- and then runs the 'Pipes.Producer' for the content - substituting each
-- 'Pipes.yield' with an empty action using 'Pipes.for'. Running this will
-- return us the 'IO ()' action that streams the /next/ 'TarEntry', so we
-- just 'join' that directly to ourselves.
--
-- Discarding content is a little boring though - here's an example of how we
-- can extend @readEntry@ to show all files ending with @.txt@:
--
-- > import Data.List (isSuffixOf)
-- > ...
-- > where
-- > readEntry header =
-- > let reader = if ".txt" `isSuffixOf` entryPath tarEntry &&
-- > entryType tarEntry == File =
-- > then print
-- > else forever await
-- > in reader
--
-- We now have two branches in @readEntry@ - and we choose a branch depending
-- on whether or not we are looking at a file with a file name ending in
-- ".txt". If so, we read the body by substitutting each 'Pipes.yield' with
-- 'putStr'. If not, then we discard the contents as before. Either way, we
-- run the entire producer, and make sure to 'join' the next entry.
---}
-- | Parse a byte stream into a 'TarArchive': read 512-byte header
-- records one at a time, decode each into a 'TarHeader', and expose
-- the following @entrySize@ bytes as the entry's content producer.
--
--   * A short header read (< 512 bytes) ends the archive; the bytes
--     already consumed are pushed back onto the trailing producer.
--   * An all-zero record followed by a second all-zero record is the
--     standard end-of-archive marker and is swallowed.
--   * A header that fails to decode is pushed back and ends parsing.
parseTarEntries
  :: (Functor m, Monad m) => Pipes.Producer BS.ByteString m () -> TarArchive m
parseTarEntries = TarArchive . loop
  where
    loop upstream = FreeT $ do
      (headerBytes, rest) <- drawBytesUpTo 512 upstream
      go headerBytes rest

    go headerBytes remainder
      | BS.length headerBytes < 512 =
          return $ Pure (Pipes.yield headerBytes >> remainder)
      | BS.all (== 0) headerBytes = do
          (eofMarker, rest) <- drawBytesUpTo 512 remainder
          return $ Pure $
            if BS.all (== 0) eofMarker
              then rest
              else Pipes.yield eofMarker >> rest
      | otherwise =
          case decodeTar headerBytes of
            Left _ -> return (Pure (Pipes.yield headerBytes >> remainder))
            Right header ->
              return $ Free $ TarEntry header $ parseBody header remainder

    parseBody header = fmap loop . produceBody
      where
        produceBody = PBS.splitAt (entrySize header) >=> consumePadding
        -- Entry bodies are NUL-padded to the next 512-byte boundary.
        -- Sizes that are already an exact multiple of 512 (including
        -- zero) need no padding: the previous formula
        -- (512 - size `mod` 512) evaluated to 512 in that case and
        -- wrongly skipped a full record of the following entry.  The
        -- writer's 'pad' in 'writeTarArchive' already guards this
        -- case; the reader now mirrors it.
        consumePadding p
          | entrySize header `mod` 512 == 0 = return p
          | otherwise =
              let padding = 512 - entrySize header `mod` 512
              in Pipes.for (PBS.splitAt padding p) (const $ return ())
--------------------------------------------------------------------------------
-- | Decode a 512-byte tar header record.  Field layout (offsets in
-- bytes): name 0-99, mode 100-107, uid 108-115, gid 116-123,
-- size 124-135, mtime 136-147, checksum 148-155, typeflag 156,
-- linkname 157-256, then 255 bytes skipped.  The checksum is verified
-- against the sum of all header bytes with the checksum field itself
-- treated as eight spaces.
decodeTar :: BS.ByteString -> Either String TarHeader
decodeTar header = flip Get.runGet header $
    TarHeader <$> parseASCII 100
              <*> fmap fromIntegral (readOctal 8)
              <*> readOctal 8
              <*> readOctal 8
              <*> readOctal 12
              <*> (posixSecondsToUTCTime . fromIntegral <$> readOctal 12)
              <* (do checksum <- Char8.takeWhile isDigit .
                                 Char8.dropWhile (== ' ') <$> Get.getBytes 8
                     guard (parseOctal checksum == expectedChecksum))
              <*> (Get.getWord8 >>= parseType . toEnum . fromIntegral)
              <*> parseASCII 100
              <* Get.getBytes 255
  where
    readOctal :: Int -> Get.Get Int
    readOctal n = parseOctal <$> Get.getBytes n

    -- Numeric fields: either base-256 "binary" encoding (leading byte
    -- 0x80) or space/NUL-delimited ASCII octal digits.
    parseOctal :: BS.ByteString -> Int
    parseOctal x
      | BS.head x == 128 =
          foldl (\acc y -> acc * 256 + fromIntegral y) 0 .
          BS.unpack . BS.drop 1 $ x
      | otherwise =
          unDigits 8 . map digitToInt . Char8.unpack .
          Char8.dropWhile (== ' ') .
          BS.takeWhile (not . (`elem` [ fromIntegral $ ord ' ', 0 ])) $ x

    -- NOTE(review): partial -- any typeflag other than NUL/'0'/'5'
    -- calls 'error' instead of returning a Left.
    parseType '\0' = return File
    parseType '0' = return File
    parseType '5' = return Directory
    parseType x = error . show $ x

    parseASCII n = Char8.unpack . BS.takeWhile (/= 0) <$> Get.getBytes n

    -- Checksum recomputed with bytes 148..155 replaced by spaces.
    expectedChecksum =
      let (left, rest) = BS.splitAt 148 header
          right = BS.drop 8 rest
      in (sum :: [Int] -> Int) $ map fromIntegral $ concatMap BS.unpack
           [ left, Char8.replicate 8 ' ', right ]
--------------------------------------------------------------------------------
-- | Render a 'TarHeader' as a 512-byte header record.  Numeric fields
-- are zero-padded, NUL-terminated ASCII octal; the checksum is the
-- byte sum of the header with the checksum field treated as eight
-- spaces (mirroring 'decodeTar').
-- NOTE(review): 'entryPath' is truncated to 100 characters and
-- 'entryLinkName' is ignored (the field is written as NULs).
encodeTar :: TarHeader -> BS.ByteString
encodeTar e =
  let truncated = take 100 (entryPath e)
      filePath = Char8.pack truncated <>
                 BS.replicate (100 - length truncated) 0
      mode = toOctal 7 $ case entryMode e of CMode m -> m
      uid = toOctal 7 $ entryUID e
      gid = toOctal 7 $ entryGID e
      size = toOctal 11 (entrySize e)
      modified = toOctal 11 . toInteger . round . utcTimeToPOSIXSeconds $
                 entryLastModified e
      eType = Char8.singleton $ case entryType e of
        File -> '0'
        Directory -> '5'
      linkName = BS.replicate 100 0
      -- Byte sum over the header with the checksum field as spaces.
      checksum = toOctal 6 $ (sum :: [Int] -> Int) $ map fromIntegral $
                 concatMap BS.unpack
                   [ filePath, mode, uid, gid, size, modified
                   , Char8.replicate 8 ' '
                   , eType, linkName
                   ]
  in mconcat [ filePath, mode, uid, gid, size, modified
             , checksum <> Char8.singleton ' '
             , eType
             , linkName
             , BS.replicate 255 0
             ]
  where
    -- Write a zero-padded, NUL-terminated octal field of width n+1.
    toOctal n = Char8.pack . zeroPad . reverse . ('\000' :) .
                map (intToDigit . fromIntegral) . digitsRev 8
      where
        zeroPad l = replicate (max 0 $ n - length l + 1) '0' ++ l
----------------------------------------------------------------------------------
{- $writing
Like reading, writing tar files is done using @writeEntry@ type functions for
the individual files inside a tar archive, and a final 'writeTar' 'P.Pipe' to
produce a correctly formatted stream of 'BS.ByteString's.
However, unlike reading, writing tar files can be done entirely with *pull
composition*. Here's an example of producing a tar archive with one file in a
directory:
> import Data.Text
> import Data.Text.Encoding (encodeUtf8)
> import Pipes
> import Pipes.ByteString (writeHandle)
> import Pipes.Parse (wrap)
> import Pipes.Tar
> import Data.Time (getCurrentTime)
> import System.IO (withFile, IOMode(WriteMode))
>
> main :: IO ()
> main = withFile "hello.tar" WriteMode $ \h ->
> runEffect $
> (wrap . tarEntries >-> writeTar >-> writeHandle h) ()
>
> where
>
> tarEntries () = do
> now <- lift getCurrentTime
> writeDirectoryEntry "text" 0 0 0 now
> writeFileEntry "text/hello" 0 0 0 now <-<
> (wrap . const (respond (encodeUtf8 "Hello!"))) $ ()
First, lets try and understand @tarEntries@, as this is the bulk of the work.
@tarEntries@ is a 'Pipes.Producer', which is responsible for producing
'CompleteEntry's for 'writeTar' to consume. A 'CompleteEntry' is a
combination of a 'TarEntry' along with any associated data. This bundled is
necessary to ensure that when writing the tar, we don't produce a header
where the size header itself doesn't match the actual amount of data that
follows. It's for this reason 'CompleteEntry''s constructor is private - you
can only respond with 'CompleteEntry's using primitives provided by
@pipes-tar@.
The first primitive we use is 'writeDirectory', which takes no input and
responds with a directory entry. The next response is a little bit more
complicated. Here, I'm writing a text file to the path \"text/hello\" with the
contents \"Hello!\". 'writeFileEntry' is a 'Pipes.Pipe' that consumes a
'Nothing' terminated stream of 'BS.ByteStrings', and responds with a
corresponding 'CompleteEntry'. I've used flipped pull combination ('Pipes.<-<') to
avoid more parenthesis, and 'Pipes.Parse.wrap' a single 'respond' call which
produces the actual file contents.
-}
-- | Serialise a 'TarArchive' to a stream of 'BS.ByteString' chunks.
-- Each entry's content is first accumulated in memory (via a writer)
-- so the real byte count can be written into the header's size field
-- -- this is why writing needs as much memory as the largest file
-- (see the module header).  Bodies are NUL-padded to the next
-- 512-byte boundary.
-- NOTE(review): no end-of-archive marker (two zero records) is
-- emitted here -- confirm whether consumers require one.
writeTarArchive
  :: Monad m
  => TarArchive m -> Pipes.Producer BS.ByteString m ()
writeTarArchive (TarArchive archive) = Parse.concat (transFreeT f (void archive))
  where
    f (TarEntry header content) = do
      -- Collect the whole entry body so its length is known up front.
      (r, bytes) <- Pipes.runWriterP $
        Pipes.for (Pipes.hoist Pipes.lift content) tell
      let fileSize = BS.length bytes
      Pipes.yield (encodeTar $ header { entrySize = BS.length bytes })
      Pipes.yield bytes
      pad fileSize
      return r
    -- Pad to a 512-byte boundary; exact multiples need no padding.
    pad n | n `mod` 512 == 0 = return ()
          | otherwise = Pipes.yield (BS.replicate (512 - n `mod` 512) 0)
--------------------------------------------------------------------------------
-- | Build a one-file archive.  'entrySize' is written as 0 here
-- because 'writeTarArchive' replaces it with the actual content
-- length at serialisation time.  The content producer is created in
-- the archive's base monad via @mkContents@.
fileEntry
  :: Monad m
  => FilePath -> FileMode -> Int -> Int -> UTCTime
  -> m (Pipes.Producer BS.ByteString m ())
  -> TarArchive m
fileEntry path mode uid gid modified mkContents =
  TarArchive $ FreeT $ do
    contents <- mkContents
    return $ Free $ TarEntry
      (TarHeader { entryPath = path
                 , entrySize = 0 -- This will get replaced when we write the archive
                 , entryMode = mode
                 , entryUID = uid
                 , entryGID = gid
                 , entryLastModified = modified
                 , entryType = File
                 , entryLinkName = ""
                 })
      ((return (return ())) <$ contents)
--------------------------------------------------------------------------------
-- | Build a one-entry archive containing a directory (no content).
-- As with 'fileEntry', the size field is a placeholder; a directory's
-- content producer yields nothing.
directoryEntry
  :: Monad m
  => FilePath -> FileMode -> Int -> Int -> UTCTime
  -> TarArchive m
directoryEntry path mode uid gid modified =
  let header =
        TarHeader { entryPath = path
                  , entrySize = 0
                  , entryMode = mode
                  , entryUID = uid
                  , entryGID = gid
                  , entryLastModified = modified
                  , entryType = Directory
                  , entryLinkName = ""
                  }
  in TarArchive $ wrap $ TarEntry header (return $ return $ return ())
--------------------------------------------------------------------------------
-- | Draw up to @n@ bytes from a producer, returning the bytes read
-- (possibly fewer than @n@ when the producer ends early) together
-- with the remainder of the producer.
drawBytesUpTo
  :: (Monad m, Functor m)
  => Int
  -> Pipes.Producer PBS.ByteString m r
  -> m (PBS.ByteString, Pipes.Producer PBS.ByteString m r)
drawBytesUpTo n p = fmap swap $ Pipes.runEffect $ Pipes.runWriterP $
  Pipes.for (Pipes.hoist Pipes.lift (PBS.splitAt n p)) tell
| ocharles/pipes-tar | src/Pipes/Tar.hs | bsd-3-clause | 15,309 | 0 | 17 | 4,056 | 2,814 | 1,518 | 1,296 | 216 | 4 |
import Data.Bits (shiftL)
import Data.Word
import Data.Int
-- This magical #include brings in all the everybody-knows-these magic
-- constants unfortunately, we need to be *explicit* about which one
-- we want; if we just hope a -I... will get the right one, we could
-- be in trouble.
{-
Pull in the autoconf defines (HAVE_FOO), but don't include
ghcconfig.h, because that will include ghcplatform.h which has the
wrong platform settings for the compiler (it has the platform
settings for the target plat instead).
-}
#include "../includes/ghcautoconf.h"
#include "stg/HaskellMachRegs.h"
#include "rts/Constants.h"
#include "MachDeps.h"
#include "../includes/dist-derivedconstants/header/DerivedConstants.h"
-- import Util
-- All pretty arbitrary:
mAX_TUPLE_SIZE :: Int
mAX_TUPLE_SIZE = 62 -- Should really match the number
-- of decls in Data.Tuple
mAX_CONTEXT_REDUCTION_DEPTH :: Int
mAX_CONTEXT_REDUCTION_DEPTH = 200
-- Increase to 200; see Trac #5395
-- specialised fun/thunk/constr closure types
mAX_SPEC_THUNK_SIZE :: Int
mAX_SPEC_THUNK_SIZE = MAX_SPEC_THUNK_SIZE
mAX_SPEC_FUN_SIZE :: Int
mAX_SPEC_FUN_SIZE = MAX_SPEC_FUN_SIZE
mAX_SPEC_CONSTR_SIZE :: Int
mAX_SPEC_CONSTR_SIZE = MAX_SPEC_CONSTR_SIZE
-- pre-compiled thunk types
mAX_SPEC_SELECTEE_SIZE :: Int
mAX_SPEC_SELECTEE_SIZE = MAX_SPEC_SELECTEE_SIZE
mAX_SPEC_AP_SIZE :: Int
mAX_SPEC_AP_SIZE = MAX_SPEC_AP_SIZE
-- closure sizes: these do NOT include the header (see below for header sizes)
mIN_PAYLOAD_SIZE ::Int
mIN_PAYLOAD_SIZE = MIN_PAYLOAD_SIZE
mIN_INTLIKE, mAX_INTLIKE :: Int
mIN_INTLIKE = MIN_INTLIKE
mAX_INTLIKE = MAX_INTLIKE
mIN_CHARLIKE, mAX_CHARLIKE :: Int
mIN_CHARLIKE = MIN_CHARLIKE
mAX_CHARLIKE = MAX_CHARLIKE
mUT_ARR_PTRS_CARD_BITS :: Int
mUT_ARR_PTRS_CARD_BITS = MUT_ARR_PTRS_CARD_BITS
-- A section of code-generator-related MAGIC CONSTANTS.
mAX_Vanilla_REG :: Int
mAX_Vanilla_REG = MAX_VANILLA_REG
mAX_Float_REG :: Int
mAX_Float_REG = MAX_FLOAT_REG
mAX_Double_REG :: Int
mAX_Double_REG = MAX_DOUBLE_REG
mAX_Long_REG :: Int
mAX_Long_REG = MAX_LONG_REG
mAX_Real_Vanilla_REG :: Int
mAX_Real_Vanilla_REG = MAX_REAL_VANILLA_REG
mAX_Real_Float_REG :: Int
mAX_Real_Float_REG = MAX_REAL_FLOAT_REG
mAX_Real_Double_REG :: Int
mAX_Real_Double_REG = MAX_REAL_DOUBLE_REG
mAX_Real_Long_REG :: Int
#ifdef MAX_REAL_LONG_REG
mAX_Real_Long_REG = MAX_REAL_LONG_REG
#else
mAX_Real_Long_REG = 0
#endif
-- Closure header sizes.
sTD_HDR_SIZE :: Int
sTD_HDR_SIZE = STD_HDR_SIZE
pROF_HDR_SIZE :: Int
pROF_HDR_SIZE = PROF_HDR_SIZE
-- Size of a double in StgWords.
dOUBLE_SIZE :: Int
dOUBLE_SIZE = SIZEOF_DOUBLE
wORD64_SIZE :: Int
wORD64_SIZE = 8
iNT64_SIZE :: Int
iNT64_SIZE = wORD64_SIZE
-- This tells the native code generator the size of the spill
-- area is has available.
rESERVED_C_STACK_BYTES :: Int
rESERVED_C_STACK_BYTES = RESERVED_C_STACK_BYTES
-- The amount of (Haskell) stack to leave free for saving registers when
-- returning to the scheduler.
rESERVED_STACK_WORDS :: Int
rESERVED_STACK_WORDS = RESERVED_STACK_WORDS
-- Continuations that need more than this amount of stack should do their
-- own stack check (see bug #1466).
aP_STACK_SPLIM :: Int
aP_STACK_SPLIM = AP_STACK_SPLIM
-- Size of a word, in bytes
wORD_SIZE :: Int
wORD_SIZE = SIZEOF_HSWORD
wORD_SIZE_IN_BITS :: Int
wORD_SIZE_IN_BITS = wORD_SIZE * 8
-- Define a fixed-range integral type equivalent to the target Int/Word
#if SIZEOF_HSWORD == 4
type TargetInt = Int32
type TargetWord = Word32
#elif SIZEOF_HSWORD == 8
type TargetInt = Int64
type TargetWord = Word64
#else
#error unknown SIZEOF_HSWORD
#endif
-- Bounds of the *target* platform's Int/Word, held as unbounded
-- Integers (the compiler may run on a different word size than the
-- code it generates for -- see TargetInt/TargetWord above).
tARGET_MIN_INT, tARGET_MAX_INT, tARGET_MAX_WORD :: Integer
tARGET_MIN_INT = fromIntegral (minBound :: TargetInt)
tARGET_MAX_INT = fromIntegral (maxBound :: TargetInt)
tARGET_MAX_WORD = fromIntegral (maxBound :: TargetWord)
-- Largest Unicode code point (U+10FFFF).
tARGET_MAX_CHAR :: Int
tARGET_MAX_CHAR = 0x10ffff
-- Amount of pointer bits used for semi-tagging constructor closures
tAG_BITS :: Int
tAG_BITS = TAG_BITS
tAG_MASK :: Int
tAG_MASK = (1 `shiftL` tAG_BITS) - 1
mAX_PTR_TAG :: Int
mAX_PTR_TAG = tAG_MASK
-- Size of a C int, in bytes. May be smaller than wORD_SIZE.
cINT_SIZE :: Int
cINT_SIZE = SIZEOF_INT
cLONG_SIZE :: Int
cLONG_SIZE = SIZEOF_LONG
cLONG_LONG_SIZE :: Int
cLONG_LONG_SIZE = SIZEOF_LONG_LONG
-- Size of a storage manager block (in bytes).
bLOCK_SIZE :: Int
bLOCK_SIZE = BLOCK_SIZE
bLOCK_SIZE_W :: Int
bLOCK_SIZE_W = bLOCK_SIZE `quot` wORD_SIZE
-- blocks that fit in an MBlock, leaving space for the block descriptors
bLOCKS_PER_MBLOCK :: Int
bLOCKS_PER_MBLOCK = BLOCKS_PER_MBLOCK
-- Number of bits to shift a bitfield left by in an info table.
bITMAP_BITS_SHIFT :: Int
bITMAP_BITS_SHIFT = BITMAP_BITS_SHIFT
-- Constants derived from headers in ghc/includes, generated by the program
-- ../includes/mkDerivedConstants.c.
#include "../includes/dist-ghcconstants/header/GHCConstants.h"
| nomeata/ghc | includes/HaskellConstants.hs | bsd-3-clause | 4,893 | 0 | 7 | 715 | 564 | 358 | 206 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module AppSpec where
import Test.Hspec
import TestHelper
import Control.Applicative
import Network.Wai.Test (SResponse)
import Data.ByteString (ByteString)
import App
-- | Run the specification with hspec's default runner.
main :: IO ()
main = hspec spec
-- | Build a fresh instance of the application and issue a GET request
-- for @path@ against it.
get :: ByteString -> IO SResponse
get path = do
  application <- app
  getPath path application
-- | Behavioural specification for the application's HTTP surface.
-- A top-level type signature is added (it was missing, which -Wall
-- flags); the redundant single-statement @do@ blocks are dropped.
spec :: Spec
spec =
  describe "GET /" $
    it "responds with HTTP 200" $
      get "/" `shouldRespondWith` 200
| NorthParkSoftware/yournextgig-backend | test/AppSpec.hs | bsd-3-clause | 426 | 0 | 14 | 90 | 124 | 66 | 58 | 16 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module HipBot.Webhooks
( RoomLinks(..)
, Room(..)
, MessageItem(..)
, WebhookRoomItem(..)
, WebhookRoomEvent(..)
, HasMembers(..)
, HasParticipants(..)
, HasSelf(..)
, HasWebhooks(..)
, HasRoomId(..)
, HasName(..)
, HasLinks(..)
, HasMessage(..)
, HasWebhookId(..)
, HasOauthId(..)
, HasItem(..)
, decodeWebhookRoomEvent
, webhookResource
, roomMessageWebhookResource
, simpleWebhookResource
) where
import Control.Applicative
import Control.Lens hiding ((.=))
import Control.Monad.Reader
import Control.Monad.State
import Data.Aeson ((.:), (.:?))
import qualified Data.Aeson as A
import qualified Data.Aeson.TH as A
import qualified Data.Aeson.Types as A
import Data.Char (toLower)
import Data.Foldable
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import qualified Network.Wai as Wai
import Prelude hiding (foldl1)
import Webcrank
import Webcrank.Wai
import HipBot.AbsoluteURI
import HipBot.Internal.Types
import HipBot.Notification
data RoomLinks = RoomLinks
{ _roomLinksMembers :: Maybe AbsoluteURI
, _roomLinksParticipants :: AbsoluteURI
, _roomLinksSelf :: AbsoluteURI
, _roomLinksWebhooks :: AbsoluteURI
} deriving (Show, Eq)
makeFields ''RoomLinks
data Room = Room
{ _roomRoomId :: RoomId
, _roomName :: RoomName
, _roomLinks :: RoomLinks
} deriving (Show, Eq)
makeFields ''Room
instance A.FromJSON Room where
parseJSON = A.withObject "object" $ \o -> Room
<$> o .: "id"
<*> o .: "name"
<*> o .: "links"
data MessageItem = MessageItem
{ _messageItemMessage :: Text
} deriving (Show, Eq)
makeFields ''MessageItem
data WebhookRoomItem
= WebhookRoomMessage Room MessageItem
-- WebhookRoomArchived
-- WebhookRoomDeleted
-- WebhookRoomEnter
-- WebhookRoomExit
-- WebhookRoomNotification
-- WebhookRoomTopicChange
-- WebhookRoomUnarchived
deriving (Show, Eq)
data WebhookRoomEvent = WebhookRoomEvent
{ _webhookRoomEventWebhookId :: Int
, _webhookRoomEventOauthId :: Maybe String
, _webhookRoomEventItem :: WebhookRoomItem
} deriving (Show, Eq)
makeFields ''WebhookRoomEvent
instance A.FromJSON WebhookRoomEvent where
parseJSON = A.withObject "object" $ \o -> WebhookRoomEvent
<$> o .: "webhook_id"
<*> o .:? "oauth_client_id"
<*> readItem o
-- | Parse the event payload's @item@ object.  Only @room_message@
-- events are supported; any other event kind is rejected with a type
-- mismatch carrying the original object.
readItem :: A.Object -> A.Parser WebhookRoomItem
readItem o = do
  oi <- o .: "item"
  o .: "event" >>= \case
    RoomMessage -> WebhookRoomMessage <$> oi .: "room" <*> oi .: "message"
    _ -> A.typeMismatch "only supports room_message events at this time" (A.Object o)
-- | Read the current WAI request body and decode it as a
-- 'WebhookRoomEvent', returning the aeson error message on failure.
decodeWebhookRoomEvent :: (Functor m, MonadIO m, MonadReader s m, HasRequest s Wai.Request) => m (Either String WebhookRoomEvent)
decodeWebhookRoomEvent = A.eitherDecode <$> getRequestBodyLBS
$(A.deriveFromJSON
A.defaultOptions
{ A.fieldLabelModifier = \l -> toLower (l !! 10) : drop 11 l
, A.omitNothingFields = True
}
''RoomLinks)
$(A.deriveFromJSON
A.defaultOptions
{ A.fieldLabelModifier = \l -> toLower (l !! 12) : drop 13 l
, A.omitNothingFields = True
}
''MessageItem)
-- | Build a POST-only resource that accepts HipChat webhook events.
-- The request body is decoded as a 'WebhookRoomEvent'; decode
-- failures are logged to stdout (tagged with the webhook's name) and
-- otherwise ignored, while successfully decoded events are handed to
-- the processor, whose optional 'Notification' result is JSON-encoded
-- into the response body.
webhookResource
  :: (MonadIO m, MonadReader r m, HasRequest r Wai.Request, MonadState s m, HasReqData s)
  => String -- ^ webhook name
  -> (WebhookRoomEvent -> HaltT m (Maybe Notification)) -- ^ event processor
  -> Resource m
webhookResource hookName f = resource
  { allowedMethods = return [ methodPost ]
  , postAction = postProcess $
      decodeWebhookRoomEvent >>= \case
        Left e -> liftIO . putStrLn . mconcat $
          [ "[ERROR] Failed to parse event to "
          , hookName
          , " webhook: "
          , e
          ]
        Right ev -> f ev >>= traverse_ (writeLBS . A.encode)
  }
-- | Specialise 'webhookResource' to room-message events: the handler
-- receives the 'Room' and 'MessageItem' directly.  (The case analysis
-- is currently exhaustive because 'WebhookRoomItem' has only the
-- 'WebhookRoomMessage' constructor.)
roomMessageWebhookResource
  :: (MonadIO m, MonadReader r m, MonadState s m, HasReqData s, HasRequest r Wai.Request)
  => String
  -> (Room -> MessageItem -> HaltT m (Maybe Notification))
  -> Resource m
roomMessageWebhookResource hookName f = webhookResource hookName $ \ev ->
  case ev ^. item of
    WebhookRoomMessage room msg -> f room msg
-- | Creates a simple "command" processing webhook resource.
-- Commands processed are limited to pure functions that may
-- or may not produce a reply (the reply becomes a room notification).
simpleWebhookResource
  :: MonadIO m
  => String -- ^ webhook name
  -> [Text] -- ^ command aliases, they will be removed before calling the processing function
  -> (Text -> Maybe Text) -- ^ processing function, the result will become a room notification
  -> WaiResource m
simpleWebhookResource hookName aliases f =
  let
    -- Strip the first matching alias prefix.  'asum' yields Nothing
    -- when no alias matches and -- unlike the previous
    -- 'foldl1 (<|>)' -- is total when the alias list is empty.
    expr t = T.strip <$> asum (fmap (`T.stripPrefix` t) aliases)
    command = views message (return . fmap textNotification . (f =<<) . expr)
  in
    roomMessageWebhookResource hookName (const command)
| purefn/hipbot | src/HipBot/Webhooks.hs | bsd-3-clause | 5,050 | 0 | 15 | 975 | 1,333 | 750 | 583 | 138 | 2 |
module Main where
{-
This module is about configuring and launching a web server. It delegates
wiring resources to the Resources module.
-}
import Web.Scotty(scotty)
import Resources
-- | Start the HTTP server on port 8099 with the routes wired up in
-- "Resources".
main :: IO ()
main = scotty 8099 resources
| aztecrex/haskell-scotty-spike | app/Main.hs | bsd-3-clause | 230 | 0 | 6 | 40 | 38 | 22 | 16 | 5 | 1 |
{-|
Module : Idris.Elab.Term
Description : Code to elaborate terms.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE LambdaCase, PatternGuards, ViewPatterns #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Elab.Term where
import Idris.AbsSyntax
import Idris.AbsSyntaxTree
import Idris.Core.CaseTree (SC, SC'(STerm), findCalls, findUsedArgs)
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.Evaluate
import Idris.Core.ProofTerm (getProofTerm)
import Idris.Core.TT
import Idris.Core.Typecheck (check, converts, isType, recheck)
import Idris.Core.Unify
import Idris.Core.WHNF (whnf,whnfArgs)
import Idris.Coverage (genClauses, recoverableCoverage, validCoverageCase)
import Idris.Delaborate
import Idris.DSL
import Idris.Elab.Quasiquote (extractUnquotes)
import Idris.Elab.Rewrite
import Idris.Elab.Utils
import Idris.Error
import Idris.ErrReverse (errReverse)
import Idris.Output (pshow)
import Idris.ProofSearch
import Idris.Reflection
import Idris.Termination(buildSCG, checkDeclTotality, checkPositive)
import qualified Util.Pretty as U
import Control.Applicative ((<$>))
import Control.Monad
import Control.Monad.State.Strict
import Data.Foldable (for_)
import Data.List
import qualified Data.Map as M
import Data.Maybe (catMaybes, fromMaybe, mapMaybe, maybeToList)
import qualified Data.Set as S
import qualified Data.Text as T
import Debug.Trace
-- | Which kind of construct the elaborator is currently working on.
-- 'ELHS' and 'EImpossible' are treated as pattern positions, and the
-- declaration/LHS modes additionally allow free names to be bound as
-- pattern variables (see the @pattern@ / @bindfree@ predicates in 'elab').
data ElabMode = ETyDecl | ETransLHS | ELHS | EImpossible | ERHS
  deriving Eq
-- | The complete output of elaborating one raw term: the typechecked
-- term itself plus everything elaboration produced or learned along
-- the way (metavariables, deferred case blocks, context extensions,
-- highlighting, and the updated name counter).
data ElabResult = ElabResult {
    -- | The term resulting from elaboration
    resultTerm :: Term
    -- | Information about new metavariables
  , resultMetavars :: [(Name, (Int, Maybe Name, Type, [Name]))]
    -- | Deferred declarations as the meaning of case blocks
  , resultCaseDecls :: [PDecl]
    -- | The potentially extended context from new definitions
  , resultContext :: Context
    -- | Meta-info about the new type declarations
  , resultTyDecls :: [RDeclInstructions]
    -- | Saved highlights from elaboration
  , resultHighlighting :: [(FC, OutputAnnotation)]
    -- | The new global name counter
  , resultName :: Int
  }
-- | Using the elaborator, convert a term in raw syntax to a fully
-- elaborated, typechecked term.
--
-- If building a pattern match, we convert undeclared variables from
-- holes to pattern bindings.
--
-- Also find deferred names in the term and their types
build :: IState          -- ^ global compiler state
      -> ElabInfo        -- ^ elaboration context (namespaces, blocks, ...)
      -> ElabMode        -- ^ what is being elaborated (LHS, RHS, type decl, ...)
      -> FnOpts          -- ^ options on the function being defined
      -> Name            -- ^ name of the enclosing function
      -> PTerm           -- ^ the raw term to elaborate
      -> ElabD ElabResult
build ist info emode opts fn tm
    = do elab ist info emode opts fn tm
         let tmIn = tm
         -- A function already flagged TIPartial tolerates leftover
         -- unification problems below instead of failing with them.
         let inf = case lookupCtxt fn (idris_tyinfodata ist) of
                        [TIPartial] -> True
                        _ -> False
         hs <- get_holes
         ivs <- get_implementations
         ptm <- get_term
         -- Resolve remaining interfaces. Two passes - first to get the
         -- default Num implementations, second to clean up the rest
         when (not pattern) $
           mapM_ (\n -> when (n `elem` hs) $
                          do focus n
                             g <- goal
                             -- First pass is best-effort: on failure just
                             -- move the hole to the end for the second pass.
                             try (resolveTC' True True 10 g fn ist)
                                 (movelast n)) ivs
         -- Re-read implementations/holes: the first pass may have solved some.
         ivs <- get_implementations
         hs <- get_holes
         when (not pattern) $
           mapM_ (\n -> when (n `elem` hs) $
                          do focus n
                             g <- goal
                             ptm <- get_term
                             resolveTC' True True 10 g fn ist) ivs
         when (not pattern) $ solveAutos ist fn False
         -- NB: this 'tm' shadows the input argument from here on.
         tm <- get_term
         ctxt <- get_context
         probs <- get_probs
         u <- getUnifyLog
         hs <- get_holes
         -- Flush the remaining unification work (tracing when unify
         -- logging is enabled).
         when (not pattern) $
           traceWhen u ("Remaining holes:\n" ++ show hs ++ "\n" ++
                        "Remaining problems:\n" ++ qshow probs) $
             do unify_all; matchProblems True; unifyProblems
         when (not pattern) $ solveAutos ist fn True
         probs <- get_probs
         -- Any problem still left is an error, unless the function is
         -- known partial ('inf'), in which case it is tolerated.
         case probs of
            [] -> return ()
            ((_,_,_,_,e,_,_):es) ->
               traceWhen u ("Final problems:\n" ++ qshow probs ++ "\nin\n" ++ show tm) $
                 if inf then return ()
                        else lift (Error e)
         -- For type declarations, turn remaining holes into pattern
         -- variables and normalise the pattern binding order.
         when tydecl (do mkPat
                         update_term liftPats
                         update_term orderPats)
         EState is _ impls highlights _ _ <- getAux
         tt <- get_term
         ctxt <- get_context
         -- Collect deferred names (e.g. case blocks) out of the final term.
         let (tm, ds) = runState (collectDeferred (Just fn) (map fst is) ctxt tt) []
         log <- getLog
         g_nextname <- get_global_nextname
         -- Emit any accumulated elaborator log before returning.
         if log /= ""
            then trace log $ return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
            else return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
  where pattern = emode == ELHS || emode == EImpossible
        tydecl = emode == ETyDecl

        -- Convert every remaining hole into a pattern variable.
        mkPat = do hs <- get_holes
                   tm <- get_term
                   case hs of
                      (h: hs) -> do patvar h; mkPat
                      [] -> return ()
-- | Build a term autogenerated as an interface method definition.
--
-- (Separate, so we don't go overboard resolving things that we don't
-- know about yet on the LHS of a pattern def)
buildTC :: IState       -- ^ global compiler state
        -> ElabInfo     -- ^ elaboration context
        -> ElabMode     -- ^ what is being elaborated
        -> FnOpts       -- ^ options on the function being defined
        -> Name         -- ^ name of the enclosing function
        -> [Name]       -- ^ Cached names in the PTerm, before adding PAlternatives
        -> PTerm        -- ^ the raw term to elaborate
        -> ElabD ElabResult
buildTC ist info emode opts fn ns tm
    = do let tmIn = tm
         -- A function already flagged TIPartial tolerates leftover
         -- unification problems instead of failing with them.
         let inf = case lookupCtxt fn (idris_tyinfodata ist) of
                        [TIPartial] -> True
                        _ -> False
         -- set name supply to begin after highest index in tm
         initNextNameFrom ns
         elab ist info emode opts fn tm
         probs <- get_probs
         -- NB: this 'tm' shadows the input argument from here on.
         tm <- get_term
         case probs of
            [] -> return ()
            ((_,_,_,_,e,_,_):es) -> if inf then return ()
                                           else lift (Error e)
         dots <- get_dotterm
         -- 'dots' are the PHidden things which have not been solved by
         -- unification
         when (not (null dots)) $
           lift (Error (CantMatch (getInferTerm tm)))
         EState is _ impls highlights _ _ <- getAux
         tt <- get_term
         ctxt <- get_context
         -- Collect deferred names (e.g. case blocks) out of the final term.
         let (tm, ds) = runState (collectDeferred (Just fn) (map fst is) ctxt tt) []
         log <- getLog
         g_nextname <- get_global_nextname
         -- Emit any accumulated elaborator log before returning.
         -- (The previous 'where pattern = ...' binding was unused in this
         -- definition and has been removed.)
         if (log /= "")
            then trace log $ return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
            else return (ElabResult tm ds (map snd is) ctxt impls highlights g_nextname)
-- | Return, per argument position of the given data constructor, whether
-- that argument cannot be matched on. An argument is unmatchable when its
-- type mentions one of the constructor's bound type variables (i.e. it is
-- polymorphic), unless the type has been made concrete by the time we get
-- around to elaborating the argument.
getUnmatchable :: Context -> Name -> [Bool]
getUnmatchable ctxt n
  | isDConName n ctxt && n /= inferCon =
      maybe [] (walk [] []) (lookupTyExact n ctxt)
  | otherwise = []
  where
    -- walk tyvars seen ty: 'tyvars' accumulates the Pi-bound names whose
    -- binder type is a TType (type variables); 'seen' records, for each
    -- argument so far, which of those variables occur in its type.
    walk :: [Name] -> [[Name]] -> Type -> [Bool]
    walk tyvars seen (Bind b (Pi _ bty _) body) =
      let tyvars' | TType _ <- bty = b : tyvars
                  | otherwise      = tyvars
      in walk tyvars'
              (intersect tyvars (refsIn bty) : seen)
              (instantiate (P Bound b bty) body)
    -- Reached the return type: an argument is unmatchable exactly when
    -- its recorded type-variable set is non-empty.
    walk _ seen _ = map (not . null) (reverse seen)
-- | Flags describing the syntactic position currently being elaborated;
-- consulted to decide e.g. whether a name may become a pattern variable.
data ElabCtxt = ElabCtxt { e_inarg :: Bool,   -- ^ inside a function argument
                           e_isfn :: Bool, -- ^ Function part of application
                           e_guarded :: Bool, -- ^ immediately under a constructor (helps find patvars)
                           e_intype :: Bool,  -- ^ inside a type
                           e_qq :: Bool,      -- ^ inside a quasiquote
                           e_nomatching :: Bool -- ^ can't pattern match
                         }

-- | Starting context for elaborating a term: all position flags off.
initElabCtxt = ElabCtxt False False False False False False
-- | Is the current goal a plain reference @P _ n _@ whose name is bound
-- in the local environment (i.e. the goal type is a local variable)?
goal_polymorphic :: ElabD Bool
goal_polymorphic = do
  g <- goal
  case g of
    P _ n _ -> fmap ((n `elem`) . map fst) get_env
    _       -> return False
-- | Returns the set of declarations we need to add to complete the
-- definition (most likely case blocks to elaborate) as well as
-- declarations resulting from user tactic scripts (%runElab)
elab :: IState
-> ElabInfo
-> ElabMode
-> FnOpts
-> Name
-> PTerm
-> ElabD ()
elab ist info emode opts fn tm
= do let loglvl = opt_logLevel (idris_options ist)
when (loglvl > 5) $ unifyLog True
whnf_compute_args -- expand type synonyms, etc
let fc = maybe "(unknown)"
elabE initElabCtxt (elabFC info) tm -- (in argument, guarded, in type, in qquote)
est <- getAux
sequence_ (get_delayed_elab est)
end_unify
ptm <- get_term
when (pattern || intransform) -- convert remaining holes to pattern vars
(do unify_all
matchProblems False -- only the ones we matched earlier
unifyProblems
mkPat
update_term liftPats)
where
pattern = emode == ELHS || emode == EImpossible
eimpossible = emode == EImpossible
intransform = emode == ETransLHS
bindfree = emode == ETyDecl || emode == ELHS || emode == ETransLHS
|| emode == EImpossible
autoimpls = opt_autoimpls (idris_options ist)
get_delayed_elab est =
let ds = delayed_elab est in
map snd $ sortBy (\(p1, _) (p2, _) -> compare p1 p2) ds
tcgen = Dictionary `elem` opts
reflection = Reflection `elem` opts
isph arg = case getTm arg of
Placeholder -> (True, priority arg)
tm -> (False, priority arg)
toElab ina arg = case getTm arg of
Placeholder -> Nothing
v -> Just (priority arg, elabE ina (elabFC info) v)
toElab' ina arg = case getTm arg of
Placeholder -> Nothing
v -> Just (elabE ina (elabFC info) v)
mkPat = do hs <- get_holes
tm <- get_term
case hs of
(h: hs) -> do patvar h; mkPat
[] -> return ()
elabRec = elabE initElabCtxt Nothing
-- | elabE elaborates an expression, possibly wrapping implicit coercions
-- and forces/delays. If you make a recursive call in elab', it is
-- normally correct to call elabE - the ones that don't are `desugarings
-- typically
elabE :: ElabCtxt -> Maybe FC -> PTerm -> ElabD ()
elabE ina fc' t =
do solved <- get_recents
as <- get_autos
hs <- get_holes
-- If any of the autos use variables which have recently been solved,
-- have another go at solving them now.
mapM_ (\(a, (failc, ns)) ->
if any (\n -> n `elem` solved) ns && head hs /= a
then solveAuto ist fn False (a, failc)
else return ()) as
apt <- expandToArity t
itm <- if not pattern then insertImpLam ina apt else return apt
ct <- insertCoerce ina itm
t' <- insertLazy ina ct
g <- goal
tm <- get_term
ps <- get_probs
hs <- get_holes
--trace ("Elaborating " ++ show t' ++ " in " ++ show g
-- ++ "\n" ++ show tm
-- ++ "\nholes " ++ show hs
-- ++ "\nproblems " ++ show ps
-- ++ "\n-----------\n") $
--trace ("ELAB " ++ show t') $
env <- get_env
let fc = fileFC "Force"
handleError (forceErr t' env)
(elab' ina fc' t')
(elab' ina fc' (PApp fc (PRef fc [] (sUN "Force"))
[pimp (sUN "t") Placeholder True,
pimp (sUN "a") Placeholder True,
pexp ct]))
forceErr orig env (CantUnify _ (t,_) (t',_) _ _ _)
| (P _ (UN ht) _, _) <- unApply (whnf (tt_ctxt ist) env t),
ht == txt "Delayed" = notDelay orig
forceErr orig env (CantUnify _ (t,_) (t',_) _ _ _)
| (P _ (UN ht) _, _) <- unApply (whnf (tt_ctxt ist) env t'),
ht == txt "Delayed" = notDelay orig
forceErr orig env (InfiniteUnify _ t _)
| (P _ (UN ht) _, _) <- unApply (whnf (tt_ctxt ist) env t),
ht == txt "Delayed" = notDelay orig
forceErr orig env (Elaborating _ _ _ t) = forceErr orig env t
forceErr orig env (ElaboratingArg _ _ _ t) = forceErr orig env t
forceErr orig env (At _ t) = forceErr orig env t
forceErr orig env t = False
notDelay t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Delay" = False
notDelay _ = True
local f = do e <- get_env
return (f `elem` map fst e)
-- | Is a constant a type?
constType :: Const -> Bool
constType (AType _) = True
constType StrType = True
constType VoidType = True
constType _ = False
-- "guarded" means immediately under a constructor, to help find patvars
elab' :: ElabCtxt -- ^ (in an argument, guarded, in a type, in a quasiquote)
-> Maybe FC -- ^ The closest FC in the syntax tree, if applicable
-> PTerm -- ^ The term to elaborate
-> ElabD ()
elab' ina fc (PNoImplicits t) = elab' ina fc t -- skip elabE step
elab' ina fc (PType fc') =
do apply RType []
solve
highlightSource fc' (AnnType "Type" "The type of types")
elab' ina fc (PUniverse fc' u) =
do unless (UniquenessTypes `elem` idris_language_extensions ist
|| e_qq ina) $
lift $ tfail $ At fc' (Msg "You must turn on the UniquenessTypes extension to use UniqueType or AnyType")
apply (RUType u) []
solve
highlightSource fc' (AnnType (show u) "The type of unique types")
-- elab' (_,_,inty) (PConstant c)
-- | constType c && pattern && not reflection && not inty
-- = lift $ tfail (Msg "Typecase is not allowed")
elab' ina fc tm@(PConstant fc' c)
| pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTypeConst c
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise = do apply (RConstant c) []
solve
highlightSource fc' (AnnConst c)
elab' ina fc (PQuote r) = do fill r; solve
elab' ina _ (PTrue fc _) =
do whnf_compute
g <- goal
case g of
TType _ -> elab' ina (Just fc) (PRef fc [] unitTy)
UType _ -> elab' ina (Just fc) (PRef fc [] unitTy)
_ -> elab' ina (Just fc) (PRef fc [] unitCon)
elab' ina fc (PResolveTC (FC "HACK" _ _)) -- for chasing parent interfaces
= do g <- goal; resolveTC False False 5 g fn elabRec ist
elab' ina fc (PResolveTC fc')
= do c <- getNameFrom (sMN 0 "__interface")
implementationArg c
-- Elaborate the equality type first homogeneously, then
-- heterogeneously as a fallback
elab' ina _ (PApp fc (PRef _ _ n) args)
| n == eqTy, [Placeholder, Placeholder, l, r] <- map getTm args
= try (do tyn <- getNameFrom (sMN 0 "aqty")
claim tyn RType
movelast tyn
elab' ina (Just fc) (PApp fc (PRef fc [] eqTy)
[pimp (sUN "A") (PRef NoFC [] tyn) True,
pimp (sUN "B") (PRef NoFC [] tyn) False,
pexp l, pexp r]))
(do atyn <- getNameFrom (sMN 0 "aqty")
btyn <- getNameFrom (sMN 0 "bqty")
claim atyn RType
movelast atyn
claim btyn RType
movelast btyn
elab' ina (Just fc) (PApp fc (PRef fc [] eqTy)
[pimp (sUN "A") (PRef NoFC [] atyn) True,
pimp (sUN "B") (PRef NoFC [] btyn) False,
pexp l, pexp r]))
elab' ina _ (PPair fc hls _ l r)
= do whnf_compute
g <- goal
let (tc, _) = unApply g
case g of
TType _ -> elab' ina (Just fc) (PApp fc (PRef fc hls pairTy)
[pexp l,pexp r])
UType _ -> elab' ina (Just fc) (PApp fc (PRef fc hls upairTy)
[pexp l,pexp r])
_ -> case tc of
P _ n _ | n == upairTy
-> elab' ina (Just fc) (PApp fc (PRef fc hls upairCon)
[pimp (sUN "A") Placeholder False,
pimp (sUN "B") Placeholder False,
pexp l, pexp r])
_ -> elab' ina (Just fc) (PApp fc (PRef fc hls pairCon)
[pimp (sUN "A") Placeholder False,
pimp (sUN "B") Placeholder False,
pexp l, pexp r])
elab' ina _ (PDPair fc hls p l@(PRef nfc hl n) t r)
= case p of
IsType -> asType
IsTerm -> asValue
TypeOrTerm ->
do whnf_compute
g <- goal
case g of
TType _ -> asType
_ -> asValue
where asType = elab' ina (Just fc) (PApp fc (PRef NoFC hls sigmaTy)
[pexp t,
pexp (PLam fc n nfc Placeholder r)])
asValue = elab' ina (Just fc) (PApp fc (PRef fc hls sigmaCon)
[pimp (sMN 0 "a") t False,
pimp (sMN 0 "P") Placeholder True,
pexp l, pexp r])
elab' ina _ (PDPair fc hls p l t r) = elab' ina (Just fc) (PApp fc (PRef fc hls sigmaCon)
[pimp (sMN 0 "a") t False,
pimp (sMN 0 "P") Placeholder True,
pexp l, pexp r])
elab' ina fc (PAlternative ms (ExactlyOne delayok) as)
= do as_pruned <- doPrune as
-- Finish the mkUniqueNames job with the pruned set, rather than
-- the full set.
uns <- get_usedns
let as' = map (mkUniqueNames (uns ++ map snd ms) ms) as_pruned
(h : hs) <- get_holes
ty <- goal
case as' of
[] -> do hds <- mapM showHd as
lift $ tfail $ NoValidAlts hds
[x] -> elab' ina fc x
-- If there's options, try now, and if that fails, postpone
-- to later.
_ -> handleError isAmbiguous
(do hds <- mapM showHd as'
tryAll (zip (map (elab' ina fc) as')
hds))
(do movelast h
delayElab 5 $ do
hs <- get_holes
when (h `elem` hs) $ do
focus h
as'' <- doPrune as'
case as'' of
[x] -> elab' ina fc x
_ -> do hds <- mapM showHd as''
tryAll' False (zip (map (elab' ina fc) as'')
hds))
where showHd (PApp _ (PRef _ _ (UN l)) [_, _, arg])
| l == txt "Delay" = showHd (getTm arg)
showHd (PApp _ (PRef _ _ n) _) = return n
showHd (PRef _ _ n) = return n
showHd (PApp _ h _) = showHd h
showHd x = getNameFrom (sMN 0 "_") -- We probably should do something better than this here
doPrune as =
do compute -- to get 'Delayed' if it's there
ty <- goal
ctxt <- get_context
env <- get_env
let ty' = unDelay ty
let (tc, _) = unApply ty'
return $ pruneByType eimpossible env tc ty' ist as
unDelay t | (P _ (UN l) _, [_, arg]) <- unApply t,
l == txt "Delayed" = unDelay arg
| otherwise = t
isAmbiguous (CantResolveAlts _) = delayok
isAmbiguous (Elaborating _ _ _ e) = isAmbiguous e
isAmbiguous (ElaboratingArg _ _ _ e) = isAmbiguous e
isAmbiguous (At _ e) = isAmbiguous e
isAmbiguous _ = False
elab' ina fc (PAlternative ms FirstSuccess as_in)
= do -- finish the mkUniqueNames job
uns <- get_usedns
let as = map (mkUniqueNames (uns ++ map snd ms) ms) as_in
trySeq as
where -- if none work, take the error from the first
trySeq (x : xs) = let e1 = elab' ina fc x in
try' e1 (trySeq' e1 xs) True
trySeq [] = fail "Nothing to try in sequence"
trySeq' deferr [] = do deferr; unifyProblems
trySeq' deferr (x : xs)
= try' (tryCatch (do elab' ina fc x
solveAutos ist fn False
unifyProblems)
(\_ -> trySeq' deferr []))
(trySeq' deferr xs) True
elab' ina fc (PAlternative ms TryImplicit (orig : alts)) = do
env <- get_env
whnf_compute
ty <- goal
let doelab = elab' ina fc orig
tryCatch doelab
(\err ->
if recoverableErr err
then -- trace ("NEED IMPLICIT! " ++ show orig ++ "\n" ++
-- show alts ++ "\n" ++
-- showQuick err) $
-- Prune the coercions so that only the ones
-- with the right type to fix the error will be tried!
case pruneAlts err alts env of
[] -> lift $ tfail err
alts' -> do
try' (elab' ina fc (PAlternative ms (ExactlyOne False) alts'))
(lift $ tfail err) -- take error from original if all fail
True
else lift $ tfail err)
where
recoverableErr (CantUnify _ _ _ _ _ _) = True
recoverableErr (TooManyArguments _) = False
recoverableErr (CantSolveGoal _ _) = False
recoverableErr (CantResolveAlts _) = False
recoverableErr (NoValidAlts _) = True
recoverableErr (ProofSearchFail (Msg _)) = True
recoverableErr (ProofSearchFail _) = False
recoverableErr (ElaboratingArg _ _ _ e) = recoverableErr e
recoverableErr (At _ e) = recoverableErr e
recoverableErr (ElabScriptDebug _ _ _) = False
recoverableErr _ = True
pruneAlts (CantUnify _ (inc, _) (outc, _) _ _ _) alts env
= case unApply (whnf (tt_ctxt ist) env inc) of
(P (TCon _ _) n _, _) -> filter (hasArg n env) alts
(Constant _, _) -> alts
_ -> filter isLend alts -- special case hack for 'Borrowed'
pruneAlts (ElaboratingArg _ _ _ e) alts env = pruneAlts e alts env
pruneAlts (At _ e) alts env = pruneAlts e alts env
pruneAlts (NoValidAlts as) alts env = alts
pruneAlts err alts _ = filter isLend alts
hasArg n env ap | isLend ap = True -- special case hack for 'Borrowed'
hasArg n env (PApp _ (PRef _ _ a) _)
= case lookupTyExact a (tt_ctxt ist) of
Just ty -> let args = map snd (getArgTys (normalise (tt_ctxt ist) env ty)) in
any (fnIs n) args
Nothing -> False
hasArg n env (PAlternative _ _ as) = any (hasArg n env) as
hasArg n _ tm = False
isLend (PApp _ (PRef _ _ l) _) = l == sNS (sUN "lend") ["Ownership"]
isLend _ = False
fnIs n ty = case unApply ty of
(P _ n' _, _) -> n == n'
_ -> False
showQuick (CantUnify _ (l, _) (r, _) _ _ _)
= show (l, r)
showQuick (ElaboratingArg _ _ _ e) = showQuick e
showQuick (At _ e) = showQuick e
showQuick (ProofSearchFail (Msg _)) = "search fail"
showQuick _ = "No chance"
elab' ina _ (PPatvar fc n) | bindfree
= do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False)
-- elab' (_, _, inty) (PRef fc f)
-- | isTConName f (tt_ctxt ist) && pattern && not reflection && not inty
-- = lift $ tfail (Msg "Typecase is not allowed")
elab' ec _ tm@(PRef fc hl n)
| pattern && not reflection && not (e_qq ec) && not (e_intype ec)
&& isTConName n (tt_ctxt ist)
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ec) && e_nomatching ec
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| (pattern || intransform || (bindfree && bindable n)) && not (inparamBlock n) && not (e_qq ec)
= do ty <- goal
testImplicitWarning fc n ty
let ina = e_inarg ec
guarded = e_guarded ec
inty = e_intype ec
ctxt <- get_context
let defined = case lookupTy n ctxt of
[] -> False
_ -> True
-- this is to stop us resolve interfaces recursively
-- trace (show (n, guarded)) $
if (tcname n && ina && not intransform)
then erun fc $
do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False)
else if defined
then do apply (Var n) []
annot <- findHighlight n
solve
highlightSource fc annot
else try (do apply (Var n) []
annot <- findHighlight n
solve
highlightSource fc annot)
(do patvar n
update_term liftPats
highlightSource fc (AnnBoundName n False))
where inparamBlock n = case lookupCtxtName n (inblock info) of
[] -> False
_ -> True
bindable (NS _ _) = False
bindable (MN _ _) = True
bindable n = implicitable n && autoimpls
elab' ina _ f@(PInferRef fc hls n) = elab' ina (Just fc) (PApp NoFC f [])
elab' ina fc' tm@(PRef fc hls n)
| pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTConName n (tt_ctxt ist)
= lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise =
do fty <- get_type (Var n) -- check for implicits
ctxt <- get_context
env <- get_env
let a' = insertScopedImps fc (whnfArgs ctxt env fty) []
if null a'
then erun fc $
do apply (Var n) []
hilite <- findHighlight n
solve
mapM_ (uncurry highlightSource) $
(fc, hilite) : map (\f -> (f, hilite)) hls
else elab' ina fc' (PApp fc tm [])
elab' ina _ (PLam _ _ _ _ PImpossible) = lift . tfail . Msg $ "Only pattern-matching lambdas can be impossible"
elab' ina _ (PLam fc n nfc Placeholder sc)
= do -- if n is a type constructor name, this makes no sense...
ctxt <- get_context
when (isTConName n ctxt) $
lift $ tfail (Msg $ "Can't use type constructor " ++ show n ++ " here")
checkPiGoal n
attack; intro (Just n);
addPSname n -- okay for proof search
-- trace ("------ intro " ++ show n ++ " ---- \n" ++ show ptm)
elabE (ina { e_inarg = True } ) (Just fc) sc; solve
highlightSource nfc (AnnBoundName n False)
elab' ec _ (PLam fc n nfc ty sc)
= do tyn <- getNameFrom (sMN 0 "lamty")
-- if n is a type constructor name, this makes no sense...
ctxt <- get_context
when (isTConName n ctxt) $
lift $ tfail (Msg $ "Can't use type constructor " ++ show n ++ " here")
checkPiGoal n
claim tyn RType
explicit tyn
attack
ptm <- get_term
hs <- get_holes
introTy (Var tyn) (Just n)
addPSname n -- okay for proof search
focus tyn
elabE (ec { e_inarg = True, e_intype = True }) (Just fc) ty
elabE (ec { e_inarg = True }) (Just fc) sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina fc (PPi p n nfc Placeholder sc)
= do attack; arg n (is_scoped p) (sMN 0 "phTy")
addAutoBind p n
addPSname n -- okay for proof search
elabE (ina { e_inarg = True, e_intype = True }) fc sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina fc (PPi p n nfc ty sc)
= do attack; tyn <- getNameFrom (sMN 0 "piTy")
claim tyn RType
n' <- case n of
MN _ _ -> unique_hole n
_ -> return n
forall n' (is_scoped p) (Var tyn)
addAutoBind p n'
addPSname n' -- okay for proof search
focus tyn
let ec' = ina { e_inarg = True, e_intype = True }
elabE ec' fc ty
elabE ec' fc sc
solve
highlightSource nfc (AnnBoundName n False)
elab' ina _ tm@(PLet fc n nfc ty val sc)
= do attack
ivs <- get_implementations
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
explicit valn
letbind n (Var tyn) (Var valn)
addPSname n
case ty of
Placeholder -> return ()
_ -> do focus tyn
explicit tyn
elabE (ina { e_inarg = True, e_intype = True })
(Just fc) ty
focus valn
elabE (ina { e_inarg = True, e_intype = True })
(Just fc) val
ivs' <- get_implementations
env <- get_env
elabE (ina { e_inarg = True }) (Just fc) sc
when (not (pattern || intransform)) $
mapM_ (\n -> do focus n
g <- goal
hs <- get_holes
if all (\n -> n == tyn || not (n `elem` hs)) (freeNames g)
then handleError (tcRecoverable emode)
(resolveTC True False 10 g fn elabRec ist)
(movelast n)
else movelast n)
(ivs' \\ ivs)
-- HACK: If the name leaks into its type, it may leak out of
-- scope outside, so substitute in the outer scope.
expandLet n (case lookup n env of
Just (Let t v) -> v
other -> error ("Value not a let binding: " ++ show other))
solve
highlightSource nfc (AnnBoundName n False)
elab' ina _ (PGoal fc r n sc) = do
rty <- goal
attack
tyn <- getNameFrom (sMN 0 "letty")
claim tyn RType
valn <- getNameFrom (sMN 0 "letval")
claim valn (Var tyn)
letbind n (Var tyn) (Var valn)
focus valn
elabE (ina { e_inarg = True, e_intype = True }) (Just fc) (PApp fc r [pexp (delab ist rty)])
env <- get_env
computeLet n
elabE (ina { e_inarg = True }) (Just fc) sc
solve
-- elab' ina fc (PLet n Placeholder
-- (PApp fc r [pexp (delab ist rty)]) sc)
elab' ina _ tm@(PApp fc (PInferRef _ _ f) args) = do
rty <- goal
ds <- get_deferred
ctxt <- get_context
-- make a function type a -> b -> c -> ... -> rty for the
-- new function name
env <- get_env
argTys <- claimArgTys env args
fn <- getNameFrom (sMN 0 "inf_fn")
let fty = fnTy argTys rty
-- trace (show (ptm, map fst argTys)) $ focus fn
-- build and defer the function application
attack; deferType (mkN f) fty (map fst argTys); solve
-- elaborate the arguments, to unify their types. They all have to
-- be explicit.
mapM_ elabIArg (zip argTys args)
where claimArgTys env [] = return []
claimArgTys env (arg : xs) | Just n <- localVar env (getTm arg)
= do nty <- get_type (Var n)
ans <- claimArgTys env xs
return ((n, (False, forget nty)) : ans)
claimArgTys env (_ : xs)
= do an <- getNameFrom (sMN 0 "inf_argTy")
aval <- getNameFrom (sMN 0 "inf_arg")
claim an RType
claim aval (Var an)
ans <- claimArgTys env xs
return ((aval, (True, (Var an))) : ans)
fnTy [] ret = forget ret
fnTy ((x, (_, xt)) : xs) ret = RBind x (Pi Nothing xt RType) (fnTy xs ret)
localVar env (PRef _ _ x)
= case lookup x env of
Just _ -> Just x
_ -> Nothing
localVar env _ = Nothing
elabIArg ((n, (True, ty)), def) =
do focus n; elabE ina (Just fc) (getTm def)
elabIArg _ = return () -- already done, just a name
mkN n@(NS _ _) = n
mkN n@(SN _) = n
mkN n = case namespace info of
xs@(_:_) -> sNS n xs
_ -> n
elab' ina _ (PMatchApp fc fn)
= do (fn', imps) <- case lookupCtxtName fn (idris_implicits ist) of
[(n, args)] -> return (n, map (const True) args)
_ -> lift $ tfail (NoSuchVariable fn)
ns <- match_apply (Var fn') (map (\x -> (x,0)) imps)
solve
-- if f is local, just do a simple_app
-- FIXME: Anyone feel like refactoring this mess? - EB
elab' ina topfc tm@(PApp fc (PRef ffc hls f) args_in)
| pattern && not reflection && not (e_qq ina) && e_nomatching ina
= lift $ tfail $ Msg ("Attempting concrete match on polymorphic argument: " ++ show tm)
| otherwise = implicitApp $
do env <- get_env
ty <- goal
fty <- get_type (Var f)
ctxt <- get_context
let dataCon = isDConName f ctxt
annot <- findHighlight f
mapM_ checkKnownImplicit args_in
let args = insertScopedImps fc (whnfArgs ctxt env fty) args_in
let unmatchableArgs = if pattern
then getUnmatchable (tt_ctxt ist) f
else []
-- trace ("BEFORE " ++ show f ++ ": " ++ show ty) $
when (pattern && not reflection && not (e_qq ina) && not (e_intype ina)
&& isTConName f (tt_ctxt ist)) $
lift $ tfail $ Msg ("No explicit types on left hand side: " ++ show tm)
-- trace (show (f, args_in, args)) $
if (f `elem` map fst env && length args == 1 && length args_in == 1)
then -- simple app, as below
do simple_app False
(elabE (ina { e_isfn = True }) (Just fc) (PRef ffc hls f))
(elabE (ina { e_inarg = True,
e_guarded = dataCon }) (Just fc) (getTm (head args)))
(show tm)
solve
mapM (uncurry highlightSource) $
(ffc, annot) : map (\f -> (f, annot)) hls
return []
else
do ivs <- get_implementations
ps <- get_probs
-- HACK: we shouldn't resolve interfaces if we're defining an implementation
-- function or default definition.
let isinf = f == inferCon || tcname f
-- if f is an interface, we need to know its arguments so that
-- we can unify with them
case lookupCtxt f (idris_interfaces ist) of
[] -> return ()
_ -> do mapM_ setInjective (map getTm args)
-- maybe more things are solvable now
unifyProblems
let guarded = isConName f ctxt
-- trace ("args is " ++ show args) $ return ()
ns <- apply (Var f) (map isph args)
-- trace ("ns is " ++ show ns) $ return ()
-- mark any interface arguments as injective
-- when (not pattern) $
mapM_ checkIfInjective (map snd ns)
unifyProblems -- try again with the new information,
-- to help with disambiguation
ulog <- getUnifyLog
annot <- findHighlight f
mapM (uncurry highlightSource) $
(ffc, annot) : map (\f -> (f, annot)) hls
elabArgs ist (ina { e_inarg = e_inarg ina || not isinf,
e_guarded = dataCon })
[] fc False f
(zip ns (unmatchableArgs ++ repeat False))
(f == sUN "Force")
(map (\x -> getTm x) args) -- TODO: remove this False arg
imp <- if (e_isfn ina) then
do guess <- get_guess
env <- get_env
case safeForgetEnv (map fst env) guess of
Nothing ->
return []
Just rguess -> do
gty <- get_type rguess
let ty_n = whnf ctxt env gty
return $ getReqImps ty_n
else return []
-- Now we find out how many implicits we needed at the
-- end of the application by looking at the goal again
-- - Have another go, but this time add the
-- implicits (can't think of a better way than this...)
case imp of
rs@(_:_) | not pattern -> return rs -- quit, try again
_ -> do solve
hs <- get_holes
ivs' <- get_implementations
-- Attempt to resolve any interfaces which have 'complete' types,
-- i.e. no holes in them
when (not pattern || (e_inarg ina && not tcgen)) $
mapM_ (\n -> do focus n
g <- goal
env <- get_env
hs <- get_holes
if all (\n -> not (n `elem` hs)) (freeNames g)
then handleError (tcRecoverable emode)
(resolveTC False False 10 g fn elabRec ist)
(movelast n)
else movelast n)
(ivs' \\ ivs)
return []
where
-- Run the elaborator, which returns how many implicit
-- args were needed, then run it again with those args. We need
-- this because we have to elaborate the whole application to
-- find out whether any computations have caused more implicits
-- to be needed.
implicitApp :: ElabD [ImplicitInfo] -> ElabD ()
implicitApp elab
| pattern || intransform = do elab; return ()
| otherwise
= do s <- get
imps <- elab
case imps of
[] -> return ()
es -> do put s
elab' ina topfc (PAppImpl tm es)
checkKnownImplicit imp
| UnknownImp `elem` argopts imp
= lift $ tfail $ UnknownImplicit (pname imp) f
checkKnownImplicit _ = return ()
getReqImps (Bind x (Pi (Just i) ty _) sc)
= i : getReqImps sc
getReqImps _ = []
checkIfInjective n = do
env <- get_env
case lookup n env of
Nothing -> return ()
Just b ->
case unApply (whnf (tt_ctxt ist) env (binderTy b)) of
(P _ c _, args) ->
case lookupCtxtExact c (idris_interfaces ist) of
Nothing -> return ()
Just ci -> -- interface, set as injective
do mapM_ setinjArg (getDets 0 (interface_determiners ci) args)
-- maybe we can solve more things now...
ulog <- getUnifyLog
probs <- get_probs
inj <- get_inj
traceWhen ulog ("Injective now " ++ show args ++ "\nAll: " ++ show inj
++ "\nProblems: " ++ qshow probs) $
unifyProblems
probs <- get_probs
traceWhen ulog (qshow probs) $ return ()
_ -> return ()
setinjArg (P _ n _) = setinj n
setinjArg _ = return ()
getDets i ds [] = []
getDets i ds (a : as) | i `elem` ds = a : getDets (i + 1) ds as
| otherwise = getDets (i + 1) ds as
tacTm (PTactics _) = True
tacTm (PProof _) = True
tacTm _ = False
setInjective (PRef _ _ n) = setinj n
setInjective (PApp _ (PRef _ _ n) _) = setinj n
setInjective _ = return ()
elab' ina _ tm@(PApp fc f [arg]) =
erun fc $
do simple_app (not $ headRef f)
(elabE (ina { e_isfn = True }) (Just fc) f)
(elabE (ina { e_inarg = True }) (Just fc) (getTm arg))
(show tm)
solve
where headRef (PRef _ _ _) = True
headRef (PApp _ f _) = headRef f
headRef (PAlternative _ _ as) = all headRef as
headRef _ = False
elab' ina fc (PAppImpl f es) = do appImpl (reverse es) -- not that we look...
solve
where appImpl [] = elab' (ina { e_isfn = False }) fc f -- e_isfn not set, so no recursive expansion of implicits
appImpl (e : es) = simple_app False
(appImpl es)
(elab' ina fc Placeholder)
(show f)
elab' ina fc Placeholder
= do (h : hs) <- get_holes
movelast h
elab' ina fc (PMetavar nfc n) =
do ptm <- get_term
-- When building the metavar application, leave out the unique
-- names which have been used elsewhere in the term, since we
-- won't be able to use them in the resulting application.
let unique_used = getUniqueUsed (tt_ctxt ist) ptm
let n' = metavarName (namespace info) n
attack
psns <- getPSnames
n' <- defer unique_used n'
solve
highlightSource nfc (AnnName n' (Just MetavarOutput) Nothing Nothing)
elab' ina fc (PProof ts) = do compute; mapM_ (runTac True ist (elabFC info) fn) ts
elab' ina fc (PTactics ts)
| not pattern = do mapM_ (runTac False ist fc fn) ts
| otherwise = elab' ina fc Placeholder
elab' ina fc (PElabError e) = lift $ tfail e
elab' ina mfc (PRewrite fc substfn rule sc newg)
= elabRewrite (elab' ina mfc) ist fc substfn rule sc newg
-- A common error case if trying to typecheck an autogenerated case block
elab' ina _ c@(PCase fc Placeholder opts)
= lift $ tfail (Msg "No expression for the case to inspect.\nYou need to replace the _ with an expression.")
elab' ina _ c@(PCase fc scr opts)
= do attack
tyn <- getNameFrom (sMN 0 "scty")
claim tyn RType
valn <- getNameFrom (sMN 0 "scval")
scvn <- getNameFrom (sMN 0 "scvar")
claim valn (Var tyn)
letbind scvn (Var tyn) (Var valn)
-- Start filling in the scrutinee type, if we can work one
-- out from the case options
let scrTy = getScrType (map fst opts)
case scrTy of
Nothing -> return ()
Just ty -> do focus tyn
elabE ina (Just fc) ty
focus valn
elabE (ina { e_inarg = True }) (Just fc) scr
-- Solve any remaining implicits - we need to solve as many
-- as possible before making the 'case' type
unifyProblems
matchProblems True
args <- get_env
envU <- mapM (getKind args) args
let namesUsedInRHS = nub $ scvn : concatMap (\(_,rhs) -> allNamesIn rhs) opts
-- Drop the unique arguments used in the term already
-- and in the scrutinee (since it's
-- not valid to use them again anyway)
--
-- Also drop unique arguments which don't appear explicitly
-- in either case branch so they don't count as used
-- unnecessarily (can only do this for unique things, since we
-- assume they don't appear implicitly in types)
ptm <- get_term
let inOpts = (filter (/= scvn) (map fst args)) \\ (concatMap (\x -> allNamesIn (snd x)) opts)
let argsDropped = filter (isUnique envU)
(nub $ allNamesIn scr ++ inApp ptm ++
inOpts)
let args' = filter (\(n, _) -> n `notElem` argsDropped) args
attack
cname' <- defer argsDropped (mkN (mkCaseName fc fn))
solve
-- if the scrutinee is one of the 'args' in env, we should
-- inspect it directly, rather than adding it as a new argument
let newdef = PClauses fc [] cname'
(caseBlock fc cname' scr
(map (isScr scr) (reverse args')) opts)
-- elaborate case
updateAux (\e -> e { case_decls = (cname', newdef) : case_decls e } )
-- if we haven't got the type yet, hopefully we'll get it later!
movelast tyn
solve
where mkCaseName fc (NS n ns) = NS (mkCaseName fc n) ns
mkCaseName fc n = SN (CaseN (FC' fc) n)
-- mkCaseName (UN x) = UN (x ++ "_case")
-- mkCaseName (MN i x) = MN i (x ++ "_case")
mkN n@(NS _ _) = n
mkN n = case namespace info of
xs@(_:_) -> sNS n xs
_ -> n
getScrType [] = Nothing
getScrType (f : os) = maybe (getScrType os) Just (getAppType f)
getAppType (PRef _ _ n) =
case lookupTyName n (tt_ctxt ist) of
[(n', ty)] | isDConName n' (tt_ctxt ist) ->
case unApply (getRetTy ty) of
(P _ tyn _, args) ->
Just (PApp fc (PRef fc [] tyn)
(map pexp (map (const Placeholder) args)))
_ -> Nothing
_ -> Nothing -- ambiguity is no help to us!
getAppType (PApp _ t as) = getAppType t
getAppType _ = Nothing
inApp (P _ n _) = [n]
inApp (App _ f a) = inApp f ++ inApp a
inApp (Bind n (Let _ v) sc) = inApp v ++ inApp sc
inApp (Bind n (Guess _ v) sc) = inApp v ++ inApp sc
inApp (Bind n b sc) = inApp sc
inApp _ = []
isUnique envk n = case lookup n envk of
Just u -> u
_ -> False
getKind env (n, _)
= case lookup n env of
Nothing -> return (n, False) -- can't happen, actually...
Just b ->
do ty <- get_type (forget (binderTy b))
case ty of
UType UniqueType -> return (n, True)
UType AllTypes -> return (n, True)
_ -> return (n, False)
tcName tm | (P _ n _, _) <- unApply tm
= case lookupCtxt n (idris_interfaces ist) of
[_] -> True
_ -> False
tcName _ = False
usedIn ns (n, b)
= n `elem` ns
|| any (\x -> x `elem` ns) (allTTNames (binderTy b))
elab' ina fc (PUnifyLog t) = do unifyLog True
elab' ina fc t
unifyLog False
elab' ina fc (PQuasiquote t goalt)
= do -- First extract the unquoted subterms, replacing them with fresh
-- names in the quasiquoted term. Claim their reflections to be
-- an inferred type (to support polytypic quasiquotes).
finalTy <- goal
(t, unq) <- extractUnquotes 0 t
let unquoteNames = map fst unq
mapM_ (\uqn -> claim uqn (forget finalTy)) unquoteNames
-- Save the old state - we need a fresh proof state to avoid
-- capturing lexically available variables in the quoted term.
ctxt <- get_context
datatypes <- get_datatypes
g_nextname <- get_global_nextname
saveState
updatePS (const .
newProof (sMN 0 "q") (constraintNS info) ctxt datatypes g_nextname $
P Ref (reflm "TT") Erased)
-- Re-add the unquotes, letting Idris infer the (fictional)
-- types. Here, they represent the real type rather than the type
-- of their reflection.
mapM_ (\n -> do ty <- getNameFrom (sMN 0 "unqTy")
claim ty RType
movelast ty
claim n (Var ty)
movelast n)
unquoteNames
-- Determine whether there's an explicit goal type, and act accordingly
-- Establish holes for the type and value of the term to be
-- quasiquoted
qTy <- getNameFrom (sMN 0 "qquoteTy")
claim qTy RType
movelast qTy
qTm <- getNameFrom (sMN 0 "qquoteTm")
claim qTm (Var qTy)
-- Let-bind the result of elaborating the contained term, so that
-- the hole doesn't disappear
nTm <- getNameFrom (sMN 0 "quotedTerm")
letbind nTm (Var qTy) (Var qTm)
-- Fill out the goal type, if relevant
case goalt of
Nothing -> return ()
Just gTy -> do focus qTy
elabE (ina { e_qq = True }) fc gTy
-- Elaborate the quasiquoted term into the hole
focus qTm
elabE (ina { e_qq = True }) fc t
end_unify
-- We now have an elaborated term. Reflect it and solve the
-- original goal in the original proof state, preserving highlighting
env <- get_env
EState _ _ _ hs _ _ <- getAux
loadState
updateAux (\aux -> aux { highlighting = hs })
let quoted = fmap (explicitNames . binderVal) $ lookup nTm env
isRaw = case unApply (normaliseAll ctxt env finalTy) of
(P _ n _, []) | n == reflm "Raw" -> True
_ -> False
case quoted of
Just q -> do ctxt <- get_context
(q', _, _) <- lift $ recheck (constraintNS info) ctxt [(uq, Lam Erased) | uq <- unquoteNames] (forget q) q
if pattern
then if isRaw
then reflectRawQuotePattern unquoteNames (forget q')
else reflectTTQuotePattern unquoteNames q'
else do if isRaw
then -- we forget q' instead of using q to ensure rechecking
fill $ reflectRawQuote unquoteNames (forget q')
else fill $ reflectTTQuote unquoteNames q'
solve
Nothing -> lift . tfail . Msg $ "Broken elaboration of quasiquote"
-- Finally fill in the terms or patterns from the unquotes. This
-- happens last so that their holes still exist while elaborating
-- the main quotation.
mapM_ elabUnquote unq
where elabUnquote (n, tm)
= do focus n
elabE (ina { e_qq = False }) fc tm
elab' ina fc (PUnquote t) = fail "Found unquote outside of quasiquote"
elab' ina fc (PQuoteName n False nfc) =
do fill $ reflectName n
solve
elab' ina fc (PQuoteName n True nfc) =
do ctxt <- get_context
env <- get_env
case lookup n env of
Just _ -> do fill $ reflectName n
solve
highlightSource nfc (AnnBoundName n False)
Nothing ->
case lookupNameDef n ctxt of
[(n', _)] -> do fill $ reflectName n'
solve
highlightSource nfc (AnnName n' Nothing Nothing Nothing)
[] -> lift . tfail . NoSuchVariable $ n
more -> lift . tfail . CantResolveAlts $ map fst more
elab' ina fc (PAs _ n t) = lift . tfail . Msg $ "@-pattern not allowed here"
elab' ina fc (PHidden t)
| reflection = elab' ina fc t
| otherwise
= do (h : hs) <- get_holes
-- Dotting a hole means that either the hole or any outer
-- hole (a hole outside any occurrence of it)
-- must be solvable by unification as well as being filled
-- in directly.
-- Delay dotted things to the end, then when we elaborate them
-- we can check the result against what was inferred
movelast h
delayElab 10 $ do hs <- get_holes
when (h `elem` hs) $ do
focus h
dotterm
elab' ina fc t
elab' ina fc (PRunElab fc' tm ns) =
do unless (ElabReflection `elem` idris_language_extensions ist) $
lift $ tfail $ At fc' (Msg "You must turn on the ElabReflection extension to use %runElab")
attack
n <- getNameFrom (sMN 0 "tacticScript")
let scriptTy = RApp (Var (sNS (sUN "Elab")
["Elab", "Reflection", "Language"]))
(Var unitTy)
claim n scriptTy
focus n
attack -- to get an extra hole
elab' ina (Just fc') tm
script <- get_guess
fullyElaborated script
solve -- eliminate the hole. Because there are no references, the script is only in the binding
env <- get_env
runElabAction info ist (maybe fc' id fc) env script ns
solve
elab' ina fc (PConstSugar constFC tm) =
-- Here we elaborate the contained term, then calculate
-- highlighting for constFC. The highlighting is the
-- highlighting for the outermost constructor of the result of
-- evaluating the elaborated term, if one exists (it always
-- should, but better to fail gracefully for something silly
-- like highlighting info). This is how implicit applications of
-- fromInteger get highlighted.
do saveState -- so we don't pollute the elaborated term
n <- getNameFrom (sMN 0 "cstI")
n' <- getNameFrom (sMN 0 "cstIhole")
g <- forget <$> goal
claim n' g
movelast n'
-- In order to intercept the elaborated value, we need to
-- let-bind it.
attack
letbind n g (Var n')
focus n'
elab' ina fc tm
env <- get_env
ctxt <- get_context
let v = fmap (normaliseAll ctxt env . finalise . binderVal)
(lookup n env)
loadState -- we have the highlighting - re-elaborate the value
elab' ina fc tm
case v of
Just val -> highlightConst constFC val
Nothing -> return ()
where highlightConst fc (P _ n _) =
highlightSource fc (AnnName n Nothing Nothing Nothing)
highlightConst fc (App _ f _) =
highlightConst fc f
highlightConst fc (Constant c) =
highlightSource fc (AnnConst c)
highlightConst _ _ = return ()
elab' ina fc x = fail $ "Unelaboratable syntactic form " ++ showTmImpls x
-- delay elaboration of 't', with priority 'pri' until after everything
-- else is done.
-- The delayed things with lower numbered priority will be elaborated
-- first. (In practice, this means delayed alternatives, then PHidden
-- things.)
-- Appending (rather than consing) keeps delayed actions with equal
-- priority in first-in-first-out order.
delayElab pri t
    = updateAux (\e -> e { delayed_elab = delayed_elab e ++ [(pri, t)] })
-- Tag each environment entry with whether it is the case scrutinee:
-- only a bare reference to exactly that name counts as a match.
isScr :: PTerm -> (Name, Binder Term) -> (Name, (Bool, Binder Term))
isScr (PRef _ _ n) (n', b) = (n', (n == n', b))
isScr _ (n', b) = (n', (False, b))
-- Build the clauses of the generated top-level function that implements
-- a 'case' block: each (lhs, rhs) option becomes one 'PClause' for the
-- new function 'n', applied to the (possibly shadowed) environment.
caseBlock :: FC -> Name
             -> PTerm -- original scrutinee
             -> [(Name, (Bool, Binder Term))] -> [(PTerm, PTerm)] -> [PClause]
caseBlock fc n scr env opts
    = let args' = findScr env
          args = map mkarg (map getNmScr args') in
          map (mkClause args) opts
  where -- Find the variable we want as the scrutinee and mark it as
        -- 'True'. If the scrutinee is in the environment, match on that
        -- otherwise match on the new argument we're adding.
        findScr ((n, (True, t)) : xs)
                        = (n, (True, t)) : scrName n xs
        findScr [(n, (_, t))] = [(n, (True, t))]
        findScr (x : xs) = x : findScr xs
        -- [] can't happen since scrutinee is in the environment!
        findScr [] = error "The impossible happened - the scrutinee was not in the environment"
        -- To make sure top level pattern name remains in scope, put
        -- it at the end of the environment
        scrName n [] = []
        scrName n [(_, t)] = [(n, t)]
        scrName n (x : xs) = x : scrName n xs
        getNmScr (n, (s, _)) = (n, s)
        -- turn an environment name into a pattern reference,
        -- remembering whether it is the scrutinee
        mkarg (n, s) = (PRef fc [] n, s)
        -- may be shadowed names in the new pattern - so replace the
        -- old ones with an _
        -- Also, names which don't appear on the rhs should not be
        -- fixed on the lhs, or this restricts the kind of matching
        -- we can do to non-dependent types.
        mkClause args (l, r)
            = let args' = map (shadowed (allNamesIn l)) args
                  args'' = map (implicitable (allNamesIn r ++
                                              keepscrName scr)) args'
                  lhs = PApp (getFC fc l) (PRef NoFC [] n)
                             (map (mkLHSarg l) args'') in
                  PClause (getFC fc l) n lhs [] r []
        -- Keep scrutinee available if it's just a name (this makes
        -- the names in scope look better when looking at a hole on
        -- the rhs of a case)
        keepscrName (PRef _ _ n) = [n]
        keepscrName _ = []
        -- the scrutinee argument position gets the branch's pattern 'l';
        -- everything else keeps the (possibly placeholder'd) env name
        mkLHSarg l (tm, True) = pexp l
        mkLHSarg l (tm, False) = pexp tm
        shadowed new (PRef _ _ n, s) | n `elem` new = (Placeholder, s)
        shadowed new t = t
        implicitable rhs (PRef _ _ n, s) | n `notElem` rhs = (Placeholder, s)
        implicitable rhs t = t
        -- best-effort source location for a pattern, defaulting to 'd'
        getFC d (PApp fc _ _) = fc
        getFC d (PRef fc _ _) = fc
        getFC d (PAlternative _ _ (x:_)) = getFC d x
        getFC d x = d
-- Fail if a term is not yet fully elaborated (e.g. if it contains
-- case block functions that don't yet exist)
fullyElaborated :: Term -> ElabD ()
fullyElaborated (P _ n _) =
  do estate <- getAux
     -- a name still recorded in 'case_decls' is a case block that has
     -- not been added to the global context yet
     case lookup n (case_decls estate) of
       Nothing -> return ()
       Just _ -> lift . tfail $ ElabScriptStaging n
fullyElaborated (Bind n b body) = fullyElaborated body >> for_ b fullyElaborated
fullyElaborated (App _ l r) = fullyElaborated l >> fullyElaborated r
fullyElaborated (Proj t _) = fullyElaborated t
-- constants, variables etc. carry no sub-terms to check
fullyElaborated _ = return ()
-- If the goal type is a "Lazy", then try elaborating via 'Delay'
-- first. We need to do this brute force approach, rather than anything
-- more precise, since there may be various other ambiguities to resolve
-- first.
insertLazy :: ElabCtxt -> PTerm -> ElabD PTerm
-- terms that are already explicit Delay/Force applications are left alone
insertLazy ina t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Delay" = return t
insertLazy ina t@(PApp _ (PRef _ _ (UN l)) _) | l == txt "Force" = return t
insertLazy ina (PCoerced t) = return t
-- Don't add a delay to top level pattern variables, since they
-- can be forced on the rhs if needed
insertLazy ina t@(PPatvar _ _) | pattern && not (e_guarded ina) = return t
insertLazy ina t =
    do ty <- goal
       env <- get_env
       let (tyh, _) = unApply (normalise (tt_ctxt ist) env ty)
       let tries = [mkDelay env t, t]
       case tyh of
            -- goal normalises to an application of 'Delayed': offer the
            -- delayed term first, falling back to the original
            P _ (UN l) _ | l == txt "Delayed"
                -> return (PAlternative [] FirstSuccess tries)
            _ -> return t
  where
    -- push the Delay inside alternatives so each branch gets its own
    mkDelay env (PAlternative ms b xs) = PAlternative ms b (map (mkDelay env) xs)
    mkDelay env t
        = let fc = fileFC "Delay" in
              addImplBound ist (map fst env) (PApp fc (PRef fc [] (sUN "Delay"))
                                                 [pexp t])
-- Don't put implicit coercions around applications which are marked
-- as '%noImplicit', or around case blocks, otherwise we get exponential
-- blowup especially where there are errors deep in large expressions.
notImplicitable (PApp _ f _) = notImplicitable f
-- TMP HACK no coercing on bind (make this configurable)
notImplicitable (PRef _ _ n)
    -- a head with the NoImplicit flag set opts out of coercions
    | [opts] <- lookupCtxt n (idris_flags ist)
        = NoImplicit `elem` opts
notImplicitable (PAlternative _ _ as) = any notImplicitable as
-- case is tricky enough without implicit coercions! If they are needed,
-- they can go in the branches separately.
notImplicitable (PCase _ _ _) = True
notImplicitable _ = False
-- Elaboration works more smoothly if we expand function applications
-- to their full arity and elaborate it all at once (better error messages
-- in particular)
expandToArity tm@(PApp fc f a) = do
   env <- get_env
   case fullApp tm of
        -- if f is global, leave it alone because we've already
        -- expanded it to the right arity
        -- NOTE(review): this branch actually fires when the head is
        -- bound in the local environment ('lookup f env' succeeds);
        -- it pads the application to the arity of the binder's type
        PApp fc ftm@(PRef _ _ f) args | Just aty <- lookup f env ->
           do let a = length (getArgTys (normalise (tt_ctxt ist) env (binderTy aty)))
              return (mkPApp fc a ftm args)
        _ -> return tm
expandToArity t = return t
-- flatten nested applications: (f a) b ==> f a b
fullApp (PApp _ (PApp fc f args) xs) = fullApp (PApp fc f (args ++ xs))
fullApp x = x
-- Weave scoped implicit arguments into the supplied argument list by
-- walking the function type: interface-constraint implicits become
-- PResolveTC, other non-top-level implicits become placeholders, and
-- each remaining Pi binder consumes the next supplied argument.
insertScopedImps fc (Bind n (Pi im@(Just i) _ _) sc) xs
    | tcimplementation i && not (toplevel_imp i)
        = pimp n (PResolveTC fc) True : insertScopedImps fc sc xs
    | not (toplevel_imp i)
        = pimp n Placeholder True : insertScopedImps fc sc xs
insertScopedImps fc (Bind n (Pi _ _ _) sc) (x : xs)
    = x : insertScopedImps fc sc xs
insertScopedImps _ _ xs = xs
-- If the (normalised) goal is a Pi over a scoped implicit, wrap the
-- term in a lambda for that implicit (one binder at a time).
insertImpLam ina t =
    do ty <- goal
       env <- get_env
       let ty' = normalise (tt_ctxt ist) env ty
       addLam ty' t
  where
    -- just one level at a time
    addLam (Bind n (Pi (Just _) _ _) sc) t =
            do impn <- unique_hole n -- (sMN 0 "scoped_imp")
               if e_isfn ina -- apply to an implicit immediately
                  then return (PApp emptyFC
                                    (PLam emptyFC impn NoFC Placeholder t)
                                    [pexp Placeholder])
                  else return (PLam emptyFC impn NoFC Placeholder t)
    addLam _ t = return t
-- Offer every known coercion into the goal type as an alternative to
-- elaborating the term directly. Case blocks and terms flagged as
-- not-implicitable are left untouched; already-coerced terms are
-- unwrapped rather than coerced again.
insertCoerce ina t@(PCase _ _ _) = return t
insertCoerce ina t | notImplicitable t = return t
insertCoerce ina t =
    do ty <- goal
       -- Check for possible coercions to get to the goal
       -- and add them as 'alternatives'
       env <- get_env
       let ty' = normalise (tt_ctxt ist) env ty
       let cs = getCoercionsTo ist ty'
       let t' = case (t, cs) of
                     (PCoerced tm, _) -> tm
                     (_, []) -> t
                     (_, cs) -> PAlternative [] TryImplicit
                                   (t : map (mkCoerce env t) cs)
       return t'
  where
    -- apply the coercion inside alternatives, branch by branch
    mkCoerce env (PAlternative ms aty tms) n
        = PAlternative ms aty (map (\t -> mkCoerce env t n) tms)
    mkCoerce env t n = let fc = maybe (fileFC "Coercion") id (highestFC t) in
                           addImplBound ist (map fst env)
                                  (PApp fc (PRef fc [] n) [pexp (PCoerced t)])
-- | Elaborate the arguments to a function
elabArgs :: IState -- ^ The current Idris state
         -> ElabCtxt -- ^ (in an argument, guarded, in a type, in a qquote)
         -> [Bool]
         -> FC -- ^ Source location
         -> Bool
         -> Name -- ^ Name of the function being applied
         -> [((Name, Name), Bool)] -- ^ (Argument Name, Hole Name, unmatchable)
         -> Bool -- ^ under a 'force'
         -> [PTerm] -- ^ argument
         -> ElabD ()
elabArgs ist ina failed fc retry f [] force _ = return ()
elabArgs ist ina failed fc r f (((argName, holeName), unm):ns) force (t : args)
    = do hs <- get_holes
         -- only elaborate into holes that still exist; a hole already
         -- solved by unification is simply skipped
         if holeName `elem` hs then
            do focus holeName
               case t of
                  -- placeholders need no elaboration; push the hole to
                  -- the end and continue with the remaining arguments
                  Placeholder -> do movelast holeName
                                    elabArgs ist ina failed fc r f ns force args
                  _ -> elabArg t
            else elabArgs ist ina failed fc r f ns force args
  where elabArg t =
          do -- solveAutos ist fn False
             now_elaborating fc f argName
             wrapErr f argName $ do
               hs <- get_holes
               tm <- get_term
               -- No coercing under an explicit Force (or it can Force/Delay
               -- recursively!)
               let elab = if force then elab' else elabE
               -- NOTE(review): 'failed'' is always just 'failed' (the
               -- inner block returns it unchanged) and the recursive call
               -- below passes 'failed' again, so the [Bool] accumulator
               -- is threaded but never modified here.
               failed' <- -- trace (show (n, t, hs, tm)) $
                          -- traceWhen (not (null cs)) (show ty ++ "\n" ++ showImp True t) $
                          do focus holeName;
                             g <- goal
                             -- Can't pattern match on polymorphic goals
                             poly <- goal_polymorphic
                             ulog <- getUnifyLog
                             traceWhen ulog ("Elaborating argument " ++ show (argName, holeName, g)) $
                              elab (ina { e_nomatching = unm && poly }) (Just fc) t
                             return failed
               done_elaborating_arg f argName
               elabArgs ist ina failed fc r f ns force args
        -- run 'action', rewrapping any failure with the stack of
        -- in-progress argument elaborations for a better error message
        wrapErr f argName action =
          do elabState <- get
             while <- elaborating_app
             let while' = map (\(x, y, z)-> (y, z)) while
             (result, newState) <- case runStateT action elabState of
                                     OK (res, newState) -> return (res, newState)
                                     Error e -> do done_elaborating_arg f argName
                                                   lift (tfail (elaboratingArgErr while' e))
             put newState
             return result
-- more argument names than supplied terms: internal inconsistency
elabArgs _ _ _ _ _ _ (((arg, hole), _) : _) _ [] =
  fail $ "Can't elaborate these args: " ++ show arg ++ " " ++ show hole
-- Record a name that was automatically bound as an implicit, so that
-- 'testImplicitWarning' can warn if it clashes with a global.
-- NOTE(review): only fires when the final 'Imp' field is False -
-- confirm the meaning of that flag against the 'Plicity' definition.
addAutoBind :: Plicity -> Name -> ElabD ()
addAutoBind (Imp _ _ _ _ False) n
     = updateAux (\est -> est { auto_binds = n : auto_binds est })
addAutoBind _ _ = return ()
-- In a type declaration, warn when an automatically-bound implicit
-- name also unifies with a global of the same name - the user may have
-- intended the global rather than a fresh implicit.
testImplicitWarning :: FC -> Name -> Type -> ElabD ()
testImplicitWarning fc n goal
   | implicitable n && emode == ETyDecl
       = do env <- get_env
            est <- getAux
            when (n `elem` auto_binds est) $
                tryUnify env (lookupTyName n (tt_ctxt ist))
   | otherwise = return ()
  where
    -- record a warning for the first global whose type unifies with
    -- the goal; stop at the first success
    tryUnify env [] = return ()
    tryUnify env ((nm, ty) : ts)
        = do inj <- get_inj
             hs <- get_holes
             case unify (tt_ctxt ist) env (ty, Nothing) (goal, Nothing)
                        inj hs [] [] of
                  OK _ ->
                     updateAux (\est -> est { implicit_warnings =
                                     (fc, nm) : implicit_warnings est })
                  _ -> tryUnify env ts
-- For every alternative, look at the function at the head. Automatically resolve
-- any nested alternatives where that function is also at the head
pruneAlt :: [PTerm] -> [PTerm]
pruneAlt xs = map prune xs
  where
    prune (PApp fc1 (PRef fc2 hls f) as)
        = PApp fc1 (PRef fc2 hls f) (fmap (fmap (choose f)) as)
    prune t = t
    -- pick the unique alternative headed by 'f' when there is exactly
    -- one; otherwise keep the (recursively pruned) alternatives
    choose f (PAlternative ms a as)
        = let as' = fmap (choose f) as
              fs = filter (headIs f) as' in
          case fs of
             [a] -> a
             _ -> PAlternative ms a as'
    choose f (PApp fc f' as) = PApp fc (choose f f') (fmap (fmap (choose f)) as)
    choose f t = t
    headIs f (PApp _ (PRef _ _ f') _) = f == f'
    headIs f (PApp _ f' _) = headIs f f'
    headIs f _ = True -- keep if it's not an application
-- Rule out alternatives that don't return the same type as the head of the goal
-- (If there are none left as a result, do nothing)
-- The first Bool ('imp') selects impossible-case mode, where only the
-- heads of applications are compared (see 'matchingTypes' below).
pruneByType :: Bool -> Env -> Term -> -- head of the goal
               Type -> -- goal
               IState -> [PTerm] -> [PTerm]
-- if an alternative has a locally bound name at the head, take it
pruneByType imp env t goalty c as
   | Just a <- locallyBound as = [a]
  where
    locallyBound [] = Nothing
    locallyBound (t:ts)
       | Just n <- getName t,
         n `elem` map fst env = Just t
       | otherwise = locallyBound ts
    getName (PRef _ _ n) = Just n
    getName (PApp _ (PRef _ _ (UN l)) [_, _, arg]) -- ignore Delays
       | l == txt "Delay" = getName (getTm arg)
    getName (PApp _ f _) = getName f
    getName (PHidden t) = getName t
    getName _ = Nothing
-- 'n' is the name at the head of the goal type
pruneByType imp env (P _ n _) goalty ist as
    -- if the goal type is polymorphic, keep everything
    | Nothing <- lookupTyExact n ctxt = as
    -- if the goal type is a ?metavariable, keep everything
    | Just _ <- lookup n (idris_metavars ist) = as
    | otherwise
       = let asV = filter (headIs True n) as
             as' = filter (headIs False n) as in
             case as' of
               [] -> asV
               _ -> as'
  where
    ctxt = tt_ctxt ist
    -- Get the function at the head of the alternative and see if it's
    -- a plausible match against the goal type. Keep if so. Also keep if
    -- there is a possible coercion to the goal type.
    headIs var f (PRef _ _ f') = typeHead var f f'
    headIs var f (PApp _ (PRef _ _ (UN l)) [_, _, arg])
        | l == txt "Delay" = headIs var f (getTm arg)
    headIs var f (PApp _ (PRef _ _ f') _) = typeHead var f f'
    headIs var f (PApp _ f' _) = headIs var f f'
    headIs var f (PPi _ _ _ _ sc) = headIs var f sc
    headIs var f (PHidden t) = headIs var f t
    headIs var f t = True -- keep if it's not an application
    typeHead var f f'
        = -- trace ("Trying " ++ show f' ++ " for " ++ show n) $
          case lookupTyExact f' ctxt of
               Just ty -> case unApply (getRetTy ty) of
                            -- a type constructor other than the goal's
                            -- head can never produce the goal: reject
                            (P _ ctyn _, _) | isTConName ctyn ctxt && not (ctyn == f)
                                  -> False
                            _ -> let ty' = whnf ctxt [] ty in
                                     -- trace ("Trying " ++ show f' ++ " : " ++ show (getRetTy ty') ++ " for " ++ show goalty
                                     --     ++ "\nMATCH: " ++ show (pat, matching (getRetTy ty') goalty)) $
                                     case unApply (getRetTy ty') of
                                          (V _, _) ->
                                             isPlausible ist var env n ty
                                          _ -> matchingTypes imp (getRetTy ty') goalty
                                                || isCoercion (getRetTy ty') goalty
                                            -- May be useful to keep for debugging purposes for a bit:
                                            -- let res = matching (getRetTy ty') goalty in
                                            --     traceWhen (not res)
                                            --        ("Rejecting " ++ show (getRetTy ty', goalty)) res
               _ -> False
    matchingTypes True = matchingHead
    matchingTypes False = matching
    -- If the goal is a constructor, it must match the suggested function type
    matching (P _ ctyn _) (P _ n' _)
         | isTConName n' ctxt && isTConName ctyn ctxt = ctyn == n'
         | otherwise = True
    -- Variables match anything
    matching (V _) _ = True
    matching _ (V _) = True
    matching _ (P _ n _) = not (isTConName n ctxt)
    matching (P _ n _) _ = not (isTConName n ctxt)
    -- Binders are a plausible match, so keep them
    matching (Bind n _ sc) _ = True
    matching _ (Bind n _ sc) = True
    -- If we hit a function name, it's a plausible match
    matching apl@(App _ _ _) apr@(App _ _ _)
        | (P _ fl _, argsl) <- unApply apl,
          (P _ fr _, argsr) <- unApply apr
     = fl == fr && and (zipWith matching argsl argsr)
         || (not (isConName fl ctxt && isConName fr ctxt))
    -- If the application structures aren't easily comparable, it's a
    -- plausible match
    matching (App _ f a) (App _ f' a') = True
    matching (TType _) (TType _) = True
    matching (UType _) (UType _) = True
    matching l r = l == r
    -- In impossible-case mode, only look at the heads (this is to account for
    -- the non type-directed case with 'impossible' - we'd be ruling out
    -- too much and wouldn't find the mismatch we're looking for)
    matchingHead apl@(App _ _ _) apr@(App _ _ _)
        | (P _ fl _, argsl) <- unApply apl,
          (P _ fr _, argsr) <- unApply apr,
          isConName fl ctxt && isConName fr ctxt
     = fl == fr
    matchingHead _ _ = True
    -- Return whether there is a possible coercion between the return type
    -- of an alternative and the goal type
    isCoercion rty gty | (P _ r _, _) <- unApply rty
                            = not (null (getCoercionsBetween r gty))
    isCoercion _ _ = False
    getCoercionsBetween :: Name -> Type -> [Name]
    getCoercionsBetween r goal
       = let cs = getCoercionsTo ist goal in
             findCoercions r cs
        where findCoercions t [] = []
              findCoercions t (n : ns) =
                 let ps = case lookupTy n (tt_ctxt ist) of
                               [ty'] -> let as = map snd (getArgTys (normalise (tt_ctxt ist) [] ty')) in
                                            [n | any useR as]
                               _ -> [] in
                     ps ++ findCoercions t ns
              -- does this argument type return 'r' (the alternative's head)?
              useR ty =
                 case unApply (getRetTy ty) of
                      (P _ t _, _) -> t == r
                      _ -> False
-- goal head is not a reference: nothing to prune on
pruneByType _ _ t _ _ as = as
-- Could the name feasibly be the return type?
-- If there is an interface constraint on the return type, and no implementation
-- in the environment or globally for that name, then no
-- Otherwise, yes
-- (FIXME: This isn't complete, but I'm leaving it here and coming back
-- to it later - just returns 'var' for now. EB)
isPlausible :: IState -> Bool -> Env -> Name -> Type -> Bool
isPlausible ist var env n ty
    = let (hvar, interfaces) = collectConstraints [] [] ty in
          case hvar of
               Nothing -> True
               Just rth -> var -- trace (show (rth, interfaces)) var
  where
    -- Walk the Pi binders, gathering every interface-constraint domain
    -- (paired with the names of its known implementations) and, if the
    -- return type's head is a bound variable, which binder it refers to.
    collectConstraints :: [Name] -> [(Term, [Name])] -> Type ->
                          (Maybe Name, [(Term, [Name])])
    collectConstraints env tcs (Bind n (Pi _ ty _) sc)
        = let tcs' = case unApply ty of
                          (P _ c _, _) ->
                              case lookupCtxtExact c (idris_interfaces ist) of
                                   Just tc -> ((ty, map fst (interface_implementations tc))
                                                   : tcs)
                                   Nothing -> tcs
                          _ -> tcs
                          in
              collectConstraints (n : env) tcs' sc
    collectConstraints env tcs t
        | (V i, _) <- unApply t = (Just (env !! i), tcs)
        | otherwise = (Nothing, tcs)
-- | Use the local elab context to work out the highlighting for a name
-- Locally bound names get a bound-name annotation; names with a global
-- type get a plain name annotation; anything else is an internal error.
findHighlight :: Name -> ElabD OutputAnnotation
findHighlight n = do ctxt <- get_context
                     env <- get_env
                     case lookup n env of
                       Just _ -> return $ AnnBoundName n False
                       Nothing -> case lookupTyExact n ctxt of
                                    Just _ -> return $ AnnName n Nothing Nothing Nothing
                                    Nothing -> lift . tfail . InternalMsg $
                                                 "Can't find name " ++ show n
-- Try again to solve auto implicits
-- Runs proof search on hole 'n'; if the search fails with a
-- recoverable error, re-raise it as CantSolveGoal at the location
-- recorded when the auto implicit was introduced ('failc').
solveAuto :: IState -> Name -> Bool -> (Name, [FailContext]) -> ElabD ()
solveAuto ist fn ambigok (n, failc)
       = do hs <- get_holes
            when (not (null hs)) $ do
              env <- get_env
              g <- goal
              handleError cantsolve (when (n `elem` hs) $ do
                                 focus n
                                 isg <- is_guess -- if it's a guess, we're working on it recursively, so stop
                                 when (not isg) $
                                   proofSearch' ist True ambigok 100 True Nothing fn [] [])
                      (lift $ Error (addLoc failc
                                (CantSolveGoal g (map (\(n, b) -> (n, binderTy b)) env))))
              return ()
  where -- attach the innermost failure context (and the rest of the
        -- stack) as location information on the error
        addLoc (FailContext fc f x : prev) err
           = At fc (ElaboratingArg f x
                     (map (\(FailContext _ f' x') -> (f', x')) prev) err)
        addLoc _ err = err
        -- which errors should be converted to CantSolveGoal here
        cantsolve (CantSolveGoal _ _) = True
        cantsolve (InternalMsg _) = True
        cantsolve (At _ e) = cantsolve e
        cantsolve (Elaborating _ _ _ e) = cantsolve e
        cantsolve (ElaboratingArg _ _ _ e) = cantsolve e
        cantsolve _ = False
-- | Re-attempt resolution of every auto implicit currently recorded
-- in the elaboration state.
solveAutos :: IState -> Name -> Bool -> ElabD ()
solveAutos ist fn ambigok
    = do autos <- get_autos
         mapM_ (\(n, (fc, _)) -> solveAuto ist fn ambigok (n, fc)) autos
-- Decide whether an interface-resolution failure is recoverable.
-- Context wrappers are stripped first; a CantResolve error carries its
-- own recoverability flag, which we honour when elaborating a RHS or a
-- type declaration. Everything else counts as recoverable.
tcRecoverable :: ElabMode -> Err -> Bool
tcRecoverable mode (ElaboratingArg _ _ _ e) = tcRecoverable mode e
tcRecoverable mode (At _ e) = tcRecoverable mode e
tcRecoverable ERHS (CantResolve flag _ _) = flag
tcRecoverable ETyDecl (CantResolve flag _ _) = flag
tcRecoverable _ _ = True
-- Convenience wrappers that run the standard tactics with the main
-- elaborator ('elab', at top level, RHS mode) as their sub-elaborator.
trivial' ist
    = trivial (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
trivialHoles' psn h ist
    = trivialHoles psn h (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
-- resolve outstanding unification problems before searching
proofSearch' ist rec ambigok depth prv top n psns hints
    = do unifyProblems
         proofSearch rec prv ambigok (not prv) depth
                     (elab ist toplevel ERHS [] (sMN 0 "tac")) top n psns hints ist
resolveTC' di mv depth tm n ist
    = resolveTC di mv depth tm n (elab ist toplevel ERHS [] (sMN 0 "tac")) ist
-- Collect every deferred definition ('GHole' binder) in a term into
-- the state, recording its arity, enclosing top-level name, type and
-- pattern-variable names. The GHole binder itself is removed from the
-- returned term.
collectDeferred :: Maybe Name -> [Name] -> Context ->
                   Term -> State [(Name, (Int, Maybe Name, Type, [Name]))] Term
collectDeferred top casenames ctxt tm = cd [] tm
  where
    cd env (Bind n (GHole i psns t) app) =
        do ds <- get
           -- the deferred type may itself contain deferred names
           t' <- collectDeferred top casenames ctxt t
           -- only record the first occurrence of each deferred name
           when (not (n `elem` map fst ds)) $ put (ds ++ [(n, (i, top, t', psns))])
           cd env app
    cd env (Bind n b t)
         = do b' <- cdb b
              t' <- cd ((n, b) : env) t
              return (Bind n b' t')
      where
        -- Let and Guess binders carry a value as well as a type
        cdb (Let t v) = liftM2 Let (cd env t) (cd env v)
        cdb (Guess t v) = liftM2 Guess (cd env t) (cd env v)
        cdb b = do ty' <- cd env (binderTy b)
                   return (b { binderTy = ty' })
    cd env (App s f a) = liftM2 (App s) (cd env f)
                                        (cd env a)
    cd env t = return t
-- | Run a case split (or induction, when 'ind' is True) over the term
-- 'tm' as a tactic; 'autoSolve' additionally solves all goals it can.
case_ :: Bool -> Bool -> IState -> Name -> PTerm -> ElabD ()
case_ ind autoSolve ist fn tm = do
  attack
  -- claim a type and value for the scrutinee, and let-bind the value
  -- so the elaborated term can be read back out of the environment
  tyn <- getNameFrom (sMN 0 "ity")
  claim tyn RType
  valn <- getNameFrom (sMN 0 "ival")
  claim valn (Var tyn)
  letn <- getNameFrom (sMN 0 "irule")
  letbind letn (Var tyn) (Var valn)
  focus valn
  elab ist toplevel ERHS [] (sMN 0 "tac") tm
  env <- get_env
  -- NOTE(review): partial pattern - 'letn' was let-bound just above so
  -- the lookup should always succeed, but this would crash otherwise
  let (Just binding) = lookup letn env
  let val = binderVal binding
  if ind then induction (forget val)
         else casetac (forget val)
  when autoSolve solveAll
-- | Compute the appropriate name for a top-level metavariable:
-- qualify an unqualified name with the current namespace (when there
-- is one); names that already carry a namespace are left untouched.
metavarName :: [String] -> Name -> Name
metavarName nspace n =
    case n of
      NS _ _ -> n                       -- already qualified: keep it
      _ -> case nspace of
             (_:_) -> sNS n nspace      -- qualify with the namespace
             []    -> n                 -- no namespace to add
runElabAction :: ElabInfo -> IState -> FC -> Env -> Term -> [String] -> ElabD Term
runElabAction info ist fc env tm ns = do tm' <- eval tm
runTacTm tm'
where
-- fully normalise a term in the ambient environment
eval tm = do ctxt <- get_context
             return $ normaliseAll ctxt env (finalise tm)
-- the reflected unit value '()', used as the result of tactic steps
-- that return nothing interesting
returnUnit = return $ P (DCon 0 0 False) unitCon (P (TCon 0 0) unitTy Erased)
-- strip leading pattern-variable binders, accumulating (name, type)
-- pairs and instantiating each variable in the remaining term
patvars :: [(Name, Term)] -> Term -> ([(Name, Term)], Term)
patvars ns (Bind n (PVar t) sc) = patvars ((n, t) : ns) (instantiate (P Bound n t) sc)
patvars ns tm = (ns, tm)
-- split a checked clause into its pattern variables plus the stripped
-- lhs and rhs
pullVars :: (Term, Term) -> ([(Name, Term)], Term, Term)
pullVars (lhs, rhs) = (fst (patvars [] lhs), snd (patvars [] lhs), snd (patvars [] rhs)) -- TODO alpha-convert rhs
-- Succeed (discarding any state changes) only if the embedded action
-- fails; if it succeeds, fail with the supplied error instead.
requireError :: Err -> ElabD a -> ElabD ()
requireError orErr elab =
  do state <- get
     case runStateT elab state of
       OK (_, state') -> lift (tfail orErr)
       Error e -> return ()
-- create a fake TT term for the LHS of an impossible case
fakeTT :: Raw -> Term
fakeTT (Var n) =
  -- fake the variable's binding-site info if possible
  case lookupNameDef n (tt_ctxt ist) of
    [(n', TyDecl nt _)] -> P nt n' Erased
    _ -> P Ref n Erased
fakeTT (RBind n b body) = Bind n (fmap fakeTT b) (fakeTT body)
fakeTT (RApp f a) = App Complete (fakeTT f) (fakeTT a)
fakeTT RType = TType (UVar [] (-1))
fakeTT (RUType u) = UType u
fakeTT (RConstant c) = Constant c
-- Check and install a reflected function definition: typecheck each
-- clause (verifying that clauses declared impossible really fail to
-- check), build the case tree, and record the clauses for high-level
-- Idris via 'new_tyDecls'.
defineFunction :: RFunDefn Raw -> ElabD ()
defineFunction (RDefineFun n clauses) =
  do ctxt <- get_context
     ty <- maybe (fail "no type decl") return $ lookupTyExact n ctxt
     let info = CaseInfo True True False -- TODO document and figure out
     clauses' <- forM clauses (\case
                   RMkFunClause lhs rhs ->
                     do (lhs', lty) <- lift $ check ctxt [] lhs
                        (rhs', rty) <- lift $ check ctxt [] rhs
                        -- lhs and rhs must have convertible types
                        lift $ converts ctxt [] lty rty
                        return $ Right (lhs', rhs')
                   RMkImpossibleClause lhs ->
                     do requireError (Msg "Not an impossible case") . lift $
                          check ctxt [] lhs
                        return $ Left (fakeTT lhs))
     let clauses'' = map (\case Right c -> pullVars c
                                Left lhs -> let (ns, lhs') = patvars [] lhs
                                            in (ns, lhs', Impossible))
                         clauses'
     let clauses''' = map (\(ns, lhs, rhs) -> (map fst ns, lhs, rhs)) clauses''
     let argtys = map (\x -> (x, isCanonical x ctxt))
                      (map snd (getArgTys (normalise ctxt [] ty)))
     ctxt'<- lift $
               addCasedef n (const [])
                            info False (STerm Erased)
                            True False -- TODO what are these?
                            argtys [] -- TODO inaccessible types
                            clauses'
                            clauses'''
                            clauses'''
                            ty
                            ctxt
     set_context ctxt'
     updateAux $ \e -> e { new_tyDecls = RClausesInstrs n clauses'' : new_tyDecls e}
     return ()
-- typecheck a closed (environment-free) term, forcing the results
checkClosed :: Raw -> Elab' aux (Term, Type)
checkClosed tm = do ctxt <- get_context
                    (val, ty) <- lift $ check ctxt [] tm
                    return $! (finalise val, finalise ty)
-- | Add another argument to a Pi
mkPi :: RFunArg -> Raw -> Raw
mkPi arg rTy = RBind (argName arg) (Pi Nothing (argTy arg) (RUType AllTypes)) rTy
-- fail unless 'ty' normalises to a universe (i.e. 'tm' is a type)
mustBeType ctxt tm ty =
  case normaliseAll ctxt [] (finalise ty) of
    UType _ -> return ()
    TType _ -> return ()
    ty' -> lift . tfail . InternalMsg $
             show tm ++ " is not a type: it's " ++ show ty'
-- fail if 'n' already has a definition in the context
mustNotBeDefined ctxt n =
  case lookupDefExact n ctxt of
    Just _ -> lift . tfail . InternalMsg $
                show n ++ " is already defined."
    Nothing -> return ()
-- | Prepare a constructor to be added to a datatype being defined here
prepareConstructor :: Name -> RConstructorDefn -> ElabD (Name, [PArg], Type)
prepareConstructor tyn (RConstructor cn args resTy) =
  do ctxt <- get_context
     -- ensure the constructor name is not qualified, and
     -- construct a qualified one
     notQualified cn
     let qcn = qualify cn
     -- ensure that the constructor name is not defined already
     mustNotBeDefined ctxt qcn
     -- construct the actual type for the constructor
     let cty = foldr mkPi resTy args
     (checkedTy, ctyTy) <- lift $ check ctxt [] cty
     mustBeType ctxt checkedTy ctyTy
     -- ensure that the constructor builds the right family
     case unApply (getRetTy (normaliseAll ctxt [] (finalise checkedTy))) of
       (P _ n _, _) | n == tyn -> return ()
       t -> lift . tfail . Msg $ "The constructor " ++ show cn ++
                                 " doesn't construct " ++ show tyn ++
                                 " (return type is " ++ show t ++ ")"
     -- add temporary type declaration for constructor (so it can
     -- occur in later constructor types)
     set_context (addTyDecl qcn (DCon 0 0 False) checkedTy ctxt)
     -- Save the implicits for high-level Idris
     let impls = map rFunArgToPArg args
     return (qcn, impls, checkedTy)
  where
    notQualified (NS _ _) = lift . tfail . Msg $ "Constructor names may not be qualified"
    notQualified _ = return ()
    -- give the constructor the type's namespace, if any
    qualify n = case tyn of
                  (NS _ ns) -> NS n ns
                  _ -> n
    -- local helper: strip Pi binders without normalising
    getRetTy :: Type -> Type
    getRetTy (Bind _ (Pi _ _ _) sc) = getRetTy sc
    getRetTy ty = ty
-- | Abort a reflected elaborator script: report that elaboration got
-- stuck on the given term.
elabScriptStuck :: Term -> ElabD a
elabScriptStuck = lift . tfail . ElabScriptStuck
-- Should be dependent
-- | Return the argument list of a reflected tactic term, insisting on
-- an exact arity; if the arity differs, the script is stuck.
tacTmArgs :: Int -> Term -> [Term] -> ElabD [Term]
tacTmArgs arity stuckTm actual =
  if length actual == arity
    then return actual
    else elabScriptStuck stuckTm -- Probably should be an argument size mismatch internal error
-- | Do a step in the reflected elaborator monad. The input is the
-- step, the output is the (reflected) term returned.
--
-- Each guard implements one @Prim__*@ primitive of the reflected
-- elaboration API: the reflected arguments are fetched with
-- 'tacTmArgs' (which checks the arity), reified back into Haskell
-- values, the corresponding elaborator action is run, and the result
-- is reflected into a closed, type-checked term via 'checkClosed'.
runTacTm :: Term -> ElabD Term
runTacTm tac@(unApply -> (P _ n _, args))
  | n == tacN "Prim__Solve"
  = do ~[] <- tacTmArgs 0 tac args -- patterns are irrefutable because `tacTmArgs` returns lists of exactly the size given to it as first argument
       solve
       returnUnit
  | n == tacN "Prim__Goal"
  = do ~[] <- tacTmArgs 0 tac args
       hs <- get_holes
       case hs of
         (h : _) -> do t <- goal
                       fmap fst . checkClosed $
                         rawPair (Var (reflm "TTName"), Var (reflm "TT"))
                                 (reflectName h, reflect t)
         [] -> lift . tfail . Msg $
                 "Elaboration is complete. There are no goals."
  | n == tacN "Prim__Holes"
  = do ~[] <- tacTmArgs 0 tac args
       hs <- get_holes
       fmap fst . checkClosed $
         mkList (Var $ reflm "TTName") (map reflectName hs)
  | n == tacN "Prim__Guess"
  = do ~[] <- tacTmArgs 0 tac args
       g <- get_guess
       fmap fst . checkClosed $ reflect g
  | n == tacN "Prim__LookupTy"
  = do ~[name] <- tacTmArgs 1 tac args
       n' <- reifyTTName name
       ctxt <- get_context
       let getNameTypeAndType = \case Function ty _ -> (Ref, ty)
                                      TyDecl nt ty -> (nt, ty)
                                      Operator ty _ _ -> (Ref, ty)
                                      CaseOp _ ty _ _ _ _ -> (Ref, ty)
           -- Idris tuples nest to the right
           reflectTriple (x, y, z) =
             raw_apply (Var pairCon) [ Var (reflm "TTName")
                                     , raw_apply (Var pairTy) [Var (reflm "NameType"), Var (reflm "TT")]
                                     , x
                                     , raw_apply (Var pairCon) [ Var (reflm "NameType"), Var (reflm "TT")
                                                               , y, z]]
       let defs = [ reflectTriple (reflectName n, reflectNameType nt, reflect ty)
                  | (n, def) <- lookupNameDef n' ctxt
                  , let (nt, ty) = getNameTypeAndType def ]
       fmap fst . checkClosed $
         rawList (raw_apply (Var pairTy) [ Var (reflm "TTName")
                                         , raw_apply (Var pairTy) [ Var (reflm "NameType")
                                                                  , Var (reflm "TT")]])
                 defs
  | n == tacN "Prim__LookupDatatype"
  = do ~[name] <- tacTmArgs 1 tac args
       n' <- reifyTTName name
       datatypes <- get_datatypes
       ctxt <- get_context
       fmap fst . checkClosed $
         rawList (Var (tacN "Datatype"))
                 (map reflectDatatype (buildDatatypes ist n'))
  | n == tacN "Prim__LookupFunDefn"
  = do ~[name] <- tacTmArgs 1 tac args
       n' <- reifyTTName name
       fmap fst . checkClosed $
         rawList (RApp (Var $ tacN "FunDefn") (Var $ reflm "TT"))
                 (map reflectFunDefn (buildFunDefns ist n'))
  | n == tacN "Prim__LookupArgs"
  = do ~[name] <- tacTmArgs 1 tac args
       n' <- reifyTTName name
       let listTy = Var (sNS (sUN "List") ["List", "Prelude"])
           listFunArg = RApp listTy (Var (tacN "FunArg"))
       -- Idris tuples nest to the right
       let reflectTriple (x, y, z) =
             raw_apply (Var pairCon) [ Var (reflm "TTName")
                                     , raw_apply (Var pairTy) [listFunArg, Var (reflm "Raw")]
                                     , x
                                     , raw_apply (Var pairCon) [listFunArg, Var (reflm "Raw")
                                                               , y, z]]
       let out =
             [ reflectTriple (reflectName fn, reflectList (Var (tacN "FunArg")) (map reflectArg args), reflectRaw res)
             | (fn, pargs) <- lookupCtxtName n' (idris_implicits ist)
             , (args, res) <- getArgs pargs . forget <$>
                                maybeToList (lookupTyExact fn (tt_ctxt ist))
             ]
       fmap fst . checkClosed $
         rawList (raw_apply (Var pairTy) [ Var (reflm "TTName")
                                         , raw_apply (Var pairTy) [ RApp listTy
                                                                         (Var (tacN "FunArg"))
                                                                  , Var (reflm "Raw")]])
                 out
  | n == tacN "Prim__SourceLocation"
  = do ~[] <- tacTmArgs 0 tac args
       fmap fst . checkClosed $
         reflectFC fc
  | n == tacN "Prim__Namespace"
  = do ~[] <- tacTmArgs 0 tac args
       fmap fst . checkClosed $
         rawList (RConstant StrType) (map (RConstant . Str) ns)
  | n == tacN "Prim__Env"
  = do ~[] <- tacTmArgs 0 tac args
       env <- get_env
       fmap fst . checkClosed $ reflectEnv env
  | n == tacN "Prim__Fail"
  = do ~[_a, errs] <- tacTmArgs 2 tac args
       errs' <- eval errs
       parts <- reifyReportParts errs'
       lift . tfail $ ReflectionError [parts] (Msg "")
  | n == tacN "Prim__PureElab"
  = do ~[_a, tm] <- tacTmArgs 2 tac args
       return tm
  | n == tacN "Prim__BindElab"
  = do ~[_a, _b, first, andThen] <- tacTmArgs 4 tac args
       -- Run the first action, then apply the continuation to its
       -- (evaluated) result and keep stepping.
       first' <- eval first
       res <- eval =<< runTacTm first'
       next <- eval (App Complete andThen res)
       runTacTm next
  | n == tacN "Prim__Try"
  = do ~[_a, first, alt] <- tacTmArgs 3 tac args
       first' <- eval first
       alt' <- eval alt
       try' (runTacTm first') (runTacTm alt') True
  | n == tacN "Prim__Fill"
  = do ~[raw] <- tacTmArgs 1 tac args
       raw' <- reifyRaw =<< eval raw
       apply raw' []
       returnUnit
  | n == tacN "Prim__Apply" || n == tacN "Prim__MatchApply"
  = do ~[raw, argSpec] <- tacTmArgs 2 tac args
       raw' <- reifyRaw =<< eval raw
       argSpec' <- map (\b -> (b, 0)) <$> reifyList reifyBool argSpec
       let op = if n == tacN "Prim__Apply"
                  then apply
                  else match_apply
       ns <- op raw' argSpec'
       fmap fst . checkClosed $
         rawList (rawPairTy (Var $ reflm "TTName") (Var $ reflm "TTName"))
                 [ rawPair (Var $ reflm "TTName", Var $ reflm "TTName")
                           (reflectName n1, reflectName n2)
                 | (n1, n2) <- ns
                 ]
  | n == tacN "Prim__Gensym"
  = do ~[hint] <- tacTmArgs 1 tac args
       hintStr <- eval hint
       case hintStr of
         Constant (Str h) -> do
           n <- getNameFrom (sMN 0 h)
           fmap fst $ get_type_val (reflectName n)
         _ -> fail "no hint"
  | n == tacN "Prim__Claim"
  = do ~[n, ty] <- tacTmArgs 2 tac args
       n' <- reifyTTName n
       ty' <- reifyRaw ty
       claim n' ty'
       returnUnit
  | n == tacN "Prim__Check"
  = do ~[env', raw] <- tacTmArgs 2 tac args
       env <- reifyEnv env'
       raw' <- reifyRaw =<< eval raw
       ctxt <- get_context
       (tm, ty) <- lift $ check ctxt env raw'
       fmap fst . checkClosed $
         rawPair (Var (reflm "TT"), Var (reflm "TT"))
                 (reflect tm, reflect ty)
  | n == tacN "Prim__Attack"
  = do ~[] <- tacTmArgs 0 tac args
       attack
       returnUnit
  | n == tacN "Prim__Rewrite"
  = do ~[rule] <- tacTmArgs 1 tac args
       r <- reifyRaw rule
       rewrite r
       returnUnit
  | n == tacN "Prim__Focus"
  = do ~[what] <- tacTmArgs 1 tac args
       n' <- reifyTTName what
       hs <- get_holes
       if elem n' hs
         then focus n' >> returnUnit
         else lift . tfail . Msg $ "The name " ++ show n' ++ " does not denote a hole"
  | n == tacN "Prim__Unfocus"
  = do ~[what] <- tacTmArgs 1 tac args
       n' <- reifyTTName what
       movelast n'
       returnUnit
  | n == tacN "Prim__Intro"
  = do ~[mn] <- tacTmArgs 1 tac args
       n <- case fromTTMaybe mn of
              Nothing -> return Nothing
              Just name -> fmap Just $ reifyTTName name
       intro n
       returnUnit
  | n == tacN "Prim__Forall"
  = do ~[n, ty] <- tacTmArgs 2 tac args
       n' <- reifyTTName n
       ty' <- reifyRaw ty
       forall n' Nothing ty'
       returnUnit
  | n == tacN "Prim__PatVar"
  = do ~[n] <- tacTmArgs 1 tac args
       n' <- reifyTTName n
       patvar' n'
       returnUnit
  | n == tacN "Prim__PatBind"
  = do ~[n] <- tacTmArgs 1 tac args
       n' <- reifyTTName n
       patbind n'
       returnUnit
  | n == tacN "Prim__LetBind"
  = do ~[n, ty, tm] <- tacTmArgs 3 tac args
       n' <- reifyTTName n
       ty' <- reifyRaw ty
       tm' <- reifyRaw tm
       letbind n' ty' tm'
       returnUnit
  | n == tacN "Prim__Compute"
  = do ~[] <- tacTmArgs 0 tac args; compute ; returnUnit
  | n == tacN "Prim__Normalise"
  = do ~[env, tm] <- tacTmArgs 2 tac args
       env' <- reifyEnv env
       tm' <- reifyTT tm
       ctxt <- get_context
       let out = normaliseAll ctxt env' (finalise tm')
       fmap fst . checkClosed $ reflect out
  | n == tacN "Prim__Whnf"
  = do ~[tm] <- tacTmArgs 1 tac args
       tm' <- reifyTT tm
       ctxt <- get_context
       fmap fst . checkClosed . reflect $ whnf ctxt [] tm'
  | n == tacN "Prim__Converts"
  = do ~[env, tm1, tm2] <- tacTmArgs 3 tac args
       env' <- reifyEnv env
       tm1' <- reifyTT tm1
       tm2' <- reifyTT tm2
       ctxt <- get_context
       lift $ converts ctxt env' tm1' tm2'
       returnUnit
  | n == tacN "Prim__DeclareType"
  = do ~[decl] <- tacTmArgs 1 tac args
       (RDeclare n args res) <- reifyTyDecl decl
       ctxt <- get_context
       let rty = foldr mkPi res args
       (checked, ty') <- lift $ check ctxt [] rty
       mustBeType ctxt checked ty'
       mustNotBeDefined ctxt n
       let decl = TyDecl Ref checked
           ctxt' = addCtxtDef n decl ctxt
       set_context ctxt'
       updateAux $ \e -> e { new_tyDecls = (RTyDeclInstrs n fc (map rFunArgToPArg args) checked) :
                                             new_tyDecls e }
       returnUnit
  | n == tacN "Prim__DefineFunction"
  = do ~[decl] <- tacTmArgs 1 tac args
       defn <- reifyFunDefn decl
       defineFunction defn
       returnUnit
  | n == tacN "Prim__DeclareDatatype"
  = do ~[decl] <- tacTmArgs 1 tac args
       RDeclare n args resTy <- reifyTyDecl decl
       ctxt <- get_context
       let tcTy = foldr mkPi resTy args
       (checked, ty') <- lift $ check ctxt [] tcTy
       mustBeType ctxt checked ty'
       mustNotBeDefined ctxt n
       let ctxt' = addTyDecl n (TCon 0 0) checked ctxt
       set_context ctxt'
       updateAux $ \e -> e { new_tyDecls = RDatatypeDeclInstrs n (map rFunArgToPArg args) : new_tyDecls e }
       returnUnit
  | n == tacN "Prim__DefineDatatype"
  = do ~[defn] <- tacTmArgs 1 tac args
       RDefineDatatype n ctors <- reifyRDataDefn defn
       ctxt <- get_context
       tyconTy <- case lookupTyExact n ctxt of
                    Just t -> return t
                    Nothing -> lift . tfail . Msg $ "Type not previously declared"
       datatypes <- get_datatypes
       case lookupCtxtName n datatypes of
         [] -> return ()
         _ -> lift . tfail . Msg $ show n ++ " is already defined as a datatype."
       -- Prepare the constructors
       ctors' <- mapM (prepareConstructor n) ctors
       -- Grab a fresh disambiguating tag for the type constructor.
       ttag <- do ES (ps, aux) str prev <- get
                  let i = global_nextname ps
                  put $ ES (ps { global_nextname = global_nextname ps + 1 },
                            aux)
                           str
                           prev
                  return i
       let ctxt' = addDatatype (Data n ttag tyconTy False (map (\(cn, _, cty) -> (cn, cty)) ctors')) ctxt
       set_context ctxt'
       -- the rest happens in a bit
       updateAux $ \e -> e { new_tyDecls = RDatatypeDefnInstrs n tyconTy ctors' : new_tyDecls e }
       returnUnit
  | n == tacN "Prim__AddImplementation"
  = do ~[cls, impl] <- tacTmArgs 2 tac args
       interfaceName <- reifyTTName cls
       implName <- reifyTTName impl
       updateAux $ \e -> e { new_tyDecls = RAddImplementation interfaceName implName :
                                             new_tyDecls e }
       returnUnit
  | n == tacN "Prim__IsTCName"
  = do ~[n] <- tacTmArgs 1 tac args
       n' <- reifyTTName n
       case lookupCtxtExact n' (idris_interfaces ist) of
         Just _ -> fmap fst . checkClosed $ Var (sNS (sUN "True") ["Bool", "Prelude"])
         Nothing -> fmap fst . checkClosed $ Var (sNS (sUN "False") ["Bool", "Prelude"])
  | n == tacN "Prim__ResolveTC"
  = do ~[fn] <- tacTmArgs 1 tac args
       g <- goal
       fn <- reifyTTName fn
       resolveTC' False True 100 g fn ist
       returnUnit
  | n == tacN "Prim__Search"
  = do ~[depth, hints] <- tacTmArgs 2 tac args
       d <- eval depth
       hints' <- eval hints
       case (d, unList hints') of
         (Constant (I i), Just hs) ->
           do actualHints <- mapM reifyTTName hs
              unifyProblems
              let psElab = elab ist toplevel ERHS [] (sMN 0 "tac")
              proofSearch True True False False i psElab Nothing (sMN 0 "search ") [] actualHints ist
              returnUnit
         (Constant (I _), Nothing ) ->
           lift . tfail . InternalMsg $ "Not a list: " ++ show hints'
         (_, _) -> lift . tfail . InternalMsg $ "Can't reify int " ++ show d
  | n == tacN "Prim__RecursiveElab"
  = do ~[goal, script] <- tacTmArgs 2 tac args
       goal' <- reifyRaw goal
       ctxt <- get_context
       script <- eval script
       (goalTT, goalTy) <- lift $ check ctxt [] goal'
       lift $ isType ctxt [] goalTy
       recH <- getNameFrom (sMN 0 "recElabHole")
       aux <- getAux
       datatypes <- get_datatypes
       env <- get_env
       g_next <- get_global_nextname
       -- Run the sub-script against a fresh proof state, sharing the
       -- name supply with the current proof so names stay unique.
       (ctxt', ES (p, aux') _ _) <-
         do (ES (current_p, _) _ _) <- get
            lift $ runElab aux
                     (do runElabAction info ist fc [] script ns
                         ctxt' <- get_context
                         return ctxt')
                     ((newProof recH (constraintNS info) ctxt datatypes g_next goalTT)
                        { nextname = nextname current_p })
       set_context ctxt'
       let tm_out = getProofTerm (pterm p)
       -- Copy the sub-elaboration's name counters back into our state.
       do (ES (prf, _) s e) <- get
          let p' = prf { nextname = nextname p
                       , global_nextname = global_nextname p
                       }
          put (ES (p', aux') s e)
       env' <- get_env
       (tm, ty, _) <- lift $ recheck (constraintNS info) ctxt' env (forget tm_out) tm_out
       let (tm', ty') = (reflect tm, reflect ty)
       fmap fst . checkClosed $
         rawPair (Var $ reflm "TT", Var $ reflm "TT")
                 (tm', ty')
  | n == tacN "Prim__Metavar"
  = do ~[n] <- tacTmArgs 1 tac args
       n' <- reifyTTName n
       ctxt <- get_context
       ptm <- get_term
       -- See documentation above in the elab case for PMetavar
       let unique_used = getUniqueUsed ctxt ptm
       let mvn = metavarName ns n'
       attack
       defer unique_used mvn
       solve
       returnUnit
  | n == tacN "Prim__Fixity"
  = do ~[op'] <- tacTmArgs 1 tac args
       opTm <- eval op'
       case opTm of
         Constant (Str op) ->
           let opChars = ":!#$%&*+./<=>?@\\^|-~"
               invalidOperators = [":", "=>", "->", "<-", "=", "?=", "|", "**", "==>", "\\", "%", "~", "?", "!"]
               fixities = idris_infixes ist
           in if not (all (flip elem opChars) op) || elem op invalidOperators
                then lift . tfail . Msg $ "'" ++ op ++ "' is not a valid operator name."
                else case nub [f | Fix f someOp <- fixities, someOp == op] of
                       [] -> lift . tfail . Msg $ "No fixity found for operator '" ++ op ++ "'."
                       [f] -> fmap fst . checkClosed $ reflectFixity f
                       many -> lift . tfail . InternalMsg $ "Ambiguous fixity for '" ++ op ++ "'! Found " ++ show many
         _ -> lift . tfail . Msg $ "Not a constant string for an operator name: " ++ show opTm
  | n == tacN "Prim__Debug"
  = do ~[ty, msg] <- tacTmArgs 2 tac args
       msg' <- eval msg
       -- Reify the *evaluated* message (msg'), matching Prim__Fail;
       -- previously the unevaluated msg was reified and msg' unused.
       parts <- reifyReportParts msg'
       debugElaborator parts
runTacTm x = elabScriptStuck x
-- Running tactics directly
-- if a tactic adds unification problems, return an error
-- | Run a high-level 'PTactic' in the elaborator.  When @autoSolve@ is
-- set, trivially-solvable goals are discharged after each step;
-- otherwise any residual unification problems are turned into a
-- CantSolveGoal error.  Implicit bindings from the local environment
-- are added to the tactic's terms before running.
runTac :: Bool -> IState -> Maybe FC -> Name -> PTactic -> ElabD ()
runTac autoSolve ist perhapsFC fn tac
  = do env <- get_env
       g <- goal
       let tac' = fmap (addImplBound ist (map fst env)) tac
       if autoSolve
         then runT tac'
         else no_errors (runT tac')
                (Just (CantSolveGoal g (map (\(n, b) -> (n, binderTy b)) env)))
  where
    -- One clause of runT per tactic constructor.
    runT (Intro []) = do g <- goal
                         attack; intro (bname g)
      where
        bname (Bind n _ _) = Just n
        bname _ = Nothing
    runT (Intro xs) = mapM_ (\x -> do attack; intro (Just x)) xs
    runT Intros = do g <- goal
                     attack;
                     intro (bname g)
                     try' (runT Intros)
                          (return ()) True
      where
        bname (Bind n _ _) = Just n
        bname _ = Nothing
    runT (Exact tm) = do elab ist toplevel ERHS [] (sMN 0 "tac") tm
                         when autoSolve solveAll
    runT (MatchRefine fn)
      = do fnimps <-
             case lookupCtxtName fn (idris_implicits ist) of
               [] -> do a <- envArgs fn
                        return [(fn, a)]
               ns -> return (map (\ (n, a) -> (n, map (const True) a)) ns)
           let tacs = map (\ (fn', imps) ->
                             (match_apply (Var fn') (map (\x -> (x, 0)) imps),
                              fn')) fnimps
           tryAll tacs
           when autoSolve solveAll
      where envArgs n = do e <- get_env
                           case lookup n e of
                             Just t -> return $ map (const False)
                                                    (getArgTys (binderTy t))
                             _ -> return []
    runT (Refine fn [])
      = do fnimps <-
             case lookupCtxtName fn (idris_implicits ist) of
               [] -> do a <- envArgs fn
                        return [(fn, a)]
               ns -> return (map (\ (n, a) -> (n, map isImp a)) ns)
           let tacs = map (\ (fn', imps) ->
                             (apply (Var fn') (map (\x -> (x, 0)) imps),
                              fn')) fnimps
           tryAll tacs
           when autoSolve solveAll
      where isImp (PImp _ _ _ _ _) = True
            isImp _ = False
            envArgs n = do e <- get_env
                           case lookup n e of
                             Just t -> return $ map (const False)
                                                    (getArgTys (binderTy t))
                             _ -> return []
    runT (Refine fn imps) = do ns <- apply (Var fn) (map (\x -> (x,0)) imps)
                               when autoSolve solveAll
    runT DoUnify = do unify_all
                      when autoSolve solveAll
    runT (Claim n tm) = do tmHole <- getNameFrom (sMN 0 "newGoal")
                           claim tmHole RType
                           claim n (Var tmHole)
                           focus tmHole
                           elab ist toplevel ERHS [] (sMN 0 "tac") tm
                           focus n
    runT (Equiv tm) -- let bind tm, then
      = do attack
           tyn <- getNameFrom (sMN 0 "ety")
           claim tyn RType
           valn <- getNameFrom (sMN 0 "eqval")
           claim valn (Var tyn)
           letn <- getNameFrom (sMN 0 "equiv_val")
           letbind letn (Var tyn) (Var valn)
           focus tyn
           elab ist toplevel ERHS [] (sMN 0 "tac") tm
           focus valn
           when autoSolve solveAll
    runT (Rewrite tm) -- to elaborate tm, let bind it, then rewrite by that
      = do attack; -- (h:_) <- get_holes
           tyn <- getNameFrom (sMN 0 "rty")
           -- start_unify h
           claim tyn RType
           valn <- getNameFrom (sMN 0 "rval")
           claim valn (Var tyn)
           letn <- getNameFrom (sMN 0 "rewrite_rule")
           letbind letn (Var tyn) (Var valn)
           focus valn
           elab ist toplevel ERHS [] (sMN 0 "tac") tm
           rewrite (Var letn)
           when autoSolve solveAll
    runT (Induction tm) -- let bind tm, similar to the others
      = case_ True autoSolve ist fn tm
    runT (CaseTac tm)
      = case_ False autoSolve ist fn tm
    runT (LetTac n tm)
      = do attack
           tyn <- getNameFrom (sMN 0 "letty")
           claim tyn RType
           valn <- getNameFrom (sMN 0 "letval")
           claim valn (Var tyn)
           letn <- unique_hole n
           letbind letn (Var tyn) (Var valn)
           focus valn
           elab ist toplevel ERHS [] (sMN 0 "tac") tm
           when autoSolve solveAll
    runT (LetTacTy n ty tm)
      = do attack
           tyn <- getNameFrom (sMN 0 "letty")
           claim tyn RType
           valn <- getNameFrom (sMN 0 "letval")
           claim valn (Var tyn)
           letn <- unique_hole n
           letbind letn (Var tyn) (Var valn)
           focus tyn
           elab ist toplevel ERHS [] (sMN 0 "tac") ty
           focus valn
           elab ist toplevel ERHS [] (sMN 0 "tac") tm
           when autoSolve solveAll
    runT Compute = compute
    runT Trivial = do trivial' ist; when autoSolve solveAll
    runT TCImplementation = runT (Exact (PResolveTC emptyFC))
    runT (ProofSearch rec prover depth top psns hints)
      = do proofSearch' ist rec False depth prover top fn psns hints
           when autoSolve solveAll
    runT (Focus n) = focus n
    runT Unfocus = do hs <- get_holes
                      case hs of
                        [] -> return ()
                        (h : _) -> movelast h
    runT Solve = solve
    runT (Try l r) = do try' (runT l) (runT r) True
    runT (TSeq l r) = do runT l; runT r
    runT (ApplyTactic tm) = do tenv <- get_env -- store the environment
                               tgoal <- goal -- store the goal
                               attack -- let f : List (TTName, Binder TT) -> TT -> Tactic = tm in ...
                               script <- getNameFrom (sMN 0 "script")
                               claim script scriptTy
                               scriptvar <- getNameFrom (sMN 0 "scriptvar" )
                               letbind scriptvar scriptTy (Var script)
                               focus script
                               elab ist toplevel ERHS [] (sMN 0 "tac") tm
                               (script', _) <- get_type_val (Var scriptvar)
                               -- now that we have the script apply
                               -- it to the reflected goal and context
                               restac <- getNameFrom (sMN 0 "restac")
                               claim restac tacticTy
                               focus restac
                               fill (raw_apply (forget script')
                                               [reflectEnv tenv, reflect tgoal])
                               restac' <- get_guess
                               solve
                               -- normalise the result in order to
                               -- reify it
                               ctxt <- get_context
                               env <- get_env
                               let tactic = normalise ctxt env restac'
                               runReflected tactic
      where tacticTy = Var (reflm "Tactic")
            listTy = Var (sNS (sUN "List") ["List", "Prelude"])
            scriptTy = (RBind (sMN 0 "__pi_arg")
                              (Pi Nothing (RApp listTy envTupleType) RType)
                              (RBind (sMN 1 "__pi_arg")
                                     (Pi Nothing (Var $ reflm "TT") RType) tacticTy))
    runT (ByReflection tm) -- run the reflection function 'tm' on the
                           -- goal, then apply the resulting reflected Tactic
      = do tgoal <- goal
           attack
           script <- getNameFrom (sMN 0 "script")
           claim script scriptTy
           scriptvar <- getNameFrom (sMN 0 "scriptvar" )
           letbind scriptvar scriptTy (Var script)
           focus script
           ptm <- get_term
           elab ist toplevel ERHS [] (sMN 0 "tac")
                (PApp emptyFC tm [pexp (delabTy' ist [] tgoal True True True)])
           (script', _) <- get_type_val (Var scriptvar)
           -- now that we have the script apply
           -- it to the reflected goal
           restac <- getNameFrom (sMN 0 "restac")
           claim restac tacticTy
           focus restac
           fill (forget script')
           restac' <- get_guess
           solve
           -- normalise the result in order to
           -- reify it
           ctxt <- get_context
           env <- get_env
           let tactic = normalise ctxt env restac'
           runReflected tactic
      where tacticTy = Var (reflm "Tactic")
            scriptTy = tacticTy
    runT (Reflect v) = do attack -- let x = reflect v in ...
                          tyn <- getNameFrom (sMN 0 "letty")
                          claim tyn RType
                          valn <- getNameFrom (sMN 0 "letval")
                          claim valn (Var tyn)
                          letn <- getNameFrom (sMN 0 "letvar")
                          letbind letn (Var tyn) (Var valn)
                          focus valn
                          elab ist toplevel ERHS [] (sMN 0 "tac") v
                          (value, _) <- get_type_val (Var letn)
                          ctxt <- get_context
                          env <- get_env
                          let value' = normalise ctxt env value
                          runTac autoSolve ist perhapsFC fn (Exact $ PQuote (reflect value'))
    runT (Fill v) = do attack -- let x = fill x in ...
                       tyn <- getNameFrom (sMN 0 "letty")
                       claim tyn RType
                       valn <- getNameFrom (sMN 0 "letval")
                       claim valn (Var tyn)
                       letn <- getNameFrom (sMN 0 "letvar")
                       letbind letn (Var tyn) (Var valn)
                       focus valn
                       elab ist toplevel ERHS [] (sMN 0 "tac") v
                       (value, _) <- get_type_val (Var letn)
                       ctxt <- get_context
                       env <- get_env
                       let value' = normalise ctxt env value
                       rawValue <- reifyRaw value'
                       runTac autoSolve ist perhapsFC fn (Exact $ PQuote rawValue)
    runT (GoalType n tac) = do g <- goal
                               case unApply g of
                                 (P _ n' _, _) ->
                                   if nsroot n' == sUN n
                                     then runT tac
                                     else fail "Wrong goal type"
                                 _ -> fail "Wrong goal type"
    runT ProofState = do g <- goal
                         return ()
    runT Skip = return ()
    runT (TFail err) = lift . tfail $ ReflectionError [err] (Msg "")
    runT SourceFC =
      case perhapsFC of
        Nothing -> lift . tfail $ Msg "There is no source location available."
        Just fc ->
          do fill $ reflectFC fc
             solve
    runT Qed = lift . tfail $ Msg "The qed command is only valid in the interactive prover"
    runT x = fail $ "Not implemented " ++ show x
    -- Reify a reflected Tactic term back into a PTactic and run it.
    runReflected t = do t' <- reify ist t
                        runTac autoSolve ist perhapsFC fn t'
-- | Wrap an error in an 'ElaboratingArg' frame recording which
-- function argument was being elaborated, unless the error (possibly
-- under At/ProofSearchFail wrappers) already carries one.
elaboratingArgErr :: [(Name, Name)] -> Err -> Err
elaboratingArgErr [] err = err
elaboratingArgErr ((f, x) : during) err = fromMaybe err (annotate err)
  where
    annotate (ElaboratingArg _ _ _ _) = Nothing
    annotate (ProofSearchFail e) = fmap ProofSearchFail (annotate e)
    annotate (At fc e) = fmap (At fc) (annotate e)
    annotate e = Just (ElaboratingArg f x during e)
-- | Run an Idris action; if it fails, pass the error through any
-- registered error-reflection handlers (Idris-level functions that
-- rewrite errors) before rethrowing it.
withErrorReflection :: Idris a -> Idris a
withErrorReflection x = idrisCatch x (\ e -> handle e >>= ierror)
  where handle :: Err -> Idris Err
        handle e@(ReflectionError _ _) = do logElab 3 "Skipping reflection of error reflection result"
                                            return e -- Don't do meta-reflection of errors
        handle e@(ReflectionFailed _ _) = do logElab 3 "Skipping reflection of reflection failure"
                                            return e
        -- At and Elaborating are just plumbing - error reflection shouldn't rewrite them
        handle e@(At fc err) = do logElab 3 "Reflecting body of At"
                                  err' <- handle err
                                  return (At fc err')
        handle e@(Elaborating what n ty err) = do logElab 3 "Reflecting body of Elaborating"
                                                  err' <- handle err
                                                  return (Elaborating what n ty err')
        handle e@(ElaboratingArg f a prev err) = do logElab 3 "Reflecting body of ElaboratingArg"
                                                    hs <- getFnHandlers f a
                                                    err' <- if null hs
                                                              then handle err
                                                              else applyHandlers err hs
                                                    return (ElaboratingArg f a prev err')
        -- ProofSearchFail is an internal detail - so don't expose it
        handle (ProofSearchFail e) = handle e
        -- TODO: argument-specific error handlers go here for ElaboratingArg
        handle e = do ist <- getIState
                      logElab 2 "Starting error reflection"
                      logElab 5 (show e)
                      let handlers = idris_errorhandlers ist
                      applyHandlers e handlers
        -- Handlers registered for a specific argument of a specific function.
        getFnHandlers :: Name -> Name -> Idris [Name]
        getFnHandlers f arg = do ist <- getIState
                                 let funHandlers = maybe M.empty id .
                                                     lookupCtxtExact f .
                                                     idris_function_errorhandlers $ ist
                                 return . maybe [] S.toList . M.lookup arg $ funHandlers
        -- Apply each handler to the reflected error, typecheck and
        -- normalise the results, and wrap any produced messages around
        -- the original error.
        applyHandlers e handlers =
          do ist <- getIState
             let err = fmap (errReverse ist) e
             logElab 3 $ "Using reflection handlers " ++
                         concat (intersperse ", " (map show handlers))
             let reports = map (\n -> RApp (Var n) (reflectErr err)) handlers
             -- Typecheck error handlers - if this fails, then something else was wrong earlier!
             handlers <- case mapM (check (tt_ctxt ist) []) reports of
                           Error e -> ierror $ ReflectionFailed "Type error while constructing reflected error" e
                           OK hs -> return hs
             -- Normalize error handler terms to produce the new messages
             -- Need to use 'normaliseAll' since we have to reduce private
             -- names in error handlers too
             ctxt <- getContext
             let results = map (normaliseAll ctxt []) (map fst handlers)
             logElab 3 $ "New error message info: " ++ concat (intersperse " and " (map show results))
             -- For each handler term output, either discard it if it is Nothing or reify it the Haskell equivalent
             let errorpartsTT = mapMaybe unList (mapMaybe fromTTMaybe results)
             errorparts <- case mapM (mapM reifyReportPart) errorpartsTT of
                             Left err -> ierror err
                             Right ok -> return ok
             return $ case errorparts of
                        [] -> e
                        parts -> ReflectionError errorparts e
solveAll = try (do solve; solveAll) (return ())
-- | Do the left-over work after creating declarations in reflected
-- elaborator scripts
--
-- Each 'RDeclInstructions' step recorded during the script is replayed
-- here against the real Idris state: registering implicits, IBC
-- output, datatype metadata, implementations, and the coverage and
-- totality bookkeeping for tactic-generated pattern definitions.
processTacticDecls :: ElabInfo -> [RDeclInstructions] -> Idris ()
processTacticDecls info steps =
  -- The order of steps is important: type declarations might
  -- establish metavars that later function bodies resolve.
  forM_ (reverse steps) $ \case
    RTyDeclInstrs n fc impls ty ->
      do logElab 3 $ "Declaration from tactics: " ++ show n ++ " : " ++ show ty
         logElab 3 $ " It has impls " ++ show impls
         updateIState $ \i -> i { idris_implicits =
                                    addDef n impls (idris_implicits i) }
         addIBC (IBCImp n)
         ds <- checkDef info fc (\_ e -> e) True [(n, (-1, Nothing, ty, []))]
         addIBC (IBCDef n)
         ctxt <- getContext
         case lookupDef n ctxt of
           (TyDecl _ _ : _) ->
             -- If the function isn't defined at the end of the elab script,
             -- then it must be added as a metavariable. This needs guarding
             -- to prevent overwriting case defs with a metavar, if the case
             -- defs come after the type decl in the same script!
             let ds' = map (\(n, (i, top, t, ns)) -> (n, (i, top, t, ns, True, True))) ds
             in addDeferred ds'
           _ -> return ()
    RDatatypeDeclInstrs n impls ->
      do addIBC (IBCDef n)
         updateIState $ \i -> i { idris_implicits = addDef n impls (idris_implicits i) }
         addIBC (IBCImp n)
    RDatatypeDefnInstrs tyn tyconTy ctors ->
      do let cn (n, _, _) = n
             cimpls (_, impls, _) = impls
             cty (_, _, t) = t
         addIBC (IBCDef tyn)
         mapM_ (addIBC . IBCDef . cn) ctors
         ctxt <- getContext
         let params = findParams tyn (normalise ctxt [] tyconTy) (map cty ctors)
         let typeInfo = TI (map cn ctors) False [] params []
         -- implicit precondition to IBCData is that idris_datatypes on the IState is populated.
         -- otherwise writing the IBC just fails silently!
         updateIState $ \i -> i { idris_datatypes =
                                    addDef tyn typeInfo (idris_datatypes i) }
         addIBC (IBCData tyn)
         ttag <- getName -- from AbsSyntax.hs, really returns a disambiguating Int
         let metainf = DataMI params
         addIBC (IBCMetaInformation tyn metainf)
         updateContext (setMetaInformation tyn metainf)
         -- Register each constructor's implicits.
         for_ ctors $ \(cn, impls, _) ->
           do updateIState $ \i -> i { idris_implicits = addDef cn impls (idris_implicits i) }
              addIBC (IBCImp cn)
         -- Totality/positivity checking for each constructor.
         for_ ctors $ \(ctorN, _, _) ->
           do totcheck (NoFC, ctorN)
              ctxt <- tt_ctxt <$> getIState
              case lookupTyExact ctorN ctxt of
                Just cty -> do checkPositive (tyn : map cn ctors) (ctorN, cty)
                               return ()
                Nothing -> return ()
         -- Single-constructor types can be detagged.
         case ctors of
           [ctor] -> do setDetaggable (cn ctor); setDetaggable tyn
                        addIBC (IBCOpt (cn ctor)); addIBC (IBCOpt tyn)
           _ -> return ()
         -- TODO: inaccessible
    RAddImplementation interfaceName implName ->
      do -- The interface resolution machinery relies on a special
         logElab 2 $ "Adding elab script implementation " ++ show implName ++
                     " for " ++ show interfaceName
         addImplementation False True interfaceName implName
         addIBC (IBCImplementation False True interfaceName implName)
    RClausesInstrs n cs ->
      do logElab 3 $ "Pattern-matching definition from tactics: " ++ show n
         solveDeferred emptyFC n
         let lhss = map (\(ns, lhs, _) -> (map fst ns, lhs)) cs
         let fc = fileFC "elab_reflected"
         -- Coverage checking: enumerate possible cases and keep those
         -- that typecheck but match no clause.
         pmissing <-
           do ist <- getIState
              possible <- genClauses fc n lhss
                            (map (\ (ns, lhs) ->
                                    delab' ist lhs True True) lhss)
              missing <- filterM (checkPossible n) possible
              let undef = filter (noMatch ist (map snd lhss)) missing
              return undef
         let tot = if null pmissing
                     then Unchecked -- still need to check recursive calls
                     else Partial NotCovering -- missing cases implies not total
         setTotality n tot
         updateIState $ \i -> i { idris_patdefs =
                                    addDef n (cs, pmissing) $ idris_patdefs i }
         addIBC (IBCDef n)
         ctxt <- getContext
         case lookupDefExact n ctxt of
           Just (CaseOp _ _ _ _ _ cd) ->
             -- Here, we populate the call graph with a list of things
             -- we refer to, so that if they aren't total, the whole
             -- thing won't be.
             let (scargs, sc) = cases_compiletime cd
                 calls = map fst $ findCalls sc scargs
             in do logElab 2 $ "Called names in reflected elab: " ++ show calls
                   addCalls n calls
                   addIBC $ IBCCG n
           Just _ -> return () -- TODO throw internal error
           Nothing -> return ()
         -- checkDeclTotality requires that the call graph be present
         -- before calling it.
         -- TODO: reduce code duplication with Idris.Elab.Clause
         buildSCG (fc, n)
         -- Actually run the totality checker. In the main clause
         -- elaborator, this is deferred until after. Here, we run it
         -- now to get totality information as early as possible.
         tot' <- checkDeclTotality (fc, n)
         setTotality n tot'
         when (tot' /= Unchecked) $ addIBC (IBCTotal n tot')
  where
    -- TODO: see if the code duplication with Idris.Elab.Clause can be
    -- reduced or eliminated.
    -- These are always cases generated by genClauses
    checkPossible :: Name -> PTerm -> Idris Bool
    checkPossible fname lhs_in =
      do ctxt <- getContext
         ist <- getIState
         let lhs = addImplPat ist lhs_in
         let fc = fileFC "elab_reflected_totality"
         case elaborate (constraintNS info) ctxt (idris_datatypes ist) (idris_name ist) (sMN 0 "refPatLHS") infP initEState
                (erun fc (buildTC ist info EImpossible [] fname (allNamesIn lhs_in)
                            (infTerm lhs))) of
           OK (ElabResult lhs' _ _ _ _ _ name', _) ->
             do -- not recursively calling here, because we don't
                -- want to run infinitely many times
                let lhs_tm = orderPats (getInferTerm lhs')
                updateIState $ \i -> i { idris_name = name' }
                case recheck (constraintNS info) ctxt [] (forget lhs_tm) lhs_tm of
                  OK _ -> return True
                  err -> return False
           -- if it's a recoverable error, the case may become possible
           Error err -> return (recoverableCoverage ctxt err)
    -- TODO: Attempt to reduce/eliminate code duplication with Idris.Elab.Clause
    -- True iff the term matches none of the given clause LHSs.
    noMatch i cs tm = all (\x -> case matchClause i (delab' i x True True) tm of
                                   Right _ -> False
                                   Left _ -> True) cs
| eklavya/Idris-dev | src/Idris/Elab/Term.hs | bsd-3-clause | 134,623 | 1,311 | 29 | 57,999 | 26,806 | 15,469 | 11,337 | 2,378 | 240 |
module Baum.Traverse where
-- $Id$
import Baum.Type
import Baum.Order
import Autolib.ToDoc
-- | Pick the traversal that belongs to the given ordering.
traverse :: Order -> Term a c -> [ c ]
traverse Pre   = preorder
traverse In    = inorder
traverse Post  = postorder
traverse Level = levelorder
-- | Render the header line @<order>order (<doc>) =@ for a traversal.
announce :: Order -> Doc -> Doc
announce ord body =
  hsep [ toDoc ord <> text "order"
       , parens body
       , equals
       ]
-- | Root first, then the children's preorders left to right.
preorder :: Term a c -> [c]
preorder (Node f args) = f : concatMap preorder args
-- | Children's postorders left to right, then the root.
postorder :: Term a c -> [c]
postorder (Node f args) = concatMap postorder args ++ [ f ]
-- | only for binary trees
--
-- Deliberately partial: nodes with an arity other than 0 or 2 have no
-- meaningful inorder and cause a pattern-match failure.
inorder :: Term a c -> [c]
inorder (Node f []) = [ f ]
inorder (Node f [l,r]) = inorder l ++ [ f ] ++ inorder r
-- | Breadth-first traversal via a lazily knot-tied work list: @ts@ is
-- the queue of subtrees, defined in terms of itself by appending each
-- visited node's children.
levelorder :: Term a c -> [c]
levelorder t =
    let ts = t : concat ( map children ts )
    in take (size t) -- to avoid the black hole at the end
       $ map top ts
| Erdwolf/autotool-bonn | src/Baum/Traverse.hs | gpl-2.0 | 844 | 2 | 12 | 227 | 379 | 195 | 184 | 24 | 4 |
{-# LANGUAGE TupleSections #-}
-- | Test examples from RFC 6902 sections A.1 to A.16.
module Main where
import Control.Applicative
import Control.Exception
import Control.Monad
import Data.Aeson
import Data.Aeson.Diff
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Char
import Data.Either
import Data.Functor
import Data.List (isInfixOf, nub)
import Data.Maybe
import Data.Monoid
import System.Directory
import System.Environment
import System.Exit
import System.FilePath
import System.FilePath.Glob
-- | Directories scanned for test cases: the RFC 6902 appendix examples
-- plus additional local cases.
roots :: [FilePath]
roots = ["test/data/rfc6902", "test/data/cases"]
-- | Glob pattern used to enumerate test-case files under 'roots'.
globPattern :: FilePath
globPattern = "*.*"
-- | Abort the test run by throwing an 'AssertionFailed' exception
-- carrying the (space-prefixed) message.
derp :: String -> a
derp = throw . AssertionFailed . (" " <>)
-- | Read and decode the original JSON document of a test case from
-- @<root>/<name>-original.json@; abort the run if it does not parse.
readDocument :: FilePath -> FilePath -> IO Value
readDocument root name = do
  let file = root </> name <> "-original.json"
  parsed <- eitherDecodeStrict <$> BS.readFile file
  case parsed of
    Left e    -> derp $ "Could not decode document: " <> e
    Right doc -> return doc
-- | Read and decode the patch of a test case from
-- @<root>/<name>-patch.json@; a parse failure is returned as 'Left'.
readPatch :: FilePath -> FilePath -> IO (Either String Patch)
readPatch root name =
  eitherDecodeStrict <$> BS.readFile (root </> name <> "-patch.json")
-- | Read the expected outcome of a test case: either an expected error
-- message (from @<name>-error.txt@) or an expected result document (from
-- @<name>-result.json@). Exactly one of the two files must exist.
readResult :: FilePath -> FilePath -> IO (Either String Value)
readResult root name = do
    let err_path = root </> name <> "-error.txt"
    let doc_path = root </> name <> "-result.json"
    -- Trim surrounding whitespace from the error message, when present.
    err <- catch (Just . BC.unpack . BC.dropWhile isSpace . fst . BC.spanEnd isSpace
                 <$> BS.readFile err_path) handle
    doc <- catch (decodeStrict <$> BS.readFile doc_path) handle
    case (err, doc) of
        (Nothing, Just d)  -> return (Right d)
        (Just er, Nothing) -> return (Left er)
        (Just _,  Just _)  -> derp "Expecting either an error or a success, not both"
        (Nothing, Nothing) -> derp "No result defined; add `*-error.txt' or `*-result.json'"
  where
    -- A missing (or unreadable) file simply means "not defined".
    handle :: IOException -> IO (Maybe a)
    handle _ = return Nothing
-- | Load the document, the patch, and the expected outcome for a test case.
readExample :: FilePath -> FilePath -> IO (Value, Either String Patch, Either String Value)
readExample root name = do
    doc <- readDocument root name
    p   <- readPatch root name
    res <- readResult root name
    return (doc, p, res)
-- | Check example and, if it fails, return an error message.
--
-- Four cases: the patch failed to load and we expected an error (pass if
-- the messages overlap); the patch failed to load unexpectedly; the patch
-- loaded and we expected a result document; the patch loaded and we
-- expected applying it to fail with a specific message.
runExample :: (Value, Either String Patch, Either String Value) -> Maybe String
runExample (doc, diff, res) =
    case (diff, res) of
        (Left perr, Left err)
            | err `isInfixOf` perr -> success "Patch has expected error."
            | perr `isInfixOf` err -> success "Patch has expected error."
            | otherwise -> failure ("Unexpected error `" <> perr <> "' was not '" <> err <> "'.")
        (Left err, Right _) ->
            failure ("Couldn't load patch: " <> err)
        -- Fresh names avoid shadowing the outer 'diff' and 'res'.
        (Right p, Right expected) ->
            case patch p doc of
                Success dest
                    | dest == expected -> success "Result matches target"
                    | otherwise -> failure ("Result document did not match: " <> BL.unpack (encode dest))
                Error dest -> failure ("Couldn't apply patch " <> dest)
        (Right p, Left err) ->
            case patch p doc of
                Success _ -> Just "Test Fails - Expected a failure but patch succeeded."
                Error msg
                    | msg /= err -> Just $ "Test Fails - Got: " <> msg <> "\nExpected: " <> err
                    | otherwise -> Nothing
  where
    -- The message argument is documentation only; success reports Nothing.
    success _ = Nothing
    failure n = Just ("Test Fails - " <> n)
-- | Run one test case, converting an 'AssertionFailed' abort (thrown by
-- 'derp') into a failure message.
testExample :: FilePath -> FilePath -> IO (Maybe String)
testExample root name = do
    r <- try (runExample <$> readExample root name)
    case r of
        Left e       -> return (Just ("Error: " <> show (e :: AssertionFailed)))
        Right result -> return result
-- | Discover and run every test case found under the given directory.
-- A case name is everything before the first '-' of a matching file name.
runSuite :: FilePath -> IO [(FilePath, Maybe String)]
runSuite root = do
    entries <- getDirectoryContents root
    let pat   = simplify (compile globPattern)
        names = nub (fmap (takeWhile (/= '-')) (filter (match pat) entries))
    forM names $ \nom -> do
        outcome <- testExample root nom
        return (nom, outcome)
-- | Run every suite (paths from the command line, or the default roots),
-- report each case, and exit non-zero when anything failed.
main :: IO ()
main = do
    args <- getArgs
    let suites = if null args then roots else args
    results <- concat <$> mapM runSuite suites
    mapM_ display results
    -- Failure.
    when (any (isJust . snd) results)
        exitFailure
  where
    display :: (FilePath, Maybe String) -> IO ()
    display (name, Nothing)  = putStrLn ("SUCCESS: " <> name)
    display (name, Just err) = putStrLn ("FAILURE: " <> name <> ": " <> err)
| thsutton/aeson-diff | test/examples.hs | bsd-2-clause | 4,694 | 0 | 17 | 1,340 | 1,453 | 737 | 716 | 101 | 6 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DeriveAnyClass #-}
-- | GZip compression
module Haskus.Format.Compression.GZip
( Member(..)
, Flag(..)
, Flags
, decompressGet
, decompress
)
where
import Data.Foldable (toList)
import qualified Haskus.Format.Compression.Algorithms.Deflate as D
import Haskus.Format.Binary.Get as Get
import Haskus.Format.Binary.Bits.Order
import Haskus.Format.Binary.Buffer
import Haskus.Format.Binary.Word
import Haskus.Format.Binary.BitSet (BitSet,CBitSet)
import qualified Haskus.Format.Binary.BitSet as BitSet
import qualified Haskus.Format.Text as Text
import Haskus.Format.Text (Text,getTextUtf8Nul,unpack)
import Haskus.Format.Text (textFormat,hex,(%))
import Haskus.Utils.Flow (when)
-- | Member file: one compressed file stored in a GZip archive
data Member = Member
   { memberFlags :: Flags         -- ^ header flags (FLG byte)
   , memberTime :: Word32         -- ^ modification time (MTIME)
   , memberExtraFlags :: Word8    -- ^ extra flags (XFL)
   , memberOS :: Word8            -- ^ originating operating system code
   , memberName :: Text           -- ^ original file name (empty if absent)
   , memberComment :: Text        -- ^ comment field (empty if absent)
   , memberContent :: Buffer      -- ^ decompressed member contents
   , memberCRC :: Word16          -- ^ header CRC16 (0 if absent)
   , memberCRC32 :: Word32        -- ^ CRC32 of the uncompressed data
   , memberSize :: Word32 -- ^ uncompressed input size (modulo 2^32, the ISIZE field)
   }
   deriving (Show)
-- | Decompress every member of the GZip archive held in the buffer
decompress :: Buffer -> [Member]
decompress buf = runGetOrFail decompressGet buf
-- | Parser that reads archive members until the input is exhausted
decompressGet :: Get [Member]
decompressGet = go []
   where
      go ms = do
         done <- Get.isEmpty
         if done
            then return (reverse ms)
            else do
               m <- getMember
               go (m:ms)
-- | Get a member of the archive.
--
-- Header layout (per the GZip format, RFC 1952): magic bytes 0x1f 0x8b,
-- compression method (8 = DEFLATE), flag byte, MTIME, XFL, OS, then the
-- optional fields in this exact order: FEXTRA, FNAME, FCOMMENT, FHCRC.
-- The DEFLATE payload is followed by the CRC32 and ISIZE trailer words.
getMember :: Get Member
getMember = do
   id1 <- getWord8
   id2 <- getWord8
   when (id1 /= 0x1f || id2 /= 0x8b) $
      error $ unpack $ textFormat ("Invalid archive file: " % hex % " " % hex) id1 id2
   comp <- getWord8
   when (comp /= 8) $
      error "Unknown compression method"
   flags <- BitSet.fromBits <$> getWord8
   mtime <- getWord32le -- modification time
   xfl <- getWord8 -- extra flags
   os <- getWord8 -- os
   -- FEXTRA: length-prefixed extra field; skipped, not interpreted
   when (BitSet.member flags FlagExtra) $ do
      xlen <- getWord16le
      skip (fromIntegral xlen)
   -- FNAME / FCOMMENT: NUL-terminated strings, empty when absent
   name <- if BitSet.member flags FlagName
            then getTextUtf8Nul
            else return Text.empty
   comment <- if BitSet.member flags FlagComment
            then getTextUtf8Nul
            else return Text.empty
   -- FHCRC: header CRC16, defaulting to 0 when absent
   crc <- if BitSet.member flags FlagCRC
            then getWord16le
            else return 0
   -- Inflate the payload, then read the trailer
   getBitGet BB D.decompress $ \content -> do
      crc32 <- getWord32le
      isize <- getWord32le
      return $ Member flags mtime xfl os name comment
         (bufferPackByteList (toList content)) crc crc32 isize
-- | Information flag
--
-- NOTE(review): the constructor order fixes the bit position used by the
-- 'BitSet' ('FlagText' = bit 0 ... 'FlagComment' = bit 4), matching the
-- FLG byte layout of the GZip format. Do not reorder the constructors.
data Flag
   = FlagText
   | FlagCRC
   | FlagExtra
   | FlagName
   | FlagComment
   deriving (Show,Eq,Enum,CBitSet)
-- | Flags: the FLG byte decoded as a bit set
type Flags = BitSet Word8 Flag
| hsyl20/ViperVM | haskus-system/src/lib/Haskus/Format/Compression/GZip.hs | bsd-3-clause | 2,860 | 0 | 16 | 751 | 738 | 413 | 325 | 79 | 4 |
module Language.Haskell.Liquid.UX.Server (getType) where
-- import Control.Monad ((<<))
import Language.Haskell.Liquid.Types (Output(..))
import qualified Language.Haskell.Liquid.UX.ACSS as A
import Text.PrettyPrint.HughesPJ hiding (Mode)
import Language.Fixpoint.Utils.Files
import System.Directory
import Data.Time.Clock (UTCTime)
import qualified Control.Exception as Ex
import Data.Aeson
import qualified Data.ByteString.Lazy as B
-- data Time = TimeTodo deriving (Eq, Ord, Show)
-- | Answer a type query at @line@/@col@ of @srcF@. The checker action @k@
-- is only re-run when the cached annotations are stale or missing.
getType :: IO (Output Doc) -> FilePath -> Int -> Int -> IO String
getType k srcF line col = do
  plan <- action srcF
  case plan of
    NoSource -> return "Missing Source"
    Reuse    -> getTypeInfo line col <$> getAnnMap srcF
    Rebuild  -> getTypeInfo line col <$> (k >> getAnnMap srcF)
--------------------------------------------------------------------------------
-- | How to Get Info
--------------------------------------------------------------------------------
-- | Whether the cached annotations can be reused, must be rebuilt, or the
-- source file is missing altogether.
data Action = Rebuild | Reuse | NoSource
-- | Decide what to do by comparing the source file's timestamp with the
-- timestamp of its cached JSON annotation file.
action :: FilePath -> IO Action
action srcF = timeAction <$> modificationTime srcF <*> modificationTime jsonF
  where
    jsonF = extFileName Json srcF
-- | Reuse only when the JSON cache is strictly newer than the source; an
-- up-to-date source with a stale (or missing) cache forces a rebuild; no
-- source timestamp means the source file is absent.
timeAction :: Maybe UTCTime -> Maybe UTCTime -> Action
timeAction (Just srcT) (Just jsonT)
  | srcT < jsonT = Reuse
timeAction (Just _) _ = Rebuild
timeAction Nothing _ = NoSource
-- | Timestamp of a file, or 'Nothing' when it cannot be read.
modificationTime :: FilePath -> IO (Maybe UTCTime)
modificationTime f = fmap Just (getModificationTime f) `Ex.catch` handler
  where
    handler :: IOError -> IO (Maybe UTCTime)
    handler _ = return Nothing
--------------------------------------------------------------------------------
-- | Look up the type at a line/column position in the annotation map.
-- The successful-lookup branch is still a stub; wildcards make the unused
-- arguments explicit (the named bindings triggered -Wall warnings).
getTypeInfo :: Int -> Int -> Maybe A.AnnMap -> String
getTypeInfo _ _ Nothing = "ERROR: corrupt annotation info"
getTypeInfo _ _ (Just _) = error "TODO: getTypeInfo"
-- | Decode the cached JSON annotation map of a source file, if possible.
getAnnMap :: FilePath -> IO (Maybe A.AnnMap)
getAnnMap srcF = decode <$> B.readFile (extFileName Json srcF)
| abakst/liquidhaskell | src/Language/Haskell/Liquid/UX/Server.hs | bsd-3-clause | 2,030 | 0 | 13 | 395 | 538 | 287 | 251 | 36 | 3 |
module Test (testArm8, testArm11, testIntel7, testIntel8, testIntel9,
testTexasInstrument7, testTexasInstrument8) where
import System.FilePath
import Control.Monad
import Tuura.Encode
import Tuura.Code
import Tuura.Synthesis
import Tuura.TechnologyMapping
import Tuura.Graph
import Tuura.Library
-- | Directory holding the test fixtures.
testPath :: FilePath
testPath = "test"
-- | 90nm gate library used for technology mapping.
techLibPath :: FilePath
techLibPath = (testPath </> "90nm.genlib")
-- Each benchmark below announces itself and then runs the full encoding
-- pipeline (see 'runTests') on a named set of partial orders shipped in
-- the test directory.
testArm8 :: IO ()
testArm8 = do
    putStrLn "========== ARM Cortex M0+ (8 Partial orders)"
    runTests "arm_8" "arm_8"
testArm11 :: IO ()
testArm11 = do
    putStrLn "========== ARM Cortex M0+ (11 Partial orders)"
    runTests "arm_11" "arm_11"
testIntel7 :: IO ()
testIntel7 = do
    putStrLn "========== Intel 8051 (7 Partial orders)"
    runTests "Intel8051_7" "Intel8051_7"
testIntel8 :: IO ()
testIntel8 = do
    putStrLn "========== Intel 8051 (8 Partial orders)"
    runTests "Intel8051_8" "Intel8051_8"
testIntel9 :: IO ()
testIntel9 = do
    putStrLn "========== Intel 8051 (9 Partial orders)"
    runTests "Intel8051_9" "Intel8051_9"
testTexasInstrument7 :: IO ()
testTexasInstrument7 = do
    putStrLn "========== Texas Instrument MSP 430 (7 Partial orders)"
    runTests "TI_MSP_430_7" "TI_MSP_430_7"
testTexasInstrument8 :: IO ()
testTexasInstrument8 = do
    putStrLn "========== Texas Instrument MSP 430 (8 Partial orders)"
    runTests "TI_MSP_430_8" "TI_MSP_430_8"
-- | Run every encoding strategy on one benchmark: load the graphs and
-- opcode constraints, try each encoder, and unload afterwards. The load
-- must precede and the unload must follow all encoder calls.
-- NOTE(review): the trailing commented-out arguments look like intended
-- Verilog output paths — confirm before re-enabling.
runTests :: FilePath -> FilePath -> IO ()
runTests cpog codes = do
    let codesPath = (testPath </> codes <.> "opcodes")
        codesFile = loadCodes codesPath
        graphsPath = (testPath </> cpog <.> "cpog")
    codeConstraints <- parseCustomCode codesFile
    loadTest graphsPath codesPath
    testSingleLiteral graphsPath
    testSequential graphsPath --("sequential_literal" <.> "v")
    testRandom graphsPath codeConstraints --("random_literal" <.> "v")
    testHeuristic graphsPath codeConstraints --("heuristic_literal" <.> "v")
    unloadTest
-- | Load the graphs and the opcode constraints into the encoding engine.
loadTest :: FilePath -> FilePath -> IO ()
loadTest cpogFile codesFile = do
    result <- loadGraphsAndCodes (loadGraph cpogFile) (loadCodes codesFile)
    check (readError result) "Graphs and codes loaded" "Error loading graphs"
-- | Exercise the single-literal encoding strategy, then synthesise and
-- map the resulting controller.
testSingleLiteral :: FilePath -> IO ()
testSingleLiteral graphsPath = do
    outcome <- encodeGraphs SingleLiteral Nothing
    check (readError outcome) "Single literal encoding: OK" "Single literal encoding: ERROR"
    getCodes >>= assertSynthesisAndMapping graphsPath
-- | Exercise the sequential encoding strategy, then synthesise and map
-- the resulting controller.
testSequential :: FilePath -> IO ()
testSequential graphsPath = do
    outcome <- encodeGraphs Sequential Nothing
    check (readError outcome) "Sequential encoding: OK" "Sequential encoding: ERROR"
    getCodes >>= assertSynthesisAndMapping graphsPath
-- | Exercise the random encoding strategy (10 attempts), validate the
-- codes against the constraints, then synthesise and map the result.
testRandom :: FilePath -> [CodeWithUnknowns] -> IO ()
testRandom graphsPath codeConstraints = do
    encoded <- encode Random (Just 10)
    putStr "Random encoding: "
    shouldMeet encoded codeConstraints
    assertSynthesisAndMapping graphsPath encoded
-- | Exercise the heuristic encoding strategy (10 attempts), validate the
-- codes against the constraints, then synthesise and map the result.
testHeuristic :: FilePath -> [CodeWithUnknowns] -> IO ()
testHeuristic graphsPath codeConstraints = do
    encoded <- encode Heuristic (Just 10)
    putStr "Heuristic encoding: "
    shouldMeet encoded codeConstraints
    assertSynthesisAndMapping graphsPath encoded
-- | Release the graphs and codes held by the encoding engine.
unloadTest :: IO ()
unloadTest = do
    result <- unloadGraphsAndCodes
    check (readError result) "Graphs and codes unloaded" "Error unloading graphs"
-- | Synthesise a controller for the graphs under the given opcodes, map
-- it onto the 90nm gate library, and report the estimated area.
assertSynthesisAndMapping :: FilePath -> [CodeWithoutUnknowns] -> IO ()
assertSynthesisAndMapping graphs codes = do
    let graphsFile = loadGraph graphs
        libFile = loadLibrary techLibPath
    formulae <- synthesiseControllerIO graphsFile codes
    area <- estimateArea libFile formulae
    let size = parseArea area
    putStrLn ("\tArea of the controller: " ++ show size)
    -- Verilog generation is currently disabled; kept for reference.
    -- resultV <- writeVerilog libFile formulae verilogPath
    -- let err = readError resultV
    -- check err "\tVerilog file generation: OK" "Verilog file generation: ERROR"
-- | Print the success message when the engine's return code is zero;
-- otherwise abort with the error message and the offending code.
check :: Int -> String -> String -> IO ()
check result msgOk msgError =
    if result == 0
        then putStrLn msgOk
        else error (msgError ++ " (code " ++ show result ++ ")")
-- | Validate each produced code against its constraint, aborting on the
-- first mismatch or on a length disagreement between the two lists.
shouldMeet :: [CodeWithoutUnknowns] -> [CodeWithUnknowns] -> IO ()
shouldMeet []     []     = putStrLn "Valid encoding"
shouldMeet xs     []     = error $ "Extra codes found " ++ show xs
shouldMeet []     ys     = error $ "Missing codes for " ++ show ys
shouldMeet (x:xs) (y:ys) = do
    let outcome = validate y x
    unless (outcome == Valid) . error $
        show outcome ++ ": " ++ show y ++ " => " ++ show x
    shouldMeet xs ys
| allegroCoder/scenco-1 | Test.hs | bsd-3-clause | 4,732 | 0 | 15 | 910 | 1,191 | 561 | 630 | 113 | 1 |
module Examples where
import UU.Parsing
import UU.Parsing.CharParser
-- Single-character parsers used by the examples below.
a = pSym 'a'
b = pSym 'b'
c = pSym 'c'
-- | Run parser @p@ on input @inp@ and print the parse result.
test p inp = parseIO p inp >>= print
-- Example runs; several feed deliberately non-matching input to
-- demonstrate the library's error correction (e.g. tb, t4).
ta = test a "a"
tb = test b "a"
tc = test c "abc"
t3 = test (pToks "xyz" ) "xy"
t4 = test (pToks "xyz" ) "xz"
-- A lowercase letter and identifiers built from them.
pChar = 'a' <..> 'z'
pIdent = pList pChar
-- Two competing interpretations of the input "if".
if_as_ident = ((("This is the identifier: ") ++) <$> pIdent)
if_as_keyword = ((("This is the keyword: ") ++) <$> pToks "if")
t5 = test if_as_ident "if"
t6 = test if_as_keyword "if"
-- Both alternatives carry the same cost, exposing the ambiguity.
t7 = test (if_as_ident <* pCost 2 <|> if_as_keyword <* pCost 2) "if"
| guillep19/uulib | src/UU/Parsing/Examples.hs | bsd-3-clause | 604 | 0 | 10 | 145 | 244 | 127 | 117 | 20 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Ranged.Boundaries
-- Copyright : (c) Paul Johnson 2006
-- License : BSD-style
-- Maintainer : paul@cogito.org.uk
-- Stability : experimental
-- Portability : portable
--
-----------------------------------------------------------------------------
module Data.Ranged.Boundaries (
DiscreteOrdered (..),
enumAdjacent,
boundedAdjacent,
boundedBelow,
Boundary (..),
above,
(/>/)
) where
import Data.Ratio
import Data.Word
import Test.QuickCheck
infix 4 />/
{- |
Distinguish between dense and sparse ordered types. A dense type is
one in which any two values @v1 < v2@ have a third value @v3@ such that
@v1 < v3 < v2@.
In theory the floating types are dense, although in practice they can only have
finitely many values. This class treats them as dense.
Tuples up to 4 members are declared as instances. Larger tuples may be added
if necessary.
Most values of sparse types have an @adjacentBelow@, such that, for all x:
> case adjacentBelow x of
> Just x1 -> adjacent x1 x
> Nothing -> True
The exception is for bounded types when @x == lowerBound@. For dense types
@adjacentBelow@ always returns 'Nothing'.
This approach was suggested by Ben Rudiak-Gould on comp.lang.functional.
-}
class Ord a => DiscreteOrdered a where
   -- | Two values @x@ and @y@ are adjacent if @x < y@ and there does not
   -- exist a third value between them. Always @False@ for dense types.
   adjacent :: a -> a -> Bool
   -- | The value immediately below the argument, if it can be determined.
   -- Always 'Nothing' for dense types and at the lower bound of bounded
   -- types.
   adjacentBelow :: a -> Maybe a
-- Implementation note: the precise rules about unbounded enumerated vs
-- bounded enumerated types are difficult to express using Haskell 98, so
-- the prelude types are listed individually here.
-- Sparse bounded types reuse the generic bounded helpers.
instance DiscreteOrdered Bool where
    adjacent = boundedAdjacent
    adjacentBelow = boundedBelow
instance DiscreteOrdered Ordering where
    adjacent = boundedAdjacent
    adjacentBelow = boundedBelow
instance DiscreteOrdered Char where
    adjacent = boundedAdjacent
    adjacentBelow = boundedBelow
instance DiscreteOrdered Int where
    adjacent = boundedAdjacent
    adjacentBelow = boundedBelow
-- Integer is unbounded, so 'pred' is always defined.
instance DiscreteOrdered Integer where
    adjacent = enumAdjacent
    adjacentBelow = Just . pred
-- Dense types: no two values are ever adjacent.
instance DiscreteOrdered Double where
    adjacent _ _ = False
    adjacentBelow = const Nothing
instance DiscreteOrdered Float where
    adjacent _ _ = False
    adjacentBelow = const Nothing
instance (Integral a) => DiscreteOrdered (Ratio a) where
    adjacent _ _ = False
    adjacentBelow = const Nothing
-- Lists under lexicographic order are dense (e.g. xs < xs ++ [y] with
-- nothing forced between arbitrary neighbours), so treat them as dense.
instance Ord a => DiscreteOrdered [a] where
    adjacent _ _ = False
    adjacentBelow = const Nothing
-- Tuples are adjacent when every component but the last is equal and the
-- last components are adjacent; 'adjacentBelow' likewise only steps the
-- last component.
instance (Ord a, DiscreteOrdered b) => DiscreteOrdered (a, b)
   where
   adjacent (x1, x2) (y1, y2) = (x1 == y1) && adjacent x2 y2
   adjacentBelow (x1, x2) = do -- Maybe monad
      x2' <- adjacentBelow x2
      return (x1, x2')
instance (Ord a, Ord b, DiscreteOrdered c) => DiscreteOrdered (a, b, c)
   where
   adjacent (x1, x2, x3) (y1, y2, y3) =
      (x1 == y1) && (x2 == y2) && adjacent x3 y3
   adjacentBelow (x1, x2, x3) = do -- Maybe monad
      x3' <- adjacentBelow x3
      return (x1, x2, x3')
instance (Ord a, Ord b, Ord c, DiscreteOrdered d) =>
      DiscreteOrdered (a, b, c, d)
   where
   adjacent (x1, x2, x3, x4) (y1, y2, y3, y4) =
      (x1 == y1) && (x2 == y2) && (x3 == y3) && adjacent x4 y4
   adjacentBelow (x1, x2, x3, x4) = do -- Maybe monad
      x4' <- adjacentBelow x4
      return (x1, x2, x3, x4')
instance DiscreteOrdered Word8 where
   -- The @x < y@ guard is essential: Word8 addition wraps, so the
   -- previous definition @adjacent x y = x + 1 == y@ wrongly reported
   -- @adjacent maxBound 0@ as True (255 + 1 == 0 :: Word8), violating
   -- the class requirement that adjacent values satisfy @x < y@.
   adjacent x y = x < y && x + 1 == y
   adjacentBelow 0 = Nothing
   adjacentBelow x = Just (x-1)
-- | Adjacency for sparse enumerated types: @y@ must be the direct
-- successor of @x@. Not safe at @maxBound@ (where 'succ' fails), so
-- bounded types should use 'boundedAdjacent' instead.
enumAdjacent :: (Ord a, Enum a) => a -> a -> Bool
enumAdjacent x y = succ x == y
-- | Check adjacency, allowing for the case where @x == maxBound@: the
-- short-circuiting @x < y@ guard ensures 'succ' is never applied to
-- 'maxBound'. Use as the definition of "adjacent" for bounded enumerated
-- types such as Int and Char.
boundedAdjacent :: (Ord a, Enum a) => a -> a -> Bool
boundedAdjacent x y = x < y && succ x == y
-- | The usual implementation of 'adjacentBelow' for bounded enumerated
-- types: 'pred', except at 'minBound' where no lower value exists.
boundedBelow :: (Eq a, Enum a, Bounded a) => a -> Maybe a
boundedBelow x
   | x == minBound = Nothing
   | otherwise     = Just (pred x)
{- |
A Boundary is a division of an ordered type into values above
and below the boundary. No value can sit on a boundary.
Known bug: for Bounded types
* @BoundaryAbove maxBound < BoundaryAboveAll@
* @BoundaryBelow minBound > BoundaryBelowAll@
This is incorrect because there are no possible values in
between the left and right sides of these inequalities.
-}
data Boundary a =
   -- | The argument is the highest value below the boundary.
   BoundaryAbove a |
   -- | The argument is the lowest value above the boundary.
   BoundaryBelow a |
   -- | The boundary above all values.
   BoundaryAboveAll |
   -- | The boundary below all values.
   BoundaryBelowAll
   -- Eq and Ord are defined manually below because adjacent boundaries
   -- (e.g. BoundaryAbove 1 and BoundaryBelow 2 :: Boundary Int) coincide.
   deriving (Show)
-- | True if the value is above the boundary, false otherwise.
above :: Ord v => Boundary v -> v -> Bool
above boundary v = case boundary of
   BoundaryAbove b  -> v > b
   BoundaryBelow b  -> v >= b
   BoundaryAboveAll -> False
   BoundaryBelowAll -> True
-- | Same as 'above', but with the arguments reversed for more intuitive
-- infix usage: @v />/ b@ asks whether @v@ sits above boundary @b@.
(/>/) :: Ord v => v -> Boundary v -> Bool
v />/ b = above b v
-- | Equality is derived from 'compare' so that, e.g., @BoundaryAbove x@
-- equals @BoundaryBelow y@ whenever @x@ and @y@ are adjacent.
instance (DiscreteOrdered a) => Eq (Boundary a) where
   b1 == b2 = compare b1 b2 == EQ
instance (DiscreteOrdered a) => Ord (Boundary a) where
   -- Comparison algorithm based on brute force and ignorance:
   -- enumerate all combinations. The 'adjacent' checks make a boundary
   -- just above x coincide with a boundary just below y when x and y are
   -- adjacent discrete values.
   compare boundary1 boundary2 =
      case boundary1 of
         BoundaryAbove b1 ->
            case boundary2 of
               BoundaryAbove b2 -> compare b1 b2
               BoundaryBelow b2 ->
                  if b1 < b2
                     then
                        if adjacent b1 b2 then EQ else LT
                     else GT
               BoundaryAboveAll -> LT
               BoundaryBelowAll -> GT
         BoundaryBelow b1 ->
            case boundary2 of
               BoundaryAbove b2 ->
                  if b1 > b2
                     then
                        if adjacent b2 b1 then EQ else GT
                     else LT
               BoundaryBelow b2 -> compare b1 b2
               BoundaryAboveAll -> LT
               BoundaryBelowAll -> GT
         BoundaryAboveAll ->
            case boundary2 of
               BoundaryAboveAll -> EQ
               _ -> GT
         BoundaryBelowAll ->
            case boundary2 of
               BoundaryBelowAll -> EQ
               _ -> LT
-- QuickCheck Generator: the two "all" boundaries each get weight 1, the
-- value-carrying boundaries share the remaining weight of 18.
instance Arbitrary a => Arbitrary (Boundary a) where
   arbitrary = frequency [
      (1, return BoundaryAboveAll),
      (1, return BoundaryBelowAll),
      (18, do
         v <- arbitrary
         oneof [return $ BoundaryAbove v, return $ BoundaryBelow v]
      )]
-- Each constructor perturbs the generator with a distinct variant.
instance CoArbitrary a => CoArbitrary (Boundary a) where
   coarbitrary BoundaryBelowAll = variant (0 :: Int)
   coarbitrary BoundaryAboveAll = variant (1 :: Int)
   coarbitrary (BoundaryBelow v) = variant (2 :: Int) . coarbitrary v
   coarbitrary (BoundaryAbove v) = variant (3 :: Int) . coarbitrary v
| Teino1978-Corp/Teino1978-Corp-alex | src/Data/Ranged/Boundaries.hs | bsd-3-clause | 7,442 | 0 | 14 | 1,998 | 1,608 | 865 | 743 | 126 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Aws.ElasticTranscoder.Json.Types
( S3Object
, SNSTopic
, emptySNSTopic
, IAMRole
, JobId(..)
, PresetId(..)
, PageToken(..)
, JobSpec(..)
, JobSingle(..)
, JobList(..)
, JobSpecId(..)
, JSInput(..)
, JSOutput(..)
, JSOutputStatus(..)
, FrameRate(..)
, Resolution(..)
, AspectRatio(..)
, Container(..)
, Rotate(..)
, Status(..)
, status_t
, PipelineName(..)
, PipelineId(..)
, Pipeline(..)
, PipelineSingle(..)
, PipelineList(..)
, PipelineIdStatus(..)
, Notifications(..)
, PipelineStatus
, pipelineStatusActive
, pipelineStatusPaused
, PipelineIdAndStatus(..)
, PipelineIdAndNotifications(..)
, RoleTest(..)
, PresetName(..)
, Preset(..)
, PresetSingle(..)
, PresetList(..)
, PresetResponse(..)
, Audio(..)
, Video(..)
, Thumbnails(..)
, CodecOptions(..)
, AudioCodec(..)
, SampleRate(..)
, Channels(..)
, VideoCodec(..)
, ThumbnailFormat(..)
, Profile(..)
, Level(..)
, PresetType(..)
, BitRate(..)
, KeyFrameRate(..)
, FixedGOP(..)
, Interval(..)
, MessagesSuccess(..)
, AutoBool(..)
, TextOrNull(..)
, SUCCESS(..)
, EtsServiceError(..)
, bool_t
) where
import Control.Monad
import Control.Applicative
import Text.Printf
import Text.Regex
import Data.String
import qualified Data.Map as Map
import Data.Aeson
import Data.Aeson.Types
import qualified Data.Text as T
import qualified Test.QuickCheck as QC
import Safe
--
-- | Aws.S3 uses Text for object keys
--
type S3Object = T.Text
--
-- | SNS Topics represented by Text values
--
type SNSTopic = T.Text
-- | The empty topic, used when no notification is configured.
emptySNSTopic :: SNSTopic
emptySNSTopic = ""
--
-- | IAM Role represented by Text
--
type IAMRole = T.Text
--
-- | Job Identifiers: opaque service-assigned identifiers, serialised as
-- bare JSON strings.
--
newtype JobId = JobId { _JobId :: T.Text }
    deriving (Show,IsString,Eq)
instance FromJSON JobId where
    parseJSON = withText "JobId" $ return . JobId
instance ToJSON JobId where
    toJSON = String . _JobId
instance QC.Arbitrary JobId where
    arbitrary = JobId . T.pack <$> QC.arbitrary
--
-- | Preset Identifiers, serialised as bare JSON strings.
--
newtype PresetId = PresetId { _PresetId :: T.Text }
    deriving (Show,IsString,Eq)
instance FromJSON PresetId where
    parseJSON = withText "PresetId" $ return . PresetId
instance ToJSON PresetId where
    toJSON = String . _PresetId
instance QC.Arbitrary PresetId where
    arbitrary = PresetId . T.pack <$> QC.arbitrary
--
-- | Page Tokens used for paginated listings, serialised as bare JSON
-- strings.
--
newtype PageToken = PageToken { _PageToken :: T.Text }
    deriving (Show,IsString,Eq)
instance FromJSON PageToken where
    parseJSON = withText "PageToken" $ return . PageToken
instance ToJSON PageToken where
    toJSON = String . _PageToken
instance QC.Arbitrary PageToken where
    arbitrary = PageToken . T.pack <$> QC.arbitrary
--
-- | Job Specifications: what to read, what to produce, and which
-- pipeline should carry out the work. JSON field names follow the AWS
-- wire format ("Input", "Output", "PipelineId").
--
data JobSpec
    = JobSpec
        { jsInput :: JSInput
        , jsOutput :: JSOutput
        , jsPipelineId :: PipelineId
        }
    deriving (Show,Eq)
instance FromJSON JobSpec where
    parseJSON (Object v) = JobSpec
        <$> v .: "Input"
        <*> v .: "Output"
        <*> v .: "PipelineId"
    parseJSON _ = mzero
instance ToJSON JobSpec where
    toJSON (JobSpec inp outp pid) =
        object
            [ "Input" .= inp
            , "Output" .= outp
            , "PipelineId" .= pid
            ]
instance QC.Arbitrary JobSpec where
    arbitrary = liftA3 JobSpec QC.arbitrary QC.arbitrary QC.arbitrary
--
-- | Job Single: a single job wrapped under the "Job" key, as returned by
-- the create/read-job calls.
--
newtype JobSingle
    = JobSingle
        { jsJob :: JobSpecId
        }
    deriving (Show,Eq)
instance FromJSON JobSingle where
    parseJSON (Object v) =
        JobSingle <$>
            v .: "Job"
    parseJSON _ = mzero
instance ToJSON JobSingle where
    toJSON js =
        object
            [ "Job" .= jsJob js
            ]
instance QC.Arbitrary JobSingle where
    arbitrary = JobSingle <$> QC.arbitrary
--
-- | Job List: a page of jobs plus the token for the next page (null on
-- the last page).
--
data JobList
    = JobList
        { jlJobs :: [JobSpecId]
        , jlNextPageToken :: TextOrNull
        }
    deriving (Show,Eq)
instance FromJSON JobList where
    parseJSON (Object v) =
        JobList <$>
            v .: "Jobs" <*>
            v .: "NextPageToken"
    parseJSON _ = mzero
instance ToJSON JobList where
    toJSON js@(JobList _ _) =
        object
            [ "Jobs" .= jlJobs js
            , "NextPageToken" .= jlNextPageToken js
            ]
instance QC.Arbitrary JobList where
    arbitrary = JobList <$> QC.arbitrary <*> QC.arbitrary
--
-- | Job Specifications with JobId & Status, as reported back by the
-- service (the output also carries per-output status).
--
data JobSpecId
    = JobSpecId
        { jsiId :: JobId
        , jsiInput :: JSInput
        , jsiOutput :: JSOutputStatus
        , jsiPipelineId :: PipelineId
        }
    deriving (Show,Eq)
instance FromJSON JobSpecId where
    parseJSON (Object v) =
        JobSpecId <$>
            v .: "Id" <*>
            v .: "Input" <*>
            v .: "Output" <*>
            v .: "PipelineId"
    parseJSON _ = mzero
instance ToJSON JobSpecId where
    toJSON jsi@(JobSpecId _ _ _ _) =
        object
            [ "Id" .= jsiId jsi
            , "Input" .= jsiInput jsi
            , "Output" .= jsiOutput jsi
            , "PipelineId" .= jsiPipelineId jsi
            ]
instance QC.Arbitrary JobSpecId where
    arbitrary =
        JobSpecId
            <$> QC.arbitrary
            <*> QC.arbitrary
            <*> QC.arbitrary
            <*> QC.arbitrary
--
-- | Job Input Parameters: where the source object lives and how to
-- interpret it (most fields accept "auto").
--
data JSInput
    = JSInput
        { jsiKey :: S3Object
        , jsiFrameRate :: FrameRate
        , jsiResolution :: Resolution
        , jsiAspectRatio :: AspectRatio
        , jsiInterlaced :: AutoBool
        , jsiContainer :: Container
        }
    deriving (Show,Eq)
instance FromJSON JSInput where
    parseJSON (Object v) =
        JSInput <$>
            v .: "Key" <*>
            v .: "FrameRate" <*>
            v .: "Resolution" <*>
            v .: "AspectRatio" <*>
            v .: "Interlaced" <*>
            v .: "Container"
    parseJSON _ = mzero
instance ToJSON JSInput where
    -- NOTE(review): the @-pattern with explicit wildcards presumably
    -- exists so that adding a field triggers a pattern warning here.
    toJSON ijs@(JSInput _ _ _ _ _ _) =
        object
            [ "Key" .= jsiKey ijs
            , "FrameRate" .= jsiFrameRate ijs
            , "Resolution" .= jsiResolution ijs
            , "AspectRatio" .= jsiAspectRatio ijs
            , "Interlaced" .= jsiInterlaced ijs
            , "Container" .= jsiContainer ijs
            ]
instance QC.Arbitrary JSInput where
    arbitrary =
        JSInput
            <$> (T.pack <$> QC.arbitrary)
            <*> QC.arbitrary
            <*> QC.arbitrary
            <*> QC.arbitrary
            <*> QC.arbitrary
            <*> QC.arbitrary
--
-- | Job Output Parameters: destination key, thumbnail naming pattern,
-- rotation and the preset to apply.
--
data JSOutput
    = JSOutput
        { jsoKey :: S3Object
        , jsoThumbnailPattern :: T.Text
        , jsoRotate :: Rotate
        , jsoPresetId :: PresetId
        }
    deriving (Show,Eq)
instance FromJSON JSOutput where
    parseJSON (Object v) =
        JSOutput <$>
            v .: "Key" <*>
            v .: "ThumbnailPattern" <*>
            v .: "Rotate" <*>
            v .: "PresetId"
    parseJSON _ = mzero
instance ToJSON JSOutput where
    toJSON jso@(JSOutput _ _ _ _) =
        object
            [ "Key" .= jsoKey jso
            , "ThumbnailPattern".= jsoThumbnailPattern jso
            , "Rotate" .= jsoRotate jso
            , "PresetId" .= jsoPresetId jso
            ]
instance QC.Arbitrary JSOutput where
    arbitrary = JSOutput
        <$> (T.pack <$> QC.arbitrary)
        <*> (T.pack <$> QC.arbitrary)
        <*> QC.arbitrary
        <*> QC.arbitrary
--
-- | Job Output Parameters with Status: 'JSOutput' as echoed back by the
-- service, with progress information attached.
--
data JSOutputStatus
    = JSOutputStatus
        { jsosKey :: S3Object
        , jsosThumbnailPattern :: Maybe T.Text
        , jsosRotate :: Rotate
        , jsosPresetId :: PresetId
        , jsosStatus :: Status
        , jsosStatusDetail :: TextOrNull
        }
    deriving (Show,Eq)
instance FromJSON JSOutputStatus where
    parseJSON (Object v) =
        JSOutputStatus <$>
            v .: "Key" <*>
            v .: "ThumbnailPattern" <*>
            v .: "Rotate" <*>
            v .: "PresetId" <*>
            v .: "Status" <*>
            v .: "StatusDetail"
    parseJSON _ = mzero
instance ToJSON JSOutputStatus where
    toJSON jsos@(JSOutputStatus _ _ _ _ _ _) =
        object
            [ "Key" .= jsosKey jsos
            , "ThumbnailPattern".= jsosThumbnailPattern jsos
            , "Rotate" .= jsosRotate jsos
            , "PresetId" .= jsosPresetId jsos
            , "Status" .= jsosStatus jsos
            , "StatusDetail" .= jsosStatusDetail jsos
            ]
instance QC.Arbitrary JSOutputStatus where
    arbitrary = JSOutputStatus
        <$> ( T.pack <$> QC.arbitrary)
        <*> (fmap T.pack <$> QC.arbitrary)
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
--
-- | Input Frame Rate: either "auto" detection or one of the fixed rates
-- accepted by the service (frames per second).
--
data FrameRate
    = FRauto
    | FR10
    | FR15
    | FR23_97
    | FR24
    | FR25
    | FR29_97
    | FR30
    | FR60
    deriving (Show,Eq,Ord,Bounded,Enum)
-- | Render a frame rate in its wire format.
framerate_t :: FrameRate -> T.Text
framerate_t fr =
        case fr of
          FRauto  -> "auto"
          FR10    -> "10"
          FR15    -> "15"
          FR23_97 -> "23.97"
          FR24    -> "24"
          FR25    -> "25"
          FR29_97 -> "29.97"
          FR30    -> "30"
          FR60    -> "60"
-- NOTE(review): 'text_map' is a helper defined elsewhere in this module;
-- presumably it inverts the rendering function into a lookup table over
-- all Bounded/Enum values — confirm.
framerate_m :: Map.Map T.Text FrameRate
framerate_m = text_map framerate_t
instance FromJSON FrameRate where
    parseJSON = json_str_map_p framerate_m
instance ToJSON FrameRate where
    toJSON = String . framerate_t
instance QC.Arbitrary FrameRate where
    arbitrary = QC.elements [minBound..maxBound]
--
-- | Input Resolution: "auto" or an explicit "<width>x<height>" pixel
-- size.
--
data Resolution
    = Rauto
    | Rpixels (Int,Int)
    deriving (Show,Eq)
-- | Render a resolution in its wire format.
resolution_t :: Resolution -> T.Text
resolution_t fr =
        case fr of
          Rauto         -> "auto"
          Rpixels (w,h) -> T.pack $ printf "%dx%d" w h
instance FromJSON Resolution where
    parseJSON = withText "Resolution" $ parse_res . T.unpack
instance ToJSON Resolution where
    toJSON = String . resolution_t
instance QC.Arbitrary Resolution where
    -- NOTE(review): 'poss' and 'nat_pair' are helpers defined elsewhere
    -- in this module; presumably an optional generator and a generator
    -- of natural-number pairs — confirm.
    arbitrary = inj <$> poss nat_pair
      where
        inj Nothing  = Rauto
        inj (Just p) = Rpixels p
-- | Parse "auto" or "<digits>x<digits>" (the 'x' is case-insensitive);
-- a pattern-match failure inside the Maybe monad yields the type error.
parse_res :: String -> Parser Resolution
parse_res "auto" = return Rauto
parse_res s = maybe err return $
 do [ws,hs] <- matchRegex res_re s
    w <- readMay ws
    h <- readMay hs
    return $ Rpixels (w,h)
  where
    err = typeMismatch "resolution" $ toJSON s
res_re :: Regex
res_re = mkRegex "([0-9]+)[xX]([0-9]+)"
--
-- | Input Aspect Ratio: "auto" or one of the fixed ratios accepted by
-- the service.
--
data AspectRatio
    = ARauto
    | AR1_1
    | AR4_3
    | AR3_2
    | AR16_9
    deriving (Show,Eq,Ord,Bounded,Enum)
-- | Render an aspect ratio in its wire format.
aspectratio_t :: AspectRatio -> T.Text
aspectratio_t fr =
        case fr of
          ARauto -> "auto"
          AR1_1  -> "1:1"
          AR4_3  -> "4:3"
          AR3_2  -> "3:2"
          AR16_9 -> "16:9"
aspectratio_m :: Map.Map T.Text AspectRatio
aspectratio_m = text_map aspectratio_t
instance FromJSON AspectRatio where
    parseJSON = json_str_map_p aspectratio_m
instance ToJSON AspectRatio where
    toJSON = String . aspectratio_t
instance QC.Arbitrary AspectRatio where
    arbitrary = QC.elements [minBound..maxBound]
--
-- | Input Container Type: "auto" detection or one of the container
-- formats recognised by the service.
--
data Container
    = Cauto
    | C3gp
    | Casf
    | Cavi
    | Cdivx
    | Cflv
    | Cmkv
    | Cmov
    | Cmp4
    | Cmpeg
    | Cmpeg_ps
    | Cmpeg_ts
    | Cmxf
    | Cogg
    | Cvob
    | Cwav
    | Cwebm
    deriving (Show,Eq,Ord,Bounded,Enum)
-- | Render a container in its wire format.
container_t :: Container -> T.Text
container_t fr =
        case fr of
          Cauto    -> "auto"
          C3gp     -> "3gp"
          Casf     -> "asf"
          Cavi     -> "avi"
          Cdivx    -> "divx"
          Cflv     -> "flv"
          Cmkv     -> "mkv"
          Cmov     -> "mov"
          Cmp4     -> "mp4"
          Cmpeg    -> "mpeg"
          Cmpeg_ps -> "mpeg-ps"
          Cmpeg_ts -> "mpeg-ts"
          Cmxf     -> "mxf"
          Cogg     -> "ogg"
          Cvob     -> "vob"
          Cwav     -> "wav"
          Cwebm    -> "webm"
container_m :: Map.Map T.Text Container
container_m = text_map container_t
instance FromJSON Container where
    parseJSON = json_str_map_p container_m
instance ToJSON Container where
    toJSON = String . container_t
instance QC.Arbitrary Container where
    arbitrary = QC.elements [minBound..maxBound]
--
-- | Output Rotation: "auto" (use input metadata) or a fixed clockwise
-- rotation in degrees.
--
data Rotate
    = ROTauto
    | ROT0
    | ROT90
    | ROT180
    | ROT270
    deriving (Show,Eq,Ord,Bounded,Enum)
-- | Render a rotation in its wire format.
rotate_t :: Rotate -> T.Text
rotate_t rot =
        case rot of
          ROTauto -> "auto"
          ROT0    -> "0"
          ROT90   -> "90"
          ROT180  -> "180"
          ROT270  -> "270"
rotate_m :: Map.Map T.Text Rotate
rotate_m = text_map rotate_t
instance FromJSON Rotate where
    parseJSON = json_str_map_p rotate_m
instance ToJSON Rotate where
    toJSON = String . rotate_t
instance QC.Arbitrary Rotate where
    arbitrary = QC.elements [minBound..maxBound]
--
-- | Job Status as reported by the service.
--
data Status
    = STSSubmitted
    | STSProgressing
    | STSComplete
    | STSCancelled
    | STSError
    deriving (Show,Eq,Ord,Bounded,Enum)
-- | Render a status in its wire format. Note that AWS spells the
-- cancelled state "Canceled" (single l); keep the literal as-is.
status_t :: Status -> T.Text
status_t sts =
        case sts of
          STSSubmitted   -> "Submitted"
          STSProgressing -> "Progressing"
          STSComplete    -> "Complete"
          STSCancelled   -> "Canceled"
          STSError       -> "Error"
status_m :: Map.Map T.Text Status
status_m = text_map status_t
instance FromJSON Status where
    parseJSON = json_str_map_p status_m
instance ToJSON Status where
    toJSON = String . status_t
instance QC.Arbitrary Status where
    arbitrary = QC.elements [minBound..maxBound]
--
-- | PipelineName: the human-readable pipeline name, serialised as a bare
-- JSON string.
--
newtype PipelineName = PipelineName { _PipelineName :: T.Text }
    deriving (Show,IsString,Eq)
instance FromJSON PipelineName where
    parseJSON = withText "PipelineName" $ return . PipelineName
instance ToJSON PipelineName where
    toJSON = String . _PipelineName
instance QC.Arbitrary PipelineName where
    arbitrary = PipelineName . T.pack <$> QC.arbitrary
--
-- | PipelineId: the opaque service-assigned pipeline identifier,
-- serialised as a bare JSON string.
--
newtype PipelineId = PipelineId { _PipelineId :: T.Text }
    deriving (Show,IsString,Eq)
instance FromJSON PipelineId where
    parseJSON = withText "PipelineId" $ return . PipelineId
instance ToJSON PipelineId where
    toJSON = String . _PipelineId
instance QC.Arbitrary PipelineId where
    arbitrary = PipelineId . T.pack <$> QC.arbitrary
--
-- | Pipeline: the parameters supplied when creating a pipeline (buckets,
-- IAM role and SNS notification topics).
--
data Pipeline
    = Pipeline
        { plnName :: PipelineName
        , plnInputBucket :: S3Object
        , plnOutputBucket :: S3Object
        , plnRole :: IAMRole
        , plnNotifications :: Notifications
        }
    deriving (Show,Eq)
instance FromJSON Pipeline where
    parseJSON (Object v) =
        Pipeline <$>
            v .: "Name" <*>
            v .: "InputBucket" <*>
            v .: "OutputBucket" <*>
            v .: "Role" <*>
            v .: "Notifications"
    parseJSON _ = mzero
instance ToJSON Pipeline where
    toJSON pln@(Pipeline _ _ _ _ _) =
        object
            [ "Name" .= plnName pln
            , "InputBucket" .= plnInputBucket pln
            , "OutputBucket" .= plnOutputBucket pln
            , "Role" .= plnRole pln
            , "Notifications" .= plnNotifications pln
            ]
instance QC.Arbitrary Pipeline where
    arbitrary = Pipeline
        <$> QC.arbitrary
        <*> (T.pack <$> QC.arbitrary)
        <*> (T.pack <$> QC.arbitrary)
        <*> (T.pack <$> QC.arbitrary)
        <*> QC.arbitrary
--
-- | Pipeline Single: a single pipeline wrapped under the "Pipeline" key.
--
newtype PipelineSingle
    = PipelineSingle
        { psPipeline :: PipelineIdStatus
        }
    deriving (Show,Eq)
instance FromJSON PipelineSingle where
    parseJSON (Object v) =
        PipelineSingle <$>
            v .: "Pipeline"
    parseJSON _ = mzero
instance ToJSON PipelineSingle where
    toJSON js =
        object
            [ "Pipeline" .= psPipeline js
            ]
instance QC.Arbitrary PipelineSingle where
    arbitrary = PipelineSingle <$> QC.arbitrary
--
-- | Pipeline List: all pipelines, under the "Pipelines" key.
--
data PipelineList
    = PipelineList
        { plPipelines :: [PipelineIdStatus]
        }
    deriving (Show,Eq)
instance FromJSON PipelineList where
    parseJSON (Object v) =
        PipelineList <$>
            v .: "Pipelines"
    parseJSON _ = mzero
instance ToJSON PipelineList where
    toJSON ps@(PipelineList _) =
        object
            [ "Pipelines" .= plPipelines ps
            ]
instance QC.Arbitrary PipelineList where
    arbitrary = PipelineList <$> QC.arbitrary
--
-- | PipelineIdStatus: a 'Pipeline' as echoed back by the service, with
-- its identifier and current status attached.
--
data PipelineIdStatus
    = PipelineIdStatus
        { pisName :: PipelineName
        , pisInputBucket :: S3Object
        , pisOutputBucket :: S3Object
        , pisRole :: IAMRole
        , pisNotifications :: Notifications
        , pisId :: PipelineId
        , pisStatus :: PipelineStatus
        }
    deriving (Show,Eq)
instance FromJSON PipelineIdStatus where
    parseJSON (Object v) =
        PipelineIdStatus <$>
            v .: "Name" <*>
            v .: "InputBucket" <*>
            v .: "OutputBucket" <*>
            v .: "Role" <*>
            v .: "Notifications" <*>
            v .: "Id" <*>
            v .: "Status"
    parseJSON _ = mzero
instance ToJSON PipelineIdStatus where
    toJSON pis@(PipelineIdStatus _ _ _ _ _ _ _) =
        object
            [ "Name" .= pisName pis
            , "InputBucket" .= pisInputBucket pis
            , "OutputBucket" .= pisOutputBucket pis
            , "Role" .= pisRole pis
            , "Notifications" .= pisNotifications pis
            , "Id" .= pisId pis
            , "Status" .= pisStatus pis
            ]
instance QC.Arbitrary PipelineIdStatus where
    arbitrary = PipelineIdStatus
        <$> QC.arbitrary
        <*> (T.pack <$> QC.arbitrary)
        <*> (T.pack <$> QC.arbitrary)
        <*> (T.pack <$> QC.arbitrary)
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> (T.pack <$> QC.arbitrary)
--
-- | Notifications
--
data Notifications
= Notifications
{ ntfCompleted :: SNSTopic
, ntfError :: SNSTopic
, ntfProgressing :: SNSTopic
, ntfWarning :: SNSTopic
}
deriving (Show,Eq)
instance FromJSON Notifications where
parseJSON (Object v) =
Notifications <$>
v .: "Completed" <*>
v .: "Error" <*>
v .: "Progressing" <*>
v .: "Warning"
parseJSON _ = mzero
instance ToJSON Notifications where
toJSON ntf@(Notifications _ _ _ _) =
object
[ "Completed" .= ntfCompleted ntf
, "Error" .= ntfError ntf
, "Progressing" .= ntfProgressing ntf
, "Warning" .= ntfWarning ntf
]
instance QC.Arbitrary Notifications where
arbitrary = Notifications
<$> (T.pack <$> QC.arbitrary)
<*> (T.pack <$> QC.arbitrary)
<*> (T.pack <$> QC.arbitrary)
<*> (T.pack <$> QC.arbitrary)
--
-- | PipelineStatus
--
-- Documentation is disturbingly vague on the values this type can
-- take so we represent it with Text
-- | The status of a pipeline, kept as free-form 'T.Text' (see the
-- comment above); 'pipelineStatusActive' and 'pipelineStatusPaused'
-- are the values this module knows about.
type PipelineStatus = T.Text

-- | Status of a pipeline that is processing jobs.
pipelineStatusActive :: PipelineStatus
pipelineStatusActive = "active"

-- | Status of a pipeline that has been paused.
pipelineStatusPaused :: PipelineStatus
pipelineStatusPaused = "paused"
--
-- | PipelineIdAndStatus
--
data PipelineIdAndStatus
= PipelineIdAndStatus
{ pasId :: PipelineId
, pasStatus :: PipelineStatus
}
deriving (Show,Eq)
instance FromJSON PipelineIdAndStatus where
parseJSON (Object v) =
PipelineIdAndStatus <$>
v .: "Id" <*>
v .: "Status"
parseJSON _ = mzero
instance ToJSON PipelineIdAndStatus where
toJSON pas@(PipelineIdAndStatus _ _) =
object
[ "Id" .= pasId pas
, "Status" .= pasStatus pas
]
instance QC.Arbitrary PipelineIdAndStatus where
arbitrary = PipelineIdAndStatus
<$> QC.arbitrary
<*> (T.pack <$> QC.arbitrary)
--
-- | PipelineIdAndNotifications
--
data PipelineIdAndNotifications
= PipelineIdAndNotifications
{ panId :: PipelineId
, panNotifications :: Notifications
}
deriving (Show,Eq)
instance FromJSON PipelineIdAndNotifications where
parseJSON (Object v) =
PipelineIdAndNotifications <$>
v .: "Id" <*>
v .: "Notifications"
parseJSON _ = mzero
instance ToJSON PipelineIdAndNotifications where
toJSON pas@(PipelineIdAndNotifications _ _) =
object
[ "Id" .= panId pas
, "Notifications" .= panNotifications pas
]
instance QC.Arbitrary PipelineIdAndNotifications where
arbitrary = PipelineIdAndNotifications
<$> QC.arbitrary
<*> QC.arbitrary
--
-- | RoleTest
--
-- | Request payload for testing that a pipeline's IAM role can reach
-- the given buckets and notify the given SNS topics.
data RoleTest
    = RoleTest
        { rtInputBucket :: S3Object
        , rtOutputBucket :: S3Object
        , rtRole :: IAMRole
        , rtTopics :: [SNSTopic]
        }
    deriving (Show,Eq)

instance FromJSON RoleTest where
    parseJSON (Object v) =
        RoleTest
            <$> v .: "InputBucket"
            <*> v .: "OutputBucket"
            <*> v .: "Role"
            <*> v .: "Topics"
    parseJSON _ = mzero

instance ToJSON RoleTest where
    toJSON (RoleTest inb outb rle tpcs) =
        object
            [ "InputBucket"  .= inb
            , "OutputBucket" .= outb
            , "Role"         .= rle
            , "Topics"       .= tpcs
            ]

-- Buckets, role and topics are 'T.Text' aliases underneath.
instance QC.Arbitrary RoleTest where
    arbitrary =
        RoleTest <$> txt <*> txt <*> txt <*> (map T.pack <$> QC.arbitrary)
      where
        txt = T.pack <$> QC.arbitrary
--
-- | PresetName
--
newtype PresetName = PresetName { _PresetName :: T.Text }
deriving (Show,IsString,Eq)
instance FromJSON PresetName where
parseJSON = withText "PresetName" $ return . PresetName
instance ToJSON PresetName where
toJSON = String . _PresetName
instance QC.Arbitrary PresetName where
arbitrary = PresetName . T.pack <$> QC.arbitrary
--
-- | Preset
--
data Preset
= Preset
{ prName :: PresetName
, prDescription :: T.Text
, prContainer :: Container
, prAudio :: Audio
, prVideo :: Video
, prThumbnails :: Thumbnails
}
deriving (Show,Eq)
instance FromJSON Preset where
parseJSON (Object v) =
Preset <$>
v .: "Name" <*>
v .: "Description" <*>
v .: "Container" <*>
v .: "Audio" <*>
v .: "Video" <*>
v .: "Thumbnails"
parseJSON _ = mzero
instance ToJSON Preset where
toJSON pr@(Preset _ _ _ _ _ _) =
object
[ "Name" .= prName pr
, "Description" .= prDescription pr
, "Container" .= prContainer pr
, "Audio" .= prAudio pr
, "Video" .= prVideo pr
, "Thumbnails" .= prThumbnails pr
]
instance QC.Arbitrary Preset where
arbitrary = Preset
<$> QC.arbitrary
<*> (T.pack <$> QC.arbitrary)
<*> QC.arbitrary
<*> QC.arbitrary
<*> QC.arbitrary
<*> QC.arbitrary
--
-- | PresetSingle
--
newtype PresetSingle
= PresetSingle
{ psPreset :: PresetResponse
}
deriving (Show,Eq)
instance FromJSON PresetSingle where
parseJSON (Object v) =
PresetSingle <$>
v .: "Preset"
parseJSON _ = mzero
instance ToJSON PresetSingle where
toJSON js =
object
[ "Preset" .= psPreset js
]
instance QC.Arbitrary PresetSingle where
arbitrary = PresetSingle <$> QC.arbitrary
--
-- | PresetList
--
data PresetList
= PresetList
{ plPresets :: [PresetResponse]
}
deriving (Show,Eq)
instance FromJSON PresetList where
parseJSON (Object v) =
PresetList <$>
v .: "Presets"
parseJSON _ = mzero
instance ToJSON PresetList where
toJSON ps@(PresetList _) =
object
[ "Presets" .= plPresets ps
]
instance QC.Arbitrary PresetList where
arbitrary = PresetList <$> QC.arbitrary
--
-- | PresetResponse
--
data PresetResponse
= PresetResponse
{ prrName :: PresetName
, prrDescription :: T.Text
, prrContainer :: Container
, prrAudio :: Audio
, prrVideo :: Video
, prrThumbnails :: Thumbnails
, prrId :: PresetId
, prrType :: PresetType
, prrWarning :: T.Text
}
deriving (Show,Eq)
instance FromJSON PresetResponse where
parseJSON (Object v) =
PresetResponse <$>
v .: "Name" <*>
v .: "Description" <*>
v .: "Container" <*>
v .: "Audio" <*>
v .: "Video" <*>
v .: "Thumbnails" <*>
v .: "Id" <*>
v .: "Type" <*>
v .: "Warning"
parseJSON _ = mzero
instance ToJSON PresetResponse where
toJSON prr@(PresetResponse _ _ _ _ _ _ _ _ _) =
object
[ "Name" .= prrName prr
, "Description" .= prrDescription prr
, "Container" .= prrContainer prr
, "Audio" .= prrAudio prr
, "Video" .= prrVideo prr
, "Thumbnails" .= prrThumbnails prr
, "Id" .= prrId prr
, "Type" .= prrType prr
, "Warning" .= prrWarning prr
]
instance QC.Arbitrary PresetResponse where
arbitrary = PresetResponse
<$> QC.arbitrary
<*> (T.pack <$> QC.arbitrary)
<*> QC.arbitrary
<*> QC.arbitrary
<*> QC.arbitrary
<*> QC.arbitrary
<*> QC.arbitrary
<*> QC.arbitrary
<*> (T.pack <$> QC.arbitrary)
--
-- | Audio
--
-- | Audio transcoding parameters of a preset.
data Audio
    = Audio
        { auCodec :: AudioCodec
        , auSampleRate :: SampleRate
        , auBitRate :: BitRate
        , auChannels :: Channels
        }
    deriving (Show,Eq)

instance FromJSON Audio where
    parseJSON (Object v) =
        Audio
            <$> v .: "Codec"
            <*> v .: "SampleRate"
            <*> v .: "BitRate"
            <*> v .: "Channels"
    parseJSON _ = mzero

instance ToJSON Audio where
    toJSON (Audio cdc srt brt chn) =
        object
            [ "Codec"      .= cdc
            , "SampleRate" .= srt
            , "BitRate"    .= brt
            , "Channels"   .= chn
            ]

instance QC.Arbitrary Audio where
    arbitrary =
        Audio
            <$> QC.arbitrary
            <*> QC.arbitrary
            <*> QC.arbitrary
            <*> QC.arbitrary
--
-- | Video
--
-- | Video transcoding parameters of a preset.
--
-- NOTE(review): the JSON key "KeyFrameRateDist" does not match the
-- name used in current Elastic Transcoder documentation, which calls
-- the H.264 key-frame spacing field "KeyFramesMaxDist" -- confirm
-- against the API version this module targets.
data Video
    = Video
        { vdCodec :: VideoCodec
        , vdCodecOptions :: CodecOptions
        , vdKeyFrameRateDist :: KeyFrameRate
        , vdFixedGOP :: FixedGOP
        , vdBitRate :: BitRate
        , vdFrameRate :: FrameRate
        , vdResolution :: Resolution
        , vdAspectRatio :: AspectRatio
        }
    deriving (Show,Eq)

instance FromJSON Video where
    parseJSON (Object v) =
        Video <$>
            v .: "Codec" <*>
            v .: "CodecOptions" <*>
            v .: "KeyFrameRateDist" <*>
            v .: "FixedGOP" <*>
            v .: "BitRate" <*>
            v .: "FrameRate" <*>
            v .: "Resolution" <*>
            v .: "AspectRatio"
    parseJSON _ = mzero

instance ToJSON Video where
    toJSON vd@(Video _ _ _ _ _ _ _ _) =
        object
            [ "Codec" .= vdCodec vd
            , "CodecOptions" .= vdCodecOptions vd
            , "KeyFrameRateDist" .= vdKeyFrameRateDist vd
            , "FixedGOP" .= vdFixedGOP vd
            , "BitRate" .= vdBitRate vd
            , "FrameRate" .= vdFrameRate vd
            , "Resolution" .= vdResolution vd
            , "AspectRatio" .= vdAspectRatio vd
            ]

instance QC.Arbitrary Video where
    arbitrary = Video
        <$> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
--
-- | Thumbnails
--
-- | Thumbnail generation parameters of a preset.
--
-- Every other record in this module serialises with the AWS wire
-- names ("Id", "Status", ...), but this one accidentally leaked the
-- Haskell accessor prefix ("thFormat", ...) into its JSON keys.  The
-- Elastic Transcoder API expects "Format", "Interval", "Resolution"
-- and "AspectRatio", so both instances below now use those names
-- (kept consistent with each other, so round-tripping still holds).
data Thumbnails
    = Thumbnails
        { thFormat :: ThumbnailFormat
        , thInterval :: Interval
        , thResolution :: Resolution
        , thAspectRatio :: AspectRatio
        }
    deriving (Show,Eq)

instance FromJSON Thumbnails where
    parseJSON (Object v) =
        Thumbnails <$>
            v .: "Format" <*>
            v .: "Interval" <*>
            v .: "Resolution" <*>
            v .: "AspectRatio"
    parseJSON _ = mzero

instance ToJSON Thumbnails where
    toJSON th@(Thumbnails _ _ _ _) =
        object
            [ "Format"      .= thFormat th
            , "Interval"    .= thInterval th
            , "Resolution"  .= thResolution th
            , "AspectRatio" .= thAspectRatio th
            ]

instance QC.Arbitrary Thumbnails where
    arbitrary = Thumbnails
        <$> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
        <*> QC.arbitrary
--
-- | CodecOptions
--
-- | H.264 codec options of a video preset.
data CodecOptions
    = CodecOptions
        { coProfile :: Profile
        , coLevel :: Level
        , coMaxReferenceFrames :: MaxReferenceFrames
        }
    deriving (Show,Eq)

instance FromJSON CodecOptions where
    parseJSON (Object v) =
        CodecOptions
            <$> v .: "Profile"
            <*> v .: "Level"
            <*> v .: "MaxReferenceFrames"
    parseJSON _ = mzero

instance ToJSON CodecOptions where
    toJSON (CodecOptions prf lvl mrf) =
        object
            [ "Profile"            .= prf
            , "Level"              .= lvl
            , "MaxReferenceFrames" .= mrf
            ]

instance QC.Arbitrary CodecOptions where
    arbitrary =
        CodecOptions
            <$> QC.arbitrary
            <*> QC.arbitrary
            <*> QC.arbitrary
--
-- | AudioCodec
--
data AudioCodec
= AC_AAC
deriving (Show,Eq,Ord,Bounded,Enum)
acodec_t :: AudioCodec -> T.Text
acodec_t ac =
case ac of
AC_AAC -> "AAC"
acodec_m :: Map.Map T.Text AudioCodec
acodec_m = text_map acodec_t
instance FromJSON AudioCodec where
parseJSON = json_str_map_p acodec_m
instance ToJSON AudioCodec where
toJSON = String . acodec_t
instance QC.Arbitrary AudioCodec where
arbitrary = QC.elements [minBound..maxBound]
--
-- | SampleRate
--
-- | Audio sample rate of a preset.
data SampleRate
    = SRauto
    | SR22050
    | SR32050
    | SR44100
    | SR48000
    | SR96000
    deriving (Show,Eq,Ord,Bounded,Enum)

-- | Wire representation of a 'SampleRate'.
--
-- NOTE(review): the Elastic Transcoder documentation lists the valid
-- sample rates as auto/22050/32000/44100/48000/96000 -- "32050" here
-- looks like a typo for "32000"; confirm against the API before
-- relying on 'SR32050'.
srate_t :: SampleRate -> T.Text
srate_t sr =
    case sr of
      SRauto -> "auto"
      SR22050 -> "22050"
      SR32050 -> "32050"
      SR44100 -> "44100"
      SR48000 -> "48000"
      SR96000 -> "96000"

-- | Reverse lookup table used for parsing.
srate_m :: Map.Map T.Text SampleRate
srate_m = text_map srate_t

instance FromJSON SampleRate where
    parseJSON = json_str_map_p srate_m

instance ToJSON SampleRate where
    toJSON = String . srate_t

instance QC.Arbitrary SampleRate where
    arbitrary = QC.elements [minBound..maxBound]
--
-- | Channels
--
data Channels
= Chs_auto
| Chs_0
| Chs_1
| Chs_2
deriving (Show,Eq,Ord,Bounded,Enum)
channels_t :: Channels -> T.Text
channels_t ac =
case ac of
Chs_auto -> "auto"
Chs_0 -> "0"
Chs_1 -> "1"
Chs_2 -> "2"
channels_m :: Map.Map T.Text Channels
channels_m = text_map channels_t
instance FromJSON Channels where
parseJSON = json_str_map_p channels_m
instance ToJSON Channels where
toJSON = String . channels_t
instance QC.Arbitrary Channels where
arbitrary = QC.elements [minBound..maxBound]
--
-- | VideoCodec
--
data VideoCodec
= VC_H_264
deriving (Show,Eq,Ord,Bounded,Enum)
vcodec_t :: VideoCodec -> T.Text
vcodec_t ac =
case ac of
VC_H_264 -> "H.264"
vcodec_m :: Map.Map T.Text VideoCodec
vcodec_m = text_map vcodec_t
instance FromJSON VideoCodec where
parseJSON = json_str_map_p vcodec_m
instance ToJSON VideoCodec where
toJSON = String . vcodec_t
instance QC.Arbitrary VideoCodec where
arbitrary = QC.elements [minBound..maxBound]
--
-- | ThumbnailFormat
--
data ThumbnailFormat
= TF_png
deriving (Show,Eq,Ord,Bounded,Enum)
thumb_t :: ThumbnailFormat -> T.Text
thumb_t ac =
case ac of
TF_png -> "png"
thumb_m :: Map.Map T.Text ThumbnailFormat
thumb_m = text_map thumb_t
instance FromJSON ThumbnailFormat where
parseJSON = json_str_map_p thumb_m
instance ToJSON ThumbnailFormat where
toJSON = String . thumb_t
instance QC.Arbitrary ThumbnailFormat where
arbitrary = QC.elements [minBound..maxBound]
--
-- | Profile
--
data Profile
= Pr_baseline
| Pr_main
| Pr_high
deriving (Show,Eq,Ord,Bounded,Enum)
profile_t :: Profile -> T.Text
profile_t ac =
case ac of
Pr_baseline -> "baseline"
Pr_main -> "main"
Pr_high -> "high"
profile_m :: Map.Map T.Text Profile
profile_m = text_map profile_t
instance FromJSON Profile where
parseJSON = json_str_map_p profile_m
instance ToJSON Profile where
toJSON = String . profile_t
instance QC.Arbitrary Profile where
arbitrary = QC.elements [minBound..maxBound]
--
-- | Level
--
data Level
= Lv_1
| Lv_1b
| Lv_1_1
| Lv_1_2
| Lv_1_3
| Lv_2
| Lv_2_1
| Lv_2_2
| Lv_3
| Lv_3_1
| Lv_3_2
| Lv_4
| Lv_4_1
deriving (Show,Eq,Ord,Bounded,Enum)
level_t :: Level -> T.Text
level_t ac =
case ac of
Lv_1 -> "1"
Lv_1b -> "1b"
Lv_1_1 -> "1.1"
Lv_1_2 -> "1.2"
Lv_1_3 -> "1.3"
Lv_2 -> "2"
Lv_2_1 -> "2.1"
Lv_2_2 -> "2.2"
Lv_3 -> "3"
Lv_3_1 -> "3.1"
Lv_3_2 -> "3.2"
Lv_4 -> "4"
Lv_4_1 -> "4.1"
level_m :: Map.Map T.Text Level
level_m = text_map level_t
instance FromJSON Level where
parseJSON = json_str_map_p level_m
instance ToJSON Level where
toJSON = String . level_t
instance QC.Arbitrary Level where
arbitrary = QC.elements [minBound..maxBound]
--
-- | PresetType
--
data PresetType
= PTcustom
| PTsystem
deriving (Show,Eq,Ord,Bounded,Enum)
prtype_t :: PresetType -> T.Text
prtype_t pt =
case pt of
PTcustom -> "custom"
PTsystem -> "system"
prtype_m :: Map.Map T.Text PresetType
prtype_m = text_map prtype_t
instance FromJSON PresetType where
parseJSON = json_str_map_p prtype_m
instance ToJSON PresetType where
toJSON = String . prtype_t
instance QC.Arbitrary PresetType where
arbitrary = QC.elements [minBound..maxBound]
--
-- | BitRate
--
newtype BitRate = KbPerSec { _KbPerSec :: Int }
deriving (Show,Eq,Ord,Bounded,Enum)
instance FromJSON BitRate where
parseJSON = withText "BitRate" $ \t -> KbPerSec <$> read_p t
instance ToJSON BitRate where
toJSON = String . T.pack . show . _KbPerSec
instance QC.Arbitrary BitRate where
arbitrary = KbPerSec <$> QC.arbitrary
--
-- | KeyFrameRate
--
newtype KeyFrameRate = KeyFrameRate { _KeyFrameRate :: Int }
deriving (Show,Eq)
instance FromJSON KeyFrameRate where
parseJSON = withText "KeyFrameRate" $ \t -> KeyFrameRate <$> read_p t
instance ToJSON KeyFrameRate where
toJSON = String . T.pack . show . _KeyFrameRate
instance QC.Arbitrary KeyFrameRate where
arbitrary = KeyFrameRate <$> QC.arbitrary
--
-- | FixedGOP
--
-- | Whether the preset uses a fixed group of pictures (GOP).
--
-- The Elastic Transcoder wire format represents this flag with the
-- lower-case JSON strings @"true"@ / @"false"@, but the previous
-- instances round-tripped Haskell's 'show' output (@"True"@ /
-- @"False"@).  'toJSON' now emits the lower-case wire form, while
-- 'parseJSON' accepts both spellings so values serialised by the old
-- instance still parse.
newtype FixedGOP = FixedGOP { _FixedGOP :: Bool }
    deriving (Show,Eq)

instance FromJSON FixedGOP where
    parseJSON = withText "FixedGOP" parse
      where
        parse t
          | t == T.pack "true"  = return $ FixedGOP True
          | t == T.pack "false" = return $ FixedGOP False
          -- fall back to 'read' for the legacy "True"/"False" spelling
          | otherwise           = FixedGOP <$> read_p t

instance ToJSON FixedGOP where
    toJSON = String . T.pack . render . _FixedGOP
      where
        render b = if b then "true" else "false"

instance QC.Arbitrary FixedGOP where
    arbitrary = FixedGOP <$> QC.arbitrary
--
-- | Interval
--
newtype Interval = Interval { _Interval :: Int }
deriving (Show,Eq)
instance FromJSON Interval where
parseJSON = withText "Interval" $ \t -> Interval <$> read_p t
instance ToJSON Interval where
toJSON = String . T.pack . show . _Interval
instance QC.Arbitrary Interval where
arbitrary = Interval <$> QC.arbitrary
--
-- | MaxReferenceFrames
--
newtype MaxReferenceFrames = MaxReferenceFrames
{ _MaxReferenceFrames :: Int }
deriving (Show,Eq)
instance FromJSON MaxReferenceFrames where
parseJSON = withText "MaxReferenceFrames"
$ \t -> MaxReferenceFrames <$> read_p t
instance ToJSON MaxReferenceFrames where
toJSON = String . T.pack . show . _MaxReferenceFrames
instance QC.Arbitrary MaxReferenceFrames where
arbitrary = MaxReferenceFrames <$> QC.arbitrary
--
-- | MessagesSuccess
--
-- | Generic response carrying diagnostic messages and a success flag.
data MessagesSuccess
    = MessagesSuccess
        { msMessages :: [T.Text]
        , msSuccess :: Bool
        }
    deriving (Show,Eq)

instance FromJSON MessagesSuccess where
    parseJSON (Object v) =
        MessagesSuccess
            <$> v .: "Messages"
            <*> v .: "Success"
    parseJSON _ = mzero

instance ToJSON MessagesSuccess where
    toJSON (MessagesSuccess msgs ok) =
        object
            [ "Messages" .= msgs
            , "Success"  .= ok
            ]

instance QC.Arbitrary MessagesSuccess where
    arbitrary =
        MessagesSuccess
            <$> (map T.pack <$> QC.arbitrary)
            <*> QC.arbitrary
--
-- | 'auto', 'true' or 'false'
--
data AutoBool
= ABauto
| ABtrue
| ABfalse
deriving (Show,Eq,Ord,Bounded,Enum)
autobool_t :: AutoBool -> T.Text
autobool_t rot =
case rot of
ABauto -> "auto"
ABtrue -> "true"
ABfalse -> "false"
autobool_m :: Map.Map T.Text AutoBool
autobool_m = text_map autobool_t
instance FromJSON AutoBool where
parseJSON = json_str_map_p autobool_m
instance ToJSON AutoBool where
toJSON = String . autobool_t
instance QC.Arbitrary AutoBool where
arbitrary = QC.elements [minBound..maxBound]
--
-- | Text or Null
--
-- | A JSON value that is either a string or @null@.
data TextOrNull
    = TNText T.Text
    | TNNull
    deriving (Show,Eq)

instance IsString TextOrNull where
    fromString = TNText . T.pack

instance FromJSON TextOrNull where
    parseJSON v =
        case v of
          Null     -> return TNNull
          String t -> return (TNText t)
          _        -> mzero

instance ToJSON TextOrNull where
    toJSON v =
        case v of
          TNNull   -> Null
          TNText t -> String t

-- Mostly strings, occasionally 'TNNull' (see 'poss' for the odds).
instance QC.Arbitrary TextOrNull where
    arbitrary = maybe TNNull TNText <$> poss (T.pack <$> QC.arbitrary)
--
-- | Ets error message
--
newtype EtsServiceError = ESE { _ESE :: T.Text }
deriving (Show,IsString,Eq)
instance FromJSON EtsServiceError where
parseJSON (Object v) = ESE <$> v .: "message"
parseJSON _ = mzero
instance ToJSON EtsServiceError where
toJSON (ESE msg) =
object
[ "message" .= msg
]
instance QC.Arbitrary EtsServiceError where
arbitrary = ESE . T.pack <$> QC.arbitrary
--
-- | 'success'
--
newtype SUCCESS = SUCCESS { _SUCCESS :: Bool }
deriving (Show,Eq)
instance FromJSON SUCCESS where
parseJSON (Object v) = SUCCESS <$> v .: "success"
parseJSON _ = mzero
instance ToJSON SUCCESS where
toJSON = Bool . _SUCCESS
instance QC.Arbitrary SUCCESS where
arbitrary = SUCCESS <$> QC.arbitrary
--
-- | 'true', 'false'
--
-- | Render a 'Bool' in the lower-case form used on the wire.
bool_t :: Bool -> T.Text
bool_t b = T.pack (if b then "true" else "false")
------------------------------------------------------------------------------
--
-- Parser Toolkit
--
------------------------------------------------------------------------------
-- | Parse a JSON string by exact lookup in the given table; fails
-- ('mzero') on any other JSON value or an unknown string.
json_str_map_p :: Ord a => Map.Map T.Text a -> Value -> Parser a
json_str_map_p mp = json_string_p $ flip Map.lookup mp

-- | Parse a JSON string with an arbitrary partial decoding function.
json_string_p :: Ord a => (T.Text->Maybe a) -> Value -> Parser a
json_string_p p (String t) | Just val <- p t = return val
                           | otherwise = mzero
json_string_p _ _ = mzero

-- | Invert a rendering function over a bounded enumeration into a
-- lookup table (assumes the rendering is injective).
text_map :: (Ord a,Bounded a,Enum a) => (a->T.Text) -> Map.Map T.Text a
text_map f = Map.fromList [ (f x,x) | x<-[minBound..maxBound] ]

-- | 'read' into a 'Parser', failing with 'mzero' instead of raising.
read_p :: Read a => T.Text -> Parser a
read_p txt = maybe mzero return $ readMay $ T.unpack txt
------------------------------------------------------------------------------
--
-- QC Toolkit
--
------------------------------------------------------------------------------
-- | Generate 'Just' a value with 20:1 odds, 'Nothing' otherwise.
poss :: QC.Gen a -> QC.Gen (Maybe a)
poss gen = QC.frequency [ (1, return Nothing), (20, Just <$> gen) ]

-- | A pair of naturals, each bounded by the generator size.
nat_pair :: QC.Gen (Int,Int)
nat_pair = two nat
  where
    nat = QC.sized $ \n -> QC.choose (0, n)

-- | Run a generator twice and pair the results.
two :: QC.Gen a -> QC.Gen (a,a)
two gen = do
    x <- gen
    y <- gen
    return (x, y)
| RayRacine/aws | Aws/ElasticTranscoder/Json/Types.hs | bsd-3-clause | 45,339 | 2 | 23 | 17,175 | 10,785 | 5,847 | 4,938 | 1,231 | 17 |
{-# OPTIONS_HADDOCK hide #-}
-- #hide
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Safe #-}
#endif
module Text.XHtml.Frameset.Elements where
import Text.XHtml.Internals
-- * Extra elements in XHTML Frameset
-- | A @\<frame\>@ element.
frame :: Html -> Html
frame = tag "frame"

-- | A @\<frameset\>@ element.
frameset :: Html -> Html
frameset = tag "frameset"

-- | A @\<noframes\>@ element.
noframes :: Html -> Html
noframes = tag "noframes"
| DavidAlphaFox/ghc | libraries/xhtml/Text/XHtml/Frameset/Elements.hs | bsd-3-clause | 431 | 0 | 5 | 135 | 71 | 43 | 28 | 9 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude, BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Device
-- Copyright : (c) The University of Glasgow, 1994-2008
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- Type classes for I/O providers.
--
-----------------------------------------------------------------------------
module GHC.IO.Device (
RawIO(..),
IODevice(..),
IODeviceType(..),
SeekMode(..)
) where
import GHC.Base
import GHC.Word
import GHC.Arr
import GHC.Enum
import GHC.Read
import GHC.Show
import GHC.Ptr
import GHC.Num
import GHC.IO
import {-# SOURCE #-} GHC.IO.Exception ( unsupportedOperation )
-- | A low-level I/O provider where the data is bytes in memory.
class RawIO a where
-- | Read up to the specified number of bytes, returning the number
-- of bytes actually read. This function should only block if there
-- is no data available. If there is not enough data available,
-- then the function should just return the available data. A return
-- value of zero indicates that the end of the data stream (e.g. end
-- of file) has been reached.
read :: a -> Ptr Word8 -> Int -> IO Int
-- | Read up to the specified number of bytes, returning the number
-- of bytes actually read, or 'Nothing' if the end of the stream has
-- been reached.
readNonBlocking :: a -> Ptr Word8 -> Int -> IO (Maybe Int)
-- | Write the specified number of bytes.
write :: a -> Ptr Word8 -> Int -> IO ()
-- | Write up to the specified number of bytes without blocking. Returns
-- the actual number of bytes written.
writeNonBlocking :: a -> Ptr Word8 -> Int -> IO Int
-- | I/O operations required for implementing a 'System.IO.Handle'.
class IODevice a where
-- | @ready dev write msecs@ returns 'True' if the device has data
-- to read (if @write@ is 'False') or space to write new data (if
-- @write@ is 'True'). @msecs@ specifies how long to wait, in
-- milliseconds.
--
ready :: a -> Bool -> Int -> IO Bool
-- | closes the device. Further operations on the device should
-- produce exceptions.
close :: a -> IO ()
-- | returns 'True' if the device is a terminal or console.
isTerminal :: a -> IO Bool
isTerminal _ = return False
-- | returns 'True' if the device supports 'seek' operations.
isSeekable :: a -> IO Bool
isSeekable _ = return False
-- | seek to the specified position in the data.
seek :: a -> SeekMode -> Integer -> IO ()
seek _ _ _ = ioe_unsupportedOperation
-- | return the current position in the data.
tell :: a -> IO Integer
tell _ = ioe_unsupportedOperation
-- | return the size of the data.
getSize :: a -> IO Integer
getSize _ = ioe_unsupportedOperation
-- | change the size of the data.
setSize :: a -> Integer -> IO ()
setSize _ _ = ioe_unsupportedOperation
-- | for terminal devices, changes whether characters are echoed on
-- the device.
setEcho :: a -> Bool -> IO ()
setEcho _ _ = ioe_unsupportedOperation
-- | returns the current echoing status.
getEcho :: a -> IO Bool
getEcho _ = ioe_unsupportedOperation
-- | some devices (e.g. terminals) support a "raw" mode where
-- characters entered are immediately made available to the program.
-- If available, this operations enables raw mode.
setRaw :: a -> Bool -> IO ()
setRaw _ _ = ioe_unsupportedOperation
-- | returns the 'IODeviceType' corresponding to this device.
devType :: a -> IO IODeviceType
-- | duplicates the device, if possible. The new device is expected
-- to share a file pointer with the original device (like Unix @dup@).
dup :: a -> IO a
dup _ = ioe_unsupportedOperation
-- | @dup2 source target@ replaces the target device with the source
-- device. The target device is closed first, if necessary, and then
-- it is made into a duplicate of the first device (like Unix @dup2@).
dup2 :: a -> a -> IO a
dup2 _ _ = ioe_unsupportedOperation
ioe_unsupportedOperation :: IO a
ioe_unsupportedOperation = throwIO unsupportedOperation
-- | Type of a device that can be used to back a
-- 'GHC.IO.Handle.Handle' (see also 'GHC.IO.Handle.mkFileHandle'). The
-- standard libraries provide creation of 'GHC.IO.Handle.Handle's via
-- Posix file operations with file descriptors (see
-- 'GHC.IO.Handle.FD.mkHandleFromFD') with FD being the underlying
-- 'GHC.IO.Device.IODevice' instance.
--
-- Users may provide custom instances of 'GHC.IO.Device.IODevice'
-- which are expected to conform the following rules:
data IODeviceType
= Directory -- ^ The standard libraries do not have direct support
-- for this device type, but a user implementation is
-- expected to provide a list of file names in
-- the directory, in any order, separated by @\'\\0\'@
-- characters, excluding the @"."@ and @".."@ names. See
-- also 'System.Directory.getDirectoryContents'. Seek
-- operations are not supported on directories (other
-- than to the zero position).
| Stream -- ^ A duplex communications channel (results in
-- creation of a duplex 'GHC.IO.Handle.Handle'). The
-- standard libraries use this device type when
-- creating 'GHC.IO.Handle.Handle's for open sockets.
| RegularFile -- ^ A file that may be read or written, and also
-- may be seekable.
| RawDevice -- ^ A "raw" (disk) device which supports block binary
-- read and write operations and may be seekable only
-- to positions of certain granularity (block-
-- aligned).
deriving ( Eq -- ^ @since 4.2.0.0
)
-- -----------------------------------------------------------------------------
-- SeekMode type
-- | A mode that determines the effect of 'System.IO.hSeek' @hdl mode i@.
data SeekMode
= AbsoluteSeek -- ^ the position of @hdl@ is set to @i@.
| RelativeSeek -- ^ the position of @hdl@ is set to offset @i@
-- from the current position.
| SeekFromEnd -- ^ the position of @hdl@ is set to offset @i@
-- from the end of the file.
deriving ( Eq -- ^ @since 4.2.0.0
, Ord -- ^ @since 4.2.0.0
, Ix -- ^ @since 4.2.0.0
, Enum -- ^ @since 4.2.0.0
, Read -- ^ @since 4.2.0.0
, Show -- ^ @since 4.2.0.0
)
| sdiehl/ghc | libraries/base/GHC/IO/Device.hs | bsd-3-clause | 6,662 | 0 | 12 | 1,657 | 703 | 414 | 289 | 66 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Main (main) where
import FastString
import Control.Concurrent
import Control.DeepSeq
import Control.Exception
import Control.Monad
import Data.ByteString (ByteString)
import Data.ByteString.Builder
import qualified Data.ByteString.Char8 as Char
import Data.ByteString.Lazy (toStrict)
import Data.List
import Data.Monoid
import qualified Data.Sequence as Seq
import Data.Time
import GHC.Conc
import System.IO
import System.Random
import Text.Printf
data Options = Options
{ optThreads :: Int -- ^ the number of threads to run concurrently
, optRepeat :: Int -- ^ how many times do we create the same 'FastString'
, optCount :: Int -- ^ the total number of different 'FastString's
, optPrefix :: Int -- ^ the length of prefix in each 'FastString'
}
defOptions :: Options
defOptions = Options
{ optThreads = 8
, optRepeat = 16
, optCount = 10000
, optPrefix = 0
}
-- | Fork one worker per job list (pinned to capabilities 0,1,... via
-- 'forkOn'), apply @op@ to every 'ByteString' in the worker's job, and
-- return the maximum of the per-worker maxima less a fixed offset.
--
-- NOTE(review): the magic constant 603979775 is presumably one less
-- than the base of the FastString unique supply, so that with
-- sequentially allocated uniques the result approximates the number
-- of distinct FastStrings created -- confirm against the FastString
-- implementation under test.
run :: [[ByteString]] -> (ByteString -> Int) -> IO Int
run jobs op = do
  mvars <- forM ([0 ..] `zip` jobs) $ \(i, job) -> do
    mvar <- newEmptyMVar
    forkOn i $ do
      uniq <- evaluate $ force $ maximum $ map op job
      putMVar mvar uniq
    return mvar
  uniqs <- mapM takeMVar mvars
  evaluate $ force $ maximum uniqs - 603979775
-- | Force the whole three-level table and return the total number of
-- innermost entries.
summary :: IO [[[a]]] -> IO Int
summary getTable =
  getTable >>= \table ->
    evaluate (force (length (concat (concat table))))
-- | Run an action, printing its wall-clock duration in milliseconds
-- to stderr under the given label; returns the action's result.
timeIt :: String -> IO a -> IO a
timeIt name io = do
  before <- getCurrentTime
  ret <- io
  after <- getCurrentTime
  hPrintf stderr "%s: %.2fms\n" name
    (realToFrac $ diffUTCTime after before * 1000 :: Double)
  return ret
main :: IO ()
main = do
seed <- randomIO
let Options{..} = defOptions
shuffle (i:is) s
| Seq.null s = []
| otherwise = m: shuffle is (l <> r)
where
(l, m Seq.:< r) = Seq.viewl <$> Seq.splitAt (i `rem` Seq.length s) s
inputs =
shuffle (randoms $ mkStdGen seed) $
mconcat $ replicate optRepeat $
Seq.fromFunction optCount $ \i -> toStrict $ toLazyByteString $
byteString (Char.replicate optPrefix '_') <> intDec i
jobs <- evaluate $ force $ transpose $
map (take optThreads) $
takeWhile (not . null) $
iterate (drop optThreads) inputs
setNumCapabilities (length jobs)
-- The maximum unique may be greater than 'optCount'
u <- timeIt "run" $ run jobs $ uniqueOfFS . mkFastStringByteString
print $ optCount <= u && u <= min optThreads optRepeat * optCount
-- But we should never have duplicate 'FastString's in the table
n <- timeIt "summary" $ summary getFastStringTable
print $ n == optCount
| sdiehl/ghc | testsuite/tests/utils/should_run/T14854.hs | bsd-3-clause | 2,639 | 0 | 18 | 611 | 862 | 444 | 418 | 74 | 1 |
{-# LANGUAGE Arrows #-}
{-
******************************************************************************
* A F R P *
* *
* Module: Elevator *
* Purpose: Elevator simulation based on the Fran version *
* from Simon Thompson's paper "A functional reactive *
* animation of a lift using Fran". *
* Authors: Henrik Nilsson *
* *
* Copyright (c) The University of Nottingham, 2004 *
* *
******************************************************************************
-}
module Elevator where
import FRP.Yampa
import FRP.Yampa.Utilities -- ((^<<), dHold)
------------------------------------------------------------------------------
-- Auxiliary definitions
------------------------------------------------------------------------------
type Position = Double -- [m]
type Distance = Double -- [m]
type Velocity = Double -- [m/s]
------------------------------------------------------------------------------
-- Elevator simulator
------------------------------------------------------------------------------
lower, upper :: Position
lower = 0
upper = 5
upRate, downRate :: Velocity
upRate = 1
downRate = 1.1
elevator :: SF (Event (), Event ()) Position
elevator = proc (lbp,rbp) -> do
rec
-- This delayed hold can be thought of as modelling acceleration.
-- It is not "physical" to expect a desire to travel at a certain
-- velocity to be immediately reflected in the actual velocity.
-- (The reason we get into trouble here is that the stop/go events
-- depends instantaneously on "stopped" which in turn depends
-- instantaneously on "v".)
v <- dHold 0 -< stop `tag` 0
`lMerge`
goUp `tag` upRate
`lMerge`
goDown `tag` (-downRate)
y <- (lower +) ^<< integral -< v
let atBottom = y <= lower
atTop = y >= upper
stopped = v == 0 -- Somewhat dubious ...
waitingBottom = atBottom && stopped
waitingTop = atTop && stopped
arriveBottom <- edge -< atBottom
arriveTop <- edge -< atTop
let setUp = lbp `tag` True
setDown = rbp `tag` True
-- This does not work. The reset events would be generated as soon
-- as the corresponding go event was generated, but the latter
-- depend instantaneusly on the reset signals.
-- resetUp = goUp `tag` False
-- resetDown = goDown `tag` False
-- One approach would be to wait for "physical confiramtion"
-- that the elevator actually is moving in the desired direction:
-- resetUp <- (`tag` True) ^<< edge -< v > 0
-- resetDown <- (`tag` False) ^<< edge -< v < 0
-- Another approach is to simply delay the reset events to avoid
-- suppressing the very event that generates the reset event.
resetUp <- iPre noEvent -< goUp `tag` False
resetDown <- iPre noEvent -< goDown `tag` False
-- Of course, a third approach would be to just use dHold below.
-- But that does not seem to be the right solution to me.
upPending <- hold False -< setUp `lMerge` resetUp
downPending <- hold False -< setDown `lMerge` resetDown
let pending = upPending || downPending
eitherButton = lbp `lMerge` rbp
goDown = arriveTop `gate` pending
`lMerge`
eitherButton `gate` waitingTop
goUp = arriveBottom `gate` pending
`lMerge`
eitherButton `gate` waitingBottom
stop = (arriveTop `lMerge` arriveBottom) `gate` not pending
returnA -< y
| ony/Yampa-core | examples/Elevator/Elevator.hs | bsd-3-clause | 4,145 | 6 | 24 | 1,495 | 482 | 290 | 192 | 45 | 1 |
module Graphics.UI.Gtk.Layout.BackgroundContainer where
import Control.Monad.Trans (liftIO)
import Data.IORef
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Gdk.GC
import System.Glib.Types
-- | An 'EventBox' paired with an optional background pixbuf that is
-- painted on expose (see 'backgroundExpose').
data BackgroundContainer = BackgroundContainer EventBox (IORef (Maybe Pixbuf))

instance WidgetClass BackgroundContainer
instance ObjectClass BackgroundContainer
instance GObjectClass BackgroundContainer where
  toGObject (BackgroundContainer ev _) = toGObject ev
  -- NOTE(review): the pixbuf ref cannot be recovered from a raw
  -- GObject, so the cast fills it with 'undefined'; forcing that
  -- field on a cast-back value will crash -- confirm callers never
  -- touch it on such values.
  unsafeCastGObject ev = (BackgroundContainer (unsafeCastGObject ev) undefined)
instance EventBoxClass BackgroundContainer
instance ContainerClass BackgroundContainer
instance BinClass BackgroundContainer
-- | A container with no background picture.
backgroundContainerNew :: IO BackgroundContainer
backgroundContainerNew = do
  ev <- eventBoxNew
  ref <- newIORef Nothing
  return $ BackgroundContainer ev ref
-- | A container whose background is loaded (eagerly) from the given
-- image file and repainted on every expose event.
backgroundContainerNewWithPicture :: FilePath -> IO BackgroundContainer
backgroundContainerNewWithPicture fp = do
  ev <- eventBoxNew
  pb <- pixbufNewFromFile fp
  ref <- newIORef (Just pb)
  let wdgt = BackgroundContainer ev ref
  -- returning False lets other expose handlers run as well
  wdgt `on` exposeEvent $ liftIO (backgroundExpose wdgt) >> return False
  return wdgt
-- | A container that uses the given pixbuf as its background,
-- repainted on every expose event.
backgroundContainerNewWithPixbuf :: Pixbuf -> IO BackgroundContainer
backgroundContainerNewWithPixbuf pb = do
  ev <- eventBoxNew
  ref <- newIORef (Just pb)
  let wdgt = BackgroundContainer ev ref
  -- returning False lets other expose handlers run as well
  wdgt `on` exposeEvent $ liftIO (backgroundExpose wdgt) >> return False
  return wdgt
-- | Expose handler: clear the draw window and, if a background pixbuf is
-- set, paint it scaled to the widget's current size.
backgroundExpose :: BackgroundContainer -> IO ()
backgroundExpose (BackgroundContainer ev ref) = do
  dw <- widgetGetDrawWindow ev
  drawWindowClear dw
  pixbufM <- readIORef ref
  case pixbufM of
    Nothing -> return ()
    Just pb -> do sz@(w,h) <- widgetGetSize ev
                  -- 'sz' itself is unused; only w and h are needed.
                  pb' <- pixbufScaleSimple pb w h InterpBilinear
                  drawWindowBeginPaintRect dw (Rectangle 0 0 w h)
                  gc <- gcNew dw
                  -- The (-1) arguments mean "use the full pixbuf extent".
                  drawPixbuf dw gc pb' 0 0 0 0 (-1) (-1) RgbDitherNone (-1) (-1)
                  drawWindowEndPaint dw
-- | Replace the background with an image loaded from the given path, or
-- clear it with 'Nothing'.  The widget is not redrawn here; the expose
-- handler picks up the new value on the next expose event.
backgroundSetPicture :: BackgroundContainer -> Maybe FilePath -> IO()
backgroundSetPicture (BackgroundContainer ev ref) fpM = do
  pbM <- maybe (return Nothing) (fmap Just . pixbufNewFromFile) fpM
  writeIORef ref pbM
-- | Replace (or clear) the background pixbuf directly; same redraw caveat
-- as 'backgroundSetPicture'.
backgroundSetPixbuf :: BackgroundContainer -> Maybe Pixbuf -> IO()
backgroundSetPixbuf (BackgroundContainer ev ref) pbM =
  writeIORef ref pbM
| keera-studios/gtk-helpers | gtk3/src/Graphics/UI/Gtk/Layout/BackgroundContainer.hs | bsd-3-clause | 2,342 | 2 | 15 | 416 | 709 | 340 | 369 | 55 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hu-HU">
<title>Simple Example Add-On</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Keresés</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/simpleexample/src/main/javahelp/org/zaproxy/addon/simpleexample/resources/help_hu_HU/helpset_hu_HU.hs | apache-2.0 | 968 | 77 | 67 | 157 | 416 | 210 | 206 | -1 | -1 |
module C1
(fringe, myFringe, Tree(..), SameOrNot(..)) where
data Tree a = Leaf a | Branch (Tree a) (Tree a)
-- | Collect the values stored at the leaves, in left-to-right order.
fringe :: (Tree a) -> [a]
fringe t = case t of
  Leaf v         -> [v]
  Branch lhs rhs -> fringe lhs ++ fringe rhs
-- | Sum of all values stored at the leaves of a tree.
sumTree :: Num a => (Tree a) -> a
sumTree t = case t of
  Leaf v         -> v
  Branch lhs rhs -> sumTree lhs + sumTree rhs
-- Note: unlike 'fringe', this deliberately descends only into the left
-- subtree (this module is a refactoring-test fixture, so the asymmetry
-- appears to be intended — do not "fix" it).
myFringe :: (Tree a) -> [a]
myFringe (Leaf x) = [x]
myFringe (Branch left right) = myFringe left
-- | Equality-style comparison split into two explicit methods.
class SameOrNot a
  where
   isSame :: a -> a -> Bool
   isNotSame :: a -> a -> Bool
instance SameOrNot Int
  where
   isSame a b = a == b
   isNotSame a b = a /= b
| SAdams601/HaRe | old/testing/moveDefBtwMods/C1AST.hs | bsd-3-clause | 638 | 0 | 8 | 175 | 315 | 167 | 148 | 20 | 1 |
module B3 (myFringe) where
import D3 hiding (myFringe, sumSquares, fringe)
import D3 (fringe)
import C3 hiding ()
-- Fixture variant of 'myFringe': only the right subtree is traversed
-- (intentional asymmetry for the move-definition refactoring tests).
myFringe :: (Tree a) -> [a]
myFringe (Leaf x) = [x]
myFringe (Branch left right) = myFringe right
-- | Sum of the squares of a list of numbers.
sumSquares lst = case lst of
  (v : rest) -> v * v + sumSquares rest
  []         -> 0
| SAdams601/HaRe | old/testing/moveDefBtwMods/B3_AstOut.hs | bsd-3-clause | 296 | 0 | 8 | 66 | 138 | 78 | 60 | 9 | 1 |
{-@ LIQUID "--no-termination" @-}
{-@ LIQUID "--short-names" @-}
module Foo () where
import Language.Haskell.Liquid.Prelude (liquidAssert)
import Data.IORef
{-@ data variance IO bivariant @-}
{-@ data variance IORef bivariant @-}
job :: IO ()
job = do
  p <- newIORef (0 :: Int)
  writeIORef p 10
  v <- readIORef p
  -- This assertion is false at runtime (v is 10, not 0) *on purpose*:
  -- the file is a LiquidHaskell negative test exercising the bivariant
  -- IO/IORef variance annotations above.
  liquidAssert (v == 0) $ return ()
| mightymoose/liquidhaskell | tests/neg/contra0.hs | bsd-3-clause | 361 | 0 | 10 | 73 | 99 | 53 | 46 | 9 | 1 |
type X = 1 + + 2
| sdiehl/ghc | testsuite/tests/parser/should_fail/typeops_C.hs | bsd-3-clause | 17 | 2 | 5 | 7 | 13 | 7 | 6 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
module Queue where
import qualified Stack as Stack
-- | *Queue
-- A functional queue as a pair of stacks; 'push'/'top'/'pop' work on
-- 'queueFront', 'inject'/'back'/'eject' on 'queueBack'.
data Queue a = Queue { queueFront :: Stack.Stack a
                     , queueBack :: Stack.Stack a
                     }
-- .end
-- | *empty
-- A queue with no elements: both stacks are empty.
empty :: Queue a
empty = Queue { queueFront = Stack.empty
              , queueBack = Stack.empty
              }
-- .end
-- | *rotate
-- Swap the roles of the two stacks, reversing each, so the front/back
-- orientation flips.
rotate :: Queue a -> Queue a
rotate Queue{..} =
    Queue { queueFront = Stack.reverse queueBack
          , queueBack = Stack.reverse queueFront
          }
-- .end
-- | *top
-- Head of 'queueFront'; if that stack is empty, head of the reversed
-- 'queueBack'.  'Nothing' on an empty queue.
top :: Queue a -> Maybe a
top Queue{..} =
    case (Stack.head queueFront, Stack.head $ Stack.reverse queueBack) of
      (Nothing, Nothing) -> Nothing
      (Just x, _) -> Just x
      (Nothing, Just y) -> Just y
-- .end
-- | *back
-- Mirror image of 'top': head of 'queueBack', falling back to the head of
-- the reversed 'queueFront'.
back :: Queue a -> Maybe a
back Queue{..} =
    case (Stack.head queueBack, Stack.head $ Stack.reverse queueFront) of
      (Nothing, Nothing) -> Nothing
      (Just y, _) -> Just y
      (Nothing, Just x) -> Just x
-- .end
-- | *pop
-- Drop the element 'top' would return.  Prefers dropping from
-- 'queueFront'; when that stack is empty, the tail of the reversed
-- 'queueBack' becomes the new front and the back is cleared.
pop :: Queue a -> Maybe (Queue a)
pop Queue{..} =
    case (Stack.tail queueFront, Stack.tail $ Stack.reverse queueBack) of
      (Nothing, Nothing) -> Nothing
      (Just xs, _) -> Just $ Queue {queueFront = xs, queueBack = queueBack}
      (Nothing, Just ys) -> Just $ Queue {queueFront = ys, queueBack = Stack.empty}
-- .end
-- | *eject
-- Dual of 'pop': drop the element 'back' would return.
eject :: Queue a -> Maybe (Queue a)
eject Queue{..} =
    case (Stack.tail queueBack, Stack.tail $ Stack.reverse queueFront) of
      (Nothing, Nothing) -> Nothing
      (Just ys, _) -> Just $ Queue {queueBack = ys, queueFront = queueFront}
      (Nothing, Just xs) -> Just $ Queue {queueBack = xs, queueFront = Stack.empty}
-- .end
-- | *push
-- Add an element at the front by consing onto 'queueFront'.
push :: a -> Queue a -> Queue a
push val Queue{..} =
    Queue { queueFront = Stack.cons val queueFront
          , queueBack = queueBack
          }
-- .end
-- | *inject
-- Dual of 'push': add an element at the back by consing onto 'queueBack'.
inject :: a -> Queue a -> Queue a
inject val Queue{..} =
    Queue { queueBack = Stack.cons val queueBack
          , queueFront = queueFront
          }
-- .end
| nomicflux/okasaki-visualization | src/Structures/hs/Queue.hs | mit | 1,995 | 0 | 11 | 517 | 745 | 404 | 341 | 44 | 3 |
module Examples.First where
import Test.HUnit hiding (test)
import Improb.Parser
import Improb.CodeGen
import Examples.Tests.Branching
import Examples.Tests.BranchingEuterpea
import Examples.Tests.Motif
import Examples.Tests.MotifEuterpea
import Examples.Tests.Duet
import Examples.Tests.DuetEuterpea
import Text.Parsec.Error
import System.Random
-- Run the whole suite and print a summary to stdout.
test = runTestTT testList
-- Each example contributes two cases: the parse result and the generated
-- Euterpea music representation.
testList = TestList
    [ TestLabel "branching" branchingTest
    , TestLabel "branching Euterpea representation" branchEuterpeaTest
    , TestLabel "motif" motifTest
    , TestLabel "motif Euterpea representation" motifEuterpeaTest
    , TestLabel "duet" duetTest
    , TestLabel "duet Euterpea representation" duetEuterpeaTest
    ]
-- Uh, this feels distinctly bad.
-- I've never used HUnit before, hopefully better practices will come
-- later in the course.
-- Orphan instance used only by the assertions below: Parsec's ParseError
-- has no Eq, so compare the rendered error messages instead.
instance Eq ParseError where
    e1 == e2 = (show e1) == (show e2)
branchingObserved = parseProgram branchingString
branchingTest = TestCase (branchExpects @=? branchingObserved)
-- Code generation is seeded with a fixed StdGen so the output is
-- deterministic and comparable against the golden value.
branchEuterpeaObserved =
    case parseProgram branchingString of
        Right prg -> genEuterpeaMusic (mkStdGen 0) prg
        Left _ -> error "branch did not parse"
branchEuterpeaTest = TestCase (branchingEuterpea @=? branchEuterpeaObserved)
motifObserved = parseProgram motifString
motifTest = TestCase (motifExpects @=? motifObserved)
-- Deterministic generation (fixed seed), mirroring the branching case.
motifEuterpeaObserved =
    case parseProgram motifString of
        Right prg -> genEuterpeaMusic (mkStdGen 0) prg
        -- Fixed copy-pasted message: this is the motif example, not branch.
        Left _ -> error "motif did not parse"
motifEuterpeaTest = TestCase (motifEuterpea @=? motifEuterpeaObserved)
duetObserved = parseProgram duetString
duetTest = TestCase (duetExpects @=? duetObserved)
-- Deterministic generation (fixed seed), mirroring the branching case.
duetEuterpeaObserved =
    case parseProgram duetString of
        Right prg -> genEuterpeaMusic (mkStdGen 0) prg
        -- Fixed copy-pasted message: this is the duet example, not branch.
        Left _ -> error "duet did not parse"
duetEuterpeaTest = TestCase (duetEuterpea @=? duetEuterpeaObserved)
| michaelbjames/improb | Examples/First.hs | mit | 1,930 | 0 | 10 | 325 | 422 | 220 | 202 | 43 | 2 |
module Main (main) where
import Data.Foldable (for_)
import Data.List (intercalate)
import Data.Ratio (denominator, numerator)
import System.Directory (createDirectoryIfMissing)
import System.Exit (exitFailure)
import System.IO (IOMode(WriteMode), hFlush, hPutStrLn,
stderr, stdout, withFile)
import Crypto.Hash (Digest, MD5, hashlazy)
import qualified Data.ByteString.Lazy as ByteString_Lazy
import Common
import Prelude
import WignerSymbols
import WignerSymbols.Internal
type ByteString_Lazy = ByteString_Lazy.ByteString
main :: IO ()
main = do
  -- Each section regenerates a coefficient table and compares its MD5
  -- against the known-good hashes below.
  -- Clebsch-Gordan coefficients as exact signed-sqrt rationals.
  checkResults knownHashes_cg 25 "cg" $ \ tjMax write ->
    for_ (get3tjms tjMax) $ \ (tj1, tm1, tj2, tm2, tj3, tm3) ->
      let r = clebschGordanSq (tj1, tm1, tj2, tm2, tj3, -tm3) in
      write . intercalate "\t" $
        (show <$> [tj1, tm1, tj2, tm2, tj3, -tm3]) <>
        [show (ssr_signum r * ssr_numerator r) <>
         "/" <> show (ssr_denominator r)]
  -- Cross-check: the same CG table rebuilt from Wigner 3-j symbols;
  -- compared against the same hash table (hence the small tjMax).
  checkResults knownHashes_cg 5 "cgw" $ \ tjMax write ->
    for_ (get3tjms tjMax) $ \ (tj1, tm1, tj2, tm2, tj3, tm3) ->
      let SignedSqrtRational wr = wigner3jSq (tj1, tm1, tj2, tm2, tj3, tm3)
          r = wr
              * (fromIntegral tj3 + 1)
              / (-1) ^^ ((tj1 - tj2 - tm3) `div` 2) in
      write . intercalate "\t" $
        (show <$> [tj1, tm1, tj2, tm2, tj3, -tm3]) <>
        [show (numerator r) <> "/" <> show (denominator r)]
  -- Wigner 6-j symbols.
  checkResults knownHashes_w6j 15 "w6j" $ \ tjMax write ->
    for_ (get6tjs tjMax) $ \ tjs ->
      let r = wigner6jSq tjs in
      write . intercalate "\t" $
        (show <$> tuple6ToList tjs) <>
        [show (ssr_signum r * ssr_numerator r) <>
         "/" <> show (ssr_denominator r)]
  -- Wigner 9-j symbols.
  checkResults knownHashes_w9j 7 "w9j" $ \ tjMax write ->
    for_ (get9tjs tjMax) $ \ tjs ->
      let r = wigner9jSq tjs in
      write . intercalate "\t" $
        (show <$> tuple9ToList tjs) <>
        [show (ssr_signum r * ssr_numerator r) <>
         "/" <> show (ssr_denominator r)]
-- MD5 hashes of the generated coefficient tables, keyed by tjMax.
-- The trailing comment on each line is the number of table entries.
knownHashes_cg :: [(Int, String)]
knownHashes_cg =
  [ (5, "e74c501299b456a6cb29e4f5714e9061") -- 681
  , (10, "b6d0770101f4ebdaa9a55d94f07b001f") -- 11487
  , (15, "9192023f26dae0eebcce11afa7372eb6") -- 69272
  -- NOTE(review): the tjMax=20 and tjMax=30 entries share the same hash,
  -- which is statistically implausible for different tables — verify.
  , (20, "75ef56391b61e1bb2336e36ac7834216") -- 259523
  , (25, "5901128892a264b73b5479b70b331fd0") -- 737113
  , (30, "75ef56391b61e1bb2336e36ac7834216") -- 1747984
  , (40, "2f9b936ea977249c1fea8a22d190a4cf") -- 6931995
  ]
knownHashes_w6j :: [(Int, String)]
knownHashes_w6j =
  [ (5, "26c24e568fc96f1732ebb3130a46f22a") -- 1479
  , (10, "f892f4b466e0558179ca870941d0a456") -- 42393
  , (15, "f50b0163194cef1699727b7064760ec0") -- 363196
  , (20, "e1b5dad0f1469cc54b6139533f982815") -- 1766270
  , (25, "f326bf6e12a94120d2f46582e95e92f8") -- 6171698
  ]
knownHashes_w9j :: [(Int, String)]
knownHashes_w9j =
  [ (3, "4005ef20e2ed8c789917dce99d027bc4") -- 1616
  , (4, "92cfc13320e7fd6a34b3970ebef58e06") -- 9060
  , (5, "d596fa3960aafae148754b6f3274507d") -- 38031
  , (7, "7b338708ef3aa4ba0a4f5bd8c8b4e6aa") -- 401899
  , (10, "479c0a020eaceff5539e2dda2200c1ab") -- 5898846
  ]
-- Regenerate one coefficient table into dist/<name>-tj<tjMax>.txt, hash
-- the file, and compare against the recorded hash.  Exits the process on
-- mismatch or when no hash is recorded for this tjMax.
checkResults :: [(Int, String)]
             -> Int
             -> String
             -> (Int -> (String -> IO ()) -> IO ())
             -> IO ()
checkResults knownHashes tjMax name compute = do
  createDirectoryIfMissing True "dist"
  withFile filename WriteMode $ \ h ->
    compute tjMax (hPutStrLn h)
  newHash <- hexMD5 <$> ByteString_Lazy.readFile filename
  case lookup tjMax knownHashes of
    Nothing -> do
      hPutStrLn stderr (errorPrefix <> "no known hash available")
      hPutStrLn stderr ("actual: " <> newHash)
      hFlush stderr
      exitFailure
    Just oldHash
      | oldHash == newHash -> do
          putStrLn (okPrefix <> name <> ": hash matched (" <> oldHash <> ")")
          hFlush stdout
      | otherwise -> do
          hPutStrLn stderr (errorPrefix <> name <> ": hash does not match!")
          hPutStrLn stderr ("expected: " <> oldHash)
          hPutStrLn stderr ("actual: " <> newHash)
          hFlush stderr
          exitFailure
  -- ANSI color escapes: red "error" / green "ok" prefixes.
  where filename = "dist/" <> name <> "-tj" <> show tjMax <> ".txt"
        errorPrefix = "[\ESC[31;1merror\ESC[0m] "
        okPrefix = "[\ESC[32mok\ESC[0m] "
-- Render the MD5 digest of a lazy ByteString as a lowercase hex string.
hexMD5 :: ByteString_Lazy -> String
hexMD5 s = show (hashlazy s :: Digest MD5)
| Rufflewind/wigner-symbols | src/TestMain.hs | mit | 4,308 | 11 | 33 | 1,039 | 1,331 | 725 | 606 | 101 | 2 |
{-# LANGUAGE GADTs #-}
module RecordGADT where
-- | GADT with an existentially quantified record field: the concrete
-- numeric type of 'x' is hidden; only its 'Num' dictionary is carried.
data Test where
  Test :: Num t => {x :: t} -> Test
| vladfi1/hs-misc | RecordGADT.hs | mit | 101 | 0 | 7 | 24 | 30 | 19 | 11 | -1 | -1 |
-- Copyright 2017 Maximilian Huber <oss@maximilian-huber.de>
-- SPDX-License-Identifier: MIT
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
module XMonad.MyConfig.MyLogHookLayer
( getXMProcs
, applyMyLogHook
) where
import GHC.IO.Handle (Handle ())
import System.IO ( hPutStrLn )
import XMonad
import XMonad.Util.Run ( spawnPipe )
import XMonad.Hooks.DynamicLog ( dynamicLogWithPP
, PP(..)
, xmobarColor
, wrap )
import XMonad.MyConfig.Common
import XMonad.MyConfig.Scratchpads ( scratchpadPPSort )
import XMonad.Layout.IndependentScreens
-- | Spawn one xmobar per connected screen and return their stdin handles.
-- Screen 0 gets the full config, every other screen the minimal one.
getXMProcs :: IO [Handle]
getXMProcs = let
    -- ("additinalConfig" sic — kept to avoid a rename-only change.)
    additinalConfig 0 = [pathToXmobarConfig]
    additinalConfig _ = [pathToXmobarMinConfig]
    mkXmobarCommand (S s) = unwords ([xmobarCMD, "-x", show s] ++ additinalConfig s)
  in do
    nScreens <- countScreens
    mapM (spawnPipe . mkXmobarCommand) [0 .. nScreens-1]
-- | Install a log hook that pretty-prints the status into each spawned
-- xmobar pipe.  Only the first screen's bar shows the (shortened) title.
applyMyLogHook xmprocs c =
  let
    maincolor = focusedBorderColor c
    myXmobarPP xmproc s = def { ppOutput = hPutStrLn xmproc
                              , ppCurrent = xmobarColor maincolor "" . wrap "<" ">"
                              , ppSort = scratchpadPPSort
                              , ppTitle = if s == 0
                                          then (" " ++) . shortenStatus . xmobarColor maincolor ""
                                          else (const "")
                              , ppVisible = xmobarColor maincolor ""
                              }
    myLogHook :: X ()
    -- One pretty-printer per xmobar handle, indexed by screen number.
    myLogHook = mapM_ dynamicLogWithPP $ zipWith myXmobarPP xmprocs [0..]
  in c { logHook = myLogHook }
-- | Compress file-system paths inside a status string: all directory
-- components but the last collapse to "...", e.g. "/a/b/c" -> "/.../c".
-- Words are processed independently (backslash-escaped spaces keep a word
-- together) and xmobar markup delimited by '<'/'>' is passed through.
shortenStatus :: String -> String
shortenStatus = let
    -- Shorten one word if it looks like a path ("~/...", "/...", or a
    -- bracketed "[...]" tag); anything else is returned untouched.
    -- NOTE(review): 'last p' is partial — a lone "[" would crash; confirm
    -- such input cannot occur in practice.
    shortenPaths :: String -> String
    shortenPaths ('~':('/': p)) = "~/" ++ shortenPaths' "" p
    shortenPaths ('/': p) = "/" ++ shortenPaths' "" p
    shortenPaths ('[': p) = '[' : if last p == ']'
                                    then shortenPaths' "" p
                                    else p
    shortenPaths p = p
    -- Accumulates the current component in the output; each '/' discards
    -- what was accumulated and restarts from ".../", so only the final
    -- component survives.  '<' or '>' aborts shortening for the rest.
    shortenPaths' :: String -- output
                  -> String -- input
                  -> String
    shortenPaths' r p@('<':_) = r ++ p
    shortenPaths' r p@('>':_) = r ++ p
    shortenPaths' r "" = r
    shortenPaths' r "/" = r ++ "/"
    shortenPaths' _ ('/':p) = shortenPaths' ".../" p
    shortenPaths' r (c:p) = shortenPaths' (r++[c]) p
    -- Split the status into words on unescaped spaces and shorten each.
    shortenStatus' :: String -- output
                   -> String -- prefix of last word
                   -> String -- input
                   -> String
    shortenStatus' r w "" = r ++ shortenPaths w
    shortenStatus' r w ('\\':(' ':is)) = shortenStatus' r (w ++"\\ ") is
    shortenStatus' r w (' ':is) = shortenStatus' (r ++ shortenPaths w ++ " ") "" is
    shortenStatus' r w (i:is) = shortenStatus' r (w++[i]) is
  in shortenStatus' "" ""
| maximilianhuber/myconfig | xmonad/lib/XMonad/MyConfig/MyLogHookLayer.hs | mit | 3,037 | 3 | 14 | 1,146 | 812 | 437 | 375 | 64 | 13 |
import Text.Regex.PCRE
import Data.List
import Data.Char
-- Advent of Code 2016 day 4 part 2: read rooms from stdin, keep those
-- whose checksum is valid, and print each decrypted (name, sector id).
main = do
  input <- getContents
  putStrLn . unlines
           . map (show . decode)
           . filter checkValid
           . map parseInput
           . lines
           $ input
-- Split "name-with-dashes-123[abcde]" into (name, sector id, checksum).
-- NOTE(review): 'head'/'tail' (and 'read') are partial here — a line that
-- does not match the regex crashes.  Acceptable for puzzle input.
parseInput :: String -> (String, Int, String)
parseInput s = (name, read id, checksum)
  where
    matched = s =~ "(.*)-(\\d+)\\[(.*)\\]"
    [name, id, checksum] = tail . head $ matched
-- | Tally each character of the name (dashes excluded) and order the
-- tallies by descending frequency, breaking ties by ascending character.
orderByFrequency :: String -> [(Int, Char)]
orderByFrequency input = sortBy byFreqThenChar (map tally runs)
  where
    runs = group (sort [c | c <- input, c /= '-'])
    tally run = (length run, head run)
    -- Swapping the counts reverses the frequency order while the
    -- characters keep their natural ascending order on ties.
    byFreqThenChar (n, c) (n', c') = compare (n', c) (n, c')
-- A room is real when its stored checksum equals the five most common
-- name characters (ties broken alphabetically by 'orderByFrequency').
checkValid (name, _, checksum) = checksum == actualChecksum
  where
    actualChecksum = map snd . take 5 . orderByFrequency $ name
-- | Decrypt a room name with a shift cipher: each lowercase ASCII letter
-- is rotated forward by the sector id, everything else becomes a space.
-- Returns the decrypted name paired with the sector id.
decode (name, sectorId, _) = (map rotate name, sectorId)
  where
    rotate c
      | isAsciiLower c = chr $ (ord c - ord 'a' + sectorId) `mod` 26 + ord 'a'
      | otherwise = ' '
| lzlarryli/advent_of_code_2016 | day4/part2.hs | mit | 1,012 | 0 | 14 | 300 | 398 | 209 | 189 | 27 | 2 |
module FS where
import Data.ByteString (ByteString)
import Control.Exception
import System.Posix.Types
import BufferedStream
-- Aliases documenting how the raw POSIX/bytestring values are used.
type EntryName = ByteString
type EntryDate = EpochTime
type EntrySize = FileOffset
-- | Size and modification time of a file; either may be unknown.
data FileStats = FileStats (Maybe EntrySize) (Maybe EntryDate)
  deriving (Show, Eq, Ord)
-- | A file-system entry: directories carry no stats, files do.
data Entry = Dir | File FileStats
  deriving (Show, Eq, Ord)
-- | Entry kind, used when a caller wants to constrain a lookup.
data EntryType = DirType | FileType
  deriving (Show, Eq, Ord)
-- | Thrown when a requested path does not exist.
data NotFoundException = NotFoundException
  deriving (Show, Eq, Ord)
instance Exception NotFoundException
-- | Record-of-functions file-system abstraction; paths are raw
-- 'ByteString's.  'getFileContent' takes a callback that receives the
-- file's stats before the content stream is produced.
data FS = FS {
  getEntry :: Maybe EntryType -> ByteString -> IO Entry,
  getDirectoryEntries :: ByteString -> IO [(EntryName, EntryType)],
  getFileContent :: (FileStats -> IO ()) -> ByteString -> IO BufferedStream
}
-- | 'getFileContent' with the stats callback ignored.
getFileContent' :: FS -> ByteString -> IO BufferedStream
getFileContent' fs = getFileContent fs (const $ return ())
| ffwng/httpfs | src/FS.hs | mit | 877 | 0 | 13 | 146 | 286 | 157 | 129 | 23 | 1 |
import System.Exit (ExitCode(..), exitFailure, exitSuccess, exitWith)
import Scan (runScan)
import Options.Applicative
-- | Command-line parser for the required "-s/--spec" option.
spec :: Parser FilePath
spec = strOption
  (short 's'
  <> long "spec"
  <> metavar "SPEC"
  <> help "Path to audit specification file")
-- | Full parser description: adds --help and the program header.
opts :: ParserInfo FilePath
opts = info (spec <**> helper) (fullDesc
  <> header "A tool for running security compliance scans")
-- | Translate a scan outcome into a process exit: errors are printed and
-- exit with code 255; a passing scan exits 0, a failing one exits 1.
exitOn :: Either String Bool -> IO ()
exitOn = either failure success
  where
    failure msg = putStrLn msg >> exitWith (ExitFailure 255)
    success ok
      | ok        = exitSuccess
      | otherwise = exitFailure
-- | Parse arguments, run the scan, and exit with the mapped status code.
main :: IO ()
main = execParser opts >>= runScan >>= exitOn
| rjosephwright/saytshen-haskell | src/bin/saytshen.hs | mit | 737 | 0 | 10 | 240 | 212 | 109 | 103 | 19 | 2 |
{-|
Module: Flaw.Oil.Repo
Description: General repo functions.
License: MIT
-}
{-# LANGUAGE DeriveGeneric #-}
module Flaw.Oil.Repo
( RepoVersion(..)
, serverRepoVersion
, clientRepoVersion
, Manifest(..)
, protocolVersion
, defaultManifest
, openRepoDb
, Revision
, UserId
, Push(..)
, Pull(..)
, checkPushLimits
, SyncError(..)
, Repo(..)
, repoDbCheckIntegrity
, repoDbVacuum
) where
import Control.Exception
import Control.Monad
import qualified Data.ByteString as B
import Data.Foldable
import Data.Int
import qualified Data.Serialize as S
import qualified Data.Text as T
import Foreign.C.Types
import GHC.Generics
import Flaw.Book
import Flaw.Data.Sqlite
import Flaw.Exception
-- Version tag stored in the SQLite "application_id" pragma.
newtype RepoVersion = RepoVersion CInt
-- | "application_id" in SQLite db of server repo.
-- Convention: oil<letter starting from A> in little-endian.
serverRepoVersion :: RepoVersion
serverRepoVersion = RepoVersion 0x416c696f -- "oilA"
-- | "application_id" in SQLite db of client repo.
-- Convention: oil<letter starting from a> in little-endian.
clientRepoVersion :: RepoVersion
clientRepoVersion = RepoVersion 0x616c696f -- "oila"
-- | Manifest with most limitations.
-- Sizes are in bytes (compared against 'B.length' during sync checks);
-- counts are per push/pull request.
data Manifest = Manifest
  { manifestProtocolVersion :: !Int32
  , manifestMaxKeySize :: !Int
  , manifestMaxValueSize :: !Int
  , manifestMaxPushItemsCount :: !Int
  , manifestMaxPushValuesTotalSize :: !Int
  , manifestMaxPullItemsCount :: !Int
  , manifestMaxPullValuesTotalSize :: !Int
  } deriving Generic
instance S.Serialize Manifest
-- | Current protocol version.
-- Convention: oil<digit starting from 0> in little-endian.
protocolVersion :: Int32
protocolVersion = 0x306c696f -- "oil0"
-- Limits used when nothing more specific is configured.
defaultManifest :: Manifest
defaultManifest = Manifest
  { manifestProtocolVersion = protocolVersion
  , manifestMaxKeySize = 128
  , manifestMaxValueSize = 1024 * 1024
  , manifestMaxPushItemsCount = 1024
  , manifestMaxPushValuesTotalSize = 1024 * 1024 * 2
  , manifestMaxPullItemsCount = 1024
  , manifestMaxPullValuesTotalSize = 1024 * 1024 * 2
  }
-- | Open (or create) a repo database, configure locking/journaling, and
-- verify its "application_id" matches the expected repo version — setting
-- it on first open.  Returns the handle paired with its release action.
openRepoDb :: T.Text -> RepoVersion -> IO (SqliteDb, IO ())
openRepoDb fileName (RepoVersion version) = withSpecialBook $ \bk -> do
  -- open db
  db <- book bk $ sqliteDb fileName
  -- enable exclusive locking mode
  sqliteExec db $ T.pack "PRAGMA locking_mode = EXCLUSIVE"
  -- enable WAL journal mode
  sqliteExec db $ T.pack "PRAGMA journal_mode = WAL"
  -- check version
  withBook $ \tempBk -> do
    stmt <- book tempBk $ sqliteStmt db $ T.pack "PRAGMA application_id"
    sqliteQuery stmt $ \query -> do
      r <- sqliteStep query
      unless r $ throwIO $ DescribeFirstException "failed to get application_id"
      currentAppVersion <- sqliteColumn query 0
      -- if version is not set yet (0 = fresh database)
      if currentAppVersion == 0 then
        -- set it
        sqliteExec db $ T.pack $ "PRAGMA application_id = " ++ show version
      -- else check that version is correct
      else when (currentAppVersion /= version) $ throwIO $ DescribeFirstException "wrong application_id"
  return db
-- | Type for revisions.
-- Start revision is 1. 0 means no revisions.
type Revision = Int64
-- | Type for user ID.
type UserId = Int64
-- | Data sent by client to server.
data Push = Push
  {
  -- | Current revision of the client.
    pushClientRevision :: !Revision
  -- | Upper bound on revisions server may send to client.
  -- Used to prevent sending revisions client already knows about.
  , pushClientUpperRevision :: !Revision
  -- | Pushed (key, value) pairs.
  , pushItems :: [(B.ByteString, B.ByteString)]
  } deriving Generic
instance S.Serialize Push
-- | Data sent by server to client.
data Pull = Pull
  {
  -- | Total number of revisions client needs to pull in order to catch up with server, counting from pushClientRevision
    pullLag :: !Int64
  -- | Server revision before pushing items.
  , pullPrePushRevision :: !Revision
  -- | Server revision after pushing items (should be equal to pre-push revision + number of items pushed).
  , pullPostPushRevision :: !Revision
  -- | Pairs (key, value) to pull.
  , pullItems :: [(Revision, B.ByteString, B.ByteString)]
  -- | New client revision after whole operation.
  , pullNewClientRevision :: !Revision
  } deriving Generic
instance S.Serialize Pull
-- | Check push limits.
-- | Check push limits.
-- Returns 'Nothing' when the push is within the manifest's limits,
-- otherwise the first limit violation found.
checkPushLimits :: Manifest -> Push -> Maybe SyncError
checkPushLimits Manifest
  { manifestMaxKeySize = maxKeySize
  , manifestMaxValueSize = maxValueSize
  , manifestMaxPushItemsCount = maxPushItemsCount
  , manifestMaxPushValuesTotalSize = maxPushValuesTotalSize
  } Push
  { pushItems = items
  } = maybeError where
  maybeError =
    if length items > maxPushItemsCount then Just SyncTooManyItemsError
    else case foldrM f 0 items of
      Right _valuesTotalSize -> Nothing
      Left err -> Just err
  -- Fold over items accumulating the total value size (in bytes), failing
  -- fast on the first oversized key/value or when the running total
  -- exceeds the per-push budget.
  f (key, value) valuesTotalSize
    | B.length key > maxKeySize = Left SyncTooBigKeyError
    | B.length value > maxValueSize = Left SyncTooBigValueError
    | otherwise = let newValuesTotalSize = valuesTotalSize + B.length value in
      if newValuesTotalSize > maxPushValuesTotalSize then Left SyncTooBigPushValuesTotalSize
      else Right newValuesTotalSize
-- | Errors while syncing (reported to client).
data SyncError
  = SyncTooManyItemsError
  | SyncTooBigKeyError
  | SyncTooBigValueError
  | SyncTooBigPushValuesTotalSize
  | SyncFatalError
  deriving Generic
-- | Things that own a SQLite repo database.
class Repo r where
  -- | Get SQLite DB.
  repoDb :: r -> SqliteDb
-- | Check integrity of DB.
-- Runs SQLite's "PRAGMA integrity_check"; returns whether the report was
-- the single line "ok", together with the full report text.
repoDbCheckIntegrity :: SqliteDb -> IO (Bool, T.Text)
repoDbCheckIntegrity db = withBook $ \bk -> do
  stmt <- book bk $ sqliteStmt db $ T.pack "PRAGMA integrity_check"
  lns <- sqliteQuery stmt $ \query -> do
    let
      -- Drain every result row into a list of report lines.
      step = do
        r <- sqliteStep query
        if r then do
          line <- sqliteColumn query 0
          restLines <- step
          return $ line : restLines
        else return []
      in step
  return (lns == [T.pack "ok"], T.unlines lns)
-- | Optimize DB (reclaims free pages and defragments the file).
repoDbVacuum :: SqliteDb -> IO ()
repoDbVacuum db = sqliteExec db $ T.pack "VACUUM"
| quyse/flaw | flaw-oil/Flaw/Oil/Repo.hs | mit | 6,052 | 0 | 24 | 1,205 | 1,246 | 679 | 567 | 158 | 4 |
{-|
This module provides an API to render primitives and @'Sprite'@s within the
framework.
-}
module FRP.Spice.Internal.Graphics ( bindColor
, color4f
, color3f
, color4i
, color3i
, black
, white
, grey
, gray
, red
, green
, blue
, renderPoint
, renderLine
, renderTriangle
, renderRectangle
, renderSquare
, renderPolygon
, renderSprite
, renderSpriteWithSize
) where
--------------------
-- Global Imports --
import Graphics.Rendering.OpenGL hiding (Color)
import Control.Monad
-------------------
-- Local Imports --
import FRP.Spice.Internal.Types
----------
-- Code --
{-|
Converting a @'Float'@ to a @'GLfloat'@.
-}
toGL :: Float -> GLfloat
toGL = realToFrac
{-|
  Binding a color to change the current OpenGL color (including alpha).
-}
bindColor :: Color -> Scene
bindColor (Color r g b a) = color $ Color4 (toGL r) (toGL g) (toGL b) (toGL a)
{-|
  Constructing a color from 4 @'Float'@s (red, green, blue, alpha).
-}
color4f :: Float -> Float -> Float -> Float -> Color
color4f = Color
{-|
  Constructing a color from 3 @'Float'@s, with the alpha channel defaulting to
  its maximum (of 1.0).
-}
color3f :: Float -> Float -> Float -> Color
color3f r g b = color4f r g b 1.0
{-|
  Constructing a color from 4 @'Int'@s in [0, 255], normalised to [0, 1].
-}
color4i :: Int -> Int -> Int -> Int -> Color
color4i r g b a =
  color4f (fromIntegral r / 255)
          (fromIntegral g / 255)
          (fromIntegral b / 255)
          (fromIntegral a / 255)
{-|
  Constructing a color from 3 @'Int'@s, with the alpha channel defaulting to
  its maximum (of 255).
-}
color3i :: Int -> Int -> Int -> Color
color3i r g b = color4i r g b 255
{-|
The color black.
-}
black :: Color
black = color3i 0 0 0
{-|
  The color white.
-}
white :: Color
white = color3i 255 255 255
{-|
  The color grey (the midpoint between black and white).
-}
grey :: Color
-- Previously defined as 255 255 255 — i.e. identical to 'white' — which
-- contradicted its own documentation; 128 128 128 is conventional grey.
grey = color3i 128 128 128
{-|
  A synonym for the color grey.
-}
gray :: Color
gray = grey
{-|
  The color red.
-}
red :: Color
red = color3i 255 0 0
{-|
  The color green.
-}
green :: Color
green = color3i 0 255 0
{-|
  The color blue.
-}
blue :: Color
blue = color3i 0 0 255
{-|
Rendering a primitive.
-}
-- Helper: emit each 2-D vector as an OpenGL vertex inside a single
-- renderPrimitive block of the given mode.
renderPrimitive' :: PrimitiveMode -> [Vector Float] -> Scene
renderPrimitive' pm l =
  renderPrimitive pm $
    forM_ l $ \(Vector x y) ->
      vertex $ Vertex2 (toGL x) (toGL y)
{-|
  Rendering a position.
-}
renderPoint :: Vector Float -> Scene
renderPoint = renderPrimitive' Points . return
{-|
  Rendering a line.
-}
renderLine :: Vector Float -> Vector Float -> Scene
renderLine p1 p2 = renderPrimitive' Lines [p1, p2]
{-|
  Rendering a triangle.
-}
renderTriangle :: Vector Float -> Vector Float -> Vector Float -> Scene
renderTriangle p1 p2 p3 = renderPrimitive' Triangles [p1, p2, p3]
{-|
  Rendering a rectangle from its corner position and a width/height extent.
-}
renderRectangle :: Vector Float -> Vector Float -> Scene
renderRectangle (Vector x y) (Vector w h) =
  renderPrimitive' Quads [ Vector (x    ) (y    )
                         , Vector (x + w) (y    )
                         , Vector (x + w) (y + h)
                         , Vector (x    ) (y + h)
                         ]
{-|
  Rendering a square from its corner position and side length.
-}
renderSquare :: Vector Float -> Float -> Scene
renderSquare pos x = renderRectangle pos $ Vector x x
{-|
  Rendering a polygon of any n sides.
-}
renderPolygon :: [Vector Float] -> Scene
renderPolygon l = renderPrimitive' Polygon l
{-|
Rendering a @'Sprite'@ at the position specified.
-}
renderSprite :: Sprite -> Vector Float -> Scene
renderSprite sprite pos = do
  -- Configure wrap/filter/environment modes and bind the sprite's texture.
  textureWrapMode Texture2D S $= (Repeated, ClampToEdge)
  textureWrapMode Texture2D T $= (Repeated, ClampToEdge)
  textureFilter Texture2D $= ((Linear', Nothing), Linear')
  textureFunction $= Replace
  texture Texture2D $= Enabled
  textureBinding Texture2D $= (Just $ spriteTex sprite)
  -- One textured quad: each corner is paired with its (u, v) tex coord.
  renderPrimitive Quads $
    forM_ (generateCoords pos $ spriteSize sprite) $ \(Vector x y, Vector tx ty) -> do
      texCoord $ TexCoord2 (toGL tx) (toGL ty)
      vertex $ Vertex2 (toGL x) (toGL y)
  texture Texture2D $= Disabled
  where generateCoords :: Vector Float -> Vector Float -> [(Vector Float, Vector Float)]
        generateCoords (Vector x y) (Vector w h) =
          [ (Vector (x    ) (y    ), Vector 0 0)
          , (Vector (x + w) (y    ), Vector 1 0)
          , (Vector (x + w) (y + h), Vector 1 1)
          , (Vector (x    ) (y + h), Vector 0 1)
          ]
{-|
  Rendering a @'Sprite'@ at the position specified with the size specified.
-}
renderSpriteWithSize :: Sprite -> Vector Float -> Vector Float -> Scene
renderSpriteWithSize sprite pos size =
  renderSprite (sprite { spriteSize = size }) pos
| crockeo/spice | src/FRP/Spice/Internal/Graphics.hs | mit | 5,279 | 0 | 14 | 1,833 | 1,341 | 709 | 632 | 96 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Y2018.M07.D10.Exercise where
{--
So, yesterday we explored JSON structure
(EVERYTHING IS A MAP! ... EXCEPT WHAT ISN'T, BUT OKAY!)
Today we're going to explore TWO (much smaller) JSON structures and transform
one to another.
Because, like XML, JSON is all about transformation, baybee!
--}
import Data.Aeson
import Data.Aeson.Encode.Pretty (encodePretty)
import Data.Map (Map)
-- the input JSON (being output from an analysis tool)
exDir, input :: FilePath
exDir = "Y2018/M07/D10/"
input = "output.json"
-- yeah, the input is the output of the analysis tool. Deal.
{--
The input is in the following format:
Map EntityName { wiki_info: Wiki, scores: [related articles], queryEnt: Int }
where:
--}
-- | Wikipedia metadata for an entity; text/summary/image may be absent.
data Wiki = Wikt { wname, wtitle, wurl :: String,
                   wtext, wsum, wimg :: Maybe String }
  deriving Show
instance FromJSON Wiki where
  -- Exercise stub: to be implemented by the solver.
  parseJSON (Object o) = undefined
type EntityName = String
type Input = Map EntityName Analysis
-- and out Analysis is a composition of the wiki info, scores, and query
data Analysis = Ysis { wikt :: Wiki, scores :: [Value], query :: Double }
  deriving Show
{--
Now, you would think this would Just Work(tm). And it would, if this were
well-structured JSON.
But it's not well-structured JSON. Check out this embedded entry:
"big brother": {
"scores": "",
"query_entity_score": ""
},
wut. So much for well-structured JSON. How do we deal with this? I don't know.
I think what we have to do is to stage the parsing into ProtoAnalysis then
convert ProtoAnalysis to Analysis iff it has wiki_info. Let's try that.
--}
-- | Lenient first-pass parse: the wiki info may be missing (some entries
-- are empty strings rather than objects), so scores/query stay raw 'Value's.
data ProtoAnalysis = PA { paWik :: Maybe Wiki, paScores, paQuery :: Value }
  deriving Show
instance FromJSON ProtoAnalysis where
  -- Exercise stub: to be implemented by the solver.
  parseJSON (Object o) = undefined
readProto :: FilePath -> IO (Map EntityName ProtoAnalysis)
readProto file = undefined
-- That will work. Now we convert a Proto to Analysis
proto2analysis :: ProtoAnalysis -> Maybe Analysis
proto2analysis prot = undefined
-- then we sequence the result to get our Input value from the JSON
readInputJSON :: FilePath -> IO Input
readInputJSON file = undefined
| geophf/1HaskellADay | exercises/HAD/Y2018/M07/D10/Exercise.hs | mit | 2,255 | 0 | 9 | 442 | 288 | 173 | 115 | 27 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
module Goolosh.SDL.Output where
import Control.Monad(Monad(..))
import System.IO
import Data.Maybe
import qualified Data.Conduit as C
import qualified SDL
import SDL(($=))
import qualified Linear as L
import Goolosh.Game.Output
import Goolosh.SDL.State
import Goolosh.SDL.Output.Color
import Goolosh.SDL.Output.Debug
import Goolosh.SDL.Output.BasicRaster
import Goolosh.Geom.Drawable
-- | Conduit sink that renders each incoming 'GameOutput' frame: clear to
-- the background color, rasterise, present, then loop.  The loop ends
-- when upstream closes (the 'Nothing' branch does not recurse).
gameOutputSink :: SDLState -> C.Sink GameOutput IO ()
gameOutputSink s@SDLState{..} = do
  game' <- C.await
  SDL.rendererDrawColor sdlRenderer $= sdlBG
  SDL.clear sdlRenderer
  case game' of
    Nothing -> return ()
    Just game -> do
      gameNaiveRaster s game
      -- gameDrawDebug s game
      SDL.present sdlRenderer
      gameOutputSink s
  where
    bg = backGroundColor
    sdlBG = toRGBA32 bg
-- | LeviSchuck/Goolosh | src/Goolosh/SDL/Output.hs | mit | 917 | 0 | 13 | 201 | 232 | 131 | 101 | 29 | 2 |
module Language.Paradocs.Types( module Language.Paradocs.EscapeEnv
, module Language.Paradocs.File
, module Language.Paradocs.MacroEnv
, module Language.Paradocs.MonadStorage
, module Language.Paradocs.RendererState
, module Language.Paradocs.Rule
, module Language.Paradocs.RuleEnv
, module Language.Paradocs.RuleName
, module Language.Paradocs.Token
) where
import Language.Paradocs.EscapeEnv(EscapeEnv)
import Language.Paradocs.File(File, sourceCode, sourceLine, sourceToken, sourcePath, sourceTokens, lineno, columnno)
import Language.Paradocs.MacroEnv(MacroEnv)
import Language.Paradocs.MonadStorage(MonadStorage, HashMapStorage)
import Language.Paradocs.RendererState(FileStackValue(..), RuleStackValue(..), RendererState, Rendered(..), stackRetFile, stackCallFile, stackRuleName, stackStructure, ruleEnv, workingFile, workingRuleName, defRuleName, workingStructure, fileStack, ruleStack)
import Language.Paradocs.Rule(Rule, ancestors, macroEnv, escapeEnv, before, after, indent)
import Language.Paradocs.RuleEnv(RuleEnv)
import Language.Paradocs.RuleName(RelativeRuleName, AbsoluteRuleName)
import Language.Paradocs.Token(TokenKind(..), Token(..))
| pasberth/paradocs | Language/Paradocs/Types.hs | mit | 1,426 | 0 | 6 | 349 | 281 | 189 | 92 | 18 | 0 |
-- | Astronomical calculations
module Astro where
import System.IO
-- | Convert from degrees to radians.
fromDegrees :: Floating a => a -> a
fromDegrees = (/ 180) . (* pi)
-- | Check whether a Gregorian year is a leap year: divisible by 4,
-- except century years, which must be divisible by 400.
isLeapYear :: Integral a => a -> Bool
isLeapYear year = divisibleBy 4 && (not (divisibleBy 100) || divisibleBy 400)
  where
    divisibleBy d = year `mod` d == 0
-- | Convert a Julian day number to a proleptic Gregorian @(year, month, day)@.
--
-- Uses the all-integer algorithm of Richards (Explanatory Supplement to the
-- Astronomical Almanac, 3rd ed.).  The previous version demanded the
-- unsatisfiable constraint @(Integral t, Fractional t)@ and used '/' where
-- floor division was intended; every division here is now integral ('div'
-- is floor division in Haskell, matching the algorithm's requirements).
fromJulianDay :: Integral t => t -> (t, t, t)
fromJulianDay jd = (year, month, day)
    where
        f = jd + 1401 + (((4 * jd + 274277) `div` 146097) * 3) `div` 4 - 38
        e = 4 * f + 3
        g = (e `mod` 1461) `div` 4
        h = 5 * g + 2
        day = (h `mod` 153) `div` 5 + 1
        month = ((h `div` 153) + 2) `mod` 12 + 1
        year = (e `div` 1461) - 4716 + (12 + 2 - month) `div` 12
-- | Convert from radians to degrees.
toDegrees :: Floating a => a -> a
toDegrees = (/ pi) . (* 180)
-- | Convert a Gregorian date and time to a full (fractional) Julian date.
toJulianDate :: Integral a => (a, a, a, Rational, Rational, Rational) -> Float
toJulianDate (year, month, day, h, m, s) = fromRational (dayPart + timePart)
    where
        dayPart = toRational (toJulianDay (year, month, day))
        timePart = toJulianTime (h, m, s)
-- | Convert a Gregorian @(year, month, day)@ to a Julian day number.
--
-- Same computation as before, but with integral 'div' in place of
-- @floor@ over 'Rational' intermediates; 'div' is floor division, so the
-- results are identical for every input.
toJulianDay :: Integral a => (a, a, a) -> a
toJulianDay (year, month, day)
    = day + (153 * m + 2) `div` 5 + 365 * y
    + y `div` 4 - y `div` 100
    + y `div` 400 - 32045
    where
        a = (14 - month) `div` 12
        y = year + 4800 - a
        m = month + 12 * a - 3
-- | Convert a time of day to a fraction of a Julian day.  Julian days start
-- at noon, hence the @hour - 12@ offset.
toJulianTime :: Fractional a => (a, a, a) -> a
toJulianTime (hour, minute, second) = hourPart + minutePart + secondPart
    where
        hourPart = (hour - 12) / 24
        minutePart = minute / 1440
        secondPart = second / 86400
| joshforisha/astro | Astro.hs | mit | 1,786 | 0 | 17 | 479 | 766 | 422 | 344 | 36 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
-- | Utility code intended to simplify writing bindings to JSON APIs.
module Network.HTTP.API where
import Control.Applicative
import Control.Monad.Reader
import Control.Monad.Trans.Either
import Control.Monad.Trans.Resource
import Data.Aeson
import Data.ByteString (ByteString)
import Data.Maybe
import Data.Text.Encoding
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Network.HTTP.Conduit
-- | A catch-all error type for common request failures.
--
-- 'InvalidJSON' signals a response body that failed to decode;
-- 'ExceptionalStatusCodeError' carries a description of an unexpected
-- HTTP status.
data APIError = InvalidJSON | ExceptionalStatusCodeError String
deriving (Show)
-- | RequestMiddleware allows requests to be customized before being sent.
--
-- This is particularly useful for dealing with APIs that require some form
-- of custom authenticaton or for attaching additional headers to requests.
type RequestMiddleware = Request (ResourceT IO) -> Request (ResourceT IO)
-- | Execute the API client: parse the base URL into a template request,
-- share one connection manager across all calls, and run the client's
-- Reader/Either stack down to @IO (Either APIError a)@.
runAPIClient :: String -> RequestMiddleware -> APIClient a -> IO (Either APIError a)
runAPIClient base middleware m = withManager $ \man -> do
  baseReq <- parseUrl base
  let settings = ClientSettings baseReq man middleware
  runEitherT (runReaderT (fromAPIClient m) settings)
-- | Attempt to decode a response body into the expected JSON type, or fail
-- the client computation with 'InvalidJSON'.
jsonize :: (FromJSON a) => Response L.ByteString -> APIClient (Response a)
jsonize r =
  case decode (responseBody r) of
    Nothing -> APIClient (lift (left InvalidJSON))
    Just jsonResp -> APIClient (return r { responseBody = jsonResp })
-- | The basic settings for the client to run each request with.
data ClientSettings = ClientSettings
  { baseRequest :: Request (ResourceT IO) -- ^ A request with the base URL and any other constants set.
  , clientManager :: Manager -- ^ Manager to share across all client requests
  , requestMiddleware :: RequestMiddleware -- ^ Any additional transformations that should be applied prior to sending a request.
  }
-- | The Base API client type. Intended to be wrapped in a newtype for libraries that use it.
--
-- Stack: 'ReaderT' supplies the 'ClientSettings', 'EitherT' short-circuits
-- on 'APIError', and 'ResourceT IO' manages connection resources.
newtype APIClient a = APIClient { fromAPIClient :: ReaderT ClientSettings (EitherT APIError (ResourceT IO)) a }
  deriving (Functor, Applicative, Monad, MonadIO)
-- | Perform a GET on the given path, decoding the response from JSON.
get :: FromJSON a => ByteString -> APIClient (Response a)
get p = APIClient $ do
  ClientSettings baseReq man middleware <- ask
  let request = middleware (baseReq { path = p })
  response <- lift (lift (httpLbs request man))
  fromAPIClient (jsonize response)
-- | Shared worker for the verbs that send a JSON-encoded request body and
-- decode a JSON response (PUT, POST, PATCH, DELETE).  The four public verbs
-- below were previously four identical bodies differing only in the method
-- string; their signatures and behaviour are unchanged.
sendJSONBody :: (ToJSON a, FromJSON b) => ByteString -> ByteString -> a -> APIClient (Response b)
sendJSONBody verb p v = APIClient $ do
  (ClientSettings req man middleware) <- ask
  let r = middleware $ req { method = verb, path = p, requestBody = RequestBodyLBS $ encode v }
  resp <- lift $ lift $ httpLbs r man
  fromAPIClient $ jsonize resp
-- | Perform a PUT on the given path, encoding the request into JSON and decoding the response from JSON.
put :: (ToJSON a, FromJSON b) => ByteString -> a -> APIClient (Response b)
put = sendJSONBody "PUT"
-- | Perform a POST on the given path, encoding the request into JSON and decoding the response from JSON.
post :: (ToJSON a, FromJSON b) => ByteString -> a -> APIClient (Response b)
post = sendJSONBody "POST"
-- | Perform a PATCH on the given path, encoding the request into JSON and decoding the response from JSON.
patch :: (ToJSON a, FromJSON b) => ByteString -> a -> APIClient (Response b)
patch = sendJSONBody "PATCH"
-- | Perform a DELETE on the given path, encoding the request into JSON and decoding the response from JSON.
--
-- An input value is often unnecessary for DELETEs, but most APIs seem to accept () as an input.
-- Future versions of this library will likely do away with the input value and make a second function
-- for deleting with an body value.
delete :: (ToJSON a, FromJSON b) => ByteString -> a -> APIClient (Response b)
delete = sendJSONBody "DELETE"
| SaneApp/easy-api | src/Network/HTTP/API.hs | mit | 4,466 | 0 | 15 | 825 | 1,077 | 567 | 510 | 60 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Data.NGH.Formats.Gff
( readAnnotations
, genesAsIntervalMap
) where
import Data.NGH.Annotation
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Data.IntervalMap.FingerTree as IM
-- | Parse a whole lazy GFF document into its annotation lines.
readAnnotations :: L.ByteString -> [GffLine]
readAnnotations input = readAnnotations' (L8.lines input)
-- | Build an interval map over just the gene features of a GFF document.
-- NOTE(review): 'intervals' is not defined in this module; presumably it is
-- exported by "Data.NGH.Annotation" -- confirm.
genesAsIntervalMap :: L.ByteString -> IM.IntervalMap Int S.ByteString
genesAsIntervalMap = intervals . filter ((==GffGene) . gffType) . readAnnotations
-- | Parse GFF lines until the end of input or the optional trailing FASTA
-- section.  Comment lines (leading @#@) are skipped and a @>@ (FASTA header)
-- terminates parsing.
--
-- The previous version used 'L8.head', which crashes on an empty line (e.g.
-- a trailing newline); 'L8.uncons' handles that case by skipping the line.
readAnnotations' :: [L.ByteString] -> [GffLine]
readAnnotations' [] = []
readAnnotations' (l:ls) = case L8.uncons l of
    Nothing -> readAnnotations' ls
    Just ('#', _) -> readAnnotations' ls
    Just ('>', _) -> []
    Just _ -> (readLine l:readAnnotations' ls)
-- | Parse one tab-separated GFF line (exactly 9 fields) into a 'GffLine';
-- calls 'error' on any line with a different field count.
-- NOTE(review): 'L8.head tk6' assumes the strand field is non-empty, and
-- 'read' on the coordinate fields assumes well-formed integers -- a
-- malformed file crashes rather than returning a parse error.
readLine :: L.ByteString -> GffLine
readLine line = if length tokens == 9
            then GffLine
                (strict tk0)
                (strict tk1)
                (parsegffType $ strict tk2)
                (read $ L8.unpack tk3)
                (read $ L8.unpack tk4)
                (score tk5)
                (strand $ L8.head tk6)
                (phase tk7)
                (strict tk8)
            else error (concat ["unexpected line in GFF: ", show line])
    where
        tokens = L8.split '\t' line
        -- Irrefutable bind: safe because the 9-field check above guards it.
        [tk0,tk1,tk2,tk3,tk4,tk5,tk6,tk7,tk8] = tokens
        parsegffType "exon" = GffExon
        parsegffType "gene" = GffGene
        parsegffType "CDS" = GffCDS
        parsegffType t = GffOther t
        -- "." means "no score" in GFF.
        score "." = Nothing
        score v = Just (read $ L8.unpack v)
        strand '.' = GffUnStranded
        strand '+' = GffPosStrand
        strand '-' = GffNegStrand
        strand '?' = GffUnknownStrand
        strand _ = error "unhandled value for strand"
        -- "." means "no phase"; encoded as -1.
        phase "." = -1
        phase r = read (L8.unpack r)
-- | Collapse a lazy ByteString's chunks into a single strict ByteString.
strict :: L.ByteString -> S.ByteString
strict lbs = S.concat (L.toChunks lbs)
| luispedro/NGH | Data/NGH/Formats/Gff.hs | mit | 1,944 | 0 | 11 | 583 | 578 | 311 | 267 | 49 | 11 |
module Main where
import System.IO( hFlush, stdout )
import Data.List
import Data.Dynamic
import Data.Maybe
import qualified Data.Set as S
import qualified Data.Map as M
import Test.QuickCheck
import SAT hiding ( Lit )
import SAT.Unary
import SAT.Val
import SAT.Equal
-- | A variable is a display name paired with its type.
type Var = (String,Type)

-- | An infinite supply of variables of type @t@: first the bare base names,
-- then each base name suffixed with 1, 2, 3, ...
makeVars :: Type -> [String] -> [Var]
makeVars t xs = plain ++ numbered
 where
  plain = [ (x, t) | x <- xs ]
  numbered = [ (x ++ show i, t) | i <- [1..], x <- xs ]
-- | A runtime representation of a test-data type: its name, a QuickCheck
-- generator producing 'Dynamic' values, its variable supply, and a
-- type-specific equality on 'Dynamic'.
data Type
  = Type
  { tname :: String
  , gen :: Gen Dynamic
  , vars :: [Var]
  , eq :: Dynamic -> Dynamic -> Bool
  }

instance Show Type where
  show tp = tname tp

-- Types are identified purely by name; 'Eq' is derived from 'Ord' so the
-- two agree.
instance Ord Type where
  p `compare` q = tname p `compare` tname q

instance Eq Type where
  a == b = a `compare` b == EQ
-- Concrete 'Type' descriptors used by the example signatures below.  Each
-- one packs a generator, a variable-name prefix, and equality at the
-- underlying Haskell type.  A and B are both represented by Int but are
-- kept distinct so signatures can be polymorphic-looking.
tBool :: Type
tBool = Type "Bool" (toDyn `fmap` (arbitrary :: Gen Bool)) (makeVars tBool ["B"])
        (\x y -> fromDyn x (undefined :: Bool) == fromDyn y (undefined :: Bool))

tNat :: Type
tNat = Type "Nat" (toDyn `fmap` (arbi `suchThat` (>=0) :: Gen Nat)) (makeVars tNat ["N"])
       (\x y -> fromDyn x (undefined :: Nat) == fromDyn y (undefined :: Nat))

tListNat :: Type
tListNat = Type "[Nat]" (toDyn `fmap` (listOf (arbitrary `suchThat` (>=0)) :: Gen [Nat])) (makeVars tListNat ["Ns"])
           (\x y -> fromDyn x (undefined :: [Nat]) == fromDyn y (undefined :: [Nat]))

tA :: Type
tA = Type "A" (toDyn `fmap` (arbi :: Gen Int)) (makeVars tA ["X"])
     (\x y -> fromDyn x (undefined :: Int) == fromDyn y (undefined :: Int))

tB :: Type
tB = Type "B" (toDyn `fmap` (arbi :: Gen Int)) (makeVars tB ["Y"])
     (\x y -> fromDyn x (undefined :: Int) == fromDyn y (undefined :: Int))

tAB :: Type
tAB = Type "(A,B)" (toDyn `fmap` (arbitrary :: Gen (Int,Int))) (makeVars tAB ["P"])
      (\x y -> fromDyn x (undefined :: (Int,Int)) == fromDyn y (undefined :: (Int,Int)))

tListA :: Type
tListA = Type "[A]" (toDyn `fmap` (listOf arbitrary :: Gen [Int])) (makeVars tListA ["Xs"])
         (\x y -> fromDyn x (undefined :: [Int]) == fromDyn y (undefined :: [Int]))

tListB :: Type
tListB = Type "[B]" (toDyn `fmap` (listOf arbitrary :: Gen [Int])) (makeVars tListB ["Ys"])
         (\x y -> fromDyn x (undefined :: [Int]) == fromDyn y (undefined :: [Int]))

tListAB :: Type
tListAB = Type "[(A,B)]" (toDyn `fmap` (listOf arbitrary :: Gen [(Int,Int)])) (makeVars tListAB ["Ps"])
          (\x y -> fromDyn x (undefined :: [(Int,Int)]) == fromDyn y (undefined :: [(Int,Int)]))

tListAA :: Type
tListAA = Type "[(A,A)]" (toDyn `fmap` (listOf arbitrary :: Gen [(Int,Int)])) (makeVars tListAA ["Ps"])
          (\x y -> fromDyn x (undefined :: [(Int,Int)]) == fromDyn y (undefined :: [(Int,Int)]))

-- Small-range Int generator (0..5) used for A/B/Nat so collisions between
-- generated values are likely, which helps the SAT search find equalities.
arbi = choose (0,5)
-- | A dynamically-typed function symbol: its name, argument types, result
-- type, and an untyped applier over 'Dynamic' values.
data Fun
  = Fun
  { name :: String
  , args :: [Type]
  , result :: Type
  , app :: [Dynamic] -> Dynamic
  }

instance Show Fun where
  show f = name f
-- Wrap typed functions of arity 1..3 as untyped '[Dynamic] -> Dynamic'
-- appliers.  Each is intentionally partial: it is only defined for an
-- argument list of exactly the expected length, and 'fromDyn' will hit
-- 'undefined' if an argument has the wrong runtime type.
fun1 :: (Typeable a, Typeable b) => (a -> b) -> [Dynamic] -> Dynamic
fun1 f [x] = toDyn (f (fromDyn x undefined))

fun2 :: (Typeable a, Typeable b, Typeable c) => (a -> b -> c) -> [Dynamic] -> Dynamic
fun2 f [x,y] = toDyn (f (fromDyn x undefined) (fromDyn y undefined))

fun3 :: (Typeable a, Typeable b, Typeable c, Typeable d) => (a -> b -> c -> d) -> [Dynamic] -> Dynamic
fun3 f [x,y,z] = toDyn (f (fromDyn x undefined) (fromDyn y undefined) (fromDyn z undefined))
-- | Natural numbers are represented as 'Int' (non-negativity is enforced
-- only by the generators above).
type Nat = Int

-- Function symbols are identified by name alone.
instance Ord Fun where
  f `compare` g = name f `compare` name g

instance Eq Fun where
  a == b = a `compare` b == EQ

-- | A signature is just the list of available function symbols.
type Sig = [Fun]
-- | A literal of the speculated clauses:
--   @(f,xs) :/=: y@   -- "f(xs) is not y" (a definition literal, negated),
--   @(p,xs) :<=>: b@  -- "p(xs) has truth value b",
--   @x :=: y@         -- "x equals y".
data Lit
  = (Fun,[Var]) :/=: Var
  | (Fun,[Var]) :<=>: Bool
  | Var :=: Var

instance Show Lit where
  show ((f,xs):/=:y) = show f ++ "(" ++ intercalate "," [ x | (x,_) <- xs] ++ ")!=" ++ fst y
  show ((p,xs):<=>:b) = (if b then "" else "~") ++ show p ++ "(" ++ intercalate "," [ x | (x,_) <- xs] ++ ")"
  show (x :=: y) = fst x ++ "=" ++ fst y

-- Ordering: equality literals sort first, then function/predicate literals.
-- NOTE(review): the mixed :/=: vs :<=>: cases compare only the function
-- symbols, so two structurally different literals over the same symbol
-- compare EQ while '==' says they differ -- Ord and Eq are inconsistent
-- here.  Sorting still works, but Ord-based containers would not; confirm
-- before changing.
instance Ord Lit where
  (x:=:y) `compare` (v:=:w) = (x,y) `compare` (v,w)
  (x:=:y) `compare` _ = LT
  _ `compare` (v:=:w) = GT
  ((f,xs):/=:x) `compare` ((g,ys):/=:y) = (f,xs,x) `compare` (g,ys,y)
  ((f,xs):<=>:a) `compare` ((g,ys):<=>:b) = (f,xs,a) `compare` (g,ys,b)
  ((f,xs):/=:x) `compare` ((g,ys):<=>:b) = f `compare` g
  ((f,xs):<=>:a) `compare` ((g,ys):/=:y) = f `compare` g

instance Eq Lit where
  a == b = a `compare` b == EQ
-- | A substitution maps variables to concrete ('Dynamic') test values.
type Sub = [(Var,Dynamic)]

-- | Evaluate a literal under a substitution.  Uses the result type's own
-- 'eq'.  'fromJust' assumes every variable of the literal is bound in the
-- substitution.
eval :: Sub -> Lit -> Bool
eval sub ((f,xs):/=:y@(_,t)) =
  not $ eq t (app f [ a | x <- xs, Just a <- [lookup x sub] ])
             (fromJust (lookup y sub))
eval sub ((p,xs):<=>:b) =
  fromDyn (app p [ a | x <- xs, Just a <- [lookup x sub] ])
          (undefined :: Bool)
    == b
eval sub (x:=:y@(_,t)) =
  eq t (fromJust (lookup x sub)) (fromJust (lookup y sub))
-- | Enumerate every candidate literal for a signature: one @:/=:@ literal
-- per non-Bool function symbol, argument tuple and result variable; one
-- @:<=>:@ literal (each polarity) per predicate symbol and argument tuple;
-- and one ordered equality per pair of distinct variables of each type.
-- @tyNum t@ bounds how many variables of type @t@ are considered.
-- NOTE(review): '.=.' is also referenced by 'swapv' elsewhere in this file;
-- its indentation here suggests it is local to 'literals', which would not
-- compile -- in the original source it is presumably top-level; confirm.
literals :: (Type -> Int) -> Sig -> [Lit]
literals tyNum funs =
  [ (f,xs) :/=: y
  | f <- funs
  , result f /= tBool
  , xs <- cross [ take (tyNum t) (vars t) | t <- args f ]
  , y <- take (tyNum (result f)) (vars (result f))
  ] ++
  [ (p,xs) :<=>: b
  | p <- funs
  , result p == tBool
  , xs <- cross [ take (tyNum t) (vars t) | t <- args p ]
  , b <- [False,True]
  ] ++
  [ x .=. y
  | t <- types
  , (x,i) <- take (tyNum t) (vars t) `zip` [1..]
  , y <- drop i $ take (tyNum t) (vars t)
  ]
 where
  types = map head . group . sort $
    [ t
    | f <- funs
    , t <- args f ++ [result f]
    ]

  a .=. b | a > b = b :=: a
          | otherwise = a :=: b
-- | Cartesian product of a list of lists ('sequence' specialised to lists):
-- @cross [xs,ys] = [ [x,y] | x <- xs, y <- ys ]@, and @cross [] = [[]]@.
cross :: [[a]] -> [[a]]
cross = foldr combine [[]]
 where
  combine xs acc = [ y:ys | y <- xs, ys <- acc ]
-- | The variables occurring in a literal, in argument order (with the
-- result variable of a @:/=:@ literal last).  May contain duplicates.
vec :: Lit -> [Var]
vec ((_,xs) :/=: y) = xs ++ [y]
vec ((_,xs) :<=>: _) = xs
vec (x :=: y) = [x,y]
-- | For symmetry breaking: given a type, find a variable of that type that
-- "should have been used earlier" by the literal, if any.  @x@ is the
-- latest-ranked variable of the type used by the literal; 'gap' then looks
-- for an earlier variable in the canonical order that the literal skipped.
pre :: Type -> Lit -> Maybe Var
pre typ lit
  | null xs = Nothing
  | otherwise = gap (takeWhile (/=x) xs) (reverse (takeWhile (/=x) (vars typ)))
 where
  xs = nub [ x | x@(_,t) <- vec lit, t == typ ]
  x = maxi (vars typ) xs

  -- Last survivor of xs when eliminated in canonical variable order.
  maxi _ [x] = x
  maxi (y:ys) xs = maxi ys (filter (/=y) xs)

  gap have [] = Nothing
  gap have (w:want)
    | w `notElem` have = Just w
    | otherwise = gap (takeWhile (/=w) have) want
-- | Literal precedence used for symmetry breaking: equality literals (all
-- mapped to 'Nothing') come before function/predicate literals, which are
-- compared by symbol plus the canonical shape ('norm') of their variables.
(<<) :: Lit -> Lit -> Bool
lit1 << lit2 = prnt lit1 < prnt lit2
 where
  prnt lit@(_:=:_) = Nothing
  prnt lit@((f,_):/=:_) = Just (f, norm (vec lit))
  prnt lit@((p,_):<=>:_) = Just (p, norm (vec lit))
-- | Apply a variable renaming to a literal; variables absent from the table
-- are left alone.  Equality literals are re-oriented so the smaller
-- variable comes first (same normalisation as the '.=.' used in 'literals').
swapv :: [(Var,Var)] -> Lit -> Lit
swapv ren lit =
  case lit of
    x:=:y -> orient (rename x) (rename y)
    (f,xs):/=: y -> (f, map rename xs) :/=: rename y
    (p,xs):<=>:b -> (p, map rename xs) :<=>: b
 where
  rename z = fromMaybe z (lookup z ren)
  orient a b | a > b = b :=: a
             | otherwise = a :=: b
-- | Canonical shape of a variable list: each variable is replaced by the
-- index of its first occurrence, e.g. @[x,y,x] -> [0,1,0]@.
norm :: [Var] -> [Int]
norm xs = snd (mapAccumL assign (M.empty, 0) xs)
 where
  assign (tab, next) x =
    case M.lookup x tab of
      Just j -> ((tab, next), j)
      Nothing -> ((M.insert x next tab, next + 1), next)
-- | A candidate model: function-table entries @(fun, argument values,
-- result value)@ and predicate-table entries @(pred, argument values,
-- truth value)@, where values are abstract integer identifiers.
type Model = ([(Fun,[Int],Int)],[(Fun,[Int],Bool)])

-- | Search for small clauses over @lits0@ that are consistent with
-- QuickCheck-generated test data and the supplied 'Model', using an
-- incremental SAT solver.  Candidate clauses are printed as found; since
-- the success branch keeps looping (the @return (Just ...)@ is commented
-- out), the final result is 'Nothing' once the search gives up (k > 10).
speculate :: [Lit] -> Model -> IO (Maybe [Lit])
speculate lits0 (fmod,pmod) =
  withNewSolver $ \s ->
    -- One SAT literal per candidate clause literal; 'tab' pairs them up.
    do ls <- sequence [ newLit s | l <- lits ]
       let tab = lits `zip` ls
       putStrLn ("-- " ++ show (length ls) ++ " literals")
       -- no empty clause
       addClause s ls
       -- no trivial clauses
       sequence_
         [ addClause s [neg l1, neg l2]
         | ((p,xs):<=>:False,l1) <- tab
         , ((q,ys):<=>:True, l2) <- tab
         , p == q && xs == ys
         ]
       -- no unused definitions
       {-
       sequence_
         [ addClause s (neg l1 : [ l2 | (lit2,l2) <- tab, l1 /= l2, y `elem` vec lit2 ])
         | ((_,xs):/=:y,l1) <- tab
         , y `notElem` xs
         ]
       -}
       -- no twice definitions
       sequence_
         [ addClause s [neg l1, neg l2]
         | (fxs :/=:x,l1) <- tab
         , (fxs':/=:y,l2) <- tab
         , fxs == fxs'
         , x /= y
         ]
       -- no free variables in equations
       {-
       sequence_
         [ do addClause s (neg l1 : [ l2 | (lit2,l2) <- tab, notEq lit2, x `elem` vec lit2 ])
              addClause s (neg l1 : [ l2 | (lit2,l2) <- tab, notEq lit2, y `elem` vec lit2 ])
         | (x:=:y,l1) <- tab
         , let notEq (_:=:_) = False
               notEq _ = True
         ]
       -}
       -- removing symmetries
       putStrLn ("-- removing symmetries...")
       sequence_
         [ do putStrLn (show lit2 ++ " --> " ++ show (map (fst.fst) (take i ltab)))
              -- i+1 fresh selector literals, one per earlier swap pair.
              ls <- sequence [ newLit s | j <- [0..i] ]
              addClause s (neg l2 : l1 : ls)
              sequence_
                [ do addClause s [neg l, l1]
                     addClause s [neg l, neg l2]
                | (l,((_,l1),(_,l2))) <- ls `zip` ltab
                ]
         | x <- vs
         , y <- vs
         , snd x == snd y
         , x < y
         , let ltab = [ ((lit1,l1),(lit2,l2))
                      | (lit1,l1) <- tab
                      , let lit2 = swapv [(x,y),(y,x)] lit1
                      , lit2 > lit1
                      , (_,l2):_ <- [filter ((lit2==).fst) tab]
                      ]
         , i <- [0..length ltab-1]
         , let ((lit1,l1),(lit2,l2)) = ltab !! i
         ]
       -- adding test cases
       putStrLn ("-- adding test cases...")
       -- tests'/tests alternate: mutate the current substitution along
       -- failed definition literals, and restart with fresh random data
       -- after 500 fruitless iterations.  'seen' collects the clauses
       -- (as literal sets) contradicted by some test case.
       let tests' seen i j sub | i >= 500 =
             do putStrLn ("(" ++ show (S.size seen) ++ " clauses)")
                return seen
           tests' seen i j sub | j >= length lits =
             do tests seen i
           tests' seen i j sub =
             do new <- solve s (map neg cl)
                let lits1 = [ lit | (lit@(_:/=:_),True) <- lits `zip` bs ]
                if not (null lits1) then
                  do ((f,xs):/=:y) <- generate $ elements lits1
                     let b = app f [ a | x <- xs, Just a <- [lookup x sub] ]
                         sub' = [ if x == y then (y,b) else (x,a) | (x,a) <- sub ]
                     if new then
                       do putStr "."
                          hFlush stdout
                          addClause s cl
                          tests' (S.insert cl seen) 0 0 sub'
                      else
                       do tests' seen i (j+1) sub'
                 else
                  do tests seen i
             where
              bs = [ eval sub lit | lit <- lits ]
              cl = [ l | (l,True) <- ls `zip` bs ]

           tests seen i =
             do --putStr "#"
                --hFlush stdout
                as <- sequence [ generate (resize (i `mod` 100) (gen t)) | (_,t) <- vs ]
                tests' seen (i+1) 0 (vs `zip` as)
       tsts <- tests S.empty 0
       -- every speculated clause must be (locally) minimal
       sequence_
         [ do xs <- sequence [ newLit s | c <- cs ]
              addClause s (neg l : xs)
              sequence_
                [ addClause s [neg x, neg l']
                | (x,c) <- xs `zip` cs
                , l' <- c
                , l' /= l
                ]
         | (l,cs) <- M.toList $ M.fromListWith (++)
                       [ (l,[ls])
                       | ls <- S.toList tsts
                       , l <- ls
                       ]
         ]
       -- adding model constraints
       putStrLn ("-- adding model constraints...")
       -- tas: per type, the abstract values that occur in the model tables.
       let tas = [ (t, nub $ [ a
                             | (f,as,b) <- fmod
                             , (a,t') <- (b:as) `zip` (result f:args f)
                             , t' == t
                             ] ++
                             [ a
                             | (p,as,_) <- pmod
                             , (a,t') <- as `zip` args p
                             , t' == t
                             ])
                 | t <- ts
                 , t /= tBool
                 ] ++
                 [ (tBool, [0,1]) ]
       vals <- sequence
               [ do putStrLn (fst v ++ " = " ++ show as)
                    newVal s as
               | v@(_,t) <- vs
               , [as] <- [[ as | (t',as) <- tas, t' == t ]]
               ]
       let vtab = vs `zip` vals
       -- {-
       sequence_
         [ do cs <- sequence [ newLit s | c <- cases ]
              addClause s (neg l : cs)
              sequence_
                [ addClause s [neg c, v .= a]
                | (c,(as,b)) <- cs `zip` cases
                , (x,a) <- (y:xs) `zip` (b:as)
                , let v:_ = [ v | (x',v) <- vtab, x' == x ]
                ]
         | ((f,xs):/=:y,l) <- tab
         , let cases = [ (as,b) | (f',as,b) <- fmod, f' == f ]
         ]
       sequence_
         [ do cs <- sequence [ newLit s | c <- cases ]
              addClause s (neg l : cs)
              sequence_
                [ addClause s [neg c, v .= a]
                | (c,as) <- cs `zip` cases
                , (x,a) <- xs `zip` as
                , let v:_ = [ v | (x',v) <- vtab, x' == x ]
                ]
         | ((p,xs):<=>:b,l) <- tab
         , let cases = [ as | (p',as,b') <- pmod, p' == p, b' /= b ]
         ]
       sequence_
         [ do notEqualOr s [neg l] a b
         | (x:=:y,l) <- tab
         , [a] <- [[ v | (x',v) <- vtab, x' == x ]]
         , [b] <- [[ v | (y',v) <- vtab, y' == y ]]
         ]
       -- -}
       -- counting literals
       n <- count s ls
       -- looping: find ever more clauses of at most k literals, growing k.
       let loop k | k > 10 =
             do putStrLn "GIVE UP"
                return Nothing

           loop k =
             do --putStrLn ("-- solving (k=" ++ show k ++ ")")
                b <- solve s [n .<= k]
                if b then
                  do bs <- sequence [ SAT.modelValue s l | l <- ls ]
                     print [ lit | (lit,True) <- lits `zip` bs ]
                     addClause s (map neg [ l | (l,True) <- ls `zip` bs ])
                     --return (Just [ lit | (lit,True) <- lits `zip` bs ])
                     loop k
                 else
                  do putStrLn ("-- solving (k=" ++ show k ++ ")")
                     loop (k+1)
       loop 1
 where
  lits = sort lits0
  vs = nub [ v | lit <- lits, v <- vec lit ]
  ts = nub [ t | (_,t) <- vs ]
-- | Take elements up to and including the first one satisfying the
-- predicate (productive on infinite lists, like the original recursion).
takeUntil :: (a -> Bool) -> [a] -> [a]
takeUntil stop = go
 where
  go [] = []
  go (y:ys)
    | stop y = [y]
    | otherwise = y : go ys
-- | Example 1: speculate properties of insertion sort.  The abstract model
-- values (a, as, sort_as, ...) name the distinct observations: sorting is
-- idempotent-ish and inserting into a sorted list yields the sorted list.
-- NOTE(review): 'tyNum' is written here inside main1's where-block, yet
-- 'main2' and 'main' also reference it -- in the original source it is
-- presumably top-level (the dump's indentation may be unreliable); confirm.
main1 =
  do c <- speculate (literals tyNum sig) (fmod,pmod)
     print c
 where
  sig = [f_insert,f_sort,f_sorted]

  a = 1
  as = 2
  sort_as = 3
  sort_aas = 4
  aas = 5

  fmod = [ (f_sort,[as],sort_as)
         , (f_sort,[aas],sort_aas)
         , (f_insert,[a,sort_as],sort_aas)
         ]

  pmod = [ (f_sorted,[sort_as],True)
         , (f_sorted,[sort_aas],False)
         ]

  -- Variable budget per type for literal enumeration.
  tyNum t | t == tBool = 2
  tyNum t | t == tNat = 2
  tyNum t | t == tA = 3
  tyNum t | t == tB = 2
  tyNum t | t == tAB = 2
  tyNum t | t == tListNat = 2
  tyNum t | t == tListA = 2
  tyNum t | t == tListB = 2
  tyNum t | t == tListAB = 2
  tyNum t | t == tListAA = 2
-- | Example 2: speculate properties relating reverse, zip and length.
-- The lazy pattern on @[1..]@ hands out distinct abstract value ids.
main2 =
  do c <- speculate (literals tyNum sig) (fmod,pmod)
     print c
 where
  sig = [f_rev,f_zip,f_lenAA,f_lenA]

  as:cs:ras
    :zip_as_cs:zip_as_ras
    :len_as:len':_
    = [1..]

  fmod = [ (f_zip, [as,cs], zip_as_cs)
         , (f_zip, [as,ras], zip_as_ras)
         , (f_rev, [as], ras)
         , (f_lenA, [as], len_as)
         , (f_lenA, [ras], len_as)
         , (f_lenA, [cs], len_as)
         , (f_lenAA, [zip_as_ras], len_as)
         , (f_lenAA, [zip_as_cs], len')
         ]

  pmod = []
-- | Entry point (example 3): speculate properties of list membership under
-- element swapping.
main =
  do c <- speculate (literals tyNum sig) (fmod,pmod)
     print c
 where
  sig = [f_memb,f_swap]

  a:b:c:d
   :das:swap_das:_
    = [1..]

  fmod = [ (f_swap, [a,b,das], swap_das)
         ]

  pmod = [ (f_memb, [a, das], True)
         , (f_memb, [b, das], True)
         , (f_memb, [c, das], True)
         , (f_memb, [c, swap_das], False)
         ]
-- Concrete function symbols wrapping ordinary Haskell functions via
-- 'fun1'/'fun2'/'fun3' for use in the example signatures above.
f_insert :: Fun
f_insert = Fun "insert" [tA,tListA] tListA (fun2 (insert :: Nat -> [Nat] -> [Nat]))

f_sort :: Fun
f_sort = Fun "sort" [tListA] tListA (fun1 (sort :: [Nat] -> [Nat]))

f_sorted :: Fun
f_sorted = Fun "sorted" [tListA] tBool (fun1 (\xs -> sort xs == (xs :: [Nat])))

f_rev :: Fun
f_rev = Fun "rev" [tListB] tListB (fun1 (reverse :: [Int] -> [Int]))

f_zip :: Fun
f_zip = Fun "zip" [tListA,tListA] tListAA (fun2 (zip :: [Int] -> [Int] -> [(Int,Int)]))

f_lenA :: Fun
f_lenA = Fun "lenA" [tListA] tNat (fun1 (length :: [Int] -> Int))

f_lenB :: Fun
f_lenB = Fun "lenB" [tListB] tNat (fun1 (length :: [Int] -> Int))

f_lenAB :: Fun
f_lenAB = Fun "lenAB" [tListAB] tNat (fun1 (length :: [(Int,Int)] -> Int))

f_lenAA :: Fun
f_lenAA = Fun "lenAA" [tListAA] tNat (fun1 (length :: [(Int,Int)] -> Int))

f_memb :: Fun
f_memb = Fun "memb" [tA,tListA] tBool (fun2 (elem :: Int -> [Int] -> Bool))

-- | swap replaces every x by y and every y by x in the list.
f_swap :: Fun
f_swap = Fun "swap" [tA,tA,tListA] tListA (fun3 (swap :: Int -> Int -> [Int] -> [Int]))
 where
  swap x y xs = [ if z == x then y else if z == y then x else z | z <- xs ]
| koengit/turbospec | TurboSpec.hs | mit | 16,773 | 0 | 25 | 6,328 | 7,880 | 4,265 | 3,615 | 392 | 9 |
module ComponentModel.Parsers.ParserComponentModel
where
import BasicTypes
import ComponentModel.Parsers.AbsComponentModel
import ComponentModel.Parsers.SkelComponentModel
import ComponentModel.Parsers.ErrM
import ComponentModel.Parsers.LexComponentModel
import ComponentModel.Parsers.ParComponentModel
import qualified ComponentModel.Types as T
-- | Read a component-model description from disk, lex and parse it, and
-- translate the parse result into the internal representation.
parseComponentModel fileName = do
 contents <- readFile fileName
 return (parseResult (pComponentModel (myLexer contents)))
-- | Lift the BNFC parse outcome into the project's Success/Fail result,
-- translating the AST on success.
parseResult (Ok g) = Success (translateModel g)
parseResult (Bad s) = Fail s

-- | Translate the parsed component model into the internal representation:
-- one mapping per component.
translateModel :: ComponentModel -> T.ComponentModel
translateModel (TComponentModel cs) = map translateMapping cs

-- | A mapping pairs the component's identifier with its rendered path.
translateMapping :: ComponentMapping -> T.ComponentMapping
translateMapping (TComponentMapping (Ident i) p) = (i, path p)

-- | Render a relative path AST to its textual form: bare name, ".ext",
-- "name.ext", or "dir/rest" for composed paths.
path :: RelativePath -> String
path (BasicFilePath (Ident n)) = n
path (BasicFileExt (Ident n)) = "." ++ n
path (BasicFilePathExt (Ident n) (Ident e)) = n ++ "." ++ e
path (ComposedFilePath (Ident i) p ) = i ++ "/" ++ path p
module ArmstrongNumbers (armstrong) where
-- | Base-10 digits of a number, least-significant first; 0 yields [].
digits :: Integral x => x -> [x]
digits n
  | n == 0 = []
  | otherwise = n `rem` 10 : digits (n `quot` 10)
-- | An Armstrong (narcissistic) number equals the sum of its digits each
-- raised to the power of the digit count.
armstrong :: Integral a => a -> Bool
armstrong n = n == total
  where
    ds = digits n
    k = length ds
    total = sum [ d ^ k | d <- ds ]
module Main where
import Network.UDP.HolePunch
-- | Entry point: delegate to the UDP hole-punching driver exported by
-- "Network.UDP.HolePunch".
main = chownat
| danoctavian/chownat | chownat-demo/ChownatDemo.hs | mit | 64 | 0 | 4 | 10 | 15 | 10 | 5 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE TypeApplications #-}
module Examples.Serialization.LowLevel.Write (main) where
import Data.Function ((&))
import Capnp.Gen.Addressbook.New
import qualified Capnp.New as C
import qualified Data.Text as T
-- | Build the example address book in a pure builder and write the message
-- to stdout.  The previous version bound the result with an irrefutable
-- @Right@ pattern, which dies with an unhelpful pattern-match error if
-- 'C.createPure' fails (e.g. the traversal limit is exceeded); an explicit
-- case now reports the failure.
main :: IO ()
main =
    case C.createPure C.defaultLimit buildMsg of
        Left err -> error ("building the address book failed: " ++ show err)
        Right msg -> C.putMsg msg
-- | Construct the two-person example address book inside a pure builder.
buildMsg :: C.PureBuilder s (C.Message ('C.Mut s))
buildMsg = do
    -- newMessage allocates a new, initially empty, mutable message. It
    -- takes an optional size hint:
    msg <- C.newMessage Nothing

    -- newRoot allocates a new struct as the root object of the message.
    -- The unit argument is a hint to the allocator to determine the size
    -- of the object; for types whose size is not fixed (e.g. untyped structs,
    -- lists), this may be something more meaningful.
    addressbook <- C.newRoot @AddressBook () msg

    -- newField can be used to allocate the value of a field, for pointer
    -- types like lists. The number is the allocation hint, as used by newRoot.
    -- We can use the OverloadedLabels extension to pass in fields by name.
    people <- C.newField #people 2 addressbook

    -- Index gets an object at a specified location in a list. Cap'N Proto
    -- lists are flat arrays, and in the case of structs the structs are
    -- unboxed, so there is no need to allocate each element:
    alice <- C.index 0 people

    -- encodeField takes the parsed form of a value and marshals it into
    -- the specified field. For basic types like integers & booleans, this
    -- is almost always what you want. For larger values, you may want to
    -- use newField as above, or separately create the value and use setField,
    -- as shown below.
    C.encodeField #id 123 alice
    C.encodeField #name (T.pack "Alice") alice
    C.encodeField #email (T.pack "alice@example.com") alice

    -- We would probably use newField here, but to demonstrate, we can allocate
    -- the value separately with new, and then set it with setField.
    phones <- C.new @(C.List Person'PhoneNumber) 1 msg
    C.setField #phones phones alice
    mobilePhone <- C.index 0 phones

    -- It is sometimes more ergonomic to use (&) from Data.Function. You might
    -- ask why not just make the container the first argument, but it works
    -- out better this way for the read examples.
    mobilePhone & C.encodeField #number (T.pack "555-1212")
    mobilePhone & C.encodeField #type_ Person'PhoneNumber'Type'mobile

    -- Since named unions act like unnamed unions inside a group, we first have
    -- to get the group field:
    employment <- C.readField #employment alice
    -- Then, we can use encodeVariant to set both the tag of the union and the
    -- value:
    employment & C.encodeVariant #school (T.pack "MIT")

    -- Note that 'phones' and 'employment' below shadow the bindings used
    -- for Alice above; each refers to Bob's own fields.
    bob <- C.index 1 people
    bob & C.encodeField #id 456
    bob & C.encodeField #name (T.pack "Bob")
    bob & C.encodeField #email (T.pack "bob@example.com")

    phones <- bob & C.newField #phones 2
    homePhone <- phones & C.index 0
    homePhone & C.encodeField #number (T.pack "555-4567")
    homePhone & C.encodeField #type_ Person'PhoneNumber'Type'home
    workPhone <- phones & C.index 1
    workPhone & C.encodeField #number (T.pack "555-7654")
    workPhone & C.encodeField #type_ Person'PhoneNumber'Type'work
    employment <- bob & C.readField #employment
    employment & C.encodeVariant #selfEmployed () -- Note the (), since selfEmploy is Void.

    pure msg
| zenhack/haskell-capnp | examples/lib/Examples/Serialization/LowLevel/Write.hs | mit | 3,508 | 0 | 12 | 761 | 627 | 309 | 318 | 42 | 1 |
module LinearAlgebra.Main where
import Notes
import LinearAlgebra.InproductSpaces
import LinearAlgebra.VectorSpaces
-- | The "Linear Algebra" chapter: vector spaces followed by inproduct
-- spaces.
linearAlgebra :: Note
linearAlgebra = chapter "Linear Algebra" $ do
    vectorSpaces
    inproductSpaces
| NorfairKing/the-notes | src/LinearAlgebra/Main.hs | gpl-2.0 | 254 | 0 | 7 | 62 | 42 | 23 | 19 | 8 | 1 |
-- | Print a short greeting: the first two words share a line with the
-- final word, which ends the line.
main :: IO ()
main = putStr "Hey" >> putStr "I'm" >> putStrLn "Anand!"
| softwaremechanic/Miscellaneous | Haskell/9.hs | gpl-2.0 | 60 | 0 | 7 | 15 | 24 | 9 | 15 | 4 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.