code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Numeric.Semiring.Integral
( IntegralSemiring
) where
import Numeric.Algebra.Class
import Numeric.Natural
-- | An integral semiring has no zero divisors
--
-- > a * b = 0 implies a == 0 || b == 0
-- A marker class with no methods: declaring an instance asserts the
-- no-zero-divisor law stated above holds for the type.
class (Monoidal r, Semiring r) => IntegralSemiring r
instance IntegralSemiring Integer
instance IntegralSemiring Natural
-- Bool with (&&) as (*): a && b == False implies a == False || b == False.
instance IntegralSemiring Bool
| athanclark/algebra | src/Numeric/Semiring/Integral.hs | bsd-3-clause | 362 | 0 | 6 | 60 | 69 | 38 | 31 | -1 | -1 |
module Text.XHtml.Strict.Formlets ( input, textarea, password, file, checkbox
, hidden, inputInteger, radio, enumRadio
, label
, selectXHtml, selectRaw, select, enumSelect
, XHtmlForm, XHtmlFormlet
, module Text.Formlets
) where
import Text.Formlets hiding (massInput)
import qualified Text.Formlets as F
import qualified Text.XHtml.Strict as X
import Text.XHtml.Strict ((!), (+++), (<<))
import Control.Applicative
import Control.Applicative.Error
import Data.List (elemIndex)
type XHtmlForm m a = Form X.Html m a
type XHtmlFormlet m a = Formlet X.Html m a
-- | An input field with an optional value
input :: Monad m => XHtmlFormlet m String
-- Renders an <input type="text">; the formlet's optional default String
-- becomes the field's value attribute.
input = input' (\n v -> X.textfield n ! [X.value v])
-- | A textarea with optional rows and columns, and an optional value
textarea :: Monad m => Maybe Int -> Maybe Int -> XHtmlFormlet m String
textarea mRows mCols = input' render
  where
    -- Build the <textarea>, showing any default value as its text content.
    render name val = X.textarea (X.toHtml val) ! attributes name
    -- rows/cols attributes are emitted only when a dimension was supplied.
    attributes name = X.name name : rowAttrs ++ colAttrs
    rowAttrs = maybe [] (\k -> [X.rows (show k)]) mRows
    colAttrs = maybe [] (\k -> [X.cols (show k)]) mCols
-- | A password field with an optional value
password :: Monad m => XHtmlFormlet m String
-- Same shape as 'input', but renders an <input type="password">.
password = input' (\n v -> X.password n ! [X.value v])
-- | A hidden input field
hidden :: Monad m => XHtmlFormlet m String
-- Round-trips the value through a hidden <input>; useful for carrying state.
hidden = input' X.hidden
-- | A validated integer component
inputInteger :: Monad m => XHtmlFormlet m Integer
-- Shows the default (if any) via 'show' and validates the submitted text
-- with 'asInteger' (presumably from the formlets library -- confirm);
-- non-numeric input yields a validation Failure rather than a crash.
inputInteger x = input (fmap show x) `check` asInteger
-- | A file upload form
file :: Monad m => XHtmlForm m File
-- Note this is a plain XHtmlForm, not a Formlet: file inputs take no default.
file = inputFile X.afile
-- | A checkbox with an optional default value
checkbox :: Monad m => XHtmlFormlet m Bool
-- Presence of the submitted field means True, absence means False.
checkbox d = (optionalInput (xml d)) `check` asBool
  where asBool (Just _) = Success True
        asBool Nothing = Success False
        -- A default of (Just True) pre-checks the box; any other default
        -- renders an unchecked checkbox whose submitted value is "on".
        xml (Just True) n = X.widget "checkbox" n [X.value "on", X.checked]
        xml _ n = X.checkbox n "on"
-- | A radio choice
radio :: Monad m => [(String, String)] -> XHtmlFormlet m String
-- Each choice is a (value, label) pair; all buttons share the field name so
-- only one can be selected. Each gets an id "<name>_<index>" so its <label>
-- can reference it via the "for" attribute.
radio choices = input' mkRadios -- todo: validate that the result was in the choices
 where radio n v i = X.input ! [X.thetype "radio", X.name n, X.identifier i, X.theclass "radio", X.value v]
       mkRadios name selected = X.concatHtml $ map (mkRadio name selected) (zip choices [1..])
       -- The button whose value matches the formlet's default is pre-checked.
       mkRadio name selected ((value, label), idx) = (radio name value ident) ! attrs
                                                 +++ X.label (X.toHtml label) ! [X.thefor ident, X.theclass "radio"]
        where attrs | selected == value = [X.checked]
                    | otherwise = []
              ident = name ++ "_" ++ show idx
-- | An radio choice for Enums
enumRadio :: (Monad m, Enum a) => [(a, String)] -> XHtmlFormlet m a
-- Enum values are encoded as their fromEnum index (shown as a String) for
-- the underlying 'radio', then parsed back with 'tryToEnum' on submission.
enumRadio values defaultValue = radio (map toS values) (fmap (show . fromEnum) defaultValue)
                                `check` convert `check` tryToEnum
 where toS = fmapFst (show . fromEnum)
       convert v = maybeRead' v "Conversion error"
-- | Wrap some HTML in a <label> element; contributes no form value
-- ('xml' here is the pure-markup combinator from Text.Formlets).
label :: (Monad m, X.HTML h) => h -> Form X.Html m ()
label = xml . X.label . X.toHtml
-- | This is a helper function to generate select boxes
selectXHtml :: (X.HTML h)
            => [X.HtmlAttr]  -- ^ Optional attributes for the select-box
            -> [(String, h)] -- ^ The values and their labels
            -> String        -- ^ The name
            -> String        -- ^ The value that is selected
            -> X.Html
selectXHtml attr choices name chosen =
    X.select ! (X.name name : attr) $ X.concatHtml (map optionFor choices)
  where
    -- One <option> element; marked selected when its value is the chosen one.
    optionFor (value, caption) =
        X.option ! (selectedAttrs value ++ [X.value value]) << caption
    selectedAttrs value
      | chosen == value = [X.selected]
      | otherwise       = []
-- | A drop-down for selecting values
selectRaw :: (Monad m, X.HTML h)
          => [X.HtmlAttr]   -- ^ Optional attributes for the select-element
          -> [(String, h)]  -- ^ Pairs of value/label
          -> XHtmlFormlet m String
-- Thin formlet wrapper over 'selectXHtml'; the submitted String is returned
-- unchecked (see the todo below).
selectRaw attrs choices = input' $ selectXHtml attrs choices -- todo: validate that the result was in the choices
-- | A drop-down for anything that is an instance of Eq
select :: (Eq a, Monad m, X.HTML h) => [X.HtmlAttr] -> [(a, h)] -> XHtmlFormlet m a
-- Values are encoded as their position in 'ls'; on submission the index is
-- parsed back and bounds-checked before the (!!) lookup, so 'convert' is
-- total for any submitted string.
select attrs ls v = selectRaw attrs (map f $ zip [0..] ls) selected `check` asInt `check` convert
 where selected = show <$> (v >>= flip elemIndex (map fst ls))
       f (idx, (_,l)) = (show idx, l)
       convert i | i >= length ls || i < 0 = Failure ["Out of bounds"]
                 | otherwise = Success $ fst $ ls !! i
       asInt s = maybeRead' s (s ++ " is not a valid int")
-- | A drop-down for all the options from |a|.
enumSelect :: (Enum a, Bounded a, Show a, Eq a, Monad m)
           => [X.HtmlAttr] -- Optional attributes on the select-box
           -> XHtmlFormlet m a
-- Enumerates every value of |a| via [minBound..maxBound], labelling each
-- option with its 'show' representation.
enumSelect attrs = select attrs (zip items (map show items)) where items = [minBound..maxBound]
| chriseidhof/formlets | Text/XHtml/Strict/Formlets.hs | bsd-3-clause | 5,090 | 0 | 13 | 1,454 | 1,706 | 909 | 797 | 78 | 3 |
module AERN2.Poly.Power.SizeReduction where
import AERN2.Poly.Basics
import AERN2.Poly.Power.Type
import MixedTypesNumPrelude
import qualified Data.Map as Map
import AERN2.MP.Ball hiding (iterateUntilAccurate)
reduceDegree :: PowPoly MPBall -> MPBall -> MPBall -> Integer -> PowPoly MPBall
-- Truncate a power-basis polynomial to degree n on the interval [l, r],
-- absorbing the dropped tail into the constant coefficient as an error ball.
reduceDegree (PowPoly (Poly ts)) l r n =
  PowPoly $ Poly $
  Map.updateMin (\a -> Just $ a + errBall) $
  Map.filterWithKey (\k _ -> k <= n) ts
  where
  -- m bounds |x| over [l, r], so |c_k * x^k| <= |c_k| * m^k there.
  m = max (abs l) (abs r)
  -- Symmetric interval [-err, err] enclosing the discarded tail's value.
  errBall = hullMPBall (-err) err
  err = Map.foldlWithKey' (\s k c -> s + (abs c) * (m^!k)) (mpBall 0)
        $ Map.filterWithKey (\k _ -> k > n) ts
-- | Degree reduction specialised to the standard interval [-1, 1].
reduceDegreeI :: PowPoly MPBall -> Integer -> PowPoly MPBall
reduceDegreeI p n = reduceDegree p (mpBall (-1)) (mpBall 1) n
| michalkonecny/aern2 | aern2-fun-univariate/src/AERN2/Poly/Power/SizeReduction.hs | bsd-3-clause | 750 | 0 | 14 | 149 | 315 | 169 | 146 | 18 | 1 |
-- File created: 2009-08-08 16:39:10
module Haschoo.Evaluator.Standard.Eval (procedures) where
import Data.IORef (newIORef)
import Haschoo.Types (ScmValue(..), runHaschoo)
import Haschoo.Utils (ErrOr)
import Haschoo.Evaluator.Eval (eval)
import Haschoo.Evaluator.Utils (tooFewArgs, tooManyArgs)
procedures :: [(String, ScmValue)]
-- Each entry pairs a Scheme-visible name with a ScmFunc carrying that
-- same name (used for error reporting, presumably -- confirm).
procedures = map (\(a,b) -> (a, ScmFunc a b))
   [ ("eval", scmEval)
   -- scheme-report-environment and null-environment are in Haschoo.Stdlib to
   -- avoid circular dependencies
   ]
scmEval :: [ScmValue] -> IO (ErrOr ScmValue)
-- Two-argument form: evaluate v in the given environment. The context's
-- frames are copied into fresh IORefs before evaluation.
scmEval [v, ScmContext ctx] = mapM newIORef ctx >>= flip runHaschoo (eval v)
-- Two arguments where the second is not an environment.
scmEval [_,_] = return$ Left "Nonenvironmental argument to eval"
-- Remaining arities: three-or-more, then zero-or-one.
scmEval (_:_:_) = return$ tooManyArgs "eval"
scmEval _ = return$ tooFewArgs "eval"
| Deewiant/haschoo | Haschoo/Evaluator/Standard/Eval.hs | bsd-3-clause | 858 | 0 | 9 | 180 | 256 | 146 | 110 | 14 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RankNTypes #-}
module Network.IIRCC where
import Codec.CBOR.Read
import Codec.CBOR.Write
import Codec.Serialise
import Codec.Serialise.Encoding
import Codec.Serialise.Decoding
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Lazy.Char8 as BS8
import Data.Dependent.Sum
import Data.Either.Combinators
import Data.Functor.Identity
import Data.Monoid
import Data.Text
import Data.Word
import GHC.Generics
type ChannelName = Text
type SessionName = Text
type QuitMessage = Text
type UninterpretedMessage = Text
type ErrorMessage = Text
type HostName = Text
type PortNumber = Word16
data ClientCommand =
StartSession SessionName HostName PortNumber |
ToSession SessionName SessionCommand |
EndClient
deriving (Show, Generic)
instance Serialise ClientCommand
data SessionCommand =
Connect |
Join ChannelName |
--ToChannel ChannelName ChannelCommand |
SendQuit QuitMessage |
Disconnect |
EndSession
deriving (Show, Generic)
instance Serialise SessionCommand
--data ChannelCommand =
-- SendMessage Text |
-- Part
data ClientEventTag f a where
SessionStarted :: ClientEventTag f SessionName
FromSessionF :: f (SessionName, f (SessionEventTag f a)) -> ClientEventTag f a
ClientUnableTo :: ClientEventTag f (ClientCommand, ErrorMessage)
ClientEnding :: ClientEventTag f ()
pattern FromSession sessionName sessionEventTag = FromSessionF (Identity (sessionName, Identity sessionEventTag))
type ClientEventFilter = ClientEventTag Maybe
type ClientEventHeader = ClientEventTag Identity
type ClientEvent = DSum ClientEventHeader Identity
pattern SessionStartedTagNum = 0
pattern FromSessionTagNum = 1
pattern ClientUnableToTagNum = 2
pattern ClientEndingTagNum = 3
data SessionEventTag (f :: * -> *) a where
Connecting :: SessionEventTag f (HostName, PortNumber)
Connected :: SessionEventTag f ()
FailedToConnect :: SessionEventTag f ErrorMessage
--FromChannel :: ChannelName -> ChannelEventTag a -> SessionEventTag a
ReceivedUninterpretedMessage :: SessionEventTag f UninterpretedMessage
SessionUnableTo :: SessionEventTag f (SessionCommand, ErrorMessage)
SentQuit :: SessionEventTag f QuitMessage
EndedConnection :: SessionEventTag f ()
LostConnection :: SessionEventTag f (Maybe ErrorMessage)
EndedSession :: SessionEventTag f ()
type SessionEventFilter = SessionEventTag Maybe
type SessionEventHeader = SessionEventTag Identity
type SessionEvent = DSum SessionEventHeader Identity
pattern ConnectingTagNum = 0
pattern ConnectedTagNum = 1
pattern FailedToConnectTagNum = 2
--pattern FromChannelTagNum = 3
pattern ReceivedUninterpretedMessageTagNum = 4
pattern SessionUnableToTagNum = 5
pattern SentQuitTagNum = 6
pattern EndedConnectionTagNum = 7
pattern LostConnectionTagNum = 8
pattern EndedSessionTagNum = 9
--data ChannelEventTag a where
-- ReceivedMessage :: ChannelEventTag Text
-- SentMessage :: ChannelEventTag Text
{-
data ChannelEventTag a where
-}
-- This looks generalized enough to handle any foldable structure, but it will ultimately
-- only be needed for Identity (1 element) or Maybe (0 or 1 element). Untested for other
-- foldable types. Implementing this way just saves us from code duplication.
-- Encodes the event's tag number, an optional FromSession payload (session
-- name plus nested session-event tag), and a terminating CBOR null.
encodeClientEventTag :: Foldable f => ClientEventTag f a -> Encoding
encodeClientEventTag = (<> encodeNull) . \case
  SessionStarted -> encodeTag SessionStartedTagNum
  FromSessionF details ->
    encodeTag FromSessionTagNum <> (
      foldFor details $ \(sessionName, sessionEventTags) ->
        encodeString sessionName <> foldFor sessionEventTags encodeSessionEventTag
    )
  ClientUnableTo -> encodeTag ClientUnableToTagNum
  ClientEnding -> encodeTag ClientEndingTagNum
  where
    -- foldMap with the structure first; for Identity/Maybe this emits the
    -- payload encoding zero or one times.
    foldFor = flip foldMap
-- Every session-event tag is encoded as its tag number followed by null;
-- the tag carries no payload of its own.
encodeSessionEventTag :: SessionEventTag f a -> Encoding
encodeSessionEventTag = \case
  Connecting -> encodeTag ConnectingTagNum <> encodeNull
  Connected -> encodeTag ConnectedTagNum <> encodeNull
  FailedToConnect -> encodeTag FailedToConnectTagNum <> encodeNull
  --FromChannel -> encodeTag FromChannelTagNum <> encodeNull
  ReceivedUninterpretedMessage -> encodeTag ReceivedUninterpretedMessageTagNum <> encodeNull
  SessionUnableTo -> encodeTag SessionUnableToTagNum <> encodeNull
  SentQuit -> encodeTag SentQuitTagNum <> encodeNull
  EndedConnection -> encodeTag EndedConnectionTagNum <> encodeNull
  LostConnection -> encodeTag LostConnectionTagNum <> encodeNull
  EndedSession -> encodeTag EndedSessionTagNum <> encodeNull
-- Encode only the event's payload (its header/tag is encoded separately by
-- 'encodeClientEventTag'); FromSession defers to the session-event encoder.
encodeClientEventData :: ClientEvent -> Encoding
encodeClientEventData = \case
  SessionStarted :=> Identity v -> encode v
  FromSession _ sessionEventHeader :=> Identity v -> encodeSessionEventData (sessionEventHeader ==> v)
  ClientUnableTo :=> Identity v -> encode v
  ClientEnding :=> Identity v -> encode v
-- Payload-only encoding for session events; each payload type has a
-- Serialise instance, so plain 'encode' suffices per case.
encodeSessionEventData :: SessionEvent -> Encoding
encodeSessionEventData = \case
  Connecting :=> Identity v -> encode v
  Connected :=> Identity v -> encode v
  FailedToConnect :=> Identity v -> encode v
  --FromChannel :=> Identity v -> encode v
  ReceivedUninterpretedMessage :=> Identity v -> encode v
  SessionUnableTo :=> Identity v -> encode v
  SentQuit :=> Identity v -> encode v
  EndedConnection :=> Identity v -> encode v
  LostConnection :=> Identity v -> encode v
  EndedSession :=> Identity v -> encode v
-- Decode a payload of the type indexed by the given tag and pair them up
-- as a dependent sum.
decodeDSumWithTag :: (Serialise a, Applicative f) => tag a -> Decoder s (DSum tag f)
decodeDSumWithTag tag = (tag ==>) <$> decode
-- | Decode a 'ClientEvent' header, yielding a decoder for its payload.
--
-- The outer decoder consumes the event tag (and, for session events, the
-- session name plus the terminating null); the returned inner decoder
-- reads the event's payload.
decodeClientEvent :: Decoder s (Decoder s ClientEvent)
decodeClientEvent = decodeTag >>= \case
  SessionStartedTagNum -> decodeTrivial SessionStarted
  FromSessionTagNum -> do
    sessionName <- decodeString
    decodeNull
    decodeSessionEvent <&.&> \(sessionEventHeader :=> v) -> FromSession sessionName sessionEventHeader :=> v
  ClientUnableToTagNum -> decodeTrivial ClientUnableTo
  ClientEndingTagNum -> decodeTrivial ClientEnding
  -- Reject unknown tags with a decoder failure (a proper
  -- DeserialiseFailure) instead of crashing via 'undefined'.
  unknownTag -> fail $ "decodeClientEvent: unrecognized tag " ++ show unknownTag
  where
    -- Compose fmap twice: map under the outer and inner Decoder.
    (<$.$>) = (<$>) . (<$>)
    (<&.&>) = flip (<$.$>)
    -- Header has no extra data: consume the null, return the payload decoder.
    decodeTrivial tag = decodeNull >> return (decodeDSumWithTag tag)
-- | Decode a 'SessionEvent' header, yielding a decoder for its payload.
-- Mirrors 'decodeClientEvent': the outer decoder consumes the tag and its
-- trailing null; the inner decoder reads the payload.
decodeSessionEvent :: Decoder s (Decoder s SessionEvent)
decodeSessionEvent = decodeTag >>= \case
  ConnectingTagNum -> decodeTrivial Connecting
  ConnectedTagNum -> decodeTrivial Connected
  FailedToConnectTagNum -> decodeTrivial FailedToConnect
  ReceivedUninterpretedMessageTagNum -> decodeTrivial ReceivedUninterpretedMessage
  SessionUnableToTagNum -> decodeTrivial SessionUnableTo
  SentQuitTagNum -> decodeTrivial SentQuit
  EndedConnectionTagNum -> decodeTrivial EndedConnection
  LostConnectionTagNum -> decodeTrivial LostConnection
  EndedSessionTagNum -> decodeTrivial EndedSession
  -- Reject unknown tags with a decoder failure (a proper
  -- DeserialiseFailure) instead of crashing via 'undefined'.
  unknownTag -> fail $ "decodeSessionEvent: unrecognized tag " ++ show unknownTag
  where
    decodeTrivial tag = decodeNull >> return (decodeDSumWithTag tag)
| Rotaerk/iircc | src/common/Network/IIRCC.hs | bsd-3-clause | 6,967 | 0 | 14 | 1,027 | 1,514 | 791 | 723 | 146 | 10 |
{-#LANGUAGE FlexibleInstances #-}
{-#LANGUAGE MultiParamTypeClasses #-}
{-#LANGUAGE OverloadedStrings #-}
{-#LANGUAGE ViewPatterns #-}
module Twilio.Tokens
( -- * Resource
Token(..)
, IceServer(..)
, Twilio.Tokens.post
) where
import Control.Applicative
import Control.Error.Safe
import Control.Monad
import Control.Monad.Catch
import Data.Aeson
import qualified Data.HashMap.Strict as HashMap
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding
import Data.Time.Clock
import Network.URI
import Control.Monad.Twilio
import Twilio.Types
import Twilio.Internal.Parser
import Twilio.Internal.Request
import Twilio.Internal.Resource as Resource
{- Resource -}
-- An ephemeral Twilio credential token as returned by POST /Tokens.json.
data Token = Token
  { accountSID  :: !AccountSID
  , dateCreated :: !UTCTime
  , dateUpdated :: !UTCTime
  , iceServers  :: [IceServer]   -- STUN/TURN servers offered with this token
  , password    :: !Text
  , ttl         :: !Integer      -- lifetime in seconds (parsed from a string)
  , username    :: !Text
  } deriving (Eq, Show)
instance FromJSON Token where
  parseJSON (Object v) = Token
    <$>  v .: "account_sid"
    -- Timestamps arrive as strings; parseDateTime converts them.
    <*> (v .: "date_created" >>= parseDateTime)
    <*> (v .: "date_updated" >>= parseDateTime)
    <*>  v .: "ice_servers"
    <*>  v .: "password"
    -- The API sends ttl as a string; readZ parses it to an Integer.
    <*> (v .: "ttl" >>= readZ)
    <*>  v .: "username"
  parseJSON _ = mzero
-- An ICE server entry: a plain STUN URL, or a TURN URL with credentials.
data IceServer
  = StunServer { stunURL :: !URI }
  | TurnServer { turnURL :: !URI
               , turnCredential :: !Text
               , turnUsername :: !Text }
  deriving (Eq, Show)
instance FromJSON IceServer where
  -- A "url" key is mandatory; "credential" and "username" upgrade the entry
  -- from a STUN server to a TURN server when both are present.
  parseJSON (Object o) =
    case parseAbsoluteURI . T.unpack =<< (valueToText =<< HashMap.lookup "url" o) of
      Nothing  -> mzero
      Just uri ->
        let turn = TurnServer uri
              <$> (HashMap.lookup "credential" o >>= valueToText)
              <*> (HashMap.lookup "username" o >>= valueToText)
        in return (fromMaybe (StunServer uri) turn)
  parseJSON _ = mzero
-- POST /Tokens.json with no parameters: server-default TTL.
instance Post0 Token where
  post0 = request parseJSONFromResponse =<<
    makeTwilioPOSTRequest "/Tokens.json" []
-- POST /Tokens.json with an explicit TTL (seconds), sent as form field "Ttl".
instance Post1 Integer Token where
  post1 (show -> ttl) = request parseJSONFromResponse =<<
    makeTwilioPOSTRequest "/Tokens.json"
      [ ("Ttl", encodeUtf8 . T.pack $ ttl ) ]
-- Optional-TTL convenience: Nothing falls back to the parameterless POST.
instance Post1 (Maybe Integer) Token where
  post1 Nothing = post0
  post1 (Just ttl) = post1 ttl
-- | Create a token with an optional TTL; delegates to the Post1 instance above.
post :: MonadThrow m => Maybe Integer -> TwilioT m Token
post = Resource.post
| seagreen/twilio-haskell | src/Twilio/Tokens.hs | bsd-3-clause | 2,390 | 0 | 18 | 530 | 665 | 366 | 299 | 93 | 1 |
import B
-- see slides 426-429 for the fold for arithmetic expressions
-- Catamorphism for B: the first result replaces TrueB, the second FalseB,
-- and the function combines the three recursively-folded IfB subterms.
foldB :: r -> r -> (r -> r -> r -> r) -> B -> r
foldB r _ _ TrueB = r
foldB _ r _ FalseB = r
foldB r1 r2 f (IfB x y z) =
  f (fold x) (fold y) (fold z)
  where fold = foldB r1 r2 f
-- Depth of the expression tree: leaves count 1, each IfB adds one level
-- above its deepest subterm.
depth :: B -> Int
depth = foldB 1 1 deeper
  where deeper c t e = 1 + (c `max` (t `max` e))
-- Number of TrueB leaves in the term.
countT :: B -> Int
countT = foldB 1 0 sum3
  where sum3 a b c = a + b + c
-- Number of FalseB leaves in the term.
countF :: B -> Int
countF = foldB 0 1 sum3
  where sum3 a b c = a + b + c
-- Evaluate the boolean expression: IfB selects the then- or else-branch
-- according to its (already evaluated) condition.
eval :: B -> Bool
eval = foldB True False selectBranch
  where selectBranch cond thenV elseV
          | cond      = thenV
          | otherwise = elseV
-- Smoke test: if (if T then F else F) then F else F  ==>  False.
main =
  print (eval (IfB (IfB TrueB FalseB FalseB) FalseB FalseB))
| grammarware/slps | topics/implementation/b/folding/foldB.hs | bsd-3-clause | 604 | 28 | 11 | 182 | 366 | 192 | 174 | 29 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DoAndIfThenElse #-}
-- |
-- Module : Verifier.SAW.Testing.Random
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : jhendrix@galois.com, conathan@galois.com
-- Stability : experimental
-- Portability : portable
--
-- This module generates random values for 'FiniteValue.FiniteType' types.
--
-- Based on 'Cryptol.Testing.Random'.
module Verifier.SAW.Testing.Random where
import Verifier.SAW.FiniteValue
(asFiniteTypePure, scFiniteValue, FiniteType(..), FiniteValue(..))
import Verifier.SAW.Prim (Nat(..))
import Verifier.SAW.Recognizer (asBoolType, asPi)
import Verifier.SAW.SharedTerm
(scApplyAll, scModule, scWhnf, SharedContext, SharedTerm)
import Verifier.SAW.Simulator.Concrete (evalSharedTerm, CValue)
import Verifier.SAW.Simulator.Value (Value(..))
import Verifier.SAW.TypedAST (FieldName)
import Verifier.SAW.Utils (panic)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>), Applicative)
import Data.Traversable (traverse)
#endif
import Control.Monad (msum, replicateM)
import Control.Monad.IO.Class (liftIO, MonadIO)
import Control.Monad.Random
import Data.Map (Map)
import qualified Data.Map as Map
import System.Random.TF (newTFGen, TFGen)
----------------------------------------------------------------
-- Interface.
-- | Run @scRunTests@ in 'IO' using 'System.Random.TF.TFGen' for generation.
--
-- The caller should use @scTestableType@ to (maybe) compute the 'gens'.
scRunTestsTFIO ::
  SharedContext s -> Integer -> SharedTerm s -> [RandT TFGen IO FiniteValue] ->
  IO (Maybe [FiniteValue])
-- Seeds a fresh ThreeFish generator and runs the test loop in IO.
scRunTestsTFIO sc numTests fun gens = do
  g <- newTFGen
  evalRandT (scRunTests sc numTests fun gens) g
-- | Call @scRunTest@ many times, returning the first failure if any.
-- | Call @scRunTest@ many times, returning the first failure if any.
scRunTests :: (Functor m, MonadIO m, MonadRandom m) => SharedContext s ->
  Integer -> SharedTerm s -> [m FiniteValue] -> m (Maybe [FiniteValue])
scRunTests sc numTests fun gens =
  if numTests < 0 then
    panic "scRunTests:" ["number of tests must be non-negative"]
  else do
    let oneTest = scRunTest sc fun gens
    -- Use 'msum' to collapse the embedded 'Maybe's, retaining the
    -- first counter example, if any.
    msum <$> replicateM (fromIntegral numTests) oneTest
{- | Apply a testable value to some randomly-generated arguments.
Returns `Nothing` if the function returned `True`, or
`Just counterexample` if it returned `False`.
Use @scTestableType@ to compute the input generators.
Please note that this function assumes that the generators match
the supplied value, otherwise we'll panic.
-}
scRunTest :: (MonadIO m, MonadRandom m) => SharedContext s ->
  SharedTerm s -> [m FiniteValue] -> m (Maybe [FiniteValue])
-- Draws one argument tuple from the generators, applies the function
-- concretely, and inspects the boolean result. Panics (per the note above)
-- if the term does not evaluate to a boolean.
scRunTest sc fun gens = do
  xs <- sequence gens
  result <- liftIO $ apply xs
  case result of
    VBool True -> return $ Nothing
    VBool False -> do
      return $ Just xs
    _ -> panic "Type error while running test"
         [ "Expected a boolean, but got:"
         , show result ]
  where
    -- Convert the FiniteValues to terms, apply, and evaluate concretely.
    apply :: [FiniteValue] -> IO CValue
    apply xs = do
      xs' <- mapM (scFiniteValue sc) xs
      app <- scApplyAll sc fun xs'
      return $ evalSharedTerm (scModule sc) Map.empty app
-- | Given a function type, compute generators for the function's
-- arguments. The supported function types are of the form
--
-- 'FiniteType -> ... -> FiniteType -> Bool'
--
-- and 'Nothing' is returned when attempting to generate arguments for
-- functions of unsupported type.
scTestableType :: (Applicative m, Functor m, MonadRandom m) =>
  SharedContext s -> SharedTerm s -> IO (Maybe [m FiniteValue])
-- Peels Pi binders off the (whnf-reduced) type: each finite-typed domain
-- contributes one generator; a bare Bool codomain ends the list; anything
-- else makes the whole type untestable (Nothing).
scTestableType sc ty = do
  ty' <- scWhnf sc ty
  case ty' of
    (asPi -> Just (_nm, asFiniteTypePure -> Just dom, rng)) -> do
      let domGen = randomFiniteValue dom
      rngGens <- scTestableType sc rng
      return $ (domGen :) <$> rngGens
    (asBoolType -> Just ()) -> return $ Just []
    _ -> return Nothing
----------------------------------------------------------------
-- Dispatch on the finite type's shape; bit-vectors get the dedicated
-- word generator, all other vectors the generic one.
randomFiniteValue :: (Applicative m, Functor m, MonadRandom m) =>
  FiniteType -> m FiniteValue
randomFiniteValue FTBit = randomBit
randomFiniteValue (FTVec n FTBit) = randomWord n
randomFiniteValue (FTVec n t) = randomVec n t
randomFiniteValue (FTTuple ts) = randomTuple ts
randomFiniteValue (FTRec fields) = randomRec fields
----------------------------------------------------------------
-- The value generators below follow a pattern made clear in the
-- definition of 'randomFiniteValue' above: each 'FiniteValue' value
-- generator takes the same (non-constant) arguments as the
-- corresponding 'FiniteType' type constructor.
-- | Generate one uniformly random bit value.
randomBit :: (Functor m, MonadRandom m) => m FiniteValue
randomBit = fmap FVBit getRandom
-- | Generate a uniformly random word of the given bit width
-- (i.e. a value of type @[w]@, drawn from [0, 2^w - 1]).
randomWord :: (Functor m, MonadRandom m) => Nat -> m FiniteValue
randomWord w = fmap (FVWord w) (getRandomR (0, upper))
  where upper = 2 ^ unNat w - 1
-- | Generate a random vector. Generally, this should be used for sequences
-- other than bits; for sequences of bits use 'randomWord'. The difference
-- is mostly about how the results will be displayed.
randomVec :: (Applicative m, Functor m, MonadRandom m) =>
  Nat -> FiniteType -> m FiniteValue
randomVec w t = fmap (FVVec t) (replicateM len (randomFiniteValue t))
  where len = fromIntegral (unNat w)
-- | Generate a random tuple value, one component per element type.
randomTuple :: (Applicative m, Functor m, MonadRandom m) =>
  [FiniteType] -> m FiniteValue
randomTuple ts = fmap FVTuple (traverse randomFiniteValue ts)
-- | Generate a random record value, drawing one value per field.
randomRec :: (Applicative m, Functor m, MonadRandom m) =>
  Map FieldName FiniteType -> m FiniteValue
randomRec fieldTys = fmap FVRec (traverse randomFiniteValue fieldTys)
-- Manual smoke test (not exported use): sample a 16-element vector of
-- 1-bit vectors and print it.
_test :: IO ()
_test = do
  s <- evalRandIO $ randomFiniteValue (FTVec (Nat 16) (FTVec (Nat 1) FTBit))
  print s
| iblumenfeld/saw-core | src/Verifier/SAW/Testing/Random.hs | bsd-3-clause | 5,887 | 0 | 15 | 1,043 | 1,378 | 732 | 646 | -1 | -1 |
{-# LANGUAGE
DeriveDataTypeable ,
DeriveFoldable ,
ScopedTypeVariables,
RankNTypes,
TypeOperators,
DeriveTraversable,
GeneralizedNewtypeDeriving,
ImpredicativeTypes,
TypeFamilies,
PackageImports,
StandaloneDeriving,
FlexibleContexts,
DeriveFunctor,
TypeSynonymInstances,
FlexibleInstances,
GeneralizedNewtypeDeriving,
NoMonomorphismRestriction,
TemplateHaskell #-}
-- Generate a dataset
-- Using a the bicicle model
import System.Directory (doesFileExist)
import qualified MultiLinear.Class as M
import qualified System.IO as IO
import Debug.Trace as D
import Linear.V1
import Data.Distributive
import Linear.V2
import Solver.RungeKutta
import Linear.V3
import Linear.Metric
import Control.Monad (when)
import Sensor.Razor9DOF
import qualified Linear.Vector as V
import Control.Proxy.Prelude.Base hiding(Product)
import Display.Cube
import Data.List(transpose)
import Local
import Exponential.SO3
import Exponential.Class
import Rotation.SO3
import Space.SO3
import Space.Class
import Vectorization
import Kalman
import Prelude hiding(sequence,readFile,writeFile)
import As
import Display.Main
import Data.Data
import Linear.Matrix
import Data.Maybe
import qualified Data.Map as M
import Data.ByteString.Lazy (readFile,writeFile)
import Graphics.Gnuplot.Simple
import System.Environment
import Data.Foldable(toList,Foldable)
import Data.Vector.Unboxed.Base
import qualified Data.Vector.Unboxed as U
import Control.Lens
import Control.Lens.Getter
import Control.Lens.Setter
import Control.Lens.TH
import Control.Monad (liftM,replicateM,(>=>))
import Data.Word
import Data.Traversable
import Control.Applicative
import Data.Distributive
import Data.Functor.Product
import Data.Functor.Compose
import qualified Numeric.LinearAlgebra as L
import System.IO
import Data.Time
import Data.Time.Calendar
import Control.Proxy hiding (Product)
import qualified Control.Proxy.Trans.State as State
import Space.SE3
import Position
import Attitude
s0 :: Position Double
-- Initial filter state: zero position/velocity, a fixed attitude offset of
-- -0.35 rad about the y-axis, identity-ish sensor calibrations, gravity
-- 9.81 -- presumably m/s^2, confirm against the Position type's field order.
s0 = (0 `Pair` expM (pure 0)) `Pair` 0 `Pair` 0 `Pair` (expM (V3 0 (-0.35) 0 )) `Pair` accLin `Pair` magLin `Pair` gyroLin `Pair` V1 9.81 `Pair` V1 1
-- Initial (time, (state, covariance)); covariance starts inflated at
-- 100 * gyroCov to reflect initial uncertainty.
init0 =(0,(s0,100 *^^ gyroCov))
-- Entry point: first CLI argument names the log under calibrate/; results
-- are written to result/<name> and then plotted.
-- NOTE(review): the pattern (f:xs) crashes with no arguments; xs, hLog and
-- t are never used in this visible code -- presumably consumed by the
-- display/pipe machinery or leftovers, confirm. hLog is never closed here.
main = do
  f:xs <- getArgs
  let flog = "result/" ++ f
  hLog <- openFile ("calibrate/" ++ f ) ReadMode
  fLog <- openFile flog WriteMode
  t <- getCurrentTime
  configureDisplay
  stop
  hClose fLog
  plot flog
-- Debug helper: traces the current state estimate (covariance printing is
-- commented out).
prettyShow x = do
  D.trace (show $ fst $ snd x) (return ())
  --putStrLn "Covariance : "
  --print (snd $ snd x)
-- Runs the Kalman pipe, resuming from a saved state in state/<file> when
-- one exists, otherwise starting from init0.
imuPipe file x = do
  let stateFile = "state/" ++ file
  existFile <- lift$ doesFileExist stateFile
  if existFile
    then do
      lift $ putStrLn "File Exist"
      sfile <- lift$ IO.readFile stateFile
      -- Saved state is the 'read' of a previously 'show'n (state, cov) pair;
      -- a corrupt file will make 'read' throw -- TODO confirm acceptable.
      let st0 = read sfile
      State.evalStateP (_2.~ st0 $ init0 ) imuKalman
    else
      State.evalStateP init0 imuKalman
-- Lens shortcuts into the (time, (state, covariance)) tuple.
state l = _2 ._1 . l
covariance l = _2 . _2 . l
-- Composite lenses bundling the calibration fields each sensor model needs
-- (|.| is presumably a lens product from the local libraries -- confirm).
acc = orientation |.| accCalib |.| gravity
mag = orientation |.| orientationOffset |.| magCalib |.| magField
gyr = orientation |.| gyroCalib
-- Apply per-axis linear (scale/offset) calibration to a V3 reading.
linscaleV3 = liftA2 linscale
--imuKalman ::Proxy p => State.StateP (Double, (Attitude Double,Covariance Attitude Double )) p (Attitude Double) (Double, IMU Double) () (Double,[V3 Double] ) IO ()
-- The filter proper: 'init' consumes one sample to set the initial
-- orientation from accelerometer+magnetometer angles, then 'loop' runs
-- predict (gyro) / update (mag, conditionally acc) forever, responding
-- with the state plus derived Euler angles.
imuKalman = init where
  init = do
    slast <- State.get
    (t0,mi@(IMU accR gyrR magR)) <- request (snd slast)
    let
      accUn = linscaleV3 (s0 ^. accCalib.deCompose ) accR
      -- Normalize then rescale to gravity magnitude before angle extraction.
      acc = fmap (*9.81) $ fmap (/norm accUn) accUn
      gyr = linscaleV3 (s0 ^. gyroCalib.deCompose) gyrR
      magUn = linscaleV3 (s0 ^. magCalib.deCompose) magR
      mag = fmap (/norm magUn) magUn
      angles = anglesMeasure (IMU acc gyr mag)
    State.modify (_1 .~ t0)
    State.modify (_2._1.orientation.~(rotation angles))
    loop
  loop = do
    slast <- State.get
    (t,mi@(IMU accR gyrR magR)) <- request (snd slast)
    -- Predict with the gyro-driven transition, then correct with the
    -- magnetometer measurement.
    State.modify (predictionE (positionTransition accR gyrR) t gyroCov )
    State.modify (_2 %~ measure magneticField magneticCov magR)
    snew <- State.get
    let
      accUn = linscaleV3 (s0 ^. accCalib.deCompose) accR
      acc = accUn
    -- Only trust the accelerometer when near free of linear acceleration.
    -- NOTE(review): 'abs (norm acc ) - 9.81 < 2.0' parses as
    -- (abs (norm acc)) - 9.81 < 2.0, i.e. norm < 11.81 with no lower bound;
    -- 'abs (norm acc - 9.81) < 2.0' was likely intended -- confirm.
    when (abs (norm acc ) - 9.81 < 2.0 ) $ State.modify (_2 %~ measure gravityField accelerometerCov accR)
    () <-respond((snd $ snew , mi),[angles (snew ^. _2._1.orientation), anglesMeasure mi,angles (snew ^. _2._1.orientationOffset )])
    loop
-- Parse a result log written as one 'show'n record per line and render a
-- series of gnuplot charts of the filter's calibration parameters, angle
-- estimates, and integrated sensor readings.
-- NOTE(review): System.IO.readFile is lazy I/O -- the handle stays open
-- until the whole file has been consumed by the plots.
plot file = do
  buff<- System.IO.readFile file
  let convert = map read . lines
      input = convert buff :: [(((Attitude Double ,Covariance Attitude Double),IMU Double ),[V3 Double])]
      gyroLin' = map ( unCompose . (^. gyroCalib ) . fst . fst . fst)input
      accLin' = map ( unCompose . (^. accCalib ) . fst . fst . fst)input
      gyroRawVal = map ( gyroRaw . snd . fst)input
      accRawVal = map ( accRaw . snd . fst)input
      -- Extract per-axis scale/offset calibration parameters as lists.
      scaleparam f = map(toList . (\(Compose x )-> fmap _scale x).f . fst . fst . fst) input
      offsetparam f = map(toList . (\(Compose x )-> fmap _offset x).f . fst . fst . fst) input
      -- Calibrated readings scaled by 0.055 -- presumably the sample
      -- period or a unit conversion factor, confirm.
      gyroVal = map (toList . fmap (*0.055) ) $ zipWith (liftA2 linscale) gyroLin' gyroRawVal
      accVal = map (toList . fmap (*0.055) ) $ zipWith (liftA2 linscale) accLin' accRawVal
      accScale = scaleparam (^. accCalib )
      gyroScale = scaleparam (^. gyroCalib )
      magScale = scaleparam (^. magCalib )
      magOffset= offsetparam (^. magCalib )
      gyroOffset= offsetparam (^. gyroCalib )
      accOffset= offsetparam (^. accCalib )
      grav= map(toList .(^. gravity ) . fst . fst . fst) input
      magneticField = map(toList .(^. magField ) . fst . fst . fst) input
      angles = map (concat . map toList . snd ) input
  plotListsStyle [Title "Angles"] $ zipWith (\x y ->(PlotStyle Lines . CustomStyle .(\i -> [LineTitle i]) $ x,y)) ["true-roll","true-pitch","true-yaw","raw-roll","raw-pitch","raw-yaw","offset-roll","offset-pitch","offset-yaw"] $ transpose angles
  plotLists [Title "Magnetometer Scale"] $ transpose $ magScale
  plotLists [Title "Accelerometer Scale"] $ transpose $ accScale
  plotLists [Title "Magnetometer Offset"] $ transpose $ magOffset
  plotLists [Title "Accelerometer Offset"] $ transpose $ accOffset
  plotLists [Title "Gyroscope Offset"] $ transpose $ gyroOffset
  plotLists [Title "Gyroscope Scale"] $ transpose $ gyroScale
  plotLists [Title "Gravity"] $ transpose $ grav
  plotLists [Title "Magnetic"] $ transpose $ magneticField
  plotLists [Title "Gyroscope Integrated"] (transpose $ scanl1 (zipWith(+)) gyroVal )
  plotLists [Title "Accelerometer Integrated"] (transpose $ scanl1 (zipWith(+)) accVal )
degrees x = x*180/pi
| massudaw/mtk | filters/position/PositionFilter.hs | bsd-3-clause | 6,858 | 4 | 20 | 1,529 | 2,194 | 1,169 | 1,025 | -1 | -1 |
main = putStrLn "Hello World.\nWelcome to 30 Days of Code.\n"
| rhovland/hackerrank-30days2016 | day0.hs | bsd-3-clause | 62 | 0 | 5 | 10 | 9 | 4 | 5 | 1 | 1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
-----------------------------------------------------------------------------
--
-- Stg to C-- code generation: expressions
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmExpr ( cgExpr ) where
#define FAST_STRING_NOT_NEEDED
#include "HsVersions.h"
import {-# SOURCE #-} StgCmmBind ( cgBind )
import StgCmmMonad
import StgCmmHeap
import StgCmmEnv
import StgCmmCon
import StgCmmProf (saveCurrentCostCentre, restoreCurrentCostCentre, emitSetCCC)
import StgCmmLayout
import StgCmmPrim
import StgCmmHpc
import StgCmmTicky
import StgCmmUtils
import StgCmmClosure
import StgSyn
import MkGraph
import BlockId
import Cmm
import CmmInfo
import CoreSyn
import DataCon
import ForeignCall
import Id
import PrimOp
import TyCon
import Type
import CostCentre ( CostCentreStack, currentCCS )
import Maybes
import Util
import FastString
import Outputable
import Control.Monad (unless,void)
import Control.Arrow (first)
import Prelude hiding ((<*>))
------------------------------------------------------------------------
-- cgExpr: the main function
------------------------------------------------------------------------
-- | Generate Cmm code for one STG expression, dispatching on the
-- expression constructor, and report how the result reaches the
-- continuation ('ReturnKind').
cgExpr :: StgExpr -> FCode ReturnKind

cgExpr (StgApp fun args)     = cgIdApp fun args

{- seq# a s ==> a -}
-- seq# is compiled away here: forcing 'a' is exactly entering it.
cgExpr (StgOpApp (StgPrimOp SeqOp) [StgVarArg a, _] _res_ty) =
  cgIdApp a []

cgExpr (StgOpApp op args ty) = cgOpApp op args ty
cgExpr (StgConApp con args)  = cgConApp con args
cgExpr (StgTick t e)         = cgTick t >> cgExpr e
cgExpr (StgLit lit)          = do cmm_lit <- cgLit lit
                                  emitReturn [CmmLit cmm_lit]

cgExpr (StgLet binds expr)   = do { cgBind binds; cgExpr expr }

-- The let-no-escape bodies are emitted out of line; the join label is
-- placed after the body so the out-of-line code can branch back to it.
cgExpr (StgLetNoEscape _ _ binds expr) =
  do { u <- newUnique
     ; let join_id = mkBlockId u
     ; cgLneBinds join_id binds
     ; r <- cgExpr expr
     ; emitLabel join_id
     ; return r }

cgExpr (StgCase expr _live_vars _save_vars bndr _srt alt_type alts) =
  cgCase expr bndr alt_type alts

-- Lambdas cannot appear here: lambda-lifting/closure conversion has
-- already turned them into bindings.
cgExpr (StgLam {}) = panic "cgExpr: StgLam"
------------------------------------------------------------------------
-- Let no escape
------------------------------------------------------------------------
{- Generating code for a let-no-escape binding, aka join point is very
very similar to what we do for a case expression. The duality is
between
let-no-escape x = b
in e
and
case e of ... -> b
That is, the RHS of 'x' (ie 'b') will execute *later*, just like
the alternative of the case; it needs to be compiled in an environment
in which all volatile bindings are forgotten, and the free vars are
bound only to stable things like stack locations.. The 'e' part will
execute *next*, just like the scrutinee of a case. -}
-------------------------
-- | Compile the bindings of a let-no-escape.  Each RHS is emitted as an
-- out-of-line block whose fall-through branches to @join_id@.
cgLneBinds :: BlockId -> StgBinding -> FCode ()
cgLneBinds join_id (StgNonRec bndr rhs)
  = do  { local_cc <- saveCurrentCostCentre
                -- See Note [Saving the current cost centre]
        ; (info, fcode) <- cgLetNoEscapeRhs join_id local_cc bndr rhs
        ; fcode
        ; addBindC info }

cgLneBinds join_id (StgRec pairs)
  = do  { local_cc <- saveCurrentCostCentre
        ; r <- sequence $ unzipWith (cgLetNoEscapeRhs join_id local_cc) pairs
        ; let (infos, fcodes) = unzip r
        -- Bind all the binders before emitting any RHS code: the
        -- recursive RHSs may refer to each other.
        ; addBindsC infos
        ; sequence_ fcodes
        }
-------------------------
-- | Compile one let-no-escape RHS: produce the binder's 'CgIdInfo' plus
-- an 'FCode' action that emits the RHS out of line, appending a branch
-- to the join point.
cgLetNoEscapeRhs
    :: BlockId          -- join point for successor of let-no-escape
    -> Maybe LocalReg   -- Saved cost centre
    -> Id
    -> StgRhs
    -> FCode (CgIdInfo, FCode ())

cgLetNoEscapeRhs join_id local_cc bndr rhs =
  do { (info, rhs_code) <- cgLetNoEscapeRhsBody local_cc bndr rhs
     ; let (bid, _) = expectJust "cgLetNoEscapeRhs" $ maybeLetNoEscape info
     ; let code = do { (_, body) <- getCodeScoped rhs_code
                       -- fall through from the body into a jump to join_id
                     ; emitOutOfLine bid (first (<*> mkBranch join_id) body) }
     ; return (info, code)
     }
-- | Dispatch on the two kinds of let-no-escape RHS (closure vs
-- constructor) and compile each as a let-no-escape "closure" body.
cgLetNoEscapeRhsBody
    :: Maybe LocalReg   -- Saved cost centre
    -> Id
    -> StgRhs
    -> FCode (CgIdInfo, FCode ())
cgLetNoEscapeRhsBody local_cc bndr (StgRhsClosure cc _bi _ _upd _ args body)
  = cgLetNoEscapeClosure bndr local_cc cc (nonVoidIds args) body
cgLetNoEscapeRhsBody local_cc bndr (StgRhsCon cc con args)
  = cgLetNoEscapeClosure bndr local_cc cc [] (StgConApp con args)
        -- For a constructor RHS we want to generate a single chunk of
        -- code which can be jumped to from many places, which will
        -- return the constructor. It's easy; just behave as if it
        -- was an StgRhsClosure with a ConApp inside!
-------------------------
-- | Build the 'CgIdInfo' for a let-no-escape binder and the deferred
-- code that compiles its body as a local "join point" (with its own
-- ticky counter and a no-escape heap check).
cgLetNoEscapeClosure
        :: Id                   -- binder
        -> Maybe LocalReg       -- Slot for saved current cost centre
        -> CostCentreStack      -- XXX: *** NOT USED *** why not?
        -> [NonVoid Id]         -- Args (as in \ args -> body)
        -> StgExpr              -- Body (as in above)
        -> FCode (CgIdInfo, FCode ())

cgLetNoEscapeClosure bndr cc_slot _unused_cc args body
  = do dflags <- getDynFlags
       return ( lneIdInfo dflags bndr args
              , code )
  where
   code = forkLneBody $ do {
            ; withNewTickyCounterLNE (idName bndr) args $ do
            ; restoreCurrentCostCentre cc_slot
            ; arg_regs <- bindArgsToRegs args
            ; void $ noEscapeHeapCheck arg_regs (tickyEnterLNE >> cgExpr body) }
------------------------------------------------------------------------
-- Case expressions
------------------------------------------------------------------------
{- Note [Compiling case expressions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is quite interesting to decide whether to put a heap-check at the
start of each alternative. Of course we certainly have to do so if
the case forces an evaluation, or if there is a primitive op which can
trigger GC.
A more interesting situation is this (a Plan-B situation)
!P!;
...P...
case x# of
0# -> !Q!; ...Q...
default -> !R!; ...R...
where !x! indicates a possible heap-check point. The heap checks
in the alternatives *can* be omitted, in which case the topmost
heapcheck will take their worst case into account.
In favour of omitting !Q!, !R!:
- *May* save a heap overflow test,
if ...P... allocates anything.
- We can use relative addressing from a single Hp to
get at all the closures so allocated.
- No need to save volatile vars etc across heap checks
in !Q!, !R!
Against omitting !Q!, !R!
- May put a heap-check into the inner loop. Suppose
the main loop is P -> R -> P -> R...
Q is the loop exit, and only it does allocation.
This only hurts us if P does no allocation. If P allocates,
then there is a heap check in the inner loop anyway.
- May do more allocation than reqd. This sometimes bites us
badly. For example, nfib (ha!) allocates about 30\% more space if the
worst-casing is done, because many many calls to nfib are leaf calls
which don't need to allocate anything.
We can un-allocate, but that costs an instruction
Neither problem hurts us if there is only one alternative.
Suppose the inner loop is P->R->P->R etc. Then here is
how many heap checks we get in the *inner loop* under various
conditions
                  Alloc   Heap check in branches (!Q!, !R!)?
P Q R yes no (absorb to !P!)
--------------------------------------
n n n 0 0
n y n 0 1
n . y 1 1
y . y 2 1
y . n 1 1
Best choices: absorb heap checks from Q and R into !P! iff
a) P itself does some allocation
or
b) P does allocation, or there is exactly one alternative
We adopt (b) because that is more likely to put the heap check at the
entry to a function, when not many things are live. After a bunch of
single-branch cases, we may have lots of things live
Hence: two basic plans for
case e of r { alts }
------ Plan A: the general case ---------
...save current cost centre...
...code for e,
with sequel (SetLocals r)
...restore current cost centre...
...code for alts...
...alts do their own heap checks
------ Plan B: special case when ---------
(i) e does not allocate or call GC
(ii) either upstream code performs allocation
or there is just one alternative
Then heap allocation in the (single) case branch
is absorbed by the upstream check.
Very common example: primops on unboxed values
...code for e,
with sequel (SetLocals r)...
...code for alts...
...no heap check...
-}
-------------------------------------
-- | Where the heap check for the case alternatives is placed.
-- See Note [Compiling case expressions].
data GcPlan
  = GcInAlts            -- Put a GC check at the start of the case alternatives,
       [LocalReg]       -- which binds these registers
  | NoGcInAlts          -- The scrutinee is a primitive value, or a call to a
                        -- primitive op which does no GC.  Absorb the allocation
                        -- of the case alternative(s) into the upstream check
-------------------------------------
-- | Generate code for a case expression.  The equations below handle
-- several special shapes of scrutinee before the general case.
cgCase :: StgExpr -> Id -> AltType -> [StgAlt] -> FCode ReturnKind

-- Special: scrutinising an enumeration-returning primop; switch on the
-- raw tag instead of building/entering a closure.  See Note [case on bool].
cgCase (StgOpApp (StgPrimOp op) args _) bndr (AlgAlt tycon) alts
  | isEnumerationTyCon tycon -- Note [case on bool]
  = do { tag_expr <- do_enum_primop op args

       -- If the binder is not dead, convert the tag to a constructor
       -- and assign it.
       ; unless (isDeadBinder bndr) $ do
            { dflags <- getDynFlags
            ; tmp_reg <- bindArgToReg (NonVoid bndr)
            ; emitAssign (CmmLocal tmp_reg)
                         (tagToClosure dflags tycon tag_expr) }

       ; (mb_deflt, branches) <- cgAlgAltRhss (NoGcInAlts,AssignedDirectly)
                                              (NonVoid bndr) alts
       ; emitSwitch tag_expr branches mb_deflt 0 (tyConFamilySize tycon - 1)
       ; return AssignedDirectly
       }
  where
    do_enum_primop :: PrimOp -> [StgArg] -> FCode CmmExpr
    do_enum_primop TagToEnumOp [arg]  -- No code!
      = getArgAmode (NonVoid arg)
    do_enum_primop primop args
      = do dflags <- getDynFlags
           tmp <- newTemp (bWord dflags)
           cgPrimOp [tmp] primop args
           return (CmmReg (CmmLocal tmp))
{-
Note [case on bool]
~~~~~~~~~~~~~~~~~~~
This special case handles code like
case a <# b of
True ->
False ->
--> case tagToEnum# (a <$# b) of
True -> .. ; False -> ...
--> case (a <$# b) of r ->
case tagToEnum# r of
True -> .. ; False -> ...
If we let the ordinary case code handle it, we'll get something like
tmp1 = a < b
tmp2 = Bool_closure_tbl[tmp1]
if (tmp2 & 7 != 0) then ... // normal tagged case
but this junk won't optimise away. What we really want is just an
inline comparison:
if (a < b) then ...
So we add a special case to generate
tmp1 = a < b
if (tmp1 == 0) then ...
and later optimisations will further improve this.
Now that #6135 has been resolved it should be possible to remove that
special case. The idea behind this special case and pre-6135 implementation
of Bool-returning primops was that tagToEnum# was added implicitly in the
codegen and then optimized away. Now the call to tagToEnum# is explicit
in the source code, which allows to optimize it away at the earlier stages
of compilation (i.e. at the Core level).
Note [Scrutinising VoidRep]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have this STG code:
f = \[s : State# RealWorld] ->
case s of _ -> blah
This is very odd. Why are we scrutinising a state token? But it
can arise with bizarre NOINLINE pragmas (Trac #9964)
crash :: IO ()
crash = IO (\s -> let {-# NOINLINE s' #-}
s' = s
in (# s', () #))
Now the trouble is that 's' has VoidRep, and we do not bind void
arguments in the environment; they don't live anywhere. See the
calls to nonVoidIds in various places. So we must not look up
's' in the environment. Instead, just evaluate the RHS! Simple.
-}
-- Special: scrutinising a VoidRep variable (e.g. a State# token).
-- Void things are not bound in the environment, so do not look the
-- variable up -- just compile the (single, DEFAULT) alternative.
cgCase (StgApp v []) _ (PrimAlt _) alts
  | isVoidRep (idPrimRep v)  -- See Note [Scrutinising VoidRep]
  , [(DEFAULT, _, _, rhs)] <- alts
  = cgExpr rhs
{- Note [Dodgy unsafeCoerce 1]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case (x :: HValue) |> co of (y :: MutVar# Int)
DEFAULT -> ...
We want to generate an assignment
y := x
We want to allow this assignment to be generated in the case when the
types are compatible, because this allows some slightly-dodgy but
occasionally-useful casts to be used, such as in RtClosureInspect
where we cast an HValue to a MutVar# so we can print out the contents
of the MutVar#. If instead we generate code that enters the HValue,
then we'll get a runtime panic, because the HValue really is a
MutVar#. The types are compatible though, so we can just generate an
assignment.
-}
-- Special: scrutinising an unlifted (or rep-compatible) variable.
-- No evaluation is needed; a plain assignment binds the case binder.
cgCase (StgApp v []) bndr alt_type@(PrimAlt _) alts
  | isUnliftedType (idType v)  -- Note [Dodgy unsafeCoerce 1]
  || reps_compatible
  = -- assignment suffices for unlifted types
    do { dflags <- getDynFlags
       ; unless reps_compatible $
           panic "cgCase: reps do not match, perhaps a dodgy unsafeCoerce?"
       ; v_info <- getCgIdInfo v
       ; emitAssign (CmmLocal (idToReg dflags (NonVoid bndr)))
                    (idInfoToAmode v_info)
       ; bindArgsToRegs [NonVoid bndr]
       ; cgAlts (NoGcInAlts,AssignedDirectly) (NonVoid bndr) alt_type alts }
  where
    reps_compatible = idPrimRep v == idPrimRep bndr
{- Note [Dodgy unsafeCoerce 2, #3132]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In all other cases of a lifted Id being cast to an unlifted type, the
Id should be bound to bottom, otherwise this is an unsafe use of
unsafeCoerce. We can generate code to enter the Id and assume that
it will never return. Hence, we emit the usual enter/return code, and
because bottom must be untagged, it will be entered. The Sequel is a
type-correct assignment, albeit bogus. The (dead) continuation loops;
it would be better to invoke some kind of panic function here.
-}
-- Special: a lifted Id cast to an unlifted type (see Note
-- [Dodgy unsafeCoerce 2, #3132]).  Enter it assuming it is bottom;
-- the code after the call is deliberately unreachable.
cgCase scrut@(StgApp v []) _ (PrimAlt _) _
  = do { dflags <- getDynFlags
       ; mb_cc <- maybeSaveCostCentre True
       ; withSequel (AssignTo [idToReg dflags (NonVoid v)] False) (cgExpr scrut)
       ; restoreCurrentCostCentre mb_cc
       ; emitComment $ mkFastString "should be unreachable code"
       ; l <- newLabelC
       ; emitLabel l
       ; emit (mkBranch l)  -- an infinite loop
       ; return AssignedDirectly
       }
{- Note [Handle seq#]
~~~~~~~~~~~~~~~~~~~~~
case seq# a s of v
(# s', a' #) -> e
==>
case a of v
(# s', a' #) -> e
(taking advantage of the fact that the return convention for (# State#, a #)
is the same as the return convention for just 'a')
-}
-- Special: case on seq# reduces to case on its first argument.
cgCase (StgOpApp (StgPrimOp SeqOp) [StgVarArg a, _] _) bndr alt_type alts
  = -- Note [Handle seq#]
    -- Use the same return convention as vanilla 'a'.
    cgCase (StgApp a []) bndr alt_type alts
-- General case: pick a GC plan (Note [Compiling case expressions]),
-- evaluate the scrutinee with a sequel that assigns the result
-- registers, then compile the alternatives.
cgCase scrut bndr alt_type alts
  = -- the general case
    do { dflags <- getDynFlags
       ; up_hp_usg <- getVirtHp        -- Upstream heap usage
       ; let ret_bndrs = chooseReturnBndrs bndr alt_type alts
             alt_regs  = map (idToReg dflags) ret_bndrs
       ; simple_scrut <- isSimpleScrut scrut alt_type
       ; let do_gc  | not simple_scrut = True
                    | isSingleton alts = False
                    | up_hp_usg > 0    = False
                    | otherwise        = True
               -- cf Note [Compiling case expressions]
             gc_plan = if do_gc then GcInAlts alt_regs else NoGcInAlts

       ; mb_cc <- maybeSaveCostCentre simple_scrut

       ; let sequel = AssignTo alt_regs do_gc{- Note [scrut sequel] -}
       ; ret_kind <- withSequel sequel (cgExpr scrut)
       ; restoreCurrentCostCentre mb_cc
       ; _ <- bindArgsToRegs ret_bndrs
       ; cgAlts (gc_plan,ret_kind) (NonVoid bndr) alt_type alts
       }
{-
Note [scrut sequel]
The job of the scrutinee is to assign its value(s) to alt_regs.
Additionally, if we plan to do a heap-check in the alternatives (see
Note [Compiling case expressions]), then we *must* retreat Hp to
recover any unused heap before passing control to the sequel. If we
don't do this, then any unused heap will become slop because the heap
check will reset the heap usage. Slop in the heap breaks LDV profiling
(+RTS -hb) which needs to do a linear sweep through the nursery.
Note [Inlining out-of-line primops and heap checks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If shouldInlinePrimOp returns True when called from StgCmmExpr for the
purpose of heap check placement, we *must* inline the primop later in
StgCmmPrim. If we don't things will go wrong.
-}
-----------------
-- | Save the current cost centre unless the scrutinee is simple; a
-- simple scrutinee cannot disturb the cost centre, so nothing is saved.
maybeSaveCostCentre :: Bool -> FCode (Maybe LocalReg)
maybeSaveCostCentre simple_scrut =
  if simple_scrut
    then return Nothing
    else saveCurrentCostCentre
-----------------
isSimpleScrut :: StgExpr -> AltType -> FCode Bool
-- Simple scrutinee, does not block or allocate; hence safe to amalgamate
-- heap usage from alternatives into the stuff before the case
-- NB: if you get this wrong, and claim that the expression doesn't allocate
--     when it does, you'll deeply mess up allocation
isSimpleScrut (StgOpApp op args _) _          = isSimpleOp op args
isSimpleScrut (StgLit _)        _             = return True  -- case 1# of { 0# -> ..; ... }
isSimpleScrut (StgApp _ [])     (PrimAlt _)   = return True  -- case x# of { 0# -> ..; ... }
-- Anything else (thunk entry, function call, ...) may block or allocate.
isSimpleScrut _                 _             = return False
isSimpleOp :: StgOp -> [StgArg] -> FCode Bool
-- True iff the op cannot block or allocate
-- Unsafe foreign calls cannot block; safe ones can.
isSimpleOp (StgFCallOp (CCall (CCallSpec _ _ safe)) _) _ = return $! not (playSafe safe)
-- A primop is "simple" exactly when it will be inlined by StgCmmPrim.
isSimpleOp (StgPrimOp op) stg_args = do
    arg_exprs <- getNonVoidArgAmodes stg_args
    dflags <- getDynFlags
    -- See Note [Inlining out-of-line primops and heap checks]
    return $! isJust $ shouldInlinePrimOp dflags op arg_exprs
isSimpleOp (StgPrimCallOp _) _ = return False
-----------------
chooseReturnBndrs :: Id -> AltType -> [StgAlt] -> [NonVoid Id]
-- These are the binders of a case that are assigned
-- by the evaluation of the scrutinee
-- Only non-void ones come back
chooseReturnBndrs bndr (PrimAlt _) _alts
  = nonVoidIds [bndr]

chooseReturnBndrs _bndr (UbxTupAlt _) [(_, ids, _, _)]
  = nonVoidIds ids      -- 'bndr' is not assigned!

chooseReturnBndrs bndr (AlgAlt _) _alts
  = nonVoidIds [bndr]   -- Only 'bndr' is assigned

chooseReturnBndrs bndr PolyAlt _alts
  = nonVoidIds [bndr]   -- Only 'bndr' is assigned

chooseReturnBndrs _ _ _ = panic "chooseReturnBndrs"
        -- UbxTupAlt has only one alternative
-------------------------------------
-- | Compile the alternatives of a case, choosing the switching scheme
-- by 'AltType'.  The scrutinee's result is already in the binders.
cgAlts :: (GcPlan,ReturnKind) -> NonVoid Id -> AltType -> [StgAlt]
       -> FCode ReturnKind
-- At this point the result of the case are in the binders
cgAlts gc_plan _bndr PolyAlt [(_, _, _, rhs)]
  = maybeAltHeapCheck gc_plan (cgExpr rhs)

cgAlts gc_plan _bndr (UbxTupAlt _) [(_, _, _, rhs)]
  = maybeAltHeapCheck gc_plan (cgExpr rhs)
        -- Here bndrs are *already* in scope, so don't rebind them

-- Primitive alternatives: a literal switch on the binder's register.
cgAlts gc_plan bndr (PrimAlt _) alts
  = do  { dflags <- getDynFlags

        ; tagged_cmms <- cgAltRhss gc_plan bndr alts

        ; let bndr_reg = CmmLocal (idToReg dflags bndr)
              (DEFAULT,deflt) = head tagged_cmms
                -- PrimAlts always have a DEFAULT case
                -- and it always comes first

              tagged_cmms' = [(lit,code)
                             | (LitAlt lit, code) <- tagged_cmms]
        ; emitCmmLitSwitch (CmmReg bndr_reg) tagged_cmms' deflt
        ; return AssignedDirectly }

-- Algebraic alternatives: switch on the constructor tag, taken either
-- from the pointer's tag bits (small families) or from the info table.
cgAlts gc_plan bndr (AlgAlt tycon) alts
  = do  { dflags <- getDynFlags

        ; (mb_deflt, branches) <- cgAlgAltRhss gc_plan bndr alts

        ; let fam_sz   = tyConFamilySize tycon
              bndr_reg = CmmLocal (idToReg dflags bndr)

                 -- Is the constructor tag in the node reg?
        ; if isSmallFamily dflags fam_sz
          then do
                let -- Yes, bndr_reg has constr. tag in ls bits
                    tag_expr  = cmmConstrTag1 dflags (CmmReg bndr_reg)
                    branches' = [(tag+1,branch) | (tag,branch) <- branches]
                emitSwitch tag_expr branches' mb_deflt 1 fam_sz
                return AssignedDirectly

          else -- No, get tag from info table
               do dflags <- getDynFlags
                  let -- Note that ptr _always_ has tag 1
                      -- when the family size is big enough
                      untagged_ptr = cmmRegOffB bndr_reg (-1)
                      tag_expr = getConstrTag dflags (untagged_ptr)
                  emitSwitch tag_expr branches mb_deflt 0 (fam_sz - 1)
                  return AssignedDirectly }

cgAlts _ _ _ _ = panic "cgAlts"
        -- UbxTupAlt and PolyAlt have only one alternative
-- Note [alg-alt heap check]
--
-- In an algebraic case with more than one alternative, we will have
-- code like
--
-- L0:
-- x = R1
-- goto L1
-- L1:
-- if (x & 7 >= 2) then goto L2 else goto L3
-- L2:
-- Hp = Hp + 16
-- if (Hp > HpLim) then goto L4
-- ...
-- L4:
-- call gc() returns to L5
-- L5:
-- x = R1
-- goto L1
-------------------
-- | Compile algebraic alternatives, splitting off the DEFAULT branch
-- (if any) from the per-constructor branches, keyed by zero-based tag.
cgAlgAltRhss :: (GcPlan,ReturnKind) -> NonVoid Id -> [StgAlt]
             -> FCode ( Maybe CmmAGraphScoped
                      , [(ConTagZ, CmmAGraphScoped)] )
cgAlgAltRhss gc_plan bndr alts
  = do { tagged_cmms <- cgAltRhss gc_plan bndr alts

       ; let { mb_deflt = case tagged_cmms of
                           ((DEFAULT,rhs) : _) -> Just rhs
                           _other              -> Nothing
                            -- DEFAULT is always first, if present

             ; branches = [ (dataConTagZ con, cmm)
                          | (DataAlt con, cmm) <- tagged_cmms ]
             }

       ; return (mb_deflt, branches)
       }
-------------------
-- | Compile each alternative's RHS to a scoped Cmm graph, binding the
-- constructor arguments relative to the case binder's register.
cgAltRhss :: (GcPlan,ReturnKind) -> NonVoid Id -> [StgAlt]
          -> FCode [(AltCon, CmmAGraphScoped)]
cgAltRhss gc_plan bndr alts = do
  dflags <- getDynFlags
  let
    base_reg = idToReg dflags bndr
    cg_alt :: StgAlt -> FCode (AltCon, CmmAGraphScoped)
    cg_alt (con, bndrs, _uses, rhs)
      = getCodeScoped             $
        maybeAltHeapCheck gc_plan $
        do { _ <- bindConArgs con base_reg bndrs
           ; _ <- cgExpr rhs
           ; return con }
  -- forkAlts compiles each alternative in its own (duplicated) state.
  forkAlts (map cg_alt alts)
-- | Wrap the code for one case alternative with a heap check, as
-- dictated by the 'GcPlan' and by how control reached the alternative.
maybeAltHeapCheck :: (GcPlan,ReturnKind) -> FCode a -> FCode a
maybeAltHeapCheck (plan, ret_kind) code =
  case plan of
    NoGcInAlts    -> code          -- allocation absorbed upstream
    GcInAlts regs ->
      case ret_kind of
        AssignedDirectly    -> altHeapCheck regs code
        ReturnedTo lret off -> altHeapCheckReturnsTo regs lret off code
-----------------------------------------------------------------------------
-- Tail calls
-----------------------------------------------------------------------------
-- | Generate code for a saturated constructor application in tail
-- position: unboxed tuples are returned directly; boxed constructors
-- are allocated and a pointer is returned.
cgConApp :: DataCon -> [StgArg] -> FCode ReturnKind
cgConApp con stg_args
  | isUnboxedTupleCon con       -- Unboxed tuple: assign and return
  = do { arg_exprs <- getNonVoidArgAmodes stg_args
       ; tickyUnboxedTupleReturn (length arg_exprs)
       ; emitReturn arg_exprs }

  | otherwise   --  Boxed constructors; allocate and return
  = ASSERT2( stg_args `lengthIs` dataConRepRepArity con, ppr con <+> ppr stg_args )
    do  { (idinfo, fcode_init) <- buildDynCon (dataConWorkId con) False
                                     currentCCS con stg_args
               -- The first "con" says that the name bound to this
               -- closure is "con", which is a bit of a fudge, but
               -- it only affects profiling (hence the False)
        ; emit =<< fcode_init
        ; emitReturn [idInfoToAmode idinfo] }
-- | Generate code for applying an identifier to arguments in tail
-- position.  'getCallMethod' decides the calling strategy.
cgIdApp :: Id -> [StgArg] -> FCode ReturnKind
cgIdApp fun_id [] | isVoidTy (idType fun_id) = emitReturn []
cgIdApp fun_id args = do
    dflags         <- getDynFlags
    fun_info       <- getCgIdInfo fun_id
    self_loop_info <- getSelfLoop
    let cg_fun_id   = cg_id fun_info
        -- NB: use (cg_id fun_info) instead of fun_id, because
        -- the former may be externalised for -split-objs.
        -- See Note [Externalise when splitting] in StgCmmMonad

        fun_arg     = StgVarArg cg_fun_id
        fun_name    = idName    cg_fun_id
        fun         = idInfoToAmode fun_info
        lf_info     = cg_lf         fun_info
        n_args      = length args
        -- Void args are not passed; see Note [Void arguments in
        -- self-recursive tail calls].
        v_args      = length $ filter (isVoidTy . stgArgType) args
        node_points dflags = nodeMustPointToIt dflags lf_info
    case getCallMethod dflags fun_name cg_fun_id lf_info n_args v_args (cg_loc fun_info) self_loop_info of
            -- A value in WHNF, so we can just return it.
        ReturnIt -> emitReturn [fun]    -- ToDo: does ReturnIt guarantee tagged?

        EnterIt -> ASSERT( null args )  -- Discarding arguments
                   emitEnter fun

        SlowCall -> do      -- A slow function call via the RTS apply routines
                { tickySlowCall lf_info args
                ; emitComment $ mkFastString "slowCall"
                ; slowCall fun args }

        -- A direct function call (possibly with some left-over arguments)
        DirectEntry lbl arity -> do
                { tickyDirectCall arity args
                ; if node_points dflags
                     then directCall NativeNodeCall   lbl arity (fun_arg:args)
                     else directCall NativeDirectCall lbl arity args }

        -- Let-no-escape call or self-recursive tail-call
        JumpToIt blk_id lne_regs -> do
          { adjustHpBackwards -- always do this before a tail-call
          ; cmm_args <- getNonVoidArgAmodes args
          ; emitMultiAssign lne_regs cmm_args
          ; emit (mkBranch blk_id)
          ; return AssignedDirectly }
-- Note [Self-recursive tail calls]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Self-recursive tail calls can be optimized into a local jump in the same
-- way as let-no-escape bindings (see Note [What is a non-escaping let] in
-- stgSyn/CoreToStg.hs). Consider this:
--
-- foo.info:
-- a = R1 // calling convention
-- b = R2
-- goto L1
-- L1: ...
-- ...
-- ...
-- L2: R1 = x
-- R2 = y
-- call foo(R1,R2)
--
-- Instead of putting x and y into registers (or other locations required by the
-- calling convention) and performing a call we can put them into local
-- variables a and b and perform jump to L1:
--
-- foo.info:
-- a = R1
-- b = R2
-- goto L1
-- L1: ...
-- ...
-- ...
-- L2: a = x
-- b = y
-- goto L1
--
-- This can be done only when function is calling itself in a tail position
-- and only if the call passes number of parameters equal to function's arity.
-- Note that this cannot be performed if a function calls itself with a
-- continuation.
--
-- This in fact implements optimization known as "loopification". It was
-- described in "Low-level code optimizations in the Glasgow Haskell Compiler"
-- by Krzysztof Woś, though we use different approach. Krzysztof performed his
-- optimization at the Cmm level, whereas we perform ours during code generation
-- (Stg-to-Cmm pass) essentially making sure that optimized Cmm code is
-- generated in the first place.
--
-- Implementation is spread across a couple of places in the code:
--
-- * FCode monad stores additional information in its reader environment
-- (cgd_self_loop field). This information tells us which function can
-- tail call itself in an optimized way (it is the function currently
-- being compiled), what is the label of a loop header (L1 in example above)
--   and information about local registers in which we should place arguments
-- before making a call (this would be a and b in example above).
--
-- * Whenever we are compiling a function, we set that information to reflect
-- the fact that function currently being compiled can be jumped to, instead
--   of called. This is done in closureCodeBody in StgCmmBind.
--
-- * We also have to emit a label to which we will be jumping. We make sure
-- that the label is placed after a stack check but before the heap
-- check. The reason is that making a recursive tail-call does not increase
-- the stack so we only need to check once. But it may grow the heap, so we
-- have to repeat the heap check in every self-call. This is done in
-- do_checks in StgCmmHeap.
--
-- * When we begin compilation of another closure we remove the additional
-- information from the environment. This is done by forkClosureBody
-- in StgCmmMonad. Other functions that duplicate the environment -
-- forkLneBody, forkAlts, codeOnly - duplicate that information. In other
-- words, we only need to clean the environment of the self-loop information
-- when compiling right hand side of a closure (binding).
--
-- * When compiling a call (cgIdApp) we use getCallMethod to decide what kind
-- of call will be generated. getCallMethod decides to generate a self
-- recursive tail call when (a) environment stores information about
-- possible self tail-call; (b) that tail call is to a function currently
-- being compiled; (c) number of passed non-void arguments is equal to
-- function's arity. (d) loopification is turned on via -floopification
-- command-line option.
--
-- * Command line option to turn loopification on and off is implemented in
-- DynFlags.
--
--
-- Note [Void arguments in self-recursive tail calls]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- State# tokens can get in the way of the loopification optimization as seen in
-- #11372. Consider this:
--
-- foo :: [a]
--        -> (a -> State# s -> (# State# s, Bool #))
-- -> State# s
-- -> (# State# s, Maybe a #)
-- foo [] f s = (# s, Nothing #)
-- foo (x:xs) f s = case f x s of
-- (# s', b #) -> case b of
-- True -> (# s', Just x #)
-- False -> foo xs f s'
--
-- We would like to compile the call to foo as a local jump instead of a call
-- (see Note [Self-recursive tail calls]). However, the generated function has
-- an arity of 2 while we apply it to 3 arguments, one of them being of void
-- type. Thus, we mustn't count arguments of void type when checking whether
-- we can turn a call into a self-recursive jump.
--
-- | Emit code to enter (evaluate) a closure.  The shape of the code
-- depends on the current sequel: a plain jump for 'Return', or a
-- tag-test plus native-return call for 'AssignTo'.
emitEnter :: CmmExpr -> FCode ReturnKind
emitEnter fun = do
  { dflags <- getDynFlags
  ; adjustHpBackwards
  ; sequel <- getSequel
  ; updfr_off <- getUpdFrameOff
  ; case sequel of
      -- For a return, we have the option of generating a tag-test or
      -- not.  If the value is tagged, we can return directly, which
      -- is quicker than entering the value.  This is a code
      -- size/speed trade-off: when optimising for speed rather than
      -- size we could generate the tag test.
      --
      -- Right now, we do what the old codegen did, and omit the tag
      -- test, just generating an enter.
      Return _ -> do
        { let entry = entryCode dflags $ closureInfoPtr dflags $ CmmReg nodeReg
        ; emit $ mkJump dflags NativeNodeCall entry
                        [cmmUntag dflags fun] updfr_off
        ; return AssignedDirectly
        }

      -- The result will be scrutinised in the sequel.  This is where
      -- we generate a tag-test to avoid entering the closure if
      -- possible.
      --
      -- The generated code will be something like this:
      --
      --    R1 = fun  -- copyout
      --    if (fun & 7 != 0) goto Lcall else goto Lret
      --  Lcall:
      --    call [fun] returns to Lret
      --  Lret:
      --    fun' = R1  -- copyin
      --    ...
      --
      -- Note in particular that the label Lret is used as a
      -- destination by both the tag-test and the call.  This is
      -- because Lret will necessarily be a proc-point, and we want to
      -- ensure that we generate only one proc-point for this
      -- sequence.
      --
      -- Furthermore, we tell the caller that we generated a native
      -- return continuation by returning (ReturnedTo Lret off), so
      -- that the continuation can be reused by the heap-check failure
      -- code in the enclosing case expression.
      --
      AssignTo res_regs _ -> do
       { lret <- newLabelC
       ; let (off, _, copyin) = copyInOflow dflags NativeReturn (Young lret) res_regs []
       ; lcall <- newLabelC
       ; updfr_off <- getUpdFrameOff
       ; let area = Young lret
       ; let (outArgs, regs, copyout) = copyOutOflow dflags NativeNodeCall Call area
                                          [fun] updfr_off []
         -- refer to fun via nodeReg after the copyout, to avoid having
         -- both live simultaneously; this sometimes enables fun to be
         -- inlined in the RHS of the R1 assignment.
       ; let entry = entryCode dflags (closureInfoPtr dflags (CmmReg nodeReg))
             the_call = toCall entry (Just lret) updfr_off off outArgs regs
       ; tscope <- getTickScope
       ; emit $
           copyout <*>
           mkCbranch (cmmIsTagged dflags (CmmReg nodeReg))
                     lret lcall Nothing <*>
           outOfLine lcall (the_call,tscope) <*>
           mkLabel lret tscope <*>
           copyin
       ; return (ReturnedTo lret off)
       }
  }
------------------------------------------------------------------------
-- Ticks
------------------------------------------------------------------------
-- | Generate Cmm code for a tick. Depending on the type of Tickish,
-- this will either generate actual Cmm instrumentation code, or
-- simply pass on the annotation as a @CmmTickish@.
cgTick :: Tickish Id -> FCode ()
cgTick tick
  = do { dflags <- getDynFlags
       ; case tick of
           ProfNote   cc t p -> emitSetCCC cc t p          -- cost-centre update
           HpcTick    m n    -> emit (mkTickBox dflags m n) -- HPC tick box
           SourceNote s n    -> emitTick $ SourceNote s n   -- pass through as CmmTickish
           _other            -> return () -- ignore
       }
| nushio3/ghc | compiler/codeGen/StgCmmExpr.hs | bsd-3-clause | 34,111 | 8 | 20 | 9,128 | 4,847 | 2,563 | 2,284 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module CANOpen.Tower
( canopenTower
, CANOpenLEDs(..)
) where
import Ivory.Language
import Ivory.Language.Struct
import Ivory.Stdlib
import Ivory.Tower
import Ivory.Tower.HAL.Bus.CAN
import Ivory.Tower.HAL.Bus.Interface
import CANOpen.Ivory.Types
import CANOpen.Tower.LSS
import CANOpen.Tower.NMT
import CANOpen.Tower.NMT.Types
import CANOpen.Tower.LED
import CANOpen.Tower.SDO
import CANOpen.Tower.PDO
import CANOpen.Tower.Types
import CANOpen.Tower.Utils
import Ivory.Serialize
import CANOpen.Tower.Attr
import CANOpen.Tower.Interface.Base.Dict
-- | Top-level CANOpen node tower: wires the LSS, NMT, SDO and PDO
-- sub-towers to the CAN bus and runs a controller monitor that routes
-- incoming frames and reacts to node-id / NMT-state changes.
canopenTower :: ChanOutput ('Struct "can_message")
             -> AbortableTransmit ('Struct "can_message") ('Stored IBool)
             -> CANOpenLEDs
             -> ObjDict
             -> Tower e ()
canopenTower res req leds objdictApp = do
  canopenTowerDeps

  -- Internal channels connecting the controller to the sub-towers.
  (nid_update_in, nid_update_out) <- channel
  (lss_in, lss_out) <- channel
  (nmt_in, nmt_out) <- channel
  (sdo_in, sdo_out) <- channel
  (pdo_in, pdo_out) <- channel

  ledState <- ledStatusTower leds

  attrs@BaseAttrs{..} <- towerBaseAttrs initBaseAttrs

  -- Merge the internal dictionary with the application-supplied one.
  objdictInternal <- objDictTower attrs
  objdictMerged <- objDictRouter objdictInternal objdictApp

  (lss_nid_in, lss_nid_out) <- lssTower lss_out req attrs
  (nmt_state_in, nmt_state_out) <- nmtTower nmt_out req nid_update_out attrs
  sdoTower sdo_out req objdictMerged nid_update_out
  pdoTower pdo_out req objdictMerged attrs

  monitor "canopen_controller" $ do
    received <- stateInit "canopen_received" (ival (0 :: Uint32))
    lastmsg <- state "canopen_lastmsg"

    stateLSS <- state "canopen_state_lss"
    stateNMT <- state "canopen_state_nmt"

    -- 0 means "no node id configured yet".
    nodeId <- stateInit "canopen_nodeid" (ival (0 :: Uint8))
    dbg <- state "dbg"

    -- Classify each incoming CAN frame by its standard id and forward
    -- it to the matching protocol handler.
    handler res "canmsg" $ do
      lsse <- emitter lss_in 1
      nmte <- emitter nmt_in 1
      sdoe <- emitter sdo_in 1
      pdoe <- emitter pdo_in 1
      callback $ \msg -> do
        received += 1
        refCopy lastmsg msg

        nid <- deref nodeId
        cid <- getStdCANId msg
        store dbg (cid)

        isLSS <- deref stateLSS
        isNMTmsg <- assign $ cid ==? 0x0
        isLSSmsg <- assign $ cid ==? 0x7E5
        isSDOmsg <- assign $ cid .& (safeCast sdoRequestBase) ==? (safeCast sdoRequestBase)
        isPDOmsg <- assign $ cid .& (safeCast pdoBase) ==? (safeCast pdoBase)

        when (isLSS .&& isLSSmsg) $ do
          emit lsse msg

        -- forward NMT messages only if node_id is configured
        when (nid /=? 0 .&& isNMTmsg) $ do
          emit nmte msg

        when (nid /=? 0 .&& isSDOmsg) $ do
          emit sdoe msg

        when (nid /=? 0 .&& isPDOmsg) $ do
          emit pdoe msg

    -- node id updates from lss
    handler lss_nid_out "canopen_lss_node_id" $ do
      nidE <- emitter nid_update_in 1
      ledStateE <- emitter ledState 2
      callbackV $ \nid -> do
        store nodeId nid
        store stateLSS false
        -- update nmt node
        -- XXX: store (devinfo ~> nmt_state) nmtPreOperational
        emitV nidE nid

        emitV ledStateE ledstateOk
        emitV ledStateE ledstatePreOperational

    -- updates from nmt
    handler nmt_state_out "canopen_nmt_state" $ do
      nmtE <- emitter (nmt_state_in) 1
      odIniE <- emitter (objdict_init objdictMerged) 1
      ledStateE <- emitter ledState 1
      callback $ \state -> do
        refCopy stateNMT state
        nstate <- deref stateNMT
        cond_ [
            nstate ==? nmtInitialising ==> do
              emitV nmtE nmtPreOperational
          , nstate ==? nmtResetting .|| nstate ==? nmtResettingComm ==> do
              -- canopen node reset, reset dictionary state
              -- transition to preOperational here
              emitV odIniE true
              emitV nmtE nmtPreOperational
              emitV ledStateE ledstateLSS
          , nstate ==? nmtStopped ==> do
              emitV ledStateE ledstateStopped
          , nstate ==? nmtPreOperational ==> do
              emitV ledStateE ledstatePreOperational
          , nstate ==? nmtOperational ==> do
              emitV ledStateE ledstateOperational
          ]

    -- Boot: start in LSS mode with a freshly initialised dictionary.
    handler systemInit "canopen_init" $ do
      ledStateE <- emitter ledState 1
      odIniE <- emitter (objdict_init objdictMerged) 1
      callback $ const $ do
        store stateLSS true
        emitV ledStateE ledstateLSS
        emitV odIniE true
-- | Register the compile-time dependencies every CANOpen tower needs:
-- the dictionary types module, the serializer module, and the
-- serializer's support artifacts.
canopenTowerDeps :: Tower e ()
canopenTowerDeps = do
  sequence_
    [ towerDepends dictTypes
    , towerModule  dictTypes
    , towerDepends serializeModule
    , towerModule  serializeModule
    ]
  mapM_ towerArtifact serializeArtifacts
| distrap/ivory-tower-canopen | src/CANOpen/Tower.hs | bsd-3-clause | 4,881 | 0 | 22 | 1,273 | 1,262 | 603 | 659 | 124 | 1 |
{-# LANGUAGE PackageImports #-}
import "RSSQueue" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort)
import Control.Concurrent (forkIO, threadDelay)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
-- | Entry point of the yesod-devel watcher: start the Warp server on a
-- background thread and poll for the terminate sentinel on the main thread.
main :: IO ()
main = do
    putStrLn "Starting devel application"
    (port, app) <- getApplicationDev
    -- Bind the ThreadId to _ explicitly: the thread is fire-and-forget,
    -- and the explicit discard silences -Wall's unused-do-bind warning.
    _ <- forkIO $ runSettings defaultSettings
        { settingsPort = port
        } app
    loop
-- | Poll every 0.1 s for the sentinel file that yesod-devel writes when
-- the development server should shut down.
loop :: IO ()
loop = do
    threadDelay 100000
    shouldStop <- doesFileExist "yesod-devel/devel-terminate"
    if shouldStop
        then terminateDevel
        else loop
-- | Exit the watcher process cleanly; a successful exit signals an
-- orderly shutdown to the invoking yesod-devel process.
terminateDevel :: IO ()
terminateDevel = exitSuccess
| ehamberg/rssqueue | devel.hs | bsd-3-clause | 682 | 0 | 10 | 121 | 179 | 97 | 82 | 22 | 2 |
module HaskellCraft.Parser where
import HaskellCraft.Block
import Text.Parsec
import Control.Applicative hiding ((<|>), many)
-- | Parse integers separated by the given separator parser @c@.
groupInts c = sepBy integer c
-- | Parse doubles separated by the given separator parser @c@.
groupFloats c = sepBy float c
-- | Parse \'|\'-separated groups of comma-separated integers.
groupEvents = sepBy (groupInts (char ',')) (char '|')
-- | A number with an explicit leading \'+\'; the sign is consumed and dropped.
plus = char '+' *> number
-- | A number with a leading \'-\'; the sign is kept in the result string.
minus = (:) <$> char '-' <*> number
-- | One or more decimal digits.
number = many1 digit
-- | The integral part of a number, with optional sign.
intPart = plus <|> minus <|> number
-- | Parse an 'Int'. NOTE(review): uses partial 'read' on the matched
-- digits; safe only because 'intPart' guarantees a valid integer string
-- (overflow of very long digit runs wraps silently).
integer = rd <$> intPart
  where rd = read :: String -> Int
-- | Parse a 'Double': integral part followed by an optional decimal part.
float = fmap rd $ (++) <$> intPart <*> decimal
  where rd = read :: String -> Double
-- | Optional fractional part including the leading dot; empty string if absent.
decimal = option "" $ (:) <$> char '.' <*> number
-- | Parse a comma-separated integer list.
parseCSVInts :: String -> Either ParseError [Int]
parseCSVInts = parse (groupInts (char ',')) "(unknown)"
-- | Parse a bar-separated integer list.
parseBSVInts :: String -> Either ParseError [Int]
parseBSVInts = parse (groupInts (char '|')) "(unknown)"
-- | Parse a comma-separated list of doubles.
parseCSVFloats :: String -> Either ParseError [Double]
parseCSVFloats = parse (groupFloats (char ',')) "(unknown)"
-- | Parse \'|\'-separated groups of comma-separated integers.
parseEvents :: String -> Either ParseError [[Int]]
parseEvents = parse groupEvents "(unknown)"
-- | Parse a single integer; a parse failure or any other arity yields 0.
parseOneInt :: String -> Int
parseOneInt input =
    case parseCSVInts input of
        Right [n] -> n
        _         -> 0
-- | Parse a \"blockId,datum\" pair into a ('Block', Int), falling back to
-- @(Unknown 255, 0)@ on parse failure or wrong arity.
-- NOTE(review): 'toEnum' is partial — an out-of-range block id throws at
-- runtime; presumably the server only sends valid ids. Confirm.
parseBlockIntCSV :: String -> (Block, Int)
parseBlockIntCSV s = case parseCSVInts s of
    Right [a,b] -> (toEnum a,b)
    _ -> (Unknown 255,0)
-- | Parse exactly three comma-separated ints; anything else yields (0,0,0).
parseThreeCSVInts :: String -> (Int, Int, Int)
parseThreeCSVInts input =
    case parseCSVInts input of
        Right [x,y,z] -> (x,y,z)
        _             -> (0,0,0)
-- | Parse exactly three comma-separated doubles; anything else yields
-- (0.0,0.0,0.0).
parseThreeCSVFloats :: String -> (Double, Double, Double)
parseThreeCSVFloats input =
    case parseCSVFloats input of
        Right [x,y,z] -> (x,y,z)
        _             -> (0.0,0.0,0.0)
-- | Like 'parseThreeCSVInts', but signals failure with 'Nothing' instead
-- of a zero triple.
parseMaybeThreeCSVInts :: String -> Maybe (Int, Int, Int)
parseMaybeThreeCSVInts input =
    case parseCSVInts input of
        Right [x,y,z] -> Just (x,y,z)
        _             -> Nothing
-- | Like 'parseThreeCSVFloats', but signals failure with 'Nothing' instead
-- of a zero triple.
parseMaybeThreeCSVFloats :: String -> Maybe (Double, Double, Double)
parseMaybeThreeCSVFloats input =
    case parseCSVFloats input of
        Right [x,y,z] -> Just (x,y,z)
        _             -> Nothing
-- | Parse a \'|\'-separated integer list; parse errors yield the empty list.
parseIntList :: String -> [Int]
parseIntList input =
    case parseBSVInts input of
        Right ns -> ns
        Left _   -> []
-- | Parse \'|\'-separated groups of comma-separated ints into 5-tuples.
-- Groups that do not contain exactly five ints are silently dropped;
-- a parse error yields the empty list.
parseEventList :: String -> [(Int,Int,Int,Int,Int)]
parseEventList s = case parseEvents s of
    Right groups -> concatMap toEvent groups
    _            -> []
  where
    -- Keep only well-formed five-element groups. Pattern matching here
    -- replaces the original head/tail recursion, which relied on partial
    -- functions and re-implemented concatMap by hand.
    toEvent :: [Int] -> [(Int,Int,Int,Int,Int)]
    toEvent [a,b,c,d,e] = [(a,b,c,d,e)]
    toEvent _           = []
| markgrebe/haskell-craft | HaskellCraft/Parser.hs | bsd-3-clause | 2,541 | 0 | 11 | 610 | 1,071 | 581 | 490 | 63 | 4 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
module EFA.Data.Record where
--import EFA.Utility(Caller,merror,(|>),ModuleName(..),FunctionName, genCaller)
import Prelude hiding ((++))
import qualified EFA.Data.Vector as DV
import qualified EFA.Data.OrdData as Ord
--import qualified Data.Vector as V
--import qualified Data.Vector.Unboxed as UV
import qualified Data.NonEmpty as NonEmpty
--import qualified EFA.Equation.Arithmetic as Arith((~/),(~*),(~+),(~-))
--newtype Record vec vec1 a = Record (NE (NE vec) (DataRow vec1 a))
-- | Shorthand for a non-empty container.
type NE vec = NonEmpty.T vec
-- | A record: an outer container @vec@ of rows, each row pairing an
-- ordering key ('Ord.Data') with an inner vector @vec1@ of values.
newtype Record vec vec1 a = Record (vec (DataRow vec1 a))
-- | A record whose outer container is doubly non-empty.
-- NOTE(review): inferred from the @NE (NE vec)@ wrapper — presumably
-- "at least two samples"; confirm intended invariant.
type SignalRecord vec vec1 a = Record (NE (NE vec)) vec1 a
-- | A record whose outer container is non-empty.
type DeltaRecord vec vec1 a = Record (NE vec) vec1 a
-- | One row: an ordering key paired with a vector of values.
type DataRow vec a = (Ord.Data a, vec a)
-- | Map a function over every value of every row, leaving the ordering
-- keys untouched. (Shadows 'Prelude.map' within this module.)
map :: (DV.Walker (NE (NE vec)), DV.Walker vec,
        DV.Storage vec (Ord.Data a, vec1 a),
        DV.Walker vec1, DV.Storage vec1 a)=>
       (a -> a) -> Record vec vec1 a -> Record vec vec1 a
map f (Record rec) = Record $ DV.map (\(t,vec) -> (t,DV.map f vec)) rec
-- | Map a function over every value of every row, where the function
-- also receives the row's ordering key (the "time" of the row).
mapWithTime :: (DV.Walker (NE (NE vec)),DV.Walker vec,
                DV.Storage vec (Ord.Data a, vec1 a),
                DV.Walker vec1, DV.Storage vec1 a)=>
               (Ord.Data a -> a -> a) -> Record vec vec1 a -> Record vec vec1 a
mapWithTime f (Record rec) = Record $ DV.map (\(t,vec) -> (t,DV.map (f t) vec)) rec
-- | NOTE(review): the body (and signature) is byte-identical to
-- 'mapWithTime' — no "delta" (pairwise) behavior is implemented here.
-- Presumably a placeholder; confirm intended semantics before relying on it.
deltaMapWithTime :: (DV.Walker (NE (NE vec)),DV.Walker vec,
                     DV.Storage vec (Ord.Data a, vec1 a),
                     DV.Walker vec1, DV.Storage vec1 a)=>
                    (Ord.Data a -> a -> a) -> Record vec vec1 a -> Record vec vec1 a
deltaMapWithTime f (Record rec) = Record $ DV.map (\(t,vec) -> (t,DV.map (f t) vec)) rec
-- | Append two records via 'DV.append'. (Shadows 'Prelude.++'; the
-- Prelude operator is hidden in this module's import list.)
-- NOTE(review): the constraint @DV.Storage (Record vec vec1) a@ treats
-- the 'Record' type constructor itself as the storage container, which
-- looks inconsistent with the other functions here — verify it resolves.
(++) ::
  (DV.Storage (Record vec vec1) a,
   DV.Singleton (Record vec vec1)) =>
  Record vec vec1 a -> Record vec vec1 a -> Record vec vec1 a
(++) x y = DV.append x y
-- | First row of a record. (Shadows 'Prelude.head'.)
-- NOTE(review): totality depends on 'DV.head' — presumably safe only for
-- non-empty containers; confirm against 'DV.Singleton'.
head :: (DV.Storage vec (Ord.Data a, vec1 a),
         DV.Singleton (NE (NE vec)),DV.Singleton vec)=>
        Record vec vec1 a -> DataRow vec1 a
head (Record rec) = DV.head rec
-- | All rows but the first, re-wrapped as a record. (Shadows 'Prelude.tail'.)
tail :: (DV.Storage vec (Ord.Data a, vec1 a),DV.Singleton vec,
         DV.Singleton (NE (NE vec)))=>
        Record vec vec1 a -> Record vec vec1 a
tail (Record rec) = Record $ DV.tail rec
-- | Combine each pair of adjacent rows with @f@, producing a record with
-- one fewer row (delegates to 'DV.deltaMap').
deltaMap ::
  (DV.Zipper vec,
   DV.Storage vec (Ord.Data b, vec1 b),
   DV.Storage vec (Ord.Data a, vec1 a),
   DV.Singleton vec)=>
  (DataRow vec1 a -> DataRow vec1 a -> DataRow vec1 b) ->
  Record vec vec1 a -> Record vec vec1 b
deltaMap f (Record rec) =
  Record (DV.deltaMap f rec)
-- | Whether a zero crossing occurs between two adjacent samples.
data ZeroCross = Crossing | NoCrossing
{- Work in progress -- continue here
-- |
zeroCrossing :: (Ord.Data a,a) -> (Ord.Data a,a) -> Ord.Data a
zeroCrossing (t,p) (t1,p1) = if t==t1 then t else t ~+ (p2~/m)
let m = (p2~-p1)~/(t2~-t1)
calcZeroCrossingTimes :: PowerPair a -> PowerPair a -> Ord.Data a
calcZeroCrossingTimes (pA,pB) (pA1,pB1) = (t ~+ t1) ~/Arith.fromRational 2
where t = calcZeroCrossingTime pA pA1
t1 = calcZeroCrossingTime pB pB1
calcZeroCrossingTimes :: PowerPair a -> PowerPair a -> Ord.Data a
calcZeroCrossingTimes (pA,pB) (pA1,pB1) = (t ~+ t1) ~/Arith.fromRational 2
where t = calcZeroCrossingTime pA pA1
t1 = calcZeroCrossingTime pB pB1
-}
| energyflowanalysis/efa-2.1 | src/EFA/Data/Record.hs | bsd-3-clause | 3,271 | 0 | 12 | 677 | 1,123 | 593 | 530 | 52 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
-- | The Config type.
module Stack.Types.Config
(
-- * Main configuration types and classes
-- ** HasPlatform & HasStackRoot
HasPlatform(..)
,PlatformVariant(..)
-- ** Config & HasConfig
,Config(..)
,HasConfig(..)
,askLatestSnapshotUrl
,explicitSetupDeps
,getMinimalEnvOverride
-- ** BuildConfig & HasBuildConfig
,BuildConfigNoLocal(..)
,BuildConfigLocal(..)
,BuildConfig(..)
,stackYamlL
,projectRootL
,HasBuildConfigNoLocal(..)
,HasBuildConfig(..)
-- ** GHCVariant & HasGHCVariant
,GHCVariant(..)
,ghcVariantName
,ghcVariantSuffix
,parseGHCVariant
,HasGHCVariant(..)
,snapshotsDir
-- ** Constraint synonym for use with StackMini
,StackMiniM
-- ** EnvConfig & HasEnvConfig
,EnvConfig(..)
,EnvConfigNoLocal(..)
,EnvConfigLocal(..)
,HasEnvConfigNoLocal(..)
,HasEnvConfig(..)
,getCompilerPath
-- * Details
-- ** ApplyGhcOptions
,ApplyGhcOptions(..)
-- ** ConfigException
,ConfigException(..)
-- ** WhichSolverCmd
,WhichSolverCmd(..)
-- ** ConfigMonoid
,ConfigMonoid(..)
,configMonoidInstallGHCName
,configMonoidSystemGHCName
-- ** DumpLogs
,DumpLogs(..)
-- ** EnvSettings
,EnvSettings(..)
,minimalEnvSettings
-- ** GlobalOpts & GlobalOptsMonoid
,GlobalOpts(..)
,GlobalOptsMonoid(..)
,defaultLogLevel
-- ** LoadConfig
,LoadConfig(..)
-- ** PackageEntry & PackageLocation
,PackageEntry(..)
,TreatLikeExtraDep
,PackageLocation(..)
,RemotePackageType(..)
-- ** PackageIndex, IndexName & IndexLocation
-- Re-exports
,PackageIndex(..)
,IndexName(..)
,indexNameText
,IndexLocation(..)
-- Config fields
,configPackageIndex
,configPackageIndexOld
,configPackageIndexCache
,configPackageIndexGz
,configPackageIndexRoot
,configPackageIndexRepo
,configPackageTarball
-- ** Project & ProjectAndConfigMonoid
,Project(..)
,ProjectAndConfigMonoid(..)
-- ** PvpBounds
,PvpBounds(..)
,parsePvpBounds
-- ** ColorWhen
,ColorWhen(..)
,readColorWhen
-- ** SCM
,SCM(..)
-- ** CustomSnapshot
,CustomSnapshot(..)
-- ** GhcOptions
,GhcOptions(..)
,ghcOptionsFor
-- ** PackageFlags
,PackageFlags(..)
-- * Paths
,bindirSuffix
,configInstalledCache
,configMiniBuildPlanCache
,getProjectWorkDir
,docDirSuffix
,flagCacheLocal
,extraBinDirs
,hpcReportDir
,installationRootDeps
,installationRootLocal
,hoogleRoot
,hoogleDatabasePath
,packageDatabaseDeps
,packageDatabaseExtra
,packageDatabaseLocal
,platformOnlyRelDir
,platformGhcRelDir
,useShaPathOnWindows
,workDirL
-- * Command-specific types
-- ** Eval
,EvalOpts(..)
-- ** Exec
,ExecOpts(..)
,SpecialExecCmd(..)
,ExecOptsExtra(..)
-- ** Setup
,DownloadInfo(..)
,VersionedDownloadInfo(..)
,GHCDownloadInfo(..)
,SetupInfo(..)
,SetupInfoLocation(..)
-- ** Docker entrypoint
,DockerEntrypoint(..)
,DockerUser(..)
,module X
-- * Lens helpers
,wantedCompilerVersionL
,actualCompilerVersionL
,buildOptsL
,globalOptsL
,buildOptsInstallExesL
,buildOptsMonoidHaddockL
,buildOptsMonoidTestsL
,buildOptsMonoidBenchmarksL
,buildOptsMonoidInstallExesL
,buildOptsHaddockL
,globalOptsBuildOptsMonoidL
,packageIndicesL
,packageCachesL
,stackRootL
,configUrlsL
,cabalVersionL
,whichCompilerL
,buildConfigL
-- * Lens reexport
,view
,to
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Exception
import Control.Monad (liftM, mzero, join)
import Control.Monad.Catch (MonadThrow, MonadMask)
import Control.Monad.Logger (LogLevel(..), MonadLoggerIO)
import Control.Monad.Reader (MonadReader, MonadIO, liftIO)
import Control.Monad.Trans.Control
import Data.Aeson.Extended
(ToJSON, toJSON, FromJSON, parseJSON, withText, object,
(.=), (..:), (..:?), (..!=), Value(Bool, String),
withObjectWarnings, WarningParser, Object, jsonSubWarnings,
jsonSubWarningsT, jsonSubWarningsTT, WithJSONWarnings(..), noJSONWarnings)
import Data.Attoparsec.Args
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.Either (partitionEithers)
import Data.HashMap.Strict (HashMap)
import Data.IORef (IORef)
import Data.List (stripPrefix)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NonEmpty
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Monoid.Extra
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Data.Typeable
import Data.Yaml (ParseException)
import qualified Data.Yaml as Yaml
import Distribution.System (Platform)
import qualified Distribution.Text
import Distribution.Version (anyVersion)
import GHC.Generics (Generic)
import Generics.Deriving.Monoid (memptydefault, mappenddefault)
import Lens.Micro (Lens', lens, _1, _2, to, Getting)
import Lens.Micro.Mtl (view)
import Network.HTTP.Client (parseRequest)
import Options.Applicative (ReadM)
import qualified Options.Applicative as OA
import qualified Options.Applicative.Types as OA
import Path
import qualified Paths_stack as Meta
import Stack.Types.BuildPlan (GitSHA1, MiniBuildPlan(..), SnapName, renderSnapName)
import Stack.Types.Compiler
import Stack.Types.CompilerBuild
import Stack.Types.Docker
import Stack.Types.FlagName
import Stack.Types.Image
import Stack.Types.Nix
import Stack.Types.PackageIdentifier
import Stack.Types.PackageIndex
import Stack.Types.PackageName
import Stack.Types.Resolver
import Stack.Types.TemplateName
import Stack.Types.Urls
import Stack.Types.Version
import System.FilePath (takeBaseName)
import System.PosixCompat.Types (UserID, GroupID, FileMode)
import System.Process.Read (EnvOverride, findExecutable)
-- Re-exports
import Stack.Types.Config.Build as X
#ifdef mingw32_HOST_OS
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Data.ByteString.Base16 as B16
#endif
-- | The top-level Stackage configuration.
data Config =
  Config {configStackRoot :: !(Path Abs Dir)
         -- ^ ~/.stack more often than not
         ,configWorkDir :: !(Path Rel Dir)
         -- ^ this allows to override .stack-work directory
         ,configUserConfigPath :: !(Path Abs File)
         -- ^ Path to user configuration file (usually ~/.stack/config.yaml)
         ,configBuild :: !BuildOpts
         -- ^ Build configuration
         ,configDocker :: !DockerOpts
         -- ^ Docker configuration
         ,configNix :: !NixOpts
         -- ^ Execution environment (e.g nix-shell) configuration
         ,configEnvOverride :: !(EnvSettings -> IO EnvOverride)
         -- ^ Environment variables to be passed to external tools
         ,configLocalProgramsBase :: !(Path Abs Dir)
         -- ^ Non-platform-specific path containing local installations
         ,configLocalPrograms :: !(Path Abs Dir)
         -- ^ Path containing local installations (mainly GHC)
         ,configConnectionCount :: !Int
         -- ^ How many concurrent connections are allowed when downloading
         ,configHideTHLoading :: !Bool
         -- ^ Hide the Template Haskell "Loading package ..." messages from the
         -- console
         ,configPlatform :: !Platform
         -- ^ The platform we're building for, used in many directory names
         ,configPlatformVariant :: !PlatformVariant
         -- ^ Variant of the platform, also used in directory names
         ,configGHCVariant0 :: !(Maybe GHCVariant)
         -- ^ The variant of GHC requested by the user.
         -- In most cases, use 'BuildConfig' or 'MiniConfig's version instead,
         -- which will have an auto-detected default.
         ,configGHCBuild :: !(Maybe CompilerBuild)
         -- ^ Override build of the compiler distribution (e.g. standard, gmp4, tinfo6)
         ,configUrls :: !Urls
         -- ^ URLs for other files used by stack.
         -- TODO: Better document
         -- e.g. The latest snapshot file.
         -- A build plan name (e.g. lts5.9.yaml) is appended when downloading
         -- the build plan actually.
         ,configPackageIndices :: ![PackageIndex]
         -- ^ Information on package indices. This is left biased, meaning that
         -- packages in an earlier index will shadow those in a later index.
         --
         -- Warning: if you override packages in an index vs what's available
         -- upstream, you may correct your compiled snapshots, as different
         -- projects may have different definitions of what pkg-ver means! This
         -- feature is primarily intended for adding local packages, not
         -- overriding. Overriding is better accomplished by adding to your
         -- list of packages.
         --
         -- Note that indices specified in a later config file will override
         -- previous indices, /not/ extend them.
         --
         -- Using an assoc list instead of a Map to keep track of priority
         ,configSystemGHC :: !Bool
         -- ^ Should we use the system-installed GHC (on the PATH) if
         -- available? Can be overridden by command line options.
         ,configInstallGHC :: !Bool
         -- ^ Should we automatically install GHC if missing or the wrong
         -- version is available? Can be overridden by command line options.
         ,configSkipGHCCheck :: !Bool
         -- ^ Don't bother checking the GHC version or architecture.
         ,configSkipMsys :: !Bool
         -- ^ On Windows: don't use a sandboxed MSYS
         ,configCompilerCheck :: !VersionCheck
         -- ^ Specifies which versions of the compiler are acceptable.
         ,configLocalBin :: !(Path Abs Dir)
         -- ^ Directory we should install executables into
         ,configRequireStackVersion :: !VersionRange
         -- ^ Require a version of stack within this range.
         ,configJobs :: !Int
         -- ^ How many concurrent jobs to run, defaults to number of capabilities
         ,configOverrideGccPath :: !(Maybe (Path Abs File))
         -- ^ Optional gcc override path
         ,configExtraIncludeDirs :: !(Set (Path Abs Dir))
         -- ^ --extra-include-dirs arguments
         ,configExtraLibDirs :: !(Set (Path Abs Dir))
         -- ^ --extra-lib-dirs arguments
         ,configConcurrentTests :: !Bool
         -- ^ Run test suites concurrently
         ,configImage :: !ImageOpts
         -- ^ Image creation options.
         ,configTemplateParams :: !(Map Text Text)
         -- ^ Parameters for templates.
         ,configScmInit :: !(Maybe SCM)
         -- ^ Initialize SCM (e.g. git) when creating new projects.
         ,configGhcOptions :: !GhcOptions
         -- ^ Additional GHC options to apply to either all packages (Nothing)
         -- or a specific package (Just).
         ,configSetupInfoLocations :: ![SetupInfoLocation]
         -- ^ Additional SetupInfo (inline or remote) to use to find tools.
         ,configPvpBounds :: !PvpBounds
         -- ^ How PVP upper bounds should be added to packages
         ,configModifyCodePage :: !Bool
         -- ^ Force the code page to UTF-8 on Windows
         ,configExplicitSetupDeps :: !(Map (Maybe PackageName) Bool)
         -- ^ See 'explicitSetupDeps'. 'Nothing' provides the default value.
         ,configRebuildGhcOptions :: !Bool
         -- ^ Rebuild on GHC options changes
         ,configApplyGhcOptions :: !ApplyGhcOptions
         -- ^ Which packages to ghc-options on the command line apply to?
         ,configAllowNewer :: !Bool
         -- ^ Ignore version ranges in .cabal files. Funny naming chosen to
         -- match cabal.
         ,configDefaultTemplate :: !(Maybe TemplateName)
         -- ^ The default template to use when none is specified.
         -- (If Nothing, the default default is used.)
         ,configAllowDifferentUser :: !Bool
         -- ^ Allow users other than the stack root owner to use the stack
         -- installation.
         ,configPackageCaches :: !(IORef (Maybe (Map PackageIdentifier (PackageIndex, PackageCache),
                                                 HashMap GitSHA1 (PackageIndex, OffsetSize))))
         -- ^ In memory cache of hackage index.
         ,configDumpLogs :: !DumpLogs
         -- ^ Dump logs of local non-dependencies when doing a build.
         ,configMaybeProject :: !(Maybe (Project, Path Abs File))
         -- ^ 'Just' when a local project can be found, 'Nothing' when stack must
         -- fall back on the implicit global project.
         }
-- | Which packages do ghc-options on the command line apply to?
-- Parsed from config text by the 'FromJSON' instance below.
data ApplyGhcOptions = AGOTargets -- ^ all local targets
                     | AGOLocals -- ^ all local packages, even non-targets
                     | AGOEverything -- ^ every package
  deriving (Show, Read, Eq, Ord, Enum, Bounded)
-- | Accepts the strings @targets@, @locals@ and @everything@;
-- any other value is a parse failure.
instance FromJSON ApplyGhcOptions where
    parseJSON = withText "ApplyGhcOptions" parseText
      where
        parseText "targets" = return AGOTargets
        parseText "locals" = return AGOLocals
        parseText "everything" = return AGOEverything
        parseText t = fail $ "Invalid ApplyGhcOptions: " ++ show t
-- | Which build log files to dump.
-- Parsed from config text (or a bare boolean) by the 'FromJSON' instance below.
data DumpLogs
  = DumpNoLogs -- ^ don't dump any logfiles
  | DumpWarningLogs -- ^ dump logfiles containing warnings
  | DumpAllLogs -- ^ dump all logfiles
  deriving (Show, Read, Eq, Ord, Enum, Bounded)
-- | Accepts booleans (@true@ = all, @false@ = none) or the strings
-- @none@, @warning@ and @all@.
instance FromJSON DumpLogs where
    parseJSON (Bool True) = return DumpAllLogs
    parseJSON (Bool False) = return DumpNoLogs
    parseJSON v = withText "DumpLogs" parseText v
      where
        parseText "none" = return DumpNoLogs
        parseText "warning" = return DumpWarningLogs
        parseText "all" = return DumpAllLogs
        parseText t = fail ("Invalid DumpLogs: " ++ show t)
-- | Controls which version of the environment is used
-- when building 'EnvOverride' values via 'configEnvOverride'.
data EnvSettings = EnvSettings
    { esIncludeLocals :: !Bool
    -- ^ include local project bin directory, GHC_PACKAGE_PATH, etc
    , esIncludeGhcPackagePath :: !Bool
    -- ^ include the GHC_PACKAGE_PATH variable
    , esStackExe :: !Bool
    -- ^ set the STACK_EXE variable to the current executable name
    , esLocaleUtf8 :: !Bool
    -- ^ set the locale to C.UTF-8
    }
    deriving (Show, Eq, Ord)
-- | Options for @stack exec@ and related commands.
data ExecOpts = ExecOpts
    { eoCmd :: !SpecialExecCmd
    -- ^ Which command to run (a named executable, ghc, or runghc).
    , eoArgs :: ![String]
    -- ^ Arguments passed through to the command.
    , eoExtra :: !ExecOptsExtra
    -- ^ Plain vs. embellished environment handling.
    } deriving (Show)
-- | The command to execute: a user-named executable, or the special
-- @ghc@ / @runghc@ forms.
data SpecialExecCmd
    = ExecCmd String
    | ExecGhc
    | ExecRunGhc
    deriving (Show, Eq)
-- | Extra exec options: plain mode, or an embellished mode that carries
-- environment settings and additional packages.
data ExecOptsExtra
    = ExecOptsPlain
    | ExecOptsEmbellished
        { eoEnvSettings :: !EnvSettings
        , eoPackages :: ![String]
        }
    deriving (Show)
-- | Options for @stack eval@: the expression to evaluate plus the same
-- extra options as 'ExecOpts'.
data EvalOpts = EvalOpts
    { evalArg :: !String
    , evalExtra :: !ExecOptsExtra
    } deriving (Show)
-- | Parsed global command-line options.
-- See 'GlobalOptsMonoid' for the partial, mergeable form.
data GlobalOpts = GlobalOpts
    { globalReExecVersion :: !(Maybe String) -- ^ Expected re-exec in container version
    , globalDockerEntrypoint :: !(Maybe DockerEntrypoint)
      -- ^ Data used when stack is acting as a Docker entrypoint (internal use only)
    , globalLogLevel :: !LogLevel -- ^ Log level
    , globalTimeInLog :: !Bool -- ^ Whether to include timings in logs.
    , globalConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig'
    , globalResolver :: !(Maybe AbstractResolver) -- ^ Resolver override
    , globalCompiler :: !(Maybe CompilerVersion) -- ^ Compiler override
    , globalTerminal :: !Bool -- ^ We're in a terminal?
    , globalColorWhen :: !ColorWhen -- ^ When to use ansi terminal colors
    , globalStackYaml :: !(Maybe FilePath) -- ^ Override project stack.yaml
    } deriving (Show)
-- | Parsed global command-line options monoid: the partial counterpart
-- of 'GlobalOpts', combined left-biased via its 'Monoid' instance.
data GlobalOptsMonoid = GlobalOptsMonoid
    { globalMonoidReExecVersion :: !(First String) -- ^ Expected re-exec in container version
    , globalMonoidDockerEntrypoint :: !(First DockerEntrypoint)
      -- ^ Data used when stack is acting as a Docker entrypoint (internal use only)
    , globalMonoidLogLevel :: !(First LogLevel) -- ^ Log level
    , globalMonoidTimeInLog :: !(First Bool) -- ^ Whether to include timings in logs.
    , globalMonoidConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig'
    , globalMonoidResolver :: !(First AbstractResolver) -- ^ Resolver override
    , globalMonoidCompiler :: !(First CompilerVersion) -- ^ Compiler override
    , globalMonoidTerminal :: !(First Bool) -- ^ We're in a terminal?
    , globalMonoidColorWhen :: !(First ColorWhen) -- ^ When to use ansi colors
    , globalMonoidStackYaml :: !(First FilePath) -- ^ Override project stack.yaml
    } deriving (Show, Generic)
-- | Fields combine pointwise via the 'Generic'-derived defaults;
-- 'First' fields are left-biased, so earlier option sources win.
instance Monoid GlobalOptsMonoid where
    mempty = memptydefault
    mappend = mappenddefault
-- | Default logging level should be something useful but not crazy.
-- Used when no log level is supplied on the command line.
defaultLogLevel :: LogLevel
defaultLogLevel = LevelInfo
-- | When to emit ANSI color codes; parsed from the CLI by 'readColorWhen'.
data ColorWhen = ColorNever | ColorAlways | ColorAuto
    deriving (Show, Generic)
-- | optparse-applicative reader for 'ColorWhen': accepts exactly
-- @never@, @always@ or @auto@.
readColorWhen :: ReadM ColorWhen
readColorWhen = do
    value <- OA.readerAsk
    maybe badValue return (lookup value choices)
  where
    choices =
        [ ("never", ColorNever)
        , ("always", ColorAlways)
        , ("auto", ColorAuto)
        ]
    badValue =
        OA.readerError "Expected values of color option are 'never', 'always', or 'auto'."
-- | A superset of 'Config' adding information on how to build code. The reason
-- for this breakdown is because we will need some of the information from
-- 'Config' in order to determine the values here.
--
-- These are the components which know nothing about local configuration.
-- See 'BuildConfigLocal' for the project-local parts.
data BuildConfigNoLocal = BuildConfigNoLocal
    { bcConfig :: !Config
    , bcResolver :: !LoadedResolver
      -- ^ How we resolve which dependencies to install given a set of
      -- packages.
    , bcWantedMiniBuildPlan :: !MiniBuildPlan
      -- ^ Build plan wanted for this build
    , bcGHCVariant :: !GHCVariant
      -- ^ The variant of GHC used to select a GHC bindist.
    }
-- | The local parts of 'BuildConfigNoLocal': everything tied to the
-- project's stack.yaml rather than the global configuration.
data BuildConfigLocal = BuildConfigLocal
    { bcPackageEntries :: ![PackageEntry]
      -- ^ Local packages
    , bcExtraDeps :: !(Map PackageName Version)
      -- ^ Extra dependencies specified in configuration.
      --
      -- These dependencies will not be installed to a shared location, and
      -- will override packages provided by the resolver.
    , bcExtraPackageDBs :: ![Path Abs Dir]
      -- ^ Extra package databases
    , bcStackYaml :: !(Path Abs File)
      -- ^ Location of the stack.yaml file.
      --
      -- Note: if the STACK_YAML environment variable is used, this may be
      -- different from projectRootL </> "stack.yaml"
      --
      -- FIXME MSS 2016-12-08: is the above comment still true? projectRootL
      -- is defined in terms of bcStackYaml
    , bcFlags :: !PackageFlags
      -- ^ Per-package flag overrides
    , bcImplicitGlobal :: !Bool
      -- ^ Are we loading from the implicit global stack.yaml? This is useful
      -- for providing better error messages.
    }
-- | Full build configuration: the project-independent and project-local
-- halves bundled together.
data BuildConfig = BuildConfig
    { bcNoLocal :: !BuildConfigNoLocal
    , bcLocal :: !BuildConfigLocal
    }
-- | Lens to the project's stack.yaml location within any environment
-- carrying a 'BuildConfig'.
stackYamlL :: HasBuildConfig env => Lens' env (Path Abs File)
stackYamlL = buildConfigLocalL . lens bcStackYaml setYaml
  where
    setYaml bcl yaml = bcl { bcStackYaml = yaml }
-- | Directory containing the project's stack.yaml file.
-- A getter only (not a full lens): the root is derived from 'stackYamlL'.
projectRootL :: HasBuildConfig env => Getting r env (Path Abs Dir)
projectRootL = stackYamlL.to parent
-- | Configuration after the environment has been setup:
-- 'BuildConfigNoLocal' plus the resolved compiler and Cabal versions.
data EnvConfigNoLocal = EnvConfigNoLocal
    {envConfigBuildConfigNoLocal :: !BuildConfigNoLocal
    ,envConfigCabalVersion :: !Version
    -- ^ This is the version of Cabal that stack will use to compile Setup.hs files
    -- in the build process.
    --
    -- Note that this is not necessarily the same version as the one that stack
    -- depends on as a library and which is displayed when running
    -- @stack list-dependencies | grep Cabal@ in the stack project.
    ,envConfigCompilerVersion :: !CompilerVersion
    -- ^ The actual version of the compiler to be used, as opposed to
    -- 'wantedCompilerL', which provides the version specified by the
    -- build plan.
    ,envConfigCompilerBuild :: !CompilerBuild
    }
-- | Project-local counterpart of 'EnvConfigNoLocal'.
data EnvConfigLocal = EnvConfigLocal
    {envConfigBuildConfigLocal :: !BuildConfigLocal
    ,envConfigPackagesRef :: !(IORef (Maybe (Map (Path Abs Dir) TreatLikeExtraDep)))
    -- ^ Cache for 'getLocalPackages'.
    }
-- | Full environment configuration: the project-independent and
-- project-local halves bundled together (mirrors 'BuildConfig').
data EnvConfig = EnvConfig
    { ecNoLocal :: !EnvConfigNoLocal
    , ecLocal :: !EnvConfigLocal
    }
-- | Value returned by 'Stack.Config.loadConfig'.
data LoadConfig m = LoadConfig
    { lcConfig :: !Config
      -- ^ Top-level Stack configuration.
    , lcLoadBuildConfig :: !(Maybe CompilerVersion -> m BuildConfig)
      -- ^ Action to load the remaining 'BuildConfig'. The
      -- 'Maybe CompilerVersion' argument is a compiler override
      -- (presumably 'globalCompiler' from the CLI — confirm at call sites).
    , lcProjectRoot :: !(Maybe (Path Abs Dir))
      -- ^ The project root directory, if in a project.
    }
-- | One entry of the project's @packages@ list: an optional extra-dep
-- flag (see 'peExtraDepDef' for defaulting), a location, and optional
-- subdirectories within that location.
data PackageEntry = PackageEntry
    { peExtraDepMaybe :: !(Maybe TreatLikeExtraDep)
    , peLocation :: !PackageLocation
    , peSubdirs :: ![FilePath]
    }
    deriving Show
-- | Perform defaulting of 'peExtraDepMaybe': an unspecified extra-dep
-- flag means 'False'.
peExtraDepDef :: PackageEntry -> TreatLikeExtraDep
peExtraDepDef entry = fromMaybe False (peExtraDepMaybe entry)
-- | Should a package be treated just like an extra-dep?
--
-- 'True' means, it will only be built as a dependency
-- for others, and its test suite/benchmarks will not be run.
--
-- Useful when modifying an upstream package, see:
-- https://github.com/commercialhaskell/stack/issues/219
-- https://github.com/commercialhaskell/stack/issues/386
type TreatLikeExtraDep = Bool
-- | Serialize compactly: an entry with no explicit extra-dep flag and no
-- subdirs collapses to just its location; otherwise emit an object,
-- including the @extra-dep@ key only when it was explicitly set.
instance ToJSON PackageEntry where
    toJSON pe | not (peExtraDepDef pe) && null (peSubdirs pe) =
        toJSON $ peLocation pe
    toJSON pe = object $
        -- Prepend the optional "extra-dep" pair onto the constant pairs.
        maybe id (\e -> (("extra-dep" .= e):)) (peExtraDepMaybe pe)
        [ "location" .= peLocation pe
        , "subdirs" .= peSubdirs pe
        ]
-- | Accept either a bare string (parsed as a 'PackageLocation', with
-- defaults for the other fields) or a full object form.
instance FromJSON (WithJSONWarnings PackageEntry) where
    parseJSON (String t) = do
        -- Delegate string parsing to the PackageLocation instance;
        -- its warnings are discarded for the shorthand form.
        WithJSONWarnings loc _ <- parseJSON $ String t
        return $ noJSONWarnings
            PackageEntry
                { peExtraDepMaybe = Nothing
                , peLocation = loc
                , peSubdirs = []
                }
    parseJSON v = withObjectWarnings "PackageEntry" (\o -> PackageEntry
        <$> o ..:? "extra-dep"
        <*> jsonSubWarnings (o ..: "location")
        <*> o ..:? "subdirs" ..!= []) v
-- | Where a package's source lives: a local path or a remote URL.
data PackageLocation
    = PLFilePath FilePath
      -- ^ Note that we use @FilePath@ and not @Path@s. The goal is: first parse
      -- the value raw, and then use @canonicalizePath@ and @parseAbsDir@.
    | PLRemote Text RemotePackageType
      -- ^ URL and further details
    deriving Show
-- | Protocol of a remote 'PackageLocation': plain HTTP archive, or a
-- git/hg repository pinned to a commit.
data RemotePackageType
    = RPTHttp
    | RPTGit Text -- ^ Commit
    | RPTHg Text -- ^ Commit
    deriving Show
-- | File paths and plain HTTP URLs serialize to bare strings; git/hg
-- locations become objects with a repo key and a @commit@ key.
instance ToJSON PackageLocation where
    toJSON (PLFilePath fp) = toJSON fp
    toJSON (PLRemote t RPTHttp) = toJSON t
    toJSON (PLRemote x (RPTGit y)) = object [("git", toJSON x), ("commit", toJSON y)]
    toJSON (PLRemote x (RPTHg y)) = object [( "hg", toJSON x), ("commit", toJSON y)]
-- | Inverse of the 'ToJSON' instance. A bare string is tried first as an
-- HTTP URL (anything 'parseRequest' accepts) and otherwise taken as a
-- file path; objects are tried as git, then hg, forms.
instance FromJSON (WithJSONWarnings PackageLocation) where
    parseJSON v
        = (noJSONWarnings <$> withText "PackageLocation" (\t -> http t <|> file t) v)
        <|> git v
        <|> hg v
      where
        file t = pure $ PLFilePath $ T.unpack t
        http t =
            -- parseRequest failing means "not a URL", so fall through to file.
            case parseRequest $ T.unpack t of
                Left _ -> mzero
                Right _ -> return $ PLRemote t RPTHttp
        git = withObjectWarnings "PackageGitLocation" $ \o -> PLRemote
            <$> o ..: "git"
            <*> (RPTGit <$> o ..: "commit")
        hg = withObjectWarnings "PackageHgLocation" $ \o -> PLRemote
            <$> o ..: "hg"
            <*> (RPTHg <$> o ..: "commit")
-- | A project is a collection of packages. We can have multiple stack.yaml
-- files, but only one of them may contain project information.
data Project = Project
    { projectUserMsg :: !(Maybe String)
      -- ^ A warning message to display to the user when the auto generated
      -- config may have issues.
    , projectPackages :: ![PackageEntry]
      -- ^ Components of the package list
    , projectExtraDeps :: !(Map PackageName Version)
      -- ^ Components of the package list referring to package/version combos,
      -- see: https://github.com/fpco/stack/issues/41
    , projectFlags :: !PackageFlags
      -- ^ Per-package flag overrides
    , projectResolver :: !Resolver
      -- ^ How we resolve which dependencies to use
    , projectCompiler :: !(Maybe CompilerVersion)
      -- ^ When specified, overrides which compiler to use
    , projectExtraPackageDBs :: ![FilePath]
      -- ^ Extra package database paths. NOTE(review): kept as raw
      -- 'FilePath's (cf. 'bcExtraPackageDBs', which uses 'Path Abs Dir');
      -- presumably resolved during config loading — confirm.
    }
    deriving Show
-- | Serialize a project to the stack.yaml object shape. The optional
-- @compiler@ and @user-message@ keys are prepended only when present.
instance ToJSON Project where
    toJSON p = object $
        maybe id (\cv -> (("compiler" .= cv) :)) (projectCompiler p) $
        maybe id (\msg -> (("user-message" .= msg) :)) (projectUserMsg p)
        [ "packages" .= projectPackages p
        , "extra-deps" .= map fromTuple (Map.toList $ projectExtraDeps p)
        , "flags" .= projectFlags p
        , "resolver" .= projectResolver p
        , "extra-package-dbs" .= projectExtraPackageDBs p
        ]
-- | Constraint synonym for constraints satisfied by a 'MiniConfig'
-- environment: reader access, IO, IO lifting/control, logging, and
-- exception masking.
type StackMiniM r m =
    ( MonadReader r m, MonadIO m, MonadBaseControl IO m, MonadLoggerIO m, MonadMask m
    )
-- | An uninterpreted representation of configuration options.
-- Configurations may be "cascaded" using mappend (left-biased).
data ConfigMonoid =
  ConfigMonoid
    { configMonoidStackRoot :: !(First (Path Abs Dir))
    -- ^ See: 'configStackRoot'
    , configMonoidWorkDir :: !(First (Path Rel Dir))
    -- ^ See: 'configWorkDir'.
    , configMonoidBuildOpts :: !BuildOptsMonoid
    -- ^ build options.
    , configMonoidDockerOpts :: !DockerOptsMonoid
    -- ^ Docker options.
    , configMonoidNixOpts :: !NixOptsMonoid
    -- ^ Options for the execution environment (nix-shell or container)
    , configMonoidConnectionCount :: !(First Int)
    -- ^ See: 'configConnectionCount'
    , configMonoidHideTHLoading :: !(First Bool)
    -- ^ See: 'configHideTHLoading'
    , configMonoidLatestSnapshotUrl :: !(First Text)
    -- ^ Deprecated in favour of 'urlsMonoidLatestSnapshot'
    , configMonoidUrls :: !UrlsMonoid
    -- ^ See: 'configUrls'
    , configMonoidPackageIndices :: !(First [PackageIndex])
    -- ^ See: 'configPackageIndices'
    , configMonoidSystemGHC :: !(First Bool)
    -- ^ See: 'configSystemGHC'
    ,configMonoidInstallGHC :: !(First Bool)
    -- ^ See: 'configInstallGHC'
    ,configMonoidSkipGHCCheck :: !(First Bool)
    -- ^ See: 'configSkipGHCCheck'
    ,configMonoidSkipMsys :: !(First Bool)
    -- ^ See: 'configSkipMsys'
    ,configMonoidCompilerCheck :: !(First VersionCheck)
    -- ^ See: 'configCompilerCheck'
    ,configMonoidRequireStackVersion :: !IntersectingVersionRange
    -- ^ See: 'configRequireStackVersion'
    ,configMonoidArch :: !(First String)
    -- ^ Used for overriding the platform
    ,configMonoidGHCVariant :: !(First GHCVariant)
    -- ^ Used for overriding the platform
    ,configMonoidGHCBuild :: !(First CompilerBuild)
    -- ^ Used for overriding the GHC build
    ,configMonoidJobs :: !(First Int)
    -- ^ See: 'configJobs'
    ,configMonoidExtraIncludeDirs :: !(Set (Path Abs Dir))
    -- ^ See: 'configExtraIncludeDirs'
    ,configMonoidExtraLibDirs :: !(Set (Path Abs Dir))
    -- ^ See: 'configExtraLibDirs'
    , configMonoidOverrideGccPath :: !(First (Path Abs File))
    -- ^ Allow users to override the path to gcc
    ,configMonoidConcurrentTests :: !(First Bool)
    -- ^ See: 'configConcurrentTests'
    ,configMonoidLocalBinPath :: !(First FilePath)
    -- ^ Used to override the binary installation dir
    ,configMonoidImageOpts :: !ImageOptsMonoid
    -- ^ Image creation options.
    ,configMonoidTemplateParameters :: !(Map Text Text)
    -- ^ Template parameters.
    ,configMonoidScmInit :: !(First SCM)
    -- ^ Initialize SCM (e.g. git init) when making new projects?
    ,configMonoidGhcOptions :: !GhcOptions
    -- ^ See 'configGhcOptions'
    ,configMonoidExtraPath :: ![Path Abs Dir]
    -- ^ Additional paths to search for executables in
    ,configMonoidSetupInfoLocations :: ![SetupInfoLocation]
    -- ^ Additional setup info (inline or remote) to use for installing tools
    ,configMonoidLocalProgramsBase :: !(First (Path Abs Dir))
    -- ^ Override the default local programs dir, where e.g. GHC is installed.
    ,configMonoidPvpBounds :: !(First PvpBounds)
    -- ^ See 'configPvpBounds'
    ,configMonoidModifyCodePage :: !(First Bool)
    -- ^ See 'configModifyCodePage'
    ,configMonoidExplicitSetupDeps :: !(Map (Maybe PackageName) Bool)
    -- ^ See 'configExplicitSetupDeps'
    ,configMonoidRebuildGhcOptions :: !(First Bool)
    -- ^ See 'configRebuildGhcOptions'
    ,configMonoidApplyGhcOptions :: !(First ApplyGhcOptions)
    -- ^ See 'configApplyGhcOptions'
    ,configMonoidAllowNewer :: !(First Bool)
    -- ^ See 'configAllowNewer'
    ,configMonoidDefaultTemplate :: !(First TemplateName)
    -- ^ The default template to use when none is specified.
    -- (If Nothing, the default default is used.)
    , configMonoidAllowDifferentUser :: !(First Bool)
    -- ^ Allow users other than the stack root owner to use the stack
    -- installation.
    , configMonoidDumpLogs :: !(First DumpLogs)
    -- ^ See 'configDumpLogs'
    }
  deriving (Show, Generic)
-- | Fields combine pointwise via the 'Generic'-derived defaults;
-- 'First' fields are left-biased, so earlier config sources win.
instance Monoid ConfigMonoid where
    mempty = memptydefault
    mappend = mappenddefault
-- | Delegates to 'parseConfigMonoidJSON' so the same field parsing can
-- also be reused when parsing project files.
instance FromJSON (WithJSONWarnings ConfigMonoid) where
    parseJSON = withObjectWarnings "ConfigMonoid" parseConfigMonoidJSON
-- | Parse a partial configuration. Used both to parse both a standalone config
-- file and a project file, so that a sub-parser is not required, which would interfere with
-- warnings for missing fields.
parseConfigMonoidJSON :: Object -> WarningParser ConfigMonoid
parseConfigMonoidJSON obj = do
-- Parsing 'stackRoot' from 'stackRoot'/config.yaml would be nonsensical
let configMonoidStackRoot = First Nothing
configMonoidWorkDir <- First <$> obj ..:? configMonoidWorkDirName
configMonoidBuildOpts <- jsonSubWarnings (obj ..:? configMonoidBuildOptsName ..!= mempty)
configMonoidDockerOpts <- jsonSubWarnings (obj ..:? configMonoidDockerOptsName ..!= mempty)
configMonoidNixOpts <- jsonSubWarnings (obj ..:? configMonoidNixOptsName ..!= mempty)
configMonoidConnectionCount <- First <$> obj ..:? configMonoidConnectionCountName
configMonoidHideTHLoading <- First <$> obj ..:? configMonoidHideTHLoadingName
configMonoidLatestSnapshotUrl <- First <$> obj ..:? configMonoidLatestSnapshotUrlName
configMonoidUrls <- jsonSubWarnings (obj ..:? configMonoidUrlsName ..!= mempty)
configMonoidPackageIndices <- First <$> jsonSubWarningsTT (obj ..:? configMonoidPackageIndicesName)
configMonoidSystemGHC <- First <$> obj ..:? configMonoidSystemGHCName
configMonoidInstallGHC <- First <$> obj ..:? configMonoidInstallGHCName
configMonoidSkipGHCCheck <- First <$> obj ..:? configMonoidSkipGHCCheckName
configMonoidSkipMsys <- First <$> obj ..:? configMonoidSkipMsysName
configMonoidRequireStackVersion <- IntersectingVersionRange . unVersionRangeJSON <$> (
obj ..:? configMonoidRequireStackVersionName
..!= VersionRangeJSON anyVersion)
configMonoidArch <- First <$> obj ..:? configMonoidArchName
configMonoidGHCVariant <- First <$> obj ..:? configMonoidGHCVariantName
configMonoidGHCBuild <- First <$> obj ..:? configMonoidGHCBuildName
configMonoidJobs <- First <$> obj ..:? configMonoidJobsName
configMonoidExtraIncludeDirs <- obj ..:? configMonoidExtraIncludeDirsName ..!= Set.empty
configMonoidExtraLibDirs <- obj ..:? configMonoidExtraLibDirsName ..!= Set.empty
configMonoidOverrideGccPath <- First <$> obj ..:? configMonoidOverrideGccPathName
configMonoidConcurrentTests <- First <$> obj ..:? configMonoidConcurrentTestsName
configMonoidLocalBinPath <- First <$> obj ..:? configMonoidLocalBinPathName
configMonoidImageOpts <- jsonSubWarnings (obj ..:? configMonoidImageOptsName ..!= mempty)
templates <- obj ..:? "templates"
(configMonoidScmInit,configMonoidTemplateParameters) <-
case templates of
Nothing -> return (First Nothing,M.empty)
Just tobj -> do
scmInit <- tobj ..:? configMonoidScmInitName
params <- tobj ..:? configMonoidTemplateParametersName
return (First scmInit,fromMaybe M.empty params)
configMonoidCompilerCheck <- First <$> obj ..:? configMonoidCompilerCheckName
configMonoidGhcOptions <- obj ..:? configMonoidGhcOptionsName ..!= mempty
configMonoidExtraPath <- obj ..:? configMonoidExtraPathName ..!= []
configMonoidSetupInfoLocations <-
maybeToList <$> jsonSubWarningsT (obj ..:? configMonoidSetupInfoLocationsName)
configMonoidLocalProgramsBase <- First <$> obj ..:? configMonoidLocalProgramsBaseName
configMonoidPvpBounds <- First <$> obj ..:? configMonoidPvpBoundsName
configMonoidModifyCodePage <- First <$> obj ..:? configMonoidModifyCodePageName
configMonoidExplicitSetupDeps <-
(obj ..:? configMonoidExplicitSetupDepsName ..!= mempty)
>>= fmap Map.fromList . mapM handleExplicitSetupDep . Map.toList
configMonoidRebuildGhcOptions <- First <$> obj ..:? configMonoidRebuildGhcOptionsName
configMonoidApplyGhcOptions <- First <$> obj ..:? configMonoidApplyGhcOptionsName
configMonoidAllowNewer <- First <$> obj ..:? configMonoidAllowNewerName
configMonoidDefaultTemplate <- First <$> obj ..:? configMonoidDefaultTemplateName
configMonoidAllowDifferentUser <- First <$> obj ..:? configMonoidAllowDifferentUserName
configMonoidDumpLogs <- First <$> obj ..:? configMonoidDumpLogsName
return ConfigMonoid {..}
where
handleExplicitSetupDep :: Monad m => (Text, Bool) -> m (Maybe PackageName, Bool)
handleExplicitSetupDep (name', b) = do
name <-
if name' == "*"
then return Nothing
else case parsePackageNameFromString $ T.unpack name' of
Left e -> fail $ show e
Right x -> return $ Just x
return (name, b)
-- YAML keys for the fields parsed by 'parseConfigMonoidJSON'. They are
-- top-level constants (rather than inline literals) so that error messages
-- elsewhere (e.g. 'ConfigException') can reference the exact key names.
configMonoidWorkDirName :: Text
configMonoidWorkDirName = "work-dir"
configMonoidBuildOptsName :: Text
configMonoidBuildOptsName = "build"
configMonoidDockerOptsName :: Text
configMonoidDockerOptsName = "docker"
configMonoidNixOptsName :: Text
configMonoidNixOptsName = "nix"
configMonoidConnectionCountName :: Text
configMonoidConnectionCountName = "connection-count"
configMonoidHideTHLoadingName :: Text
configMonoidHideTHLoadingName = "hide-th-loading"
configMonoidLatestSnapshotUrlName :: Text
configMonoidLatestSnapshotUrlName = "latest-snapshot-url"
configMonoidUrlsName :: Text
configMonoidUrlsName = "urls"
configMonoidPackageIndicesName :: Text
configMonoidPackageIndicesName = "package-indices"
configMonoidSystemGHCName :: Text
configMonoidSystemGHCName = "system-ghc"
configMonoidInstallGHCName :: Text
configMonoidInstallGHCName = "install-ghc"
configMonoidSkipGHCCheckName :: Text
configMonoidSkipGHCCheckName = "skip-ghc-check"
configMonoidSkipMsysName :: Text
configMonoidSkipMsysName = "skip-msys"
configMonoidRequireStackVersionName :: Text
configMonoidRequireStackVersionName = "require-stack-version"
configMonoidArchName :: Text
configMonoidArchName = "arch"
configMonoidGHCVariantName :: Text
configMonoidGHCVariantName = "ghc-variant"
configMonoidGHCBuildName :: Text
configMonoidGHCBuildName = "ghc-build"
configMonoidJobsName :: Text
configMonoidJobsName = "jobs"
configMonoidExtraIncludeDirsName :: Text
configMonoidExtraIncludeDirsName = "extra-include-dirs"
configMonoidExtraLibDirsName :: Text
configMonoidExtraLibDirsName = "extra-lib-dirs"
configMonoidOverrideGccPathName :: Text
configMonoidOverrideGccPathName = "with-gcc"
configMonoidConcurrentTestsName :: Text
configMonoidConcurrentTestsName = "concurrent-tests"
configMonoidLocalBinPathName :: Text
configMonoidLocalBinPathName = "local-bin-path"
configMonoidImageOptsName :: Text
configMonoidImageOptsName = "image"
configMonoidScmInitName :: Text
configMonoidScmInitName = "scm-init"
configMonoidTemplateParametersName :: Text
configMonoidTemplateParametersName = "params"
configMonoidCompilerCheckName :: Text
configMonoidCompilerCheckName = "compiler-check"
configMonoidGhcOptionsName :: Text
configMonoidGhcOptionsName = "ghc-options"
configMonoidExtraPathName :: Text
configMonoidExtraPathName = "extra-path"
configMonoidSetupInfoLocationsName :: Text
configMonoidSetupInfoLocationsName = "setup-info"
configMonoidLocalProgramsBaseName :: Text
configMonoidLocalProgramsBaseName = "local-programs-path"
configMonoidPvpBoundsName :: Text
configMonoidPvpBoundsName = "pvp-bounds"
configMonoidModifyCodePageName :: Text
configMonoidModifyCodePageName = "modify-code-page"
configMonoidExplicitSetupDepsName :: Text
configMonoidExplicitSetupDepsName = "explicit-setup-deps"
configMonoidRebuildGhcOptionsName :: Text
configMonoidRebuildGhcOptionsName = "rebuild-ghc-options"
configMonoidApplyGhcOptionsName :: Text
configMonoidApplyGhcOptionsName = "apply-ghc-options"
configMonoidAllowNewerName :: Text
configMonoidAllowNewerName = "allow-newer"
configMonoidDefaultTemplateName :: Text
configMonoidDefaultTemplateName = "default-template"
configMonoidAllowDifferentUserName :: Text
configMonoidAllowDifferentUserName = "allow-different-user"
configMonoidDumpLogsName :: Text
configMonoidDumpLogsName = "dump-logs"
-- | Exceptions raised while loading or validating stack's configuration.
-- The 'Show' instance below doubles as the user-facing message renderer.
data ConfigException
  = ParseConfigFileException (Path Abs File) ParseException
  | ParseCustomSnapshotException Text ParseException
  | ParseResolverException Text
  | NoProjectConfigFound (Path Abs Dir) (Maybe Text)
  | UnexpectedArchiveContents [Path Abs Dir] [Path Abs File]
  | UnableToExtractArchive Text (Path Abs File)
  | BadStackVersionException VersionRange
  | NoMatchingSnapshot WhichSolverCmd (NonEmpty SnapName)
  | forall l. ResolverMismatch WhichSolverCmd (ResolverThat's l) String
  | ResolverPartial WhichSolverCmd Resolver String
  | NoSuchDirectory FilePath
  | ParseGHCVariantException String
  | BadStackRoot (Path Abs Dir)
  | Won'tCreateStackRootInDirectoryOwnedByDifferentUser (Path Abs Dir) (Path Abs Dir) -- ^ @$STACK_ROOT@, parent dir
  | UserDoesn'tOwnDirectory (Path Abs Dir)
  | FailedToCloneRepo String
  | ManualGHCVariantSettingsAreIncompatibleWithSystemGHC
  | NixRequiresSystemGhc
  deriving Typeable
-- NOTE: 'show' is (ab)used as the end-user error renderer here, so the
-- strings below are user-facing text, not debug output.
instance Show ConfigException where
    show (ParseConfigFileException configFile exception) = concat
        [ "Could not parse '"
        , toFilePath configFile
        , "':\n"
        , Yaml.prettyPrintParseException exception
        , "\nSee http://docs.haskellstack.org/en/stable/yaml_configuration/"
        ]
    show (ParseCustomSnapshotException url exception) = concat
        [ "Could not parse '"
        , T.unpack url
        , "':\n"
        , Yaml.prettyPrintParseException exception
        -- FIXME: Link to docs about custom snapshots
        -- , "\nSee http://docs.haskellstack.org/en/stable/yaml_configuration/"
        ]
    show (ParseResolverException t) = concat
        [ "Invalid resolver value: "
        , T.unpack t
        , ". Possible valid values include lts-2.12, nightly-YYYY-MM-DD, ghc-7.10.2, and ghcjs-0.1.0_ghc-7.10.2. "
        , "See https://www.stackage.org/snapshots for a complete list."
        ]
    show (NoProjectConfigFound dir mcmd) = concat
        [ "Unable to find a stack.yaml file in the current directory ("
        , toFilePath dir
        , ") or its ancestors"
        , case mcmd of
            Nothing -> ""
            Just cmd -> "\nRecommended action: stack " ++ T.unpack cmd
        ]
    show (UnexpectedArchiveContents dirs files) = concat
        [ "When unpacking an archive specified in your stack.yaml file, "
        , "did not find expected contents. Expected: a single directory. Found: "
        , show ( map (toFilePath . dirname) dirs
               , map (toFilePath . filename) files
               )
        ]
    show (UnableToExtractArchive url file) = concat
        [ "Archive extraction failed. We support tarballs and zip, couldn't handle the following URL, "
        , T.unpack url, " downloaded to the file ", toFilePath $ filename file
        ]
    show (BadStackVersionException requiredRange) = concat
        [ "The version of stack you are using ("
        , show (fromCabalVersion Meta.version)
        , ") is outside the required\n"
        ,"version range specified in stack.yaml ("
        , T.unpack (versionRangeText requiredRange)
        , ")." ]
    show (NoMatchingSnapshot whichCmd names) = concat
        [ "None of the following snapshots provides a compiler matching "
        , "your package(s):\n"
        , unlines $ map (\name -> "    - " <> T.unpack (renderSnapName name))
                        (NonEmpty.toList names)
        , showOptions whichCmd Don'tSuggestSolver
        ]
    show (ResolverMismatch whichCmd resolver errDesc) = concat
        [ "Resolver '"
        , T.unpack (resolverName resolver)
        , "' does not have a matching compiler to build some or all of your "
        , "package(s).\n"
        , errDesc
        , showOptions whichCmd Don'tSuggestSolver
        ]
    show (ResolverPartial whichCmd resolver errDesc) = concat
        [ "Resolver '"
        , T.unpack (resolverName resolver)
        , "' does not have all the packages to match your requirements.\n"
        , unlines $ fmap ("    " <>) (lines errDesc)
          -- The solver command itself should not suggest running the solver.
        , showOptions whichCmd
            (case whichCmd of
                IsSolverCmd -> Don'tSuggestSolver
                _ -> SuggestSolver)
        ]
    show (NoSuchDirectory dir) =
        "No directory could be located matching the supplied path: " ++ dir
    show (ParseGHCVariantException v) =
        "Invalid ghc-variant value: " ++ v
    show (BadStackRoot stackRoot) = concat
        [ "Invalid stack root: '"
        , toFilePath stackRoot
        , "'. Please provide a valid absolute path."
        ]
    show (Won'tCreateStackRootInDirectoryOwnedByDifferentUser envStackRoot parentDir) = concat
        [ "Preventing creation of stack root '"
        , toFilePath envStackRoot
        , "'. Parent directory '"
        , toFilePath parentDir
        , "' is owned by someone else."
        ]
    show (UserDoesn'tOwnDirectory dir) = concat
        [ "You are not the owner of '"
        , toFilePath dir
        , "'. Aborting to protect file permissions."
        , "\nRetry with '--"
        , T.unpack configMonoidAllowDifferentUserName
        , "' to disable this precaution."
        ]
    show (FailedToCloneRepo commandName) = concat
        [ "Failed to use "
        , commandName
        , " to clone the repo. Please ensure that "
        , commandName
        , " is installed and available to stack on your PATH environment variable."
        ]
    show ManualGHCVariantSettingsAreIncompatibleWithSystemGHC = T.unpack $ T.concat
        [ "stack can only control the "
        , configMonoidGHCVariantName
        , " of its own GHC installations. Please use '--no-"
        , configMonoidSystemGHCName
        , "'."
        ]
    show NixRequiresSystemGhc = T.unpack $ T.concat
        [ "stack's Nix integration is incompatible with '--no-system-ghc'. "
        , "Please use '--"
        , configMonoidSystemGHCName
        , "' or disable the Nix integration."
        ]
instance Exception ConfigException
-- | Render the suggested remediation options shown with resolver/snapshot
-- mismatch errors, optionally including a suggestion to run the solver.
showOptions :: WhichSolverCmd -> SuggestSolver -> String
showOptions whichCmd suggestSolver = unlines $ "\nThis may be resolved by:" : options
  where
    options =
        (case suggestSolver of
            SuggestSolver -> [useSolver]
            Don'tSuggestSolver -> []) ++
        (case whichCmd of
            IsSolverCmd -> [useResolver]
            IsInitCmd -> both
            IsNewCmd -> both)
    both = [omitPackages, useResolver]
    useSolver = "    - Using '--solver' to ask cabal-install to generate extra-deps, atop the chosen snapshot."
    -- Bug fix: the closing quote after --omit-packages was missing
    -- ("'--omit-packages to exclude..."), leaving an unbalanced quote in
    -- the user-facing message.
    omitPackages = "    - Using '--omit-packages' to exclude mismatching package(s)."
    useResolver = "    - Using '--resolver' to specify a matching snapshot/resolver"
-- | Which command produced a solver-related error message.
data WhichSolverCmd = IsInitCmd | IsSolverCmd | IsNewCmd
-- | Whether an error message should suggest trying '--solver'.
data SuggestSolver = SuggestSolver | Don'tSuggestSolver
-- | Get the URL to request the information on the latest snapshots
-- (reads 'urlsLatestSnapshot' from the config's 'Urls' record).
askLatestSnapshotUrl :: (MonadReader env m, HasConfig env) => m Text
askLatestSnapshotUrl = view $ configL.to configUrls.to urlsLatestSnapshot
-- | On-disk root directory for a specific package index
-- (@STACK_ROOT/indices/NAME@).
configPackageIndexRoot :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs Dir)
configPackageIndexRoot (IndexName name) = do
    indexDir <- parseRelDir (S8.unpack name)
    stackRoot <- view stackRootL
    return $ stackRoot </> $(mkRelDir "indices") </> indexDir
-- | Git repo directory for a specific package index, returns 'Nothing' if not
-- a Git repo
configPackageIndexRepo :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Maybe (Path Abs Dir))
configPackageIndexRepo name = do
    indices <- view packageIndicesL
    case filter ((== name) . indexName) indices of
        [index] ->
            -- Only a Git-backed index location has a repo directory;
            -- HTTP-backed indices do not.
            case simplifyIndexLocation (indexLocation index) of
                SILGit url -> do
                    sDir <- configPackageIndexRoot name
                    repoName <- parseRelDir $ takeBaseName $ T.unpack url
                    return $ Just $ sDir </> $(mkRelDir "git-update") </> repoName
                SILHttp _ _ -> return Nothing
        -- Index names are unique, so any other match count is an internal
        -- invariant violation.
        _ -> assert False $ return Nothing
-- | Location of the 01-index.cache file
configPackageIndexCache :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndexCache iname = do
    indexRoot <- configPackageIndexRoot iname
    return $ indexRoot </> $(mkRelFile "01-index.cache")
-- | Location of the 01-index.tar file
configPackageIndex :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndex iname = fmap (</> $(mkRelFile "01-index.tar")) (configPackageIndexRoot iname)
-- | Location of the 00-index.tar file. This file is just a copy of
-- the 01-index.tar file, provided for tools which still look for the
-- 00-index.tar file.
configPackageIndexOld :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndexOld iname = do
    indexRoot <- configPackageIndexRoot iname
    return $ indexRoot </> $(mkRelFile "00-index.tar")
-- | Location of the 01-index.tar.gz file
configPackageIndexGz :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File)
configPackageIndexGz = fmap (</> $(mkRelFile "01-index.tar.gz")) . configPackageIndexRoot
-- | Location of a package tarball within an index's root
-- (@.../packages/NAME/VERSION/NAME-VERSION.tar.gz@).
configPackageTarball :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> PackageIdentifier -> m (Path Abs File)
configPackageTarball iname ident = do
    indexRoot <- configPackageIndexRoot iname
    pkgDir <- parseRelDir $ packageNameString $ packageIdentifierName ident
    verDir <- parseRelDir $ versionString $ packageIdentifierVersion ident
    tarName <- parseRelFile $ packageIdentifierString ident ++ ".tar.gz"
    return $ indexRoot </> $(mkRelDir "packages") </> pkgDir </> verDir </> tarName
-- | @".stack-work"@
-- Lens onto the relative work directory stored in 'Config'
-- ('configWorkDir').
workDirL :: HasConfig env => Lens' env (Path Rel Dir)
workDirL = configL.lens configWorkDir (\x y -> x { configWorkDir = y })
-- | Per-project work dir: the project root combined with the configured
-- relative work directory.
getProjectWorkDir :: (HasBuildConfig env, MonadReader env m) => m (Path Abs Dir)
getProjectWorkDir = do
    relWorkDir <- view workDirL
    projRoot <- view projectRootL
    return $ projRoot </> relWorkDir
-- | File containing the installed cache, see "Stack.PackageDump"
configInstalledCache :: (HasBuildConfig env, MonadReader env m) => m (Path Abs File)
configInstalledCache = do
    projWorkDir <- getProjectWorkDir
    return $ projWorkDir </> $(mkRelFile "installed-cache.bin")
-- | Relative directory for the platform identifier
-- (platform string plus any 'PlatformVariant' suffix).
platformOnlyRelDir
    :: (MonadReader env m, HasPlatform env, MonadThrow m)
    => m (Path Rel Dir)
platformOnlyRelDir = do
    plat <- view platformL
    variant <- view platformVariantL
    let dirName = Distribution.Text.display plat ++ platformVariantSuffix variant
    parseRelDir dirName
-- | Directory containing snapshots (@STACK_ROOT/snapshots/PLATFORM@).
snapshotsDir :: (MonadReader env m, HasEnvConfig env, MonadThrow m) => m (Path Abs Dir)
snapshotsDir = do
    platformDir <- platformGhcRelDir
    stackRoot <- view stackRootL
    return $ stackRoot </> $(mkRelDir "snapshots") </> platformDir
-- | Installation root for dependencies, under the stack root.
installationRootDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
installationRootDeps = do
    stackRoot <- view stackRootL
    -- TODO: also useShaPathOnWindows here, once #1173 is resolved.
    psc <- platformSnapAndCompilerRel
    return $ stackRoot </> $(mkRelDir "snapshots") </> psc
-- | Installation root for locals, under the project work dir.
installationRootLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
installationRootLocal = do
    projWorkDir <- getProjectWorkDir
    psc <- platformSnapAndCompilerRel >>= useShaPathOnWindows
    return $ projWorkDir </> $(mkRelDir "install") </> psc
-- | Hoogle directory, under the project work dir.
hoogleRoot :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
hoogleRoot = do
    projWorkDir <- getProjectWorkDir
    psc <- platformSnapAndCompilerRel >>= useShaPathOnWindows
    return $ projWorkDir </> $(mkRelDir "hoogle") </> psc
-- | Get the hoogle database path (@database.hoo@ under 'hoogleRoot').
hoogleDatabasePath :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs File)
hoogleDatabasePath = fmap (</> $(mkRelFile "database.hoo")) hoogleRoot
-- | Path for platform followed by snapshot name followed by compiler
-- name.
platformSnapAndCompilerRel
    :: (MonadReader env m, HasEnvConfig env, MonadThrow m)
    => m (Path Rel Dir)
platformSnapAndCompilerRel = do
    ldResolver <- view loadedResolverL
    platformDir <- platformGhcRelDir
    resolverDir <- parseRelDir (T.unpack (resolverDirName ldResolver))
    compilerDir <- compilerVersionDir
    useShaPathOnWindows $ platformDir </> resolverDir </> compilerDir
-- | Relative directory for the platform and GHC identifier
-- (the version-only string plus the GHC bindist build suffix).
platformGhcRelDir
    :: (MonadReader env m, HasEnvConfig env, MonadThrow m)
    => m (Path Rel Dir)
platformGhcRelDir = do
    verOnly <- platformGhcVerOnlyRelDirStr
    ec <- view envConfigNoLocalL
    parseRelDir $ verOnly ++ compilerBuildSuffix (envConfigCompilerBuild ec)
-- | Relative directory for the platform and GHC identifier without GHC bindist build
platformGhcVerOnlyRelDir
    :: (MonadReader env m, HasPlatform env, HasGHCVariant env, MonadThrow m)
    => m (Path Rel Dir)
platformGhcVerOnlyRelDir = platformGhcVerOnlyRelDirStr >>= parseRelDir
-- | Relative directory for the platform and GHC identifier without GHC bindist build
-- (before parsing into a Path)
platformGhcVerOnlyRelDirStr
    :: (MonadReader env m, HasPlatform env, HasGHCVariant env)
    => m FilePath
platformGhcVerOnlyRelDirStr = do
    plat <- view platformL
    platVariant <- view platformVariantL
    gVariant <- view ghcVariantL
    return $ Distribution.Text.display plat
          ++ platformVariantSuffix platVariant
          ++ ghcVariantSuffix gVariant
-- | This is an attempt to shorten stack paths on Windows to decrease our
-- chances of hitting 260 symbol path limit. The idea is to calculate
-- SHA1 hash of the path used on other architectures, encode with base
-- 16 and take first 8 symbols of it.
--
-- On non-Windows hosts this is simply 'return': the path is unchanged.
useShaPathOnWindows :: MonadThrow m => Path Rel Dir -> m (Path Rel Dir)
useShaPathOnWindows =
#ifdef mingw32_HOST_OS
    parseRelDir . S8.unpack . S8.take 8 . B16.encode . SHA1.hash . encodeUtf8 . T.pack . toFilePath
#else
    return
#endif
-- | Relative directory named after the compiler version in use.
compilerVersionDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Rel Dir)
compilerVersionDir = do
    compilerVersion <- view actualCompilerVersionL
    let dirName =
            case compilerVersion of
                GhcVersion version -> versionString version
                GhcjsVersion {} -> compilerVersionString compilerVersion
    parseRelDir dirName
-- | Package database for installing dependencies into
packageDatabaseDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
packageDatabaseDeps = fmap (</> $(mkRelDir "pkgdb")) installationRootDeps
-- | Package database for installing local packages into
packageDatabaseLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
packageDatabaseLocal = fmap (</> $(mkRelDir "pkgdb")) installationRootLocal
-- | Extra package databases, taken from the local build config.
packageDatabaseExtra :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m [Path Abs Dir]
packageDatabaseExtra = fmap bcExtraPackageDBs (view buildConfigLocalL)
-- | Directory for holding flag cache information
flagCacheLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
flagCacheLocal = fmap (</> $(mkRelDir "flag-cache")) installationRootLocal
-- | Where to store mini build plan caches
configMiniBuildPlanCache :: (MonadThrow m, MonadReader env m, HasConfig env, HasGHCVariant env)
                         => SnapName
                         -> m (Path Abs File)
configMiniBuildPlanCache name = do
    platformDir <- platformGhcVerOnlyRelDir
    stackRoot <- view stackRootL
    cacheFile <- parseRelFile $ T.unpack (renderSnapName name) ++ ".cache"
    -- Yes, cached plans differ based on platform
    return $ stackRoot </> $(mkRelDir "build-plan-cache") </> platformDir </> cacheFile
-- | Suffix applied to an installation root to get the bin dir
-- (compile-time-checked relative path via Template Haskell).
bindirSuffix :: Path Rel Dir
bindirSuffix = $(mkRelDir "bin")
-- | Suffix applied to an installation root to get the doc dir
docDirSuffix :: Path Rel Dir
docDirSuffix = $(mkRelDir "doc")
-- | Where HPC reports and tix files get stored.
hpcReportDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
             => m (Path Abs Dir)
hpcReportDir = fmap (</> $(mkRelDir "hpc")) installationRootLocal
-- | Get the extra bin directories (for the PATH). Puts more local first
--
-- Bool indicates whether or not to include the locals
extraBinDirs :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
             => m (Bool -> [Path Abs Dir])
extraBinDirs = do
    depsRoot <- installationRootDeps
    localRoot <- installationRootLocal
    let depsBin = depsRoot </> bindirSuffix
        localBin = localRoot </> bindirSuffix
    return $ \includeLocals ->
        if includeLocals
            then [localBin, depsBin]
            else [depsBin]
-- | Get the minimal environment override, useful for just calling external
-- processes like git or ghc
getMinimalEnvOverride :: (MonadReader env m, HasConfig env, MonadIO m) => m EnvOverride
getMinimalEnvOverride = do
    cfg <- view configL
    liftIO $ configEnvOverride cfg minimalEnvSettings
-- | The most restrictive environment settings: no project-local binaries,
-- no GHC_PACKAGE_PATH, no stack executable on PATH, no forced UTF-8 locale.
minimalEnvSettings :: EnvSettings
minimalEnvSettings =
    EnvSettings
    { esIncludeLocals = False
    , esIncludeGhcPackagePath = False
    , esStackExe = False
    , esLocaleUtf8 = False
    }
-- | Get the path for the given compiler ignoring any local binaries.
--
-- https://github.com/commercialhaskell/stack/issues/1052
getCompilerPath
    :: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env)
    => WhichCompiler
    -> m (Path Abs File)
getCompilerPath wc = do
    cfg <- view configL
    -- Minimal environment, but with UTF-8 locale forced on.
    let settings = minimalEnvSettings { esLocaleUtf8 = True }
    eoWithoutLocals <- liftIO $ configEnvOverride cfg settings
    join $ findExecutable eoWithoutLocals (compilerExeName wc)
-- | A parsed project file: the 'Project' itself together with the partial
-- 'ConfigMonoid' settings embedded in the same YAML object.
data ProjectAndConfigMonoid
  = ProjectAndConfigMonoid !Project !ConfigMonoid
instance FromJSON (WithJSONWarnings ProjectAndConfigMonoid) where
    parseJSON = withObjectWarnings "ProjectAndConfigMonoid" $ \o -> do
        -- Default to the current directory when no "packages" key is given.
        dirs <- jsonSubWarningsTT (o ..:? "packages") ..!= [packageEntryCurrDir]
        extraDeps' <- o ..:? "extra-deps" ..!= []
        extraDeps <-
            case partitionEithers $ goDeps extraDeps' of
                ([], x) -> return $ Map.fromList x
                (errs, _) -> fail $ unlines errs
        flags <- o ..:? "flags" ..!= mempty
        resolver <- jsonSubWarnings (o ..: "resolver")
        compiler <- o ..:? "compiler"
        msg <- o ..:? "user-message"
        -- The same object also carries the config-monoid fields.
        config <- parseConfigMonoidJSON o
        extraPackageDBs <- o ..:? "extra-package-dbs" ..!= []
        let project = Project
                { projectUserMsg = msg
                , projectPackages = dirs
                , projectExtraDeps = extraDeps
                , projectFlags = flags
                , projectResolver = resolver
                , projectCompiler = compiler
                , projectExtraPackageDBs = extraPackageDBs
                }
        return $ ProjectAndConfigMonoid project config
      where
        -- Group extra-deps by package name; a package listed at two
        -- different versions yields a Left error message.
        goDeps =
            map toSingle . Map.toList . Map.unionsWith Set.union . map toMap
          where
            toMap i = Map.singleton
                (packageIdentifierName i)
                (Set.singleton (packageIdentifierVersion i))
            toSingle (k, s) =
                case Set.toList s of
                    [x] -> Right (k, x)
                    xs -> Left $ concat
                        [ "Multiple versions for package "
                        , packageNameString k
                        , ": "
                        , unwords $ map versionString xs
                        ]
-- | A PackageEntry for the current directory, used as a default
-- when a project file has no "packages" key.
packageEntryCurrDir :: PackageEntry
packageEntryCurrDir = PackageEntry
    { peExtraDepMaybe = Nothing
    , peLocation = PLFilePath "."
    , peSubdirs = []
    }
-- | A software control system. Currently only Git is supported.
data SCM = Git
  deriving (Show)
instance FromJSON SCM where
    parseJSON v = do
        s <- parseJSON v
        case s of
            "git" -> return Git
            -- 's' is a String here, so (<>) is plain list append.
            _ -> fail ("Unknown or unsupported SCM: " <> s)
instance ToJSON SCM where
    toJSON Git = toJSON ("git" :: Text)
-- | A variant of the platform, used to differentiate Docker builds from host
data PlatformVariant = PlatformVariantNone
                     | PlatformVariant String
-- | Render a platform variant to a String suffix.
-- 'PlatformVariantNone' renders as the empty string (no suffix).
platformVariantSuffix :: PlatformVariant -> String
platformVariantSuffix PlatformVariantNone = ""
platformVariantSuffix (PlatformVariant v) = "-" ++ v
-- | Specialized variant of GHC (e.g. libgmp4 or integer-simple)
data GHCVariant
    = GHCStandard -- ^ Standard bindist
    | GHCIntegerSimple -- ^ Bindist that uses integer-simple
    | GHCCustom String -- ^ Other bindists
    deriving (Show)
instance FromJSON GHCVariant where
    -- Strange structuring is to give consistent error messages
    parseJSON =
        withText
            "GHCVariant"
            (either (fail . show) return . parseGHCVariant . T.unpack)
-- | Render a GHC variant to a String.
ghcVariantName :: GHCVariant -> String
ghcVariantName variant =
    case variant of
        GHCStandard -> "standard"
        GHCIntegerSimple -> "integersimple"
        GHCCustom name -> "custom-" ++ name
-- | Render a GHC variant to a String suffix.
-- The standard variant has no suffix; others are "-" plus the name.
ghcVariantSuffix :: GHCVariant -> String
ghcVariantSuffix GHCStandard = ""
ghcVariantSuffix other = '-' : ghcVariantName other
-- | Parse GHC variant from a String.
-- "" and "standard" are standard; "integersimple" is integer-simple; a
-- "custom-" prefix strips the prefix; anything else is taken as a custom
-- variant name verbatim.
parseGHCVariant :: (MonadThrow m) => String -> m GHCVariant
parseGHCVariant s
    | Just name <- stripPrefix "custom-" s = return (GHCCustom name)
    | s == "" || s == "standard" = return GHCStandard
    | s == "integersimple" = return GHCIntegerSimple
    | otherwise = return (GHCCustom s)
-- | Build of the compiler distribution (e.g. standard, gmp4, tinfo6)
-- NOTE(review): the line above appears to be an orphaned Haddock comment --
-- no corresponding type is declared here; verify against the full file.
-- | Information for a file to download.
data DownloadInfo = DownloadInfo
    { downloadInfoUrl :: Text
    -- ^ URL or absolute file path
    , downloadInfoContentLength :: Maybe Int
    , downloadInfoSha1 :: Maybe ByteString
    } deriving (Show)
instance FromJSON (WithJSONWarnings DownloadInfo) where
    parseJSON = withObjectWarnings "DownloadInfo" parseDownloadInfoFromObject
-- | Parse JSON in existing object for 'DownloadInfo'
parseDownloadInfoFromObject :: Object -> WarningParser DownloadInfo
parseDownloadInfoFromObject o = do
    urlText <- o ..: "url"
    mContentLength <- o ..:? "content-length"
    mSha1Text <- o ..:? "sha1"
    return DownloadInfo
        { downloadInfoUrl = urlText
        , downloadInfoContentLength = mContentLength
          -- The sha1 is stored as raw bytes; encode the JSON text as UTF-8.
        , downloadInfoSha1 = encodeUtf8 <$> mSha1Text
        }
-- | A 'DownloadInfo' tagged with the version it provides.
data VersionedDownloadInfo = VersionedDownloadInfo
    { vdiVersion :: Version
    , vdiDownloadInfo :: DownloadInfo
    }
    deriving Show
instance FromJSON (WithJSONWarnings VersionedDownloadInfo) where
    parseJSON = withObjectWarnings "VersionedDownloadInfo" $ \o -> do
        version <- o ..: "version"
        downloadInfo <- parseDownloadInfoFromObject o
        return VersionedDownloadInfo
            { vdiVersion = version
            , vdiDownloadInfo = downloadInfo
            }
-- | Download info for a GHC bindist plus the configure options/environment
-- used when installing it.
data GHCDownloadInfo = GHCDownloadInfo
    { gdiConfigureOpts :: [Text]
    , gdiConfigureEnv :: Map Text Text
    , gdiDownloadInfo :: DownloadInfo
    }
    deriving Show
instance FromJSON (WithJSONWarnings GHCDownloadInfo) where
    parseJSON = withObjectWarnings "GHCDownloadInfo" $ \o -> do
        configureOpts <- o ..:? "configure-opts" ..!= mempty
        configureEnv <- o ..:? "configure-env" ..!= mempty
        downloadInfo <- parseDownloadInfoFromObject o
        return GHCDownloadInfo
            { gdiConfigureOpts = configureOpts
            , gdiConfigureEnv = configureEnv
            , gdiDownloadInfo = downloadInfo
            }
-- | Contents of a setup-info file: where to fetch 7z, msys2, GHC, GHCJS
-- and stack binaries. The outer 'Map Text' keys are platform strings.
data SetupInfo = SetupInfo
    { siSevenzExe :: Maybe DownloadInfo
    , siSevenzDll :: Maybe DownloadInfo
    , siMsys2 :: Map Text VersionedDownloadInfo
    , siGHCs :: Map Text (Map Version GHCDownloadInfo)
    , siGHCJSs :: Map Text (Map CompilerVersion DownloadInfo)
    , siStack :: Map Text (Map Version DownloadInfo)
    }
    deriving Show
instance FromJSON (WithJSONWarnings SetupInfo) where
    parseJSON = withObjectWarnings "SetupInfo" $ \o -> do
        siSevenzExe <- jsonSubWarningsT (o ..:? "sevenzexe-info")
        siSevenzDll <- jsonSubWarningsT (o ..:? "sevenzdll-info")
        siMsys2 <- jsonSubWarningsT (o ..:? "msys2" ..!= mempty)
        siGHCs <- jsonSubWarningsTT (o ..:? "ghc" ..!= mempty)
        siGHCJSs <- jsonSubWarningsTT (o ..:? "ghcjs" ..!= mempty)
        siStack <- jsonSubWarningsTT (o ..:? "stack" ..!= mempty)
        -- RecordWildCards: the locals above match SetupInfo's field names.
        return SetupInfo {..}
-- | For @siGHCs@ and @siGHCJSs@ fields maps are deeply merged.
-- For all fields the values from the last @SetupInfo@ win.
instance Monoid SetupInfo where
    mempty =
        SetupInfo
        { siSevenzExe = Nothing
        , siSevenzDll = Nothing
        , siMsys2 = Map.empty
        , siGHCs = Map.empty
        , siGHCJSs = Map.empty
        , siStack = Map.empty
        }
    -- Putting 'r' in the left (biased/winning) position everywhere makes
    -- the later SetupInfo override the earlier one, as documented.
    mappend l r =
        SetupInfo
        { siSevenzExe = siSevenzExe r <|> siSevenzExe l
        , siSevenzDll = siSevenzDll r <|> siSevenzDll l
        , siMsys2 = siMsys2 r <> siMsys2 l
        , siGHCs = Map.unionWith (<>) (siGHCs r) (siGHCs l)
        , siGHCJSs = Map.unionWith (<>) (siGHCJSs r) (siGHCJSs l)
          -- Bug fix: previously @siStack l@ was the left argument, so the
          -- *earlier* SetupInfo won for stack downloads -- inconsistent with
          -- every other field and with the documented "last wins" semantics.
        , siStack = Map.unionWith (<>) (siStack r) (siStack l) }
-- | Remote or inline 'SetupInfo'
data SetupInfoLocation
    = SetupInfoFileOrURL String
    | SetupInfoInline SetupInfo
    deriving (Show)
instance FromJSON (WithJSONWarnings SetupInfoLocation) where
    parseJSON v =
        -- Try the simple string form first (no warnings possible); fall
        -- back to parsing an inline SetupInfo object, propagating its
        -- warnings.
        (noJSONWarnings <$>
         withText "SetupInfoFileOrURL" (pure . SetupInfoFileOrURL . T.unpack) v) <|>
        inline
      where
        inline = do
            WithJSONWarnings si w <- parseJSON v
            return $ WithJSONWarnings (SetupInfoInline si) w
-- | How PVP bounds should be added to .cabal files
data PvpBounds
  = PvpBoundsNone
  | PvpBoundsUpper
  | PvpBoundsLower
  | PvpBoundsBoth
  deriving (Show, Read, Eq, Typeable, Ord, Enum, Bounded)
-- | The textual form of a 'PvpBounds' value, as used in config files.
pvpBoundsText :: PvpBounds -> Text
pvpBoundsText pb =
    case pb of
        PvpBoundsNone -> T.pack "none"
        PvpBoundsUpper -> T.pack "upper"
        PvpBoundsLower -> T.pack "lower"
        PvpBoundsBoth -> T.pack "both"
-- | Inverse of 'pvpBoundsText': parse the textual form back into a
-- 'PvpBounds', or report the invalid input.
parsePvpBounds :: Text -> Either String PvpBounds
parsePvpBounds t =
    maybe (Left $ "Invalid PVP bounds: " ++ T.unpack t) Right (Map.lookup t m)
  where
    -- Built from every constructor, so it stays in sync with the type.
    m = Map.fromList [(pvpBoundsText b, b) | b <- [minBound .. maxBound]]
-- Serialize/deserialize via the textual form ("none"/"upper"/"lower"/"both").
instance ToJSON PvpBounds where
  toJSON = toJSON . pvpBoundsText
instance FromJSON PvpBounds where
  parseJSON = withText "PvpBounds" (either fail return . parsePvpBounds)
-- | Provide an explicit list of package dependencies when running a custom Setup.hs
explicitSetupDeps :: (MonadReader env m, HasConfig env) => PackageName -> m Bool
explicitSetupDeps name = do
    m <- view $ configL.to configExplicitSetupDeps
    -- A per-package entry takes precedence over the wildcard (Nothing)
    -- entry; when neither is present the default is False.
    return $ fromMaybe False $ Map.lookup (Just name) m <|> Map.lookup Nothing m
-- | Data passed into Docker container for the Docker entrypoint's use
data DockerEntrypoint = DockerEntrypoint
    { deUser :: !(Maybe DockerUser)
    -- ^ UID/GID/etc of host user, if we wish to perform UID/GID switch in container
    } deriving (Read,Show)
-- | Docker host user info
data DockerUser = DockerUser
    { duUid :: UserID -- ^ uid
    , duGid :: GroupID -- ^ gid
    , duGroups :: [GroupID] -- ^ Supplemental groups
    , duUmask :: FileMode -- ^ File creation mask
    } deriving (Read,Show)
-- TODO: See section of
-- https://github.com/commercialhaskell/stack/issues/1265 about
-- rationalizing the config. It would also be nice to share more code.
-- For now it's more convenient just to extend this type. However, it's
-- unpleasant that it has overlap with both 'Project' and 'Config'.
data CustomSnapshot = CustomSnapshot
    { csCompilerVersion :: !(Maybe CompilerVersion)
    -- ^ Optional compiler override (the "compiler" key).
    , csPackages :: !(Set PackageIdentifier)
    -- ^ Packages to add ("packages" key).
    , csDropPackages :: !(Set PackageName)
    -- ^ Packages to remove ("drop-packages" key).
    , csFlags :: !PackageFlags
    -- ^ Cabal flag settings ("flags" key).
    , csGhcOptions :: !GhcOptions
    -- ^ GHC options ("ghc-options" key).
    }
instance FromJSON (WithJSONWarnings (CustomSnapshot, Maybe Resolver)) where
    parseJSON = withObjectWarnings "CustomSnapshot" $ \o -> (,)
        <$> (CustomSnapshot
            <$> o ..:? "compiler"
            <*> o ..:? "packages" ..!= mempty
            <*> o ..:? "drop-packages" ..!= mempty
            <*> o ..:? "flags" ..!= mempty
            <*> o ..:? configMonoidGhcOptionsName ..!= mempty)
        <*> jsonSubWarningsT (o ..:? "resolver")
-- | Per-package GHC options; the 'Nothing' key holds options that apply
-- to all packages.
newtype GhcOptions = GhcOptions
    { unGhcOptions :: Map (Maybe PackageName) [Text] }
    deriving Show
instance FromJSON GhcOptions where
    parseJSON val = do
        ghcOptions <- parseJSON val
        fmap (GhcOptions . Map.fromList) $ mapM handleGhcOptions $ Map.toList ghcOptions
      where
        -- A key of "*" applies to all packages; the value string is split
        -- into individual arguments with shell-style escaping ('parseArgs').
        handleGhcOptions :: Monad m => (Text, Text) -> m (Maybe PackageName, [Text])
        handleGhcOptions (name', vals') = do
            name <-
                if name' == "*"
                    then return Nothing
                    else case parsePackageNameFromString $ T.unpack name' of
                            Left e -> fail $ show e
                            Right x -> return $ Just x
            case parseArgs Escaping vals' of
                Left e -> fail e
                Right vals -> return (name, map T.pack vals)
instance Monoid GhcOptions where
    mempty = GhcOptions mempty
    -- FIXME: Should GhcOptions really monoid like this? Keeping it this
    -- way preserves the behavior of the ConfigMonoid. However, this
    -- means there isn't the ability to fully override snapshot
    -- ghc-options in the same way there is for flags. Do we want to
    -- change the semantics here? (particularly for extensible
    -- snapshots)
    mappend (GhcOptions l) (GhcOptions r) =
        GhcOptions (Map.unionWith (++) l r)
-- | Look up the GHC options for a package: options applying to all
-- packages (the 'Nothing' key) come first, then package-specific ones.
ghcOptionsFor :: PackageName -> GhcOptions -> [Text]
ghcOptionsFor name (GhcOptions mp) = globalOpts ++ pkgOpts
  where
    globalOpts = M.findWithDefault [] Nothing mp
    pkgOpts = M.findWithDefault [] (Just name) mp
-- | Cabal flag assignments keyed by package name.
newtype PackageFlags = PackageFlags
    { unPackageFlags :: Map PackageName (Map FlagName Bool) }
    deriving Show
instance FromJSON PackageFlags where
    parseJSON val = PackageFlags <$> parseJSON val
instance ToJSON PackageFlags where
    toJSON = toJSON . unPackageFlags
instance Monoid PackageFlags where
    mempty = PackageFlags mempty
    -- Per-flag left bias: on conflict, the left operand's setting wins
    -- ('Map.union' is left-biased).
    mappend (PackageFlags l) (PackageFlags r) =
        PackageFlags (Map.unionWith Map.union l r)
-----------------------------------
-- Lens classes
-----------------------------------
-- Each class provides lenses into progressively richer environment values;
-- the default implementations delegate through the next-richer class so a
-- concrete env type only needs to define the innermost lens.
-- | Class for environment values which have a Platform
class HasPlatform env where
    platformL :: Lens' env Platform
    default platformL :: HasConfig env => Lens' env Platform
    platformL = configL.platformL
    {-# INLINE platformL #-}
    platformVariantL :: Lens' env PlatformVariant
    default platformVariantL :: HasConfig env => Lens' env PlatformVariant
    platformVariantL = configL.platformVariantL
    {-# INLINE platformVariantL #-}
-- | Class for environment values which have a GHCVariant
class HasGHCVariant env where
    ghcVariantL :: Lens' env GHCVariant
    default ghcVariantL :: HasBuildConfigNoLocal env => Lens' env GHCVariant
    ghcVariantL = buildConfigNoLocalL.ghcVariantL
    {-# INLINE ghcVariantL #-}
-- | Class for environment values that can provide a 'Config'.
class HasPlatform env => HasConfig env where
    configL :: Lens' env Config
    default configL :: HasBuildConfigNoLocal env => Lens' env Config
    configL = buildConfigNoLocalL.lens bcConfig (\x y -> x { bcConfig = y })
    {-# INLINE configL #-}
-- | Class for environment values that can provide a 'BuildConfigNoLocal'.
class HasConfig env => HasBuildConfigNoLocal env where
    buildConfigNoLocalL :: Lens' env BuildConfigNoLocal
    default buildConfigNoLocalL :: HasEnvConfigNoLocal env => Lens' env BuildConfigNoLocal
    buildConfigNoLocalL = envConfigNoLocalL.lens
        envConfigBuildConfigNoLocal
        (\x y -> x { envConfigBuildConfigNoLocal = y })
-- | Class for environment values that can provide a 'BuildConfig'.
class HasBuildConfigNoLocal env => HasBuildConfig env where
    buildConfigLocalL :: Lens' env BuildConfigLocal
    default buildConfigLocalL :: HasEnvConfig env => Lens' env BuildConfigLocal
    buildConfigLocalL = envConfigLocalL.lens
        envConfigBuildConfigLocal
        (\x y -> x { envConfigBuildConfigLocal = y })
-- | Class for environment values that can provide an 'EnvConfigNoLocal'.
class (HasBuildConfigNoLocal env, HasGHCVariant env) => HasEnvConfigNoLocal env where
    envConfigNoLocalL :: Lens' env EnvConfigNoLocal
class (HasBuildConfig env, HasEnvConfigNoLocal env) => HasEnvConfig env where
envConfigLocalL :: Lens' env EnvConfigLocal
envConfigLocalL = envConfigL.lens ecLocal (\x y -> x { ecLocal = y })
{-# INLINE envConfigLocalL #-}
envConfigL :: Lens' env EnvConfig
-----------------------------------
-- Lens instances
-----------------------------------
instance HasPlatform (Platform,PlatformVariant) where
platformL = _1
platformVariantL = _2
instance HasPlatform Config where
platformL = lens configPlatform (\x y -> x { configPlatform = y })
platformVariantL = lens configPlatformVariant (\x y -> x { configPlatformVariant = y })
instance HasPlatform BuildConfigNoLocal
instance HasPlatform BuildConfig
instance HasPlatform EnvConfigNoLocal
instance HasPlatform EnvConfig
instance HasGHCVariant GHCVariant where
ghcVariantL = id
{-# INLINE ghcVariantL #-}
instance HasGHCVariant BuildConfigNoLocal where
ghcVariantL = lens bcGHCVariant (\x y -> x { bcGHCVariant = y })
instance HasGHCVariant BuildConfig
instance HasGHCVariant EnvConfigNoLocal
instance HasGHCVariant EnvConfig
instance HasConfig Config where
configL = id
{-# INLINE configL #-}
instance HasConfig BuildConfigNoLocal where
configL = lens bcConfig (\x y -> x { bcConfig = y })
instance HasConfig BuildConfig
instance HasConfig EnvConfigNoLocal
instance HasConfig EnvConfig
instance HasBuildConfigNoLocal BuildConfigNoLocal where
buildConfigNoLocalL = id
{-# INLINE buildConfigNoLocalL #-}
instance HasBuildConfigNoLocal BuildConfig where
buildConfigNoLocalL = lens
bcNoLocal
(\x y -> x { bcNoLocal = y })
instance HasBuildConfigNoLocal EnvConfigNoLocal
instance HasBuildConfigNoLocal EnvConfig
instance HasBuildConfig BuildConfig where
buildConfigLocalL = lens bcLocal (\x y -> x { bcLocal = y})
{-# INLINE buildConfigLocalL #-}
instance HasBuildConfig EnvConfig
instance HasEnvConfigNoLocal EnvConfigNoLocal where
envConfigNoLocalL = id
{-# INLINE envConfigNoLocalL #-}
instance HasEnvConfigNoLocal EnvConfig where
envConfigNoLocalL = lens ecNoLocal (\x y -> x { ecNoLocal = y })
{-# INLINE envConfigNoLocalL #-}
instance HasEnvConfig EnvConfig where
envConfigL = id
{-# INLINE envConfigL #-}
-----------------------------------
-- Helper lenses
-----------------------------------
stackRootL :: HasConfig s => Lens' s (Path Abs Dir)
stackRootL = configL.lens configStackRoot (\x y -> x { configStackRoot = y })
-- | The compiler specified by the @MiniBuildPlan@. This may be
-- different from the actual compiler used!
wantedCompilerVersionL :: HasBuildConfigNoLocal s => Lens' s CompilerVersion
wantedCompilerVersionL = miniBuildPlanL.lens
mbpCompilerVersion
(\x y -> x { mbpCompilerVersion = y })
-- | The version of the compiler which will actually be used. May be
-- different than that specified in the 'MiniBuildPlan' and returned
-- by 'wantedCompilerVersionL'.
actualCompilerVersionL :: HasEnvConfigNoLocal s => Lens' s CompilerVersion
actualCompilerVersionL = envConfigNoLocalL.lens
envConfigCompilerVersion
(\x y -> x { envConfigCompilerVersion = y })
loadedResolverL :: HasBuildConfigNoLocal s => Lens' s LoadedResolver
loadedResolverL = buildConfigNoLocalL.lens
bcResolver
(\x y -> x { bcResolver = y })
miniBuildPlanL :: HasBuildConfigNoLocal s => Lens' s MiniBuildPlan
miniBuildPlanL = buildConfigNoLocalL.lens
bcWantedMiniBuildPlan
(\x y -> x { bcWantedMiniBuildPlan = y })
packageIndicesL :: HasConfig s => Lens' s [PackageIndex]
packageIndicesL = configL.lens
configPackageIndices
(\x y -> x { configPackageIndices = y })
buildOptsL :: HasConfig s => Lens' s BuildOpts
buildOptsL = configL.lens
configBuild
(\x y -> x { configBuild = y })
buildOptsMonoidHaddockL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidHaddockL = lens (getFirst . buildMonoidHaddock)
(\buildMonoid t -> buildMonoid {buildMonoidHaddock = First t})
buildOptsMonoidTestsL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidTestsL = lens (getFirst . buildMonoidTests)
(\buildMonoid t -> buildMonoid {buildMonoidTests = First t})
buildOptsMonoidBenchmarksL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidBenchmarksL = lens (getFirst . buildMonoidBenchmarks)
(\buildMonoid t -> buildMonoid {buildMonoidBenchmarks = First t})
buildOptsMonoidInstallExesL :: Lens' BuildOptsMonoid (Maybe Bool)
buildOptsMonoidInstallExesL =
lens (getFirst . buildMonoidInstallExes)
(\buildMonoid t -> buildMonoid {buildMonoidInstallExes = First t})
buildOptsInstallExesL :: Lens' BuildOpts Bool
buildOptsInstallExesL =
lens boptsInstallExes
(\bopts t -> bopts {boptsInstallExes = t})
buildOptsHaddockL :: Lens' BuildOpts Bool
buildOptsHaddockL =
lens boptsHaddock
(\bopts t -> bopts {boptsHaddock = t})
globalOptsL :: Lens' GlobalOpts ConfigMonoid
globalOptsL = lens globalConfigMonoid (\x y -> x { globalConfigMonoid = y })
globalOptsBuildOptsMonoidL :: Lens' GlobalOpts BuildOptsMonoid
globalOptsBuildOptsMonoidL = globalOptsL.lens
configMonoidBuildOpts
(\x y -> x { configMonoidBuildOpts = y })
packageCachesL :: HasConfig env => Lens' env
(IORef (Maybe (Map PackageIdentifier (PackageIndex, PackageCache)
,HashMap GitSHA1 (PackageIndex, OffsetSize))))
packageCachesL = configL.lens configPackageCaches (\x y -> x { configPackageCaches = y })
configUrlsL :: HasConfig env => Lens' env Urls
configUrlsL = configL.lens configUrls (\x y -> x { configUrls = y })
cabalVersionL :: HasEnvConfigNoLocal env => Lens' env Version
cabalVersionL = envConfigNoLocalL.lens
envConfigCabalVersion
(\x y -> x { envConfigCabalVersion = y })
whichCompilerL :: Getting r CompilerVersion WhichCompiler
whichCompilerL = to whichCompiler
buildConfigL :: HasBuildConfig env => Getting r env BuildConfig
buildConfigL = to $ \env -> BuildConfig
(view buildConfigNoLocalL env)
(view buildConfigLocalL env)
| deech/stack | src/Stack/Types/Config.hs | bsd-3-clause | 78,951 | 0 | 24 | 18,447 | 15,172 | 8,238 | 6,934 | -1 | -1 |
import Test.Tasty
import Test_1_Parser
import Test_2_EvaluatingSimpleExpressions
import Test_3_EvaluatingComplexExpressions
import Test_4_WorkingWithVariables
import Test_5_AddingFunctionsToTheMix
import Test_6_WorkingWithLists
import Test_7_UsingTheLanguage
--
-- The following command runs all the tests for all of the parts:
--
-- stack test diy-lang-haskell:test-all
--
main :: IO ()
main = defaultMain
$ testGroup "\nDIY Lang: All Tests"
[ parsingTests
, evaluatingSimpleExpressionsTests
, evaluatingComplexExpressionsTests
, workingWithVariabesTests
, addingFunctionsToTheMixTests
, workingWithListsTests
, usingTheLanguageTests
]
| joelchelliah/diy-lang-haskell | test/Test_all.hs | bsd-3-clause | 780 | 0 | 7 | 207 | 77 | 48 | 29 | 18 | 1 |
{-# LANGUAGE FlexibleContexts, MultiParamTypeClasses, FlexibleInstances, OverloadedStrings, GADTs, ExistentialQuantification, TypeApplications, ScopedTypeVariables #-}
module QueryArrow.FFI.Service.Local where
import QueryArrow.Syntax.Term
import QueryArrow.DB.DB
import QueryArrow.QueryPlan
import QueryArrow.DB.ResultStream
import Prelude hiding (lookup)
import Data.Set (fromList, empty)
import Control.Monad.Trans.Resource (runResourceT)
import qualified Data.Map.Strict as Map
import Control.Monad.IO.Class (liftIO)
import System.Log.Logger (infoM)
import QueryArrow.FFI.Service
import QueryArrow.Config
import QueryArrow.DBMap
import QueryArrow.Syntax.Type
data Session = forall db. (IDatabaseUniformRowAndDBFormula MapResultRow FormulaT db) => Session db (ConnectionType db)
localService :: QueryArrowService Session
localService = QueryArrowService {
execQuery = \(Session db conn ) form params -> do
-- liftIO $ putStrLn ("execQuery: " ++ serialize form ++ show params)
let (varstinp, varstout) = setToMap (fromList (Map.keys params)) empty
liftIO $ runResourceT (depleteResultStream (doQueryWithConn db conn varstout form varstinp (listResultStream [params]))),
getAllResult = \(Session db conn ) vars form params -> do
let (varstinp, varstout) = setToMap (fromList (Map.keys params)) (fromList vars)
liftIO $ runResourceT (getAllResultsInStream (doQueryWithConn db conn varstout form varstinp (listResultStream [params]))),
qasConnect = \ path -> liftIO $ do
infoM "Plugin" ("loading configuration from " ++ path)
ps <- getConfig path
infoM "Plugin" ("configuration: " ++ show ps)
db <- transDB ps
case db of
AbstractDatabase db -> do
conn <- dbOpen db
return (Session db conn),
qasDisconnect = \ (Session _ conn) ->
liftIO $ dbClose conn,
qasCommit = \ (Session _ conn) ->
liftIO $ dbCommit conn,
qasRollback = \ (Session _ conn) ->
liftIO $ dbRollback conn,
qasPrepare = \ (Session _ conn) ->
liftIO $ dbPrepare conn,
qasBegin = \ (Session _ conn) ->
liftIO $ dbBegin conn
}
| xu-hao/QueryArrow | QueryArrow-ffi-service-local/src/QueryArrow/FFI/Service/Local.hs | bsd-3-clause | 2,122 | 0 | 18 | 386 | 615 | 331 | 284 | 44 | 1 |
module SelfDestruct where
import Botworld
import Botworld.Display
import Data.Maybe
import Data.Map (Map, empty, insert)
import qualified SelfDestruct.Omega as Omega
import qualified SelfDestruct.Ideal as Ideal
import Text.Printf (printf)
-- Processor you get to use.
cpu :: Processor
cpu = P 8192
-- Game description
value :: Item -> Int
value (Cargo price _) = price
value _ = 0
you :: Player
you = Player standardValuer (0, 0)
players :: Map String Player
players = insert "You" you empty
-- World description
worthless :: Item
worthless = Cargo 1 5
valuable :: Item
valuable = Cargo 99 5
rex :: Memory -> Robot
rex = Robot (F Red 10) [worthless] cpu
omega :: Robot
omega = Robot (F Black 10) inventory (P 4096) Omega.machine where
inventory = [valuable, DestroyShield, DestroyShield]
mintWorld :: Memory -> Botworld
mintWorld mem = Grid (2, 1)
[ Just (Square [rex mem] [])
, Just (Square [omega] [])
]
-- One game: the initial state plus five updated states.
game :: Memory -> [Botworld]
game = take 6 . iterate update . mintWorld
-- Display one game
displayGame :: [Botworld] -> IO ()
displayGame states = do
displayBotworld players $ head states
mapM_ (displayEventGrid players . runEnvironment) (init states)
displayScoreboard players $ last states
main :: IO ()
main = displayGame $ game Ideal.machine
| machine-intelligence/Botworld | games/SelfDestruct.hs | bsd-3-clause | 1,339 | 0 | 11 | 246 | 465 | 250 | 215 | 39 | 1 |
module Problem491 where
import qualified Data.HashSet as S
import Data.List
import Control.Arrow
import Data.Array
import Merge
main :: IO ()
main =
print
$ sum
$ map (\(a, b) -> count a * count'' (map (head &&& length) $ group b))
$ S.toList
$ S.fromList
$ filter valid
$ choose 10
$ merge [0 .. 9] [0 .. 9]
choose :: Int -> [a] -> [([a], [a])]
choose 0 xs = [([], xs)]
choose _ [] = []
choose n (x : xs) = (++)
[ (x : y, y') | (y, y') <- choose (n - 1) xs ]
[ (y, x : y') | (y, y') <- choose n xs ]
valid :: ([Int], [Int]) -> Bool
valid (xs, ys) = abs (sum xs - sum ys) `mod` 11 == 0
count :: [Int] -> Integer
count xs = let ns = map (head &&& length) $ group xs in count' ns
count' :: [(Int, Int)] -> Integer
count' xs@((0, n0) : xs') = count'' xs - count'' ((0, n0 - 1) : xs')
count' xs = count'' xs
count'' :: [(a, Int)] -> Integer
count'' xs = fact (sum $ map snd xs) `div` product (map (fact . snd) xs)
facts :: Array Int Integer
facts = listArray (0, 10) $ scanl (*) 1 [1 ..]
fact :: Int -> Integer
fact = (facts !)
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem491.hs | bsd-3-clause | 1,129 | 0 | 20 | 333 | 624 | 343 | 281 | 35 | 1 |
module Language.TheExperiment.Parser.Module where
import Control.Monad
import Text.Parsec
import Text.Parsec.Expr
import Language.TheExperiment.AST.Module
import Language.TheExperiment.AST.Expression
import Language.TheExperiment.Parser.Lexer
import Language.TheExperiment.Parser.Statement
type ParsedModule = Module ()
parseSource :: EParser ParsedModule
parseSource = parseLex $ many anOpDef >>= putState . Operators >> aModule
aModule :: EParser ParsedModule
aModule = do
pos <- getPosition -- hmm do I really want this?
defs <- many aDefinition
return $ Module pos defs
{-
The syntax goes like this:
infixl + add 5
The function is a bit of a mind bender due to the wierdness
of the Parsec Operator type. I've written an equivalent to this
function at least five times and it still confuses me.
-}
anOpDef :: EParser (ParserOperator, Rational)
anOpDef = anOpType "infixr" InR (flip Infix AssocRight . liftM call2)
<|> anOpType "infix" In (flip Infix AssocNone . liftM call2)
<|> anOpType "infixl" InL (flip Infix AssocLeft . liftM call2)
<|> anOpType "prefix" Pre (Prefix . liftM call)
<|> anOpType "postfix" Post (Postfix . liftM call)
<?> "operator definition"
where
anOpType :: String -- tag
-> (Rational -> String -> OpFormat) -- fixityCons
-> (EParser (Expr ()) -> ParserOperator) -- opCons
-- take a parser for an operator and returns a
-- ParserOperator suitable for an expression parser
-- operator table
-> EParser (ParserOperator, Rational)
anOpType tag fixityCons opCons = do
--opCons :: EParser () -> ParserOperator
reserved tag
opName <- operator
name <- identifier
precedence <- rational
let opParser = do
pos <- getPosition
reservedOp opName
return $ Identifier { exprPos = pos
, exprNodeData = ()
-- #TODO find a more general solution here:
, idName = name
, opFormat = fixityCons precedence opName }
return (opCons opParser, precedence)
call f a = call' f [a]
call2 f a b = call' f [a, b]
call' f xs = Call { exprPos = exprPos f
, exprNodeData = ()
, callFunc = f
, callParams = xs
}
| jvranish/TheExperiment | src/Language/TheExperiment/Parser/Module.hs | bsd-3-clause | 2,491 | 0 | 17 | 802 | 541 | 285 | 256 | -1 | -1 |
-- | Basic server monads and related operations.
module Game.LambdaHack.Server.MonadServer
( -- * The server monad
MonadServer( getsServer
, modifyServer
, chanSaveServer -- exposed only to be implemented, not used
, liftIO -- exposed only to be implemented, not used
)
, MonadServerAtomic(..)
-- * Assorted primitives
, getServer, putServer, debugPossiblyPrint, debugPossiblyPrintAndExit
, serverPrint, saveServer, dumpRngs, restoreScore, registerScore
, rndToAction, getSetGen
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
-- Cabal
import qualified Paths_LambdaHack as Self (version)
import qualified Control.Exception as Ex
import qualified Control.Monad.Trans.State.Strict as St
import qualified Data.EnumMap.Strict as EM
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Time.Clock.POSIX
import Data.Time.LocalTime
import System.Exit (exitFailure)
import System.FilePath
import System.IO (hFlush, stdout)
import qualified System.Random.SplitMix32 as SM
import Game.LambdaHack.Atomic
import Game.LambdaHack.Common.ActorState
import Game.LambdaHack.Common.ClientOptions (sbenchmark)
import Game.LambdaHack.Common.Faction
import Game.LambdaHack.Common.File
import qualified Game.LambdaHack.Common.HighScore as HighScore
import Game.LambdaHack.Common.Kind
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Common.MonadStateRead
import Game.LambdaHack.Common.Perception
import qualified Game.LambdaHack.Common.Save as Save
import Game.LambdaHack.Common.State
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Content.FactionKind
import Game.LambdaHack.Content.RuleKind
import Game.LambdaHack.Core.Random
import Game.LambdaHack.Server.ServerOptions
import Game.LambdaHack.Server.State
class MonadStateRead m => MonadServer m where
getsServer :: (StateServer -> a) -> m a
modifyServer :: (StateServer -> StateServer) -> m ()
chanSaveServer :: m (Save.ChanSave (State, StateServer))
-- We do not provide a MonadIO instance, so that outside
-- nobody can subvert the action monads by invoking arbitrary IO.
liftIO :: IO a -> m a
-- | The monad for executing atomic game state transformations.
class MonadServer m => MonadServerAtomic m where
-- | Execute an atomic command that changes the state
-- on the server and on all clients that can notice it.
execUpdAtomic :: UpdAtomic -> m ()
-- | Execute an atomic command that changes the state
-- on the server only.
execUpdAtomicSer :: UpdAtomic -> m Bool
-- | Execute an atomic command that changes the state
-- on the given single client only.
execUpdAtomicFid :: FactionId -> UpdAtomic -> m ()
-- | Execute an atomic command that changes the state
-- on the given single client only.
-- Catch 'AtomicFail' and indicate if it was in fact raised.
execUpdAtomicFidCatch :: FactionId -> UpdAtomic -> m Bool
-- | Execute an atomic command that only displays special effects.
execSfxAtomic :: SfxAtomic -> m ()
execSendPer :: FactionId -> LevelId
-> Perception -> Perception -> Perception -> m ()
getServer :: MonadServer m => m StateServer
getServer = getsServer id
putServer :: MonadServer m => StateServer -> m ()
putServer s = modifyServer (const s)
debugPossiblyPrint :: MonadServer m => Text -> m ()
debugPossiblyPrint t = do
debug <- getsServer $ sdbgMsgSer . soptions
when debug $ liftIO $ do
T.hPutStr stdout $! t <> "\n" -- hPutStrLn not atomic enough
hFlush stdout
-- No moving savefiles aside, to debug more easily.
debugPossiblyPrintAndExit :: MonadServer m => Text -> m ()
debugPossiblyPrintAndExit t = do
debug <- getsServer $ sdbgMsgSer . soptions
when debug $ liftIO $ do
T.hPutStr stdout $! t <> "\n" -- hPutStrLn not atomic enough
hFlush stdout
exitFailure
serverPrint :: MonadServer m => Text -> m ()
serverPrint t = liftIO $ do
T.hPutStr stdout $! t <> "\n" -- hPutStrLn not atomic enough
hFlush stdout
saveServer :: MonadServer m => m ()
saveServer = do
s <- getState
ser <- getServer
toSave <- chanSaveServer
liftIO $ Save.saveToChan toSave (s, ser)
-- | Dumps to stdout the RNG states from the start of the game.
dumpRngs :: MonadServer m => RNGs -> m ()
dumpRngs rngs = liftIO $ do
T.hPutStr stdout $! tshow rngs <> "\n" -- hPutStrLn not atomic enough
hFlush stdout
-- | Read the high scores dictionary. Return the empty table if no file.
restoreScore :: forall m. MonadServer m => COps -> m HighScore.ScoreDict
restoreScore COps{corule} = do
benchmark <- getsServer $ sbenchmark . sclientOptions . soptions
mscore <- if benchmark then return Nothing else do
let scoresFileName = rscoresFileName corule
dataDir <- liftIO appDataDir
let path bkp = dataDir </> bkp <> scoresFileName
configExists <- liftIO $ doesFileExist (path "")
res <- liftIO $ Ex.try $
if configExists then do
(vlib2, s) <- strictDecodeEOF (path "")
if Save.compatibleVersion vlib2 Self.version
then return $! s `seq` Just s
else do
let msg =
"High score file from incompatible version of game detected."
fail msg
else return Nothing
savePrefix <- getsServer $ ssavePrefixSer . soptions
let defPrefix = ssavePrefixSer defServerOptions
moveAside = savePrefix == defPrefix
handler :: Ex.SomeException -> m (Maybe a)
handler e = do
when moveAside $
liftIO $ renameFile (path "") (path "bkp.")
let msg = "High score restore failed."
<+> (if moveAside
then "The wrong file moved aside."
else "")
<+> "The error message is:"
<+> (T.unwords . T.lines) (tshow e)
serverPrint msg
return Nothing
either handler return res
maybe (return HighScore.empty) return mscore
-- | Generate a new score, register it and save.
registerScore :: MonadServer m => Status -> FactionId -> m ()
registerScore status fid = do
cops@COps{corule} <- getsState scops
total <- getsState $ snd . calculateTotal fid
let scoresFileName = rscoresFileName corule
dataDir <- liftIO appDataDir
-- Re-read the table in case it's changed by a concurrent game.
scoreDict <- restoreScore cops
gameModeId <- getsState sgameModeId
time <- getsState stime
dungeonTotal <- getsState sgold
date <- liftIO getPOSIXTime
tz <- liftIO $ getTimeZone $ posixSecondsToUTCTime date
curChalSer <- getsServer $ scurChalSer . soptions
factionD <- getsState sfactionD
bench <- getsServer $ sbenchmark . sclientOptions . soptions
noConfirmsGame <- isNoConfirmsGame
sbandSpawned <- getsServer sbandSpawned
let fact = factionD EM.! fid
path = dataDir </> scoresFileName
outputScore (worthMentioning, (ntable, pos)) =
-- If testing or fooling around, dump instead of registering.
-- In particular don't register score for the auto-* scenarios.
if bench || noConfirmsGame || gunderAI fact then
debugPossiblyPrint $ T.intercalate "\n"
$ HighScore.showScore tz pos (HighScore.getRecord pos ntable)
++ [" Spawned groups:"
<+> T.unwords (tail (T.words (tshow sbandSpawned)))]
else
let nScoreDict = EM.insert gameModeId ntable scoreDict
in when worthMentioning $ liftIO $
encodeEOF path Self.version (nScoreDict :: HighScore.ScoreDict)
theirVic (fi, fa) | isFoe fid fact fi
&& not (isHorrorFact fa) = Just $ gvictims fa
| otherwise = Nothing
theirVictims = EM.unionsWith (+) $ mapMaybe theirVic $ EM.assocs factionD
ourVic (fi, fa) | isFriend fid fact fi = Just $ gvictims fa
| otherwise = Nothing
ourVictims = EM.unionsWith (+) $ mapMaybe ourVic $ EM.assocs factionD
table = HighScore.getTable gameModeId scoreDict
registeredScore =
HighScore.register table total dungeonTotal time status date curChalSer
(T.unwords $ tail $ T.words $ gname fact)
ourVictims theirVictims
(fhiCondPoly $ gkind fact)
outputScore registeredScore
-- | Invoke pseudo-random computation with the generator kept in the state.
rndToAction :: MonadServer m => Rnd a -> m a
rndToAction r = do
gen1 <- getsServer srandom
let (a, gen2) = St.runState r gen1
modifyServer $ \ser -> ser {srandom = gen2}
return a
-- | Gets a random generator from the user-submitted options or, if not present,
-- generates one.
getSetGen :: MonadServer m => Maybe SM.SMGen -> m SM.SMGen
getSetGen mrng = case mrng of
Just rnd -> return rnd
Nothing -> liftIO SM.newSMGen
| LambdaHack/LambdaHack | engine-src/Game/LambdaHack/Server/MonadServer.hs | bsd-3-clause | 9,063 | 0 | 23 | 2,277 | 2,175 | 1,122 | 1,053 | -1 | -1 |
module Common.MapReduce (
mapReduce
, mapReduce'
) where
import Control.Parallel.Strategies (parMap, rdeepseq, using, NFData)
import Control.Parallel (pseq)
{-# INLINABLE divide #-}
{-# INLINABLE mapReduce #-}
{-# INLINABLE mapReduce' #-}
divide :: Int -> [a] -> [[a]]
divide _ [] = []
divide n xs = as : divide n bs where (as, bs) = splitAt n xs
mapReduce :: (NFData b, NFData c) => Int -> (a -> b) -> ([b] -> c) -> [a] -> c
mapReduce chunk mapFunc reduceFunc xs = mapResult `pseq` reduceResult
where
mapResult = concat $ parMap rdeepseq (map mapFunc) (divide chunk xs)
reduceResult = reduceFunc mapResult `using` rdeepseq
mapReduce' :: (NFData b) => Int -> Int -> (a -> b) -> ([b] -> b) -> [a] -> b
mapReduce' block chunk mapFunc reduceFunc xs = fstReduce `pseq` sndReduce
where
fstReduce = map (mapReduce chunk mapFunc reduceFunc) (divide block xs)
sndReduce = reduceFunc fstReduce `using` rdeepseq
| foreverbell/project-euler-solutions | lib/Common/MapReduce.hs | bsd-3-clause | 929 | 0 | 12 | 178 | 363 | 201 | 162 | 16 | 1 |
-- This module is based on `Parse.String` in the Elm compiler
-- https://github.com/elm/compiler/blob/94715a520f499591ac6901c8c822bc87cd1af24f/compiler/src/Parse/String.hs
{-# OPTIONS_GHC -Wall -fno-warn-unused-do-bind #-}
{-# LANGUAGE BangPatterns, MagicHash, OverloadedStrings, UnboxedTuples #-}
module Parse.String
( string
, character
)
where
import qualified Data.Utf8 as Utf8
import Data.Word (Word8, Word16)
import Foreign.Ptr (Ptr, plusPtr, minusPtr)
import qualified Elm.String as ES
import Parse.Primitives (Parser, Row, Col)
import qualified Parse.Number as Number
import qualified Parse.Primitives as P
import qualified Reporting.Error.Syntax as E
import AST.V0_16 ( StringRepresentation(SingleQuotedString, TripleQuotedString) )
-- CHARACTER
-- | Parse a character literal delimited by single quotes, e.g. @'a'@.
--
-- * 'toExpectation' is used (without consuming input) when the next byte
--   is not an opening quote.
-- * 'toError' wraps the specific failure: not exactly one character
--   ('E.CharNotString'), unterminated ('E.CharEndless'), or a bad escape
--   ('E.CharEscape').
--
-- On success the literal's contents are returned as an 'ES.String' built
-- from the single chunk that 'chompChar' saw.
character :: (Row -> Col -> x) -> (E.Char -> Row -> Col -> x) -> Parser x ES.String
character toExpectation toError =
  P.Parser $ \(P.State src pos end indent row col nl) cok _ cerr eerr ->
    -- 0x27 is the single-quote byte; anything else means "not a char literal"
    if pos >= end || P.unsafeIndex pos /= 0x27 {- ' -} then
      eerr row col toExpectation
    else
      -- skip the opening quote and scan up to the closing one
      case chompChar (plusPtr pos 1) end row (col + 1) 0 placeholder of
        Good newPos newCol numChars mostRecent ->
          -- a char literal must contain exactly one character ('' and 'ab' are errors)
          if numChars /= 1 then
            cerr row col (toError (E.CharNotString (fromIntegral (newCol - col))))
          else
            let
              !newState = P.State src newPos end indent row newCol nl
              !char = ES.fromChunks [mostRecent]
            in
            cok char newState
        CharEndless newCol ->
          cerr row newCol (toError E.CharEndless)
        CharEscape r c escape ->
          cerr r c (toError (E.CharEscape escape))
-- | Result of scanning the body of a character literal.
data CharResult
  = Good (Ptr Word8) Col Word16 ES.Chunk
    -- ^ closing quote found: position just past it, new column,
    --   number of characters seen, and the most recently scanned chunk
  | CharEndless Col
    -- ^ hit a newline or end of input before the closing quote
  | CharEscape Row Col E.Escape
    -- ^ malformed escape sequence at the given position
-- | Scan forward until the closing single quote (0x27).
--
-- 'numChars' counts the characters seen so far so the caller can reject
-- empty or multi-character literals; 'mostRecent' remembers the last chunk
-- so a valid one-character literal can be returned without re-scanning.
chompChar :: Ptr Word8 -> Ptr Word8 -> Row -> Col -> Word16 -> ES.Chunk -> CharResult
chompChar pos end row col numChars mostRecent =
  if pos >= end then
    CharEndless col
  else
    let
      !word = P.unsafeIndex pos
    in
      -- closing quote: done
      if word == 0x27 {- ' -} then
        Good (plusPtr pos 1) (col + 1) numChars mostRecent
      -- a raw newline inside a char literal means it was never closed
      else if word == 0x0A {- \n -} then
        CharEndless col
      -- backslash: decode the escape, then keep scanning after it
      else if word == 0x5C {- \ -} then
        case eatEscape (plusPtr pos 1) end row col of
          EscapeNormal code ->
            -- two bytes consumed: the backslash and the escape letter
            chompChar (plusPtr pos 2) end row (col + 2) (numChars + 1) (ES.AsciiChar code)
          EscapeUnicode delta code ->
            -- delta covers the whole escape, backslash included
            chompChar (plusPtr pos delta) end row (col + fromIntegral delta) (numChars + 1) (ES.CodePoint code)
          EscapeProblem r c badEscape ->
            CharEscape r c badEscape
          EscapeEndOfFile ->
            CharEndless col
      -- ordinary (possibly multi-byte UTF-8) character: record it as a slice
      else
        let
          !width = P.getCharWidth word
          !newPos = plusPtr pos width
        in
        chompChar newPos end row (col + 1) (numChars + 1) (ES.Slice pos width)
-- STRINGS
-- | Parse a string literal, either single-quoted (@\"...\"@) or
-- triple-quoted (@\"\"\"...\"\"\"@), returning the contents together with
-- which 'StringRepresentation' was used in the source.
--
-- 'toExpectation' is used (without consuming input) when there is no
-- opening quote; 'toError' wraps unterminated-string and bad-escape errors.
string :: (Row -> Col -> x) -> (E.String -> Row -> Col -> x) -> Parser x (ES.String, StringRepresentation)
string toExpectation toError =
  P.Parser $ \(P.State src pos end indent row col nl) cok _ cerr eerr ->
    if isDoubleQuote pos end then
      let
        !pos1 = plusPtr pos 1
      in
      -- look at the next one or two bytes to decide which form this is
      case
        if isDoubleQuote pos1 end then
          let !pos2 = plusPtr pos 2 in
          if isDoubleQuote pos2 end then
            -- three quotes: triple-quoted (multi-line) string
            let
              !pos3 = plusPtr pos 3
              !col3 = col + 3
            in
            (multiString pos3 end row col3 pos3 row col mempty, TripleQuotedString)
          else
            -- exactly two quotes: the empty single-quoted string ""
            (Ok pos2 row (col + 2) Utf8.empty, SingleQuotedString)
        else
          -- one quote: ordinary single-quoted string
          (singleString pos1 end row (col + 1) pos1 mempty, SingleQuotedString)
      of
        (Ok newPos newRow newCol utf8, representation) ->
          let
            !newState =
              P.State src newPos end indent newRow newCol nl
          in
          cok (utf8, representation) newState
        (Err r c x, _) ->
          cerr r c (toError x)
    else
      eerr row col toExpectation
{-# INLINE isDoubleQuote #-}
-- | True when @pos@ is in bounds and the byte there is a double quote (0x22).
isDoubleQuote :: Ptr Word8 -> Ptr Word8 -> Bool
isDoubleQuote pos end
  | pos >= end = False
  | otherwise  = P.unsafeIndex pos == 0x22 {- " -}
{-# INLINE isNewline #-}
-- | True when @pos@ is in bounds and the byte there is a line feed (0x0A).
isNewline :: Ptr Word8 -> Ptr Word8 -> Bool
isNewline pos end
  | pos >= end = False
  | otherwise  = P.unsafeIndex pos == 0x0A {- \n -}
-- | Result of scanning a string literal body.
data StringResult
  = Ok (Ptr Word8) Row Col !ES.String
    -- ^ closing delimiter found: position past it, new row/col, contents
  | Err Row Col E.String
    -- ^ unterminated string or bad escape at the given position
-- | Assemble the final string from the accumulated chunks (held in reverse
-- order), flushing any raw bytes between @start@ and @end@ that have not
-- yet been captured as a trailing slice chunk.
finalize :: Ptr Word8 -> Ptr Word8 -> [ES.Chunk] -> ES.String
finalize start end revChunks =
  ES.fromChunks (reverse allChunks)
  where
    allChunks
      | start == end = revChunks
      | otherwise    = ES.Slice start (minusPtr end start) : revChunks
-- | Push @chunk@ onto the reversed chunk list, first flushing any pending
-- raw bytes between @start@ and @end@ as a slice chunk.
addChunk :: ES.Chunk -> Ptr Word8 -> Ptr Word8 -> [ES.Chunk] -> [ES.Chunk]
addChunk chunk start end revChunks
  | start == end = chunk : revChunks
  | otherwise    = chunk : ES.Slice start (minusPtr end start) : revChunks
-- SINGLE STRINGS
-- | Scan the body of a single-quoted string up to the closing @\"@.
--
-- @initialPos@ marks the start of the raw bytes not yet captured in a
-- chunk; @revChunks@ accumulates finished chunks in reverse order.
-- Note that a bare LF — or a CRLF pair, which is normalized to the same
-- 'newline' chunk — is recorded and scanning continues on the next row.
singleString :: Ptr Word8 -> Ptr Word8 -> Row -> Col -> Ptr Word8 -> [ES.Chunk] -> StringResult
singleString pos end row col initialPos revChunks =
  if pos >= end then
    Err row col E.StringEndless_Single
  else
    let
      !word = P.unsafeIndex pos
    in
      -- closing quote: flush the trailing slice and finish
      if word == 0x22 {- " -} then
        Ok (plusPtr pos 1) row (col + 1) $
          finalize initialPos pos revChunks
      -- LF: record a newline chunk, continue at column 1 of the next row
      else if word == 0x0A {- \n -} then
        let !newPos = plusPtr pos 1 in
        singleString newPos end (row + 1) 1 newPos $
          addChunk newline initialPos pos revChunks
      -- CRLF: consume both bytes, normalized to the same newline chunk
      else if word == 0x0D {- \r -} && isNewline (plusPtr pos 1) end then
        let !newPos = plusPtr pos 2 in
        singleString newPos end (row + 1) 1 newPos $
          addChunk newline initialPos pos revChunks
      -- backslash: decode the escape and store it as its own chunk
      else if word == 0x5C {- \ -} then
        case eatEscape (plusPtr pos 1) end row col of
          EscapeNormal code ->
            let !newPos = plusPtr pos 2 in
            singleString newPos end row (col + 2) newPos $
              addChunk (ES.AsciiChar code) initialPos pos revChunks
          EscapeUnicode delta code ->
            let !newPos = plusPtr pos delta in
            singleString newPos end row (col + fromIntegral delta) newPos $
              addChunk (ES.CodePoint code) initialPos pos revChunks
          EscapeProblem r c x ->
            Err r c (E.StringEscape x)
          EscapeEndOfFile ->
            Err row (col + 1) E.StringEndless_Single
      -- ordinary (possibly multi-byte UTF-8) character: just advance;
      -- it stays inside the pending initialPos..pos slice
      else
        let !newPos = plusPtr pos (P.getCharWidth word) in
        singleString newPos end row (col + 1) initialPos revChunks
-- MULTI STRINGS
-- | Scan the body of a triple-quoted string up to the closing @\"\"\"@.
--
-- Same chunk accumulation scheme as 'singleString'; the extra @sr@/@sc@
-- arguments remember the row/column of the opening delimiter so an
-- unterminated string is reported at its start rather than at end of input.
multiString :: Ptr Word8 -> Ptr Word8 -> Row -> Col -> Ptr Word8 -> Row -> Col -> [ES.Chunk] -> StringResult
multiString pos end row col initialPos sr sc revChunks =
  if pos >= end then
    Err sr sc E.StringEndless_Multi
  else
    let !word = P.unsafeIndex pos in
    -- three consecutive quotes close the string
    if word == 0x22 {- " -} && isDoubleQuote (plusPtr pos 1) end && isDoubleQuote (plusPtr pos 2) end then
      Ok (plusPtr pos 3) row (col + 3) $
        finalize initialPos pos revChunks
    -- LF: record a newline chunk, continue at column 1 of the next row
    else if word == 0x0A {- \n -} then
      let !pos1 = plusPtr pos 1 in
      multiString pos1 end (row + 1) 1 pos1 sr sc $
        addChunk newline initialPos pos revChunks
    -- CRLF: consume both bytes, normalized to the same newline chunk
    else if word == 0x0D {- \r -} && isNewline (plusPtr pos 1) end then
      let !pos1 = plusPtr pos 2 in
      multiString pos1 end (row + 1) 1 pos1 sr sc $
        addChunk newline initialPos pos revChunks
    -- backslash: decode the escape and store it as its own chunk
    else if word == 0x5C {- \ -} then
      case eatEscape (plusPtr pos 1) end row col of
        EscapeNormal code ->
          let !newPos = plusPtr pos 2 in
          multiString newPos end row (col + 2) newPos sr sc $
            addChunk (ES.AsciiChar code) initialPos pos revChunks
        EscapeUnicode delta code ->
          let !newPos = plusPtr pos delta in
          multiString newPos end row (col + fromIntegral delta) newPos sr sc $
            addChunk (ES.CodePoint code) initialPos pos revChunks
        EscapeProblem r c x ->
          Err r c (E.StringEscape x)
        EscapeEndOfFile ->
          Err sr sc E.StringEndless_Multi
    -- ordinary character: advance; it stays in the pending slice
    else
      let !newPos = plusPtr pos (P.getCharWidth word) in
      multiString newPos end row (col + 1) initialPos sr sc revChunks
-- ESCAPE CHARACTERS
-- | Result of decoding one backslash escape sequence.
data Escape
  = EscapeNormal Word8 -- ^ a single-byte escape such as @\n@, @\t@, @\\@
  | EscapeUnicode !Int !Int -- ^ total source width of the escape, decoded code point
  | EscapeEndOfFile -- ^ input ended immediately after the backslash
  | EscapeProblem Row Col E.Escape -- ^ malformed escape, with position and detail
-- | Decode the single character that follows a backslash.  @pos@ points at
-- the byte immediately after the backslash; @row@/@col@ are the position of
-- the backslash itself, used for error reporting.
eatEscape :: Ptr Word8 -> Ptr Word8 -> Row -> Col -> Escape
eatEscape pos end row col
  | pos >= end = EscapeEndOfFile
  | otherwise =
      case P.unsafeIndex pos of
        0x6E {- n -} -> EscapeNormal 0x0A {- \n -}
        0x72 {- r -} -> EscapeNormal 0x0D {- \r -}
        0x74 {- t -} -> EscapeNormal 0x09 {- \t -}
        0x22 {- " -} -> EscapeNormal 0x22 {- " -}
        0x27 {- ' -} -> EscapeNormal 0x27 {- ' -}
        0x5C {- \ -} -> EscapeNormal 0x5C {- \ -}
        0x78 {- x -} -> eatPre019Unicode (plusPtr pos 1) end row col
        0x75 {- u -} -> eatUnicode (plusPtr pos 1) end row col
        _            -> EscapeProblem row col E.EscapeUnknown
-- | Decode an old-style (pre-Elm-0.19) @\\xNNNN@ unicode escape.
-- @pos@ points just past the @x@.  The reported widths count the leading
-- @\\x@ (hence the @2 +@).
eatPre019Unicode :: Ptr Word8 -> Ptr Word8 -> Row -> Col -> Escape
eatPre019Unicode pos end row col =
  if pos >= end then
    EscapeProblem row col (E.BadUnicodeFormat 2)
  else
    let
      -- chompHex consumes as many hex digits as it can; numDigits may be 0
      (# newPos, code #) = Number.chompHex pos end
      !numDigits = minusPtr newPos pos
    in
    -- NOTE(review): the success case requires newPos < end, i.e. at least one
    -- byte after the digits — presumably the escape may not end the input.
    -- Confirm against the callers; no digit-count check is performed here.
    if newPos >= end then
      EscapeProblem row col $ E.BadUnicodeFormat (2 + fromIntegral numDigits)
    else
      EscapeUnicode (2 + numDigits) code
-- | Decode a @\\u{NNNN}@ unicode escape.  @pos@ points just past the @u@
-- and must sit on the opening brace.  The width stored in 'EscapeUnicode'
-- is @numDigits + 4@: backslash, @u@, both braces, and the digits.
eatUnicode :: Ptr Word8 -> Ptr Word8 -> Row -> Col -> Escape
eatUnicode pos end row col =
  if pos >= end || P.unsafeIndex pos /= 0x7B {- { -} then
    EscapeProblem row col (E.BadUnicodeFormat 2)
  else
    let
      !digitPos = plusPtr pos 1
      (# newPos, code #) = Number.chompHex digitPos end
      !numDigits = minusPtr newPos digitPos
    in
    -- the digits must be followed by a closing brace
    if newPos >= end || P.unsafeIndex newPos /= 0x7D {- } -} then
      EscapeProblem row col $ E.BadUnicodeFormat (2 + fromIntegral (minusPtr newPos pos))
    -- the code point must be in the valid Unicode range
    else if code < 0 || 0x10FFFF < code then
      EscapeProblem row col $ E.BadUnicodeCode (3 + fromIntegral (minusPtr newPos pos))
    -- Elm requires 4-6 hex digits (zero-padded if necessary)
    else if numDigits < 4 || 6 < numDigits then
      EscapeProblem row col $
        E.BadUnicodeLength
          (3 + fromIntegral (minusPtr newPos pos))
          numDigits
          code
    else
      EscapeUnicode (numDigits + 4) code
-- | The chunk emitted for every (normalized) newline.  NOINLINE keeps it a
-- single shared constant rather than a fresh allocation at each use site.
{-# NOINLINE newline #-}
newline :: ES.Chunk
newline =
  ES.AsciiChar 0x0A {- \n -}
-- | U+FFFD REPLACEMENT CHARACTER chunk, shared for the same reason as
-- 'newline' (NOINLINE prevents per-use re-allocation).
{-# NOINLINE placeholder #-}
placeholder :: ES.Chunk
placeholder =
  ES.CodePoint 0xFFFD {-replacement character-}
| avh4/elm-format | elm-format-lib/src/Parse/String.hs | bsd-3-clause | 10,429 | 0 | 21 | 3,067 | 3,454 | 1,725 | 1,729 | 232 | 10 |
{-# LANGUAGE OverloadedStrings #-}
module HtmlCat.Snap (feedStdIn, runHtmlCat) where
import Control.Concurrent (Chan, writeChan, forkIO)
import Control.Monad (void)
import Control.Monad.Trans (MonadIO(..))
import Data.Text (Text)
import System.IO (stdin)
import qualified Data.ByteString.Char8 as B8
import Data.Enumerator (Iteratee, Enumeratee, ($$), ($=))
import Data.Enumerator (Enumerator)
import Snap.Core
import Snap.Http.Server (simpleHttpServe)
import Snap.Http.Server.Config
import Text.Blaze.Renderer.Utf8 (renderHtmlBuilder)
import qualified Blaze.ByteString.Builder.Char.Utf8 as B
import qualified Data.Enumerator as E (run_)
import qualified Data.Enumerator.List as E (map, foldM)
import qualified Data.Enumerator.Text as E (enumHandle)
import HtmlCat.Html (html)
import Snap.EventSource (ServerEvent(..), eventSourceApp)
-- | Fork a background thread that streams stdin, turning each 'Text'
-- chunk into a 'ServerEvent' and writing it to the channel.
feedStdIn :: Chan ServerEvent -> IO ()
feedStdIn chan = void . forkIO $ E.run_ $
  sourceStdIn $= textsToEventSource $$ sinkChan chan
-- | Serve the htmlcat UI on the given host and port (blocking).
runHtmlCat :: Chan ServerEvent -> String -> Int -> IO ()
runHtmlCat chan host port =
  simpleHttpServe (setPort port $ setBind (B8.pack host)
                   $ defaultConfig :: Config Snap ())
                  (app chan)
-- | Route table: the page itself at the root, the event stream at /stream.
app :: Chan ServerEvent -> Snap ()
app chan = route [ ("", appTop)
                 , ("stream", appStream chan)
                 ]
-- | Serve the static HTML page that connects to the event stream.
appTop :: Snap ()
appTop = writeBuilder $ renderHtmlBuilder html
-- | Serve the channel's events as a Server-Sent Events stream.
appStream :: Chan ServerEvent -> Snap ()
appStream = eventSourceApp
-- | Enumerate chunks of text read from stdin.
sourceStdIn :: MonadIO m => Enumerator Text m a
sourceStdIn = E.enumHandle stdin
-- | Wrap every incoming 'Text' chunk in an unnamed, id-less 'ServerEvent'
-- whose payload is the UTF-8 rendering of that chunk.
textsToEventSource :: Monad m => Enumeratee Text ServerEvent m a
textsToEventSource =
  E.map $ \chunk ->
    ServerEvent { eventName = Nothing
                , eventId   = Nothing
                , eventData = [B.fromText chunk]
                }
-- | Drain the stream, writing every element to the given channel.
sinkChan :: MonadIO m => Chan a -> Iteratee a m ()
sinkChan chan = E.foldM (\() el -> liftIO (writeChan chan el)) ()
| maoe/htmlcat | HtmlCat/Snap.hs | bsd-3-clause | 1,917 | 0 | 12 | 412 | 617 | 346 | 271 | 44 | 1 |
module KMC.Util.Set where
import qualified Data.Set as S
-- | Flatten a set of sets into one set — the monadic @join@ for sets.
-- ('Data.Set' cannot be a 'Monad' because of its @Ord a@ constraint.)
joinSets :: (Ord a) => S.Set (S.Set a) -> S.Set a
joinSets = S.unions . S.toList
-- | Given a needle set of @a@ and a haystack of sets of @a@, keep only the
-- haystack sets that share at least one element with the needle.
occurs :: (Ord a) => S.Set a -> S.Set (S.Set a) -> S.Set (S.Set a)
occurs needle haystack =
  S.filter (\candidate -> not (S.null (needle `S.intersection` candidate))) haystack
| diku-kmc/repg | src/KMC/Util/Set.hs | mit | 498 | 0 | 11 | 102 | 159 | 84 | 75 | 6 | 1 |
{-# LANGUAGE CPP, NoImplicitPrelude #-}
module Data.Either.Compat (
module Base
, isLeft
, isRight
) where
import Data.Either as Base
#if !(MIN_VERSION_base(4,7,0))
import Data.Bool (Bool(..))
-- | Return `True` if the given value is a `Left`-value, `False` otherwise.
--
-- /Since: 4.7.0.0/
isLeft :: Either a b -> Bool
isLeft e = case e of
  Left  _ -> True
  Right _ -> False
-- | Return `True` if the given value is a `Right`-value, `False` otherwise.
--
-- /Since: 4.7.0.0/
isRight :: Either a b -> Bool
isRight e = case e of
  Left  _ -> False
  Right _ -> True
#endif
| beni55/base-compat | src/Data/Either/Compat.hs | mit | 563 | 0 | 7 | 101 | 133 | 78 | 55 | 13 | 1 |
{-
Copyright (C) 2011 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.MIME
Copyright : Copyright (C) 2011 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
Mime type lookup for ODT writer.
-}
module Text.Pandoc.MIME ( getMimeType )
where
import System.FilePath
import Data.Char ( toLower )
import qualified Data.Map as M
-- | Determine mime type appropriate for file path.
--
-- The extension is taken without its leading dot and lowercased before the
-- lookup, so @getMimeType "photo.PNG"@ yields @Just "image\/png"@; paths
-- with no extension, or an unknown one, yield 'Nothing'.
getMimeType :: FilePath -> Maybe String
getMimeType f = M.lookup (map toLower $ drop 1 $ takeExtension f) mimeTypes

-- | Extension-to-MIME-type table.  Hoisted out of 'getMimeType' to a
-- top-level binding (a CAF) so the 'M.Map' is built at most once, instead
-- of potentially once per call when bound in a @where@ clause.
mimeTypes :: M.Map String String
mimeTypes = M.fromList -- List borrowed from happstack-server.
  [("gz","application/x-gzip")
  ,("cabal","application/x-cabal")
  ,("%","application/x-trash")
  ,("323","text/h323")
  ,("3gp","video/3gpp")
  ,("7z","application/x-7z-compressed")
  ,("abw","application/x-abiword")
  ,("ai","application/postscript")
  ,("aif","audio/x-aiff")
  ,("aifc","audio/x-aiff")
  ,("aiff","audio/x-aiff")
  ,("alc","chemical/x-alchemy")
  ,("art","image/x-jg")
  ,("asc","text/plain")
  ,("asf","video/x-ms-asf")
  ,("asn","chemical/x-ncbi-asn1")
  ,("aso","chemical/x-ncbi-asn1-binary")
  ,("asx","video/x-ms-asf")
  ,("atom","application/atom")
  ,("atomcat","application/atomcat+xml")
  ,("atomsrv","application/atomserv+xml")
  ,("au","audio/basic")
  ,("avi","video/x-msvideo")
  ,("b","chemical/x-molconn-Z")
  ,("bak","application/x-trash")
  ,("bat","application/x-msdos-program")
  ,("bcpio","application/x-bcpio")
  ,("bib","text/x-bibtex")
  ,("bin","application/octet-stream")
  ,("bmp","image/x-ms-bmp")
  ,("boo","text/x-boo")
  ,("book","application/x-maker")
  ,("bsd","chemical/x-crossfire")
  ,("c","text/x-csrc")
  ,("c++","text/x-c++src")
  ,("c3d","chemical/x-chem3d")
  ,("cab","application/x-cab")
  ,("cac","chemical/x-cache")
  ,("cache","chemical/x-cache")
  ,("cap","application/cap")
  ,("cascii","chemical/x-cactvs-binary")
  ,("cat","application/vnd.ms-pki.seccat")
  ,("cbin","chemical/x-cactvs-binary")
  ,("cbr","application/x-cbr")
  ,("cbz","application/x-cbz")
  ,("cc","text/x-c++src")
  ,("cdf","application/x-cdf")
  ,("cdr","image/x-coreldraw")
  ,("cdt","image/x-coreldrawtemplate")
  ,("cdx","chemical/x-cdx")
  ,("cdy","application/vnd.cinderella")
  ,("cef","chemical/x-cxf")
  ,("cer","chemical/x-cerius")
  ,("chm","chemical/x-chemdraw")
  ,("chrt","application/x-kchart")
  ,("cif","chemical/x-cif")
  ,("class","application/java-vm")
  ,("cls","text/x-tex")
  ,("cmdf","chemical/x-cmdf")
  ,("cml","chemical/x-cml")
  ,("cod","application/vnd.rim.cod")
  ,("com","application/x-msdos-program")
  ,("cpa","chemical/x-compass")
  ,("cpio","application/x-cpio")
  ,("cpp","text/x-c++src")
  ,("cpt","application/mac-compactpro")
  ,("crl","application/x-pkcs7-crl")
  ,("crt","application/x-x509-ca-cert")
  ,("csf","chemical/x-cache-csf")
  ,("csh","application/x-csh")
  ,("csm","chemical/x-csml")
  ,("csml","chemical/x-csml")
  ,("css","text/css")
  ,("csv","text/csv")
  ,("ctab","chemical/x-cactvs-binary")
  ,("ctx","chemical/x-ctx")
  ,("cu","application/cu-seeme")
  ,("cub","chemical/x-gaussian-cube")
  ,("cxf","chemical/x-cxf")
  ,("cxx","text/x-c++src")
  ,("d","text/x-dsrc")
  ,("dat","chemical/x-mopac-input")
  ,("dcr","application/x-director")
  ,("deb","application/x-debian-package")
  ,("dif","video/dv")
  ,("diff","text/x-diff")
  ,("dir","application/x-director")
  ,("djv","image/vnd.djvu")
  ,("djvu","image/vnd.djvu")
  ,("dl","video/dl")
  ,("dll","application/x-msdos-program")
  ,("dmg","application/x-apple-diskimage")
  ,("dms","application/x-dms")
  ,("doc","application/msword")
  ,("dot","application/msword")
  ,("dv","video/dv")
  ,("dvi","application/x-dvi")
  ,("dx","chemical/x-jcamp-dx")
  ,("dxr","application/x-director")
  ,("emb","chemical/x-embl-dl-nucleotide")
  ,("embl","chemical/x-embl-dl-nucleotide")
  ,("eml","message/rfc822")
  ,("ent","chemical/x-ncbi-asn1-ascii")
  ,("eps","application/postscript")
  ,("etx","text/x-setext")
  ,("exe","application/x-msdos-program")
  ,("ez","application/andrew-inset")
  ,("fb","application/x-maker")
  ,("fbdoc","application/x-maker")
  ,("fch","chemical/x-gaussian-checkpoint")
  ,("fchk","chemical/x-gaussian-checkpoint")
  ,("fig","application/x-xfig")
  ,("flac","application/x-flac")
  ,("fli","video/fli")
  ,("fm","application/x-maker")
  ,("frame","application/x-maker")
  ,("frm","application/x-maker")
  ,("gal","chemical/x-gaussian-log")
  ,("gam","chemical/x-gamess-input")
  ,("gamin","chemical/x-gamess-input")
  ,("gau","chemical/x-gaussian-input")
  ,("gcd","text/x-pcs-gcd")
  ,("gcf","application/x-graphing-calculator")
  ,("gcg","chemical/x-gcg8-sequence")
  ,("gen","chemical/x-genbank")
  ,("gf","application/x-tex-gf")
  ,("gif","image/gif")
  ,("gjc","chemical/x-gaussian-input")
  ,("gjf","chemical/x-gaussian-input")
  ,("gl","video/gl")
  ,("gnumeric","application/x-gnumeric")
  ,("gpt","chemical/x-mopac-graph")
  ,("gsf","application/x-font")
  ,("gsm","audio/x-gsm")
  ,("gtar","application/x-gtar")
  ,("h","text/x-chdr")
  ,("h++","text/x-c++hdr")
  ,("hdf","application/x-hdf")
  ,("hh","text/x-c++hdr")
  ,("hin","chemical/x-hin")
  ,("hpp","text/x-c++hdr")
  ,("hqx","application/mac-binhex40")
  ,("hs","text/x-haskell")
  ,("hta","application/hta")
  ,("htc","text/x-component")
  ,("htm","text/html")
  ,("html","text/html")
  ,("hxx","text/x-c++hdr")
  ,("ica","application/x-ica")
  ,("ice","x-conference/x-cooltalk")
  ,("ico","image/x-icon")
  ,("ics","text/calendar")
  ,("icz","text/calendar")
  ,("ief","image/ief")
  ,("iges","model/iges")
  ,("igs","model/iges")
  ,("iii","application/x-iphone")
  ,("inp","chemical/x-gamess-input")
  ,("ins","application/x-internet-signup")
  ,("iso","application/x-iso9660-image")
  ,("isp","application/x-internet-signup")
  ,("ist","chemical/x-isostar")
  ,("istr","chemical/x-isostar")
  ,("jad","text/vnd.sun.j2me.app-descriptor")
  ,("jar","application/java-archive")
  ,("java","text/x-java")
  ,("jdx","chemical/x-jcamp-dx")
  ,("jmz","application/x-jmol")
  ,("jng","image/x-jng")
  ,("jnlp","application/x-java-jnlp-file")
  ,("jpe","image/jpeg")
  ,("jpeg","image/jpeg")
  ,("jpg","image/jpeg")
  ,("js","application/x-javascript")
  ,("kar","audio/midi")
  ,("key","application/pgp-keys")
  ,("kil","application/x-killustrator")
  ,("kin","chemical/x-kinemage")
  ,("kml","application/vnd.google-earth.kml+xml")
  ,("kmz","application/vnd.google-earth.kmz")
  ,("kpr","application/x-kpresenter")
  ,("kpt","application/x-kpresenter")
  ,("ksp","application/x-kspread")
  ,("kwd","application/x-kword")
  ,("kwt","application/x-kword")
  ,("latex","application/x-latex")
  ,("lha","application/x-lha")
  ,("lhs","text/x-literate-haskell")
  ,("lsf","video/x-la-asf")
  ,("lsx","video/x-la-asf")
  ,("ltx","text/x-tex")
  ,("lyx","application/x-lyx")
  ,("lzh","application/x-lzh")
  ,("lzx","application/x-lzx")
  ,("m3u","audio/mpegurl")
  ,("m4a","audio/mpeg")
  ,("maker","application/x-maker")
  ,("man","application/x-troff-man")
  ,("mcif","chemical/x-mmcif")
  ,("mcm","chemical/x-macmolecule")
  ,("mdb","application/msaccess")
  ,("me","application/x-troff-me")
  ,("mesh","model/mesh")
  ,("mid","audio/midi")
  ,("midi","audio/midi")
  ,("mif","application/x-mif")
  ,("mm","application/x-freemind")
  ,("mmd","chemical/x-macromodel-input")
  ,("mmf","application/vnd.smaf")
  ,("mml","text/mathml")
  ,("mmod","chemical/x-macromodel-input")
  ,("mng","video/x-mng")
  ,("moc","text/x-moc")
  ,("mol","chemical/x-mdl-molfile")
  ,("mol2","chemical/x-mol2")
  ,("moo","chemical/x-mopac-out")
  ,("mop","chemical/x-mopac-input")
  ,("mopcrt","chemical/x-mopac-input")
  ,("mov","video/quicktime")
  ,("movie","video/x-sgi-movie")
  ,("mp2","audio/mpeg")
  ,("mp3","audio/mpeg")
  ,("mp4","video/mp4")
  ,("mpc","chemical/x-mopac-input")
  ,("mpe","video/mpeg")
  ,("mpeg","video/mpeg")
  ,("mpega","audio/mpeg")
  ,("mpg","video/mpeg")
  ,("mpga","audio/mpeg")
  ,("ms","application/x-troff-ms")
  ,("msh","model/mesh")
  ,("msi","application/x-msi")
  ,("mvb","chemical/x-mopac-vib")
  ,("mxu","video/vnd.mpegurl")
  ,("nb","application/mathematica")
  ,("nc","application/x-netcdf")
  ,("nwc","application/x-nwc")
  ,("o","application/x-object")
  ,("oda","application/oda")
  ,("odb","application/vnd.oasis.opendocument.database")
  ,("odc","application/vnd.oasis.opendocument.chart")
  ,("odf","application/vnd.oasis.opendocument.formula")
  ,("odg","application/vnd.oasis.opendocument.graphics")
  ,("odi","application/vnd.oasis.opendocument.image")
  ,("odm","application/vnd.oasis.opendocument.text-master")
  ,("odp","application/vnd.oasis.opendocument.presentation")
  ,("ods","application/vnd.oasis.opendocument.spreadsheet")
  ,("odt","application/vnd.oasis.opendocument.text")
  ,("oga","audio/ogg")
  ,("ogg","application/ogg")
  ,("ogv","video/ogg")
  ,("ogx","application/ogg")
  ,("old","application/x-trash")
  ,("otg","application/vnd.oasis.opendocument.graphics-template")
  ,("oth","application/vnd.oasis.opendocument.text-web")
  ,("otp","application/vnd.oasis.opendocument.presentation-template")
  ,("ots","application/vnd.oasis.opendocument.spreadsheet-template")
  ,("ott","application/vnd.oasis.opendocument.text-template")
  ,("oza","application/x-oz-application")
  ,("p","text/x-pascal")
  ,("p7r","application/x-pkcs7-certreqresp")
  ,("pac","application/x-ns-proxy-autoconfig")
  ,("pas","text/x-pascal")
  ,("pat","image/x-coreldrawpattern")
  ,("patch","text/x-diff")
  ,("pbm","image/x-portable-bitmap")
  ,("pcap","application/cap")
  ,("pcf","application/x-font")
  ,("pcf.Z","application/x-font")
  ,("pcx","image/pcx")
  ,("pdb","chemical/x-pdb")
  ,("pdf","application/pdf")
  ,("pfa","application/x-font")
  ,("pfb","application/x-font")
  ,("pgm","image/x-portable-graymap")
  ,("pgn","application/x-chess-pgn")
  ,("pgp","application/pgp-signature")
  ,("php","application/x-httpd-php")
  ,("php3","application/x-httpd-php3")
  ,("php3p","application/x-httpd-php3-preprocessed")
  ,("php4","application/x-httpd-php4")
  ,("phps","application/x-httpd-php-source")
  ,("pht","application/x-httpd-php")
  ,("phtml","application/x-httpd-php")
  ,("pk","application/x-tex-pk")
  ,("pl","text/x-perl")
  ,("pls","audio/x-scpls")
  ,("pm","text/x-perl")
  ,("png","image/png")
  ,("pnm","image/x-portable-anymap")
  ,("pot","text/plain")
  ,("ppm","image/x-portable-pixmap")
  ,("pps","application/vnd.ms-powerpoint")
  ,("ppt","application/vnd.ms-powerpoint")
  ,("prf","application/pics-rules")
  ,("prt","chemical/x-ncbi-asn1-ascii")
  ,("ps","application/postscript")
  ,("psd","image/x-photoshop")
  ,("py","text/x-python")
  ,("pyc","application/x-python-code")
  ,("pyo","application/x-python-code")
  ,("qt","video/quicktime")
  ,("qtl","application/x-quicktimeplayer")
  ,("ra","audio/x-pn-realaudio")
  ,("ram","audio/x-pn-realaudio")
  ,("rar","application/rar")
  ,("ras","image/x-cmu-raster")
  ,("rd","chemical/x-mdl-rdfile")
  ,("rdf","application/rdf+xml")
  ,("rgb","image/x-rgb")
  ,("rhtml","application/x-httpd-eruby")
  ,("rm","audio/x-pn-realaudio")
  ,("roff","application/x-troff")
  ,("ros","chemical/x-rosdal")
  ,("rpm","application/x-redhat-package-manager")
  ,("rss","application/rss+xml")
  ,("rtf","application/rtf")
  ,("rtx","text/richtext")
  ,("rxn","chemical/x-mdl-rxnfile")
  ,("sct","text/scriptlet")
  ,("sd","chemical/x-mdl-sdfile")
  ,("sd2","audio/x-sd2")
  ,("sda","application/vnd.stardivision.draw")
  ,("sdc","application/vnd.stardivision.calc")
  ,("sdd","application/vnd.stardivision.impress")
  ,("sdf","application/vnd.stardivision.math")
  ,("sds","application/vnd.stardivision.chart")
  ,("sdw","application/vnd.stardivision.writer")
  ,("ser","application/java-serialized-object")
  ,("sgf","application/x-go-sgf")
  ,("sgl","application/vnd.stardivision.writer-global")
  ,("sh","application/x-sh")
  ,("shar","application/x-shar")
  ,("shtml","text/html")
  ,("sid","audio/prs.sid")
  ,("sik","application/x-trash")
  ,("silo","model/mesh")
  ,("sis","application/vnd.symbian.install")
  ,("sisx","x-epoc/x-sisx-app")
  ,("sit","application/x-stuffit")
  ,("sitx","application/x-stuffit")
  ,("skd","application/x-koan")
  ,("skm","application/x-koan")
  ,("skp","application/x-koan")
  ,("skt","application/x-koan")
  ,("smi","application/smil")
  ,("smil","application/smil")
  ,("snd","audio/basic")
  ,("spc","chemical/x-galactic-spc")
  ,("spl","application/futuresplash")
  ,("spx","audio/ogg")
  ,("src","application/x-wais-source")
  ,("stc","application/vnd.sun.xml.calc.template")
  ,("std","application/vnd.sun.xml.draw.template")
  ,("sti","application/vnd.sun.xml.impress.template")
  ,("stl","application/vnd.ms-pki.stl")
  ,("stw","application/vnd.sun.xml.writer.template")
  ,("sty","text/x-tex")
  ,("sv4cpio","application/x-sv4cpio")
  ,("sv4crc","application/x-sv4crc")
  ,("svg","image/svg+xml")
  ,("svgz","image/svg+xml")
  ,("sw","chemical/x-swissprot")
  ,("swf","application/x-shockwave-flash")
  ,("swfl","application/x-shockwave-flash")
  ,("sxc","application/vnd.sun.xml.calc")
  ,("sxd","application/vnd.sun.xml.draw")
  ,("sxg","application/vnd.sun.xml.writer.global")
  ,("sxi","application/vnd.sun.xml.impress")
  ,("sxm","application/vnd.sun.xml.math")
  ,("sxw","application/vnd.sun.xml.writer")
  ,("t","application/x-troff")
  ,("tar","application/x-tar")
  ,("taz","application/x-gtar")
  ,("tcl","application/x-tcl")
  ,("tex","text/x-tex")
  ,("texi","application/x-texinfo")
  ,("texinfo","application/x-texinfo")
  ,("text","text/plain")
  ,("tgf","chemical/x-mdl-tgf")
  ,("tgz","application/x-gtar")
  ,("tif","image/tiff")
  ,("tiff","image/tiff")
  ,("tk","text/x-tcl")
  ,("tm","text/texmacs")
  ,("torrent","application/x-bittorrent")
  ,("tr","application/x-troff")
  ,("ts","text/texmacs")
  ,("tsp","application/dsptype")
  ,("tsv","text/tab-separated-values")
  ,("txt","text/plain")
  ,("udeb","application/x-debian-package")
  ,("uls","text/iuls")
  ,("ustar","application/x-ustar")
  ,("val","chemical/x-ncbi-asn1-binary")
  ,("vcd","application/x-cdlink")
  ,("vcf","text/x-vcard")
  ,("vcs","text/x-vcalendar")
  ,("vmd","chemical/x-vmd")
  ,("vms","chemical/x-vamas-iso14976")
  ,("vrm","x-world/x-vrml")
  ,("vrml","model/vrml")
  ,("vsd","application/vnd.visio")
  ,("wad","application/x-doom")
  ,("wav","audio/x-wav")
  ,("wax","audio/x-ms-wax")
  ,("wbmp","image/vnd.wap.wbmp")
  ,("wbxml","application/vnd.wap.wbxml")
  ,("wk","application/x-123")
  ,("wm","video/x-ms-wm")
  ,("wma","audio/x-ms-wma")
  ,("wmd","application/x-ms-wmd")
  ,("wml","text/vnd.wap.wml")
  ,("wmlc","application/vnd.wap.wmlc")
  ,("wmls","text/vnd.wap.wmlscript")
  ,("wmlsc","application/vnd.wap.wmlscriptc")
  ,("wmv","video/x-ms-wmv")
  ,("wmx","video/x-ms-wmx")
  ,("wmz","application/x-ms-wmz")
  ,("wp5","application/wordperfect5.1")
  ,("wpd","application/wordperfect")
  ,("wrl","model/vrml")
  ,("wsc","text/scriptlet")
  ,("wvx","video/x-ms-wvx")
  ,("wz","application/x-wingz")
  ,("xbm","image/x-xbitmap")
  ,("xcf","application/x-xcf")
  ,("xht","application/xhtml+xml")
  ,("xhtml","application/xhtml+xml")
  ,("xlb","application/vnd.ms-excel")
  ,("xls","application/vnd.ms-excel")
  ,("xlt","application/vnd.ms-excel")
  ,("xml","application/xml")
  ,("xpi","application/x-xpinstall")
  ,("xpm","image/x-xpixmap")
  ,("xsl","application/xml")
  ,("xtel","chemical/x-xtel")
  ,("xul","application/vnd.mozilla.xul+xml")
  ,("xwd","image/x-xwindowdump")
  ,("xyz","chemical/x-xyz")
  ,("zip","application/zip")
  ,("zmt","chemical/x-mopac-input")
  ]
| Lythimus/lptv | sites/all/modules/jgm-pandoc-8be6cc2/src/Text/Pandoc/MIME.hs | gpl-2.0 | 20,338 | 0 | 9 | 5,576 | 4,076 | 2,707 | 1,369 | 449 | 1 |
{-# LANGUAGE CPP #-}
-- Create pandoc.1 man and pandoc_markdown.5 man pages from README
import Text.Pandoc
import Text.Pandoc.Error (handleError)
import qualified Text.Pandoc.UTF8 as UTF8
import Data.Char (toUpper)
import Control.Monad
import System.FilePath
import System.Environment (getArgs)
import Text.Pandoc.Shared (normalize)
import Data.Maybe ( catMaybes )
import Prelude hiding (catch)
import Control.Exception ( catch )
import System.IO.Error ( isDoesNotExistError )
#if MIN_VERSION_directory(1,2,0)
import Data.Time.Clock (UTCTime(..))
#else
import System.Time (ClockTime(..))
#endif
import System.Directory
-- | Regenerate pandoc.1 and pandoc_markdown.5 from README, but only when
-- README or the respective man-page template is newer than the output.
main :: IO ()
main = do
  ds1 <- modifiedDependencies ("man" </> "man1" </> "pandoc.1")
              ["README", "man" </> "man1" </> "pandoc.1.template"]
  ds2 <- modifiedDependencies ("man" </> "man5" </> "pandoc_markdown.5")
              ["README", "man" </> "man5" </> "pandoc_markdown.5.template"]
  unless (null ds1 && null ds2) $ do
    rmContents <- UTF8.readFile "README"
    let (Pandoc meta blocks) = normalize $ handleError $ readMarkdown def rmContents
    -- pandoc.1 gets README minus the Wrappers and markdown-syntax sections;
    -- pandoc_markdown.5 gets only the markdown-syntax section
    let manBlocks = removeSect [Str "Wrappers"]
                  $ removeSect [Str "Pandoc's",Space,Str "markdown"] blocks
    let syntaxBlocks = extractSect [Str "Pandoc's",Space,Str "markdown"] blocks
    args <- getArgs
    let verbose = "--verbose" `elem` args
    unless (null ds1) $
      makeManPage verbose ("man" </> "man1" </> "pandoc.1") meta manBlocks
    unless (null ds2) $
      makeManPage verbose ("man" </> "man5" </> "pandoc_markdown.5") meta syntaxBlocks
-- | Render one man page from @page.template@ plus the given metadata and
-- blocks, reporting the write when verbose.
makeManPage :: Bool -> FilePath -> Meta -> [Block] -> IO ()
makeManPage verbose page meta blocks = do
  let templ = page <.> "template"
  manTemplate <- UTF8.readFile templ
  writeManPage page manTemplate (Pandoc meta blocks)
  when verbose $ putStrLn $ "Created " ++ page
-- | Write the document as a man page: strip hyperlinks (man cannot render
-- them), upper-case level-1 headers, and fill the @version@ template
-- variable with the running pandoc's version.
writeManPage :: FilePath -> String -> Pandoc -> IO ()
writeManPage page templ doc = do
  let version = pandocVersion
  let opts = def{ writerStandalone = True
                , writerTemplate = templ
                , writerVariables = [("version",version)] }
  let manPage = writeMan opts $
                bottomUp (concatMap removeLinks) $
                bottomUp capitalizeHeaders doc
  UTF8.writeFile page manPage
-- | Replace a hyperlink by its label text; every other inline is kept,
-- wrapped as a singleton list (for use with 'concatMap').
removeLinks :: Inline -> [Inline]
removeLinks inline =
  case inline of
    Link lab _ -> lab
    other      -> [other]
-- | Upper-case all text inside level-1 headers (man-page section style);
-- other blocks pass through unchanged.
capitalizeHeaders :: Block -> Block
capitalizeHeaders blk =
  case blk of
    Header 1 attr inlines -> Header 1 attr $ bottomUp capitalize inlines
    other                 -> other
-- | Upper-case the contents of a 'Str' inline; leave everything else alone.
capitalize :: Inline -> Inline
capitalize inline =
  case inline of
    Str s -> Str (map toUpper s)
    other -> other
-- | Drop a level-1 section: when a level-1 header whose inlines equal
-- @ils@ is found, discard it and everything up to (but not including) the
-- next level-1 header; all preceding blocks are kept.
removeSect :: [Inline] -> [Block] -> [Block]
removeSect _ [] = []
removeSect ils (blk : rest)
  | Header 1 _ title <- blk, title == ils = dropWhile (not . isHeader1) rest
  | otherwise                             = blk : removeSect ils rest
-- | Extract the body of the level-1 section whose header inlines equal
-- @ils@ (header itself excluded), promoting every header inside it one
-- level up.  Returns @[]@ when the section is absent.
extractSect :: [Inline] -> [Block] -> [Block]
extractSect _ [] = []
extractSect ils (blk : rest) =
  case blk of
    Header 1 _ title
      | title == ils ->
          bottomUp promote $ takeWhile (not . isHeader1) rest
    _ -> extractSect ils rest
  where promote (Header n attr ys) = Header (n - 1) attr ys
        promote other              = other
-- | Is this block a level-1 header?
isHeader1 :: Block -> Bool
isHeader1 blk =
  case blk of
    Header 1 _ _ -> True
    _            -> False
-- | Returns a list of 'dependencies' that have been modified after 'file'.
-- A missing target file is treated as infinitely old, so every dependency
-- counts as modified and the target gets (re)built.
modifiedDependencies :: FilePath -> [FilePath] -> IO [FilePath]
modifiedDependencies file dependencies = do
  fileModTime <- catch (getModificationTime file) $
                 \e -> if isDoesNotExistError e
#if MIN_VERSION_directory(1,2,0)
                          then return (UTCTime (toEnum 0) 0)    -- the minimum ClockTime
#else
                          then return (TOD 0 0)   -- the minimum ClockTime
#endif
                          else ioError e
  depModTimes <- mapM getModificationTime dependencies
  -- keep only dependencies strictly newer than the target
  let modified = zipWith (\dep time -> if time > fileModTime then Just dep else Nothing) dependencies depModTimes
  return $ catMaybes modified
| csrhodes/pandoc | man/make-pandoc-man-pages.hs | gpl-2.0 | 3,906 | 2 | 16 | 857 | 1,264 | 645 | 619 | 82 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.SWF.RespondDecisionTaskCompleted
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Used by deciders to tell the service that the DecisionTask identified by
-- the 'taskToken' has successfully completed. The 'decisions' argument
-- specifies the list of decisions made while processing the task.
--
-- A 'DecisionTaskCompleted' event is added to the workflow history. The
-- 'executionContext' specified is attached to the event in the workflow
-- execution history.
--
-- __Access Control__
--
-- If an IAM policy grants permission to use
-- 'RespondDecisionTaskCompleted', it can express permissions for the list
-- of decisions in the 'decisions' parameter. Each of the decisions has one
-- or more parameters, much like a regular API call. To allow for policies
-- to be as readable as possible, you can express permissions on decisions
-- as if they were actual API calls, including applying conditions to some
-- parameters. For more information, see
-- <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAM to Manage Access to Amazon SWF Workflows>.
--
-- /See:/ <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_RespondDecisionTaskCompleted.html AWS API Reference> for RespondDecisionTaskCompleted.
module Network.AWS.SWF.RespondDecisionTaskCompleted
(
-- * Creating a Request
respondDecisionTaskCompleted
, RespondDecisionTaskCompleted
-- * Request Lenses
, rdtcDecisions
, rdtcExecutionContext
, rdtcTaskToken
-- * Destructuring the Response
, respondDecisionTaskCompletedResponse
, RespondDecisionTaskCompletedResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.SWF.Types
import Network.AWS.SWF.Types.Product
-- | /See:/ 'respondDecisionTaskCompleted' smart constructor.
data RespondDecisionTaskCompleted = RespondDecisionTaskCompleted'
    { _rdtcDecisions        :: !(Maybe [Decision]) -- ^ decisions made while processing the task (optional)
    , _rdtcExecutionContext :: !(Maybe Text) -- ^ user-defined context to attach to the execution (optional)
    , _rdtcTaskToken        :: !Text -- ^ opaque token identifying the decision task (required)
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RespondDecisionTaskCompleted' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rdtcDecisions'
--
-- * 'rdtcExecutionContext'
--
-- * 'rdtcTaskToken'
respondDecisionTaskCompleted
    :: Text -- ^ 'rdtcTaskToken'
    -> RespondDecisionTaskCompleted
-- Only the task token is required; the decision list and execution context
-- start out as 'Nothing' and can be set through the lenses.
respondDecisionTaskCompleted pTaskToken_ =
    RespondDecisionTaskCompleted'
    { _rdtcDecisions = Nothing
    , _rdtcExecutionContext = Nothing
    , _rdtcTaskToken = pTaskToken_
    }
-- | The list of decisions (possibly empty) made by the decider while
-- processing this decision task. See the docs for the decision structure
-- for details.
--
-- ('_Default' presents the underlying @Maybe [Decision]@ field as a plain
-- list — amazonka's convention for optional collections.)
rdtcDecisions :: Lens' RespondDecisionTaskCompleted [Decision]
rdtcDecisions = lens _rdtcDecisions (\ s a -> s{_rdtcDecisions = a}) . _Default . _Coerce;

-- | User defined context to add to workflow execution.
rdtcExecutionContext :: Lens' RespondDecisionTaskCompleted (Maybe Text)
rdtcExecutionContext = lens _rdtcExecutionContext (\ s a -> s{_rdtcExecutionContext = a});

-- | The 'taskToken' from the DecisionTask.
--
-- 'taskToken' is generated by the service and should be treated as an
-- opaque value. If the task is passed to another process, its 'taskToken'
-- must also be passed. This enables it to provide its progress and respond
-- with results.
rdtcTaskToken :: Lens' RespondDecisionTaskCompleted Text
rdtcTaskToken = lens _rdtcTaskToken (\ s a -> s{_rdtcTaskToken = a});
-- A JSON-RPC style call: a successful response has an empty (null) body,
-- hence 'receiveNull'.
instance AWSRequest RespondDecisionTaskCompleted
         where
        type Rs RespondDecisionTaskCompleted =
             RespondDecisionTaskCompletedResponse
        request = postJSON sWF
        response
          = receiveNull RespondDecisionTaskCompletedResponse'

-- Fixed headers selecting the SWF JSON-1.0 target operation.
instance ToHeaders RespondDecisionTaskCompleted where
        toHeaders
          = const
              (mconcat
                 ["X-Amz-Target" =#
                    ("SimpleWorkflowService.RespondDecisionTaskCompleted"
                       :: ByteString),
                  "Content-Type" =#
                    ("application/x-amz-json-1.0" :: ByteString)])

-- Optional fields are omitted from the JSON object when 'Nothing'.
instance ToJSON RespondDecisionTaskCompleted where
        toJSON RespondDecisionTaskCompleted'{..}
          = object
              (catMaybes
                 [("decisions" .=) <$> _rdtcDecisions,
                  ("executionContext" .=) <$> _rdtcExecutionContext,
                  Just ("taskToken" .= _rdtcTaskToken)])

-- JSON-RPC requests always go to the service root.
instance ToPath RespondDecisionTaskCompleted where
        toPath = const "/"

-- No query-string parameters are used.
instance ToQuery RespondDecisionTaskCompleted where
        toQuery = const mempty
-- | /See:/ 'respondDecisionTaskCompletedResponse' smart constructor.
-- A nullary response type: the API call returns no payload.
data RespondDecisionTaskCompletedResponse =
    RespondDecisionTaskCompletedResponse'
    deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'RespondDecisionTaskCompletedResponse' with the minimum fields required to make a request.
--
respondDecisionTaskCompletedResponse
    :: RespondDecisionTaskCompletedResponse
respondDecisionTaskCompletedResponse = RespondDecisionTaskCompletedResponse'
{- |
Module : Verifier.SAW
Copyright : Galois, Inc. 2012-2015
License : BSD3
Maintainer : jhendrix@galois.com
Stability : experimental
Portability : non-portable (language extensions)
-}
{-# LANGUAGE TemplateHaskell #-}
module Verifier.SAW
( module Verifier.SAW.SharedTerm
, module Verifier.SAW.ExternalFormat
, Module
, preludeModule
, scLoadPreludeModule
) where
import Verifier.SAW.SharedTerm
import Verifier.SAW.Prelude
import Verifier.SAW.ExternalFormat
-- The following type-checks the Prelude at compile time, as a sanity check
import Language.Haskell.TH
-- Runs in Template Haskell's 'Q' monad at compile time: builds a fresh
-- 'SharedContext', loads the SAW Prelude into it (so any error there fails
-- the build), and splices in an empty declaration list.
$(runIO (mkSharedContext >>= \sc -> scLoadPreludeModule sc >> return []))
| GaloisInc/saw-script | saw-core/src/Verifier/SAW.hs | bsd-3-clause | 666 | 0 | 13 | 106 | 92 | 56 | 36 | 12 | 0 |
module Development.Abba
( module Development.Abba.Core
, module Development.Abba.DependencyGraph
, module Development.Abba.Types
) where
import Development.Abba.Core
import Development.Abba.DependencyGraph
import Development.Abba.Types
| mgeorgehansen/Abba | Development/Abba.hs | bsd-3-clause | 246 | 0 | 5 | 29 | 47 | 32 | 15 | 7 | 0 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Korean locale) for the ZAP
     "Active Scan Rules - Alpha" extension: declares the id-to-page map and
     the TOC, index, full-text search, and favorites views. -->
<helpset version="2.0" xml:lang="ko-KR">
  <title>Active Scan Rules - Alpha | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
-- helpers for constructing JS objects that can be efficiently inlined as literals
-- no Template Haskell available yet, generated by utils/genBuildObject.hs
{-# LANGUAGE CPP #-}
#ifndef ghcjs_HOST_OS
module GHCJS.Prim.Internal.Build () where
#else
{-# LANGUAGE ForeignFunctionInterface, JavaScriptFFI, GHCForeignImportPrim #-}
module GHCJS.Prim.Internal.Build
( buildArrayI
, buildArrayM
, buildObjectI
, buildObjectM
, buildArrayI1
, buildArrayI2
, buildArrayI3
, buildArrayI4
, buildArrayI5
, buildArrayI6
, buildArrayI7
, buildArrayI8
, buildArrayI9
, buildArrayI10
, buildArrayI11
, buildArrayI12
, buildArrayI13
, buildArrayI14
, buildArrayI15
, buildArrayI16
, buildArrayI17
, buildArrayI18
, buildArrayI19
, buildArrayI20
, buildArrayI21
, buildArrayI22
, buildArrayI23
, buildArrayI24
, buildArrayI25
, buildArrayI26
, buildArrayI27
, buildArrayI28
, buildArrayI29
, buildArrayI30
, buildArrayI31
, buildArrayI32
, buildArrayM1
, buildArrayM2
, buildArrayM3
, buildArrayM4
, buildArrayM5
, buildArrayM6
, buildArrayM7
, buildArrayM8
, buildArrayM9
, buildArrayM10
, buildArrayM11
, buildArrayM12
, buildArrayM13
, buildArrayM14
, buildArrayM15
, buildArrayM16
, buildArrayM17
, buildArrayM18
, buildArrayM19
, buildArrayM20
, buildArrayM21
, buildArrayM22
, buildArrayM23
, buildArrayM24
, buildArrayM25
, buildArrayM26
, buildArrayM27
, buildArrayM28
, buildArrayM29
, buildArrayM30
, buildArrayM31
, buildArrayM32
, buildObjectI1
, buildObjectI2
, buildObjectI3
, buildObjectI4
, buildObjectI5
, buildObjectI6
, buildObjectI7
, buildObjectI8
, buildObjectI9
, buildObjectI10
, buildObjectI11
, buildObjectI12
, buildObjectI13
, buildObjectI14
, buildObjectI15
, buildObjectI16
, buildObjectI17
, buildObjectI18
, buildObjectI19
, buildObjectI20
, buildObjectI21
, buildObjectI22
, buildObjectI23
, buildObjectI24
, buildObjectI25
, buildObjectI26
, buildObjectI27
, buildObjectI28
, buildObjectI29
, buildObjectI30
, buildObjectI31
, buildObjectI32
, buildObjectM1
, buildObjectM2
, buildObjectM3
, buildObjectM4
, buildObjectM5
, buildObjectM6
, buildObjectM7
, buildObjectM8
, buildObjectM9
, buildObjectM10
, buildObjectM11
, buildObjectM12
, buildObjectM13
, buildObjectM14
, buildObjectM15
, buildObjectM16
, buildObjectM17
, buildObjectM18
, buildObjectM19
, buildObjectM20
, buildObjectM21
, buildObjectM22
, buildObjectM23
, buildObjectM24
, buildObjectM25
, buildObjectM26
, buildObjectM27
, buildObjectM28
, buildObjectM29
, buildObjectM30
, buildObjectM31
, buildObjectM32
) where
import GHCJS.Prim
import GHC.Prim
import Unsafe.Coerce
import System.IO.Unsafe
-- Readability aliases: everything crossing the JavaScript FFI boundary
-- is a 'JSRef'; the distinct names only record the intended role of
-- each argument in the signatures below.
type O = JSRef -- object
type K = JSRef -- key (object property name)
type V = JSRef -- value (object property value)
type J = JSRef -- some JS value
type A = JSRef -- array
-- | Force every component of every pair in the list to WHNF, then
-- return the list unchanged.  Used to make sure key\/value thunks are
-- evaluated before they are handed to the FFI builders.
seqTupList :: [(a,b)] -> [(a,b)]
seqTupList xs = forcePairs xs `seq` xs
  where
    -- Walk the spine, seq-ing both halves of each tuple as we go.
    forcePairs []         = ()
    forcePairs ((a,b):ys) = a `seq` b `seq` forcePairs ys
-- Shared empty-array literal used as the rewrite target for the
-- empty-list rule below.
-- NOTE(review): this is a CAF, so every rewritten @buildArrayI []@
-- shares ONE JS array object — safe only if callers never mutate the
-- result; confirm against the consumers of these builders.
foreign import javascript unsafe "$r = [];" js_emptyArrayI :: A
-- | Pure variant: build a JS array from a list of values.  The RULES
-- below rewrite statically-known list literals (up to 32 elements)
-- into direct array-literal FFI calls, skipping the runtime traversal.
buildArrayI :: [J] -> A
-- Relies on 'unsafePerformIO'; the INLINE phase annotation keeps the
-- definition intact until the rewrite rules have had a chance to fire.
buildArrayI xs = unsafePerformIO (toJSArray xs)
{-# INLINE [1] buildArrayI #-}
{-# RULES "buildArrayI/empty" buildArrayI [] = js_emptyArrayI #-}
{-# RULES "buildArrayI/buildArrayI1" forall x1. buildArrayI [x1] = buildArrayI1 x1 #-}
{-# RULES "buildArrayI/buildArrayI2" forall x1 x2. buildArrayI [x1,x2] = buildArrayI2 x1 x2 #-}
{-# RULES "buildArrayI/buildArrayI3" forall x1 x2 x3. buildArrayI [x1,x2,x3] = buildArrayI3 x1 x2 x3 #-}
{-# RULES "buildArrayI/buildArrayI4" forall x1 x2 x3 x4. buildArrayI [x1,x2,x3,x4] = buildArrayI4 x1 x2 x3 x4 #-}
{-# RULES "buildArrayI/buildArrayI5" forall x1 x2 x3 x4 x5. buildArrayI [x1,x2,x3,x4,x5] = buildArrayI5 x1 x2 x3 x4 x5 #-}
{-# RULES "buildArrayI/buildArrayI6" forall x1 x2 x3 x4 x5 x6. buildArrayI [x1,x2,x3,x4,x5,x6] = buildArrayI6 x1 x2 x3 x4 x5 x6 #-}
{-# RULES "buildArrayI/buildArrayI7" forall x1 x2 x3 x4 x5 x6 x7. buildArrayI [x1,x2,x3,x4,x5,x6,x7] = buildArrayI7 x1 x2 x3 x4 x5 x6 x7 #-}
{-# RULES "buildArrayI/buildArrayI8" forall x1 x2 x3 x4 x5 x6 x7 x8. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8] = buildArrayI8 x1 x2 x3 x4 x5 x6 x7 x8 #-}
{-# RULES "buildArrayI/buildArrayI9" forall x1 x2 x3 x4 x5 x6 x7 x8 x9. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9] = buildArrayI9 x1 x2 x3 x4 x5 x6 x7 x8 x9 #-}
{-# RULES "buildArrayI/buildArrayI10" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10] = buildArrayI10 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 #-}
{-# RULES "buildArrayI/buildArrayI11" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11] = buildArrayI11 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 #-}
{-# RULES "buildArrayI/buildArrayI12" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12] = buildArrayI12 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 #-}
{-# RULES "buildArrayI/buildArrayI13" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13] = buildArrayI13 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 #-}
{-# RULES "buildArrayI/buildArrayI14" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14] = buildArrayI14 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 #-}
{-# RULES "buildArrayI/buildArrayI15" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15] = buildArrayI15 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 #-}
{-# RULES "buildArrayI/buildArrayI16" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16] = buildArrayI16 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 #-}
{-# RULES "buildArrayI/buildArrayI17" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17] = buildArrayI17 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 #-}
{-# RULES "buildArrayI/buildArrayI18" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18] = buildArrayI18 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 #-}
{-# RULES "buildArrayI/buildArrayI19" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19] = buildArrayI19 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 #-}
{-# RULES "buildArrayI/buildArrayI20" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20] = buildArrayI20 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 #-}
{-# RULES "buildArrayI/buildArrayI21" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21] = buildArrayI21 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 #-}
{-# RULES "buildArrayI/buildArrayI22" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22] = buildArrayI22 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 #-}
{-# RULES "buildArrayI/buildArrayI23" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23] = buildArrayI23 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 #-}
{-# RULES "buildArrayI/buildArrayI24" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24] = buildArrayI24 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 #-}
{-# RULES "buildArrayI/buildArrayI25" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25] = buildArrayI25 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 #-}
{-# RULES "buildArrayI/buildArrayI26" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26] = buildArrayI26 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 #-}
{-# RULES "buildArrayI/buildArrayI27" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27] = buildArrayI27 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 #-}
{-# RULES "buildArrayI/buildArrayI28" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28] = buildArrayI28 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 #-}
{-# RULES "buildArrayI/buildArrayI29" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28,x29] = buildArrayI29 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 #-}
{-# RULES "buildArrayI/buildArrayI30" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28,x29,x30] = buildArrayI30 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 #-}
{-# RULES "buildArrayI/buildArrayI31" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28,x29,x30,x31] = buildArrayI31 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 #-}
{-# RULES "buildArrayI/buildArrayI32" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 x32. buildArrayI [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28,x29,x30,x31,x32] = buildArrayI32 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 x32 #-}
-- | Build a 1-element JS array as an inline literal; rewrite target of
-- the "buildArrayI/buildArrayI1" rule above.
buildArrayI1 :: J -> A
buildArrayI1 x1 =
  js_buildArrayI1 x1
{-# INLINE buildArrayI1 #-}
foreign import javascript unsafe "[$1]"
  js_buildArrayI1 :: J -> A
-- | Build a 2-element JS array as an inline literal; rewrite target of
-- the "buildArrayI/buildArrayI2" rule above.
buildArrayI2 :: J -> J -> A
buildArrayI2 x1 x2 =
  js_buildArrayI2 x1 x2
{-# INLINE buildArrayI2 #-}
foreign import javascript unsafe "[$1,$2]"
  js_buildArrayI2 :: J -> J -> A
-- | Build a 3-element JS array as an inline literal; rewrite target of
-- the "buildArrayI/buildArrayI3" rule above.
buildArrayI3 :: J -> J -> J -> A
buildArrayI3 x1 x2 x3 =
  js_buildArrayI3 x1 x2 x3
{-# INLINE buildArrayI3 #-}
foreign import javascript unsafe "[$1,$2,$3]"
  js_buildArrayI3 :: J -> J -> J -> A
-- | Build a 4-element JS array as an inline literal; rewrite target of
-- the "buildArrayI/buildArrayI4" rule above.  (buildArrayI5..32 below
-- follow the identical generated pattern.)
buildArrayI4 :: J -> J -> J -> J -> A
buildArrayI4 x1 x2 x3 x4 =
  js_buildArrayI4 x1 x2 x3 x4
{-# INLINE buildArrayI4 #-}
foreign import javascript unsafe "[$1,$2,$3,$4]"
  js_buildArrayI4 :: J -> J -> J -> J -> A
buildArrayI5 :: J -> J -> J -> J -> J -> A
buildArrayI5 x1 x2 x3 x4 x5 =
js_buildArrayI5 x1 x2 x3 x4 x5
{-# INLINE buildArrayI5 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5]"
js_buildArrayI5 :: J -> J -> J -> J -> J -> A
buildArrayI6 :: J -> J -> J -> J -> J -> J -> A
buildArrayI6 x1 x2 x3 x4 x5 x6 =
js_buildArrayI6 x1 x2 x3 x4 x5 x6
{-# INLINE buildArrayI6 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6]"
js_buildArrayI6 :: J -> J -> J -> J -> J -> J -> A
buildArrayI7 :: J -> J -> J -> J -> J -> J -> J -> A
buildArrayI7 x1 x2 x3 x4 x5 x6 x7 =
js_buildArrayI7 x1 x2 x3 x4 x5 x6 x7
{-# INLINE buildArrayI7 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7]"
js_buildArrayI7 :: J -> J -> J -> J -> J -> J -> J -> A
buildArrayI8 :: J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI8 x1 x2 x3 x4 x5 x6 x7 x8 =
js_buildArrayI8 x1 x2 x3 x4 x5 x6 x7 x8
{-# INLINE buildArrayI8 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8]"
js_buildArrayI8 :: J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI9 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI9 x1 x2 x3 x4 x5 x6 x7 x8 x9 =
js_buildArrayI9 x1 x2 x3 x4 x5 x6 x7 x8 x9
{-# INLINE buildArrayI9 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9]"
js_buildArrayI9 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI10 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI10 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 =
js_buildArrayI10 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10
{-# INLINE buildArrayI10 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10]"
js_buildArrayI10 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI11 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI11 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 =
js_buildArrayI11 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11
{-# INLINE buildArrayI11 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11]"
js_buildArrayI11 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI12 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI12 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 =
js_buildArrayI12 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12
{-# INLINE buildArrayI12 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12]"
js_buildArrayI12 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI13 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI13 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 =
js_buildArrayI13 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13
{-# INLINE buildArrayI13 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13]"
js_buildArrayI13 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI14 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI14 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 =
js_buildArrayI14 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14
{-# INLINE buildArrayI14 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14]"
js_buildArrayI14 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI15 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI15 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 =
js_buildArrayI15 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15
{-# INLINE buildArrayI15 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15]"
js_buildArrayI15 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI16 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI16 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 =
js_buildArrayI16 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16
{-# INLINE buildArrayI16 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16]"
js_buildArrayI16 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI17 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI17 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 =
js_buildArrayI17 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17
{-# INLINE buildArrayI17 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17]"
js_buildArrayI17 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI18 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI18 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 =
js_buildArrayI18 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18
{-# INLINE buildArrayI18 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18]"
js_buildArrayI18 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI19 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI19 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 =
js_buildArrayI19 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19
{-# INLINE buildArrayI19 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19]"
js_buildArrayI19 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI20 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI20 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 =
js_buildArrayI20 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20
{-# INLINE buildArrayI20 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20]"
js_buildArrayI20 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI21 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI21 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 =
js_buildArrayI21 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21
{-# INLINE buildArrayI21 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21]"
js_buildArrayI21 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI22 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI22 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 =
js_buildArrayI22 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22
{-# INLINE buildArrayI22 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22]"
js_buildArrayI22 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI23 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI23 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 =
js_buildArrayI23 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23
{-# INLINE buildArrayI23 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23]"
js_buildArrayI23 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI24 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI24 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 =
js_buildArrayI24 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24
{-# INLINE buildArrayI24 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24]"
js_buildArrayI24 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI25 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI25 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 =
js_buildArrayI25 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25
{-# INLINE buildArrayI25 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25]"
js_buildArrayI25 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI26 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI26 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 =
js_buildArrayI26 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26
{-# INLINE buildArrayI26 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26]"
js_buildArrayI26 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI27 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI27 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 =
js_buildArrayI27 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27
{-# INLINE buildArrayI27 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27]"
js_buildArrayI27 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI28 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI28 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 =
js_buildArrayI28 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28
{-# INLINE buildArrayI28 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28]"
js_buildArrayI28 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI29 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI29 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 =
js_buildArrayI29 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29
{-# INLINE buildArrayI29 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29]"
js_buildArrayI29 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI30 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI30 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 =
js_buildArrayI30 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30
{-# INLINE buildArrayI30 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30]"
js_buildArrayI30 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI31 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI31 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 =
js_buildArrayI31 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31
{-# INLINE buildArrayI31 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31]"
js_buildArrayI31 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI32 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
buildArrayI32 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 x32 =
js_buildArrayI32 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 x32
{-# INLINE buildArrayI32 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32]"
js_buildArrayI32 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> A
-- Empty-array builder for the empty-list rule below.  Unlike the pure
-- variant's CAF, this runs in IO, so a fresh array is allocated on
-- every call.
foreign import javascript unsafe "$r = [];" js_emptyArrayM :: IO A
-- | IO variant of 'buildArrayI': build a JS array from a list of
-- values.  The RULES below rewrite statically-known list literals
-- (up to 32 elements) into direct array-literal FFI calls.
buildArrayM :: [J] -> IO A
-- Kept eta-expanded so the rewrite rules can match the application.
buildArrayM xs = toJSArray xs
{-# INLINE [1] buildArrayM #-}
{-# RULES "buildArrayM/empty" buildArrayM [] = js_emptyArrayM #-}
{-# RULES "buildArrayM/buildArrayM1" forall x1. buildArrayM [x1] = buildArrayM1 x1 #-}
{-# RULES "buildArrayM/buildArrayM2" forall x1 x2. buildArrayM [x1,x2] = buildArrayM2 x1 x2 #-}
{-# RULES "buildArrayM/buildArrayM3" forall x1 x2 x3. buildArrayM [x1,x2,x3] = buildArrayM3 x1 x2 x3 #-}
{-# RULES "buildArrayM/buildArrayM4" forall x1 x2 x3 x4. buildArrayM [x1,x2,x3,x4] = buildArrayM4 x1 x2 x3 x4 #-}
{-# RULES "buildArrayM/buildArrayM5" forall x1 x2 x3 x4 x5. buildArrayM [x1,x2,x3,x4,x5] = buildArrayM5 x1 x2 x3 x4 x5 #-}
{-# RULES "buildArrayM/buildArrayM6" forall x1 x2 x3 x4 x5 x6. buildArrayM [x1,x2,x3,x4,x5,x6] = buildArrayM6 x1 x2 x3 x4 x5 x6 #-}
{-# RULES "buildArrayM/buildArrayM7" forall x1 x2 x3 x4 x5 x6 x7. buildArrayM [x1,x2,x3,x4,x5,x6,x7] = buildArrayM7 x1 x2 x3 x4 x5 x6 x7 #-}
{-# RULES "buildArrayM/buildArrayM8" forall x1 x2 x3 x4 x5 x6 x7 x8. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8] = buildArrayM8 x1 x2 x3 x4 x5 x6 x7 x8 #-}
{-# RULES "buildArrayM/buildArrayM9" forall x1 x2 x3 x4 x5 x6 x7 x8 x9. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9] = buildArrayM9 x1 x2 x3 x4 x5 x6 x7 x8 x9 #-}
{-# RULES "buildArrayM/buildArrayM10" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10] = buildArrayM10 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 #-}
{-# RULES "buildArrayM/buildArrayM11" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11] = buildArrayM11 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 #-}
{-# RULES "buildArrayM/buildArrayM12" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12] = buildArrayM12 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 #-}
{-# RULES "buildArrayM/buildArrayM13" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13] = buildArrayM13 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 #-}
{-# RULES "buildArrayM/buildArrayM14" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14] = buildArrayM14 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 #-}
{-# RULES "buildArrayM/buildArrayM15" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15] = buildArrayM15 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 #-}
{-# RULES "buildArrayM/buildArrayM16" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16] = buildArrayM16 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 #-}
{-# RULES "buildArrayM/buildArrayM17" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17] = buildArrayM17 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 #-}
{-# RULES "buildArrayM/buildArrayM18" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18] = buildArrayM18 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 #-}
{-# RULES "buildArrayM/buildArrayM19" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19] = buildArrayM19 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 #-}
{-# RULES "buildArrayM/buildArrayM20" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20] = buildArrayM20 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 #-}
-- Rewrite rules (arities 21..32; 1..20 precede this chunk): a call of
-- 'buildArrayM' on a statically-known list literal of N elements is
-- rewritten to the fixed-arity 'buildArrayMN' wrapper below, so no
-- Haskell list is ever built at runtime for known-arity call sites.
-- NOTE(review): machine-generated — the rule LHSs match on exact surface
-- syntax, so these lines and the wrapper names must not be edited by hand.
{-# RULES "buildArrayM/buildArrayM21" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21] = buildArrayM21 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 #-}
{-# RULES "buildArrayM/buildArrayM22" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22] = buildArrayM22 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 #-}
{-# RULES "buildArrayM/buildArrayM23" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23] = buildArrayM23 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 #-}
{-# RULES "buildArrayM/buildArrayM24" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24] = buildArrayM24 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 #-}
{-# RULES "buildArrayM/buildArrayM25" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25] = buildArrayM25 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 #-}
{-# RULES "buildArrayM/buildArrayM26" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26] = buildArrayM26 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 #-}
{-# RULES "buildArrayM/buildArrayM27" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27] = buildArrayM27 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 #-}
{-# RULES "buildArrayM/buildArrayM28" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28] = buildArrayM28 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 #-}
{-# RULES "buildArrayM/buildArrayM29" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28,x29] = buildArrayM29 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 #-}
{-# RULES "buildArrayM/buildArrayM30" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28,x29,x30] = buildArrayM30 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 #-}
{-# RULES "buildArrayM/buildArrayM31" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28,x29,x30,x31] = buildArrayM31 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 #-}
{-# RULES "buildArrayM/buildArrayM32" forall x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 x32. buildArrayM [x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,x24,x25,x26,x27,x28,x29,x30,x31,x32] = buildArrayM32 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 x32 #-}
-- Fixed-arity JS array constructors (machine-generated, N = 1..32).
-- Each 'buildArrayMN' simply forwards its N 'J' arguments to an unsafe
-- JavaScript FFI import whose body is the array literal [$1,...,$N],
-- returning the resulting array ('A') in IO. The wrappers are INLINE so
-- every use reduces to the bare FFI call; they exist as named bindings
-- only so the 'buildArrayM' RULES above have a target.
-- NOTE(review): GHCJS-only — 'foreign import javascript' does not
-- compile on native GHC backends.
buildArrayM1 :: J -> IO A
buildArrayM1 x1 =
  js_buildArrayM1 x1
{-# INLINE buildArrayM1 #-}
foreign import javascript unsafe "[$1]"
  js_buildArrayM1 :: J -> IO A
buildArrayM2 :: J -> J -> IO A
buildArrayM2 x1 x2 =
  js_buildArrayM2 x1 x2
{-# INLINE buildArrayM2 #-}
foreign import javascript unsafe "[$1,$2]"
  js_buildArrayM2 :: J -> J -> IO A
buildArrayM3 :: J -> J -> J -> IO A
buildArrayM3 x1 x2 x3 =
  js_buildArrayM3 x1 x2 x3
{-# INLINE buildArrayM3 #-}
foreign import javascript unsafe "[$1,$2,$3]"
  js_buildArrayM3 :: J -> J -> J -> IO A
buildArrayM4 :: J -> J -> J -> J -> IO A
buildArrayM4 x1 x2 x3 x4 =
  js_buildArrayM4 x1 x2 x3 x4
{-# INLINE buildArrayM4 #-}
foreign import javascript unsafe "[$1,$2,$3,$4]"
  js_buildArrayM4 :: J -> J -> J -> J -> IO A
buildArrayM5 :: J -> J -> J -> J -> J -> IO A
buildArrayM5 x1 x2 x3 x4 x5 =
  js_buildArrayM5 x1 x2 x3 x4 x5
{-# INLINE buildArrayM5 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5]"
  js_buildArrayM5 :: J -> J -> J -> J -> J -> IO A
buildArrayM6 :: J -> J -> J -> J -> J -> J -> IO A
buildArrayM6 x1 x2 x3 x4 x5 x6 =
  js_buildArrayM6 x1 x2 x3 x4 x5 x6
{-# INLINE buildArrayM6 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6]"
  js_buildArrayM6 :: J -> J -> J -> J -> J -> J -> IO A
buildArrayM7 :: J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM7 x1 x2 x3 x4 x5 x6 x7 =
  js_buildArrayM7 x1 x2 x3 x4 x5 x6 x7
{-# INLINE buildArrayM7 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7]"
  js_buildArrayM7 :: J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM8 :: J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM8 x1 x2 x3 x4 x5 x6 x7 x8 =
  js_buildArrayM8 x1 x2 x3 x4 x5 x6 x7 x8
{-# INLINE buildArrayM8 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8]"
  js_buildArrayM8 :: J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM9 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM9 x1 x2 x3 x4 x5 x6 x7 x8 x9 =
  js_buildArrayM9 x1 x2 x3 x4 x5 x6 x7 x8 x9
{-# INLINE buildArrayM9 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9]"
  js_buildArrayM9 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM10 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM10 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 =
  js_buildArrayM10 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10
{-# INLINE buildArrayM10 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10]"
  js_buildArrayM10 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM11 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM11 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 =
  js_buildArrayM11 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11
{-# INLINE buildArrayM11 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11]"
  js_buildArrayM11 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM12 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM12 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 =
  js_buildArrayM12 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12
{-# INLINE buildArrayM12 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12]"
  js_buildArrayM12 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM13 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM13 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 =
  js_buildArrayM13 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13
{-# INLINE buildArrayM13 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13]"
  js_buildArrayM13 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM14 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM14 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 =
  js_buildArrayM14 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14
{-# INLINE buildArrayM14 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14]"
  js_buildArrayM14 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM15 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM15 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 =
  js_buildArrayM15 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15
{-# INLINE buildArrayM15 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15]"
  js_buildArrayM15 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM16 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM16 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 =
  js_buildArrayM16 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16
{-# INLINE buildArrayM16 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16]"
  js_buildArrayM16 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM17 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM17 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 =
  js_buildArrayM17 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17
{-# INLINE buildArrayM17 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17]"
  js_buildArrayM17 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM18 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM18 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 =
  js_buildArrayM18 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18
{-# INLINE buildArrayM18 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18]"
  js_buildArrayM18 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM19 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM19 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 =
  js_buildArrayM19 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19
{-# INLINE buildArrayM19 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19]"
  js_buildArrayM19 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM20 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM20 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 =
  js_buildArrayM20 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20
{-# INLINE buildArrayM20 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20]"
  js_buildArrayM20 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM21 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM21 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 =
  js_buildArrayM21 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21
{-# INLINE buildArrayM21 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21]"
  js_buildArrayM21 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM22 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM22 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 =
  js_buildArrayM22 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22
{-# INLINE buildArrayM22 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22]"
  js_buildArrayM22 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM23 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM23 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 =
  js_buildArrayM23 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23
{-# INLINE buildArrayM23 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23]"
  js_buildArrayM23 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM24 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM24 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 =
  js_buildArrayM24 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24
{-# INLINE buildArrayM24 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24]"
  js_buildArrayM24 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM25 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM25 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 =
  js_buildArrayM25 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25
{-# INLINE buildArrayM25 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25]"
  js_buildArrayM25 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM26 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM26 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 =
  js_buildArrayM26 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26
{-# INLINE buildArrayM26 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26]"
  js_buildArrayM26 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM27 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM27 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 =
  js_buildArrayM27 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27
{-# INLINE buildArrayM27 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27]"
  js_buildArrayM27 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM28 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM28 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 =
  js_buildArrayM28 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28
{-# INLINE buildArrayM28 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28]"
  js_buildArrayM28 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM29 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM29 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 =
  js_buildArrayM29 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29
{-# INLINE buildArrayM29 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29]"
  js_buildArrayM29 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM30 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM30 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 =
  js_buildArrayM30 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30
{-# INLINE buildArrayM30 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30]"
  js_buildArrayM30 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM31 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM31 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 =
  js_buildArrayM31 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31
{-# INLINE buildArrayM31 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31]"
  js_buildArrayM31 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM32 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
buildArrayM32 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 x32 =
  js_buildArrayM32 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 x16 x17 x18 x19 x20 x21 x22 x23 x24 x25 x26 x27 x28 x29 x30 x31 x32
{-# INLINE buildArrayM32 #-}
foreign import javascript unsafe "[$1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32]"
  js_buildArrayM32 :: J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> J -> IO A
-- FFI: runtime helper that walks a (coerced) Haskell list of key/value
-- tuples and assembles a JS object from it.
foreign import javascript unsafe "h$buildObjectFromTupList($1)"
  js_buildObjectFromTupListI :: Any -> O
-- FFI: a fresh empty JS object; used by the "buildObjectI/empty" rule.
foreign import javascript unsafe "$r = {};" js_emptyObjectI :: O
-- | Build a JS object ('O') from an association list of keys and values.
-- The list and its tuples are forced first via 'seqTupList' (defined
-- elsewhere in this module -- presumably so no thunks cross the FFI;
-- TODO confirm), then coerced to 'Any' for the generic runtime helper.
-- Inlining is delayed to phase 1 so the literal-list RULES below can
-- first rewrite known-arity call sites to the fixed-arity wrappers.
buildObjectI :: [(K,V)] -> O
buildObjectI xs = js_buildObjectFromTupListI . unsafeCoerce . seqTupList $ xs
{-# INLINE [1] buildObjectI #-}
-- Rewrite rules for 'buildObjectI': a call on a statically-known literal
-- list of N key/value pairs is rewritten to the fixed-arity
-- 'buildObjectIN' wrapper (N = 1..32), and the empty list becomes the
-- empty-object FFI import, bypassing list/tuple construction entirely.
-- NOTE(review): machine-generated — the rule LHSs match exact surface
-- syntax, so these lines and the wrapper names must not be edited by hand.
{-# RULES "buildObjectI/empty" buildObjectI [] = js_emptyObjectI #-}
{-# RULES "buildObjectI/buildObjectI1" forall k1 v1. buildObjectI [(k1,v1)] = buildObjectI1 k1 v1 #-}
{-# RULES "buildObjectI/buildObjectI2" forall k1 v1 k2 v2. buildObjectI [(k1,v1),(k2,v2)] = buildObjectI2 k1 v1 k2 v2 #-}
{-# RULES "buildObjectI/buildObjectI3" forall k1 v1 k2 v2 k3 v3. buildObjectI [(k1,v1),(k2,v2),(k3,v3)] = buildObjectI3 k1 v1 k2 v2 k3 v3 #-}
{-# RULES "buildObjectI/buildObjectI4" forall k1 v1 k2 v2 k3 v3 k4 v4. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4)] = buildObjectI4 k1 v1 k2 v2 k3 v3 k4 v4 #-}
{-# RULES "buildObjectI/buildObjectI5" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5)] = buildObjectI5 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 #-}
{-# RULES "buildObjectI/buildObjectI6" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6)] = buildObjectI6 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 #-}
{-# RULES "buildObjectI/buildObjectI7" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7)] = buildObjectI7 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 #-}
{-# RULES "buildObjectI/buildObjectI8" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8)] = buildObjectI8 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 #-}
{-# RULES "buildObjectI/buildObjectI9" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9)] = buildObjectI9 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 #-}
{-# RULES "buildObjectI/buildObjectI10" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10)] = buildObjectI10 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 #-}
{-# RULES "buildObjectI/buildObjectI11" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11)] = buildObjectI11 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 #-}
{-# RULES "buildObjectI/buildObjectI12" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12)] = buildObjectI12 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 #-}
{-# RULES "buildObjectI/buildObjectI13" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13)] = buildObjectI13 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 #-}
{-# RULES "buildObjectI/buildObjectI14" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14)] = buildObjectI14 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 #-}
{-# RULES "buildObjectI/buildObjectI15" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15)] = buildObjectI15 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 #-}
{-# RULES "buildObjectI/buildObjectI16" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16)] = buildObjectI16 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 #-}
{-# RULES "buildObjectI/buildObjectI17" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17)] = buildObjectI17 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 #-}
{-# RULES "buildObjectI/buildObjectI18" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18)] = buildObjectI18 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 #-}
{-# RULES "buildObjectI/buildObjectI19" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19)] = buildObjectI19 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 #-}
{-# RULES "buildObjectI/buildObjectI20" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20)] = buildObjectI20 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 #-}
{-# RULES "buildObjectI/buildObjectI21" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21)] = buildObjectI21 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 #-}
{-# RULES "buildObjectI/buildObjectI22" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22)] = buildObjectI22 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 #-}
{-# RULES "buildObjectI/buildObjectI23" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23)] = buildObjectI23 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 #-}
{-# RULES "buildObjectI/buildObjectI24" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24)] = buildObjectI24 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 #-}
{-# RULES "buildObjectI/buildObjectI25" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25)] = buildObjectI25 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 #-}
{-# RULES "buildObjectI/buildObjectI26" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26)] = buildObjectI26 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 #-}
{-# RULES "buildObjectI/buildObjectI27" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27)] = buildObjectI27 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 #-}
{-# RULES "buildObjectI/buildObjectI28" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28)] = buildObjectI28 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 #-}
{-# RULES "buildObjectI/buildObjectI29" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28),(k29,v29)] = buildObjectI29 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 #-}
{-# RULES "buildObjectI/buildObjectI30" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28),(k29,v29),(k30,v30)] = buildObjectI30 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 #-}
{-# RULES "buildObjectI/buildObjectI31" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28),(k29,v29),(k30,v30),(k31,v31)] = buildObjectI31 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 #-}
{-# RULES "buildObjectI/buildObjectI32" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 k32 v32. buildObjectI [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28),(k29,v29),(k30,v30),(k31,v31),(k32,v32)] = buildObjectI32 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 k32 v32 #-}
-- Generated fixed-arity object builders, arities 1..8.  Each
-- 'buildObjectIN' takes N interleaved key/value arguments and forwards
-- them unchanged to the JavaScript helper h$buildObject, which
-- presumably constructs the object {k1: v1, ..., kN: vN} -- confirm in
-- the GHCJS runtime shims.  The wrappers are kept fully eta-expanded and
-- marked INLINE so that (a) the rewrite rules targeting 'buildObjectI'
-- produce a saturated call, and (b) the wrapper itself disappears,
-- leaving the raw FFI call.
-- NOTE(review): K, V and O are type synonyms/newtypes declared elsewhere
-- in this module; presumably JS string keys, JS values and a JS object.
buildObjectI1 :: K -> V -> O
buildObjectI1 k1 v1 =
  js_buildObjectI1 k1 v1
{-# INLINE buildObjectI1 #-}
foreign import javascript unsafe "h$buildObject($1,$2)"
  js_buildObjectI1 :: K -> V -> O
buildObjectI2 :: K -> V -> K -> V -> O
buildObjectI2 k1 v1 k2 v2 =
  js_buildObjectI2 k1 v1 k2 v2
{-# INLINE buildObjectI2 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4)"
  js_buildObjectI2 :: K -> V -> K -> V -> O
buildObjectI3 :: K -> V -> K -> V -> K -> V -> O
buildObjectI3 k1 v1 k2 v2 k3 v3 =
  js_buildObjectI3 k1 v1 k2 v2 k3 v3
{-# INLINE buildObjectI3 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6)"
  js_buildObjectI3 :: K -> V -> K -> V -> K -> V -> O
buildObjectI4 :: K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI4 k1 v1 k2 v2 k3 v3 k4 v4 =
  js_buildObjectI4 k1 v1 k2 v2 k3 v3 k4 v4
{-# INLINE buildObjectI4 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8)"
  js_buildObjectI4 :: K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI5 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI5 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 =
  js_buildObjectI5 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5
{-# INLINE buildObjectI5 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10)"
  js_buildObjectI5 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI6 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI6 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 =
  js_buildObjectI6 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6
{-# INLINE buildObjectI6 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12)"
  js_buildObjectI6 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI7 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI7 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 =
  js_buildObjectI7 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7
{-# INLINE buildObjectI7 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14)"
  js_buildObjectI7 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI8 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI8 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 =
  js_buildObjectI8 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8
{-# INLINE buildObjectI8 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16)"
  js_buildObjectI8 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
-- Generated fixed-arity object builders, arities 9..16: each wrapper
-- forwards its interleaved key/value arguments to the JavaScript helper
-- h$buildObject via an unchecked ("unsafe") FFI call.  Eta-expanded +
-- INLINE on purpose so the 'buildObjectI' rewrite rules can target the
-- call sites and the wrapper then vanishes.
buildObjectI9 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI9 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 =
  js_buildObjectI9 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9
{-# INLINE buildObjectI9 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18)"
  js_buildObjectI9 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI10 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI10 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 =
  js_buildObjectI10 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10
{-# INLINE buildObjectI10 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20)"
  js_buildObjectI10 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI11 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI11 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 =
  js_buildObjectI11 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11
{-# INLINE buildObjectI11 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22)"
  js_buildObjectI11 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI12 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI12 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 =
  js_buildObjectI12 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12
{-# INLINE buildObjectI12 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24)"
  js_buildObjectI12 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI13 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI13 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 =
  js_buildObjectI13 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13
{-# INLINE buildObjectI13 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26)"
  js_buildObjectI13 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI14 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI14 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 =
  js_buildObjectI14 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14
{-# INLINE buildObjectI14 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28)"
  js_buildObjectI14 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI15 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI15 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 =
  js_buildObjectI15 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15
{-# INLINE buildObjectI15 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30)"
  js_buildObjectI15 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI16 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI16 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 =
  js_buildObjectI16 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16
{-# INLINE buildObjectI16 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32)"
  js_buildObjectI16 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
-- Generated fixed-arity object builders, arities 17..24: each wrapper
-- forwards its interleaved key/value arguments to the JavaScript helper
-- h$buildObject via an unchecked ("unsafe") FFI call.  Eta-expanded +
-- INLINE on purpose so the 'buildObjectI' rewrite rules can target the
-- call sites and the wrapper then vanishes.
buildObjectI17 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI17 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 =
  js_buildObjectI17 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17
{-# INLINE buildObjectI17 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34)"
  js_buildObjectI17 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI18 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI18 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 =
  js_buildObjectI18 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18
{-# INLINE buildObjectI18 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36)"
  js_buildObjectI18 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI19 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI19 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 =
  js_buildObjectI19 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19
{-# INLINE buildObjectI19 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38)"
  js_buildObjectI19 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI20 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI20 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 =
  js_buildObjectI20 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20
{-# INLINE buildObjectI20 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40)"
  js_buildObjectI20 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI21 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI21 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 =
  js_buildObjectI21 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21
{-# INLINE buildObjectI21 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42)"
  js_buildObjectI21 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI22 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI22 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 =
  js_buildObjectI22 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22
{-# INLINE buildObjectI22 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44)"
  js_buildObjectI22 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI23 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI23 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 =
  js_buildObjectI23 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23
{-# INLINE buildObjectI23 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46)"
  js_buildObjectI23 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI24 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI24 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 =
  js_buildObjectI24 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24
{-# INLINE buildObjectI24 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48)"
  js_buildObjectI24 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
-- Generated fixed-arity object builders, arities 25..32 (32 is the
-- largest arity generated; longer literal lists fall through to the
-- list-based path).  Each wrapper forwards its interleaved key/value
-- arguments to the JavaScript helper h$buildObject via an unchecked
-- ("unsafe") FFI call.  Eta-expanded + INLINE on purpose so the
-- 'buildObjectI' rewrite rules can target the call sites and the
-- wrapper then vanishes.
buildObjectI25 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI25 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 =
  js_buildObjectI25 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25
{-# INLINE buildObjectI25 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50)"
  js_buildObjectI25 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI26 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI26 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 =
  js_buildObjectI26 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26
{-# INLINE buildObjectI26 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52)"
  js_buildObjectI26 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI27 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI27 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 =
  js_buildObjectI27 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27
{-# INLINE buildObjectI27 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54)"
  js_buildObjectI27 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI28 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI28 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 =
  js_buildObjectI28 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28
{-# INLINE buildObjectI28 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56)"
  js_buildObjectI28 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI29 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI29 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 =
  js_buildObjectI29 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29
{-# INLINE buildObjectI29 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58)"
  js_buildObjectI29 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI30 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI30 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 =
  js_buildObjectI30 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30
{-# INLINE buildObjectI30 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58,$59,$60)"
  js_buildObjectI30 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI31 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI31 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 =
  js_buildObjectI31 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31
{-# INLINE buildObjectI31 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58,$59,$60,$61,$62)"
  js_buildObjectI31 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI32 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
buildObjectI32 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 k32 v32 =
  js_buildObjectI32 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 k32 v32
{-# INLINE buildObjectI32 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58,$59,$60,$61,$62,$63,$64)"
  js_buildObjectI32 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> O
-- FFI entry points for the general (monadic) path:
-- h$buildObjectFromTupList takes the (coerced) Haskell list of key/value
-- tuples and presumably walks it on the JS side to populate an object --
-- confirm against the GHCJS runtime shims.
foreign import javascript unsafe "h$buildObjectFromTupList($1)"
  js_buildObjectFromTupListM :: Any -> IO O
-- Allocate a fresh, empty JavaScript object ({}).
foreign import javascript unsafe "$r = {};" js_emptyObjectM :: IO O
-- | Monadic object construction from an arbitrary-length list of
-- key/value pairs.  'seqTupList' (defined elsewhere in this module)
-- presumably forces the list spine and tuples before 'unsafeCoerce'
-- erases the type to 'Any' for the FFI helper -- TODO confirm against
-- its definition.  INLINE [1] delays inlining to simplifier phase 1 so
-- that the literal-list rewrite RULES for 'buildObjectM' get a chance
-- to fire first; inlining earlier would destroy the rule's match target.
buildObjectM :: [(K,V)] -> IO O
buildObjectM xs = js_buildObjectFromTupListM . unsafeCoerce . seqTupList $ xs
{-# INLINE [1] buildObjectM #-}
-- Rewrite rules for the monadic builder: a call of 'buildObjectM' on a
-- syntactically literal list of 0..20 pairs is replaced by the empty-
-- object allocator or the fixed-arity monadic builder of matching size,
-- bypassing list construction and the 'unsafeCoerce' in the general
-- path.  The rules rely on 'buildObjectM' not being inlined before
-- phase 1 (see its INLINE [1] pragma) and on the left-hand sides
-- matching the call site exactly.
{-# RULES "buildObjectM/empty" buildObjectM [] = js_emptyObjectM #-}
{-# RULES "buildObjectM/buildObjectM1" forall k1 v1. buildObjectM [(k1,v1)] = buildObjectM1 k1 v1 #-}
{-# RULES "buildObjectM/buildObjectM2" forall k1 v1 k2 v2. buildObjectM [(k1,v1),(k2,v2)] = buildObjectM2 k1 v1 k2 v2 #-}
{-# RULES "buildObjectM/buildObjectM3" forall k1 v1 k2 v2 k3 v3. buildObjectM [(k1,v1),(k2,v2),(k3,v3)] = buildObjectM3 k1 v1 k2 v2 k3 v3 #-}
{-# RULES "buildObjectM/buildObjectM4" forall k1 v1 k2 v2 k3 v3 k4 v4. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4)] = buildObjectM4 k1 v1 k2 v2 k3 v3 k4 v4 #-}
{-# RULES "buildObjectM/buildObjectM5" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5)] = buildObjectM5 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 #-}
{-# RULES "buildObjectM/buildObjectM6" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6)] = buildObjectM6 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 #-}
{-# RULES "buildObjectM/buildObjectM7" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7)] = buildObjectM7 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 #-}
{-# RULES "buildObjectM/buildObjectM8" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8)] = buildObjectM8 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 #-}
{-# RULES "buildObjectM/buildObjectM9" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9)] = buildObjectM9 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 #-}
{-# RULES "buildObjectM/buildObjectM10" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10)] = buildObjectM10 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 #-}
{-# RULES "buildObjectM/buildObjectM11" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11)] = buildObjectM11 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 #-}
{-# RULES "buildObjectM/buildObjectM12" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12)] = buildObjectM12 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 #-}
{-# RULES "buildObjectM/buildObjectM13" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13)] = buildObjectM13 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 #-}
{-# RULES "buildObjectM/buildObjectM14" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14)] = buildObjectM14 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 #-}
{-# RULES "buildObjectM/buildObjectM15" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15)] = buildObjectM15 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 #-}
{-# RULES "buildObjectM/buildObjectM16" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16)] = buildObjectM16 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 #-}
{-# RULES "buildObjectM/buildObjectM17" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17)] = buildObjectM17 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 #-}
{-# RULES "buildObjectM/buildObjectM18" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18)] = buildObjectM18 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 #-}
{-# RULES "buildObjectM/buildObjectM19" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19)] = buildObjectM19 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 #-}
{-# RULES "buildObjectM/buildObjectM20" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20)] = buildObjectM20 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 #-}
{-# RULES "buildObjectM/buildObjectM21" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21)] = buildObjectM21 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 #-}
{-# RULES "buildObjectM/buildObjectM22" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22)] = buildObjectM22 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 #-}
{-# RULES "buildObjectM/buildObjectM23" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23)] = buildObjectM23 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 #-}
{-# RULES "buildObjectM/buildObjectM24" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24)] = buildObjectM24 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 #-}
{-# RULES "buildObjectM/buildObjectM25" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25)] = buildObjectM25 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 #-}
{-# RULES "buildObjectM/buildObjectM26" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26)] = buildObjectM26 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 #-}
{-# RULES "buildObjectM/buildObjectM27" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27)] = buildObjectM27 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 #-}
{-# RULES "buildObjectM/buildObjectM28" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28)] = buildObjectM28 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 #-}
{-# RULES "buildObjectM/buildObjectM29" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28),(k29,v29)] = buildObjectM29 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 #-}
{-# RULES "buildObjectM/buildObjectM30" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28),(k29,v29),(k30,v30)] = buildObjectM30 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 #-}
{-# RULES "buildObjectM/buildObjectM31" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28),(k29,v29),(k30,v30),(k31,v31)] = buildObjectM31 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 #-}
{-# RULES "buildObjectM/buildObjectM32" forall k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 k32 v32. buildObjectM [(k1,v1),(k2,v2),(k3,v3),(k4,v4),(k5,v5),(k6,v6),(k7,v7),(k8,v8),(k9,v9),(k10,v10),(k11,v11),(k12,v12),(k13,v13),(k14,v14),(k15,v15),(k16,v16),(k17,v17),(k18,v18),(k19,v19),(k20,v20),(k21,v21),(k22,v22),(k23,v23),(k24,v24),(k25,v25),(k26,v26),(k27,v27),(k28,v28),(k29,v29),(k30,v30),(k31,v31),(k32,v32)] = buildObjectM32 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 k32 v32 #-}
-- Arity-specialised object builders: 'buildObjectMN' packs N key-value
-- pairs into a fresh JavaScript object by calling the runtime's
-- h$buildObject through an unsafe FFI import.  The INLINE pragmas keep
-- each wrapper transparent so the "buildObjectM/buildObjectMN" rewrite
-- RULES (earlier in this file) can replace a literal association list
-- passed to buildObjectM with a direct call to the matching builder.
-- NOTE(review): generated code — do not restyle; the exact eta-expanded
-- form is presumably what the RULES/inliner rely on.
buildObjectM1 :: K -> V -> IO O
buildObjectM1 k1 v1 =
  js_buildObjectM1 k1 v1
{-# INLINE buildObjectM1 #-}
foreign import javascript unsafe "h$buildObject($1,$2)"
  js_buildObjectM1 :: K -> V -> IO O
buildObjectM2 :: K -> V -> K -> V -> IO O
buildObjectM2 k1 v1 k2 v2 =
  js_buildObjectM2 k1 v1 k2 v2
{-# INLINE buildObjectM2 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4)"
  js_buildObjectM2 :: K -> V -> K -> V -> IO O
buildObjectM3 :: K -> V -> K -> V -> K -> V -> IO O
buildObjectM3 k1 v1 k2 v2 k3 v3 =
  js_buildObjectM3 k1 v1 k2 v2 k3 v3
{-# INLINE buildObjectM3 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6)"
  js_buildObjectM3 :: K -> V -> K -> V -> K -> V -> IO O
buildObjectM4 :: K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM4 k1 v1 k2 v2 k3 v3 k4 v4 =
  js_buildObjectM4 k1 v1 k2 v2 k3 v3 k4 v4
{-# INLINE buildObjectM4 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8)"
  js_buildObjectM4 :: K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM5 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM5 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 =
  js_buildObjectM5 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5
{-# INLINE buildObjectM5 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10)"
  js_buildObjectM5 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
-- Generated builders for 6-10 key-value pairs; same pattern as
-- buildObjectM1: INLINE wrapper over an unsafe h$buildObject FFI call.
buildObjectM6 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM6 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 =
  js_buildObjectM6 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6
{-# INLINE buildObjectM6 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12)"
  js_buildObjectM6 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM7 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM7 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 =
  js_buildObjectM7 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7
{-# INLINE buildObjectM7 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14)"
  js_buildObjectM7 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM8 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM8 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 =
  js_buildObjectM8 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8
{-# INLINE buildObjectM8 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16)"
  js_buildObjectM8 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM9 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM9 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 =
  js_buildObjectM9 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9
{-# INLINE buildObjectM9 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18)"
  js_buildObjectM9 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM10 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM10 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 =
  js_buildObjectM10 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10
{-# INLINE buildObjectM10 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20)"
  js_buildObjectM10 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
-- Generated builders for 11-15 key-value pairs; same pattern as
-- buildObjectM1: INLINE wrapper over an unsafe h$buildObject FFI call.
buildObjectM11 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM11 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 =
  js_buildObjectM11 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11
{-# INLINE buildObjectM11 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22)"
  js_buildObjectM11 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM12 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM12 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 =
  js_buildObjectM12 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12
{-# INLINE buildObjectM12 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24)"
  js_buildObjectM12 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM13 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM13 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 =
  js_buildObjectM13 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13
{-# INLINE buildObjectM13 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26)"
  js_buildObjectM13 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM14 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM14 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 =
  js_buildObjectM14 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14
{-# INLINE buildObjectM14 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28)"
  js_buildObjectM14 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM15 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM15 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 =
  js_buildObjectM15 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15
{-# INLINE buildObjectM15 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30)"
  js_buildObjectM15 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
-- Generated builders for 16-20 key-value pairs; same pattern as
-- buildObjectM1: INLINE wrapper over an unsafe h$buildObject FFI call.
buildObjectM16 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM16 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 =
  js_buildObjectM16 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16
{-# INLINE buildObjectM16 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32)"
  js_buildObjectM16 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM17 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM17 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 =
  js_buildObjectM17 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17
{-# INLINE buildObjectM17 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34)"
  js_buildObjectM17 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM18 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM18 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 =
  js_buildObjectM18 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18
{-# INLINE buildObjectM18 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36)"
  js_buildObjectM18 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM19 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM19 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 =
  js_buildObjectM19 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19
{-# INLINE buildObjectM19 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38)"
  js_buildObjectM19 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM20 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM20 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 =
  js_buildObjectM20 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20
{-# INLINE buildObjectM20 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40)"
  js_buildObjectM20 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
-- Generated builders for 21-25 key-value pairs; same pattern as
-- buildObjectM1: INLINE wrapper over an unsafe h$buildObject FFI call.
buildObjectM21 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM21 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 =
  js_buildObjectM21 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21
{-# INLINE buildObjectM21 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42)"
  js_buildObjectM21 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM22 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM22 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 =
  js_buildObjectM22 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22
{-# INLINE buildObjectM22 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44)"
  js_buildObjectM22 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM23 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM23 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 =
  js_buildObjectM23 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23
{-# INLINE buildObjectM23 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46)"
  js_buildObjectM23 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM24 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM24 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 =
  js_buildObjectM24 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24
{-# INLINE buildObjectM24 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48)"
  js_buildObjectM24 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM25 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM25 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 =
  js_buildObjectM25 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25
{-# INLINE buildObjectM25 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50)"
  js_buildObjectM25 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
-- Generated builders for 26-29 key-value pairs; same pattern as
-- buildObjectM1: INLINE wrapper over an unsafe h$buildObject FFI call.
buildObjectM26 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM26 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 =
  js_buildObjectM26 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26
{-# INLINE buildObjectM26 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52)"
  js_buildObjectM26 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM27 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM27 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 =
  js_buildObjectM27 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27
{-# INLINE buildObjectM27 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54)"
  js_buildObjectM27 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM28 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM28 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 =
  js_buildObjectM28 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28
{-# INLINE buildObjectM28 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56)"
  js_buildObjectM28 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM29 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM29 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 =
  js_buildObjectM29 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29
{-# INLINE buildObjectM29 #-}
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58)"
  js_buildObjectM29 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
-- | Build a JavaScript object from 30 key\/value pairs (generated arity instance).
buildObjectM30 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM30 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 =
  js_buildObjectM30 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30
{-# INLINE buildObjectM30 #-}
-- Raw FFI binding for the 30-pair case (60 positional args).
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58,$59,$60)"
  js_buildObjectM30 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
-- | Build a JavaScript object from 31 key\/value pairs (generated arity instance).
buildObjectM31 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM31 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 =
  js_buildObjectM31 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31
{-# INLINE buildObjectM31 #-}
-- Raw FFI binding for the 31-pair case (62 positional args).
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58,$59,$60,$61,$62)"
  js_buildObjectM31 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
-- | Build a JavaScript object from 32 key\/value pairs (generated arity instance).
buildObjectM32 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
buildObjectM32 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 k32 v32 =
  js_buildObjectM32 k1 v1 k2 v2 k3 v3 k4 v4 k5 v5 k6 v6 k7 v7 k8 v8 k9 v9 k10 v10 k11 v11 k12 v12 k13 v13 k14 v14 k15 v15 k16 v16 k17 v17 k18 v18 k19 v19 k20 v20 k21 v21 k22 v22 k23 v23 k24 v24 k25 v25 k26 v26 k27 v27 k28 v28 k29 v29 k30 v30 k31 v31 k32 v32
{-# INLINE buildObjectM32 #-}
-- Raw FFI binding for the 32-pair case (64 positional args).
foreign import javascript unsafe "h$buildObject($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,$21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35,$36,$37,$38,$39,$40,$41,$42,$43,$44,$45,$46,$47,$48,$49,$50,$51,$52,$53,$54,$55,$56,$57,$58,$59,$60,$61,$62,$63,$64)"
  js_buildObjectM32 :: K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> K -> V -> IO O
#endif
| fizruk/ghcjs | lib/ghcjs-prim/GHCJS/Prim/Internal/Build.hs | mit | 136,167 | 0 | 3 | 30,667 | 17 | 14 | 3 | 2 | 0 |
{-# LANGUAGE CPP, TupleSections #-}
-- |Vectorisation of expressions.
module Vectorise.Exp
( -- * Vectorise right-hand sides of toplevel bindings
vectTopExpr
, vectTopExprs
, vectScalarFun
, vectScalarDFun
)
where
#include "HsVersions.h"
import Vectorise.Type.Type
import Vectorise.Var
import Vectorise.Convert
import Vectorise.Vect
import Vectorise.Env
import Vectorise.Monad
import Vectorise.Builtins
import Vectorise.Utils
import CoreUtils
import MkCore
import CoreSyn
import CoreFVs
import Class
import DataCon
import TyCon
import TcType
import Type
import TypeRep
import Var
import VarEnv
import VarSet
import NameSet
import Id
import BasicTypes( isStrongLoopBreaker )
import Literal
import TysPrim
import Outputable
import FastString
import DynFlags
import Util
import MonadUtils
import Control.Monad
import Data.Maybe
import Data.List
-- Main entry point to vectorise expressions -----------------------------------
-- |Vectorise a polymorphic expression that forms a *non-recursive* binding.
--
-- Return 'Nothing' if the expression is scalar; otherwise, the first component of the result
-- (which is of type 'Bool') indicates whether the expression is parallel (i.e., whether it is
-- tagged as 'VIParr').
--
-- We have got the non-recursive case as a special case as it doesn't require to compute
-- vectorisation information twice.
--
vectTopExpr :: Var -> CoreExpr -> VM (Maybe (Bool, Inline, CoreExpr))
vectTopExpr var expr
  = do
      -- annotate with vectorisation-avoidance info (no parallel vars in scope yet),
      -- then lambda-lift the purely scalar subexpressions
  { exprVI <- encapsulateScalars <=< vectAvoidInfo emptyVarSet . freeVars $ expr
  ; if isVIEncaps exprVI
    then
      return Nothing      -- whole right-hand side is scalar: leave it unvectorised
    else do
    { vExpr <- closedV $
                 inBind var $
                   vectAnnPolyExpr False exprVI
    ; inline <- computeInline exprVI
    ; return $ Just (isVIParr exprVI, inline, vectorised vExpr)
    }
  }
-- Work out the inlining hint for the right-hand side of a top-level binding:
-- dictionary right-hand sides are never inlined, ticks are looked through, and
-- a lambda is inlined with an arity derived from its leading type binders.
computeInline :: CoreExprWithVectInfo -> VM Inline
computeInline aexpr
  = case aexpr of
      ((_, VIDict), _)    -> return DontInline
      (_, AnnTick _ body) -> computeInline body
      (_, AnnLam _ _)     -> Inline <$> polyArity (fst (collectAnnTypeBinders aexpr))
      _                   -> return DontInline
-- |Vectorise a recursive group of top-level polymorphic expressions.
--
-- Return 'Nothing' if the expression group is scalar; otherwise, the first component of the result
-- (which is of type 'Bool') indicates whether the expressions are parallel (i.e., whether they are
-- tagged as 'VIParr').
--
vectTopExprs :: [(Var, CoreExpr)] -> VM (Maybe (Bool, [(Inline, CoreExpr)]))
vectTopExprs binds
  = do
  { exprVIs <- mapM (vectAvoidAndEncapsulate emptyVarSet) exprs
  ; if all isVIEncaps exprVIs
      -- if all bindings are scalar => don't vectorise this group of bindings
    then return Nothing
    else do
    { -- non-scalar bindings need to be vectorised
    ; let areVIParr = any isVIParr exprVIs
      -- NOTE: if any binding in the group is parallel, all members must be re-analysed
      -- with the whole group's binders treated as parallel (second pass below)
    ; revised_exprVIs <- if not areVIParr
                             -- if no binding is parallel => 'exprVIs' is ready for vectorisation
                         then return exprVIs
                             -- if any binding is parallel => recompute the vectorisation info
                         else mapM (vectAvoidAndEncapsulate (mkVarSet vars)) exprs
    ; vExprs <- zipWithM vect vars revised_exprVIs
    ; return $ Just (areVIParr, vExprs)
    }
  }
  where
    (vars, exprs) = unzip binds
    vectAvoidAndEncapsulate pvs = encapsulateScalars <=< vectAvoidInfo pvs . freeVars
    -- vectorise one member of the group; loop breakers are flagged so that
    -- 'vectLam' can insert its recursion-termination test
    vect var exprVI
      = do
        { vExpr <- closedV $
                   inBind var $
                   vectAnnPolyExpr (isStrongLoopBreaker $ idOccInfo var) exprVI
        ; inline <- computeInline exprVI
        ; return (inline, vectorised vExpr)
        }
-- |Vectorise a polymorphic expression annotated with vectorisation information.
--
-- The special case of dictionary functions is currently handled separately. (Would be neater to
-- integrate them, though!)
--
vectAnnPolyExpr :: Bool -> CoreExprWithVectInfo -> VM VExpr
vectAnnPolyExpr loop_breaker (_, AnnTick tickish expr)
    -- traverse through ticks
  = vTick tickish <$> vectAnnPolyExpr loop_breaker expr
vectAnnPolyExpr loop_breaker expr
  | isVIDict expr
    -- special case the right-hand side of dictionary functions
    -- (the lifted component is never used for dictionaries, hence 'undefined')
  = (, undefined) <$> vectDictExpr (deAnnotate expr)
  | otherwise
    -- collect and vectorise type abstractions; then, descent into the body
  = polyAbstract tvs $ \args ->
      mapVect (mkLams $ tvs ++ args) <$> vectFnExpr False loop_breaker mono
  where
    (tvs, mono) = collectAnnTypeBinders expr
-- Encapsulate every purely sequential subexpression of a (potentially) parallel expression into a
-- lambda abstraction over all its free variables followed by the corresponding application to those
-- variables. We can, then, avoid the vectorisation of the ensapsulated subexpressions.
--
-- Preconditions:
--
-- * All free variables and the result type must be /simple/ types.
-- * The expression is sufficiently complex (to warrant special treatment). For now, that is
-- every expression that is not constant and contains at least one operation.
--
--
-- The user has an option to choose between aggressive and minimal vectorisation avoidance. With
-- minimal vectorisation avoidance, we only encapsulate individual scalar operations. With
-- aggressive vectorisation avoidance, we encapsulate subexpression that are as big as possible.
--
encapsulateScalars :: CoreExprWithVectInfo -> VM CoreExprWithVectInfo
encapsulateScalars ce@(_, AnnType _ty)
  = return ce                           -- types carry no computation: nothing to encapsulate
encapsulateScalars ce@((_, VISimple), AnnVar _v)
    -- NB: diverts from the paper: encapsulate scalar variables (including functions)
  = liftSimpleAndCase ce
encapsulateScalars ce@(_, AnnVar _v)
  = return ce                           -- non-scalar variable: leave untouched
encapsulateScalars ce@(_, AnnLit _)
  = return ce                           -- literals are handled by 'vectConst' later
encapsulateScalars ((fvs, vi), AnnTick tck expr)
  = do
    { encExpr <- encapsulateScalars expr
    ; return ((fvs, vi), AnnTick tck encExpr)
    }
encapsulateScalars ce@((fvs, vi), AnnLam bndr expr)
  = do
    { vectAvoid <- isVectAvoidanceAggressive
    ; varsS <- allScalarVarTypeSet fvs
        -- NB: diverts from the paper: we need to check the scalarness of bound variables as well,
        --     as 'vectScalarFun' will handle them just the same as those introduced for the 'fvs'
        --     by encapsulation.
    ; bndrsS <- allScalarVarType bndrs
    ; case (vi, vectAvoid && varsS && bndrsS) of
        (VISimple, True) -> liftSimpleAndCase ce
        _ -> do
             { encExpr <- encapsulateScalars expr
             ; return ((fvs, vi), AnnLam bndr encExpr)
             }
    }
  where
    (bndrs, _) = collectAnnBndrs ce
encapsulateScalars ce@((fvs, vi), AnnApp ce1 ce2)
  = do
    { vectAvoid <- isVectAvoidanceAggressive
    ; varsS <- allScalarVarTypeSet fvs
      -- with minimal avoidance, only "simple applications" (single scalar ops) are lifted
    ; case (vi, (vectAvoid || isSimpleApplication ce) && varsS) of
        (VISimple, True) -> liftSimpleAndCase ce
        _ -> do
             { encCe1 <- encapsulateScalars ce1
             ; encCe2 <- encapsulateScalars ce2
             ; return ((fvs, vi), AnnApp encCe1 encCe2)
             }
    }
  where
    -- an application whose head and all arguments are simple (vars/lits/types)
    isSimpleApplication :: CoreExprWithVectInfo -> Bool
    isSimpleApplication (_, AnnTick _ ce) = isSimpleApplication ce
    isSimpleApplication (_, AnnCast ce _) = isSimpleApplication ce
    isSimpleApplication ce | isSimple ce = True
    isSimpleApplication (_, AnnApp ce1 ce2) = isSimple ce1 && isSimpleApplication ce2
    isSimpleApplication _ = False
    --
    isSimple :: CoreExprWithVectInfo -> Bool
    isSimple (_, AnnType {}) = True
    isSimple (_, AnnVar {}) = True
    isSimple (_, AnnLit {}) = True
    isSimple (_, AnnTick _ ce) = isSimple ce
    isSimple (_, AnnCast ce _) = isSimple ce
    isSimple _ = False
encapsulateScalars ce@((fvs, vi), AnnCase scrut bndr ty alts)
  = do
    { vectAvoid <- isVectAvoidanceAggressive
    ; varsS <- allScalarVarTypeSet fvs
    ; case (vi, vectAvoid && varsS) of
        (VISimple, True) -> liftSimpleAndCase ce
        _ -> do
             { encScrut <- encapsulateScalars scrut
             ; encAlts <- mapM encAlt alts
             ; return ((fvs, vi), AnnCase encScrut bndr ty encAlts)
             }
    }
  where
    encAlt (con, bndrs, expr) = (con, bndrs,) <$> encapsulateScalars expr
encapsulateScalars ce@((fvs, vi), AnnLet (AnnNonRec bndr expr1) expr2)
  = do
    { vectAvoid <- isVectAvoidanceAggressive
    ; varsS <- allScalarVarTypeSet fvs
    ; case (vi, vectAvoid && varsS) of
        (VISimple, True) -> liftSimpleAndCase ce
        _ -> do
             { encExpr1 <- encapsulateScalars expr1
             ; encExpr2 <- encapsulateScalars expr2
             ; return ((fvs, vi), AnnLet (AnnNonRec bndr encExpr1) encExpr2)
             }
    }
encapsulateScalars ce@((fvs, vi), AnnLet (AnnRec binds) expr)
  = do
    { vectAvoid <- isVectAvoidanceAggressive
    ; varsS <- allScalarVarTypeSet fvs
    ; case (vi, vectAvoid && varsS) of
        (VISimple, True) -> liftSimpleAndCase ce
        _ -> do
             { encBinds <- mapM encBind binds
             ; encExpr <- encapsulateScalars expr
             ; return ((fvs, vi), AnnLet (AnnRec encBinds) encExpr)
             }
    }
  where
    encBind (bndr, expr) = (bndr,) <$> encapsulateScalars expr
encapsulateScalars ((fvs, vi), AnnCast expr coercion)
  = do
    { encExpr <- encapsulateScalars expr
    ; return ((fvs, vi), AnnCast encExpr coercion)
    }
encapsulateScalars _
  = panic "Vectorise.Exp.encapsulateScalars: unknown constructor"
-- Lambda-lift the given simple expression and apply it to the abstracted free variables.
--
-- If the expression is a case expression scrutinising anything, but a scalar type, then lift
-- each alternative individually.
--
liftSimpleAndCase :: CoreExprWithVectInfo -> VM CoreExprWithVectInfo
liftSimpleAndCase aexpr@((fvs, _vi), AnnCase expr bndr t alts)
  = do
    { vi <- vectAvoidInfoTypeOf expr
    ; if (vi == VISimple)
      then
        liftSimple aexpr -- if the scrutinee is scalar, we need no special treatment
      else do
      { -- non-scalar scrutinee: lift each alternative's right-hand side separately
        -- (the case itself must still be vectorised to handle the scrutinee)
      ; alts' <- mapM (\(ac, bndrs, aexpr) -> (ac, bndrs,) <$> liftSimpleAndCase aexpr) alts
      ; return ((fvs, vi), AnnCase expr bndr t alts')
      }
    }
liftSimpleAndCase aexpr = liftSimple aexpr
-- Lambda-lift a simple expression: abstract over its (non-toplevel) free variables and
-- immediately apply the abstraction to those same variables. The introduced lambda is
-- marked 'VIEncaps', so it is later vectorised as a scalar computation.
liftSimple :: CoreExprWithVectInfo -> VM CoreExprWithVectInfo
liftSimple ((fvs, vi), AnnVar v)
  | v `elemVarSet` fvs -- special case to avoid producing: (\v -> v) v
  && not (isToplevel v) -- NB: if 'v' not free or is toplevel, we must get the 'VIEncaps'
  = return $ ((fvs, vi), AnnVar v)
liftSimple aexpr@((fvs_orig, VISimple), expr)
  = do
    { let liftedExpr = mkAnnApps (mkAnnLams (reverse vars) fvs expr) vars
    ; traceVt "encapsulate:" $ ppr (deAnnotate aexpr) $$ text "==>" $$ ppr (deAnnotate liftedExpr)
    ; return $ liftedExpr
    }
  where
    vars = varSetElems fvs
    fvs = filterVarSet (not . isToplevel) fvs_orig -- only include 'Id's that are not toplevel
    -- wrap 'expr' in one 'AnnLam' per variable, shrinking the recorded free-var set
    -- as each variable becomes bound
    mkAnnLams :: [Var] -> VarSet -> AnnExpr' Var (VarSet, VectAvoidInfo) -> CoreExprWithVectInfo
    mkAnnLams [] fvs expr = ASSERT(isEmptyVarSet fvs)
                            ((emptyVarSet, VIEncaps), expr)
    mkAnnLams (v:vs) fvs expr = mkAnnLams vs (fvs `delVarSet` v) (AnnLam v ((fvs, VIEncaps), expr))
    -- apply the lifted abstraction to each of the original free variables in turn
    mkAnnApps :: CoreExprWithVectInfo -> [Var] -> CoreExprWithVectInfo
    mkAnnApps aexpr [] = aexpr
    mkAnnApps aexpr (v:vs) = mkAnnApps (mkAnnApp aexpr v) vs
    mkAnnApp :: CoreExprWithVectInfo -> Var -> CoreExprWithVectInfo
    mkAnnApp aexpr@((fvs, _vi), _expr) v
      = ((fvs `extendVarSet` v, VISimple), AnnApp aexpr ((unitVarSet v, VISimple), AnnVar v))
liftSimple aexpr
  = pprPanic "Vectorise.Exp.liftSimple: not simple" $ ppr (deAnnotate aexpr)
-- Is this variable a toplevel identifier? Decided by inspecting the Id's real
-- unfolding; non-Ids (e.g. type variables) are never considered toplevel.
isToplevel :: Var -> Bool
isToplevel v = isId v && unfoldingIsTop (realIdUnfolding v)
  where
    -- a 'CoreUnfolding' records toplevel-ness explicitly; constructor-like and
    -- dfun unfoldings only ever arise for toplevel bindings
    unfoldingIsTop NoUnfolding                       = False
    unfoldingIsTop (OtherCon {})                     = True
    unfoldingIsTop (DFunUnfolding {})                = True
    unfoldingIsTop (CoreUnfolding {uf_is_top = top}) = top
-- |Vectorise an expression.
--
vectExpr :: CoreExprWithVectInfo -> VM VExpr
vectExpr aexpr
    -- encapsulated expression of functional type => try to vectorise as a scalar subcomputation
  | (isFunTy . annExprType $ aexpr) && isVIEncaps aexpr
  = vectFnExpr True False aexpr
    -- encapsulated constant => vectorise as a scalar constant
  | isVIEncaps aexpr
  = traceVt "vectExpr (encapsulated constant):" (ppr . deAnnotate $ aexpr) >>
    vectConst (deAnnotate aexpr)
vectExpr (_, AnnVar v)
  = vectVar v
vectExpr (_, AnnLit lit)
  = vectConst $ Lit lit
vectExpr aexpr@(_, AnnLam _ _)
  = traceVt "vectExpr [AnnLam]:" (ppr . deAnnotate $ aexpr) >>
    vectFnExpr True False aexpr
  -- SPECIAL CASE: Vectorise/lift 'patError @ ty err' by only vectorising/lifting the type 'ty';
  -- its only purpose is to abort the program, but we need to adjust the type to keep CoreLint
  -- happy.
-- FIXME: can't be do this with a VECTORISE pragma on 'pAT_ERROR_ID' now?
vectExpr (_, AnnApp (_, AnnApp (_, AnnVar v) (_, AnnType ty)) err)
  | v == pAT_ERROR_ID
  = do
    { (vty, lty) <- vectAndLiftType ty
    ; return (mkCoreApps (Var v) [Type vty, err'], mkCoreApps (Var v) [Type lty, err'])
    }
  where
    err' = deAnnotate err
  -- type application (handle multiple consecutive type applications simultaneously to ensure the
  -- PA dictionaries are put at the right places)
vectExpr e@(_, AnnApp _ arg)
  | isAnnTypeArg arg
  = vectPolyApp e
  -- Lifted literal (a data constructor applied to a literal is replicated, not closure-applied)
vectExpr (_, AnnApp (_, AnnVar v) (_, AnnLit lit))
  | Just _con <- isDataConId_maybe v
  = do
    { let vexpr = App (Var v) (Lit lit)
    ; lexpr <- liftPD vexpr
    ; return (vexpr, lexpr)
    }
  -- value application (dictionary or user value)
vectExpr e@(_, AnnApp fn arg)
  | isPredTy arg_ty -- dictionary application (whose result is not a dictionary)
  = vectPolyApp e
  | otherwise -- user value
  = do
    { -- vectorise the types
    ; varg_ty <- vectType arg_ty
    ; vres_ty <- vectType res_ty
      -- vectorise the function and argument expression
    ; vfn <- vectExpr fn
    ; varg <- vectExpr arg
      -- the vectorised function is a closure; apply it to the vectorised argument
    ; mkClosureApp varg_ty vres_ty vfn varg
    }
  where
    (arg_ty, res_ty) = splitFunTy . exprType $ deAnnotate fn
vectExpr (_, AnnCase scrut bndr ty alts)
  | Just (tycon, ty_args) <- splitTyConApp_maybe scrut_ty
  , isAlgTyCon tycon
  = vectAlgCase tycon ty_args scrut bndr ty alts
  | otherwise
  = do
    { dflags <- getDynFlags
    ; cantVectorise dflags "Can't vectorise expression (no algebraic type constructor)" $
        ppr scrut_ty
    }
  where
    scrut_ty = exprType (deAnnotate scrut)
vectExpr (_, AnnLet (AnnNonRec bndr rhs) body)
  = do
    { traceVt "let binding (non-recursive)" empty
    ; vrhs <- localV $
                inBind bndr $
                  vectAnnPolyExpr False rhs
    ; traceVt "let body (non-recursive)" empty
    ; (vbndr, vbody) <- vectBndrIn bndr (vectExpr body)
    ; return $ vLet (vNonRec vbndr vrhs) vbody
    }
vectExpr (_, AnnLet (AnnRec bs) body)
  = do
    { (vbndrs, (vrhss, vbody)) <- vectBndrsIn bndrs $ do
        { traceVt "let bindings (recursive)" empty
        ; vrhss <- zipWithM vect_rhs bndrs rhss
        ; traceVt "let body (recursive)" empty
        ; vbody <- vectExpr body
        ; return (vrhss, vbody)
        }
    ; return $ vLet (vRec vbndrs vrhss) vbody
    }
  where
    (bndrs, rhss) = unzip bs
    vect_rhs bndr rhs = localV $
                        inBind bndr $
                        vectAnnPolyExpr (isStrongLoopBreaker $ idOccInfo bndr) rhs
vectExpr (_, AnnTick tickish expr)
  = vTick tickish <$> vectExpr expr
vectExpr (_, AnnType ty)
  = vType <$> vectType ty
vectExpr e
  = do
    { dflags <- getDynFlags
    ; cantVectorise dflags "Can't vectorise expression (vectExpr)" $ ppr (deAnnotate e)
    }
-- |Vectorise an expression that *may* have an outer lambda abstraction. If the expression is marked
-- as encapsulated ('VIEncaps'), vectorise it as a scalar computation (using a generalised scalar
-- zip).
--
-- We do not handle type variables at this point, as they will already have been stripped off by
-- 'vectPolyExpr'. We also only have to worry about one set of dictionary arguments as we (1) only
-- deal with Haskell 2011 and (2) class selectors are vectorised elsewhere.
--
vectFnExpr :: Bool                    -- ^If we process the RHS of a binding, whether that binding
                                      --  should be inlined
           -> Bool                    -- ^Whether the binding is a loop breaker
           -> CoreExprWithVectInfo    -- ^Expression to vectorise; must have an outer `AnnLam`
           -> VM VExpr
vectFnExpr inline loop_breaker aexpr@(_ann, AnnLam bndr body)
    -- predicate abstraction: leave as a normal abstraction, but vectorise the predicate type
  | isId bndr
    && isPredTy (idType bndr)
  = do
    { vBndr <- vectBndr bndr
    ; vbody <- vectFnExpr inline loop_breaker body
    ; return $ mapVect (mkLams [vectorised vBndr]) vbody
    }
    -- encapsulated non-predicate abstraction: vectorise as a scalar computation
  | isId bndr && isVIEncaps aexpr
  = vectScalarFun . deAnnotate $ aexpr
    -- non-predicate abstraction: vectorise as a non-scalar computation
  | isId bndr
  = vectLam inline loop_breaker aexpr
  | otherwise
    -- type lambdas were stripped by 'vectAnnPolyExpr'; hitting one here is a bug in the caller
  = do
    { dflags <- getDynFlags
    ; cantVectorise dflags "Vectorise.Exp.vectFnExpr: Unexpected type lambda" $
        ppr (deAnnotate aexpr)
    }
vectFnExpr _ _ aexpr
    -- encapsulated function: vectorise as a scalar computation
  | (isFunTy . annExprType $ aexpr) && isVIEncaps aexpr
  = vectScalarFun . deAnnotate $ aexpr
  | otherwise
    -- not an abstraction: vectorise as a non-scalar vanilla expression
    -- NB: we can get here due to the recursion in the first case above and from 'vectAnnPolyExpr'
  = vectExpr aexpr
-- |Vectorise type and dictionary applications.
--
-- These are always headed by a variable (as we don't support higher-rank polymorphism), but may
-- involve two sets of type variables and dictionaries. Consider,
--
-- > class C a where
-- > m :: D b => b -> a
--
-- The type of 'm' is 'm :: forall a. C a => forall b. D b => b -> a'.
--
vectPolyApp :: CoreExprWithVectInfo -> VM VExpr
vectPolyApp e0
  = case e4 of
      (_, AnnVar var)
        -> do { -- get the vectorised form of the variable
              ; vVar <- lookupVar var
              ; traceVt "vectPolyApp of" (ppr var)
                -- vectorise type and dictionary arguments
              ; vDictsOuter <- mapM vectDictExpr (map deAnnotate dictsOuter)
              ; vDictsInner <- mapM vectDictExpr (map deAnnotate dictsInner)
              ; vTysOuter <- mapM vectType tysOuter
              ; vTysInner <- mapM vectType tysInner
              ; let reconstructOuter v = (`mkApps` vDictsOuter) <$> polyApply v vTysOuter
              ; case vVar of
                  Local (vv, lv)
                    -> do { MASSERT( null dictsInner ) -- local vars cannot be class selectors
                          ; traceVt " LOCAL" (text "")
                          ; (,) <$> reconstructOuter (Var vv) <*> reconstructOuter (Var lv)
                          }
                  Global vv
                    | isDictComp var -- dictionary computation
                    -> do { -- in a dictionary computation, the innermost, non-empty set of
                            -- arguments are non-vectorised arguments, where no 'PA'dictionaries
                            -- are needed for the type variables
                          ; ve <- if null dictsInner
                                  then
                                    return $ Var vv `mkTyApps` vTysOuter `mkApps` vDictsOuter
                                  else
                                    reconstructOuter
                                      (Var vv `mkTyApps` vTysInner `mkApps` vDictsInner)
                          ; traceVt " GLOBAL (dict):" (ppr ve)
                          ; vectConst ve
                          }
                    | otherwise -- non-dictionary computation
                    -> do { MASSERT( null dictsInner )
                          ; ve <- reconstructOuter (Var vv)
                          ; traceVt " GLOBAL (non-dict):" (ppr ve)
                          ; vectConst ve
                          }
              }
      _ -> pprSorry "Cannot vectorise programs with higher-rank types:" (ppr . deAnnotate $ e0)
  where
    -- peel off, from the outside in: dictionaries, types, dictionaries, types;
    -- if there is only one set of variables or dictionaries, it will be the outer set
    (e1, dictsOuter) = collectAnnDictArgs e0
    (e2, tysOuter) = collectAnnTypeArgs e1
    (e3, dictsInner) = collectAnnDictArgs e2
    (e4, tysInner) = collectAnnTypeArgs e3
    --
    isDictComp var = (isJust . isClassOpId_maybe $ var) || isDFunId var
-- |Vectorise the body of a dfun.
--
-- Dictionary computations are special for the following reasons. The application of dictionary
-- functions are always saturated, so there is no need to create closures. Dictionary computations
-- don't depend on array values, so they are always scalar computations whose result we can
-- replicate (instead of executing them in parallel).
--
-- NB: To keep things simple, we are not rewriting any of the bindings introduced in a dictionary
-- computation. Consequently, the variable case needs to deal with cases where binders are
-- in the vectoriser environments and where that is not the case.
--
vectDictExpr :: CoreExpr -> VM CoreExpr
vectDictExpr (Var var)
  = do { mb_scope <- lookupVar_maybe var
       ; case mb_scope of
           Nothing -> return $ Var var -- binder from within the dict. computation
           Just (Local (vVar, _)) -> return $ Var vVar -- local vectorised variable
           Just (Global vVar) -> return $ Var vVar -- global vectorised variable
       }
vectDictExpr (Lit lit)
  = pprPanic "Vectorise.Exp.vectDictExpr: literal in dictionary computation" (ppr lit)
vectDictExpr (Lam bndr e)
    -- binders are kept as-is: dictionary-internal bindings are not rewritten
  = Lam bndr <$> vectDictExpr e
vectDictExpr (App fn arg)
  = App <$> vectDictExpr fn <*> vectDictExpr arg
vectDictExpr (Case e bndr ty alts)
  = Case <$> vectDictExpr e <*> pure bndr <*> vectType ty <*> mapM vectDictAlt alts
  where
    vectDictAlt (con, bs, e) = (,,) <$> vectDictAltCon con <*> pure bs <*> vectDictExpr e
    -- only the data constructor needs mapping to its vectorised counterpart
    vectDictAltCon (DataAlt datacon) = DataAlt <$> maybeV dataConErr (lookupDataCon datacon)
      where
        dataConErr = ptext (sLit "Cannot vectorise data constructor:") <+> ppr datacon
    vectDictAltCon (LitAlt lit) = return $ LitAlt lit
    vectDictAltCon DEFAULT = return DEFAULT
vectDictExpr (Let bnd body)
  = Let <$> vectDictBind bnd <*> vectDictExpr body
  where
    vectDictBind (NonRec bndr e) = NonRec bndr <$> vectDictExpr e
    vectDictBind (Rec bnds) = Rec <$> mapM (\(bndr, e) -> (bndr,) <$> vectDictExpr e) bnds
vectDictExpr e@(Cast _e _coe)
  = pprSorry "Vectorise.Exp.vectDictExpr: cast" (ppr e)
vectDictExpr (Tick tickish e)
  = Tick tickish <$> vectDictExpr e
vectDictExpr (Type ty)
  = Type <$> vectType ty
vectDictExpr (Coercion coe)
  = pprSorry "Vectorise.Exp.vectDictExpr: coercion" (ppr coe)
-- |Vectorise an expression of functional type, where all arguments and the result are of primitive
-- types (i.e., 'Int', 'Float', 'Double' etc., which have instances of the 'Scalar' type class) and
-- which does not contain any subcomputations that involve parallel arrays. Such functionals do not
-- require the full blown vectorisation transformation; instead, they can be lifted by application
-- of a member of the zipWith family (i.e., 'map', 'zipWith', zipWith3', etc.)
--
-- Dictionary functions are also scalar functions (as dictionaries themselves are not vectorised,
-- instead they become dictionaries of vectorised methods). We treat them differently, though see
-- "Note [Scalar dfuns]" in 'Vectorise'.
--
vectScalarFun :: CoreExpr -> VM VExpr
vectScalarFun expr
  = do { traceVt "vectScalarFun:" (ppr expr)
         -- split the expression's type into argument types and result type and
         -- delegate the actual scalar-closure construction to 'mkScalarFun'
       ; let (argTys, resTy) = splitFunTys (exprType expr)
       ; mkScalarFun argTys resTy expr
       }
-- Generate code for a scalar function by generating a scalar closure. If the function is a
-- dictionary function, vectorise it as dictionary code.
--
mkScalarFun :: [Type] -> Type -> CoreExpr -> VM VExpr
mkScalarFun arg_tys res_ty expr
  | isPredTy res_ty
    -- dictionary result: vectorise as dictionary code; the lifted component is never used
  = do { vExpr <- vectDictExpr expr
       ; return (vExpr, unused)
       }
  | otherwise
  = do { traceVt "mkScalarFun: " $ ppr expr $$ ptext (sLit " ::") <+> ppr (mkFunTys arg_tys res_ty)
         -- hoist the function itself to the toplevel, then wrap it in a scalar closure
         -- whose lifted version applies the appropriate member of the zipWith family
       ; fn_var <- hoistExpr (fsLit "fn") expr DontInline
       ; zipf <- zipScalars arg_tys res_ty
       ; clo <- scalarClosure arg_tys res_ty (Var fn_var) (zipf `App` Var fn_var)
       ; clo_var <- hoistExpr (fsLit "clo") clo DontInline
       ; lclo <- liftPD (Var clo_var)
       ; return (Var clo_var, lclo)
       }
  where
    unused = error "Vectorise.Exp.mkScalarFun: we don't lift dictionary expressions"
-- |Vectorise a dictionary function that has a 'VECTORISE SCALAR instance' pragma.
--
-- In other words, all methods in that dictionary are scalar functions — to be vectorised with
-- 'vectScalarFun'. The dictionary "function" itself may be a constant, though.
--
-- NB: You may think that we could implement this function guided by the struture of the Core
-- expression of the right-hand side of the dictionary function. We cannot proceed like this as
-- 'vectScalarDFun' must also work for *imported* dfuns, where we don't necessarily have access
-- to the Core code of the unvectorised dfun.
--
-- Here an example — assume,
--
-- > class Eq a where { (==) :: a -> a -> Bool }
-- > instance (Eq a, Eq b) => Eq (a, b) where { (==) = ... }
-- > {-# VECTORISE SCALAR instance Eq (a, b) }
--
-- The unvectorised dfun for the above instance has the following signature:
--
-- > $dEqPair :: forall a b. Eq a -> Eq b -> Eq (a, b)
--
-- We generate the following (scalar) vectorised dfun (liberally using TH notation):
--
-- > $v$dEqPair :: forall a b. V:Eq a -> V:Eq b -> V:Eq (a, b)
-- > $v$dEqPair = /\a b -> \dEqa :: V:Eq a -> \dEqb :: V:Eq b ->
-- > D:V:Eq $(vectScalarFun True recFns
-- > [| (==) @(a, b) ($dEqPair @a @b $(unVect dEqa) $(unVect dEqb)) |])
--
-- NB:
-- * '(,)' vectorises to '(,)' — hence, the type constructor in the result type remains the same.
-- * We share the '$(unVect di)' sub-expressions between the different selectors, but duplicate
-- the application of the unvectorised dfun, to enable the dictionary selection rules to fire.
--
vectScalarDFun :: Var        -- ^ Original dfun
               -> VM CoreExpr
vectScalarDFun var
  = do { -- bring the type variables into scope
       ; mapM_ defLocalTyVar tvs
         -- vectorise dictionary argument types and generate variables for them
       ; vTheta <- mapM vectType theta
       ; vThetaBndr <- mapM (newLocalVar (fsLit "vd")) vTheta
       ; let vThetaVars = varsToCoreExprs vThetaBndr
         -- vectorise superclass dictionaries and methods as scalar expressions
         -- (each selector is applied to the *unvectorised* dict rebuilt by 'unVectDict',
         --  so the dictionary selection rules can still fire)
       ; thetaVars <- mapM (newLocalVar (fsLit "d")) theta
       ; thetaExprs <- zipWithM unVectDict theta vThetaVars
       ; let thetaDictBinds = zipWith NonRec thetaVars thetaExprs
             dict = Var var `mkTyApps` (mkTyVarTys tvs) `mkVarApps` thetaVars
             scsOps = map (\selId -> varToCoreExpr selId `mkTyApps` tys `mkApps` [dict])
                          selIds
       ; vScsOps <- mapM (\e -> vectorised <$> vectScalarFun e) scsOps
         -- vectorised applications of the class-dictionary data constructor
       ; Just vDataCon <- lookupDataCon dataCon
       ; vTys <- mapM vectType tys
       ; let vBody = thetaDictBinds `mkLets` mkCoreConApps vDataCon (map Type vTys ++ vScsOps)
       ; return $ mkLams (tvs ++ vThetaBndr) vBody
       }
  where
    ty = varType var
    (tvs, theta, pty) = tcSplitSigmaTy ty -- 'theta' is the instance context
    (cls, tys) = tcSplitDFunHead pty -- 'pty' is the instance head
    selIds = classAllSelIds cls
    dataCon = classDataCon cls
-- Build a value of the dictionary before vectorisation from original, unvectorised type and an
-- expression computing the vectorised dictionary.
--
-- Given the vectorised version of a dictionary 'vd :: V:C vt1..vtn', generate code that computes
-- the unvectorised version, thus:
--
-- > D:C op1 .. opm
-- > where
-- > opi = $(fromVect opTyi [| vSeli @vt1..vtk vd |])
--
-- where 'opTyi' is the type of the i-th superclass or op of the unvectorised dictionary.
--
unVectDict :: Type -> CoreExpr -> VM CoreExpr
unVectDict ty e
  = do { vTys <- mapM vectType tys
       ; let meths = map (\sel -> Var sel `mkTyApps` vTys `mkApps` [e]) selIds
       ; scOps <- zipWithM fromVect methTys meths
       ; return $ mkCoreConApps dataCon (map Type tys ++ scOps)
       }
  where
    -- NOTE(review): the 'Just' pattern binds below are partial — they rely on 'ty' being
    -- a single-data-constructor class dictionary type; any other argument crashes here.
    (tycon, tys) = splitTyConApp ty
    Just dataCon = isDataProductTyCon_maybe tycon
    Just cls = tyConClass_maybe tycon
    methTys = dataConInstArgTys dataCon tys
    selIds = classAllSelIds cls
-- Vectorise an 'n'-ary lambda abstraction by building a set of 'n' explicit closures.
--
-- All non-dictionary free variables go into the closure's environment, whereas the dictionary
-- variables are passed explicit (as conventional arguments) into the body during closure
-- construction.
--
vectLam :: Bool                 -- ^ Should the RHS of a binding be inlined?
        -> Bool                 -- ^ Whether the binding is a loop breaker.
        -> CoreExprWithVectInfo -- ^ Body of abstraction.
        -> VM VExpr
vectLam inline loop_breaker expr@((fvs, _vi), AnnLam _ _)
  = do { traceVt "fully vectorise a lambda expression" (ppr . deAnnotate $ expr)

       ; let (bndrs, body) = collectAnnValBinders expr

         -- grab the in-scope type variables
       ; tyvars <- localTyVars

         -- collect and vectorise all /local/ free variables
       ; vfvs <- readLEnv $ \env ->
                   [ (var, fromJust mb_vv)
                   | var <- varSetElems fvs
                   , let mb_vv = lookupVarEnv (local_vars env) var
                   , isJust mb_vv  -- its local == is in local var env
                   ]

         -- separate dictionary from non-dictionary variables in the free variable set
       ; let (vvs_dict, vvs_nondict)     = partition (isPredTy . varType . fst) vfvs
             (_fvs_dict, vfvs_dict)      = unzip vvs_dict
             (fvs_nondict, vfvs_nondict) = unzip vvs_nondict

         -- compute the type of the vectorised closure
       ; arg_tys <- mapM (vectType . idType) bndrs
       ; res_ty  <- vectType (exprType $ deAnnotate body)

       ; let arity      = length fvs_nondict + length bndrs
             vfvs_dict' = map vectorised vfvs_dict
       ; buildClosures tyvars vfvs_dict' vfvs_nondict arg_tys res_ty
         . hoistPolyVExpr tyvars vfvs_dict' (maybe_inline arity)
         $ do {  -- generate the vectorised body of the lambda abstraction
              ; lc              <- builtin liftingContext
              ; (vbndrs, vbody) <- vectBndrsIn (fvs_nondict ++ bndrs) $ vectExpr body
              ; vbody' <- break_loop lc res_ty vbody
              ; return $ vLams lc vbndrs vbody'
              }
       }
  where
    maybe_inline n | inline    = Inline n
                   | otherwise = DontInline

    -- If this is the body of a binding marked as a loop breaker, add a recursion termination test
    -- to the /lifted/ version of the function body. The termination tests checks if the lifting
    -- context is empty. If so, it returns an empty array of the (lifted) result type instead of
    -- executing the function body. This is the test from the last line (defining \mathcal{L}')
    -- in Figure 6 of HtM.
    break_loop lc ty (ve, le)
      | loop_breaker
      = do { dflags <- getDynFlags
           ; empty  <- emptyPD ty
           ; lty    <- mkPDataType ty
           ; return (ve, mkWildCase (Var lc) intPrimTy lty
                           [(DEFAULT, [], le),
                            (LitAlt (mkMachInt dflags 0), [], empty)])
           }
      | otherwise = return (ve, le)
vectLam _ _ _ = panic "Vectorise.Exp.vectLam: not a lambda"
-- Vectorise an algebraic case expression.
--
-- We convert
--
-- case e :: t of v { ... }
--
-- to
--
-- V: let v' = e in case v' of _ { ... }
-- L: let v' = e in case v' `cast` ... of _ { ... }
--
-- When lifting, we have to do it this way because v must have the type
-- [:V(T):] but the scrutinee must be cast to the representation type. We also
-- have to handle the case where v is a wild var correctly.
--
-- FIXME: this is too lazy...is it?
vectAlgCase :: TyCon -> [Type] -> CoreExprWithVectInfo -> Var -> Type
            -> [(AltCon, [Var], CoreExprWithVectInfo)]
            -> VM VExpr
-- Single DEFAULT alternative: there is no constructor to match on, so we
-- simply bind the scrutinee and vectorise the alternative body.
vectAlgCase _tycon _ty_args scrut bndr ty [(DEFAULT, [], body)]
  = do
    { traceVt "scrutinee (DEFAULT only)" empty
    ; vscrut         <- vectExpr scrut
    ; (vty, lty)     <- vectAndLiftType ty
    ; traceVt "alternative body (DEFAULT only)" empty
    ; (vbndr, vbody) <- vectBndrIn bndr (vectExpr body)
    ; return $ vCaseDEFAULT vscrut vbndr vty lty vbody
    }
-- A single alternative matching a constructor with no field binders behaves
-- exactly like DEFAULT: the match cannot fail and binds nothing.
vectAlgCase _tycon _ty_args scrut bndr ty [(DataAlt _, [], body)]
  = do
    { traceVt "scrutinee (one shot w/o binders)" empty
    ; vscrut         <- vectExpr scrut
    ; (vty, lty)     <- vectAndLiftType ty
    ; traceVt "alternative body (one shot w/o binders)" empty
    ; (vbndr, vbody) <- vectBndrIn bndr (vectExpr body)
    ; return $ vCaseDEFAULT vscrut vbndr vty lty vbody
    }
-- Single alternative with field binders: unwrap the vectorised and lifted
-- scrutinee representations and bind the constructor fields in each.
vectAlgCase _tycon _ty_args scrut bndr ty [(DataAlt dc, bndrs, body)]
  = do
    { traceVt "scrutinee (one shot w/ binders)" empty
    ; vexpr      <- vectExpr scrut
    ; (vty, lty) <- vectAndLiftType ty
    ; traceVt "alternative body (one shot w/ binders)" empty
    ; (vbndr, (vbndrs, (vect_body, lift_body)))
        <- vect_scrut_bndr
         . vectBndrsIn bndrs
         $ vectExpr body
    ; let (vect_bndrs, lift_bndrs) = unzip vbndrs
    ; (vscrut, lscrut, pdata_dc) <- pdataUnwrapScrut (vVar vbndr)
    ; vect_dc <- maybeV dataConErr (lookupDataCon dc)
    ; let vcase = mk_wild_case vscrut vty vect_dc  vect_bndrs vect_body
          lcase = mk_wild_case lscrut lty pdata_dc lift_bndrs lift_body
    ; return $ vLet (vNonRec vbndr vexpr) (vcase, lcase)
    }
  where
    -- use a fresh scrutinee binder when the original one is dead
    vect_scrut_bndr | isDeadBinder bndr = vectBndrNewIn bndr (fsLit "scrut")
                    | otherwise         = vectBndrIn bndr

    mk_wild_case expr ty dc bndrs body
      = mkWildCase expr (exprType expr) ty [(DataAlt dc, bndrs, body)]

    dataConErr = (text "vectAlgCase: data constructor not vectorised" <+> ppr dc)
-- General case with multiple alternatives: the lifted case is compiled to a
-- selector-driven 'combinePD' over the per-alternative lifted results.
vectAlgCase tycon _ty_args scrut bndr ty alts
  = do
    { traceVt "scrutinee (general case)" empty
    ; vexpr <- vectExpr scrut

    ; vect_tc    <- vectTyCon tycon
    ; (vty, lty) <- vectAndLiftType ty

    ; let arity = length (tyConDataCons vect_tc)
    ; sel_ty   <- builtin (selTy arity)
    ; sel_bndr <- newLocalVar (fsLit "sel") sel_ty
    ; let sel = Var sel_bndr

    ; traceVt "alternatives' body (general case)" empty
    ; (vbndr, valts) <- vect_scrut_bndr
                        $ mapM (proc_alt arity sel vty lty) alts'
    ; let (vect_dcs, vect_bndrss, lift_bndrss, vbodies) = unzip4 valts

    ; (vect_scrut, lift_scrut, pdata_dc) <- pdataUnwrapScrut (vVar vbndr)

    ; let (vect_bodies, lift_bodies) = unzip vbodies

    ; vdummy <- newDummyVar (exprType vect_scrut)
    ; ldummy <- newDummyVar (exprType lift_scrut)
    ; let vect_case = Case vect_scrut vdummy vty
                           (zipWith3 mk_vect_alt vect_dcs vect_bndrss vect_bodies)

    ; lc <- builtin liftingContext
    ; lbody <- combinePD vty (Var lc) sel lift_bodies
    ; let lift_case = Case lift_scrut ldummy lty
                           [(DataAlt pdata_dc, sel_bndr : concat lift_bndrss,
                             lbody)]

    ; return . vLet (vNonRec vbndr vexpr)
             $ (vect_case, lift_case)
    }
  where
    -- use a fresh scrutinee binder when the original one is dead
    vect_scrut_bndr | isDeadBinder bndr = vectBndrNewIn bndr (fsLit "scrut")
                    | otherwise         = vectBndrIn bndr

    -- alternatives sorted by constructor tag; DEFAULT (if present) first
    alts' = sortBy (\(alt1, _, _) (alt2, _, _) -> cmp alt1 alt2) alts

    cmp (DataAlt dc1) (DataAlt dc2) = dataConTag dc1 `compare` dataConTag dc2
    cmp DEFAULT       DEFAULT       = EQ
    cmp DEFAULT       _             = LT
    cmp _             DEFAULT       = GT
    cmp _             _             = panic "vectAlgCase/cmp"

    proc_alt arity sel _ lty (DataAlt dc, bndrs, body@((fvs_body, _), _))
      = do
          dflags  <- getDynFlags
          vect_dc <- maybeV dataConErr (lookupDataCon dc)
          let ntag = dataConTagZ vect_dc
              tag  = mkDataConTag dflags vect_dc
              fvs  = fvs_body `delVarSetList` bndrs

          sel_tags <- liftM (`App` sel) (builtin (selTags arity))
          lc       <- builtin liftingContext
          elems    <- builtin (selElements arity ntag)

          (vbndrs, vbody)
            <- vectBndrsIn bndrs
             . localV
             $ do
                 { -- pack the lifted versions of the free variables for this
                   -- alternative according to the selector tags
                   binds <- mapM (pack_var (Var lc) sel_tags tag)
                          . filter isLocalId
                          $ varSetElems fvs
                 ; traceVt "case alternative:" (ppr . deAnnotate $ body)
                 ; (ve, le) <- vectExpr body
                 ; return (ve, Case (elems `App` sel) lc lty
                                 [(DEFAULT, [], (mkLets (concat binds) le))])
                 }
                 -- empty <- emptyPD vty
                 -- return (ve, Case (elems `App` sel) lc lty
                 --               [(DEFAULT, [], Let (NonRec flags_var flags_expr)
                 --                               $ mkLets (concat binds) le),
                 --                (LitAlt (mkMachInt 0), [], empty)])
          let (vect_bndrs, lift_bndrs) = unzip vbndrs
          return (vect_dc, vect_bndrs, lift_bndrs, vbody)
      where
        dataConErr = (text "vectAlgCase: data constructor not vectorised" <+> ppr dc)

    proc_alt _ _ _ _ _ = panic "vectAlgCase/proc_alt"

    mk_vect_alt vect_dc bndrs body = (DataAlt vect_dc, bndrs, body)

    -- Pack a variable for a case alternative context *if* the variable is vectorised. If it
    -- isn't, ignore it as scalar variables don't need to be packed.
    pack_var len tags t v
      = do
          { r <- lookupVar_maybe v
          ; case r of
              Just (Local (vv, lv)) ->
                do
                  { lv'  <- cloneVar lv
                  ; expr <- packByTagPD (idType vv) (Var lv) len tags t
                  ; updLEnv (\env -> env { local_vars = extendVarEnv (local_vars env) v (vv, lv') })
                  ; return [(NonRec lv' expr)]
                  }
              _ -> return []
          }
-- Support to compute information for vectorisation avoidance ------------------

-- Annotation for Core AST nodes that describes how they should be handled during vectorisation
-- and especially if vectorisation of the corresponding computation can be avoided.
--
data VectAvoidInfo = VIParr    -- tree contains parallel computations
                   | VISimple  -- result type is scalar & no parallel subcomputation
                   | VIComplex -- any result type, no parallel subcomputation
                   | VIEncaps  -- tree encapsulated by 'liftSimple'
                   | VIDict    -- dictionary computation (never parallel)
                   deriving (Eq, Show)
                   -- 'Show' is used by the tracing helper 'viTrace'

-- Core expression annotated with free variables and vectorisation-specific information.
--
type CoreExprWithVectInfo = AnnExpr Id (VarSet, VectAvoidInfo)
-- Yield the type of an annotated core expression.
--
annExprType :: AnnExpr Var ann -> Type
annExprType expr = exprType (deAnnotate expr)
-- Project the vectorisation information from an annotated Core expression.
--
vectAvoidInfoOf :: CoreExprWithVectInfo -> VectAvoidInfo
vectAvoidInfoOf (ann, _) = snd ann
-- Is this a 'VIParr' node?
--
isVIParr :: CoreExprWithVectInfo -> Bool
isVIParr expr = vectAvoidInfoOf expr == VIParr
-- Is this a 'VIEncaps' node?
--
isVIEncaps :: CoreExprWithVectInfo -> Bool
isVIEncaps expr = vectAvoidInfoOf expr == VIEncaps
-- Is this a 'VIDict' node?
--
isVIDict :: CoreExprWithVectInfo -> Bool
isVIDict expr = vectAvoidInfoOf expr == VIDict
-- 'VIParr' if either argument is 'VIParr'; otherwise, the first argument.
--
unlessVIParr :: VectAvoidInfo -> VectAvoidInfo -> VectAvoidInfo
unlessVIParr vi vi' = case vi' of
                        VIParr -> VIParr
                        _      -> vi
-- 'VIParr' if either arguments vectorisation information is 'VIParr'; otherwise, the vectorisation
-- information of the first argument is produced.
--
unlessVIParrExpr :: VectAvoidInfo -> CoreExprWithVectInfo -> VectAvoidInfo
infixl `unlessVIParrExpr`
unlessVIParrExpr vi expr = vi `unlessVIParr` vectAvoidInfoOf expr
-- Compute Core annotations to determine for which subexpressions we can avoid vectorisation.
--
-- * The first argument is the set of free, local variables whose evaluation may entail parallelism.
--
vectAvoidInfo :: VarSet -> CoreExprWithFVs -> VM CoreExprWithVectInfo

-- Variables: parallel if in the local or global set of parallel variables,
-- otherwise classified by their type.
vectAvoidInfo pvs ce@(fvs, AnnVar v)
  = do
    { gpvs <- globalParallelVars
    ; vi <- if v `elemVarSet` pvs || v `elemVarSet` gpvs
            then return VIParr
            else vectAvoidInfoTypeOf ce
    ; viTrace ce vi []
    ; when (vi == VIParr) $
        traceVt " reason:" $ if v `elemVarSet` pvs  then text "local"  else
                             if v `elemVarSet` gpvs then text "global" else text "parallel type"

    ; return ((fvs, vi), AnnVar v)
    }

-- Literals are never parallel; classify by their type.
vectAvoidInfo _pvs ce@(fvs, AnnLit lit)
  = do
    { vi <- vectAvoidInfoTypeOf ce
    ; viTrace ce vi []
    ; return ((fvs, vi), AnnLit lit)
    }

-- Applications: parallel if the result type, the function, or the argument
-- is parallel.
vectAvoidInfo pvs ce@(fvs, AnnApp e1 e2)
  = do
    { ceVI <- vectAvoidInfoTypeOf ce
    ; eVI1 <- vectAvoidInfo pvs e1
    ; eVI2 <- vectAvoidInfo pvs e2
    ; let vi = ceVI `unlessVIParrExpr` eVI1 `unlessVIParrExpr` eVI2
    -- ; viTrace ce vi [eVI1, eVI2]
    ; return ((fvs, vi), AnnApp eVI1 eVI2)
    }

-- Lambdas: parallel if the body is parallel or the binder's type is.
vectAvoidInfo pvs (fvs, AnnLam var body)
  = do
    { bodyVI <- vectAvoidInfo pvs body
    ; varVI  <- vectAvoidInfoType $ varType var
    ; let vi = vectAvoidInfoOf bodyVI `unlessVIParr` varVI
    -- ; viTrace ce vi [bodyVI]
    ; return ((fvs, vi), AnnLam var bodyVI)
    }
-- Non-recursive lets: if the bound expression is parallel and the binder is
-- not of scalar type, the binder joins the parallel set for the body.
vectAvoidInfo pvs ce@(fvs, AnnLet (AnnNonRec var e) body)
  = do
    { ceVI       <- vectAvoidInfoTypeOf ce
    ; eVI        <- vectAvoidInfo pvs e
    ; isScalarTy <- isScalar $ varType var
    ; (bodyVI, vi) <- if isVIParr eVI && not isScalarTy
                      then do -- binding is parallel
                        { bodyVI <- vectAvoidInfo (pvs `extendVarSet` var) body
                        ; return (bodyVI, VIParr)
                        }
                      else do -- binding doesn't affect parallelism
                        { bodyVI <- vectAvoidInfo pvs body
                        ; return (bodyVI, ceVI `unlessVIParrExpr` bodyVI)
                        }
    -- ; viTrace ce vi [eVI, bodyVI]
    ; return ((fvs, vi), AnnLet (AnnNonRec var eVI) bodyVI)
    }

-- Recursive lets: annotate once assuming no binder is parallel; if any
-- binding turns out parallel (and non-scalar), extend the parallel set with
-- those binders and re-annotate the whole group and the body.
vectAvoidInfo pvs ce@(fvs, AnnLet (AnnRec bnds) body)
  = do
    { ceVI      <- vectAvoidInfoTypeOf ce
    ; bndsVI    <- mapM (vectAvoidInfoBnd pvs) bnds
    ; parrBndrs <- map fst <$> filterM isVIParrBnd bndsVI
    ; if not . null $ parrBndrs
      then do -- body may trigger parallelism via at least one binding
        { new_pvs <- filterM ((not <$>) . isScalar . varType) parrBndrs
        ; let extendedPvs = pvs `extendVarSetList` new_pvs
        ; bndsVI <- mapM (vectAvoidInfoBnd extendedPvs) bnds
        ; bodyVI <- vectAvoidInfo extendedPvs body
        -- ; viTrace ce VIParr (map snd bndsVI ++ [bodyVI])
        ; return ((fvs, VIParr), AnnLet (AnnRec bndsVI) bodyVI)
        }
      else do -- demanded bindings cannot trigger parallelism
        { bodyVI <- vectAvoidInfo pvs body
        ; let vi = ceVI `unlessVIParrExpr` bodyVI
        -- ; viTrace ce vi (map snd bndsVI ++ [bodyVI])
        ; return ((fvs, vi), AnnLet (AnnRec bndsVI) bodyVI)
        }
    }
  where
    vectAvoidInfoBnd pvs (var, e) = (var,) <$> vectAvoidInfo pvs e

    isVIParrBnd (var, eVI)
      = do
        { isScalarTy <- isScalar (varType var)
        ; return $ isVIParr eVI && not isScalarTy
        }
-- Case expressions: parallel if the scrutinee or any alternative is; when
-- the scrutinee is parallel, each alternative's binders (unless all of
-- scalar type) are added to the parallel set for that alternative's body.
vectAvoidInfo pvs ce@(fvs, AnnCase e var ty alts)
  = do
    { ceVI   <- vectAvoidInfoTypeOf ce
    ; eVI    <- vectAvoidInfo pvs e
    ; altsVI <- mapM (vectAvoidInfoAlt (isVIParr eVI)) alts
    ; let alteVIs = [eVI | (_, _, eVI) <- altsVI]
          vi      = foldl unlessVIParrExpr ceVI (eVI:alteVIs) -- NB: same effect as in the paper
    -- ; viTrace ce vi (eVI : alteVIs)
    ; return ((fvs, vi), AnnCase eVI var ty altsVI)
    }
  where
    vectAvoidInfoAlt scrutIsPar (con, bndrs, e)
      = do
        { allScalar <- allScalarVarType bndrs
        ; let altPvs | scrutIsPar && not allScalar = pvs `extendVarSetList` bndrs
                     | otherwise                   = pvs
        ; (con, bndrs,) <$> vectAvoidInfo altPvs e
        }

-- Casts are transparent; the cast annotation itself is marked 'VISimple'.
vectAvoidInfo pvs (fvs, AnnCast e (fvs_ann, ann))
  = do
    { eVI <- vectAvoidInfo pvs e
    ; return ((fvs, vectAvoidInfoOf eVI), AnnCast eVI ((fvs_ann, VISimple), ann))
    }

-- Ticks are transparent.
vectAvoidInfo pvs (fvs, AnnTick tick e)
  = do
    { eVI <- vectAvoidInfo pvs e
    ; return ((fvs, vectAvoidInfoOf eVI), AnnTick tick eVI)
    }

-- Types and coercions are always simple.
vectAvoidInfo _pvs (fvs, AnnType ty)
  = return ((fvs, VISimple), AnnType ty)

vectAvoidInfo _pvs (fvs, AnnCoercion coe)
  = return ((fvs, VISimple), AnnCoercion coe)
-- Compute vectorisation avoidance information for a type.
--
vectAvoidInfoType :: Type -> VM VectAvoidInfo
vectAvoidInfoType ty
    -- dictionary types never entail parallelism
  | isPredTy ty
  = return VIDict
    -- function types: combine the classification of argument and result
  | Just (arg, res) <- splitFunTy_maybe ty
  = do
    { argVI <- vectAvoidInfoType arg
    ; resVI <- vectAvoidInfoType res
    ; case (argVI, resVI) of
        (VISimple, VISimple) -> return VISimple -- NB: diverts from the paper: scalar functions
        (_       , VIDict)   -> return VIDict
        _                    -> return $ VIComplex `unlessVIParr` argVI `unlessVIParr` resVI
    }
    -- any other type: parallel if it may contain a parallel array, simple if
    -- scalar, complex otherwise
  | otherwise
  = do
    { parr <- maybeParrTy ty
    ; if parr
      then return VIParr
      else do
      { scalar <- isScalar ty
      ; if scalar
        then return VISimple
        else return VIComplex
      } }
-- Compute vectorisation avoidance information for the type of a Core expression (with FVs).
--
vectAvoidInfoTypeOf :: AnnExpr Var ann -> VM VectAvoidInfo
vectAvoidInfoTypeOf expr = vectAvoidInfoType (annExprType expr)
-- Checks whether the type might be a parallel array type.
--
maybeParrTy :: Type -> VM Bool
maybeParrTy ty
    -- looking through newtypes
  | Just ty' <- coreView ty
  = (== VIParr) <$> vectAvoidInfoType ty'
    -- decompose constructor applications: parallel if the TyCon is in the
    -- global set of parallel TyCons or any argument type may be parallel
  | Just (tc, ts) <- splitTyConApp_maybe ty
  = do
    { isParallel <- (tyConName tc `elemNameSet`) <$> globalParallelTyCons
    ; if isParallel
      then return True
      else or <$> mapM maybeParrTy ts
    }
-- look through foralls
maybeParrTy (ForAllTy _ ty) = maybeParrTy ty
maybeParrTy _               = return False
-- Are the types of all variables in the 'Scalar' class or toplevel variables?
--
-- NB: 'liftSimple' does not abstract over toplevel variables.
--
allScalarVarType :: [Var] -> VM Bool
allScalarVarType vars = fmap and (mapM scalarOrTop vars)
  where
    scalarOrTop v
      | isToplevel v = return True
      | otherwise    = isScalar (varType v)
-- Are the types of all variables in the set in the 'Scalar' class or toplevel variables?
--
allScalarVarTypeSet :: VarSet -> VM Bool
allScalarVarTypeSet vs = allScalarVarType (varSetElems vs)
-- Debugging support
--
-- | Emit a trace line showing an expression's vectorisation-avoidance
-- classification together with those of the given subexpressions.
viTrace :: CoreExprWithFVs -> VectAvoidInfo -> [CoreExprWithVectInfo] -> VM ()
viTrace ce vi vTs
  = traceVt ("vect info: " ++ show vi ++ "[" ++
             -- 'concatMap' instead of 'concat $ map' (hlint); output unchanged
             concatMap ((++ " ") . show . vectAvoidInfoOf) vTs ++ "]")
            (ppr $ deAnnotate ce)
| frantisekfarka/ghc-dsi | compiler/vectorise/Vectorise/Exp.hs | bsd-3-clause | 49,194 | 6 | 22 | 14,217 | 11,187 | 5,862 | 5,325 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Encoding.Failure
-- Copyright : (c) The University of Glasgow, 2008-2011
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- Types for specifying how text encoding/decoding fails
--
-----------------------------------------------------------------------------
module GHC.IO.Encoding.Failure (
CodingFailureMode(..), codingFailureModeSuffix,
isSurrogate,
recoverDecode, recoverEncode
) where
import GHC.IO
import GHC.IO.Buffer
import GHC.IO.Exception
import GHC.Base
import GHC.Char
import GHC.Word
import GHC.Show
import GHC.Num
import GHC.Real ( fromIntegral )
--import System.Posix.Internals
import Data.Maybe
-- | The 'CodingFailureMode' is used to construct 'TextEncoding's, and
-- specifies how they handle illegal sequences.
--
-- NB: 'RoundtripFailure' only works properly for encodings that are strict
-- supersets of ASCII; see the comment below this declaration.
data CodingFailureMode
  = ErrorOnCodingFailure
       -- ^ Throw an error when an illegal sequence is encountered
  | IgnoreCodingFailure
       -- ^ Attempt to ignore and recover if an illegal sequence is
       -- encountered
  | TransliterateCodingFailure
       -- ^ Replace with the closest visual match upon an illegal
       -- sequence
  | RoundtripFailure
       -- ^ Use the private-use escape mechanism to attempt to allow
       -- illegal sequences to be roundtripped.
  deriving (Show)
-- This will only work properly for those encodings which are
-- strict supersets of ASCII in the sense that valid ASCII data
-- is also valid in that encoding. This is not true for
-- e.g. UTF-16, because ASCII characters must be padded to two
-- bytes to retain their meaning.
-- Note [Roundtripping]
-- ~~~~~~~~~~~~~~~~~~~~
--
-- Roundtripping is based on the ideas of PEP383.
--
-- We used to use the range of private-use characters from 0xEF80 to
-- 0xEFFF designated for "encoding hacks" by the ConScript Unicode Registery
-- to encode these characters.
--
-- However, people didn't like this because it means we don't get
-- guaranteed roundtripping for byte sequences that look like a UTF-8
-- encoded codepoint 0xEFxx.
--
-- So now like PEP383 we use lone surrogate codepoints 0xDCxx to escape
-- undecodable bytes, even though that may confuse Unicode processing
-- software written in Haskell. This guarantees roundtripping because
-- unicode input that includes lone surrogate codepoints is invalid by
-- definition.
--
-- When we used private-use characters there was a technical problem when it
-- came to encoding back to bytes using iconv. The iconv code will not fail when
-- it tries to encode a private-use character (as it would if trying to encode
-- a surrogate), which means that we won't get a chance to replace it
-- with the byte we originally escaped.
--
-- To work around this, when filling the buffer to be encoded (in
-- writeBlocks/withEncodedCString/newEncodedCString), we replaced the
-- private-use characters with lone surrogates again! Likewise, when
-- reading from a buffer (unpack/unpack_nl/peekEncodedCString) we have
-- to do the inverse process.
--
-- The user of String would never see these lone surrogates, but it
-- ensures that iconv will throw an error when encountering them. We
-- use lone surrogates in the range 0xDC00 to 0xDCFF for this purpose.
-- | The iconv-style encoding-name suffix associated with each failure mode.
codingFailureModeSuffix :: CodingFailureMode -> String
codingFailureModeSuffix cfm = case cfm of
    ErrorOnCodingFailure       -> ""
    IgnoreCodingFailure        -> "//IGNORE"
    TransliterateCodingFailure -> "//TRANSLIT"
    RoundtripFailure           -> "//ROUNDTRIP"
-- | In transliterate mode, we use this character when decoding
-- unknown bytes.
--
-- This is the defined Unicode replacement character:
-- <http://www.fileformat.info/info/unicode/char/0fffd/index.htm>
unrepresentableChar :: Char
unrepresentableChar = '\xFFFD'  -- U+FFFD REPLACEMENT CHARACTER
-- It is extraordinarily important that this series of
-- predicates/transformers gets inlined, because they tend to be used
-- in inner loops related to text encoding. In particular,
-- surrogatifyRoundtripCharacter must be inlined (see #5536)
-- | Some characters are actually "surrogate" codepoints defined for
-- use in UTF-16. We need to signal an invalid character if we detect
-- them when encoding a sequence of 'Char's into 'Word8's because they
-- won't give valid Unicode.
--
-- We may also need to signal an invalid character if we detect them
-- when encoding a sequence of 'Char's into 'Word8's because the
-- 'RoundtripFailure' mode creates these to round-trip bytes through
-- our internal UTF-16 encoding.
{-# INLINE isSurrogate #-}
isSurrogate :: Char -> Bool
isSurrogate c = 0xD800 <= cp && cp <= 0xDFFF
  -- the high-surrogate range (0xD800-0xDBFF) and the low-surrogate range
  -- (0xDC00-0xDFFF) form one contiguous block of code points
  where cp = ord c
-- Bytes (in Buffer Word8) --> lone surrogates (in Buffer CharBufElem)
{-# INLINE escapeToRoundtripCharacterSurrogate #-}
escapeToRoundtripCharacterSurrogate :: Word8 -> Char
escapeToRoundtripCharacterSurrogate w =
    if w < 128
        -- Disallow 'smuggling' of ASCII bytes. For roundtripping to
        -- work, this assumes encoding is ASCII-superset.
        then chr (fromIntegral w)
        -- escape the byte as a lone low surrogate (0xDC80..0xDCFF)
        else chr (0xDC00 + fromIntegral w)
-- Lone surrogates (in Buffer CharBufElem) --> bytes (in Buffer Word8)
{-# INLINE unescapeRoundtripCharacterSurrogate #-}
unescapeRoundtripCharacterSurrogate :: Char -> Maybe Word8
unescapeRoundtripCharacterSurrogate c =
    if 0xDC80 <= cp && cp < 0xDD00
        then Just (fromIntegral cp) -- narrowing to Word8 discards the high byte
        else Nothing
  where cp = ord c
-- | Recover from a decoding failure on the byte at the head of the input
-- buffer, according to the given failure mode.  Except in error mode, the
-- offending byte is consumed (bufL advanced) and, where applicable, a
-- replacement character is written to the output buffer.
recoverDecode :: CodingFailureMode -> Buffer Word8 -> Buffer Char
              -> IO (Buffer Word8, Buffer Char)
recoverDecode cfm input@Buffer{  bufRaw=iraw, bufL=ir, bufR=_  }
                  output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow } = do
 --puts $ "recoverDecode " ++ show ir
 case cfm of
  ErrorOnCodingFailure       -> ioe_decodingError
  IgnoreCodingFailure        -> return (input { bufL=ir+1 }, output)
  TransliterateCodingFailure -> do
      -- emit U+FFFD in place of the undecodable byte
      ow' <- writeCharBuf oraw ow unrepresentableChar
      return (input { bufL=ir+1 }, output { bufR=ow' })
  RoundtripFailure           -> do
      -- escape the raw byte as a lone surrogate; see Note [Roundtripping]
      b <- readWord8Buf iraw ir
      ow' <- writeCharBuf oraw ow (escapeToRoundtripCharacterSurrogate b)
      return (input { bufL=ir+1 }, output { bufR=ow' })
-- | Recover from an encoding failure on the character at the head of the
-- input buffer, according to the given failure mode.  Note that
-- 'ErrorOnCodingFailure' (and 'RoundtripFailure' on a character that is not
-- an escaped byte) falls through to the final error case.
recoverEncode :: CodingFailureMode -> Buffer Char -> Buffer Word8
              -> IO (Buffer Char, Buffer Word8)
recoverEncode cfm input@Buffer{  bufRaw=iraw, bufL=ir, bufR=_  }
                  output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow } = do
  (c,ir') <- readCharBuf iraw ir
  --puts $ "recoverEncode " ++ show ir ++ " " ++ show ir'
  case cfm of
    IgnoreCodingFailure        -> return (input { bufL=ir' }, output)
    TransliterateCodingFailure -> do
        if c == '?'
         then return (input { bufL=ir' }, output)
         else do
          -- XXX: evil hack! To implement transliteration, we just
          -- poke an ASCII ? into the input buffer and tell the caller
          -- to try and decode again. This is *probably* safe given
          -- current uses of TextEncoding.
          --
          -- The "if" test above ensures we skip if the encoding fails
          -- to deal with the ?, though this should never happen in
          -- practice as all encodings are in fact capable of
          -- reperesenting all ASCII characters.
          _ir' <- writeCharBuf iraw ir '?'
          return (input, output)

        -- This implementation does not work because e.g. UTF-16
        -- requires 2 bytes to encode a simple ASCII value
        --writeWord8Buf oraw ow unrepresentableByte
        --return (input { bufL=ir' }, output { bufR=ow+1 })
    RoundtripFailure | Just x <- unescapeRoundtripCharacterSurrogate c -> do
        -- write back the raw byte that was escaped during decoding
        writeWord8Buf oraw ow x
        return (input { bufL=ir' }, output { bufR=ow+1 })
    _                          -> ioe_encodingError
-- | Thrown when an illegal byte sequence is encountered while decoding and
-- the failure mode provides no recovery.
ioe_decodingError :: IO a
ioe_decodingError = ioException
    (IOError Nothing InvalidArgument "recoverDecode"
        "invalid byte sequence" Nothing Nothing)

-- | Thrown when a character cannot be encoded and the failure mode provides
-- no recovery.
ioe_encodingError :: IO a
ioe_encodingError = ioException
    (IOError Nothing InvalidArgument "recoverEncode"
        "invalid character" Nothing Nothing)
| jwiegley/ghc-release | libraries/base/GHC/IO/Encoding/Failure.hs | gpl-3.0 | 8,387 | 0 | 17 | 1,770 | 1,054 | 606 | 448 | 84 | 5 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Network.Wai.Handler.Warp.Request (
recvRequest
, headerLines
, pauseTimeoutKey
) where
import qualified Control.Concurrent as Conc (yield)
import Control.Exception (throwIO)
import Data.Array ((!))
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Unsafe as SU
import qualified Data.CaseInsensitive as CI
import qualified Data.IORef as I
import qualified Network.HTTP.Types as H
import Network.Socket (SockAddr)
import Network.Wai
import Network.Wai.Handler.Warp.Conduit
import Network.Wai.Handler.Warp.Header
import Network.Wai.Handler.Warp.ReadInt
import Network.Wai.Handler.Warp.RequestHeader
import Network.Wai.Handler.Warp.Settings (Settings, settingsNoParsePath)
import qualified Network.Wai.Handler.Warp.Timeout as Timeout
import Network.Wai.Handler.Warp.Types
import Network.Wai.Internal
import Prelude hiding (lines)
import Control.Monad (when)
import qualified Data.Vault.Lazy as Vault
import System.IO.Unsafe (unsafePerformIO)
----------------------------------------------------------------
-- FIXME come up with good values here
-- | Hard cap (in bytes) on the accumulated size of a request's header
-- block; exceeding it causes 'push' to throw 'OverLargeHeader'.
maxTotalHeaderLength :: Int
maxTotalHeaderLength = 50 * 1024  -- 50 KiB
----------------------------------------------------------------
-- | Receiving a HTTP request from 'Connection' and parsing its header
-- to create 'Request'.
recvRequest :: Settings
            -> Connection
            -> InternalInfo
            -> SockAddr -- ^ Peer's address.
            -> Source -- ^ Where HTTP request comes from.
            -> IO (Request
                  ,Maybe (I.IORef Int)
                  ,IndexedHeader
                  ,IO ByteString) -- ^
            -- 'Request' passed to 'Application',
            -- how many bytes remain to be consumed, if known
            -- 'IndexedHeader' of HTTP request for internal use,
            -- Body producing action used for flushing the request body
recvRequest settings conn ii addr src = do
    -- read the request line and all header lines, then parse them
    hdrlines <- headerLines src
    (method, unparsedPath, path, query, httpversion, hdr) <- parseHeaderLines hdrlines
    let idxhdr = indexRequestHeader hdr
        expect = idxhdr ! idxExpect
        cl     = idxhdr ! idxContentLength
        te     = idxhdr ! idxTransferEncoding
        handle100Continue = handleExpect conn httpversion expect
    (rbody, remainingRef, bodyLength) <- bodyAndSource src cl te
    -- body producing function which will produce '100-continue', if needed
    rbody' <- timeoutBody remainingRef th rbody handle100Continue
    -- body producing function which will never produce 100-continue
    rbodyFlush <- timeoutBody remainingRef th rbody (return ())
    let req = Request {
            requestMethod = method
          , httpVersion = httpversion
          , pathInfo = H.decodePathSegments path
          , rawPathInfo = if settingsNoParsePath settings then unparsedPath else path
          , rawQueryString = query
          , queryString = H.parseQuery query
          , requestHeaders = hdr
          , isSecure = False
          , remoteHost = addr
          , requestBody = rbody'
            -- expose an action in the vault that lets the application pause
            -- the connection timeout for this request
          , vault = Vault.insert pauseTimeoutKey
                                 (Timeout.pause th)
                                 Vault.empty
          , requestBodyLength = bodyLength
          , requestHeaderHost = idxhdr ! idxHost
          , requestHeaderRange = idxhdr ! idxRange
          }
    return (req, remainingRef, idxhdr, rbodyFlush)
  where
    th = threadHandle ii
----------------------------------------------------------------
-- | Read from the source until the request's header lines have been
-- received, throwing 'ConnectionClosedByPeer' if the peer closes the
-- connection before sending any data.
headerLines :: Source -> IO [ByteString]
headerLines src = readSource src >>= start
  where
    start chunk
      | S.null chunk = throwIO ConnectionClosedByPeer
      | otherwise    = push src (THStatus 0 id id) chunk
----------------------------------------------------------------
-- | Respond to an @Expect: 100-continue@ header by sending the interim
-- response appropriate for the request's HTTP version; any other (or
-- missing) Expect value is ignored.
handleExpect :: Connection
             -> H.HttpVersion
             -> Maybe HeaderValue
             -> IO ()
handleExpect conn ver (Just "100-continue") = do
    connSendAll conn interim
    Conc.yield
  where
    interim = if ver == H.http11
                  then "HTTP/1.1 100 Continue\r\n\r\n"
                  else "HTTP/1.0 100 Continue\r\n\r\n"
handleExpect _ _ _ = return ()
----------------------------------------------------------------
-- | Build the request-body reader from the Content-Length and
-- Transfer-Encoding headers: chunked bodies get a chunked decoder, all
-- other bodies get a length-bounded reader (length 0 when the
-- Content-Length header is absent).
bodyAndSource :: Source
              -> Maybe HeaderValue -- ^ content length
              -> Maybe HeaderValue -- ^ transfer-encoding
              -> IO (IO ByteString
                    ,Maybe (I.IORef Int)
                    ,RequestBodyLength
                    )
bodyAndSource src cl te
  | chunked = do
      csrc <- mkCSource src
      return (readCSource csrc, Nothing, ChunkedBody)
  | otherwise = do
      -- 'remaining' tracks how many body bytes are still unread
      isrc@(ISource _ remaining) <- mkISource src len
      return (readISource isrc, Just remaining, bodyLen)
  where
    len = toLength cl
    bodyLen = KnownLength $ fromIntegral len
    chunked = isChunked te
-- | Interpret an optional Content-Length header value; absent means 0.
toLength :: Maybe HeaderValue -> Int
toLength = maybe 0 readInt
-- | Does the Transfer-Encoding header (case-insensitively) say "chunked"?
isChunked :: Maybe HeaderValue -> Bool
isChunked = maybe False ((== "chunked") . CI.foldCase)
----------------------------------------------------------------
-- | Wrap a body-reading action so that (a) the given 100-continue action
-- runs before the very first read, and (b) the connection timeout is
-- resumed while body data is being received and paused again once the body
-- is exhausted (defending against slowloris-style attacks).
timeoutBody :: Maybe (I.IORef Int) -- ^ remaining
            -> Timeout.Handle
            -> IO ByteString
            -> IO ()
            -> IO (IO ByteString)
timeoutBody remainingRef timeoutHandle rbody handle100Continue = do
    isFirstRef <- I.newIORef True

    -- the body is exhausted when a read returns an empty chunk, or (when
    -- the length is known) when no bytes remain
    let checkEmpty =
            case remainingRef of
                Nothing -> return . S.null
                Just ref -> \bs -> if S.null bs
                    then return True
                    else do
                        x <- I.readIORef ref
                        return $! x <= 0

    return $ do
        isFirst <- I.readIORef isFirstRef

        when isFirst $ do
            -- Only check if we need to produce the 100 Continue status
            -- when asking for the first chunk of the body
            handle100Continue
            -- Timeout handling was paused after receiving the full request
            -- headers. Now we need to resume it to avoid a slowloris
            -- attack during request body sending.
            Timeout.resume timeoutHandle
            I.writeIORef isFirstRef False

        bs <- rbody

        -- As soon as we finish receiving the request body, whether
        -- because the application is not interested in more bytes, or
        -- because there is no more data available, pause the timeout
        -- handler again.
        isEmpty <- checkEmpty bs
        when isEmpty (Timeout.pause timeoutHandle)

        return bs
----------------------------------------------------------------
-- difference-list style builders for a ByteString and a list of them
type BSEndo = ByteString -> ByteString
type BSEndoList = [ByteString] -> [ByteString]

-- | Parser state threaded through 'push' while splitting the incoming
-- header block into lines.
data THStatus = THStatus
    {-# UNPACK #-} !Int -- running total byte count
    BSEndoList -- previously parsed lines
    BSEndo -- bytestrings to be prepended
----------------------------------------------------------------
{- FIXME
close :: Sink ByteString IO a
close = throwIO IncompleteHeaders
-}
-- | Split the incoming byte stream into header lines, reading more data
-- from the source as needed.  Folded (multiline) headers are joined into a
-- single line, and a trailing CR before each LF is stripped via 'checkCR'.
--
-- Throws 'OverLargeHeader' once more than 'maxTotalHeaderLength' bytes have
-- accumulated, and 'IncompleteHeaders' if the stream ends before the blank
-- line terminating the header block.
push :: Source -> THStatus -> ByteString -> IO [ByteString]
push src (THStatus len lines prepend) bs'
        -- Too many bytes
        | len > maxTotalHeaderLength = throwIO OverLargeHeader
        | otherwise = push' mnl
  where
    bs = prepend bs'
    bsLen = S.length bs
    -- position of the first LF (if any), paired with a flag telling whether
    -- the following line is a continuation of a multiline header
    mnl = do
        nl <- S.elemIndex 10 bs
        -- check if there are two more bytes in the bs
        -- if so, see if the second of those is a horizontal space
        if bsLen > nl + 1 then
            let c = S.index bs (nl + 1)
                b = case nl of
                      0 -> True
                      1 -> S.index bs 0 == 13
                      _ -> False
            in Just (nl, not b && (c == 32 || c == 9))
          else
            Just (nl, False)

    {-# INLINE push' #-}
    push' :: Maybe (Int, Bool) -> IO [ByteString]
    -- No newline find in this chunk. Add it to the prepend,
    -- update the length, and continue processing.
    push' Nothing = do
        bst <- readSource' src
        when (S.null bst) $ throwIO IncompleteHeaders
        push src status bst
      where
        len' = len + bsLen
        prepend' = S.append bs
        status = THStatus len' lines prepend'
    -- Found a newline, but next line continues as a multiline header
    push' (Just (end, True)) = push src status rest
      where
        rest = S.drop (end + 1) bs
        prepend' = S.append (SU.unsafeTake (checkCR bs end) bs)
        len' = len + end
        status = THStatus len' lines prepend'
    -- Found a newline at position end.
    push' (Just (end, False))
      -- leftover
      | S.null line = do
            when (start < bsLen) $ leftoverSource src (SU.unsafeDrop start bs)
            return (lines [])
      -- more headers
      | otherwise = let len' = len + start
                        lines' = lines . (line:)
                        status = THStatus len' lines' id
                    in if start < bsLen then
                           -- more bytes in this chunk, push again
                           let bs'' = SU.unsafeDrop start bs
                            in push src status bs''
                       else do
                           -- no more bytes in this chunk, ask for more
                           bst <- readSource' src
                           -- BUG FIX: test the freshly read chunk (bst), not
                           -- the current one (bs).  'bs' always contains the
                           -- newline we just found and is therefore never
                           -- empty here, so the old test was dead code and
                           -- end-of-stream detection was delayed by an extra
                           -- read through the 'push'' Nothing branch.
                           when (S.null bst) $ throwIO IncompleteHeaders
                           push src status bst
      where
        start = end + 1 -- start of next chunk
        line = SU.unsafeTake (checkCR bs end) bs
-- | If the byte just before position @pos@ is a CR, return that byte's
-- index (so the CR is excluded), otherwise return @pos@ unchanged.
{-# INLINE checkCR #-}
checkCR :: ByteString -> Int -> Int
checkCR bs pos
    | pos > 0 && S.index bs prev == 13 = prev -- 13 is CR
    | otherwise                        = pos
  where
    !prev = pos - 1
-- | Vault key under which 'recvRequest' stores an action that pauses the
-- connection timeout for the current request.
pauseTimeoutKey :: Vault.Key (IO ())
pauseTimeoutKey = unsafePerformIO Vault.newKey
-- NOINLINE keeps this a single global key: inlining the 'unsafePerformIO'
-- could mint a fresh key at every use site.
{-# NOINLINE pauseTimeoutKey #-}
| dylex/wai | warp/Network/Wai/Handler/Warp/Request.hs | mit | 10,074 | 0 | 19 | 3,148 | 2,172 | 1,150 | 1,022 | 195 | 7 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.HTools.Container (testHTools_Container) where
import Test.QuickCheck
import Data.Maybe
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHTools
import Test.Ganeti.HTools.Node (genNode)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Node as Node
-- we silence the following due to hlint bug fixed in later versions
{-# ANN prop_addTwo "HLint: ignore Avoid lambda" #-}
-- | 'Container.addTwo' must be commutative in its two insertions and
-- idempotent when repeated with the same arguments.
prop_addTwo :: [Container.Key] -> Int -> Int -> Bool
prop_addTwo cdata i1 i2 =
  insert2 i1 i2 base == insert2 i2 i1 base &&
  insert2 i1 i2 base == insert2 i1 i2 (insert2 i1 i2 base)
  where -- container seeded with every key mapped to itself
        base = foldl (\c x -> Container.add x x c) Container.empty cdata
        -- insert both arguments, each keyed by itself
        insert2 x1 x2 = Container.addTwo x1 x1 x2 x2
-- | A one-node cluster must report that node's name via 'Container.nameOf'.
prop_nameOf :: Node.Node -> Property
prop_nameOf node =
  let nl = makeSmallCluster node 1
  in case Container.elems nl of
       [only] -> Container.nameOf nl (Node.idx only) ==? Node.name only
       []     -> failTest "makeSmallCluster 1 returned empty cluster?"
       _      -> failTest "makeSmallCluster 1 returned >1 node?"
-- | We test that in a cluster, given a random node, we can find it by
-- its name and alias, as long as all names and aliases are unique,
-- and that we fail to find a non-existing name.
prop_findByName :: Property
prop_findByName =
  forAll (genNode (Just 1) Nothing) $ \node ->
  forAll (choose (1, 20)) $ \ cnt ->
  forAll (choose (0, cnt - 1)) $ \ fidx ->
  forAll (genUniquesList (cnt * 2) arbitrary) $ \ allnames ->
  forAll (arbitrary `suchThat` (`notElem` allnames)) $ \ othername ->
  -- the first cnt generated names become node names, the rest aliases
  let names = zip (take cnt allnames) (drop cnt allnames)
      nl = makeSmallCluster node cnt
      nodes = Container.elems nl
      -- rewrite each node with a unique (name, alias) pair, keyed by index
      nodes' = map (\((name, alias), nn) -> (Node.idx nn,
                                             nn { Node.name = name,
                                                  Node.alias = alias }))
               $ zip names nodes
      nl' = Container.fromList nodes'
      target = snd (nodes' !! fidx)
  in conjoin
       [ Container.findByName nl' (Node.name target) ==? Just target
       , Container.findByName nl' (Node.alias target) ==? Just target
       , counterexample "Found non-existing name"
           (isNothing (Container.findByName nl' othername))
       ]
-- Register the properties above under the "HTools/Container" test group.
testSuite "HTools/Container"
            [ 'prop_addTwo
            , 'prop_nameOf
            , 'prop_findByName
            ]
| apyrgio/ganeti | test/hs/Test/Ganeti/HTools/Container.hs | bsd-2-clause | 3,834 | 0 | 25 | 841 | 708 | 380 | 328 | 50 | 3 |
module TestUtil where
import qualified Turnip.AST as AST
import Turnip.Parser (parseLua)
import Turnip.Eval.Types (Value)
-- | Extract the 'Right' value of an 'Either', aborting with the shown
-- 'Left' value on failure.
successful :: Show a => Either a p -> p
successful = either (error . show) id
-- | Expect a failed call: wrap the error in a singleton list, or abort if
-- the call actually succeeded.
failure :: Either Value [Value] -> [Value]
failure (Left err) = [err]
failure (Right x)  = error $ "The call succeeded but failure expected, return: " ++ show x
-- | Parse a Lua source string, aborting on a parse error.
parse :: String -> AST.Block
parse src = successful (parseLua src)
| bananu7/Turnip | Test/TestUtil.hs | mit | 467 | 0 | 7 | 96 | 171 | 92 | 79 | 12 | 1 |
module SFML.Graphics.SFViewable
where
import SFML.Graphics.Rect
import SFML.Graphics.Types
-- | Common interface for render targets whose visible region is controlled
-- by a 'View'.
class SFViewable a where
    
    -- | Change the target's current active view.
    setView :: a -> View -> IO ()
    
    -- | Get the target's current active view.
    getView :: a -> IO View
    
    -- | Get the target's default view.
    getDefaultView :: a -> IO View
    
    -- | Get the viewport of a view applied to this target, expressed in pixels in the current target.
    getViewport :: a -> View -> IO IntRect
| SFML-haskell/SFML | src/SFML/Graphics/SFViewable.hs | mit | 521 | 0 | 10 | 138 | 90 | 50 | 40 | 8 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import Web.Scotty
-- Start a Scotty web server on port 3000 that serves a greeting at the root.
main = scotty 3000 $
  get "/" $
    html "Hello World!"
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Network.Octohat.Types ( Member(..)
, MemberWithKey(..)
, Team(..)
, TeamPermission(..)
, Repo(..)
, Organization(..)
, BearerToken(..)
, OrganizationName(..)
, TeamName(..)
, StatusInTeam(..)
, EmptyBody(..)
, DidDelete(..)
, PublicKey(..)
, PublicKeyFingerprint(..)
, TeamCreateRequest(..)
, GitHubReturnStatus(..)
, DidAddKey(..)
, AddPublicKeyRequest(..)
, Links(..)
, Pagination(..)
, runGitHub
, runGitHub'
, GitHub) where
import Control.Applicative
import Control.Monad.Reader (ReaderT(..))
import Control.Monad.State (StateT(..), evalStateT)
#if MIN_VERSION_errors(2,0,0)
import Control.Monad.Trans.Except (ExceptT, runExceptT)
#else
import Control.Monad.Trans.Either
#endif
import Data.Aeson
import Data.Aeson.TH
import Data.Char (toLower)
import Network.HTTP.Client
import Network.Wreq.Types
import System.Environment.Compat (lookupEnv)
import qualified Data.HashMap.Strict as HS
import qualified Data.Text as T
-- | Represents a user in GitHub. Contains no more than login and user ID
data Member =
  Member { memberLogin :: T.Text
         , memberId :: Integer
         } deriving (Show, Eq)

-- | Represents the different permissions that a team can have in an organisation.
data TeamPermission = OwnerAccess -- ^ Default team of owners.
                    | PullAccess -- ^ This team will be able to view and clone its
                                 -- repositories.
                    | PushAccess -- ^ This team will be able to read its
                                 -- repositories, as well as push to them.
                    | AdminAccess -- ^ This team will be able to push/pull to its
                                  -- repositories, as well as add other
                                  -- collaborators to them.
                      deriving (Show,Eq)

-- | Represents a team in GitHub. Contains the team's ID, the team's name and an optional description
data Team =
  Team { teamId :: Integer
       , teamName :: T.Text
       , teamDescription :: Maybe T.Text
       , teamPermission :: TeamPermission
       } deriving (Show, Eq)

-- | Represents a request to create a new team within an organization. The rest of the parameters
-- are passed in the URL. Refer to <https://developer.github.com/v3/orgs/teams/#create-team>
data TeamCreateRequest =
  TeamCreateRequest { newTeamName :: T.Text
                    , newTeamDescription :: T.Text
                    , newTeamPermission :: TeamPermission
                    } deriving (Show, Eq)

-- | Represents an organisation in GitHub. Only has name and description
data Organization =
  Organization
  { orgLogin :: T.Text
  , orgDescription :: Maybe T.Text
  } deriving (Show, Eq)

-- | Represents a repo in GitHub. Contains the Name, Description, and Private status
data Repo =
  Repo { repoName :: T.Text
       , repoDescription :: Maybe T.Text
       , repoPrivate :: Bool
       } deriving (Show, Eq)
-- | Represents a GitHub user with its public keys and fingerprints. A GitHub user might or might not
-- have any public keys
data MemberWithKey =
  MemberWithKey { member :: Member
                , memberKey :: [PublicKey]
                , memberKeyFingerprint :: [PublicKeyFingerprint]
                } deriving (Show, Eq)

-- | Represents a PublicKey within GitHub. It includes its ID and the public key encoded as base 64
data PublicKey =
  PublicKey { publicKeyId :: Integer
            , publicKey :: T.Text
            } deriving (Show, Eq)

-- | Represents a Fingerprint. The `fingerprintId` field should match the fingerprint's public key ID
-- within GitHub
data PublicKeyFingerprint =
  PublicKeyFingerprint { fingerprintId :: Integer
                       , publicKeyFingerprint :: T.Text
                       } deriving (Show, Eq)

-- | Some Wreq functions expect a body, but often GitHub's API will request no body. The PUT verb
-- and its implementation in Wreq is an example of this.
data EmptyBody = EmptyBody deriving (Show, Eq)

-- | When adding a user to a team GitHub will add it immediately if the user already belongs
-- to the organization the team is in. Otherwise it will send an email for the user to accept the
-- request to join the team. Functions related to adding or removing teams will return either Active
-- or Pending correspondingly.
data StatusInTeam = Active | Pending deriving (Show, Eq)

-- | Sum type to represent the success or failure of deletion of a resource within GitHub's API
data DidDelete = Deleted | NotDeleted deriving (Show, Eq)
-- | Parse a public key from a GitHub key object carrying @id@ and @key@.
instance FromJSON PublicKey where
  parseJSON (Object o) = PublicKey <$> o .: "id" <*> o .: "key"
  parseJSON _ = fail "Could not find public keys in document"

-- | Whether adding a public key succeeded.
data DidAddKey = KeyAdded | KeyNotAdded

-- | Request body used when adding a public key (key material plus a title).
data AddPublicKeyRequest =
  AddPublicKeyRequest {
    addPublicKeyRequestKey :: T.Text,
    addPublicKeyRequestTitle :: T.Text
  }
-- | Parse a membership document: the @state@ key decides Active/Pending.
instance FromJSON StatusInTeam where
  parseJSON (Object o) =
    case HS.lookup "state" o of
      Just "active" -> pure Active
      Just "pending" -> pure Pending
      Just _ -> fail "\"state\" key not \"active\" or \"pending\""
      -- No "state" key: surface GitHub's "message" field as the parse error.
      Nothing -> (fail . maybe "No error message from GitHub" show) (HS.lookup "message" o)
  parseJSON _ = fail "Expected a membership document, got something else"

-- | Parse the permission strings used by GitHub's team API.
instance FromJSON TeamPermission where
  parseJSON (String p) =
    case p of
      "pull" -> pure PullAccess
      "push" -> pure PushAccess
      "admin" -> pure AdminAccess
      "owner" -> pure OwnerAccess
      _ -> fail "Expected a valid team permission ?"
  parseJSON _ = fail "Expected a team permssion, got something else"
-- | Serialise a 'TeamPermission' to the string GitHub's API expects.
instance ToJSON TeamPermission where
  toJSON PullAccess = String "pull"
  toJSON PushAccess = String "push"
  toJSON AdminAccess = String "admin"
  toJSON OwnerAccess = String "owner"
-- Derive To/FromJSON instances. Field labels are lowercased and then the
-- record prefix is dropped, e.g. @memberLogin@ becomes the JSON key @login@.
$(deriveJSON defaultOptions { fieldLabelModifier = drop 6 . map toLower } ''Member)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 4 . map toLower } ''Team)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 4 . map toLower } ''Repo)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 3 . map toLower } ''Organization)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 7 . map toLower } ''TeamCreateRequest)
$(deriveJSON defaultOptions { fieldLabelModifier = drop 19 . map toLower } ''AddPublicKeyRequest)
-- | Error codes GitHub might return when attempting to use an API endpoint
data GitHubReturnStatus = InvalidJSON -- ^ GitHub could not parse the JSON document sent
                        | ValidationFailed -- ^ Validation failed, an example of this error
                                           --   is trying to create teams with the same name
                                           --   within one organization
                        | InternalError -- ^ In case GitHub returns 500 Internal Server Error
                                        --   to some request
                        | NotFound -- ^ When a resource has not been found. It does not
                                   --   imply the resource does not exist
                        | NotAllowed -- ^ Usually returned after GitHub replies with 403 Forbidden.
                                     --   The user might not have permission to access/modify
                                     --   that resource
                        | AllOk -- ^ This should never be returned
                        | RequiresAuthentication -- ^ Accesing this resource requires authentication
                        | UnexpectedJSON String -- ^ This library has failed to fulfill its purpose and could not
                                                --   handle GitHub's response
                          deriving (Show, Eq)
-- | Instance that does not add anything to the body or headers of a PUT request
instance Putable EmptyBody where
  putPayload EmptyBody req = return $ req {requestBody = RequestBodyLBS ""}

-- | Send a team-creation request as a JSON-encoded POST body.
instance Postable TeamCreateRequest where
  postPayload createRequest req = return $ req { requestBody = RequestBodyLBS (encode createRequest)}

-- | Send an add-public-key request as a JSON-encoded POST body.
instance Postable AddPublicKeyRequest where
  postPayload createRequest req = return $ req { requestBody = RequestBodyLBS (encode createRequest)}
-- | GitHub's OAuth 2.0 bearer token. This is simply added in an
-- Authorization header
newtype BearerToken = BearerToken { unBearerToken :: T.Text } deriving Show

-- | OrganizationName is added in order to have type safety in functions where the
-- Organization name and the Team name are both strings and may be confused
newtype OrganizationName = OrganizationName { unOrganizationName :: T.Text } deriving Show

-- | TeamName is added in order to have type safety in functions where the
-- Team name and the Organization name are both strings and may be confused
newtype TeamName = TeamName { unTeamName :: T.Text } deriving Show

-- | Links are used in the Pagination object
-- NOTE(review): the 'Link' type is not declared in this module; presumably
-- it comes from one of the networking dependencies — confirm.
data Links = Links { linkNext :: Maybe Link, linkLast :: Maybe Link
                   , linkFirst :: Maybe Link, linkPrev :: Maybe Link } deriving Show

-- | Pagination options that can be set, including the page number, and the per_page
data Pagination = Pagination { perPage :: Int, page :: Int, links :: Links, recurse :: Bool } deriving Show
-- | Default pagination: 30 items per page, starting at page 1, no known
-- links, recursive fetching enabled.
defPagination :: Pagination
defPagination =
  Pagination { perPage = 30
             , page = 1
             , links = Links Nothing Nothing Nothing Nothing
             , recurse = True
             }
-- | The monad transformer where all operations run. Supports initial configuration
-- through a Reader monad and the possibility of failure through Either
-- (ExceptT replaced EitherT in errors >= 2.0, hence the CPP split).
#if MIN_VERSION_errors(2,0,0)
type GitHub = ExceptT GitHubReturnStatus (ReaderT BearerToken (StateT Pagination IO))
#else
type GitHub = EitherT GitHubReturnStatus (ReaderT BearerToken (StateT Pagination IO))
#endif
-- | Executes a computation built within the GitHub monad returning an Either within
-- the IO data type using the provided token
runGitHub' :: GitHub a -> BearerToken -> IO (Either GitHubReturnStatus a)
-- Pagination state always starts from 'defPagination'.
#if MIN_VERSION_errors(2,0,0)
runGitHub' comp token = evalStateT (runReaderT (runExceptT comp) token) defPagination
#else
runGitHub' comp token = evalStateT (runReaderT (runEitherT comp) token) defPagination
#endif
-- | Executes a computation built within the GitHub monad returning an Either within
-- the IO data type. Reads an API token from an environment variable named GITHUB_TOKEN
runGitHub :: GitHub a -> IO (Either GitHubReturnStatus a)
runGitHub comp =
  lookupEnv "GITHUB_TOKEN" >>=
    maybe (fail "Couldn't find GITHUB_TOKEN in environment")
          (runGitHub' comp . BearerToken . T.pack)
| stackbuilders/octohat | src/Network/Octohat/Types.hs | mit | 11,770 | 0 | 14 | 3,727 | 1,764 | 1,011 | 753 | 154 | 2 |
-- | Verbatim re-export of "Data.Groupoid" under the @Rebase@ namespace.
module Rebase.Data.Groupoid
(
  module Data.Groupoid
)
where

import Data.Groupoid
| nikita-volkov/rebase | library/Rebase/Data/Groupoid.hs | mit | 83 | 0 | 5 | 12 | 20 | 13 | 7 | 4 | 0 |
-- | Verbatim re-export of "GHC.Storable" under the @Rebase@ namespace.
module Rebase.GHC.Storable
(
  module GHC.Storable
)
where

import GHC.Storable
| nikita-volkov/rebase | library/Rebase/GHC/Storable.hs | mit | 80 | 0 | 5 | 12 | 20 | 13 | 7 | 4 | 0 |
{-# LANGUAGE Arrows #-}
module Main where
--------------------
-- Global Imports --
import Graphics.Rendering.OpenGL hiding (position)
import Graphics.UI.GLFW as GLFW
import Prelude hiding ((.))
import Control.Wire
import FRP.Netwire
import Data.IORef
import Linear.V2
----------
-- Code --
{-|
  The half-size of the quad (so named @s@): rendering draws the four
  corners offset by @s@ from the centre:

  (x - s, y - s)
  (x + s, y - s)
  (x + s, y + s)
  (x - s, y + s)
-}
s :: Float
s = 0.05

{-|
  The global accelerational (and deceleration) speed.
-}
speed :: Float
speed = 4.0

{-|
  The minimum speed the quad can take before becoming completely stationary
  again. This is used because otherwise, due to floating point inaccuracies,
  you would have a quad shifting back and forth.
-}
minSpeed :: Float
minSpeed = 0.01
{-|
  The initial position the quad takes: the centre of the screen. The bounds
  are:

  x min: -1 - s, x max: 1 + s
  y min: -1 - s, y max: 1 + s

  where s is the global half-size defined above.
-}
initPos :: V2 Float
initPos = V2 0 0
{-|
  Checking if a given key is held down. The wire blocks when the key is not
  held down, and does not block when the key is held down.
-}
isKeyDown :: (Monoid e, Enum k) => k -> Wire s e IO a a
isKeyDown k =
  mkGen_ $ \a -> do
    state <- getKey k
    return $ case state of
      Release -> Left mempty -- inhibit while the key is up
      Press -> Right a       -- pass the input through unchanged
{-|
  Lift a pure function over the input into a wire that never inhibits. A
  shorthand for 'mkPure_' without having to spell out the 'Right'.
-}
withInput :: (a -> b) -> Wire s e m a b
withInput fn = mkPure_ (Right . fn)
{-|
  Deceleration for a single velocity component: anything moving faster than
  'minSpeed' in either direction is pushed back toward zero at the global
  'speed'; slower components are left unchanged.
-}
decel :: Float -> Float
decel v
  | v < (-minSpeed) = speed
  | v > minSpeed    = negate speed
  | otherwise       = v
{-|
  Generating two different directional acceleration / deceleration functions.
  The alternatives are tried in order: both keys down decays the current
  velocity, a single key accelerates in its direction, and neither key
  down also decays the velocity.
-}
dAcceleration :: (Enum k, Monoid e) => k -> k -> Wire s e IO Float Float
dAcceleration k1 k2 = withInput decel . isKeyDown k1 . isKeyDown k2
                  <|> pure ( speed) . isKeyDown k1
                  <|> pure (-speed) . isKeyDown k2
                  <|> withInput decel
{-|
  The velocity of the quad, integrated from the per-axis acceleration pair.
  Components smaller in magnitude than 'minSpeed' are snapped to zero so the
  quad comes to a genuine stop instead of jittering.
-}
velocity :: (HasTime t s, Monoid e) => Wire s e IO (Float, Float) (V2 Float)
velocity = withInput snapSmall . integral 0 . withInput (uncurry V2)
  where
    snapSmall :: V2 Float -> V2 Float
    snapSmall = fmap component
      where
        component :: Float -> Float
        component c
          | c > (-minSpeed) && c < minSpeed = 0
          | otherwise                       = c
{-|
  The current position of the quad, starting at 'initPos'. It returns the
  integral of its input, a @'V2'@ @'Float'@, which should be the current
  velocity of the block.
-}
position :: (HasTime t s, Monoid e) => Wire s e IO (V2 Float) (V2 Float)
position = integral initPos
{-|
  The final position of the quad. It uses Arrow notation, specifically the rec
  keyword, to recursively define the application of the network. To get the
  delta x, you need to know the current x velocity, and to get the current x
  velocity you need to know the delta x. Conveniently, you can perform this
  sort of recursive definition very easily using Arrow notation, as displayed
  below.
-}
fPos :: HasTime t s => Wire s () IO a (V2 Float)
fPos = proc _ -> do
  -- rec feeds the velocity components back into the acceleration wires
  rec x <- dAcceleration (CharKey 'D') (CharKey 'A') -< vx
      y <- dAcceleration (CharKey 'W') (CharKey 'S') -< vy
      v@(V2 vx vy) <- velocity -< (x, y)
  p <- position -< v
  returnA -< p
{-|
  Actually running the network, and performing OpenGL calls on the result.
  Steps the session and wire once per frame, draws the quad at the wire's
  position, and recurses until the close flag is set or the wire inhibits.
-}
runNetwork' :: IORef Bool -> Session IO s -> Wire s e IO a (V2 Float) -> IO ()
runNetwork' closedRef session wire = do
  closed <- readIORef closedRef
  if closed
    then return ()
    else do
      (st , session') <- stepSession session
      (dw', wire' ) <- stepWire wire st $ Right undefined
      case dw' of
        -- inhibited wire: stop the loop
        Left _ -> return ()
        Right (V2 x y) -> do
          clear [ColorBuffer]
          -- draw the quad as the four corners offset by the half-size s
          renderPrimitive Quads $
            mapM_ (\(V2 rx ry) -> vertex $ Vertex2 (realToFrac rx :: GLfloat)
                                                   (realToFrac ry :: GLfloat))
              [ V2 (x - s) (y - s)
              , V2 (x + s) (y - s)
              , V2 (x + s) (y + s)
              , V2 (x - s) (y + s)
              ]
          swapBuffers
          runNetwork' closedRef session' wire'
{-|
  Simply a wrapper function around @'runNetwork''@, starting from a fresh
  'clockSession_' and the 'fPos' wire. Modeled after how I design my API for
  network modules that I create.
-}
runNetwork :: IORef Bool -> IO ()
runNetwork closedRef =
  runNetwork' closedRef clockSession_ fPos
{-|
  The entry point to the program. It handles creating the GLFW window and the
  OpenGL context. It then passes the main thread over to the @'runNetwork'@
  function where the program begins to execute the Netwire network.
-}
main :: IO ()
main = do
  initialize
  openWindow (Size 640 480) [DisplayRGBBits 8 8 8, DisplayAlphaBits 8, DisplayDepthBits 24] Window
  windowTitle $= "netwire-tutorial"
  closedRef <- newIORef False
  -- flag the render loop to stop on window close
  windowCloseCallback $= do
    writeIORef closedRef True
    return True
  runNetwork closedRef
  closeWindow
-- file: ch04/InteractWith.hs
import System.Environment (getArgs)
-- | Read 'inputFile', apply 'function' to its entire contents, and write
-- the result to 'outputFile'.
--
-- NOTE(review): 'readFile' is lazy I/O, so the input handle stays open
-- until 'writeFile' forces the contents; fine for this tutorial-sized tool.
interactWith :: (String -> String) -> FilePath -> FilePath -> IO ()
interactWith function inputFile outputFile = do
  input <- readFile inputFile
  writeFile outputFile (function input)
-- | Entry point: expects exactly two command-line arguments (the input and
-- output file names) and applies 'myFunction' to the input file's contents.
main = mainWith myFunction
  where mainWith function = do
          args <- getArgs
          case args of
            [input,output] -> interactWith function input output
            _ -> putStrLn "error: exactly two arguments needed"
-- replace "id" with the name of our function below
-- | The transformation applied to the input file; currently the identity.
myFunction = id
| tamasgal/haskell_exercises | Real_World_Haskell/ch04/InteractWith.hs | mit | 529 | 1 | 11 | 153 | 116 | 56 | 60 | 11 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-missing-fields #-}
-- | This module provides utilities for creating backends. Regular users do not
-- need to use this module.
module Database.Persist.TH
( -- * Parse entity defs
persistWith
, persistUpperCase
, persistLowerCase
, persistFileWith
-- * Turn @EntityDef@s into types
, mkPersist
, MkPersistSettings
, mpsBackend
, mpsGeneric
, mpsPrefixFields
, mkPersistSettings
, sqlSettings
, sqlOnlySettings
-- * Various other TH functions
, mkMigrate
, mkSave
, mkDeleteCascade
, share
, derivePersistField
, persistFieldFromEntity
) where
import Prelude hiding ((++), take, concat, splitAt)
import Database.Persist
import Database.Persist.Sql (Migration, SqlPersistT, migrate, SqlBackend, PersistFieldSql)
import Database.Persist.Quasi
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Data.Char (toLower, toUpper)
import Control.Monad (forM, (<=<), mzero)
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.IO.Class (MonadIO)
import qualified System.IO as SIO
import Data.Text (pack, Text, append, unpack, concat, uncons, cons)
import qualified Data.Text.IO as TIO
import Data.List (foldl', find)
import Data.Maybe (isJust)
import Data.Monoid (mappend, mconcat)
import qualified Data.Map as M
import Data.Aeson
( ToJSON (toJSON), FromJSON (parseJSON), (.=), object
, Value (Object), (.:), (.:?)
)
import Control.Applicative (pure, (<*>), liftA2)
import Control.Monad.Logger (MonadLogger)
import Database.Persist.Sql (sqlType)
import Language.Haskell.TH.Instances ()
-- | Converts a quasi-quoted syntax into a list of entity definitions, to be
-- used as input to the template haskell generation code (mkPersist).
persistWith :: PersistSettings -> QuasiQuoter
persistWith ps = QuasiQuoter
    { quoteExp = parseSqlType ps . pack
    }

-- | Apply 'persistWith' to 'upperCaseSettings'.
persistUpperCase :: QuasiQuoter
persistUpperCase = persistWith upperCaseSettings

-- | Apply 'persistWith' to 'lowerCaseSettings'.
persistLowerCase :: QuasiQuoter
persistLowerCase = persistWith lowerCaseSettings

-- | Same as 'persistWith', but uses an external file instead of a
-- quasiquotation.
persistFileWith :: PersistSettings -> FilePath -> Q Exp
persistFileWith ps fp = do
#ifdef GHC_7_4
    qAddDependentFile fp
#endif
    h <- qRunIO $ SIO.openFile fp SIO.ReadMode
    -- read the definitions file as UTF-8, tolerating a byte-order mark
    qRunIO $ SIO.hSetEncoding h SIO.utf8_bom
    s <- qRunIO $ TIO.hGetContents h
    parseSqlType ps s
-- | Parse entity definitions and lift them to an expression, annotating
-- each field with a (possibly delayed) SQL type.
parseSqlType :: PersistSettings -> Text -> Q Exp
parseSqlType ps s =
    lift $ map (getSqlType defsOrig) defsOrig
  where
    defsOrig = parse ps s

-- | Compute the SQL type annotation for every field of one entity, given
-- the full list of entities (needed to resolve embedded entities).
getSqlType :: [EntityDef ()] -> EntityDef () -> EntityDef DelayedSqlTypeExp
getSqlType allEntities ent =
    ent
        { entityFields = map go $ entityFields ent
        }
  where
    go :: FieldDef () -> FieldDef DelayedSqlTypeExp
    go field = do
        field
            { fieldSqlType = DSTE final
            , fieldEmbedded = mEmbedded (fieldType field)
            }
      where
        -- In the case of embedding, there won't be any datatype created yet.
        -- We just use SqlString, as the data will be serialized to JSON.
        final
            | isJust (mEmbedded (fieldType field)) = SqlString'
            | isReference = SqlInt64'
            | otherwise =
                case fieldType field of
                    -- In the case of lists, we always serialize to a string
                    -- value (via JSON).
                    --
                    -- Normally, this would be determined automatically by
                    -- SqlTypeExp. However, there's one corner case: if there's
                    -- a list of entity IDs, the datatype for the ID has not
                    -- yet been created, so the compiler will fail. This extra
                    -- clause works around this limitation.
                    FTList _ -> SqlString'
                    _ -> SqlTypeExp st
        -- Resolve a field type to the entity it embeds, if any.
        mEmbedded (FTTypeCon Just{} _) = Nothing
        mEmbedded (FTTypeCon Nothing n) = let name = HaskellName n in
            find ((name ==) . entityHaskell) allEntities
        mEmbedded (FTList x) = mEmbedded x
        mEmbedded (FTApp x y) = maybe (mEmbedded y) Just (mEmbedded x)
        -- A field whose type is SomeEntityId references another row.
        isReference =
            case stripId $ fieldType field of
                Just{} -> True
                Nothing -> False
        typ = ftToType $ fieldType field
        mtyp = (ConT ''Maybe `AppT` typ)
        typedNothing = SigE (ConE 'Nothing) mtyp
        -- Expression "sqlType (Nothing :: Maybe typ)", evaluated at splice time.
        st = VarE 'sqlType `AppE` typedNothing
-- | Wrapper whose 'Lift' instance reconstructs a 'SqlTypeExp' value instead
-- of splicing the contained expression directly.
data DelayedSqlTypeExp = DSTE { unDSTE :: SqlTypeExp }

instance Lift DelayedSqlTypeExp where
    lift (DSTE SqlString') = return $ ConE 'SqlString'
    lift (DSTE SqlInt64') = return $ ConE 'SqlInt64'
    lift (DSTE (SqlTypeExp e)) = liftA2 AppE (return $ ConE 'SqlTypeExp) (lift e)

-- | Either a concrete SQL type known up front, or an expression that
-- computes one when the splice runs.
data SqlTypeExp = SqlTypeExp Exp
                | SqlString'
                | SqlInt64'

instance Lift SqlTypeExp where
    lift (SqlTypeExp e) = return e
    lift SqlString' = [|SqlString|]
    lift SqlInt64' = [|SqlInt64|]
-- | Create data types and appropriate 'PersistEntity' instances for the given
-- 'EntityDef's. Works well with the persist quasi-quoter.
mkPersist :: MkPersistSettings -> [EntityDef SqlTypeExp] -> Q [Dec]
mkPersist mps entsRaw = do
    fieldDecs <- fmap mconcat $ mapM (persistFieldFromEntity mps) ents
    entityDecs <- fmap mconcat $ mapM (mkEntity mps) ents
    jsonDecs <- fmap mconcat $ mapM (mkJSON mps) ents
    return $ mconcat [fieldDecs, entityDecs, jsonDecs]
  where
    -- strip MigrationOnly/SafeToRemove fields before generating anything
    ents = map fixEntityDef entsRaw
-- | Implement special preprocessing on EntityDef as necessary for 'mkPersist'.
-- For example, strip out any fields marked as MigrationOnly.
fixEntityDef :: EntityDef a -> EntityDef a
fixEntityDef ed =
    ed { entityFields = filter keepField (entityFields ed) }
  where
    -- a field is kept unless flagged MigrationOnly or SafeToRemove
    keepField fd = all (`notElem` fieldAttrs fd) ["MigrationOnly", "SafeToRemove"]
-- | Settings to be passed to the 'mkPersist' function.
data MkPersistSettings = MkPersistSettings
    { mpsBackend :: Type
    -- ^ Which database backend we\'re using.
    --
    -- When generating data types, each type is given a generic version- which
    -- works with any backend- and a type synonym for the commonly used
    -- backend. This is where you specify that commonly used backend.
    , mpsGeneric :: Bool
    -- ^ Create generic types that can be used with multiple backends. Good for
    -- reusable code, but makes error messages harder to understand. Default:
    -- True.
    , mpsPrefixFields :: Bool
    -- ^ Prefix field names with the model name. Default: True.
    }

-- | Create an @MkPersistSettings@ with default values.
mkPersistSettings :: Type -- ^ Value for 'mpsBackend'
                  -> MkPersistSettings
mkPersistSettings t = MkPersistSettings
    { mpsBackend = t
    , mpsGeneric = True -- FIXME switch default to False in the future
    , mpsPrefixFields = True
    }

-- | Use the 'SqlPersist' backend.
sqlSettings :: MkPersistSettings
sqlSettings = mkPersistSettings $ ConT ''SqlBackend

-- | Same as 'sqlSettings', but set 'mpsGeneric' to @False@.
--
-- Since 1.1.1
sqlOnlySettings :: MkPersistSettings
sqlOnlySettings = sqlSettings { mpsGeneric = False }

-- | Record-field name for field @f@ of entity @dt@: with 'mpsPrefixFields'
-- the lowercased entity name is prepended (e.g. @personName@).
-- NOTE(review): '(++)' here is a module-local Text append (Prelude's '(++)'
-- is hidden above) — confirm against the rest of the file.
recName :: MkPersistSettings -> Text -> Text -> Text
recName mps dt f
  | mpsPrefixFields mps = lowerFirst dt ++ upperFirst f
  | otherwise = lowerFirst f
-- | Lower-case the first character of a 'Text'; empty input is returned
-- unchanged.
lowerFirst :: Text -> Text
lowerFirst t =
    case uncons t of
        Nothing -> t
        Just (c, rest) -> cons (toLower c) rest
-- | Upper-case the first character of a 'Text'; empty input is returned
-- unchanged.
upperFirst :: Text -> Text
upperFirst t =
    case uncons t of
        Nothing -> t
        Just (c, rest) -> cons (toUpper c) rest
-- | Generate the Haskell @data@ declaration for an entity: a record for
-- ordinary entities, or one constructor per field for sum entities.
dataTypeDec :: MkPersistSettings -> EntityDef a -> Dec
dataTypeDec mps t =
    DataD [] nameFinal paramsFinal constrs
    $ map (mkName . unpack) $ entityDerives t
  where
    -- one record field: prefixed name, strictness flag, and field type
    mkCol x FieldDef {..} =
        (mkName $ unpack $ recName mps x $ unHaskellName fieldHaskell,
         if fieldStrict then IsStrict else NotStrict,
         pairToType mps backend (fieldType, nullable fieldAttrs)
        )
    -- generic entities take a "backend" type parameter
    (nameFinal, paramsFinal)
        | mpsGeneric mps = (nameG, [PlainTV backend])
        | otherwise = (name, [])
    nameG = mkName $ unpack $ unHaskellName (entityHaskell t) ++ "Generic"
    name = mkName $ unpack $ unHaskellName $ entityHaskell t
    cols = map (mkCol $ unHaskellName $ entityHaskell t) $ entityFields t
    backend = mkName "backend"
    constrs
        | entitySum t = map sumCon $ entityFields t
        | otherwise = [RecC name cols]
    sumCon fd = NormalC
        (sumConstrName mps t fd)
        [(NotStrict, pairToType mps backend (fieldType fd, NotNullable))]

-- | Constructor name for one field of a sum entity, e.g. @EntityFieldSum@.
sumConstrName :: MkPersistSettings -> EntityDef a -> FieldDef b -> Name
sumConstrName mps t FieldDef {..} = mkName $ unpack $ concat
    [ if mpsPrefixFields mps
        then unHaskellName $ entityHaskell t
        else ""
    , upperFirst $ unHaskellName fieldHaskell
    , "Sum"
    ]
-- | Parse a value from a 'String', returning 'Nothing' on failure. Unlike
-- 'Text.Read.readMaybe', trailing input after a successful parse is allowed.
readMay :: Read a => String -> Maybe a
readMay s =
    case reads s of
        ((x, _):_) -> Just x
        [] -> Nothing
-- | Enumerate every (field, update-operation) combination for an entity:
-- each field is paired with every 'PersistUpdate' constructor.
entityUpdates :: EntityDef a -> [(HaskellName, FieldType, IsNullable, PersistUpdate)]
entityUpdates =
    concatMap go . entityFields
  where
    go FieldDef {..} = map (\a -> (fieldHaskell, fieldType, nullable fieldAttrs, a)) [minBound..maxBound]
-- | Generate the @data instance Unique ...@ declaration holding one
-- constructor per uniqueness constraint of the entity.
uniqueTypeDec :: MkPersistSettings -> EntityDef a -> Dec
uniqueTypeDec mps t =
    DataInstD [] ''Unique
        [genericDataType mps (unHaskellName $ entityHaskell t) $ VarT backend]
        (map (mkUnique mps backend t) $ entityUniques t)
        []
  where
    backend = mkName "backend"

-- | Build the constructor for a single uniqueness constraint. Nullable
-- columns are rejected unless the constraint carries the @!force@ attribute.
mkUnique :: MkPersistSettings -> Name -> EntityDef a -> UniqueDef -> Con
mkUnique mps backend t (UniqueDef (HaskellName constr) _ fields attrs) =
    NormalC (mkName $ unpack constr) types
  where
    types = map (go . flip lookup3 (entityFields t))
          $ map (unHaskellName . fst) fields
    force = "!force" `elem` attrs
    go :: (FieldType, IsNullable) -> (Strict, Type)
    go (_, Nullable _) | not force = error nullErrMsg
    go (ft, y) = (NotStrict, pairToType mps backend (ft, y))
    -- find the (type, nullability) of a named column, erroring if missing
    lookup3 :: Text -> [FieldDef a] -> (FieldType, IsNullable)
    lookup3 s [] =
        error $ unpack $ "Column not found: " ++ s ++ " in unique " ++ constr
    lookup3 x (FieldDef {..}:rest)
        | x == unHaskellName fieldHaskell = (fieldType, nullable fieldAttrs)
        | otherwise = lookup3 x rest
    nullErrMsg =
        mconcat [ "Error: By default we disallow NULLables in an uniqueness "
                , "constraint. The semantics of how NULL interacts with those "
                , "constraints is non-trivial: two NULL values are not "
                , "considered equal for the purposes of an uniqueness "
                , "constraint. If you understand this feature, it is possible "
                , "to use it your advantage. *** Use a \"!force\" attribute "
                , "on the end of the line that defines your uniqueness "
                , "constraint in order to disable this check. ***" ]
-- | Haskell type for a field given its type and nullability: nullable
-- (via a Maybe attribute) fields are wrapped in 'Maybe'.
pairToType :: MkPersistSettings
           -> Name -- ^ backend
           -> (FieldType, IsNullable)
           -> Type
pairToType mps backend (ft, nl) =
    case nl of
        Nullable ByMaybeAttr -> ConT ''Maybe `AppT` base
        _ -> base
  where
    base = idType mps backend ft
-- | Type of the backend: the type variable @backend@ for generic settings,
-- or the concrete 'mpsBackend' type otherwise.
backendDataType :: MkPersistSettings -> Type
backendDataType mps
    | mpsGeneric mps = VarT $ mkName "backend"
    | otherwise = mpsBackend mps

-- | Type of an entity's datatype: @EntityGeneric backend@ when generic,
-- plain @Entity@ otherwise.
genericDataType :: MkPersistSettings
                -> Text -- ^ entity name
                -> Type -- ^ backend
                -> Type
genericDataType mps typ' backend
    | mpsGeneric mps = ConT (mkName $ unpack $ typ' ++ "Generic") `AppT` backend
    | otherwise = ConT $ mkName $ unpack typ'

-- | Haskell type of a field: ID references become @KeyBackend backend Entity@,
-- everything else is converted directly with 'ftToType'.
idType :: MkPersistSettings -> Name -> FieldType -> Type
idType mps backend typ =
    case stripId typ of
        Just typ' ->
            ConT ''KeyBackend
                `AppT` backend'
                `AppT` genericDataType mps typ' (VarT backend)
        Nothing -> ftToType typ
  where
    backend'
        | mpsGeneric mps = VarT backend
        | otherwise = mpsBackend mps
-- | Guarantee a non-empty clause list: an empty list is replaced with a
-- single catch-all clause that calls 'error'.
degen :: [Clause] -> [Clause]
degen [] = [Clause [WildP] (NormalB fallback) []]
  where
    fallback = VarE 'error `AppE` LitE (StringL
        "Degenerate case, should never happen")
degen clauses = clauses
-- | Generate the 'toPersistFields' method: one clause serialising every
-- field for a record entity, or one clause per constructor for a sum
-- entity (all other positions filled with 'PersistNull').
mkToPersistFields :: MkPersistSettings -> String -> EntityDef a -> Q Dec
mkToPersistFields mps constr ed@EntityDef { entitySum = isSum, entityFields = fields } = do
    clauses <-
        if isSum
            then sequence $ zipWith goSum fields [1..]
            else fmap return go
    return $ FunD 'toPersistFields clauses
  where
    -- record case: wrap every field in SomePersistField
    go :: Q Clause
    go = do
        xs <- sequence $ replicate fieldCount $ newName "x"
        let pat = ConP (mkName constr) $ map VarP xs
        sp <- [|SomePersistField|]
        let bod = ListE $ map (AppE sp . VarE) xs
        return $ Clause [pat] (NormalB bod) []
    fieldCount = length fields
    -- sum case: the value at position idx, nulls everywhere else
    goSum :: FieldDef a -> Int -> Q Clause
    goSum fd idx = do
        let name = sumConstrName mps ed fd
        enull <- [|SomePersistField PersistNull|]
        let beforeCount = idx - 1
            afterCount = fieldCount - idx
            before = replicate beforeCount enull
            after = replicate afterCount enull
        x <- newName "x"
        sp <- [|SomePersistField|]
        let body = NormalB $ ListE $ mconcat
                [ before
                , [sp `AppE` VarE x]
                , after
                ]
        return $ Clause [ConP name [VarP x]] body []
-- | Generate 'persistUniqueToFieldNames': map each unique constructor to
-- its lifted list of field names.
mkToFieldNames :: [UniqueDef] -> Q Dec
mkToFieldNames pairs = do
    pairs' <- mapM go pairs
    return $ FunD 'persistUniqueToFieldNames $ degen pairs'
  where
    go (UniqueDef constr _ names _) = do
        names' <- lift names
        return $
            Clause
                [RecP (mkName $ unpack $ unHaskellName constr) []]
                (NormalB names')
                []

-- | Generate a function (named @name@) mapping each constructor to its
-- lifted 'PersistUpdate' value.
mkToUpdate :: String -> [(String, PersistUpdate)] -> Q Dec
mkToUpdate name pairs = do
    pairs' <- mapM go pairs
    return $ FunD (mkName name) $ degen pairs'
  where
    go (constr, pu) = do
        pu' <- lift pu
        return $ Clause [RecP (mkName constr) []] (NormalB pu') []

-- | Generate 'persistUniqueToValues': convert each unique constructor's
-- arguments to a list of 'PersistValue's.
mkUniqueToValues :: [UniqueDef] -> Q Dec
mkUniqueToValues pairs = do
    pairs' <- mapM go pairs
    return $ FunD 'persistUniqueToValues $ degen pairs'
  where
    go :: UniqueDef -> Q Clause
    go (UniqueDef constr _ names _) = do
        xs <- mapM (const $ newName "x") names
        let pat = ConP (mkName $ unpack $ unHaskellName constr) $ map VarP xs
        tpv <- [|toPersistValue|]
        let bod = ListE $ map (AppE tpv . VarE) xs
        return $ Clause [pat] (NormalB bod) []
-- | Build a function declaration named @func@ mapping each constructor
-- (matched as an empty record pattern) to an associated string literal.
mkToFieldName :: String -> [(String, String)] -> Dec
mkToFieldName func pairs = FunD (mkName func) clauses
  where
    clauses = degen [clauseFor c n | (c, n) <- pairs]
    clauseFor constr str =
        Clause [RecP (mkName constr) []]
               (NormalB (LitE (StringL str)))
               []
-- | Build a function declaration named @func@ that unwraps a single-field
-- constructor and applies 'toPersistValue' to the contained value.
mkToValue :: String -> [String] -> Dec
mkToValue func constrs = FunD (mkName func) (degen (map clauseFor constrs))
  where
    clauseFor constr = Clause [ConP (mkName constr) [VarP arg]] body []
      where
        arg = mkName "x"
        body = NormalB (VarE 'toPersistValue `AppE` VarE arg)
-- | True for every 'PersistValue' other than 'PersistNull'.
isNotNull :: PersistValue -> Bool
isNotNull v =
    case v of
        PersistNull -> False
        _ -> True
-- | Generate the clauses of 'fromPersistValues'. A record entity decodes a
-- positional list of values applicatively; a sum entity expects exactly one
-- non-null column and selects the constructor by that column's position.
mkFromPersistValues :: MkPersistSettings -> EntityDef a -> Q [Clause]
mkFromPersistValues mps t@(EntityDef { entitySum = False }) = do
    nothing <- [|Left $(liftT $ "Invalid fromPersistValues input. Entity: " `mappend` entName)|]
    let cons' = ConE $ mkName $ unpack $ entName
    xs <- mapM (const $ newName "x") $ entityFields t
    mkPersistValues <- mapM (mkPersistValue . unHaskellName . fieldHaskell) $ entityFields t
    -- Pair each bound value with its field-specific decoder.
    let xs' = map (\(pv, x) -> pv `AppE` VarE x) $ zip mkPersistValues xs
    let pat = ListP $ map VarP xs
    ap' <- [|(<*>)|]
    just <- [|Right|]
    let cons'' = just `AppE` cons'
    return
        [ Clause [pat] (NormalB $ foldl (go ap') cons'' xs') []
        -- Wrong number of columns falls through to the error clause.
        , Clause [WildP] (NormalB nothing) []
        ]
  where
    -- Decoder for one field; prefixes decoding failures with the field name.
    mkPersistValue fieldName = [|\persistValue ->
        case fromPersistValue persistValue of
            Right r -> Right r
            Left err -> Left $
                "field " `mappend` $(liftT fieldName) `mappend` ": " `mappend` err
        |]
    entName = unHaskellName $ entityHaskell t
    go ap' x y = InfixE (Just x) ap' (Just y)
mkFromPersistValues mps t@(EntityDef { entitySum = True }) = do
    nothing <- [|Left $(liftT $ "Invalid fromPersistValues input: sum type with all nulls. Entity: " `mappend` entName)|]
    clauses <- mkClauses [] $ entityFields t
    return $ clauses `mappend` [Clause [WildP] (NormalB nothing) []]
  where
    entName = unHaskellName $ entityHaskell t
    -- One clause per field: all other columns must be PersistNull, and the
    -- single live column must itself be non-null (checked by the guard).
    mkClauses _ [] = return []
    mkClauses before (field:after) = do
        x <- newName "x"
        let null' = ConP 'PersistNull []
            pat = ListP $ mconcat
                [ map (const null') before
                , [VarP x]
                , map (const null') after
                ]
            constr = ConE $ sumConstrName mps t field
        fmap' <- [|fmap|]
        fs <- [|fromPersistValue $(return $ VarE x)|]
        let guard' = NormalG $ VarE 'isNotNull `AppE` VarE x
        let clause = Clause [pat] (GuardedB [(guard', InfixE (Just constr) fmap' (Just fs))]) []
        clauses <- mkClauses (field : before) after
        return $ clause : clauses
-- | A minimal van Laarhoven lens type, compatible with the @lens@ library.
type Lens s t a b = forall f. Functor f => (a -> f b) -> s -> f t
-- | Build a lens from a getter and a setter.
lens :: (s -> a) -> (s -> b -> t) -> Lens s t a b
lens get set f s = fmap (set s) (f (get s))
-- | Generate the clauses of 'fieldLens': one clause for the synthetic Id
-- field (a lens onto 'entityKey') plus one clause per entity field.
mkLensClauses :: MkPersistSettings -> EntityDef a -> Q [Clause]
mkLensClauses mps t = do
    lens' <- [|lens|]
    getId <- [|entityKey|]
    setId <- [|\(Entity _ value) key -> Entity key value|]
    getVal <- [|entityVal|]
    dot <- [|(.)|]
    keyName <- newName "key"
    valName <- newName "value"
    xName <- newName "x"
    let idClause = Clause
            [ConP (mkName $ unpack $ unHaskellName (entityHaskell t) ++ "Id") []]
            (NormalB $ lens' `AppE` getId `AppE` setId)
            []
    if entitySum t
        then return $ idClause : map (toSumClause lens' keyName valName xName) (entityFields t)
        else return $ idClause : map (toClause lens' getVal dot keyName valName xName) (entityFields t)
  where
    -- Record entity: getter is the record selector composed with entityVal;
    -- setter performs a record update inside the Entity wrapper.
    toClause lens' getVal dot keyName valName xName f = Clause
        [ConP (filterConName mps t f) []]
        (NormalB $ lens' `AppE` getter `AppE` setter)
        []
      where
        fieldName = mkName $ unpack $ recName mps (unHaskellName $ entityHaskell t) (unHaskellName $ fieldHaskell f)
        getter = InfixE (Just $ VarE fieldName) dot (Just getVal)
        setter = LamE
            [ ConP 'Entity [VarP keyName, VarP valName]
            , VarP xName
            ]
            $ ConE 'Entity `AppE` VarE keyName `AppE` RecUpdE
                (VarE valName)
                [(fieldName, VarE xName)]
    -- Sum entity: getter cases on the summand and calls 'error' when the
    -- value holds a different constructor; setter rebuilds the summand.
    toSumClause lens' keyName valName xName f = Clause
        [ConP (filterConName mps t f) []]
        (NormalB $ lens' `AppE` getter `AppE` setter)
        []
      where
        emptyMatch = Match WildP (NormalB $ VarE 'error `AppE` LitE (StringL "Tried to use fieldLens on a Sum type")) []
        getter = LamE
            [ ConP 'Entity [WildP, VarP valName]
            ] $ CaseE (VarE valName)
            $ Match (ConP (sumConstrName mps t f) [VarP xName]) (NormalB $ VarE xName) []
            -- FIXME It would be nice if the types expressed that the Field is
            -- a sum type and therefore could result in Maybe.
            : if length (entityFields t) > 1 then [emptyMatch] else []
        setter = LamE
            [ ConP 'Entity [VarP keyName, WildP]
            , VarP xName
            ]
            $ ConE 'Entity `AppE` VarE keyName `AppE` (ConE (sumConstrName mps t f) `AppE` VarE xName)
-- | Generate everything for one entity: an optional type synonym (in generic
-- mode), the @<Entity>Id@ synonym, and the full 'PersistEntity' instance
-- with all of its methods and the @EntityField@ data family instance.
mkEntity :: MkPersistSettings -> EntityDef SqlTypeExp -> Q [Dec]
mkEntity mps t = do
    t' <- lift t
    let nameT = unHaskellName $ entityHaskell t
    let nameS = unpack nameT
    let clazz = ConT ''PersistEntity `AppT` genericDataType mps (unHaskellName $ entityHaskell t) (VarT $ mkName "backend")
    tpf <- mkToPersistFields mps nameS t
    fpv <- mkFromPersistValues mps t
    utv <- mkUniqueToValues $ entityUniques t
    puk <- mkUniqueKeys t
    -- A synthetic Id field is prepended to the real entity fields.
    fields <- mapM (mkField mps t) $ FieldDef
        { fieldHaskell = HaskellName "Id"
        , fieldDB = entityID t
        , fieldType = FTTypeCon Nothing $ unHaskellName (entityHaskell t) ++ "Id"
        , fieldSqlType = SqlInt64'
        , fieldEmbedded = Nothing
        , fieldAttrs = []
        , fieldStrict = True
        }
        : entityFields t
    toFieldNames <- mkToFieldNames $ entityUniques t
    let addSyn -- FIXME maybe remove this
            | mpsGeneric mps = (:) $
                TySynD (mkName nameS) [] $
                    genericDataType mps nameT $ mpsBackend mps
            | otherwise = id
    lensClauses <- mkLensClauses mps t
    return $ addSyn
        [ dataTypeDec mps t
        , TySynD (mkName $ unpack $ unHaskellName (entityHaskell t) ++ "Id") [] $
            ConT ''KeyBackend `AppT` mpsBackend mps `AppT` ConT (mkName nameS)
        , InstanceD [] clazz $
            [ uniqueTypeDec mps t
            , FunD 'entityDef [Clause [WildP] (NormalB t') []]
            , tpf
            , FunD 'fromPersistValues fpv
            , toFieldNames
            , utv
            , puk
            , DataInstD
                []
                ''EntityField
                [ genericDataType mps nameT $ VarT $ mkName "backend"
                , VarT $ mkName "typ"
                ]
                (map fst fields)
                []
            , FunD 'persistFieldDef (map snd fields)
            -- The TySynInstD AST changed shape in template-haskell 2.9,
            -- hence the CPP split below.
            , TySynInstD
                ''PersistEntityBackend
#if MIN_VERSION_template_haskell(2,9,0)
                (TySynEqn
                    [genericDataType mps (unHaskellName $ entityHaskell t) $ VarT $ mkName "backend"]
                    (backendDataType mps))
#else
                [genericDataType mps (unHaskellName $ entityHaskell t) $ VarT $ mkName "backend"]
                (backendDataType mps)
#endif
            , FunD 'persistIdField [Clause [] (NormalB $ ConE $ mkName $ unpack $ unHaskellName (entityHaskell t) ++ "Id") []]
            , FunD 'fieldLens lensClauses
            ]
        ]
-- | Produce code similar to the following:
--
-- instance PersistEntity e => PersistField e where
--    toPersistValue = PersistMap $ zip columnNames (map toPersistValue . toPersistFields)
--    fromPersistValue (PersistMap o) = fromPersistValues $ map (\(_,v) ->
--        case fromPersistValue v of
--            Left e -> error e
--            Right r -> r) o
--    fromPersistValue x = Left $ "Expected PersistMap, received: " ++ show x
--    sqlType _ = SqlString
persistFieldFromEntity :: MkPersistSettings -> EntityDef a -> Q [Dec]
persistFieldFromEntity mps e = do
    ss <- [|SqlString|]
    let columnNames = map (unpack . unHaskellName . fieldHaskell) (entityFields e)
    obj <- [|\ent -> PersistMap $ zip (map pack columnNames) (map toPersistValue $ toPersistFields ent)|]
    -- Per-field decoding failures become 'error' calls, matching the
    -- documented behavior above.
    fpv <- [|\x -> fromPersistValues $ map (\(_,v) -> case fromPersistValue v of
        Left e' -> error $ unpack e'
        Right r -> r) x|]
    let typ = genericDataType mps (pack entityName) $ VarT $ mkName "backend"
    compose <- [|(<=<)|]
    getPersistMap' <- [|getPersistMap|]
    return
        [ persistFieldInstanceD typ
            [ FunD 'toPersistValue [ Clause [] (NormalB obj) [] ]
            , FunD 'fromPersistValue
                [ Clause [] (NormalB $ InfixE (Just fpv) compose $ Just getPersistMap') []
                ]
            ]
        , persistFieldSqlInstanceD typ
            [ sqlTypeFunD ss
            ]
        ]
  where
    entityName = (unpack $ unHaskellName $ entityHaskell e)
-- | Apply the given list of functions to the same @EntityDef@s and
-- concatenate the resulting declarations.
--
-- This function is useful for cases such as:
--
-- >>> share [mkSave "myDefs", mkPersist sqlSettings] [persistLowerCase|...|]
share :: [[EntityDef a] -> Q [Dec]] -> [EntityDef a] -> Q [Dec]
share fs x = fmap mconcat (sequence [f x | f <- fs])
-- | Save the @EntityDef@s passed in under the given name, as a top-level
-- binding of type @[EntityDef SqlType]@ (signature included).
mkSave :: String -> [EntityDef SqlType] -> Q [Dec]
mkSave name' defs' = do
    let name = mkName name'
    defs <- lift defs'
    return [ SigD name $ ListT `AppT` (ConT ''EntityDef `AppT` ConT ''SqlType)
           , FunD name [Clause [] (NormalB defs) []]
           ]
-- | A reference edge used by 'mkDeleteCascade': the field
-- @depSourceTable.depSourceField@ points at the entity named 'depTarget'.
data Dep = Dep
    { depTarget :: Text              -- ^ Name of the referenced entity.
    , depSourceTable :: HaskellName  -- ^ Entity holding the reference.
    , depSourceField :: HaskellName  -- ^ Field holding the reference.
    , depSourceNull :: IsNullable    -- ^ Whether the reference is Maybe-wrapped.
    }
-- | Generate a 'DeleteCascade' instance for the given @EntityDef@s.
mkDeleteCascade :: MkPersistSettings -> [EntityDef a] -> Q [Dec]
mkDeleteCascade mps defs = do
    let deps = concatMap getDeps defs
    mapM (go deps) defs
  where
    -- All reference edges leaving an entity's fields.
    getDeps :: EntityDef a -> [Dep]
    getDeps def =
        concatMap getDeps' $ entityFields $ fixEntityDef def
      where
        getDeps' :: FieldDef a -> [Dep]
        getDeps' FieldDef {..} =
            case stripId fieldType of
                Just f ->
                    return Dep
                        { depTarget = f
                        , depSourceTable = entityHaskell def
                        , depSourceField = fieldHaskell
                        , depSourceNull = nullable fieldAttrs
                        }
                Nothing -> []
    -- Instance for one entity: cascade-delete every row referencing the key,
    -- then delete the row itself.
    go :: [Dep] -> EntityDef a -> Q Dec
    go allDeps EntityDef{entityHaskell = name} = do
        let deps = filter (\x -> depTarget x == unHaskellName name) allDeps
        key <- newName "key"
        let del = VarE 'delete
        let dcw = VarE 'deleteCascadeWhere
        just <- [|Just|]
        filt <- [|Filter|]
        eq <- [|Eq|]
        left <- [|Left|]
        let mkStmt :: Dep -> Stmt
            mkStmt dep = NoBindS
                $ dcw `AppE`
                    ListE
                        [ filt `AppE` ConE filtName
                            `AppE` (left `AppE` val (depSourceNull dep))
                            `AppE` eq
                        ]
              where
                filtName = filterConName' mps (depSourceTable dep) (depSourceField dep)
                -- Nullable references compare against @Just key@.
                val (Nullable ByMaybeAttr) = just `AppE` VarE key
                val _ = VarE key
        let stmts :: [Stmt]
            stmts = map mkStmt deps `mappend`
                [NoBindS $ del `AppE` VarE key]
        let entityT = genericDataType mps (unHaskellName name) $ VarT $ mkName "backend"
        return $
            InstanceD
                [ ClassP ''PersistQuery [VarT $ mkName "m"]
                , EqualP (ConT ''PersistEntityBackend `AppT` entityT) (ConT ''PersistMonadBackend `AppT` VarT (mkName "m"))
                ]
                (ConT ''DeleteCascade `AppT` entityT `AppT` VarT (mkName "m"))
                [ FunD 'deleteCascade
                    [Clause [VarP key] (NormalB $ DoE stmts) []]
                ]
-- | Generate 'persistUniqueKeys'. Sum entities have no unique keys; record
-- entities get one unique value per 'UniqueDef', built from the bound fields.
mkUniqueKeys :: EntityDef a -> Q Dec
mkUniqueKeys def | entitySum def =
    return $ FunD 'persistUniqueKeys [Clause [WildP] (NormalB $ ListE []) []]
mkUniqueKeys def = do
    c <- clause
    return $ FunD 'persistUniqueKeys [c]
  where
    clause = do
        -- Bind every entity field to a fresh, underscore-prefixed name so
        -- unused bindings do not trigger warnings.
        xs <- forM (entityFields def) $ \fd -> do
            let x = fieldHaskell fd
            x' <- newName $ '_' : unpack (unHaskellName x)
            return (x, x')
        let pcs = map (go xs) $ entityUniques def
        let pat = ConP
                (mkName $ unpack $ unHaskellName $ entityHaskell def)
                (map (VarP . snd) xs)
        return $ Clause [pat] (NormalB $ ListE pcs) []
    go :: [(HaskellName, Name)] -> UniqueDef -> Exp
    go xs (UniqueDef name _ cols _) =
        foldl' (go' xs) (ConE (mkName $ unpack $ unHaskellName name)) (map fst cols)
    -- Partial lookup: every unique column is expected to be an entity field.
    go' :: [(HaskellName, Name)] -> Exp -> HaskellName -> Exp
    go' xs front col =
        let Just col' = lookup col xs
         in front `AppE` VarE col'
-- | A @sqlType@ definition whose body ignores the proxy argument and returns
-- the given expression.
sqlTypeFunD :: Exp -> Dec
sqlTypeFunD st = FunD 'sqlType [clause]
  where
    clause = Clause [WildP] (NormalB st) []
-- | An empty-context @instance PersistField <typ>@ declaration.
persistFieldInstanceD :: Type -> [Dec] -> Dec
persistFieldInstanceD typ decs =
    InstanceD [] (ConT ''PersistField `AppT` typ) decs
-- | An empty-context @instance PersistFieldSql <typ>@ declaration.
persistFieldSqlInstanceD :: Type -> [Dec] -> Dec
persistFieldSqlInstanceD typ decs =
    InstanceD [] (ConT ''PersistFieldSql `AppT` typ) decs
-- | Automatically creates a valid 'PersistField' instance for any datatype
-- that has valid 'Show' and 'Read' instances. Can be very convenient for
-- 'Enum' types.
derivePersistField :: String -> Q [Dec]
derivePersistField s = do
    ss <- [|SqlString|]
    -- Values are stored as their 'show' text.
    tpv <- [|PersistText . pack . show|]
    -- Decode via 'reads'; a failed parse reports the type name @dt@.
    fpv <- [|\dt v ->
        case fromPersistValue v of
            Left e -> Left e
            Right s' ->
                case reads $ unpack s' of
                    (x, _):_ -> Right x
                    [] -> Left $ pack "Invalid " ++ pack dt ++ pack ": " ++ s'|]
    return
        [ persistFieldInstanceD (ConT $ mkName s)
            [ FunD 'toPersistValue
                [ Clause [] (NormalB tpv) []
                ]
            , FunD 'fromPersistValue
                [ Clause [] (NormalB $ fpv `AppE` LitE (StringL s)) []
                ]
            ]
        , persistFieldSqlInstanceD (ConT $ mkName s)
            [ sqlTypeFunD ss
            ]
        ]
-- | Creates a single function to perform all migrations for the entities
-- defined here. One thing to be aware of is dependencies: if you have entities
-- with foreign references, make sure to place those definitions after the
-- entities they reference.
mkMigrate :: Lift' a => String -> [EntityDef a] -> Q [Dec]
mkMigrate fun allDefs = do
    body' <- body
    return
        [ SigD (mkName fun) typ
        , FunD (mkName fun) [Clause [] (NormalB body') []]
        ]
  where
    -- Entities tagged "no-migrate" are excluded from the generated migration.
    defs = filter isMigrated allDefs
    isMigrated def = "no-migrate" `notElem` entityAttrs def
    typ = ForallT [PlainTV $ mkName "m"]
        [ ClassP ''MonadBaseControl [ConT ''IO, VarT $ mkName "m"]
        , ClassP ''MonadIO [VarT $ mkName "m"]
        , ClassP ''MonadLogger [VarT $ mkName "m"]
        ]
        $ ConT ''Migration `AppT` (ConT ''SqlPersistT `AppT` VarT (mkName "m"))
    body :: Q Exp
    body =
        case defs of
            [] -> [|return ()|]
            _ -> do
                defsName <- newName "defs"
                -- Bind the full definition list once so every migrate call
                -- can consult all entities (needed for foreign references).
                defsStmt <- do
                    defs' <- mapM lift defs
                    let defsExp = ListE defs'
                    return $ LetS [ValD (VarP defsName) (NormalB defsExp) []]
                stmts <- mapM (toStmt $ VarE defsName) defs
                return (DoE $ defsStmt : stmts)
    -- One @migrate defs entity@ statement per migrated entity.
    toStmt :: Lift' a => Exp -> EntityDef a -> Q Stmt
    toStmt defsExp ed = do
        u <- lift ed
        m <- [|migrate|]
        return $ NoBindS $ m `AppE` defsExp `AppE` u
-- Lift instances so entity definitions can be spliced into generated code.
instance Lift' a => Lift (EntityDef a) where
    lift (EntityDef a b c d e f g h i) =
        [|EntityDef
            $(lift a)
            $(lift b)
            $(lift c)
            $(liftTs d)
            $(lift e)
            $(lift f)
            $(liftTs g)
            $(liftMap h)
            $(lift i)
            |]
instance Lift' a => Lift (FieldDef a) where
    -- a, b, c and f are spliced implicitly via their own Lift instances.
    lift (FieldDef a b c d e f g) = [|FieldDef a b c $(lift' d) $(liftTs e) f $(lift' g)|]
instance Lift UniqueDef where
    lift (UniqueDef a b c d) = [|UniqueDef $(lift a) $(lift b) $(lift c) $(liftTs d)|]
-- | A hack to avoid orphans.
class Lift' a where
    lift' :: a -> Q Exp
instance Lift' SqlType where
    lift' = lift
instance Lift' a => Lift' (Maybe a) where
    lift' Nothing = [|Nothing|]
    lift' (Just a) = [|Just $(lift' a)|]
instance Lift' a => Lift' (EntityDef a) where
    lift' = lift
instance Lift' () where
    lift' () = [|()|]
instance Lift' SqlTypeExp where
    lift' = lift
instance Lift' DelayedSqlTypeExp where
    lift' = lift
-- | Monomorphic 'pack', referenced from inside splices.
pack' :: String -> Text
pack' = pack
#if !MIN_VERSION_text(0, 11, 2)
-- NOTE(review): NOINLINE is guarded to text < 0.11.2, presumably working
-- around an issue in those versions — kept as-is.
{-# NOINLINE pack' #-}
#endif
-- | Splice a 'Text' value as a packed string literal.
liftT :: Text -> Q Exp
liftT t = [|pack' $(lift (unpack t))|]
-- | Splice a list of 'Text' values.
liftTs :: [Text] -> Q Exp
liftTs = fmap ListE . mapM liftT
-- | Splice a nested list of 'Text' values.
liftTss :: [[Text]] -> Q Exp
liftTss = fmap ListE . mapM liftTs
-- | Splice a map from 'Text' to nested 'Text' lists via its assoc list.
liftMap :: M.Map Text [[Text]] -> Q Exp
liftMap m = [|M.fromList $(fmap ListE $ mapM liftPair $ M.toList m)|]
-- | Splice one key/value pair of the map above.
liftPair :: (Text, [[Text]]) -> Q Exp
liftPair (t, ts) = [|($(liftT t), $(liftTss ts))|]
-- Mechanical Lift instances for the remaining definition types.
instance Lift HaskellName where
    lift (HaskellName t) = [|HaskellName $(liftT t)|]
instance Lift DBName where
    lift (DBName t) = [|DBName $(liftT t)|]
instance Lift FieldType where
    lift (FTTypeCon Nothing t) = [|FTTypeCon Nothing $(liftT t)|]
    lift (FTTypeCon (Just x) t) = [|FTTypeCon (Just $(liftT x)) $(liftT t)|]
    lift (FTApp x y) = [|FTApp $(lift x) $(lift y)|]
    lift (FTList x) = [|FTList $(lift x)|]
instance Lift PersistFilter where
    lift Eq = [|Eq|]
    lift Ne = [|Ne|]
    lift Gt = [|Gt|]
    lift Lt = [|Lt|]
    lift Ge = [|Ge|]
    lift Le = [|Le|]
    lift In = [|In|]
    lift NotIn = [|NotIn|]
    lift (BackendSpecificFilter x) = [|BackendSpecificFilter $(liftT x)|]
instance Lift PersistUpdate where
    lift Assign = [|Assign|]
    lift Add = [|Add|]
    lift Subtract = [|Subtract|]
    lift Multiply = [|Multiply|]
    lift Divide = [|Divide|]
instance Lift SqlType where
    lift SqlString = [|SqlString|]
    lift SqlInt32 = [|SqlInt32|]
    lift SqlInt64 = [|SqlInt64|]
    lift SqlReal = [|SqlReal|]
    -- NOTE(review): the precision/scale are round-tripped through Integer,
    -- presumably to avoid needing a Lift instance for the field type.
    lift (SqlNumeric x y) =
        [|SqlNumeric (fromInteger x') (fromInteger y')|]
      where
        x' = fromIntegral x
        y' = fromIntegral y
    lift SqlBool = [|SqlBool|]
    lift SqlDay = [|SqlDay|]
    lift SqlTime = [|SqlTime|]
    lift SqlDayTime = [|SqlDayTime|]
    lift SqlDayTimeZoned = [|SqlDayTimeZoned|]
    lift SqlBlob = [|SqlBlob|]
    lift (SqlOther a) = [|SqlOther $(liftT a)|]
-- Ent
--   fieldName FieldType
--
-- forall . typ ~ FieldType => EntFieldName
--
-- EntFieldName = FieldDef ....
-- | Generate one @EntityField@ constructor plus the matching
-- 'persistFieldDef' clause for the given field.
mkField :: MkPersistSettings -> EntityDef a -> FieldDef SqlTypeExp -> Q (Con, Clause)
mkField mps et cd = do
    -- Nullary constructor with an equality constraint pinning the phantom
    -- @typ@ variable to the field's Haskell type.
    let con = ForallC
            []
            [EqualP (VarT $ mkName "typ") maybeTyp]
            $ NormalC name []
    bod <- lift cd
    let cla = Clause
            [ConP name []]
            (NormalB bod)
            []
    return (con, cla)
  where
    name = filterConName mps et cd
    -- Maybe-wrap the type when the column is nullable via a Maybe attribute.
    maybeTyp =
        if nullable (fieldAttrs cd) == Nullable ByMaybeAttr
            then ConT ''Maybe `AppT` typ
            else typ
    -- Id-like reference fields become KeyBackend types; other fields map to
    -- their declared type directly.
    typ =
        case stripId $ fieldType cd of
            Just ft ->
                ConT ''KeyBackend
                    `AppT` (if mpsGeneric mps
                                then VarT $ mkName "backend"
                                else mpsBackend mps)
                    `AppT` genericDataType mps ft (VarT $ mkName "backend")
            Nothing -> ftToType $ fieldType cd
-- | The @EntityField@ constructor name for a field of an entity.
filterConName :: MkPersistSettings
              -> EntityDef sqlType1
              -> FieldDef sqlType2
              -> Name
filterConName mps entity field = filterConName' mps (entityHaskell entity) (fieldHaskell field)
-- | As 'filterConName', from raw names. The entity name is prefixed when
-- prefixing is enabled or the field is the special \"Id\" field.
filterConName' :: MkPersistSettings
               -> HaskellName -- ^ table
               -> HaskellName -- ^ field
               -> Name
filterConName' mps entity field = mkName $ unpack $ concat
    [ if mpsPrefixFields mps || field == HaskellName "Id"
        then unHaskellName entity
        else ""
    , upperFirst $ unHaskellName field
    ]
-- | Convert a parsed 'FieldType' into the corresponding TH 'Type'.
ftToType :: FieldType -> Type
ftToType (FTTypeCon Nothing t) = ConT $ mkName $ unpack t
ftToType (FTTypeCon (Just m) t) = ConT $ mkName $ unpack $ concat [m, ".", t]
ftToType (FTApp x y) = ftToType x `AppT` ftToType y
ftToType (FTList x) = ListT `AppT` ftToType x
-- | Local 'Text' append, shadowing the Prelude list operator in this module.
infixr 5 ++
(++) :: Text -> Text -> Text
(++) = append
-- | Generate 'ToJSON' and 'FromJSON' instances for an entity, but only when
-- it carries the @json@ attribute; otherwise no declarations are produced.
mkJSON :: MkPersistSettings -> EntityDef a -> Q [Dec]
mkJSON _ def | "json" `notElem` entityAttrs def = return []
mkJSON mps def = do
    pureE <- [|pure|]
    apE' <- [|(<*>)|]
    packE <- [|pack|]
    dotEqualE <- [|(.=)|]
    dotColonE <- [|(.:)|]
    dotColonQE <- [|(.:?)|]
    objectE <- [|object|]
    obj <- newName "obj"
    mzeroE <- [|mzero|]
    xs <- mapM (newName . unpack . unHaskellName . fieldHaskell)
        $ entityFields def
    let conName = mkName $ unpack $ unHaskellName $ entityHaskell def
        typ = genericDataType mps (unHaskellName $ entityHaskell def) $ VarT $ mkName "backend"
        -- toJSON: object [ "field" .= x | one pair per field ]
        toJSONI = InstanceD
            []
            (ConT ''ToJSON `AppT` typ)
            [toJSON']
        toJSON' = FunD 'toJSON $ return $ Clause
            [ConP conName $ map VarP xs]
            (NormalB $ objectE `AppE` ListE pairs)
            []
        pairs = zipWith toPair (entityFields def) xs
        toPair f x = InfixE
            (Just (packE `AppE` LitE (StringL $ unpack $ unHaskellName $ fieldHaskell f)))
            dotEqualE
            (Just $ VarE x)
        -- parseJSON: Constructor <$> obj .: "f1" <*> obj .: "f2" <*> ...
        fromJSONI = InstanceD
            []
            (ConT ''FromJSON `AppT` typ)
            [parseJSON']
        parseJSON' = FunD 'parseJSON
            [ Clause [ConP 'Object [VarP obj]]
                (NormalB $ foldl'
                    (\x y -> InfixE (Just x) apE' (Just y))
                    (pureE `AppE` ConE conName)
                    pulls
                )
                []
            -- Any non-object JSON value fails to parse.
            , Clause [WildP] (NormalB mzeroE) []
            ]
        pulls = map toPull $ entityFields def
        -- Nullable (Maybe) fields use (.:?) so a missing key becomes Nothing.
        toPull f = InfixE
            (Just $ VarE obj)
            (if nullable (fieldAttrs f) == Nullable ByMaybeAttr then dotColonQE else dotColonE)
            (Just $ AppE packE $ LitE $ StringL $ unpack $ unHaskellName $ fieldHaskell f)
    return [toJSONI, fromJSONI]
| gbwey/persistentold | persistent-template/Database/Persist/TH.hs | mit | 38,009 | 57 | 27 | 11,661 | 11,233 | 5,838 | 5,395 | 805 | 6 |
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE BangPatterns #-}
-- | Helper functions for SmallArray#
--
-- This module exposes _unsafe_ functions to work with SmallArrays. That means
-- that specifically neither index bounds nor element types are checked So this
-- functionality should only be used in a context that enforces them by some
-- other means, e.g. ARec's type index
module Data.Vinyl.ARec.Internal.SmallArray where
import GHC.Prim
import GHC.Types
import Unsafe.Coerce
import GHC.ST
-- | An immutable array of boxed, untyped ('Any') elements.
data SmallArray = SmallArray !(SmallArray# Any)
-- | The mutable counterpart, tagged with the state-thread type @s@.
data SmallMutableArray s = SmallMutableArray !(SmallMutableArray# s Any)
-- | Read the element at index @ix@ and coerce it to the requested type.
-- Neither the index nor the element type is checked (see module header).
indexSmallArray :: SmallArray -> Int -> a
indexSmallArray (SmallArray arr) (I# ix) =
  case indexSmallArray# arr ix of
    -- The unboxed single match extracts the element without forcing it.
    (# v #) -> unsafeCoerce v
{-# INLINE indexSmallArray #-}
-- | Allocate an array with @len@ slots, run @f@ to fill it, then freeze it.
-- Any slot @f@ does not write keeps the bottom ('error') placeholder.
withNewSmallArray :: Int -> (SmallMutableArray s -> ST s ()) -> ST s SmallArray
withNewSmallArray (I# len#) f =
  ST $ \s0 -> case newSmallArray# len# (error "withNewSmallArray exploded") s0 of
    (# s1, mArr #) ->
      case f (SmallMutableArray mArr) of
        ST st -> case st s1 of
          -- Freeze in place without copying; mArr is never used again
          -- afterwards, so the unsafe freeze is sound here.
          (# s2, () #) -> case unsafeFreezeSmallArray# mArr s2 of
            (# s3, ar #) -> (# s3, SmallArray ar #)
{-# INLINE withNewSmallArray #-}
-- | Write @x@ (coerced to 'Any') at index @n@. No bounds check.
writeSmallArray :: SmallMutableArray s -> Int -> a -> ST s ()
writeSmallArray (SmallMutableArray mArr) (I# n#) x = ST $ \s ->
  case writeSmallArray# mArr n# (unsafeCoerce x) s of
    s' -> (# s', () #)
{-# INLINE writeSmallArray #-}
-- | Thaw the array into a mutable copy, run @f@ on the copy, then freeze the
-- result; the original is untouched ('thawSmallArray#' copies its input).
withThawedSmallArray :: SmallArray
                     -> (SmallMutableArray s -> ST s ())
                     -> ST s SmallArray
withThawedSmallArray (SmallArray arr) f = ST $ \s0 ->
  let !(I# z#) = 0
  in case thawSmallArray# arr z# (sizeofSmallArray# arr) s0 of
    (# s1, mArr #) ->
      case f (SmallMutableArray mArr) of
        ST st -> case st s1 of
          -- mArr is a private copy, so the in-place freeze is safe.
          (# s2, () #) -> case unsafeFreezeSmallArray# mArr s2 of
            (# s3, ar #) -> (# s3, SmallArray ar #)
{-# INLINE withThawedSmallArray #-}
| VinylRecords/Vinyl | Data/Vinyl/ARec/Internal/SmallArray.hs | mit | 2,067 | 0 | 22 | 481 | 562 | 289 | 273 | 46 | 1 |
module Gt.Core
(
do_trans
, do_trans_each_word
)
where
import Gt.Net
import Gt.Langs
import Gt.Helpers
import Gt.Translation
import Text.JSON
-- | Translate @str@ from @sl@ to @tl@ via the remote service and render the
-- parsed response with 'show'.
do_trans :: Lang -> Lang -> String -> IO String
do_trans sl tl str =
    do
      jresp <- get_resp sl tl str
      return $ show $ jresp_to_resp jresp
-- | Placeholder: per-word translation is not implemented yet.
do_trans_each_word :: IO String
do_trans_each_word = error "Gt.Core.do_trans_each_word: unimplemented"
-- | Decode the raw JSON reply and fold its top-level fields into a 'Resp',
-- starting from 'blank_resp'.
jresp_to_resp :: String -> Resp
jresp_to_resp unparsed_json = foldr fill_resp br obj_list
    where obj_list = fromJSObject $ from_ok unparsed_json (decode unparsed_json :: Result (JSObject JSValue))
          br = blank_resp
-- | Merge one top-level JSON field into the accumulated response;
-- unrecognized keys are ignored.
fill_resp :: (String, JSValue) -> Resp -> Resp
fill_resp (vclass, jval) resp =
    case vclass of
      "sentences" -> resp {sentences = jval_to_sentences jval}
      "dict" -> resp {dicts = jval_to_dicts jval}
      "src" -> resp {src = jstr_to_str jval}
      _ -> resp -- Ignoring any new data
-- Sentences processing
-- | Convert the JSON \"sentences\" array; any other shape is a protocol error.
jval_to_sentences :: JSValue -> Sentences
jval_to_sentences (JSArray jval) = Sentences { sent_list = foldr jsso_to_sentence [] jval }
jval_to_sentences _ = error "Gt.Core.jval_to_sentences: not a JSArray"
-- | Fold one JSON object into a 'Sentence' and prepend it to the accumulator.
jsso_to_sentence :: JSValue -> [Sentence] -> [Sentence]
jsso_to_sentence (JSObject jobj) acc =
    -- In truth the fields should be processed in reverse order, but there is
    -- currently only one element, so the data is left as is.
    foldr tot (Sentence "" "" "") in_list:acc
    where in_list = fromJSObject jobj
jsso_to_sentence _ _ = error "Gt.Core.jsso_to_sentence: not a JSObject"
-- Conversion of inner list with trans-orig-translit data
-- | Set one of the sentence's trans/orig/translit fields from a JSON pair;
-- unknown keys are ignored.
tot :: (String, JSValue) -> Sentence -> Sentence
tot (tclass, jval) s =
    case tclass of
      "trans" -> s { trans = jstr_to_str jval}
      "orig" -> s { orig = jstr_to_str jval}
      "translit" -> s { translit = jstr_to_str jval}
      _ -> s
-- Dictionaries processing
-- | Convert the JSON \"dict\" array; any other shape is a protocol error.
jval_to_dicts :: JSValue -> Dicts
jval_to_dicts (JSArray jval) = Dicts { dict_list = foldr jsdo_to_dict [] jval }
jval_to_dicts _ = error "Gt.Core.jval_to_dicts: not a JSArray"
-- | Fold one JSON object into a 'Dict' and prepend it to the accumulator.
jsdo_to_dict :: JSValue -> [Dict] -> [Dict]
jsdo_to_dict (JSObject jobj) acc =
    -- In truth the fields should be processed in reverse order, but there is
    -- currently only one element, so the data is left as is.
    foldr pt (Dict "" []) in_list:acc
    where in_list = fromJSObject jobj
jsdo_to_dict _ _ = error "Gt.Core.jsdo_to_dict: not a JSObject"
-- Conversion of inner list with pos-term data
-- | Set the dictionary's part-of-speech or its (sorted, deduplicated) term
-- list from a JSON pair; unknown keys are ignored.
pt :: (String, JSValue) -> Dict -> Dict
pt (tclass, jval) s =
    case tclass of
      "pos" -> s { pos = pos_to_str jval }
      "terms" -> s { terms = nub_sort $ jlist_to_slist jval}
      _ -> s
-- | Extract a part-of-speech string, defaulting the empty string to
-- @"common"@.
pos_to_str :: JSValue -> String
pos_to_str jval
    | null p = "common"
    | otherwise = p
    where p = jstr_to_str jval
| styx/gtc | Gt/Core.hs | gpl-3.0 | 2,876 | 0 | 12 | 712 | 767 | 406 | 361 | 62 | 4 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Sara.TestUtils.AstGenUtils () where
import Sara.TestUtils.ArbitraryUtils
import Sara.Ast.Syntax
import Sara.Semantic.Symbolizer
import Sara.Ast.Types
import Sara.Parser.Lexer
import Sara.Ast.Meta
import Sara.Ast.Operators
import Sara.Ast.AstUtils
import Sara.TestUtils.GenT
import Sara.Utils
import qualified Sara.Ast.Syntax as S
import qualified Sara.Ast.Types as T
import qualified Data.Set as S
import qualified Test.QuickCheck as Q
import Control.Monad.State.Strict
import Control.Monad.Reader
import Control.Monad.Writer
import Data.Maybe
import Data.List
import qualified Data.Map.Strict as M
import Sara.TestUtils.AstTestUtils
-- Lifting instances so generators can freely mix state, writer, and reader
-- effects with random generation ('GenT' / 'MonadGen').
instance (MonadState s m) => MonadState s (GenT m) where
  get = lift get
  put = lift . put
  state = lift . state
instance (Monoid w, MonadWriter w m) => MonadWriter w (GenT m) where
  writer = lift . writer
  tell = lift . tell
  listen = mapGenT listen
  pass = mapGenT pass
instance (MonadReader e m) => MonadReader e (GenT m) where
  ask = lift ask
  local = mapGenT . local
  reader = lift . reader
instance (MonadGen m) => MonadGen (StateT s m) where
  liftGen = lift . liftGen
  variant = mapStateT . variant
  -- 'sized'/'resize' thread the generator size through the transformer.
  sized f = StateT $ \state -> sized $ \size -> runStateT (f size) state
  resize = mapStateT . resize
  choose = lift . choose
instance (Monoid w, MonadGen m) => MonadGen (WriterT w m) where
  liftGen = lift . liftGen
  variant = mapWriterT . variant
  sized f = WriterT $ sized $ runWriterT . f
  resize = mapWriterT . resize
  choose = lift . choose
instance (MonadGen m) => MonadGen (ReaderT e m) where
  liftGen = lift . liftGen
  variant = mapReaderT . variant
  sized f = ReaderT $ \env -> sized $ \size -> runReaderT (f size) env
  resize = mapReaderT . resize
  choose = lift . choose
-- | Valid initial letters for an identifier: ASCII letters and underscore.
identifierStarts :: String
identifierStarts = concat [['a' .. 'z'], ['A' .. 'Z'], "_"]
-- | Valid intermediate letters for an identifier: the initial letters
-- followed by the decimal digits.
identifierLetters :: String
identifierLetters = identifierStarts `mappend` ['0' .. '9']
-- | Generate a syntactically valid (but possibly reserved) identifier.
arbitraryIdentifierCandidate :: MonadGen g => g Name
arbitraryIdentifierCandidate = do
  i0 <- elements identifierStarts
  iN <- listOf $ elements identifierLetters
  return (i0:iN)
-- | Generate a fresh identifier: syntactically valid, not reserved, and not
-- previously generated. The chosen name is recorded in the state set;
-- invalid candidates trigger a retry.
arbitraryIdentifier :: (MonadGen g, MonadState (S.Set Name) g) => g Name
arbitraryIdentifier = do
  id <- arbitraryIdentifierCandidate
  names <- get
  valid <- runReaderT (isNotReserved id) names
  when valid (modify $ S.insert id)
  if valid then return id else arbitraryIdentifier
-- | Shrinks an identifier given a set of reserved names. Shrunk candidates
-- must remain non-empty, validly started, and unreserved.
shrinkIdentifier :: MonadReader (S.Set Name) m => Name -> m [Name]
shrinkIdentifier n = filterM isNotReserved $ filter validIdentifier (Q.shrink n)
  where validIdentifier [] = False
        validIdentifier (x:_) = x `elem` identifierStarts
-- | Tests whether the identifier is not \"main\", not a language keyword,
-- and not in the reserved set supplied via the reader environment.
isNotReserved :: MonadReader (S.Set Name) m => Name -> m Bool
isNotReserved a = do
  names <- ask
  return $ a /= "main" && a `notElem` reservedNames && a `S.notMember` names
-- | Attach the expected type and default node metadata to an incomplete
-- generated expression.
addExpMeta :: Monad g => Type -> g IncompleteExpression -> g TypeCheckerExpression
addExpMeta t gen = gen <*> pure (TypMeta t) <*> pure mkNodeMeta
-- | A signature where pre- and postconditions are missing.
data PartialSignature
  = PartialSignature { isPureSig :: Bool
                     , sigName :: Name
                     , args :: [TypeCheckerTypedVariable]
                     , retType :: Type }
  deriving (Eq, Ord, Show)
-- | Complete a partial signature with the given pre- and postconditions.
toSignature :: PartialSignature -> [TypeCheckerExpression] -> [TypeCheckerExpression] -> TypeCheckerSignature
toSignature PartialSignature{..} pres posts = Signature isPureSig sigName args retType pres posts () mkNodeMeta
-- | Environment for the generation of expressions. The methods and functions are keyed by return type.
data GeneratorEnv
  = GeneratorEnv { callables :: M.Map T.Type [PartialSignature] -- ^ In a pure context, these are the functions, otherwise, functions and methods.
                 , functions :: M.Map T.Type [PartialSignature] -- ^ Pure functions.
                 , variables :: M.Map T.Type [Name]             -- ^ Variables in scope, keyed by type.
                 , isPureEnv :: Bool                            -- ^ Whether only pure constructs may be generated.
                 }
  deriving (Eq, Ord, Show)
-- | Build the starting environment from the program's signatures.
-- 'isPureEnv' is deliberately left undefined here; it must be set (via
-- @envWithPureness@) before any expression is generated.
initialEnv :: [PartialSignature] -> GeneratorEnv
initialEnv sigs = GeneratorEnv { callables = callables'
                               , functions = functions'
                               , variables = M.empty
                               , isPureEnv = undefined }
  where callables' = keyBy Sara.TestUtils.AstGenUtils.retType sigs
        functions' = M.map (filter isPureSig) callables'
-- | Generate a whole program: unique signatures first, then a declaration for
-- each. Scaled by the integer square root of the size to keep programs small.
arbitraryProgram :: MonadGen g => g TypeCheckerProgram
arbitraryProgram = scale intRoot $ do
  sigs <- evalStateT (listOf arbitraryPartialSignature) S.empty
  decls <- runReaderT (mapM arbitraryDeclForSignature sigs) (initialEnv sigs)
  return $ Program decls mkNodeMeta
-- | The overload-resolution key (name plus argument types) of a signature.
partialFunctionKey :: PartialSignature -> FunctionKey
partialFunctionKey PartialSignature{..} = FunctionKey sigName (map S.varType args)
-- | Generate a partial signature whose key does not collide with any
-- generated before; the new key is added to the state set.
arbitraryPartialSignature :: (MonadGen g, MonadState (S.Set FunctionKey) g) => g PartialSignature
arbitraryPartialSignature = do
  sigs <- get
  sig <- arbitrarySignature' `suchThat` (\sig -> partialFunctionKey sig `S.notMember` sigs)
  modify $ S.insert $ partialFunctionKey sig
  return sig
  where arbitrarySignature' = do
          pur <- arbitraryBool
          nam <- evalStateT arbitraryIdentifier S.empty
          args <- evalStateT (scale intRoot $ listOf $ arbitraryTypedVariable) S.empty
          retType <- arbitraryType
          return $ PartialSignature pur nam args retType
-- | Generate a typed variable with a fresh name.
arbitraryTypedVariable :: (MonadGen g, MonadState (S.Set Name) g) => g TypeCheckerTypedVariable
arbitraryTypedVariable = do
  name <- arbitraryIdentifier
  typ <- arbitraryType
  return $ TypedVariable name typ () mkNodeMeta
-- | Shrink a typed variable by shrinking its name only.
shrinkTypedVariable :: MonadReader (S.Set Name) m => TypeCheckerTypedVariable -> m [TypeCheckerTypedVariable]
shrinkTypedVariable (TypedVariable v t m p) = do
  vs <- shrinkIdentifier v
  return [TypedVariable v' t m p | v' <- vs]
-- | Pick one of the four primitive types.
arbitraryType :: MonadGen g => g Type
arbitraryType = elements [T.Unit, T.Boolean, T.Integer, T.Double]
-- | The transformation of the environment that a partial signature causes, i.e. the arguments are added to the local variables.
-- The argument pure indicates whether it should be a pure context.
-- Note that this is not equivalent to the signature being pure since methods also have pure pre- and postconditions.
partialSigEnvTransform :: Bool -> PartialSignature -> GeneratorEnv -> GeneratorEnv
partialSigEnvTransform pure sig env = envWithPureness pure $ env{ variables = variables' }
  where as = M.map (map S.varName) $ keyBy S.varType $ Sara.TestUtils.AstGenUtils.args sig
        variables' = M.unionWith (++) (variables env) as
-- | Complete a partial signature with generated pre- and postconditions;
-- conditions are always generated in a pure context.
arbitrarySignatureForPartialSig :: (MonadGen g, MonadReader GeneratorEnv g) => PartialSignature -> g TypeCheckerSignature
arbitrarySignatureForPartialSig sig = toSignature sig <$> conditions <*> conditions
  where conditions = local envTransform $ scale intRoot $ listOf $ arbitraryExpression T.Boolean
        envTransform env = partialSigEnvTransform True sig env
-- | Generate a standalone signature (fresh partial signature plus conditions).
arbitrarySignature :: MonadGen g => g TypeCheckerSignature
arbitrarySignature = do
  sig <- evalStateT arbitraryPartialSignature S.empty
  runReaderT (arbitrarySignatureForPartialSig sig) (initialEnv [sig])
-- | Generate a function declaration for the given partial signature; the
-- body's context pureness matches the signature's.
arbitraryDeclForSignature :: (MonadGen g, MonadReader GeneratorEnv g) => PartialSignature -> g TypeCheckerDeclaration
arbitraryDeclForSignature partialSig = do
  sig <- arbitrarySignatureForPartialSig partialSig
  body <- local (partialSigEnvTransform (isPureSig partialSig) partialSig) $ arbitraryExpression $ S.retType sig
  return $ Function sig body mkNodeMeta
-- | Generate a standalone declaration.
arbitraryDeclaration :: MonadGen g => g TypeCheckerDeclaration
arbitraryDeclaration = do
  sig <- evalStateT arbitraryPartialSignature S.empty
  runReaderT (arbitraryDeclForSignature sig) (initialEnv [sig])
-- | An expression that is still missing its type and node metadata.
type IncompleteExpression = TypMeta -> NodeMeta -> TypeCheckerExpression
-- | Random boolean literal.
arbitraryBoolean :: MonadGen g => g IncompleteExpression
arbitraryBoolean = S.Boolean <$> arbitraryBool
-- | Generates a positive integer expression.
-- We don't like negative numbers here because -4 gets parsed as 4 with the unary minus applied to it.
arbitraryInteger :: MonadGen g => g IncompleteExpression
arbitraryInteger = S.Integer <$> arbitrarySizedNatural
-- | Random double literal drawn from a small set of "nice" values.
-- Uses '<$>' like the rest of this module instead of the legacy 'liftM'.
arbitraryDouble :: MonadGen g => g IncompleteExpression
arbitraryDouble = S.Double <$> elements niceDoubles
  where niceDoubles = [0.0, 0.1, 1.0, 1.1, 1e10, 1.1e10]
-- | Pick a variable of the given type from the environment, or 'Nothing'
-- when no such variable is in scope.
arbitraryVariable :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g (Maybe IncompleteExpression)
arbitraryVariable t = do
  vars <- asks $ M.lookup t . variables
  case vars of
    Nothing -> return Nothing
    Just [] -> return Nothing
    Just vs -> do
      v <- elements vs
      return $ Just $ Variable v ()
-- | Uniformly pick one of the assertion kinds.
arbitraryAssertionKind :: MonadGen g => g AssertionKind
arbitraryAssertionKind = elements assertionKinds
-- | Random assertion; only generated at Unit type in impure contexts.
-- The asserted condition itself is generated in a pure context.
-- Renamed the local binding that shadowed Prelude's 'pure' and replaced
-- 'liftM' with the applicative style used elsewhere in this module.
arbitraryAssertion :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g (Maybe IncompleteExpression)
arbitraryAssertion t = do
  isPure <- asks isPureEnv
  case t of
    T.Unit | not isPure -> Just <$> (Assertion <$> arbitraryAssertionKind <*> local (envWithPureness True) (arbitraryExpression T.Boolean))
    _ -> return Nothing
-- | Pick a callable with the given return type and generate arguments
-- for it, or 'Nothing' when no such callable exists in the environment.
arbitraryCall :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g (Maybe IncompleteExpression)
arbitraryCall t = do
  calls <- asks $ M.lookup t . callables
  case calls of
    Nothing -> return Nothing
    Just [] -> return Nothing
    -- Note: this 'calls' intentionally shadows the outer 'calls'.
    Just calls -> do
      PartialSignature{..} <- elements calls
      as <- arbitraryArgs args
      return $ Just $ Call sigName as ()
-- | Generates expressions as arguments for a function or method call given a list of desired typed variables
-- representing the formal arguments of the function or method to call.
-- NOTE(review): for an empty argument list this scales by (`div` 0);
-- apparently saved only by laziness since 'mapM' over [] never inspects
-- the size — confirm this cannot raise a division-by-zero.
arbitraryArgs :: (MonadGen g, MonadReader GeneratorEnv g) => [TypeCheckerTypedVariable] -> g [TypeCheckerExpression]
arbitraryArgs args = scale (`div` numArgs) $ mapM (arbitraryExpression . varType) args
  where numArgs = length args
-- | Integer square root obtained by rounding the floating-point root.
intRoot :: Int -> Int
intRoot n = round (sqrt (fromIntegral n :: Double))
-- | Invert a map, grouping the keys by their former values.
invert :: (Ord k, Ord v) => M.Map k v -> M.Map v [k]
invert m = M.fromListWith (++) pairs
  where pairs = [(v, [k]) | (k, v) <- M.toList m]
-- | Unary operators grouped by their result type.
invertedUnOps :: M.Map Type [TypedUnOp]
invertedUnOps = invert typedUnOps
-- | Binary operators grouped by their result type.
invertedBinOps :: M.Map Type [TypedBinOp]
invertedBinOps = invert typedBinOps
-- | Look up a key in a map of lists, returning the empty list for
-- missing keys.  The table parameter was previously named @map@, which
-- shadowed Prelude's 'map'; renamed for clarity (parameter names are
-- not part of the call interface in Haskell).
findListWithDefault :: Ord k => M.Map k [v] -> k -> [v]
findListWithDefault table key = M.findWithDefault [] key table
-- | All unary operators producing the given type.
typUnOps :: Type -> [TypedUnOp]
typUnOps = findListWithDefault invertedUnOps
-- | All binary operators producing the given type.
typBinOps :: Type -> [TypedBinOp]
typBinOps = findListWithDefault invertedBinOps
-- | Pick a random assignable expression (i.e. a variable) of the given
-- type, or 'Nothing' when no variable of that type is in scope.
arbitraryAssignable :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g (Maybe TypeCheckerExpression)
arbitraryAssignable t =
  arbitraryVariable t >>= traverse (addExpMeta t . return)
-- | Creates one generator for each possible binary operator for that type.
-- Local bindings renamed: 'pure' shadowed Prelude's 'pure' (this module
-- uses 'isPure' elsewhere, e.g. in arbitraryWhile) and 'exp' shadowed
-- Prelude's 'exp'.
arbitraryBinaryOperations :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g [IncompleteExpression]
arbitraryBinaryOperations t = do
  ops <- mapM binOp $ typBinOps t
  return $ catMaybes ops
  where binOp :: (MonadGen g, MonadReader GeneratorEnv g) => TypedBinOp -> g (Maybe IncompleteExpression)
        -- Assignments need an assignable variable and an impure context.
        binOp (TypedBinOp Assign r s) = do
          isPure <- asks isPureEnv
          var <- arbitraryAssignable r
          rhs <- arbitraryExpression s
          case var of
            Just v | not isPure -> return $ Just $ BinaryOperation Assign v rhs
            _ -> return Nothing
        binOp (TypedBinOp op r s) = Just <$> (BinaryOperation op <$> subtree r <*> subtree s)
        subtree r = scale (`div` 2) $ arbitraryExpression r
-- | Creates one generator for each possible unary operator for that type.
arbitraryUnaryOperations :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g [IncompleteExpression]
arbitraryUnaryOperations t = mapM unOp $ typUnOps t
  where unOp (TypedUnOp op s) = UnaryOperation op <$> subtree s
        subtree s = scale pred $ arbitraryExpression s
-- | Random conditional of the given type; each subtree gets a third of
-- the available size.
arbitraryConditional :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g IncompleteExpression
arbitraryConditional t = Conditional <$> subtree T.Boolean <*> subtree t <*> subtree t
  where subtree t = scale (`div` 3) $ arbitraryExpression t
-- | Random block of the given result type: first a list of fresh typed
-- variables is generated, then a definition for each (visible to the
-- following definitions), then a list of statements and a final
-- expression of the block's type.
-- Local bindings renamed: 'pure' shadowed Prelude's 'pure' (the module
-- uses 'isPure' elsewhere) and the where-bound 'exp' shadowed
-- Prelude's 'exp'.
arbitraryBlock :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g IncompleteExpression
arbitraryBlock t = do
  newVars <- evalStateT (scale ((`div` 2) . intRoot) $ listOf $ arbitraryTypedVariable) S.empty
  arbitraryBlockWithNewVars [] newVars
  where -- Generate one VarDef per new variable; the initializer may not
        -- reference the variable it defines.
        arbitraryBlockWithNewVars defs (var:vars) = do
          exp' <- local (withoutVarEnvTransform var) $ arbitraryExpression (varType var)
          isPure <- asks isPureEnv
          -- In a pure context only immutable bindings are allowed.
          isVal <- if isPure then return True else arbitraryBool
          def <- addExpMeta T.Unit $ return $ VarDef var isVal exp'
          local (varEnvTransform var) $ arbitraryBlockWithNewVars (def : defs) vars
        arbitraryBlockWithNewVars defs [] = do
          stmts' <- stmts
          exp' <- resultExp
          return $ Block (defs ++ stmts') exp'
        withoutVarEnvTransform :: TypeCheckerTypedVariable -> GeneratorEnv -> GeneratorEnv
        withoutVarEnvTransform var env = env{ variables = M.adjust (delete $ varName var) (varType var) (variables env) }
        varEnvTransform :: TypeCheckerTypedVariable -> GeneratorEnv -> GeneratorEnv
        varEnvTransform var env = env{ variables = M.insertWith (++) (varType var) [varName var] (variables env) }
        stmts = scale ((`div` 2) . intRoot) $ listOf $ arbitraryType >>= arbitraryExpression
        resultExp = scale intRoot $ arbitraryExpression t
-- | Random while loop; only generated at Unit type in impure contexts.
-- Loop invariants are generated in a pure context.
arbitraryWhile :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g (Maybe IncompleteExpression)
arbitraryWhile t = do
  isPure <- asks isPureEnv
  case t of
    T.Unit | not isPure -> Just <$> (While <$> invariant <*> subtree T.Boolean <*> (arbitraryType >>= subtree))
    _ -> return Nothing
  where subtree t = scale (`div` 2) $ arbitraryExpression t
        invariant = local (envWithPureness True) (scale intRoot $ listOf $ subtree T.Boolean)
-- | A size-zero expression: a constant, or a variable when one is in scope.
arbitraryLeafExpression :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g IncompleteExpression
arbitraryLeafExpression t = do
  var <- arbitraryVariable t
  c <- arbitraryConstant t
  case var of
    Nothing -> return c
    Just v -> elements [v, c]
-- | Random literal of the given type.
arbitraryConstant :: MonadGen g => Type -> g IncompleteExpression
arbitraryConstant T.Boolean = arbitraryBoolean
arbitraryConstant T.Integer = arbitraryInteger
arbitraryConstant T.Double = arbitraryDouble
arbitraryConstant T.Unit = return S.Unit
-- | Generate a non-leaf expression of the given type, weighting the
-- alternatives so that unary operations, binary operations and all
-- other forms each come up with roughly equal probability.
arbitraryInnerExpression :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g IncompleteExpression
arbitraryInnerExpression t = do
  binOps <- arbitraryBinaryOperations t
  unOps <- arbitraryUnaryOperations t
  let numUnOps = length unOps
  let numBinOps = length binOps
  let weightUnOps = if numBinOps == 0 then 1 else numBinOps
  let weightBinOps = if numUnOps == 0 then 1 else numUnOps
  let weightOthers = case (numUnOps, numBinOps) of
        (0, 0) -> 1
        (0, k) -> k
        (k, 0) -> k
        (k, l) -> k * l
  -- Calls, whiles and assertions may be unavailable in this context.
  unreliableArbitraries <- catMaybes <$> sequence [arbitraryCall t, arbitraryWhile t, arbitraryAssertion t]
  let anyTyped = map ($ t) [arbitraryLeafExpression, arbitraryLeafExpression, arbitraryConditional, arbitraryBlock]
                 ++ map return unreliableArbitraries
  -- We want the same probability to get a unary operation, a binary operation, a constant, a variable, a call or a conditional
  let weighted = map ((,) weightOthers) anyTyped
                 ++ map ((,) weightBinOps . return) binOps
                 ++ map ((,) weightUnOps . return) unOps
  frequency weighted
-- | Generate a well-typed expression: a leaf at size zero, an inner
-- node for positive sizes.
arbitraryExpression :: (MonadGen g, MonadReader GeneratorEnv g) => Type -> g TypeCheckerExpression
arbitraryExpression t = addExpMeta t $ sized expression'
  where expression' :: (MonadGen g, MonadReader GeneratorEnv g) => Int -> g IncompleteExpression
        expression' 0 = arbitraryLeafExpression t
        expression' n | n > 0 = arbitraryInnerExpression t
        expression' s = error $ "expression' for negative size " ++ show s ++ " not supported."
-- | The most simple expression with a given type. Used for variable shrinking.
trivial :: TypMeta -> TypeCheckerExpression
trivial m = trivial' (typTyp m) m mkNodeMeta
  where trivial' T.Boolean = S.Boolean False
        trivial' T.Integer = S.Integer 0
        trivial' T.Double = S.Double 0.0
        trivial' T.Unit = S.Unit
-- | Returns the free variables in an expression. Used to determine which declarations are shrinkable.
freeVariables :: TypeCheckerExpression -> S.Set TypeCheckerTypedVariable
freeVariables = foldMapExpression freeVariable
  where -- Each variable occurrence is rebuilt as a typed variable with
        -- fresh (unit/default) metadata.
        freeVariable v@(Variable a _ _ _) = S.singleton $ TypedVariable a (expressionTyp' v) () mkNodeMeta
        freeVariable _ = S.empty
-- | Returns the called functions in a program. Used to determine which signatures are shrinkable.
calledFunctions :: TypeCheckerProgram -> S.Set FunctionKey
calledFunctions = foldMapExpressions calledFunctionsExpression
  where calledFunctionsExpression c@Call{} = S.singleton $ callFunctionKey c
        calledFunctionsExpression _ = S.empty
-- | Structural shrinker for expressions.  Strategies: shrink literal
-- values, replace a node by one of its same-typed children, and shrink
-- subexpressions pointwise.
shrinkExpression :: TypeCheckerExpression -> [TypeCheckerExpression]
shrinkExpression b@S.Boolean{ boolValue = val } = [b{ boolValue = v } | v <- Q.shrink val]
shrinkExpression n@S.Integer{ intValue = val } = [n{ intValue = v } | v <- Q.shrink val]
shrinkExpression d@S.Double{ doubleValue = val } = [d{ doubleValue = v } | v <- Q.shrink val]
shrinkExpression S.Unit{} = []
-- A variable shrinks to the simplest expression of its type.
shrinkExpression (Variable _ _ m _) = [trivial m]
shrinkExpression (BinaryOperation Assign left right m p) = childrenWithType m [left, right]
                                                           ++ [BinaryOperation Assign left r m p | r <- Q.shrink right]
shrinkExpression (BinaryOperation op left right m p) = childrenWithType m [left, right]
                                                       ++ [BinaryOperation op l r m p | (l, r) <- Q.shrink (left, right)]
shrinkExpression (UnaryOperation op exp m p) = childrenWithType m [exp]
                                               ++ [UnaryOperation op e m p | e <- Q.shrink exp]
shrinkExpression (Call name args cm m p) = childrenWithType m args
                                           ++ [Call name a cm m p | a <- shrinkArgs args]
  where -- Shrink exactly one argument at a time.
        shrinkArgs :: [TypeCheckerExpression] -> [[TypeCheckerExpression]]
        shrinkArgs [] = []
        shrinkArgs (x:xs) = [y : xs | y <- Q.shrink x] ++ [x : ys | ys <- shrinkArgs xs]
shrinkExpression (Conditional cond ifExp elseExp m p) = childrenWithType m [cond, ifExp, elseExp]
                                                        ++ [Conditional c i e m p | (c, i, e) <- Q.shrink (cond, ifExp, elseExp)]
-- A block may shrink to its result expression (if that uses no
-- block-local variable), drop its last statement into result position,
-- shrink its statements, or shrink its result expression.
shrinkExpression (Block stmts exp m p) = [exp | S.null $ freeVariables exp `S.intersection` definedVars]
                                         ++ [Block (init stmts) (last stmts) m p | not (null stmts), expressionTyp' (last stmts) == typTyp m]
                                         ++ [Block s exp m p | s <- shrinkStmts stmts]
                                         ++ [Block stmts e m p | e <- Q.shrink exp]
  where freeVars = foldMap freeVariables (exp : stmts)
        definedVars = foldMap definedVar (exp : stmts)
        definedVar (VarDef v _ _ _ _) = S.singleton v
        definedVar _ = S.empty
        -- A definition can only be removed if its variable is never used.
        isRemovable (VarDef v _ _ _ _) = v `notElem` freeVars
        isRemovable _ = True
        shrinkStmts :: [TypeCheckerExpression] -> [[TypeCheckerExpression]]
        shrinkStmts [] = []
        shrinkStmts (x:xs) = [xs | isRemovable x] ++ [y : xs | y <- Q.shrink x] ++ [x : ys | ys <- shrinkStmts xs]
shrinkExpression (While invs cond body m p) = S.Unit m p
                                              : childrenWithType m [body]
                                              ++ [While i c b m p | (i, c, b) <- Q.shrink (invs, cond, body)]
shrinkExpression (Assertion k exp m p) = S.Unit m p : [Assertion k e m p | e <- Q.shrink exp]
shrinkExpression (VarDef var isVal exp m p) = [VarDef var isVal e m p | e <- Q.shrink exp]
-- | Children of a node whose type matches the node's own type.
childrenWithType :: TypMeta -> [TypeCheckerExpression] -> [TypeCheckerExpression]
childrenWithType m = filter (\c -> expressionTyp' c == (typTyp m))
-- | Shrink a signature by shrinking its conditions, its argument list
-- or its name.  The reader environment carries the set of called
-- functions; arguments and names may only change when the function is
-- never called.  @free@ is the set of variables that must be kept.
shrinkSignature :: MonadReader (S.Set FunctionKey) m => S.Set ParserTypedVariable -> TypeCheckerSignature -> m [TypeCheckerSignature]
shrinkSignature free sig@Signature{..} = do
  functionNames <- asks $ S.map funcName
  isRemovable <- asks $ flip isRemovableSignature sig
  let shrinkedConds = [Signature isPure sigName args retType precs' posts' sigMeta sigNodeMeta |
                       (precs', posts') <- Q.shrink (preconditions, postconditions)]
  let shrinkedArgs = [Signature isPure sigName args' retType preconditions postconditions sigMeta sigNodeMeta |
                      args' <- shrinkArgs isRemovable args]
  shrinkedNameIdentifiers <- runReaderT (shrinkIdentifier sigName) functionNames
  let shrinkedNames = [Signature isPure sigName' args retType preconditions postconditions sigMeta sigNodeMeta |
                       sigName' <- shrinkedNameIdentifiers, isRemovable]
  return $ shrinkedConds ++ shrinkedArgs ++ shrinkedNames
  where shrinkArgs :: Bool -> [ParserTypedVariable] -> [[ParserTypedVariable]]
        shrinkArgs isRemovable args = shrinkArgs' isRemovable args $ S.fromList $ map varName args
        shrinkArgs' :: Bool -> [ParserTypedVariable] -> S.Set Name -> [[ParserTypedVariable]]
        shrinkArgs' _ [] _ = []
        shrinkArgs' isRemovable (x:xs) argNames | x `S.member` free = [x : ys | ys <- shrinkArgs' isRemovable xs argNames]
                                                | otherwise = [xs | isRemovable] -- We can only remove arguments if the function is never called.
                                                              ++ [y : xs | y <- runReader (shrinkTypedVariable x) argNames]
                                                              ++ [x : ys | ys <- shrinkArgs' isRemovable xs argNames]
-- | A signature may be removed or renamed if its function is never called.
isRemovableSignature :: S.Set FunctionKey -> TypeCheckerSignature -> Bool
isRemovableSignature funcs sig = functionKey sig `S.notMember` funcs
-- | All variables that occur free in a signature's pre- or postconditions.
condFreeVariables :: TypeCheckerSignature -> S.Set TypeCheckerTypedVariable
condFreeVariables Signature{..} = foldMap freeVariables (preconditions ++ postconditions)
-- | Shrink a declaration: a function may shrink to an extern (dropping
-- its body), shrink its signature, or shrink its body; an extern only
-- shrinks its signature.  Removed a redundant single-statement 'do'
-- block from the Extern equation.
shrinkDeclaration :: MonadReader (S.Set FunctionKey) m => TypeCheckerDeclaration -> m [TypeCheckerDeclaration]
shrinkDeclaration (Function sig body meta) = do
  sigShrinks <- map (\s -> Extern s meta) <$> shrinkSignature free sig
  return $ sigShrinks ++ [Function sig b meta | b <- shrinkExpression body]
  where free = condFreeVariables sig `S.union` freeVariables body
shrinkDeclaration (Extern sig meta) =
  map (\s -> Extern s meta) <$> shrinkSignature free sig
  where free = condFreeVariables sig
-- | Shrink a program: remove a declaration whose function is never
-- called, shrink the head declaration, or shrink the tail.
shrinkProgram :: TypeCheckerProgram -> [TypeCheckerProgram]
shrinkProgram p = shrinkProgram' (calledFunctions p) p
  where meta = S.progMeta p
        shrinkProgram' _ (Program [] _) = []
        shrinkProgram' funcs (Program (x:xs) _) = headRemovals ++ map appendTail headShrinks ++ map appendHead tailShrinks
          where headShrinks = runReader (shrinkDeclaration x) funcs
                tailShrinks = shrinkProgram' funcs $ Program xs meta
                headRemovals = [Program xs meta | isRemovableDeclaration funcs x]
                isRemovableDeclaration funcs = isRemovableSignature funcs . signature
                appendTail y = Program (y:xs) meta
                appendHead (Program ys p) = Program (x : ys) p
-- | Fresh generator environment with no known callables or variables.
initialEnvWithPureness :: Bool -> GeneratorEnv
initialEnvWithPureness isPure = GeneratorEnv{ callables = M.empty, functions = M.empty, variables = M.empty, isPureEnv = isPure }
-- | Switch the pureness of an environment; in a pure context only the
-- pure functions remain callable.
envWithPureness :: Bool -> GeneratorEnv -> GeneratorEnv
envWithPureness isPure env = env{ callables = callables', isPureEnv = isPure }
  where callables' = if isPure then functions env else callables env
-- | Wrapper marking expressions that were generated in a pure context.
newtype PureExpression
  = PureExpression { runPureExpression :: TypeCheckerExpression }
  deriving (Eq, Ord, Show)
-- QuickCheck instances wiring the generators and shrinkers above into
-- 'Q.Arbitrary'.
instance Q.Arbitrary TypeCheckerExpression where
  arbitrary = do
    typ <- arbitraryType
    -- NOTE(review): this local 'pure' shadows Prelude's 'pure'.
    pure <- choose (False, True)
    runReaderT (arbitraryExpression typ) (initialEnvWithPureness pure)
  shrink = shrinkExpression
instance Q.Arbitrary PureExpression where
  arbitrary = do
    typ <- arbitraryType
    PureExpression <$> runReaderT (arbitraryExpression typ) (initialEnvWithPureness True)
  shrink = map PureExpression . shrinkExpression . runPureExpression
instance Q.Arbitrary TypeCheckerTypedVariable where
  arbitrary = evalStateT arbitraryTypedVariable S.empty
  shrink v = runReader (shrinkTypedVariable v) S.empty
instance Q.Arbitrary TypeCheckerSignature where
  arbitrary = arbitrarySignature
  shrink sig = runReader (shrinkSignature S.empty sig) S.empty
instance Q.Arbitrary TypeCheckerDeclaration where
  arbitrary = arbitraryDeclaration
  shrink decl = runReader (shrinkDeclaration decl) S.empty
instance Q.Arbitrary TypeCheckerProgram where
  arbitrary = arbitraryProgram
  shrink prog = shrinkProgram prog
instance Q.Arbitrary Type where
  arbitrary = arbitraryType
instance Q.Arbitrary UnaryOperator where
  arbitrary = elements unaryOperators
instance Q.Arbitrary BinaryOperator where
  arbitrary = elements binaryOperators
| Lykos/Sara | tests/Sara/TestUtils/AstGenUtils.hs | gpl-3.0 | 26,298 | 0 | 16 | 6,007 | 7,849 | 3,983 | 3,866 | 437 | 6 |
module MC.Pi (approximatePi) where
import MC.Internal.Circle
-- | Monte-Carlo estimate of pi: four times the fraction of sample
-- points that fall inside the unit circle.  For an empty sample the
-- result is 0/0 = NaN, as in the original.
approximatePi :: [Point] -> Double
approximatePi points =
  4.0 * fromIntegral hits / fromIntegral total
  where hits = length (filter isInUnitCircle points)
        total = length points
| matt-keibler/monte-carlo-pi | monte-carlo-pi-hs/src/MC/Pi.hs | gpl-3.0 | 281 | 0 | 8 | 45 | 75 | 40 | 35 | 6 | 1 |
module Utils.GenEventSIR where
import Control.Monad.Random
import Control.Monad.Reader
import Control.Monad.Writer
import Data.MonadicStreamFunction.InternalCore
import Test.Tasty.QuickCheck
import SIR.Event
import SIR.Model
import Utils.GenSIR
-- | Generate an arbitrary SIR event with uniform weights.
genEvent :: [AgentId] -> Gen SIREvent
genEvent = genEventFreq 1 1 1 (1,1,1)
-- | Non-empty list of strictly positive agent ids.
genNonEmptyAgentIds :: Gen [AgentId]
genNonEmptyAgentIds = listOf1 (do
  (Positive t) <- arbitrary :: Gen (Positive Int)
  return t)
-- | Possibly empty list of strictly positive agent ids.
genAgentIds :: Gen [AgentId]
genAgentIds = map (\(Positive i) -> i) <$> (arbitrary :: Gen [Positive Int])
-- | Generate an event with the given frequencies for MakeContact,
-- Contact and Recover; the triple weights the contact state.  With no
-- agent ids available, no Contact events are generated.
genEventFreq :: Int
             -> Int
             -> Int
             -> (Int, Int, Int)
             -> [AgentId]
             -> Gen SIREvent
genEventFreq mcf _ rcf _ []
  = frequency [ (mcf, return MakeContact), (rcf, return Recover)]
genEventFreq mcf cof rcf (s,i,r) ais
  = frequency [ (mcf, return MakeContact)
              , (cof, do
                  ss <- frequency [ (s, return Susceptible)
                                  , (i, return Infected)
                                  , (r, return Recovered)]
                  ai <- elements ais
                  return $ Contact ai ss)
              , (rcf, return Recover)]
-- | Infinite stream of queue items with strictly increasing event
-- times, all scheduled after time @t@.  Callers must truncate the
-- stream themselves.
genQueueItemStream :: Double
                   -> [AgentId]
                   -> Gen [QueueItem SIREvent]
genQueueItemStream t ais = do
  evt <- genQueueItem t ais
  evts <- genQueueItemStream (eventTime evt) ais
  return (evt : evts)
-- | One queue item scheduled strictly after time @t@ (positive dt) for
-- a random receiver out of @ais@.
genQueueItem :: Double
             -> [AgentId]
             -> Gen (QueueItem SIREvent)
genQueueItem t ais = do
  (Positive dt) <- arbitrary
  e <- genEvent ais
  receiver <- elements ais
  let evtTime = t + dt
  return $ QueueItem e receiver evtTime
-- | Scheduled time of a queue item.
eventTime :: QueueItem e -> Time
eventTime (QueueItem _ _ et) = et
-- | Build and run a susceptible agent on one event; returns the chosen
-- agent id, the agent's output state and the events it scheduled.
genRunSusceptibleAgent :: Int
                       -> Double
                       -> Double
                       -> Double
                       -> [AgentId]
                       -> SIREvent
                       -> Gen (AgentId, SIRState, [QueueItem SIREvent])
genRunSusceptibleAgent cor inf ild t ais evt = do
  g <- genStdGen
  -- the susceptible agents id is picked randomly from all empty agent ids
  ai <- elements ais
  -- create susceptible agent with agent id
  let a = susceptibleAgent ai cor inf ild
  -- run agent with given event and configuration
  let (_g', _a', ao, es) = runAgent g a evt t ais
  return (ai, ao, es)
-- | Build and run an infected agent on one event.
genRunInfectedAgent :: Double
                    -> [AgentId]
                    -> SIREvent
                    -> Gen (AgentId, SIRState, [QueueItem SIREvent])
genRunInfectedAgent t ais evt = do
  g <- genStdGen
  -- the susceptible agents id is picked randomly from all empty agent ids
  ai <- elements ais
  -- create susceptible agent with agent id
  let a = infectedAgent ai
  -- run agent with given event and configuration
  let (_g', _a', ao, es) = runAgent g a evt t ais
  return (ai, ao, es)
-- | Build and run a recovered agent on one event.
genRunRecoveredAgent :: Double
                     -> [AgentId]
                     -> SIREvent
                     -> Gen (SIRState, [QueueItem SIREvent])
genRunRecoveredAgent t ais evt = do
  g <- genStdGen
  -- create susceptible agent with agent id
  let a = recoveredAgent
  -- run agent with given event and configuration
  let (_g', _a', ao, es) = runAgent g a evt t ais
  return (ao, es)
-- | Run a full event-driven SIR simulation with a random seed and
-- return its time series of (S, I, R) counts.
genEventSIR :: [SIRState]
            -> Int
            -> Double
            -> Double
            -> Integer
            -> Double
            -> Gen [(Time, (Int, Int, Int))]
genEventSIR as cor inf ild maxEvents maxTime
  = fst . runEventSIR as cor inf ild maxEvents maxTime <$> genStdGen
-- | Last entry of a simulation run; falls back to the aggregated
-- initial states at @maxTime@ when the run produced no entries.
genLastEventSIR :: [SIRState]
                -> Int
                -> Double
                -> Double
                -> Integer
                -> Double
                -> Gen (Time, (Int, Int, Int))
genLastEventSIR [] _ _ _ _ _ = return (0, (0,0,0))
genLastEventSIR as cor inf ild maxEvents maxTime = do
  ret <- genEventSIR as cor inf ild maxEvents maxTime
  if null ret
    then return (maxTime, aggregateSIRStates as)
    else return (last ret)
-- | Final (S, I, R) counts of @n@ independent replications.
genEventSIRRepls :: Int
                 -> [SIRState]
                 -> Int
                 -> Double
                 -> Double
                 -> Integer
                 -> Double
                 -> Gen [(Int, Int, Int)]
genEventSIRRepls n as cor inf ild maxEvents tMax
  = map snd <$> vectorOf n (genLastEventSIR as cor inf ild maxEvents tMax)
--------------------------------------------------------------------------------
-- AGENT RUNNER
--------------------------------------------------------------------------------
-- | Run one step of an agent MSF for a single event, unpeeling the
-- transformer stack: time reader, event writer, agent-ids reader, and
-- finally the random monad.  Returns the new generator, continuation,
-- output state and the events written by the agent.
runAgent :: RandomGen g
         => g
         -> SIRAgentMSF g
         -> SIREvent
         -> Time
         -> [AgentId]
         -> (g, SIRAgentMSF g, SIRState, [QueueItem SIREvent])
runAgent g a e t ais = (g', a', ao, es)
  where
    aMsf = unMSF a e
    aEvtWriter = runReaderT aMsf t
    aAisReader = runWriterT aEvtWriter
    aRand = runReaderT aAisReader ais
    (((ao, a'), es), g') = runRand aRand g
| thalerjonathan/phd | thesis/code/sir/src/test/Utils/GenEventSIR.hs | gpl-3.0 | 5,042 | 0 | 14 | 1,679 | 1,516 | 793 | 723 | 129 | 2 |
-- | This module contains parsing functions for Infernal programs
module Biobase.RNAlien.InfernalParser (
module Biobase.RNAlien.Types,
readCMSearch,
readCMSearches,
parseCMSearch,
parseCMSearches,
)
where
import Text.ParserCombinators.Parsec
import Biobase.RNAlien.Types
import qualified Data.ByteString.Char8 as B
-- | Parse cmsearch output from an in-memory String.
parseCMSearch :: String -> Either ParseError CMsearch
parseCMSearch = parse genParserCMSearch "parseCMsearch"
-- | Parse concatenated cmsearch outputs (multiple query models) from a String.
parseCMSearches :: String -> Either ParseError CMsearch
parseCMSearches = parse genParserCMSearches "parseCMsearch"
-- | Parse a cmsearch result file from the given file path.
-- The previous version bound the result and immediately returned it
-- (@x <- m; return x@), which is just @m@.
readCMSearch :: String -> IO (Either ParseError CMsearch)
readCMSearch = parseFromFile genParserCMSearch
-- | Parse a multi-model cmsearch result file from the given file path.
-- Simplified from @x <- m; return x@ to just @m@.
readCMSearches :: String -> IO (Either ParseError CMsearch)
readCMSearches = parseFromFile genParserCMSearches
-- | Parser for cmsearch output containing several query models in one
-- model file (e.g. clans); the hits of all models are concatenated.
genParserCMSearches :: GenParser Char st CMsearch
genParserCMSearches = do
  --_ <- string "# cmsearch :: search CM(s) against a sequence database"
  --_ <- newline
  --_ <- string "# INFERNAL "
  --_ <- many1 (noneOf "\n")
  --_ <- newline
  --_ <- string "# Copyright (C) 201"
  --_ <- many1 (noneOf "\n")
  --_ <- newline
  --_ <- manyTill anyChar (try (string "# Freely distributed under the GNU General Public License (GPLv3).") --Freely distributed under a BSD open source license.)
  --_ <- newline
  -- Skip everything up to the first separator line.
  _ <- manyTill anyChar (try (string "# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"))
  _ <- newline
  _ <- string "# query CM file:"
  skipMany1 space
  queryCMfile' <- many1 (noneOf "\n")
  newline
  _ <- string "# target sequence database:"
  skipMany1 space
  targetSequenceDatabase' <- many1 (noneOf "\n")
  _ <- newline
  -- Optional header fields that only appear for some configurations.
  optional (try (genParserCMsearchHeaderField "# CM configuration"))
  optional (try (genParserCMsearchHeaderField "# database size is set to"))
  optional (try (genParserCMsearchHeaderField "# truncated sequence detection"))
  _ <- string "# number of worker threads:"
  skipMany1 space
  numberOfWorkerThreads' <- many1 (noneOf "\n")
  _ <- newline
  _ <- string "# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
  _ <- newline
  _ <- optional newline
  -- One hit-score section per query model.
  cmSearchesHits <- many1 (try genParserMultipleCMSearch)
  _ <- optional (string "[ok]\n")
  _ <- eof
  return $ CMsearch queryCMfile' targetSequenceDatabase' numberOfWorkerThreads' (concat cmSearchesHits)
-- | Parser for single-model cmsearch output: header, hit-score table
-- and (unparsed) hit alignments plus internal statistics.
genParserCMSearch :: GenParser Char st CMsearch
genParserCMSearch = do
  --_ <- string "# cmsearch :: search CM(s) against a sequence database"
  --_ <- newline
  --_ <- string "# INFERNAL "
  --skipMany1 (noneOf "\n")
  --_ <- newline
  --_ <- string "# Copyright (C) 201"
  --_ <- many1 (noneOf "\n")
  --_ <- newline
  --_ <- string "# Freely distributed under the GNU General Public License (GPLv3)."
  --_ <- newline
  -- Skip everything up to the first separator line.
  _ <- manyTill anyChar (try (string "# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"))
  _ <- newline
  _ <- string "# query CM file:"
  _ <- skipMany1 space
  queryCMfile' <- many1 (noneOf "\n")
  _ <- newline
  _ <- string "# target sequence database:"
  skipMany1 space
  targetSequenceDatabase' <- many1 (noneOf "\n")
  _ <- newline
  _ <- optional (try (genParserCMsearchHeaderField "# CM configuration"))
  _ <- optional (try (genParserCMsearchHeaderField "# database size is set to"))
  _ <- optional (try (genParserCMsearchHeaderField "# truncated sequence detection"))
  _ <- string "# number of worker threads:"
  skipMany1 space
  numberOfWorkerThreads' <- many1 (noneOf "\n")
  _ <- newline
  _ <- string "# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -"
  _ <- newline
  _ <- optional newline
  _ <- string "Query:"
  skipMany1 (noneOf "\n")
  _ <- newline
  _ <- optional (try (genParserCMsearchHeaderField "Accession"))
  _ <- optional (try (genParserCMsearchHeaderField "Description"))
  _ <- string "Hit scores:"
  _ <- newline
  -- The "rank" column header is indented differently across versions.
  _ <- choice [try (string " rank"), try (string "  rank") , try (string "   rank"), try (string "    rank"),try (string "     rank"),try (string "      rank")]
  many1 space
  string "E-value"
  --many1 space
  --string "score"
  --many1 space
  --string "bias"
  --many1 space
  --string "sequence"
  --many1 space
  --string "start"
  --many1 space
  --string "end"
  --many1 space
  --string "mdl"
  --many1 space
  --string "trunc"
  --many1 space
  --string "gc"
  --many1 space
  --string "description"
  --newline
  _ <- manyTill anyChar (try (string "-"))
  --string " -"
  skipMany1 (try (oneOf " -"))
  _ <- newline
  optional (try (string " ------ inclusion threshold ------"))
  skipMany newline
  hitScores' <- many (try genParserCMsearchHit) --`endBy` (try (string "Hit alignments:"))
  optional (try genParserCMsearchEmptyHit)
  -- this is followed by hit alignments and internal cmsearch statistics which are not parsed
  _ <- many anyChar
  _ <- eof
  return $ CMsearch queryCMfile' targetSequenceDatabase' numberOfWorkerThreads' hitScores'
-- | Parsing function for CMSearches with multiple querymodels in one modelfile, e.g. clans
-- Consumes one "Query: ... //" section and returns its hit scores.
genParserMultipleCMSearch :: GenParser Char st [CMsearchHit]
genParserMultipleCMSearch = do
  --optional newline
  --optional string "//"
  string "Query:"
  many1 (noneOf "\n")
  newline
  optional (try (genParserCMsearchHeaderField "Accession"))
  optional (try (genParserCMsearchHeaderField "Description"))
  string "Hit scores:"
  newline
  -- The "rank" column header is indented differently across versions.
  choice [try (string " rank"), try (string "  rank") , try (string "   rank"), try (string "    rank"),try (string "     rank"),try (string "      rank")]
  many1 space
  string "E-value"
  many1 space
  string "score"
  many1 space
  string "bias"
  many1 space
  string "sequence"
  many1 space
  string "start"
  many1 space
  string "end"
  many1 space
  string "mdl"
  many1 space
  string "trunc"
  many1 space
  string "gc"
  many1 space
  string "description"
  newline
  string " -"
  many1 (try (oneOf " -"))
  newline
  optional (try (string " ------ inclusion threshold ------"))
  many newline
  hitScores' <- many (try genParserCMsearchHit) --`endBy` (try (string "Hit alignments:"))
  optional (try genParserCMsearchEmptyHit)
  -- this is followed by hit alignments and internal cmsearch statistics which are not parsed
  --many anyChar
  manyTill anyChar (try (string "//\n"))
  return hitScores'
-- | Parse a header line of the form "<fieldname>: <value>".  The value
-- is consumed but discarded; the parser always returns the empty string.
genParserCMsearchHeaderField :: String -> GenParser Char st String
genParserCMsearchHeaderField fieldname = do
  string (fieldname ++ ":")
  many1 space
  many1 (noneOf "\n")
  newline
  return []
-- | Parse the "no hits" placeholder line, yielding an empty hit list.
genParserCMsearchEmptyHit :: GenParser Char st [CMsearchHit]
genParserCMsearchEmptyHit = do
  string "   [No hits detected that satisfy reporting thresholds]"
  newline
  optional (try newline)
  return []
-- | Parse one row of the hit-score table into a 'CMsearchHit'.
genParserCMsearchHit :: GenParser Char st CMsearchHit
genParserCMsearchHit = do
  many1 space
  string "("
  hitRank' <- many1 digit
  string ")"
  many1 space
  -- '!' marks a significant hit, '?' a questionable one.
  hitSignificant' <- choice [char '!', char '?']
  many1 space
  hitEValue' <- many1 (oneOf "0123456789.e-")
  many1 space
  hitScore' <- many1 (oneOf "0123456789.e-")
  many1 space
  hitBias' <- many1 (oneOf "0123456789.e-")
  many1 space
  hitSequenceHeader' <- many1 (noneOf " ")
  many1 space
  hitStart' <- many1 digit
  many1 space
  hitEnd' <- many1 digit
  many1 space
  hitStrand' <- choice [char '+', char '-', char '.']
  many1 space
  hitModel' <- many1 letter
  many1 space
  hitTruncation' <- many1 (choice [alphaNum, char '\''])
  many1 space
  hitGCcontent' <- many1 (oneOf "0123456789.e-")
  many1 space
  hitDescription' <- many1 (noneOf "\n")
  newline
  optional (try (string " ------ inclusion threshold ------"))
  optional (try newline)
  return $ CMsearchHit (readInt hitRank') hitSignificant' (readDouble hitEValue') (readDouble hitScore') (readDouble hitBias') (B.pack hitSequenceHeader') (readInt hitStart') (readInt hitEnd') hitStrand' (B.pack hitModel') (B.pack hitTruncation') (readDouble hitGCcontent') (B.pack hitDescription')
-- | Convert a digit string captured by the parser to an 'Int'.
-- NOTE(review): 'read' is partial; inputs here come from 'many1 digit'
-- above, so malformed strings presumably cannot reach it — confirm.
readInt :: String -> Int
readInt s = read s
-- | Convert a numeric string captured by the parser to a 'Double'.
-- NOTE(review): 'read' is partial; the parser admits any run of
-- "0123456789.e-", so a malformed run like ".." would crash — confirm
-- cmsearch never emits such values.
readDouble :: String -> Double
readDouble s = read s
| eggzilla/RNAlien | Biobase/RNAlien/InfernalParser.hs | gpl-3.0 | 8,441 | 0 | 12 | 1,841 | 2,121 | 968 | 1,153 | 180 | 1 |
{- Merch.Race.UI.Button - Button user interface.
Copyright 2013 Alan Manuel K. Gloria
This file is part of Merchant's Race.
Merchant's Race is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Merchant's Race is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Merchant's Race. If not, see <http://www.gnu.org/licenses/>.
-}
{- General button creation. -}
module Merch.Race.UI.Button
( buttonGeneric
, button
, ButtonConfig
, ButtonConfigOpt(..)
, mkButtonConfig
) where
import Merch.Race.UI.Drawing
import qualified Merch.Race.UI.DrawingCombinators as Draw
import Merch.Race.UI.DrawingCombinators((%%))
import Data.List
import Data.Monoid
-- | Build a button drawing as a four-state machine (idle, hover,
-- pressed, pressed-but-moved-off) over three images; releasing the
-- mouse button over the widget replaces the current screen.
buttonGeneric :: Draw.Image Any -> Draw.Image Any -> Draw.Image Any
              -> Screen -> Drawing
buttonGeneric idle hover press screen = idleState
  where
    -- Idle: entering the widget switches to the hover state.
    idleF MouseMove = [Modify hoverState]
    idleF _ = []
    idleState = drawing idleF idle
    -- Hover: leaving returns to idle; pressing grabs input.
    hoverF MouseMoveOut = [Modify idleState]
    hoverF MouseDown = [Grab, Modify pressState]
    hoverF _ = []
    hoverState = drawing hoverF hover
    -- Pressed: releasing over the widget activates the button.
    pressF MouseMoveOut = [Modify pressOffState]
    pressF MouseUp = [Ungrab, Replace screen]
    pressF _ = []
    pressState = drawing pressF press
    -- Pressed but moved off: releasing cancels, re-entering re-arms.
    pressOffF MouseMove = [Modify pressState]
    pressOffF MouseUp = [Ungrab, Modify idleState]
    pressOffF _ = []
    pressOffState = drawing pressOffF idle
-- | Resolved button configuration (see 'mkButtonConfig').
data ButtonConfig
  = BC
    { bcFont :: Draw.Font       -- ^ font used to render the label
    , bcWidth :: Draw.R         -- ^ total button width
    , bcHeight :: Draw.R        -- ^ total button height
    , bcTextHeight :: Draw.R    -- ^ height of the label text
    }
-- | Options accepted by 'mkButtonConfig', applied left to right.
data ButtonConfigOpt
  = ButtonFont Draw.Font
  | ButtonWidth Draw.R
  | ButtonHeight Draw.R
  | ButtonTextHeight Draw.R
-- | Build a 'ButtonConfig' from options applied left to right.
-- Defaults: width 0.8, height 0.12, text height 0.04.  There is no
-- default font; the field was previously 'undefined', which crashed
-- with an unhelpful message when forced — replaced with a descriptive
-- 'error' so a missing 'ButtonFont' option is diagnosable.
mkButtonConfig :: [ButtonConfigOpt] -> ButtonConfig
mkButtonConfig = foldl' process start
  where
    start = BC
      { bcFont = error "mkButtonConfig: ButtonFont option not supplied"
      , bcWidth = 0.8
      , bcHeight = 0.12
      , bcTextHeight = 0.04
      }
    -- Each option overwrites the corresponding field.
    process c (ButtonFont f) = c { bcFont = f }
    process c (ButtonWidth w) = c { bcWidth = w }
    process c (ButtonHeight h) = c { bcHeight = h }
    process c (ButtonTextHeight t) = c { bcTextHeight = t }
-- | A labelled rounded-rectangle button centered at @(x,y)@; activating
-- it switches to the given screen.
button :: ButtonConfig -> (Draw.R, Draw.R) -> String -> Screen -> Drawing
button bc (x,y) l screen = buttonGeneric idle hover press screen
  where
    BC font fw fh t = bc
    labelRaw = Draw.text font l
    -- Center the label horizontally and adjust its baseline.
    labelCentered
      = Draw.translate (negate $ Draw.textWidth font l / 2, negate 0.7) %% labelRaw
    labelSized
      = Draw.scale t t %% labelCentered
    labelMoved
      = Draw.forceSample (Any False) $ Draw.translate (x,y) %% labelSized
    w = fw / 2; nw = negate w
    h = fh / 2; nh = negate h
    -- Set up sizes for curves.
    outerRadiusBase = 0.03
    diffBase = 0.01
    outerRadius
      | min fw fh < outerRadiusBase = min fw fh
      | otherwise = outerRadiusBase
    -- NOTE(review): when the radius leaves no room for the border the
    -- guard falls back to diffBase, which is larger than the available
    -- space — confirm this degenerate case is intended.
    diff
      | outerRadius - diffBase < 0 = diffBase
      | otherwise = outerRadius / 2
    innerRadius = outerRadius - diff
    -- Generate curved-corner rectangles.
    innerBox
      = roundedRectangle innerRadius (nw+diff, nh+diff) (w-diff,h-diff)
    outerBox
      = roundedRectangle outerRadius (nw , nh ) (w ,h )
    innerBoxMoved
      = Draw.translate (x,y) %% innerBox
    outerBoxMoved
      = Draw.translate (x,y) %% outerBox
    white = Draw.Color 1 1 1 1
    gray = Draw.Color 0.3 0.3 0.3 1
    black = Draw.Color 0 0 0 1
    -- Idle: white label on black fill with white border.
    idle
      = mconcat
        [ Draw.tint white labelMoved
        , Draw.tint black innerBoxMoved
        , Draw.tint white outerBoxMoved
        ]
    -- Hover: fill lightens to gray.
    hover
      = mconcat
        [ Draw.tint white labelMoved
        , Draw.tint gray innerBoxMoved
        , Draw.tint white outerBoxMoved
        ]
    -- Pressed: inverted — black label on a solid white box.
    press
      = mconcat
        [ Draw.tint black labelMoved
        , Draw.tint white outerBoxMoved
        ]
-- Rounded rectangle implementation
-- | Axis-aligned rectangle with quarter-circle corners of the given
-- radius; the two points are opposite corners in either order.
roundedRectangle :: Draw.R -> (Draw.R, Draw.R) -> (Draw.R, Draw.R) -> Draw.Image Any
roundedRectangle radius (x1, y1) (x2, y2)
  = core (min x1 x2) (min y1 y2) (max x1 x2) (max y1 y2)
  where
    -- Union of two overlapping rectangles and four corner circles.
    core lx ly ux uy
      = mconcat
        [ circles
        , tall
        , wide
        ]
      where
        lxr = lx + radius
        lyr = ly + radius
        uxr = ux - radius
        uyr = uy - radius
        tall = Draw.rectangle (lxr, ly) (uxr, uy)
        wide = Draw.rectangle (lx, lyr) (ux, uyr)
        circleSized = Draw.scale radius radius %% Draw.circle
        circles = mconcat
          [ Draw.translate (lxr, lyr) %% circleSized
          , Draw.translate (lxr, uyr) %% circleSized
          , Draw.translate (uxr, uyr) %% circleSized
          , Draw.translate (uxr, lyr) %% circleSized
          ]
| AmkG/merchants-race | Merch/Race/UI/Button.hs | gpl-3.0 | 4,982 | 0 | 13 | 1,324 | 1,414 | 763 | 651 | 117 | 8 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-| Update ProductVariant Prices by SKU from a CSV. -}
import Control.Monad.Logger (runNoLoggingT)
import Data.Csv (FromNamedRecord, decodeByName)
import Database.Persist
import Database.Persist.Postgresql
    ( ConnectionPool, SqlPersistT, createPostgresqlPool, runSqlPool
    )
import GHC.Generics (Generic)
import Models
import Models.Fields (Cents, fromDollars)
import Text.Read (readMaybe)
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Char as Char
import qualified Data.Text as T
import qualified Data.Vector as V
-- | Read @new_product_prices.csv@, turn each row into a
-- (base SKU, suffix, price) triple via 'readPrices', then run the
-- updates against the database.  A CSV parse failure aborts via 'error'
-- (acceptable for a one-off maintenance script).
main :: IO ()
main = do
    productRows <- fmap readPrices . either error snd . decodeByName
        <$> LBS.readFile "new_product_prices.csv"
    connectToPostgres >>= runSqlPool (updateProducts productRows)
-- | Open a single-connection pool against the @sese-website@ database,
-- discarding persistent's connection-pool log output.
connectToPostgres :: IO ConnectionPool
connectToPostgres =
    runNoLoggingT (createPostgresqlPool "dbname=sese-website" 1)
-- | One row of the input CSV.  The CSV column headers must match these
-- field names exactly, since 'FromNamedRecord' is derived generically.
data ProductData =
    ProductData
        { productSku :: T.Text -- ^ full SKU, possibly ending in a variant letter
        , price :: T.Text -- ^ dollar amount as text; parsed in 'readPrices'
        } deriving (Show, Generic)
-- Generic-derived parser keyed on the record field names above.
instance FromNamedRecord ProductData
-- | Split a raw SKU into its base SKU and (possibly empty) one-letter
-- variant suffix, and parse the price column into 'Cents'.
--
-- A trailing alphabetic character is treated as the variant suffix;
-- everything else is the base SKU.  Unlike the previous
-- @T.head . T.takeEnd 1@ formulation, an empty SKU cell no longer
-- crashes with a partial-function error, and an unparsable price aborts
-- with a message naming the offending SKU instead of a bare 'read'
-- failure.
readPrices :: ProductData -> (T.Text, T.Text, Cents)
readPrices prod =
    let fullSku =
            productSku prod
        (prefix, suffix) =
            -- Guard with T.null first: T.last is partial on empty Text.
            if not (T.null fullSku) && Char.isAlpha (T.last fullSku) then
                (T.dropEnd 1 fullSku, T.takeEnd 1 fullSku)
            else
                (fullSku, "")
        dollars =
            case readMaybe $ T.unpack $ price prod of
                Nothing ->
                    error $ "Could not parse price for SKU: " ++ T.unpack fullSku
                Just d ->
                    d
    in
        ( prefix
        , suffix
        , fromDollars dollars
        )
-- | For each (base SKU, suffix, price) triple: look up the product by
-- its unique base SKU, then the variant by (product id, suffix), and
-- set that variant's price.  A missing product or variant aborts the
-- whole run with 'error' — deliberate fail-fast behaviour for a
-- one-off migration script.
updateProducts :: V.Vector (T.Text, T.Text, Cents) -> SqlPersistT IO ()
updateProducts =
    V.mapM_ $ \(baseSku, skuSuffix, price_) ->
        getBy (UniqueBaseSku baseSku) >>= \case
            Nothing ->
                error $ "No product with base SKU: " ++ T.unpack baseSku
            Just (Entity pId _) ->
                getBy (UniqueSku pId skuSuffix) >>= \case
                    Nothing ->
                        error $ "No product with full SKU: "
                            ++ T.unpack baseSku ++ T.unpack skuSuffix
                    Just (Entity vId _) ->
                        update vId [ProductVariantPrice =. price_]
| Southern-Exposure-Seed-Exchange/southernexposure.com | server/scripts/UpdateProductPrices.hs | gpl-3.0 | 2,188 | 0 | 18 | 599 | 561 | 305 | 256 | 54 | 3 |
module Main where
import Game.Regret (defaultMain)
import Game.Select (SelectGame(SelectGame))
import Game.KuhnPoker (KuhnPoker(KuhnPoker))
-- | Entry point: hand Kuhn poker (as the single available game) to the
-- regret solver's standard driver.
main :: IO ()
main = defaultMain (pure (SelectGame KuhnPoker))
| davidspies/regret-solver | kuhn-poker/app/Main.hs | gpl-3.0 | 206 | 0 | 6 | 27 | 68 | 40 | 28 | 6 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Firewalls.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a firewall rule in the specified project using the data included
-- in the request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.firewalls.insert@.
module Network.Google.Resource.Compute.Firewalls.Insert
(
-- * REST Resource
FirewallsInsertResource
-- * Creating a Request
, firewallsInsert
, FirewallsInsert
-- * Request Lenses
, fiRequestId
, fiProject
, fiPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.firewalls.insert@ method which the
-- 'FirewallsInsert' request conforms to.
-- Servant-style route:
--   POST /compute/v1/projects/{project}/global/firewalls?requestId=...
-- with a Firewall JSON body, returning an Operation.
type FirewallsInsertResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "firewalls" :>
                 QueryParam "requestId" Text :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] Firewall :> Post '[JSON] Operation
-- | Creates a firewall rule in the specified project using the data included
-- in the request.
--
-- /See:/ 'firewallsInsert' smart constructor.
-- Strict record of the request parameters; only the request ID is optional.
data FirewallsInsert =
  FirewallsInsert'
    { _fiRequestId :: !(Maybe Text) -- ^ optional idempotency token
    , _fiProject :: !Text -- ^ project ID
    , _fiPayload :: !Firewall -- ^ firewall rule to create
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'FirewallsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fiRequestId'
--
-- * 'fiProject'
--
-- * 'fiPayload'
firewallsInsert
    :: Text -- ^ 'fiProject'
    -> Firewall -- ^ 'fiPayload'
    -> FirewallsInsert
firewallsInsert pFiProject_ pFiPayload_ =
  FirewallsInsert'
    {_fiRequestId = Nothing, _fiProject = pFiProject_, _fiPayload = pFiPayload_}
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
fiRequestId :: Lens' FirewallsInsert (Maybe Text)
fiRequestId = lens _fiRequestId setter
  where setter s a = s {_fiRequestId = a}

-- | Project ID for this request.
fiProject :: Lens' FirewallsInsert Text
fiProject = lens _fiProject setter
  where setter s a = s {_fiProject = a}

-- | Multipart request metadata.
fiPayload :: Lens' FirewallsInsert Firewall
fiPayload = lens _fiPayload setter
  where setter s a = s {_fiPayload = a}
-- Binds the request record to the route above: the response is an
-- 'Operation', and either the cloud-platform or compute OAuth scope
-- authorises the call.  Argument order to @go@ mirrors the route's
-- capture/query/body positions.
instance GoogleRequest FirewallsInsert where
        type Rs FirewallsInsert = Operation
        type Scopes FirewallsInsert =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        requestClient FirewallsInsert'{..}
          = go _fiProject _fiRequestId (Just AltJSON)
              _fiPayload
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy FirewallsInsertResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Firewalls/Insert.hs | mpl-2.0 | 4,242 | 0 | 16 | 961 | 484 | 292 | 192 | 73 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AppsTasks.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AppsTasks.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- | V1 error format.
-- Two-valued enum for the @$.xgafv@ (error format version) query parameter.
data Xgafv
  = X1
    -- ^ @1@
    -- v1 error format
  | X2
    -- ^ @2@
    -- v2 error format
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable Xgafv
-- Round-trips with 'ToHttpApiData' below: "1" <-> X1, "2" <-> X2.
instance FromHttpApiData Xgafv where
    parseQueryParam = \case
        "1" -> Right X1
        "2" -> Right X2
        x -> Left ("Unable to parse Xgafv from: " <> x)
instance ToHttpApiData Xgafv where
    toQueryParam = \case
        X1 -> "1"
        X2 -> "2"
-- JSON instances delegate to the textual encoding above.
instance FromJSON Xgafv where
    parseJSON = parseJSONText "Xgafv"
instance ToJSON Xgafv where
    toJSON = toJSONText
| brendanhay/gogol | gogol-apps-tasks/gen/Network/Google/AppsTasks/Types/Sum.hs | mpl-2.0 | 1,221 | 0 | 11 | 292 | 197 | 114 | 83 | 26 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DeploymentManager.Resources.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets information about a single resource.
--
-- /See:/ <https://cloud.google.com/deployment-manager/ Google Cloud Deployment Manager API Reference> for @deploymentmanager.resources.get@.
module Network.Google.Resource.DeploymentManager.Resources.Get
(
-- * REST Resource
ResourcesGetResource
-- * Creating a Request
, resourcesGet
, ResourcesGet
-- * Request Lenses
, rgProject
, rgResource
, rgDeployment
) where
import Network.Google.DeploymentManager.Types
import Network.Google.Prelude
-- | A resource alias for @deploymentmanager.resources.get@ method which the
-- 'ResourcesGet' request conforms to.
-- Servant-style route:
--   GET /deploymentmanager/v2/projects/{project}/global/deployments/
--       {deployment}/resources/{resource}
-- returning a single Resource.
type ResourcesGetResource =
     "deploymentmanager" :>
       "v2" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "deployments" :>
                 Capture "deployment" Text :>
                   "resources" :>
                     Capture "resource" Text :>
                       QueryParam "alt" AltJSON :> Get '[JSON] Resource
-- | Gets information about a single resource.
--
-- /See:/ 'resourcesGet' smart constructor.
-- Strict record of the three required path parameters.
data ResourcesGet = ResourcesGet'
    { _rgProject :: !Text -- ^ project ID
    , _rgResource :: !Text -- ^ resource name
    , _rgDeployment :: !Text -- ^ deployment name
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ResourcesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rgProject'
--
-- * 'rgResource'
--
-- * 'rgDeployment'
resourcesGet
    :: Text -- ^ 'rgProject'
    -> Text -- ^ 'rgResource'
    -> Text -- ^ 'rgDeployment'
    -> ResourcesGet
resourcesGet pRgProject_ pRgResource_ pRgDeployment_ =
    ResourcesGet'
    { _rgProject = pRgProject_
    , _rgResource = pRgResource_
    , _rgDeployment = pRgDeployment_
    }
-- | The project ID for this request.
rgProject :: Lens' ResourcesGet Text
rgProject = lens _rgProject setter
  where setter s a = s {_rgProject = a}

-- | The name of the resource for this request.
rgResource :: Lens' ResourcesGet Text
rgResource = lens _rgResource setter
  where setter s a = s {_rgResource = a}

-- | The name of the deployment for this request.
rgDeployment :: Lens' ResourcesGet Text
rgDeployment = lens _rgDeployment setter
  where setter s a = s {_rgDeployment = a}
-- Binds the request record to the route above; note the argument order
-- to @go@ is project, deployment, resource — matching the route's
-- capture order, not the record's field order.
instance GoogleRequest ResourcesGet where
        type Rs ResourcesGet = Resource
        type Scopes ResourcesGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-platform.read-only",
               "https://www.googleapis.com/auth/ndev.cloudman",
               "https://www.googleapis.com/auth/ndev.cloudman.readonly"]
        requestClient ResourcesGet'{..}
          = go _rgProject _rgDeployment _rgResource
              (Just AltJSON)
              deploymentManagerService
          where go
                  = buildClient (Proxy :: Proxy ResourcesGetResource)
                      mempty
| rueshyna/gogol | gogol-deploymentmanager/gen/Network/Google/Resource/DeploymentManager/Resources/Get.hs | mpl-2.0 | 3,756 | 0 | 17 | 915 | 473 | 282 | 191 | 77 | 1 |
{-
Habit of Fate, a game to incentivize habit formation.
Copyright (C) 2019 Gregory Crosswhite
This program is free software: you can redistribute it and/or modify
it under version 3 of the terms of the GNU Affero General Public License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE UnicodeSyntax #-}
module HabitOfFate.Data.Markdown where
import HabitOfFate.Prelude
import CMark (commonmarkToHtml, commonmarkToLaTeX)
import Data.Aeson (FromJSON(..), ToJSON(..), Value(String), withText)
import GHC.Exts (IsString(..))
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Html5 (Html)
-- | Raw CommonMark source text; rendered to HTML or LaTeX below.
newtype Markdown = Markdown { unwrapMarkdown ∷ Text } deriving (Eq,Monoid,Ord,Read,Semigroup,Show)
instance IsString Markdown where
  fromString = pack >>> Markdown
-- JSON form is the bare markdown string.
instance ToJSON Markdown where
  toJSON = unwrapMarkdown >>> String
instance FromJSON Markdown where
  parseJSON = withText "markdown must be string-shaped" (Markdown >>> pure)
-- | Wrap markdown in @**@ so it renders bold.
embolden ∷ Markdown → Markdown
embolden x = Markdown $ "**" ⊕ unwrapMarkdown x ⊕ "**"
renderMarkdownToHtml, renderMarkdownToHtmlWithoutParagraphTags ∷ Markdown → Html
-- The cmark output is trusted, hence preEscapedToHtml (no re-escaping).
renderMarkdownToHtml = unwrapMarkdown >>> commonmarkToHtml [] >>> H.preEscapedToHtml
-- This is brittle but I can't think of a better way at the moment.
-- NOTE(review): assumes cmark renders exactly one paragraph, i.e. the
-- output is "<p>...</p>\n" — multi-paragraph input would be mangled.
renderMarkdownToHtmlWithoutParagraphTags =
  unwrapMarkdown
  >>>
  commonmarkToHtml []
  >>>
  dropEnd 5 -- Drops </p> plus the trailing newline from the end.
  >>>
  drop 3 -- Drops <p> from the beginning.
  >>>
  H.preEscapedToHtml
renderMarkdownToLaTeX ∷ Markdown → Text
renderMarkdownToLaTeX = unwrapMarkdown >>> commonmarkToLaTeX [] Nothing
| gcross/habit-of-fate | sources/library/HabitOfFate/Data/Markdown.hs | agpl-3.0 | 2,178 | 0 | 10 | 359 | 318 | 184 | 134 | 35 | 1 |
{-# LANGUAGE NoOverloadedStrings #-}
module Lupo.Backends.URLMapper
( makeURLMapper
) where
import Control.Exception
import qualified Data.ByteString.Char8 as C
import Data.Function
import qualified Data.List as L
import Data.Monoid
import qualified Data.Text as T
import Data.Text.Lens
import Text.Shakespeare.Text
import qualified Lupo.Entry as E
import Lupo.Import
import Lupo.URLMapper
import Lupo.Util
-- | Build the site's URL mapper rooted at @basePath@.  Defined with
-- 'fix' so fields can reference each other (see '_entryDayPath', which
-- reuses '_singleDayPath' via @self@).
makeURLMapper :: Path -> URLMapper
makeURLMapper basePath = fix $ \self -> URLMapper
  { _entryPath = \s ->
      full $ "entries" </> show (s ^. E.idx)
  , _entryEditPath = \s ->
      full $ "admin" </> show (s ^. E.idx) </> "edit"
  , _entryDeletePath = \s ->
      full $ "admin" </> show (s ^. E.idx) </> "delete"
  , _singleDayPath = full . dayPath
  -- Day URL plus a zero-padded "#NN" fragment for the entry's 1-based
  -- position on that page.  The assert makes the precondition explicit:
  -- the entry must be one of the page's entries, otherwise this is a
  -- deliberate crash (assert False undefined).
  , _entryDayPath = \page e ->
      let base = self ^. singleDayPath (page ^. E.pageDay)
          n = maybe (assert False undefined) succ $ L.findIndex (== e) $ page ^. E.pageEntries
      in base <> C.pack ("#" <> over packed (T.justifyRight 2 '0') (show n))
  , _multiDaysPath = \d n ->
      full [st|#{dayPath d}-#{show n}|]
  , _monthPath = full . formatTime "%Y%m"
  , _topPagePath = full ""
  , _adminPath = full "admin"
  , _loginPath = full "login"
  , _initAccountPath = full "init-account"
  , _commentPostPath = \d ->
      full $ dayPath d </> "comment#new-comment"
  , _newCommentPath = \d ->
      full $ dayPath d <> "#new-comment"
  , _commentsPath = \d ->
      full $ dayPath d <> "#comments"
  , _cssPath = \css ->
      full $ "css" </> css
  , _feedPath = full "recent.atom"
  , _fullPath = full
  }
  where
    dayPath = toString . formatTime "%Y%m%d"
    -- Prefix a relative path with the base path, as a strict ByteString.
    full path = C.pack $ toString basePath </> toString path
    p </> c = toString p <> "/" <> toString c
    infixl 5 </>
-- | Minimal coercion class so String, strict ByteString and Text path
-- components can be mixed freely in 'makeURLMapper' without explicit
-- conversions at each call site.
class ToString a where
  toString :: a -> String
instance ToString String where
  toString = id
instance ToString C.ByteString where
  toString = C.unpack
instance ToString T.Text where
  toString = T.unpack
| keitax/lupo | src/Lupo/Backends/URLMapper.hs | lgpl-3.0 | 1,989 | 0 | 19 | 452 | 649 | 359 | 290 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
-- |
-- Module: EchoBackground
-- Experimentation with data-ringbuffer
--
-- This run replicates a simple echo using:
--
-- - Data.RingBuffer
-- - Background: a worker pool of background threads
module Hyperq.EchoBackground (
-- * testing imperatives
run
, run'
, runString
, runString'
, makeCon
-- * unused functions
, concBatchPublishTo'
) where
import Control.Applicative
( (*>) )
import Control.Concurrent
( newEmptyMVar, putMVar , takeMVar, MVar, killThread )
import Control.Concurrent.STM
( atomically )
import Control.Exception.Base
( evaluate )
import Control.Monad
( unless, forever, when )
import Data.Bits
( (.&.) )
import Data.Char
( chr )
import qualified Data.Vector.Fusion.Stream.Monadic as S
import Data.Vector.Generic.Mutable
(mstream)
import qualified Data.Vector.Mutable as MV
-- Data.RingBuffer (https://github.com/hyperq/data-ringbuffer) is a fork of
-- https://github.com/kim/data-ringbuffer to expose all modules
import Data.RingBuffer
( nextBatch, publish, waitFor, newConsumer, newSequencer, newBarrier
, nextSeq )
import Data.RingBuffer.Internal
( writeSeq, readSeq, addAndGet, mkSeq )
import Data.RingBuffer.Types
( Barrier(..), Consumer(..), Sequencer(..), Sequence )
import Data.RingBuffer.Vector
( MVector(..), newRingBuffer)
-- local helper modules
import Hyperq.Background
( spawnWorkers )
import Util
( now, printTiming' )
-- | Publish Wrapper for Data.RingBuffer.Vector function,
-- to extract the Sequence from Sequencer.
concPublishTo' :: MVector Int -- ^ publishing buffer
               -> Int -- ^ mod mask
               -> Sequencer -- ^ contains the cursor
               -> Int -- ^ value to be published
               -> IO ()
concPublishTo' mv modm seqr@(Sequencer sq _) =
    concPublishTo mv modm seqr sq
-- Claim the next slot, write the value at the masked (wrapped) index,
-- then publish exactly one slot.
concPublishTo :: MVector Int -> Int -> Sequencer -> Sequence -> Int -> IO ()
concPublishTo (MVector mvec) modm seqr sq v = do
    next <- nextSeq seqr sq (MV.length mvec)
    MV.unsafeWrite mvec (next .&. modm) v
    publish seqr next 1
-- not sure why the original function uses an explicit length
-- NOTE(review): this publishes at (next+1) while 'concPublishTo'
-- publishes at next — confirm which convention Data.RingBuffer's
-- 'publish' expects; the two look inconsistent.
concBatchPublishTo' :: MVector a -- ^ buffer
                    -> Int -- ^ mod mask
                    -> Sequencer -- ^ publisher and cursor
                    -> [a] -- ^ values to be published
                    -> IO ()
concBatchPublishTo' (MVector mvec) modm seqr@(Sequencer sq _) vs = do
    next <- nextBatch seqr sq len (MV.length mvec)
    -- next is the last claimed slot; write the batch into the
    -- preceding len slots (masked for wrap-around).
    mapM_ update $ zip [next - len + 1..next] vs
    publish seqr (next+1) len
  where
    len = length vs
    update (n,x) = MV.unsafeWrite mvec (n .&. modm) x
-- | Consume from buffer. Altered consumeFrom to reference
-- last consumed (upto) rather
-- than next to be consumed (next)
consumeFrom' :: MVector Int -- ^ buffer
             -> Int -- ^ mod mask
             -> Barrier -- ^ tracking cursors
             -> Consumer Int -- ^ a stream consumer (with cursor)
             -> IO ()
consumeFrom' (MVector mvec) modm barr (Consumer fn sq) = do
    next <- addAndGet sq 1
    avail <- waitFor barr next
    let start = next .&. modm
        len = avail - next + 1
        -- t is the tail of the buffer from the start index; if the
        -- available range wraps past the physical end, the remainder is
        -- consumed from the front of the buffer in the 'unless' below.
        (_,t) = MV.splitAt start mvec
        tlen = MV.length t
    S.mapM_ fn $ mstream $ MV.take len t
    unless (tlen >= len) $
        S.mapM_ fn . mstream . MV.take (len - tlen) $ mvec
    -- Record that everything up to 'avail' has been consumed.
    writeSeq sq avail
-- | Write one decoded character to stdout, except that @\'q\'@ is
-- interpreted as a shutdown request: instead of printing, it fills the
-- shutdown flag with 1.
printOrDone :: MVar Int -- ^ shutdown flag
            -> Int -- ^ value
            -> IO ()
printOrDone done x =
    if toEnum x == 'q'
        then putMVar done 1
        else putChar (toEnum x)
-- | Print to an MVector unless 'q' happens
-- Appends the value at index (count+1) and bumps the count; 113 is
-- fromEnum 'q', which additionally raises the shutdown flag.
-- NOTE(review): unsafeWrite is unchecked — writes past the vector's
-- capacity if more characters arrive than it was allocated for; the
-- callers size the vector (100/1000) accordingly.  Confirm.
printOrDone' :: MVector Int -- ^ vector being printed to
             -> Sequence -- ^ vector size
             -> MVar Int -- ^ shutdown flag
             -> Int -- ^ value
             -> IO ()
printOrDone' (MVector ans) mvc done x = do
    -- print "in printOrDone'"
    c <- readSeq mvc
    _ <- MV.unsafeWrite ans (c+1) x
    _ <- writeSeq mvc (c + 1)
    when (x == 113) $
        putMVar done 1
    return()
-- | test run using stdin and stdout
-- answer and count associate with con and with final IO cleanup
-- Publisher thread reads stdin character by character; consumer thread
-- echoes via 'printOrDone'.  Blocks until 'q' raises the done flag,
-- then prints timing and kills the worker threads.
run :: IO ()
run = do
    done <- newEmptyMVar
    (con, seqr, buf, start) <- makeRb bufferSize (printOrDone done)
    (submit,_,ids) <- spawnWorkers 2
    atomically . submit . forever $ do
        c <- getChar
        concPublishTo' buf modmask seqr $ fromEnum c
    atomically . submit . forever
        $ consumeFrom' buf modmask (newBarrier seqr []) con
    takeMVar done *>
        now >>= printTiming' start >>
        mapM_ killThread ids
  where
    bufferSize = 1024*8
    -- Power-of-two size so (index .&. modmask) wraps the buffer.
    modmask = bufferSize - 1
-- | test run using MVector
-- Publishes the whole input string, collects consumed characters into
-- a 100-slot vector, and returns them as a String once 'q' is seen.
runString :: String -> IO String
runString s = do
    answer <- do
        m <- MV.replicate 100 0
        return(MVector m)
    count <- mkSeq
    done <- newEmptyMVar
    (con, seqr, buf, start) <- makeRb bufferSize
                               (printOrDone' answer count done)
    (submit,_,ids) <- spawnWorkers 3
    atomically . submit $ mapM_
        (concPublishTo' buf modmask seqr . fromEnum) s
    atomically . submit . forever
        $ consumeFrom' buf modmask (newBarrier seqr []) con
    takeMVar done *>
        now >>= printTiming' start >>
        mapM_ killThread ids
    -- Read back the first (count+1) slots and decode them to Chars.
    c <- readSeq count
    let (MVector mvec) = answer
        (t,_) = MV.splitAt (c+1) mvec
    sOut <- mapM (MV.read t) [0..c]
    return $ map chr sOut
  where
    bufferSize = 1024*8
    modmask = bufferSize - 1
-- Allocate a consumer, a sequencer tracking it, and an Int ring buffer,
-- and record the start time for later timing output.
makeRb :: Int -> (a -> IO ()) ->
          IO (Consumer a, Sequencer, MVector Int, Double)
makeRb bufferSize conFn = do
    con <- newConsumer conFn
    seqr <- newSequencer [con]
    buf <- newRingBuffer bufferSize (0 :: Int)
    start <- now
    return(con, seqr, buf, start)
-- | make an RB connection
-- Spawns a publisher thread that repeatedly runs the sequencer action
-- and publishes its values, plus a consumer thread; blocks until the
-- consumer raises the done flag, then cleans up the workers.
makeCon :: IO [Int] -- sequencer function
        -> (MVar Int -> Int -> IO ()) -- consumer function
        -> IO (MVar Int)
makeCon seqrFn conFn = do
    done <- newEmptyMVar
    (con, seqr, buf, start) <- makeRb bufferSize (conFn done)
    (submit,_,ids) <- spawnWorkers 2
    atomically . submit . forever $ do
        next <- seqrFn
        mapM_ (concPublishTo' buf modmask seqr) next
    atomically . submit . forever
        $ consumeFrom' buf modmask (newBarrier seqr []) con
    takeMVar done *>
        now >>= printTiming' start >>
        mapM_ killThread ids
    return done
  where
    bufferSize = 1024*8
    modmask = bufferSize - 1
-- | using makeCon
-- There is a bug in this where the string is published three time,
-- due to the forever do pattern used in makeCon
runString' :: String -> IO String
runString' s = do
    answer <- do
        m <- MV.replicate 1000 0
        return(MVector m)
    count <- mkSeq
    -- evaluate just forces the encoded string; 'forever' in makeCon
    -- re-runs it, hence the repeated publishing noted above.
    let seqrFn = evaluate $ map fromEnum s
        conFn = printOrDone' answer count
    _ <- makeCon seqrFn conFn
    c <- readSeq count
    let (MVector mvec) = answer
        (t,_) = MV.splitAt (c+1) mvec
    sOut <- mapM (MV.read t) [0..c]
    return $ map chr sOut
-- | simple echo ring buffer
-- Reads a line from stdin per iteration and echoes it via 'printOrDone'.
run' :: IO ()
run' = do
    let seqrFn = do
            c <- getLine
            return $ map fromEnum c
        conFn = printOrDone
    _ <- makeCon seqrFn conFn
    return ()
| mogeiwang/hyperq | src/Hyperq/EchoBackground.hs | apache-2.0 | 7,493 | 0 | 13 | 2,224 | 2,204 | 1,127 | 1,077 | 183 | 1 |
module Walk.A324605Spec (main, spec) where
import Test.Hspec
import Walk.A324605 (a324605)
main :: IO ()
main = hspec spec
-- | The first ten terms of A324605 must agree with the published
-- sequence values.
spec :: Spec
spec = describe "A324605" $
  it "correctly computes the first 10 elements" $
    map a324605 [1 .. 10] `shouldBe` [2,1,2,1,5,6,1,2,11,8]
| peterokagey/haskellOEIS | test/Walk/A324605Spec.hs | apache-2.0 | 324 | 0 | 10 | 59 | 130 | 75 | 55 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Radiation.Parsers.Languages.C(parser) where
import Control.Applicative
import Control.Monad
import Data.Attoparsec.ByteString.Char8 as BP
import Data.Char (isAlphaNum)
import Data.Maybe (catMaybes)
import My.Utils
import Prelude hiding (log)
import Radiation.Parsers.Internal.CStyle
import Radiation.Parsers.Internal.CommandParser
import Radiation.Parsers.Internal.InternalIO
import Radiation.Parsers.Internal.WithAttoparsec
import Vim
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Radiation.Parsers as R
{- Keep radiation from highlighting already
- defined keywords -}
-- C reserved words; these are already highlighted by Vim's C syntax,
-- so they are subtracted from the parsed identifiers in 'parser'.
blacklist :: Set.Set BS.ByteString
blacklist = Set.fromList [
    "auto" ,"else" ,"long" ,"switch",
    "break" ,"enum" ,"register","typedef",
    "case" ,"extern","return" ,"union",
    "char" ,"float" ,"short" ,"unsigned",
    "const" ,"for" ,"signed" ,"void",
    "continue","goto" ,"sizeof" ,"volatile",
    "default" ,"if" ,"static" ,"while",
    "do" ,"int" ,"struct" ,"_Packed",
    "double" ]
-- Maps each C aggregate keyword to the Vim highlight group its
-- identifiers are filed under.
typMap :: Map.Map String String
typMap = Map.fromList [
    ("struct","RadiationCStruct"),
    ("union","RadiationCUnion"),
    ("enum","RadiationCEnum")]
{- Parsec Monad for C files -}
-- Scans preprocessed C text and returns a map from highlight-group name
-- to the set of identifiers belonging to that group.  Attributes
-- (__attribute__ etc.) are stripped first, then the text is re-parsed.
parseC :: Parser (Map.Map String (Set.Set BS.ByteString))
parseC = let
    {- Parse a typedef and return pairs of highlights
     - to keywords -}
    parseTypedef :: Parser [(String,BSC.ByteString)]
    parseTypedef = do
        _ <- string "typedef"
        (do
            {- parse the typedef of template:
             - typedef struct [name] { ... } [ident] -}
            typ <- skipSpace *> (choice . map string) (map BSC.pack $ Map.keys typMap)
            id1' <- (option Nothing (Just <$> identifier))
            -- id1 is the optional tag name, highlighted under the
            -- struct/union/enum group looked up in typMap.
            let id1 = (,) <$> Map.lookup (BSC.unpack typ) typMap <*> id1'
            (addJust id1 . return . ("RadiationCTypedef",))
                <$> (skipSpace *> (option "" body) *> identifier)) <|>
            {- Or as the original typedef ... [ident]; -}
            ((return . ("RadiationCTypedef",) . last . BSC.words) <$> BP.takeWhile (/=';'))
    {- Parse the names of functions in the
     - file -}
    parseFunction :: Parser [(String, BSC.ByteString)]
    parseFunction = do
        _ <- identifier
        _ <- many (skipSpace *> char '*')
        name <- identifier
        _ <- skipSpace *> char '('
        return [("RadiationCFunction",name)]
    {- Parse any type independent of a typedef -}
    parseAnyType :: Parser [(String,BSC.ByteString)]
    parseAnyType = do
        {- Struct, union, enum etc. -}
        typ <- choice (map (string . BSC.pack) $ Map.keys typMap)
        ident <- identifier
        let look = Map.lookup (BSC.unpack typ) typMap
        let dat :: [Maybe (String, BSC.ByteString)]
            dat = return ((,) <$> look <*> pure ident)
        return $ catMaybes dat
    in do
        -- Strip compiler attributes, then run the real scan over the
        -- cleaned ByteString; anyChar $> [] skips unmatched input.
        bs <- removePattern attribute
        subparse (
            (fromList' . concat) <$> many (choice
                [ parseTypedef,
                  parseFunction,
                  parseAnyType,
                  anyChar $> []])
            ) bs
    where fromList' :: (Ord a, Ord b) => [(a,b)] -> Map.Map a (Set.Set b)
          fromList' = foldl (\mp (k,v) ->
                Map.insertWith Set.union k (Set.singleton v) mp) Map.empty
          addJust :: Maybe a -> [a] -> [a]
          addJust Nothing = id
          addJust (Just x) = (x:)
-- Radiation entry point for C: preprocess the file with the user's
-- compiler (g:radiation_c_cc, default "cc") and flags, run 'parseC'
-- over the output, and hand the identifier map (minus the keyword
-- blacklist) back to Vim.  The aggregate groups are linked to "Type".
parser :: R.Parser
parser = R.Parser "c" (const ["g:radiation_c_cc", "g:radiation_c_flags"]) $ \filename -> do
    vlog Info "Starting C Parser"
    forM_ (map snd $ Map.toList typMap) (flip R.hiLink "Type" . BSC.pack)
    pipes <- sequence {- bracketV automatically detaches from vim -}
        [queryDefault "g:radiation_c_cc" "cc",
         queryDefault "g:radiation_c_flags" "",
         pure "-E", pure filename] >>= runCommand
    reportErrors pipes $
        withParsingMap (Map.map (Set.\\blacklist) <$> parseC) <=< vGetHandleContents;
| jrahm/Radiation | src/Radiation/Parsers/Languages/C.hs | bsd-2-clause | 4,455 | 0 | 23 | 1,394 | 1,186 | 652 | 534 | 88 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-
ARM instruction set.
-}
module Translator.Assembler.Target.ARM (ARMInstr(..), CondExec(..), Reg(..), OpAdr(..),
Update(..), Shift(..), Suffix(..),
module Translator.Assembler.Directive,
noShift,
adc, adcs, add, adds, ands, asr, asrs, b, bl,
cmp, eors,
ldr, ldrb, ldrh, ldrsb, ldrsh,
lsl, lsls, lsr, lsrs, mov, movs, orrs,
sbc, sbcs, str, strb, strh,
sub, subs, teq, umull) where
import Data.Char
import Data.Int
import Translator.Expression
import Translator.Assembler.Directive
import Translator.Assembler.InstructionSet
import Prelude hiding (EQ, LT, GT)
-- | ARM instruction set, with Thumb and Thumb2 as well as needed pseudo instructions.
-- Constructor field order for the three-operand arithmetic forms is:
-- update-flags, condition, width suffix, destination, first source,
-- flexible second operand, and operand shift.
data ARMInstr = ADD Update CondExec Suffix Reg Reg OpAdr Shift
              | ADC Update CondExec Suffix Reg Reg OpAdr Shift
              | AND Update CondExec Suffix Reg Reg OpAdr Shift
              | ASR Update CondExec Suffix Reg Reg OpAdr
              | B CondExec Suffix OpAdr
              | BL CondExec Suffix OpAdr
              | CMP CondExec Suffix Reg OpAdr Shift
              | EOR Update CondExec Suffix Reg Reg OpAdr Shift
              | LDR CondExec Suffix Reg OpAdr
              | LDRB CondExec Suffix Reg OpAdr
              | LDRH CondExec Suffix Reg OpAdr
              | LDRSB CondExec Suffix Reg OpAdr
              | LDRSH CondExec Suffix Reg OpAdr
              | LSL Update CondExec Suffix Reg Reg OpAdr
              | LSR Update CondExec Suffix Reg Reg OpAdr
              | MOV Update CondExec Suffix Reg OpAdr
              | ORR Update CondExec Suffix Reg Reg OpAdr Shift
              | SBC Update CondExec Suffix Reg Reg OpAdr Shift
              | STR CondExec Suffix Reg OpAdr
              | STRB CondExec Suffix Reg OpAdr
              | STRH CondExec Suffix Reg OpAdr
              | SUB Update CondExec Suffix Reg Reg OpAdr Shift
              | TEQ CondExec Suffix Reg OpAdr
              | UMULL CondExec Reg Reg Reg Reg
              | Directive GNUDirective
-- Constructors for common uses
-- Each pre-applies "always execute", "any width" and (for arithmetic)
-- "no shift"; the -s variants set the condition flags.
adc = i3 ADC U
adcs = i3 ADC S
add = i3 ADD U
adds = i3 ADD S
ands = i3 AND S
asr = ASR U AL Any
asrs = ASR S AL Any
b = B AL Any
bl = BL AL Any
cmp = CMP AL Any
eors = i3 EOR S
ldr = LDR AL Any
ldrb = LDRB AL Any
ldrh = LDRH AL Any
ldrsb = LDRSB AL Any
ldrsh = LDRSH AL Any
lsl = LSL U AL Any
lsls = LSL S AL Any
lsr = LSR U AL Any
lsrs = LSR S AL Any
mov = MOV U AL Any
movs = MOV S AL Any
orrs = i3 ORR S
sbc = i3 SBC U
sbcs = i3 SBC S
str = STR AL Any
strb = STRB AL Any
strh = STRH AL Any
sub = i3 SUB U
subs = i3 SUB S
teq = TEQ AL Any
umull = UMULL AL
-- Helper for the three-operand constructors above.
i3 ctor s x y z = ctor s AL Any x y z noShift
-- Update flags modifier
data Update = U | S
-- | ARM conditional-execution codes, rendered as lowercase mnemonic
-- suffixes.  'AL' (always) renders as the empty string because it is
-- the implicit default and is never written out.
data CondExec = AL | EQ | NE | CS | CC | MI | PL | VS | VC | HI | LS | GE | LT | GT | LE
              deriving Eq
instance Show CondExec where
  show AL = ""
  show EQ = "eq"
  show NE = "ne"
  show CS = "cs"
  show CC = "cc"
  -- Fixed: was "MI".  Every suffix is emitted lowercase and this string
  -- is concatenated directly into mnemonics (e.g. "addmi"), so the
  -- uppercase form was an inconsistency/typo.
  show MI = "mi"
  show PL = "pl"
  show VS = "vs"
  show VC = "vc"
  show HI = "hi"
  show LS = "ls"
  show GE = "ge"
  show LT = "lt"
  show GT = "gt"
  show LE = "le"
data Reg = R0 | R1 | R2 | R3 | R4 | R5 | R6 | R7 | R8 | R9 | R10 | R11 | R12 |
           SP | LR | PC | NoReg deriving (Eq, Show)
-- Registers are printed lowercase ("r0", "sp", ...).
showReg = map toLower . show
data OpAdr = RegOp Reg | Mem Expr | NoOperand | Imm Expr | RegIndOffset Reg Expr |
             RegRegInd Reg Reg Shift | Cond CondExec | PreIndexed Reg Int | PostIndexed Reg Int
           deriving Eq
instance Show OpAdr where
  show (RegOp r) = showReg r
  show (Mem mem) = show mem
  show (Imm val) = '#' : show val
  show (RegIndOffset r offset) = "[" ++ showReg r ++ ",#" ++ show offset ++ "]"
  -- A zero LSL is no shift at all, so it is omitted from the syntax.
  show (RegRegInd r1 r2 (OpLSL 0)) = '[' : showReg r1 ++ "," ++ showReg r2 ++ "]"
  show (RegRegInd r1 r2 sh) = '[' : showReg r1 ++ "," ++ showReg r2 ++ "," ++ show sh ++ "]"
  -- NoOperand/Cond are placeholders that must never be rendered;
  -- reaching these cases is a programming error by construction.
  show NoOperand = error "no operand"
  show Cond{} = error "cond"
  show (PreIndexed r n) = '[' : showReg r ++ ",#" ++ show n ++ "]!"
  show (PostIndexed r n) = '[' : showReg r ++ "],#" ++ show n
-- | Shifts in operands
data Shift = OpLSL Int | OpLSR Int | OpASR Int | OpROR Int | OpRRX
           deriving Eq
instance Show Shift where
  show (OpLSL n) = "lsl #" ++ show n
  show (OpLSR n) = "lsr #" ++ show n
  show (OpASR n) = "asr #" ++ show n
  show (OpROR n) = "ror #" ++ show n
  show OpRRX{} = "rrx"
-- Canonical "no shift" value; rendering code treats OpLSL 0 specially.
noShift = OpLSL 0
-- Thumb 2 word suffix
-- Any - select what is appropriate
-- N - forced Narrow (Thumb 1)
-- A - ARM
-- W - Thumb2
data Suffix = Any | N | A | W deriving Show
-- Render each instruction to (mnemonic, Just operand-list).  The
-- 'arith' helper drops NoReg/NoOperand positions so the same code path
-- serves two- and three-operand forms (e.g. CMP has no destination).
instance InstructionSet ARMInstr where
  disassemble instr =
    let disasm (ADC f c q d s x sh) = arith "adc" f c q d s x sh
        disasm (ADD f c q d s x sh) = arith "add" f c q d s x sh
        disasm (AND f c q d s x sh) = arith "and" f c q d s x sh
        disasm (ASR f c q d s x) = (m (fl "asr" f) c q, Just (showReg d : showReg s : optop x))
        disasm (B c q d) = (m "b" c q, Just [show d])
        disasm (BL c q d) = (m "bl" c q, Just [show d])
        disasm (CMP c q d s sh) = arith "cmp" U c q d NoReg s sh
        disasm (EOR f c q d s x sh) = arith "eor" f c q d s x sh
        disasm (LDR c q d s) = (m "ldr" c q, Just [showReg d, op s])
        disasm (LDRB c q d s) = (m "ldrb" c q, Just [showReg d, op s])
        disasm (LDRH c q d s) = (m "ldrh" c q, Just [showReg d, op s])
        disasm (LDRSB c q d s) = (m "ldrsb" c q, Just [showReg d, op s])
        disasm (LDRSH c q d s) = (m "ldrsh" c q, Just [showReg d, op s])
        disasm (LSR f c q d s x) = (m (fl "lsr" f) c q, Just (showReg d : showReg s : optop x))
        disasm (LSL f c q d s x) = (m (fl "lsl" f) c q, Just (showReg d : showReg s : optop x))
        disasm (MOV S c q d s) = (m "movs" c q, Just [showReg d, op s])
        disasm (MOV U c q d s) = (m "mov" c q, Just [showReg d, op s])
        disasm (ORR f c q d s x sh) = arith "orr" f c q d s x sh
        disasm (SBC f c q d s x sh) = arith "sbc" f c q d s x sh
        disasm (STR c q d s) = (m "str" c q, Just [showReg d, op s])
        disasm (STRB c q d s) = (m "strb" c q, Just [showReg d, op s])
        disasm (STRH c q d s) = (m "strh" c q, Just [showReg d, op s])
        disasm (SUB f c q d s x sh) = arith "sub" f c q d s x sh
        disasm (TEQ c q r s) = (m "teq" c q, Just [showReg r, op s])
        disasm (UMULL c r1 r2 r3 r4) = (m "umull" c Any, Just (map showReg [r1, r2, r3, r4]))
        disasm (Directive dir) = disassemble dir
        arith mne f c q d NoReg NoOperand sh = (m (fl mne f) c q, Just (showReg d : shift sh))
        arith mne f c q d s NoOperand sh = (m (fl mne f) c q, Just (showReg d : showReg s : shift sh))
        arith mne f c q d NoReg x sh = (m (fl mne f) c q, Just (showReg d : op x : shift sh))
        arith mne f c q d s x sh = (m (fl mne f) c q, Just (showReg d : showReg s : op x : shift sh))
        -- fl appends "s" for flag-setting variants.
        fl name U = name
        fl name S = name ++ "s"
        -- NOTE(review): m only matches the 'Any' width suffix, so
        -- disassembling an instruction built with N/A/W hits an
        -- incomplete pattern at runtime — confirm whether those
        -- suffixes are ever constructed.
        m name AL Any = name
        m name cc Any = name ++ show cc
        op = show
        -- A zero LSL contributes no operand text.
        shift (OpLSL 0) = []
        shift sh = [show sh]
        optop NoOperand = []
        optop x = [op x]
    in disasm instr
| hth313/hthforth | src/Translator/Assembler/Target/ARM.hs | bsd-2-clause | 7,732 | 0 | 16 | 2,923 | 3,331 | 1,727 | 1,604 | 163 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : System.Ext2.Parsers
-- Copyright : (C) 2015 Ricky Elrod
-- License : BSD2 (see LICENSE file)
-- Maintainer : Ricky Elrod <ricky@elrod.me>
-- Stability : experimental
-- Portability : lens
--
-- This library exists for me to learn about ext2 and to possibly be used as
-- part of an assignment in a Computer Science course I will be taking soon.
--
-- Please feel free to use it to learn about ext2 and play around, but don't
-- use it for anything that you care about for now, because I don't trust
-- myself.
----------------------------------------------------------------------------
module System.Ext2.Parsers (
-- * Superblock
Superblock (..)
-- ** Parsers
, readSuperblock
-- * BlockGroupDescriptorTable
, BlockGroupDescriptorTable (..)
-- ** Parsers
, readBlockGroupDescriptorTable
-- * Inode
, Inode (..)
-- ** Parsers
, readInode
, readInodeTable
-- * Directory
, Directory (..)
-- ** Parsers
, readDirectory
) where
import Control.Applicative
import Data.Bytes.Get
import qualified Data.Vector as V
import System.Ext2.FeatureFlags
import System.Ext2.Tables
-- | Parse an ext2 superblock, field by field, in on-disk order.  All
-- multi-byte integers on disk are little-endian, hence the @*le@ getters.
--
-- The trailing comments name the on-disk superblock fields following the
-- ext2 layout (s_inodes_count at offset 0, and so on); confirm them
-- against the 'Superblock' record, which is declared in
-- "System.Ext2.Tables" and is not visible in this module.
--
-- See also 'readExtendedSuperblock'
readSuperblock :: MonadGet m => m Superblock
readSuperblock =
  Superblock <$> getWord32le  -- s_inodes_count
             <*> getWord32le  -- s_blocks_count
             <*> getWord32le  -- s_r_blocks_count (reserved for superuser)
             <*> getWord32le  -- s_free_blocks_count
             <*> getWord32le  -- s_free_inodes_count
             <*> getWord32le  -- s_first_data_block
             <*> getWord32le  -- s_log_block_size
             <*> getWord32le  -- s_log_frag_size
             <*> getWord32le  -- s_blocks_per_group
             <*> getWord32le  -- s_frags_per_group
             <*> getWord32le  -- s_inodes_per_group
             <*> getWord32le  -- s_mtime (last mount time)
             <*> getWord32le  -- s_wtime (last write time)
             <*> getWord16le  -- s_mnt_count
             <*> getWord16le  -- s_max_mnt_count
             <*> getWord16le  -- s_magic
             <*> (word16ToFSState <$> getWord16le)  -- s_state, decoded
             <*> getWord16le  -- s_errors (behaviour when errors detected)
             <*> getWord16le  -- s_minor_rev_level
             <*> getWord32le  -- s_lastcheck
             <*> getWord32le  -- s_checkinterval
             <*> getWord32le  -- s_creator_os
             <*> getWord32le  -- s_rev_level
             <*> getWord16le  -- s_def_resuid
             <*> getWord16le  -- s_def_resgid
             <*> getWord32le  -- s_first_ino
             <*> getWord16le  -- s_inode_size
             <*> getWord16le  -- s_block_group_nr
             -- Three feature-flag words.  On-disk order is
             -- s_feature_compat, s_feature_incompat, s_feature_ro_compat;
             -- NOTE(review): they are decoded below as required, optional,
             -- read-only in that order -- confirm the decoders match the
             -- on-disk order.
             <*> ((listReqFlags . fromIntegral) <$> getWord32le)
             <*> ((listOptionalFlags . fromIntegral) <$> getWord32le)
             <*> ((listRoFlags . fromIntegral) <$> getWord32le)
             <*> getLazyByteString 16  -- s_uuid
             <*> getLazyByteString 16  -- s_volume_name
             <*> getLazyByteString 64  -- s_last_mounted
             <*> getWord32le  -- s_algo_bitmap
             <*> getWord8     -- s_prealloc_blocks
             <*> getWord8     -- s_prealloc_dir_blocks
             <*> getWord16le  -- alignment padding after prealloc counts
             <*> getLazyByteString 16  -- s_journal_uuid
             <*> getWord32le  -- s_journal_inum
             <*> getWord32le  -- s_journal_dev
             <*> getWord32le  -- s_last_orphan
             <*> ((,,,)       -- s_hash_seed[0..3]
                  <$> getWord32le
                  <*> getWord32le
                  <*> getWord32le
                  <*> getWord32le)
             <*> getWord8     -- s_def_hash_version
             -- NOTE(review): the on-disk layout has 3 padding bytes after
             -- s_def_hash_version (offsets 253-255) before
             -- s_default_mount_opts at offset 256; no skip is performed
             -- here, so the next two words may be read 3 bytes early --
             -- confirm against the Superblock record and the spec.
             <*> getWord32le
             <*> getWord32le
-- | Parse one block group descriptor: the six 32/16-bit counters at the
-- start of a 32-byte on-disk descriptor entry.
--
-- NOTE(review): the previous comment claimed the trailing reserved bytes
-- are "ignored and skipped over (consumed)", but no skip is performed
-- below -- only 18 bytes are consumed.  Callers stepping through a
-- descriptor table must themselves advance past the remaining bytes
-- (2 pad + 12 reserved on disk) to reach the next 32-byte entry; confirm
-- against the call sites.
readBlockGroupDescriptorTable :: MonadGet m => m BlockGroupDescriptorTable
readBlockGroupDescriptorTable =
  BlockGroupDescriptorTable <$> getWord32le  -- bg_block_bitmap
                            <*> getWord32le  -- bg_inode_bitmap
                            <*> getWord32le  -- bg_inode_table
                            <*> getWord16le  -- bg_free_blocks_count
                            <*> getWord16le  -- bg_free_inodes_count
                            <*> getWord16le  -- bg_used_dirs_count
-- | Parse one on-disk inode, in field order (little-endian integers).
-- The trailing comments name the ext2 inode fields; confirm them against
-- the 'Inode' record in "System.Ext2.Tables", which is not visible here.
readInode :: MonadGet m => m Inode
readInode =
  Inode <$> getWord16le  -- i_mode
        <*> getWord16le  -- i_uid
        <*> getWord32le  -- i_size
        <*> getWord32le  -- i_atime
        <*> getWord32le  -- i_ctime
        <*> getWord32le  -- i_mtime
        <*> getWord32le  -- i_dtime
        <*> getWord16le  -- i_gid
        <*> getWord16le  -- i_links_count
        <*> getWord32le  -- i_blocks
        <*> getWord32le  -- i_flags
        <*> getWord32le  -- i_osd1 (OS-dependent word)
        -- i_block[0..14]: per the ext2 layout these are 12 direct block
        -- pointers, then one indirect, one doubly indirect and one triply
        -- indirect pointer -- confirm against the Inode record.
        <*> ((,,,,,,,,,,,,,,)
             <$> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le
             <*> getWord32le)
        <*> getWord32le  -- i_generation
        <*> getWord32le  -- i_file_acl
        <*> getWord32le  -- i_dir_acl
        <*> getWord32le  -- i_faddr
        <*> getLazyByteString 12  -- i_osd2 (OS-dependent bytes)
-- | Read a fixed number of consecutive inodes into a vector.  Each entry
-- is parsed with 'readInode', left to right, so the parser consumes
-- exactly @count@ inodes' worth of input in order.
readInodeTable :: MonadGet m => Int -> m (V.Vector Inode)
readInodeTable count = V.generateM count (\_ -> readInode)
-- | Parse a single directory entry.  The fixed-size header (inode number,
-- record length, name length, file type) is read first; the name length
-- from the header then determines how many name bytes to pull, followed
-- by three bytes of padding.
readDirectory :: MonadGet m => m Directory
readDirectory = do
  (ino, recLen, nameLen, ftype) <-
    (,,,) <$> getWord32le <*> getWord16le <*> getWord8 <*> getWord8
  nm  <- getLazyByteString (fromIntegral nameLen)
  pad <- getLazyByteString 3
  pure (Directory ino recLen nameLen ftype nm pad)
| relrod/ext2 | src/System/Ext2/Parsers.hs | bsd-2-clause | 4,805 | 0 | 50 | 1,699 | 753 | 410 | 343 | 121 | 1 |
{-# OPTIONS -fglasgow-exts -#include "../include/gui/qtc_hs_QSortFilterProxyModel.h" #-}
-----------------------------------------------------------------------------
{-| Module : QSortFilterProxyModel.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:15
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QSortFilterProxyModel (
QqSortFilterProxyModel(..)
,dynamicSortFilter
,QfilterAcceptsColumn(..)
,QfilterAcceptsRow(..)
,filterCaseSensitivity
,QfilterChanged(..)
,filterKeyColumn
,filterRegExp
,filterRole
,QinvalidateFilter(..)
,isSortLocaleAware
,QlessThan(..)
,Qmatch(..)
,setDynamicSortFilter
,setFilterCaseSensitivity
,setFilterFixedString
,setFilterKeyColumn
,QsetFilterRegExp(..)
,setFilterRole
,setFilterWildcard
,setSortCaseSensitivity
,setSortLocaleAware
,sortCaseSensitivity
,qSortFilterProxyModel_delete
,qSortFilterProxyModel_deleteLater
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.Qt
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
instance QuserMethod (QSortFilterProxyModel ()) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QSortFilterProxyModel_userMethod cobj_qobj (toCInt evid)
foreign import ccall "qtc_QSortFilterProxyModel_userMethod" qtc_QSortFilterProxyModel_userMethod :: Ptr (TQSortFilterProxyModel a) -> CInt -> IO ()
instance QuserMethod (QSortFilterProxyModelSc a) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QSortFilterProxyModel_userMethod cobj_qobj (toCInt evid)
instance QuserMethod (QSortFilterProxyModel ()) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QSortFilterProxyModel_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
foreign import ccall "qtc_QSortFilterProxyModel_userMethodVariant" qtc_QSortFilterProxyModel_userMethodVariant :: Ptr (TQSortFilterProxyModel a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
instance QuserMethod (QSortFilterProxyModelSc a) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QSortFilterProxyModel_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
class QqSortFilterProxyModel x1 where
qSortFilterProxyModel :: x1 -> IO (QSortFilterProxyModel ())
instance QqSortFilterProxyModel (()) where
qSortFilterProxyModel ()
= withQSortFilterProxyModelResult $
qtc_QSortFilterProxyModel
foreign import ccall "qtc_QSortFilterProxyModel" qtc_QSortFilterProxyModel :: IO (Ptr (TQSortFilterProxyModel ()))
instance QqSortFilterProxyModel ((QObject t1)) where
qSortFilterProxyModel (x1)
= withQSortFilterProxyModelResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel1 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel1" qtc_QSortFilterProxyModel1 :: Ptr (TQObject t1) -> IO (Ptr (TQSortFilterProxyModel ()))
instance Qbuddy (QSortFilterProxyModel ()) ((QModelIndex t1)) (IO (QModelIndex ())) where
buddy x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_buddy_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_buddy_h" qtc_QSortFilterProxyModel_buddy_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO (Ptr (TQModelIndex ()))
instance Qbuddy (QSortFilterProxyModelSc a) ((QModelIndex t1)) (IO (QModelIndex ())) where
buddy x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_buddy_h cobj_x0 cobj_x1
instance QcanFetchMore (QSortFilterProxyModel ()) ((QModelIndex t1)) where
canFetchMore x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_canFetchMore_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_canFetchMore_h" qtc_QSortFilterProxyModel_canFetchMore_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO CBool
instance QcanFetchMore (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
canFetchMore x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_canFetchMore_h cobj_x0 cobj_x1
instance Qclear (QSortFilterProxyModel a) (()) where
clear x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_clear cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_clear" qtc_QSortFilterProxyModel_clear :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance QcolumnCount (QSortFilterProxyModel a) (()) where
columnCount x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_columnCount cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_columnCount" qtc_QSortFilterProxyModel_columnCount :: Ptr (TQSortFilterProxyModel a) -> IO CInt
instance QcolumnCount (QSortFilterProxyModel ()) ((QModelIndex t1)) where
columnCount x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_columnCount1_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_columnCount1_h" qtc_QSortFilterProxyModel_columnCount1_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO CInt
instance QcolumnCount (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
columnCount x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_columnCount1_h cobj_x0 cobj_x1
instance Qqdata (QSortFilterProxyModel ()) ((QModelIndex t1)) (IO (QVariant ())) where
qdata x0 (x1)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_data cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_data" qtc_QSortFilterProxyModel_data :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO (Ptr (TQVariant ()))
instance Qqdata (QSortFilterProxyModelSc a) ((QModelIndex t1)) (IO (QVariant ())) where
qdata x0 (x1)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_data cobj_x0 cobj_x1
instance Qqdata_nf (QSortFilterProxyModel ()) ((QModelIndex t1)) (IO (QVariant ())) where
qdata_nf x0 (x1)
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_data cobj_x0 cobj_x1
instance Qqdata_nf (QSortFilterProxyModelSc a) ((QModelIndex t1)) (IO (QVariant ())) where
qdata_nf x0 (x1)
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_data cobj_x0 cobj_x1
instance Qqdata (QSortFilterProxyModel ()) ((QModelIndex t1, Int)) (IO (QVariant ())) where
qdata x0 (x1, x2)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_data1_h cobj_x0 cobj_x1 (toCInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_data1_h" qtc_QSortFilterProxyModel_data1_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> CInt -> IO (Ptr (TQVariant ()))
instance Qqdata (QSortFilterProxyModelSc a) ((QModelIndex t1, Int)) (IO (QVariant ())) where
qdata x0 (x1, x2)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_data1_h cobj_x0 cobj_x1 (toCInt x2)
instance Qqdata_nf (QSortFilterProxyModel ()) ((QModelIndex t1, Int)) (IO (QVariant ())) where
qdata_nf x0 (x1, x2)
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_data1_h cobj_x0 cobj_x1 (toCInt x2)
instance Qqdata_nf (QSortFilterProxyModelSc a) ((QModelIndex t1, Int)) (IO (QVariant ())) where
qdata_nf x0 (x1, x2)
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_data1_h cobj_x0 cobj_x1 (toCInt x2)
instance QdropMimeData (QSortFilterProxyModel ()) ((QMimeData t1, DropAction, Int, Int, QModelIndex t5)) where
dropMimeData x0 (x1, x2, x3, x4, x5)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x5 $ \cobj_x5 ->
qtc_QSortFilterProxyModel_dropMimeData_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCInt x3) (toCInt x4) cobj_x5
foreign import ccall "qtc_QSortFilterProxyModel_dropMimeData_h" qtc_QSortFilterProxyModel_dropMimeData_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQMimeData t1) -> CLong -> CInt -> CInt -> Ptr (TQModelIndex t5) -> IO CBool
instance QdropMimeData (QSortFilterProxyModelSc a) ((QMimeData t1, DropAction, Int, Int, QModelIndex t5)) where
dropMimeData x0 (x1, x2, x3, x4, x5)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x5 $ \cobj_x5 ->
qtc_QSortFilterProxyModel_dropMimeData_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCInt x3) (toCInt x4) cobj_x5
-- | Binding for the C++ getter @QSortFilterProxyModel::dynamicSortFilter()@:
-- unwraps the Haskell-side object to its C pointer, calls the C stub, and
-- converts the returned C bool.  The unused @()@ argument follows this
-- binding generator's uniform calling convention.  NOTE(review): this file
-- is machine generated (see module header); hand-written comments will be
-- lost when the bindings are regenerated.
dynamicSortFilter :: QSortFilterProxyModel a -> (()) -> IO (Bool)
dynamicSortFilter x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_dynamicSortFilter cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_dynamicSortFilter" qtc_QSortFilterProxyModel_dynamicSortFilter :: Ptr (TQSortFilterProxyModel a) -> IO CBool
instance QfetchMore (QSortFilterProxyModel ()) ((QModelIndex t1)) where
fetchMore x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_fetchMore_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_fetchMore_h" qtc_QSortFilterProxyModel_fetchMore_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO ()
instance QfetchMore (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
fetchMore x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_fetchMore_h cobj_x0 cobj_x1
class QfilterAcceptsColumn x0 x1 where
filterAcceptsColumn :: x0 -> x1 -> IO (Bool)
instance QfilterAcceptsColumn (QSortFilterProxyModel ()) ((Int, QModelIndex t2)) where
filterAcceptsColumn x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_filterAcceptsColumn cobj_x0 (toCInt x1) cobj_x2
foreign import ccall "qtc_QSortFilterProxyModel_filterAcceptsColumn" qtc_QSortFilterProxyModel_filterAcceptsColumn :: Ptr (TQSortFilterProxyModel a) -> CInt -> Ptr (TQModelIndex t2) -> IO CBool
instance QfilterAcceptsColumn (QSortFilterProxyModelSc a) ((Int, QModelIndex t2)) where
filterAcceptsColumn x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_filterAcceptsColumn cobj_x0 (toCInt x1) cobj_x2
class QfilterAcceptsRow x0 x1 where
filterAcceptsRow :: x0 -> x1 -> IO (Bool)
instance QfilterAcceptsRow (QSortFilterProxyModel ()) ((Int, QModelIndex t2)) where
filterAcceptsRow x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_filterAcceptsRow cobj_x0 (toCInt x1) cobj_x2
foreign import ccall "qtc_QSortFilterProxyModel_filterAcceptsRow" qtc_QSortFilterProxyModel_filterAcceptsRow :: Ptr (TQSortFilterProxyModel a) -> CInt -> Ptr (TQModelIndex t2) -> IO CBool
instance QfilterAcceptsRow (QSortFilterProxyModelSc a) ((Int, QModelIndex t2)) where
filterAcceptsRow x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_filterAcceptsRow cobj_x0 (toCInt x1) cobj_x2
filterCaseSensitivity :: QSortFilterProxyModel a -> (()) -> IO (CaseSensitivity)
filterCaseSensitivity x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_filterCaseSensitivity cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_filterCaseSensitivity" qtc_QSortFilterProxyModel_filterCaseSensitivity :: Ptr (TQSortFilterProxyModel a) -> IO CLong
class QfilterChanged x0 x1 where
filterChanged :: x0 -> x1 -> IO ()
instance QfilterChanged (QSortFilterProxyModel ()) (()) where
filterChanged x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_filterChanged cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_filterChanged" qtc_QSortFilterProxyModel_filterChanged :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance QfilterChanged (QSortFilterProxyModelSc a) (()) where
filterChanged x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_filterChanged cobj_x0
-- | Binding for the C++ getter @QSortFilterProxyModel::filterKeyColumn()@:
-- unwraps the object pointer, calls the C stub, and converts the returned
-- C int.  The unused @()@ argument follows this binding generator's
-- uniform calling convention.  NOTE(review): this file is machine
-- generated (see module header); comments here will be lost on
-- regeneration.
filterKeyColumn :: QSortFilterProxyModel a -> (()) -> IO (Int)
filterKeyColumn x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_filterKeyColumn cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_filterKeyColumn" qtc_QSortFilterProxyModel_filterKeyColumn :: Ptr (TQSortFilterProxyModel a) -> IO CInt
filterRegExp :: QSortFilterProxyModel a -> (()) -> IO (QRegExp ())
filterRegExp x0 ()
= withQRegExpResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_filterRegExp cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_filterRegExp" qtc_QSortFilterProxyModel_filterRegExp :: Ptr (TQSortFilterProxyModel a) -> IO (Ptr (TQRegExp ()))
filterRole :: QSortFilterProxyModel a -> (()) -> IO (Int)
filterRole x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_filterRole cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_filterRole" qtc_QSortFilterProxyModel_filterRole :: Ptr (TQSortFilterProxyModel a) -> IO CInt
instance Qflags (QSortFilterProxyModel ()) ((QModelIndex t1)) (IO (ItemFlags)) where
flags x0 (x1)
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_flags_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_flags_h" qtc_QSortFilterProxyModel_flags_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO CLong
instance Qflags (QSortFilterProxyModelSc a) ((QModelIndex t1)) (IO (ItemFlags)) where
flags x0 (x1)
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_flags_h cobj_x0 cobj_x1
instance QhasChildren (QSortFilterProxyModel a) (()) where
hasChildren x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_hasChildren cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_hasChildren" qtc_QSortFilterProxyModel_hasChildren :: Ptr (TQSortFilterProxyModel a) -> IO CBool
instance QhasChildren (QSortFilterProxyModel ()) ((QModelIndex t1)) where
hasChildren x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_hasChildren1_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_hasChildren1_h" qtc_QSortFilterProxyModel_hasChildren1_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO CBool
instance QhasChildren (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
hasChildren x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_hasChildren1_h cobj_x0 cobj_x1
instance QheaderData (QSortFilterProxyModel ()) ((Int, QtOrientation)) where
headerData x0 (x1, x2)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_headerData cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_headerData" qtc_QSortFilterProxyModel_headerData :: Ptr (TQSortFilterProxyModel a) -> CInt -> CLong -> IO (Ptr (TQVariant ()))
instance QheaderData (QSortFilterProxyModelSc a) ((Int, QtOrientation)) where
headerData x0 (x1, x2)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_headerData cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2)
instance QheaderData_nf (QSortFilterProxyModel ()) ((Int, QtOrientation)) where
headerData_nf x0 (x1, x2)
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_headerData cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2)
instance QheaderData_nf (QSortFilterProxyModelSc a) ((Int, QtOrientation)) where
headerData_nf x0 (x1, x2)
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_headerData cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2)
instance QheaderData (QSortFilterProxyModel ()) ((Int, QtOrientation, Int)) where
headerData x0 (x1, x2, x3)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_headerData1_h cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2) (toCInt x3)
foreign import ccall "qtc_QSortFilterProxyModel_headerData1_h" qtc_QSortFilterProxyModel_headerData1_h :: Ptr (TQSortFilterProxyModel a) -> CInt -> CLong -> CInt -> IO (Ptr (TQVariant ()))
instance QheaderData (QSortFilterProxyModelSc a) ((Int, QtOrientation, Int)) where
headerData x0 (x1, x2, x3)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_headerData1_h cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2) (toCInt x3)
instance QheaderData_nf (QSortFilterProxyModel ()) ((Int, QtOrientation, Int)) where
headerData_nf x0 (x1, x2, x3)
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_headerData1_h cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2) (toCInt x3)
instance QheaderData_nf (QSortFilterProxyModelSc a) ((Int, QtOrientation, Int)) where
headerData_nf x0 (x1, x2, x3)
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_headerData1_h cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2) (toCInt x3)
instance Qindex (QSortFilterProxyModel a) ((Int, Int)) (IO (QModelIndex ())) where
index x0 (x1, x2)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_index cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_index" qtc_QSortFilterProxyModel_index :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> IO (Ptr (TQModelIndex ()))
instance Qindex (QSortFilterProxyModel ()) ((Int, Int, QModelIndex t3)) (IO (QModelIndex ())) where
index x0 (x1, x2, x3)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_index1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
foreign import ccall "qtc_QSortFilterProxyModel_index1_h" qtc_QSortFilterProxyModel_index1_h :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> Ptr (TQModelIndex t3) -> IO (Ptr (TQModelIndex ()))
instance Qindex (QSortFilterProxyModelSc a) ((Int, Int, QModelIndex t3)) (IO (QModelIndex ())) where
index x0 (x1, x2, x3)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_index1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
instance QinsertColumns (QSortFilterProxyModel a) ((Int, Int)) (IO (Bool)) where
insertColumns x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_insertColumns cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_insertColumns" qtc_QSortFilterProxyModel_insertColumns :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> IO CBool
instance QinsertColumns (QSortFilterProxyModel ()) ((Int, Int, QModelIndex t3)) (IO (Bool)) where
insertColumns x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_insertColumns1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
foreign import ccall "qtc_QSortFilterProxyModel_insertColumns1_h" qtc_QSortFilterProxyModel_insertColumns1_h :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> Ptr (TQModelIndex t3) -> IO CBool
instance QinsertColumns (QSortFilterProxyModelSc a) ((Int, Int, QModelIndex t3)) (IO (Bool)) where
insertColumns x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_insertColumns1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
instance QinsertRows (QSortFilterProxyModel a) ((Int, Int)) (IO (Bool)) where
insertRows x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_insertRows cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_insertRows" qtc_QSortFilterProxyModel_insertRows :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> IO CBool
instance QinsertRows (QSortFilterProxyModel ()) ((Int, Int, QModelIndex t3)) (IO (Bool)) where
insertRows x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_insertRows1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
foreign import ccall "qtc_QSortFilterProxyModel_insertRows1_h" qtc_QSortFilterProxyModel_insertRows1_h :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> Ptr (TQModelIndex t3) -> IO CBool
instance QinsertRows (QSortFilterProxyModelSc a) ((Int, Int, QModelIndex t3)) (IO (Bool)) where
insertRows x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_insertRows1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
instance Qinvalidate (QSortFilterProxyModel a) (()) where
invalidate x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_invalidate cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_invalidate" qtc_QSortFilterProxyModel_invalidate :: Ptr (TQSortFilterProxyModel a) -> IO ()
class QinvalidateFilter x0 x1 where
invalidateFilter :: x0 -> x1 -> IO ()
instance QinvalidateFilter (QSortFilterProxyModel ()) (()) where
invalidateFilter x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_invalidateFilter cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_invalidateFilter" qtc_QSortFilterProxyModel_invalidateFilter :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance QinvalidateFilter (QSortFilterProxyModelSc a) (()) where
invalidateFilter x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_invalidateFilter cobj_x0
-- | Binding for the C++ getter
-- @QSortFilterProxyModel::isSortLocaleAware()@: unwraps the object
-- pointer, calls the C stub, and converts the returned C bool.  The unused
-- @()@ argument follows this binding generator's uniform calling
-- convention.  NOTE(review): this file is machine generated (see module
-- header); comments here will be lost on regeneration.
isSortLocaleAware :: QSortFilterProxyModel a -> (()) -> IO (Bool)
isSortLocaleAware x0 ()
  = withBoolResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_isSortLocaleAware cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_isSortLocaleAware" qtc_QSortFilterProxyModel_isSortLocaleAware :: Ptr (TQSortFilterProxyModel a) -> IO CBool
class QlessThan x0 x1 where
lessThan :: x0 -> x1 -> IO (Bool)
instance QlessThan (QSortFilterProxyModel ()) ((QModelIndex t1, QModelIndex t2)) where
lessThan x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_lessThan cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QSortFilterProxyModel_lessThan" qtc_QSortFilterProxyModel_lessThan :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> Ptr (TQModelIndex t2) -> IO CBool
instance QlessThan (QSortFilterProxyModelSc a) ((QModelIndex t1, QModelIndex t2)) where
lessThan x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_lessThan cobj_x0 cobj_x1 cobj_x2
instance QmapFromSource (QSortFilterProxyModel ()) ((QModelIndex t1)) where
mapFromSource x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_mapFromSource_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_mapFromSource_h" qtc_QSortFilterProxyModel_mapFromSource_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO (Ptr (TQModelIndex ()))
instance QmapFromSource (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
mapFromSource x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_mapFromSource_h cobj_x0 cobj_x1
instance QmapSelectionFromSource (QSortFilterProxyModel ()) ((QItemSelection t1)) where
mapSelectionFromSource x0 (x1)
= withQItemSelectionResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_mapSelectionFromSource_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_mapSelectionFromSource_h" qtc_QSortFilterProxyModel_mapSelectionFromSource_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQItemSelection t1) -> IO (Ptr (TQItemSelection ()))
instance QmapSelectionFromSource (QSortFilterProxyModelSc a) ((QItemSelection t1)) where
mapSelectionFromSource x0 (x1)
= withQItemSelectionResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_mapSelectionFromSource_h cobj_x0 cobj_x1
instance QmapSelectionToSource (QSortFilterProxyModel ()) ((QItemSelection t1)) where
mapSelectionToSource x0 (x1)
= withQItemSelectionResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_mapSelectionToSource_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_mapSelectionToSource_h" qtc_QSortFilterProxyModel_mapSelectionToSource_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQItemSelection t1) -> IO (Ptr (TQItemSelection ()))
instance QmapSelectionToSource (QSortFilterProxyModelSc a) ((QItemSelection t1)) where
mapSelectionToSource x0 (x1)
= withQItemSelectionResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_mapSelectionToSource_h cobj_x0 cobj_x1
instance QmapToSource (QSortFilterProxyModel ()) ((QModelIndex t1)) where
mapToSource x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_mapToSource_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_mapToSource_h" qtc_QSortFilterProxyModel_mapToSource_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO (Ptr (TQModelIndex ()))
instance QmapToSource (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
mapToSource x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_mapToSource_h cobj_x0 cobj_x1
class Qmatch x0 x1 where
match :: x0 -> x1 -> IO ([QModelIndex ()])
instance Qmatch (QSortFilterProxyModel ()) ((QModelIndex t1, Int, QVariant t3)) where
match x0 (x1, x2, x3)
= withQListObjectRefResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_match cobj_x0 cobj_x1 (toCInt x2) cobj_x3 arr
foreign import ccall "qtc_QSortFilterProxyModel_match" qtc_QSortFilterProxyModel_match :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> CInt -> Ptr (TQVariant t3) -> Ptr (Ptr (TQModelIndex ())) -> IO CInt
instance Qmatch (QSortFilterProxyModelSc a) ((QModelIndex t1, Int, QVariant t3)) where
match x0 (x1, x2, x3)
= withQListObjectRefResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_match cobj_x0 cobj_x1 (toCInt x2) cobj_x3 arr
instance Qmatch (QSortFilterProxyModel ()) ((QModelIndex t1, Int, QVariant t3, Int)) where
match x0 (x1, x2, x3, x4)
= withQListObjectRefResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_match1 cobj_x0 cobj_x1 (toCInt x2) cobj_x3 (toCInt x4) arr
foreign import ccall "qtc_QSortFilterProxyModel_match1" qtc_QSortFilterProxyModel_match1 :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> CInt -> Ptr (TQVariant t3) -> CInt -> Ptr (Ptr (TQModelIndex ())) -> IO CInt
instance Qmatch (QSortFilterProxyModelSc a) ((QModelIndex t1, Int, QVariant t3, Int)) where
match x0 (x1, x2, x3, x4)
= withQListObjectRefResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_match1 cobj_x0 cobj_x1 (toCInt x2) cobj_x3 (toCInt x4) arr
instance Qmatch (QSortFilterProxyModel ()) ((QModelIndex t1, Int, QVariant t3, Int, MatchFlags)) where
match x0 (x1, x2, x3, x4, x5)
= withQListObjectRefResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_match2_h cobj_x0 cobj_x1 (toCInt x2) cobj_x3 (toCInt x4) (toCLong $ qFlags_toInt x5) arr
foreign import ccall "qtc_QSortFilterProxyModel_match2_h" qtc_QSortFilterProxyModel_match2_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> CInt -> Ptr (TQVariant t3) -> CInt -> CLong -> Ptr (Ptr (TQModelIndex ())) -> IO CInt
instance Qmatch (QSortFilterProxyModelSc a) ((QModelIndex t1, Int, QVariant t3, Int, MatchFlags)) where
match x0 (x1, x2, x3, x4, x5)
= withQListObjectRefResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_match2_h cobj_x0 cobj_x1 (toCInt x2) cobj_x3 (toCInt x4) (toCLong $ qFlags_toInt x5) arr
instance Qparent (QSortFilterProxyModel ()) ((QModelIndex t1)) (IO (QModelIndex ())) where
parent x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_parent1_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_parent1_h" qtc_QSortFilterProxyModel_parent1_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO (Ptr (TQModelIndex ()))
instance Qparent (QSortFilterProxyModelSc a) ((QModelIndex t1)) (IO (QModelIndex ())) where
parent x0 (x1)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_parent1_h cobj_x0 cobj_x1
instance Qparent (QSortFilterProxyModel a) (()) (IO (QObject ())) where
parent x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_parent cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_parent" qtc_QSortFilterProxyModel_parent :: Ptr (TQSortFilterProxyModel a) -> IO (Ptr (TQObject ()))
instance QremoveColumns (QSortFilterProxyModel a) ((Int, Int)) (IO (Bool)) where
removeColumns x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_removeColumns cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_removeColumns" qtc_QSortFilterProxyModel_removeColumns :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> IO CBool
instance QremoveColumns (QSortFilterProxyModel ()) ((Int, Int, QModelIndex t3)) (IO (Bool)) where
removeColumns x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_removeColumns1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
foreign import ccall "qtc_QSortFilterProxyModel_removeColumns1_h" qtc_QSortFilterProxyModel_removeColumns1_h :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> Ptr (TQModelIndex t3) -> IO CBool
instance QremoveColumns (QSortFilterProxyModelSc a) ((Int, Int, QModelIndex t3)) (IO (Bool)) where
removeColumns x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_removeColumns1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
instance QremoveRows (QSortFilterProxyModel a) ((Int, Int)) (IO (Bool)) where
removeRows x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_removeRows cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_removeRows" qtc_QSortFilterProxyModel_removeRows :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> IO CBool
instance QremoveRows (QSortFilterProxyModel ()) ((Int, Int, QModelIndex t3)) (IO (Bool)) where
removeRows x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_removeRows1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
foreign import ccall "qtc_QSortFilterProxyModel_removeRows1_h" qtc_QSortFilterProxyModel_removeRows1_h :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> Ptr (TQModelIndex t3) -> IO CBool
instance QremoveRows (QSortFilterProxyModelSc a) ((Int, Int, QModelIndex t3)) (IO (Bool)) where
removeRows x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_removeRows1_h cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
instance QrowCount (QSortFilterProxyModel a) (()) where
rowCount x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_rowCount cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_rowCount" qtc_QSortFilterProxyModel_rowCount :: Ptr (TQSortFilterProxyModel a) -> IO CInt
instance QrowCount (QSortFilterProxyModel ()) ((QModelIndex t1)) where
rowCount x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_rowCount1_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_rowCount1_h" qtc_QSortFilterProxyModel_rowCount1_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO CInt
instance QrowCount (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
rowCount x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_rowCount1_h cobj_x0 cobj_x1
instance QsetData (QSortFilterProxyModel a) ((QModelIndex t1, QVariant t2)) (IO (Bool)) where
setData x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_setData cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QSortFilterProxyModel_setData" qtc_QSortFilterProxyModel_setData :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> Ptr (TQVariant t2) -> IO CBool
instance QsetData (QSortFilterProxyModel ()) ((QModelIndex t1, QVariant t2, Int)) (IO (Bool)) where
setData x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_setData1_h cobj_x0 cobj_x1 cobj_x2 (toCInt x3)
foreign import ccall "qtc_QSortFilterProxyModel_setData1_h" qtc_QSortFilterProxyModel_setData1_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> Ptr (TQVariant t2) -> CInt -> IO CBool
instance QsetData (QSortFilterProxyModelSc a) ((QModelIndex t1, QVariant t2, Int)) (IO (Bool)) where
setData x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_setData1_h cobj_x0 cobj_x1 cobj_x2 (toCInt x3)
-- | Enable or disable dynamic sorting/filtering on the proxy model.
-- Converts the 'Bool' to a 'CBool' and forwards it to the C-side
-- @qtc_QSortFilterProxyModel_setDynamicSortFilter@ entry point.
setDynamicSortFilter :: QSortFilterProxyModel a -> ((Bool)) -> IO ()
setDynamicSortFilter x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_setDynamicSortFilter cobj_x0 (toCBool x1)

foreign import ccall "qtc_QSortFilterProxyModel_setDynamicSortFilter" qtc_QSortFilterProxyModel_setDynamicSortFilter :: Ptr (TQSortFilterProxyModel a) -> CBool -> IO ()
setFilterCaseSensitivity :: QSortFilterProxyModel a -> ((CaseSensitivity)) -> IO ()
setFilterCaseSensitivity x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_setFilterCaseSensitivity cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QSortFilterProxyModel_setFilterCaseSensitivity" qtc_QSortFilterProxyModel_setFilterCaseSensitivity :: Ptr (TQSortFilterProxyModel a) -> CLong -> IO ()
-- | Set the proxy model's filter to a fixed (non-pattern) string.
-- Marshals the 'String' to a wide C string for the FFI call.
setFilterFixedString :: QSortFilterProxyModel a -> ((String)) -> IO ()
setFilterFixedString x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QSortFilterProxyModel_setFilterFixedString cobj_x0 cstr_x1

foreign import ccall "qtc_QSortFilterProxyModel_setFilterFixedString" qtc_QSortFilterProxyModel_setFilterFixedString :: Ptr (TQSortFilterProxyModel a) -> CWString -> IO ()
-- | Select which column of the source model the filter is applied to.
-- The 'Int' column index is marshalled to a 'CInt' for the C call.
setFilterKeyColumn :: QSortFilterProxyModel a -> ((Int)) -> IO ()
setFilterKeyColumn x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_setFilterKeyColumn cobj_x0 (toCInt x1)

foreign import ccall "qtc_QSortFilterProxyModel_setFilterKeyColumn" qtc_QSortFilterProxyModel_setFilterKeyColumn :: Ptr (TQSortFilterProxyModel a) -> CInt -> IO ()
-- | Overloaded setter for the proxy model's filter regular expression.
-- One instance accepts an already-constructed 'QRegExp' object; the
-- other accepts a pattern 'String' (marshalled as a wide C string),
-- each dispatching to its own C-side wrapper.
class QsetFilterRegExp x1 where
 setFilterRegExp :: QSortFilterProxyModel a -> x1 -> IO ()

-- Variant taking an existing QRegExp object pointer.
instance QsetFilterRegExp ((QRegExp t1)) where
 setFilterRegExp x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QSortFilterProxyModel_setFilterRegExp1 cobj_x0 cobj_x1

foreign import ccall "qtc_QSortFilterProxyModel_setFilterRegExp1" qtc_QSortFilterProxyModel_setFilterRegExp1 :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQRegExp t1) -> IO ()

-- Variant taking the pattern as a String.
instance QsetFilterRegExp ((String)) where
 setFilterRegExp x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    qtc_QSortFilterProxyModel_setFilterRegExp cobj_x0 cstr_x1

foreign import ccall "qtc_QSortFilterProxyModel_setFilterRegExp" qtc_QSortFilterProxyModel_setFilterRegExp :: Ptr (TQSortFilterProxyModel a) -> CWString -> IO ()
setFilterRole :: QSortFilterProxyModel a -> ((Int)) -> IO ()
setFilterRole x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_setFilterRole cobj_x0 (toCInt x1)
foreign import ccall "qtc_QSortFilterProxyModel_setFilterRole" qtc_QSortFilterProxyModel_setFilterRole :: Ptr (TQSortFilterProxyModel a) -> CInt -> IO ()
setFilterWildcard :: QSortFilterProxyModel a -> ((String)) -> IO ()
setFilterWildcard x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSortFilterProxyModel_setFilterWildcard cobj_x0 cstr_x1
foreign import ccall "qtc_QSortFilterProxyModel_setFilterWildcard" qtc_QSortFilterProxyModel_setFilterWildcard :: Ptr (TQSortFilterProxyModel a) -> CWString -> IO ()
instance QsetHeaderData (QSortFilterProxyModel a) ((Int, QtOrientation, QVariant t3)) where
setHeaderData x0 (x1, x2, x3)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_setHeaderData cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2) cobj_x3
foreign import ccall "qtc_QSortFilterProxyModel_setHeaderData" qtc_QSortFilterProxyModel_setHeaderData :: Ptr (TQSortFilterProxyModel a) -> CInt -> CLong -> Ptr (TQVariant t3) -> IO CBool
instance QsetHeaderData (QSortFilterProxyModel ()) ((Int, QtOrientation, QVariant t3, Int)) where
setHeaderData x0 (x1, x2, x3, x4)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_setHeaderData1_h cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2) cobj_x3 (toCInt x4)
foreign import ccall "qtc_QSortFilterProxyModel_setHeaderData1_h" qtc_QSortFilterProxyModel_setHeaderData1_h :: Ptr (TQSortFilterProxyModel a) -> CInt -> CLong -> Ptr (TQVariant t3) -> CInt -> IO CBool
instance QsetHeaderData (QSortFilterProxyModelSc a) ((Int, QtOrientation, QVariant t3, Int)) where
setHeaderData x0 (x1, x2, x3, x4)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_setHeaderData1_h cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2) cobj_x3 (toCInt x4)
setSortCaseSensitivity :: QSortFilterProxyModel a -> ((CaseSensitivity)) -> IO ()
setSortCaseSensitivity x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_setSortCaseSensitivity cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QSortFilterProxyModel_setSortCaseSensitivity" qtc_QSortFilterProxyModel_setSortCaseSensitivity :: Ptr (TQSortFilterProxyModel a) -> CLong -> IO ()
-- | Toggle locale-aware sorting on the proxy model.
-- The 'Bool' is marshalled to a 'CBool' for the C call.
setSortLocaleAware :: QSortFilterProxyModel a -> ((Bool)) -> IO ()
setSortLocaleAware x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_setSortLocaleAware cobj_x0 (toCBool x1)

foreign import ccall "qtc_QSortFilterProxyModel_setSortLocaleAware" qtc_QSortFilterProxyModel_setSortLocaleAware :: Ptr (TQSortFilterProxyModel a) -> CBool -> IO ()
instance QsetSortRole (QSortFilterProxyModel a) ((Int)) where
setSortRole x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_setSortRole cobj_x0 (toCInt x1)
foreign import ccall "qtc_QSortFilterProxyModel_setSortRole" qtc_QSortFilterProxyModel_setSortRole :: Ptr (TQSortFilterProxyModel a) -> CInt -> IO ()
instance QsetSourceModel (QSortFilterProxyModel ()) ((QAbstractItemModel t1)) where
setSourceModel x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_setSourceModel_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_setSourceModel_h" qtc_QSortFilterProxyModel_setSourceModel_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQAbstractItemModel t1) -> IO ()
instance QsetSourceModel (QSortFilterProxyModelSc a) ((QAbstractItemModel t1)) where
setSourceModel x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_setSourceModel_h cobj_x0 cobj_x1
instance Qsort (QSortFilterProxyModel a) ((Int)) where
sort x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_sort cobj_x0 (toCInt x1)
foreign import ccall "qtc_QSortFilterProxyModel_sort" qtc_QSortFilterProxyModel_sort :: Ptr (TQSortFilterProxyModel a) -> CInt -> IO ()
instance Qsort (QSortFilterProxyModel ()) ((Int, SortOrder)) where
sort x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_sort1_h cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_sort1_h" qtc_QSortFilterProxyModel_sort1_h :: Ptr (TQSortFilterProxyModel a) -> CInt -> CLong -> IO ()
instance Qsort (QSortFilterProxyModelSc a) ((Int, SortOrder)) where
sort x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_sort1_h cobj_x0 (toCInt x1) (toCLong $ qEnum_toInt x2)
-- | Query the proxy model's current sort case-sensitivity setting.
-- The C side returns a 'CLong' enum value which 'withQEnumResult'
-- converts to the Haskell 'CaseSensitivity' type.
sortCaseSensitivity :: QSortFilterProxyModel a -> (()) -> IO (CaseSensitivity)
sortCaseSensitivity x0 ()
  = withQEnumResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_sortCaseSensitivity cobj_x0

foreign import ccall "qtc_QSortFilterProxyModel_sortCaseSensitivity" qtc_QSortFilterProxyModel_sortCaseSensitivity :: Ptr (TQSortFilterProxyModel a) -> IO CLong
instance QsortRole (QSortFilterProxyModel a) (()) where
sortRole x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_sortRole cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_sortRole" qtc_QSortFilterProxyModel_sortRole :: Ptr (TQSortFilterProxyModel a) -> IO CInt
instance Qqspan (QSortFilterProxyModel ()) ((QModelIndex t1)) where
qspan x0 (x1)
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_span_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_span_h" qtc_QSortFilterProxyModel_span_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> IO (Ptr (TQSize ()))
instance Qqspan (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
qspan x0 (x1)
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_span_h cobj_x0 cobj_x1
instance Qspan (QSortFilterProxyModel ()) ((QModelIndex t1)) where
span x0 (x1)
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_span_qth_h cobj_x0 cobj_x1 csize_ret_w csize_ret_h
foreign import ccall "qtc_QSortFilterProxyModel_span_qth_h" qtc_QSortFilterProxyModel_span_qth_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> Ptr CInt -> Ptr CInt -> IO ()
instance Qspan (QSortFilterProxyModelSc a) ((QModelIndex t1)) where
span x0 (x1)
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_span_qth_h cobj_x0 cobj_x1 csize_ret_w csize_ret_h
instance QsupportedDropActions (QSortFilterProxyModel ()) (()) where
supportedDropActions x0 ()
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_supportedDropActions_h cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_supportedDropActions_h" qtc_QSortFilterProxyModel_supportedDropActions_h :: Ptr (TQSortFilterProxyModel a) -> IO CLong
instance QsupportedDropActions (QSortFilterProxyModelSc a) (()) where
supportedDropActions x0 ()
= withQFlagsResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_supportedDropActions_h cobj_x0
-- | Immediately destroy the underlying C++ QSortFilterProxyModel object.
-- After this call the Haskell handle must not be used again.
qSortFilterProxyModel_delete :: QSortFilterProxyModel a -> IO ()
qSortFilterProxyModel_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_delete cobj_x0

foreign import ccall "qtc_QSortFilterProxyModel_delete" qtc_QSortFilterProxyModel_delete :: Ptr (TQSortFilterProxyModel a) -> IO ()

-- | Schedule the underlying C++ object for deferred deletion
-- via the C-side deleteLater wrapper.
qSortFilterProxyModel_deleteLater :: QSortFilterProxyModel a -> IO ()
qSortFilterProxyModel_deleteLater x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QSortFilterProxyModel_deleteLater cobj_x0

foreign import ccall "qtc_QSortFilterProxyModel_deleteLater" qtc_QSortFilterProxyModel_deleteLater :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance Qrevert (QSortFilterProxyModel ()) (()) where
revert x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_revert_h cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_revert_h" qtc_QSortFilterProxyModel_revert_h :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance Qrevert (QSortFilterProxyModelSc a) (()) where
revert x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_revert_h cobj_x0
instance Qsubmit (QSortFilterProxyModel ()) (()) where
submit x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_submit_h cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_submit_h" qtc_QSortFilterProxyModel_submit_h :: Ptr (TQSortFilterProxyModel a) -> IO CBool
instance Qsubmit (QSortFilterProxyModelSc a) (()) where
submit x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_submit_h cobj_x0
instance QbeginInsertColumns (QSortFilterProxyModel ()) ((QModelIndex t1, Int, Int)) where
beginInsertColumns x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_beginInsertColumns cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
foreign import ccall "qtc_QSortFilterProxyModel_beginInsertColumns" qtc_QSortFilterProxyModel_beginInsertColumns :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> CInt -> CInt -> IO ()
instance QbeginInsertColumns (QSortFilterProxyModelSc a) ((QModelIndex t1, Int, Int)) where
beginInsertColumns x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_beginInsertColumns cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
instance QbeginInsertRows (QSortFilterProxyModel ()) ((QModelIndex t1, Int, Int)) where
beginInsertRows x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_beginInsertRows cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
foreign import ccall "qtc_QSortFilterProxyModel_beginInsertRows" qtc_QSortFilterProxyModel_beginInsertRows :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> CInt -> CInt -> IO ()
instance QbeginInsertRows (QSortFilterProxyModelSc a) ((QModelIndex t1, Int, Int)) where
beginInsertRows x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_beginInsertRows cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
instance QbeginRemoveColumns (QSortFilterProxyModel ()) ((QModelIndex t1, Int, Int)) where
beginRemoveColumns x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_beginRemoveColumns cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
foreign import ccall "qtc_QSortFilterProxyModel_beginRemoveColumns" qtc_QSortFilterProxyModel_beginRemoveColumns :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> CInt -> CInt -> IO ()
instance QbeginRemoveColumns (QSortFilterProxyModelSc a) ((QModelIndex t1, Int, Int)) where
beginRemoveColumns x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_beginRemoveColumns cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
instance QbeginRemoveRows (QSortFilterProxyModel ()) ((QModelIndex t1, Int, Int)) where
beginRemoveRows x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_beginRemoveRows cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
foreign import ccall "qtc_QSortFilterProxyModel_beginRemoveRows" qtc_QSortFilterProxyModel_beginRemoveRows :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> CInt -> CInt -> IO ()
instance QbeginRemoveRows (QSortFilterProxyModelSc a) ((QModelIndex t1, Int, Int)) where
beginRemoveRows x0 (x1, x2, x3)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_beginRemoveRows cobj_x0 cobj_x1 (toCInt x2) (toCInt x3)
instance QchangePersistentIndex (QSortFilterProxyModel ()) ((QModelIndex t1, QModelIndex t2)) where
changePersistentIndex x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_changePersistentIndex cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QSortFilterProxyModel_changePersistentIndex" qtc_QSortFilterProxyModel_changePersistentIndex :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQModelIndex t1) -> Ptr (TQModelIndex t2) -> IO ()
instance QchangePersistentIndex (QSortFilterProxyModelSc a) ((QModelIndex t1, QModelIndex t2)) where
changePersistentIndex x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_changePersistentIndex cobj_x0 cobj_x1 cobj_x2
instance QcreateIndex (QSortFilterProxyModel ()) ((Int, Int)) where
createIndex x0 (x1, x2)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_createIndex cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QSortFilterProxyModel_createIndex" qtc_QSortFilterProxyModel_createIndex :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> IO (Ptr (TQModelIndex ()))
instance QcreateIndex (QSortFilterProxyModelSc a) ((Int, Int)) where
createIndex x0 (x1, x2)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_createIndex cobj_x0 (toCInt x1) (toCInt x2)
instance QcreateIndex (QSortFilterProxyModel ()) ((Int, Int, Int)) where
createIndex x0 (x1, x2, x3)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_createIndex2 cobj_x0 (toCInt x1) (toCInt x2) (toCUInt x3)
foreign import ccall "qtc_QSortFilterProxyModel_createIndex2" qtc_QSortFilterProxyModel_createIndex2 :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> CUInt -> IO (Ptr (TQModelIndex ()))
instance QcreateIndex (QSortFilterProxyModelSc a) ((Int, Int, Int)) where
createIndex x0 (x1, x2, x3)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_createIndex2 cobj_x0 (toCInt x1) (toCInt x2) (toCUInt x3)
instance QcreateIndex (QSortFilterProxyModel ()) ((Int, Int, QVoid t3)) where
createIndex x0 (x1, x2, x3)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_createIndex3 cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
foreign import ccall "qtc_QSortFilterProxyModel_createIndex3" qtc_QSortFilterProxyModel_createIndex3 :: Ptr (TQSortFilterProxyModel a) -> CInt -> CInt -> Ptr (TQVoid t3) -> IO (Ptr (TQModelIndex ()))
instance QcreateIndex (QSortFilterProxyModelSc a) ((Int, Int, QVoid t3)) where
createIndex x0 (x1, x2, x3)
= withQModelIndexResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QSortFilterProxyModel_createIndex3 cobj_x0 (toCInt x1) (toCInt x2) cobj_x3
instance QendInsertColumns (QSortFilterProxyModel ()) (()) where
endInsertColumns x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_endInsertColumns cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_endInsertColumns" qtc_QSortFilterProxyModel_endInsertColumns :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance QendInsertColumns (QSortFilterProxyModelSc a) (()) where
endInsertColumns x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_endInsertColumns cobj_x0
instance QendInsertRows (QSortFilterProxyModel ()) (()) where
endInsertRows x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_endInsertRows cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_endInsertRows" qtc_QSortFilterProxyModel_endInsertRows :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance QendInsertRows (QSortFilterProxyModelSc a) (()) where
endInsertRows x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_endInsertRows cobj_x0
instance QendRemoveColumns (QSortFilterProxyModel ()) (()) where
endRemoveColumns x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_endRemoveColumns cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_endRemoveColumns" qtc_QSortFilterProxyModel_endRemoveColumns :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance QendRemoveColumns (QSortFilterProxyModelSc a) (()) where
endRemoveColumns x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_endRemoveColumns cobj_x0
instance QendRemoveRows (QSortFilterProxyModel ()) (()) where
endRemoveRows x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_endRemoveRows cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_endRemoveRows" qtc_QSortFilterProxyModel_endRemoveRows :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance QendRemoveRows (QSortFilterProxyModelSc a) (()) where
endRemoveRows x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_endRemoveRows cobj_x0
instance QinsertColumn (QSortFilterProxyModel ()) ((Int)) (IO (Bool)) where
insertColumn x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_insertColumn cobj_x0 (toCInt x1)
foreign import ccall "qtc_QSortFilterProxyModel_insertColumn" qtc_QSortFilterProxyModel_insertColumn :: Ptr (TQSortFilterProxyModel a) -> CInt -> IO CBool
instance QinsertColumn (QSortFilterProxyModelSc a) ((Int)) (IO (Bool)) where
insertColumn x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_insertColumn cobj_x0 (toCInt x1)
instance QinsertColumn (QSortFilterProxyModel ()) ((Int, QModelIndex t2)) (IO (Bool)) where
insertColumn x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_insertColumn1 cobj_x0 (toCInt x1) cobj_x2
foreign import ccall "qtc_QSortFilterProxyModel_insertColumn1" qtc_QSortFilterProxyModel_insertColumn1 :: Ptr (TQSortFilterProxyModel a) -> CInt -> Ptr (TQModelIndex t2) -> IO CBool
instance QinsertColumn (QSortFilterProxyModelSc a) ((Int, QModelIndex t2)) (IO (Bool)) where
insertColumn x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_insertColumn1 cobj_x0 (toCInt x1) cobj_x2
instance QinsertRow (QSortFilterProxyModel ()) ((Int)) (IO (Bool)) where
insertRow x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_insertRow cobj_x0 (toCInt x1)
foreign import ccall "qtc_QSortFilterProxyModel_insertRow" qtc_QSortFilterProxyModel_insertRow :: Ptr (TQSortFilterProxyModel a) -> CInt -> IO CBool
instance QinsertRow (QSortFilterProxyModelSc a) ((Int)) (IO (Bool)) where
insertRow x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_insertRow cobj_x0 (toCInt x1)
instance QinsertRow (QSortFilterProxyModel ()) ((Int, QModelIndex t2)) (IO (Bool)) where
insertRow x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_insertRow1 cobj_x0 (toCInt x1) cobj_x2
foreign import ccall "qtc_QSortFilterProxyModel_insertRow1" qtc_QSortFilterProxyModel_insertRow1 :: Ptr (TQSortFilterProxyModel a) -> CInt -> Ptr (TQModelIndex t2) -> IO CBool
instance QinsertRow (QSortFilterProxyModelSc a) ((Int, QModelIndex t2)) (IO (Bool)) where
insertRow x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_insertRow1 cobj_x0 (toCInt x1) cobj_x2
instance Qreset (QSortFilterProxyModel ()) (()) (IO ()) where
reset x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_reset cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_reset" qtc_QSortFilterProxyModel_reset :: Ptr (TQSortFilterProxyModel a) -> IO ()
instance Qreset (QSortFilterProxyModelSc a) (()) (IO ()) where
reset x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_reset cobj_x0
instance QchildEvent (QSortFilterProxyModel ()) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_childEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_childEvent" qtc_QSortFilterProxyModel_childEvent :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQChildEvent t1) -> IO ()
instance QchildEvent (QSortFilterProxyModelSc a) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_childEvent cobj_x0 cobj_x1
instance QconnectNotify (QSortFilterProxyModel ()) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSortFilterProxyModel_connectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QSortFilterProxyModel_connectNotify" qtc_QSortFilterProxyModel_connectNotify :: Ptr (TQSortFilterProxyModel a) -> CWString -> IO ()
instance QconnectNotify (QSortFilterProxyModelSc a) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSortFilterProxyModel_connectNotify cobj_x0 cstr_x1
instance QcustomEvent (QSortFilterProxyModel ()) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_customEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_customEvent" qtc_QSortFilterProxyModel_customEvent :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQEvent t1) -> IO ()
instance QcustomEvent (QSortFilterProxyModelSc a) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_customEvent cobj_x0 cobj_x1
instance QdisconnectNotify (QSortFilterProxyModel ()) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSortFilterProxyModel_disconnectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QSortFilterProxyModel_disconnectNotify" qtc_QSortFilterProxyModel_disconnectNotify :: Ptr (TQSortFilterProxyModel a) -> CWString -> IO ()
instance QdisconnectNotify (QSortFilterProxyModelSc a) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSortFilterProxyModel_disconnectNotify cobj_x0 cstr_x1
instance Qevent (QSortFilterProxyModel ()) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_event_h cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_event_h" qtc_QSortFilterProxyModel_event_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent (QSortFilterProxyModelSc a) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_event_h cobj_x0 cobj_x1
instance QeventFilter (QSortFilterProxyModel ()) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_eventFilter_h cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QSortFilterProxyModel_eventFilter_h" qtc_QSortFilterProxyModel_eventFilter_h :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter (QSortFilterProxyModelSc a) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QSortFilterProxyModel_eventFilter_h cobj_x0 cobj_x1 cobj_x2
instance Qreceivers (QSortFilterProxyModel ()) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSortFilterProxyModel_receivers cobj_x0 cstr_x1
foreign import ccall "qtc_QSortFilterProxyModel_receivers" qtc_QSortFilterProxyModel_receivers :: Ptr (TQSortFilterProxyModel a) -> CWString -> IO CInt
instance Qreceivers (QSortFilterProxyModelSc a) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QSortFilterProxyModel_receivers cobj_x0 cstr_x1
instance Qsender (QSortFilterProxyModel ()) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_sender cobj_x0
foreign import ccall "qtc_QSortFilterProxyModel_sender" qtc_QSortFilterProxyModel_sender :: Ptr (TQSortFilterProxyModel a) -> IO (Ptr (TQObject ()))
instance Qsender (QSortFilterProxyModelSc a) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QSortFilterProxyModel_sender cobj_x0
instance QtimerEvent (QSortFilterProxyModel ()) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_timerEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QSortFilterProxyModel_timerEvent" qtc_QSortFilterProxyModel_timerEvent :: Ptr (TQSortFilterProxyModel a) -> Ptr (TQTimerEvent t1) -> IO ()
instance QtimerEvent (QSortFilterProxyModelSc a) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QSortFilterProxyModel_timerEvent cobj_x0 cobj_x1
| keera-studios/hsQt | Qtc/Gui/QSortFilterProxyModel.hs | bsd-2-clause | 63,882 | 0 | 18 | 9,355 | 19,025 | 9,726 | 9,299 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE UnicodeSyntax #-}
module File.Remove where
import System.Fuse
import DB.Base
import DB.Read (fileEntityFromPath)
import Debug (dbg)
import Node (nodeNamed)
import Parse
import File.Util (rmLastTag)
{- | unlink(const char* path)

   Remove (delete) the given file, symbolic link, hard link, or special
   node. Looks the file entity up by path; if none exists the call
   reports ENOENT, otherwise the tag implied by the path's last
   component is removed from the file and the call succeeds.
-}
tRemoveLink ∷ DB → FilePath → IO Errno
tRemoveLink db path = do
  dbg $ "RemoveLink: " ++ path
  entity ← fileEntityFromPath db path
  case entity of
    Just (FileEntity fileId _) → do
      rmLastTag db fileId path
      return eOK
    Nothing →
      return eNOENT
| marklar/TagFS | src/File/Remove.hs | bsd-3-clause | 818 | 0 | 12 | 280 | 156 | 81 | 75 | 20 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.ByteString.Char8 (pack)
import Data.List (nub)
import Network.Wai.Middleware.Static (staticPolicy, addBase)
import System.Directory (getDirectoryContents)
import System.Environment (getEnv)
import Text.Regex (mkRegex, subRegex)
import Text.Regex.Base (matchTest)
import Web.Scotty (scotty, middleware)
import Database (connectInfo, getDBConnection, seedDB)
import Routes (routes)
-- | List the entries of the directory named by the ANIMU_HOME
-- environment variable.
getShows :: IO [FilePath]
getShows = getDirectoryContents =<< getEnv "ANIMU_HOME"
-- | Extract show titles from fansub-style file names: keep only names
-- that match both the bracketed group prefix and the " - <episode>"
-- suffix, and return each with both parts stripped off.
getNames :: [FilePath] -> [String]
getNames = concatMap title
  where
    prefix = mkRegex "^\\[.*\\] "
    suffix = mkRegex " - \\d*.*$"
    strip re s = subRegex re s ""
    title name
      | matchTest prefix name && matchTest suffix name =
          [strip prefix (strip suffix name)]
      | otherwise = []
-- | Wire everything together: scan the show directory, seed the Redis
-- database, and serve the web app.
main :: IO ()
main = do
  myShows <- nub . getNames <$> getShows
  -- NOTE(review): 'read' here crashes on a malformed PORT value;
  -- readMaybe with an explicit error would fail more gracefully.
  port <- read <$> getEnv "PORT"
  host <- getEnv "REDIS_HOST"
  redisPort <- getEnv "REDIS_PORT"
  auth <- getEnv "REDIS_AUTH"
  conn <- getDBConnection $ connectInfo host redisPort $ pack auth
  _ <- seedDB conn myShows
  scotty port $ do
    -- Static assets are served from the built front-end directory.
    middleware $ staticPolicy $ addBase "web/dist"
    routes conn myShows
| justinwoo/tracker-hs | app/Main.hs | bsd-3-clause | 1,274 | 0 | 11 | 267 | 418 | 218 | 200 | 36 | 2 |
module TestHelper where
import Test.Tasty
import Test.Tasty.HUnit
import Environment (extend, lookup)
import Evaluator (evaluate)
import Interpreter (interpret, interpretFile)
import Parser (parse)
import Prelude hiding (lookup)
import Types
--
-- This module contains some common helper functions that
-- are used to generate the different test cases.
--
-- Helpers for checking that parsing an input string yields the
-- expected AST.
assertParse :: (DiyAST, String) -> Assertion
assertParse (expected, input) =
  assertEqual description expected actual
  where description = descParse input
        actual      = parse input

descParse :: String -> String
descParse input = concat ["parse \"", input, "\""]
-- Helpers for checking the evaluation of an AST, either in a given
-- environment or starting from an empty one.
assertEvaluateWithEnvironment :: Environment -> (DiyAST, DiyAST) -> Assertion
assertEvaluateWithEnvironment env (expected, input) =
  let (actual, _) = evaluate input env
  in assertEqual (descEvaluate input env) expected actual

assertEvaluateWithoutEnvironment :: (DiyAST, DiyAST) -> Assertion
assertEvaluateWithoutEnvironment =
  assertEvaluateWithEnvironment (Environment [])

descEvaluate :: DiyAST -> Environment -> String
descEvaluate input env =
  concat ["evaluate (", show input, ") (", show env, ")"]
-- Check that looking a key up in the environment yields the expected AST.
assertLookUp :: Environment -> String -> DiyAST -> Assertion
assertLookUp env key expected =
  assertEqual description expected (lookup env key)
  where description = "lookup " ++ show env ++ " " ++ show key
-- Assertions for validating closures.

-- | Fails unless the given AST node is a closure.
assertIsClosure :: DiyAST -> Assertion
assertIsClosure (DiyClosure _ _) = assertBool "is a closure" True
assertIsClosure expr = assertFailure $ show expr ++ " is not a closure"

-- | Fails unless the given AST node is a closure whose function has the
-- expected parameters and body. Previously a non-closure argument blew
-- up with a pattern-match error; it now reports a clean test failure.
assertClosureFunction :: DiyAST -> DiyFunctionParams -> DiyFunctionBody -> Assertion
assertClosureFunction (DiyClosure func _) expectedParams expectedBody = do
  assertEqual "closure function params" expectedParams $ params func
  assertEqual "closure function body" expectedBody $ body func
assertClosureFunction other _ _ =
  assertFailure $ show other ++ " is not a closure"
-- Helpers for checking the result of interpreting an input string,
-- with or without a pre-existing environment.
assertInterpretWithEnvironment :: Environment -> (String, String) -> Assertion
assertInterpretWithEnvironment environment (expected, input) =
  let (actual, _) = interpret input environment
  in assertEqual ("interpret " ++ show input) expected actual

assertInterpretWithoutEnvironment :: (String, String) -> Assertion
assertInterpretWithoutEnvironment =
  assertInterpretWithEnvironment (Environment [])
| joelchelliah/diy-lang-haskell | test/parts/TestHelper.hs | bsd-3-clause | 2,763 | 0 | 10 | 536 | 580 | 311 | 269 | 44 | 1 |
module Plugin where
import GhcPlugins
-- | The Core-plugin entry point GHC looks up by name in this module.
plugin :: Plugin
plugin = defaultPlugin { installCoreToDos = install }

-- | Install no new passes; just trace the existing Core pipeline.
install :: [CommandLineOption] -> [CoreToDo] -> CoreM [CoreToDo]
install _ todo = do
  reinitializeGlobals
  return (pprTraceIt "Hello World" todo)
| isovector/dependent-types | src/Plugin.hs | bsd-3-clause | 265 | 0 | 8 | 48 | 75 | 41 | 34 | 9 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Reduce where
import Obsidian.GCDObsidian
import qualified Obsidian.GCDObsidian.CodeGen.CUDA as CUDA
----------------------------------------------------------------------------
-- DONE: This is too limited (Scalar a).
-- Comes from the sync. Figure out how to generalize
-- DONE: The issue above again raises a questionmark about the "Tuples"
-- in the Exp type
--
-- DONE: Shows an error in the code generation.
-- The writing of the result is not done by just a single thread
-- as it should!
-- Tree reduction: @reduce n f@ performs @n@ pairwise-combine stages,
-- syncing between stages; each stage halves the array with @f@.
reduce :: Syncable Array a =>
          Int -> (a -> a -> a) -> Array a -> Kernel (Array a)
reduce 0 _ = pure id
reduce n f = pure (fmap (uncurry f) . pair) ->- sync ->- reduce (n - 1) f

-- Print the generated CUDA for a 3-stage (8-element) sum reduction.
getReduce = putStrLn $ CUDA.genKernel "reduce" (reduce 3 (+)) (namedArray "apa" 8 :: Array (Exp Int))
| svenssonjoel/GCDObsidian | LicExamples/Reduce.hs | bsd-3-clause | 880 | 0 | 11 | 195 | 192 | 105 | 87 | 10 | 1 |
module T197 where
import Data.Singletons.TH
import Prelude.Singletons
-- Regression test for #197: the singletons machinery must promote
-- fixity declarations for symbolic names defined inside the quote.
$(singletons [d|
  infixl 5 $$:

  ($$:) :: Bool -> Bool -> Bool
  _ $$: _ = False
  |])
| goldfirere/singletons | singletons-base/tests/compile-and-dump/Singletons/T197.hs | bsd-3-clause | 159 | 0 | 7 | 34 | 29 | 18 | 11 | -1 | -1 |
{-# LANGUAGE BangPatterns, FlexibleInstances, OverloadedStrings,
RecordWildCards, TypeSynonymInstances #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
module Main ( main ) where
import Control.Applicative
import Control.Exception (evaluate)
import Control.DeepSeq
import Criterion.Main
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as HM
import Control.Monad (mzero)
import Data.Text (Text)
import qualified Text.CSV.Lazy.ByteString as LazyCsv
import Data.Vector (Vector)
import qualified Data.Vector as V
import Data.Csv
import qualified Data.Csv.Streaming as Streaming
-- | One row of the presidents CSV fixture used by the benchmarks.
-- All fields are strict so decoding cost is paid eagerly rather than
-- hidden in thunks when the benchmarks force the results.
data President = President
    { presidency :: !Int
    , president :: !Text
    , wikipediaEntry :: !ByteString
    , tookOffice :: !ByteString
    , leftOffice :: !ByteString
    , party :: !Text
    , homeState :: !Text
    }
-- | Every field is strict, so matching the constructor already brings
-- the record to normal form; no per-field traversal is required.
instance NFData President where
    rnf (President {}) = ()
-- Positional decoding: a record must have exactly 7 fields, consumed in
-- declaration order.
instance FromRecord President where
    parseRecord v
        | V.length v /= 7 = mzero
        | otherwise       = President
            <$> v .!! 0
            <*> v .!! 1
            <*> v .!! 2
            <*> v .!! 3
            <*> v .!! 4
            <*> v .!! 5
            <*> v .!! 6
-- | Like '(.!)' but with no bounds check; the caller must guarantee the
-- index is in range (the FromRecord instance checks the length first).
(.!!) :: FromField a => Record -> Int -> Parser a
(.!!) v idx = parseField (V.unsafeIndex v idx)
{-# INLINE (.!!) #-}

infixl 9 .!!
-- Positional encoding, mirroring the FromRecord field order.
instance ToRecord President where
    toRecord (President {..}) = record
        [ toField presidency
        , toField president
        , toField wikipediaEntry
        , toField tookOffice
        , toField leftOffice
        , toField party
        , toField homeState
        ]
-- Header-based decoding: fields are looked up by CSV column name.
instance FromNamedRecord President where
    parseNamedRecord m =
        President
            <$> m .: "Presidency"
            <*> m .: "President"
            <*> m .: "Wikipedia Entry"
            <*> m .: "Took office"
            <*> m .: "Left office"
            <*> m .: "Party"
            <*> m .: "Home State"
-- | Header-based encoding; column names must match the
-- 'FromNamedRecord' instance above so round-tripping works.
instance ToNamedRecord President where
    toNamedRecord (President {..}) = namedRecord
        [ "Presidency" .= presidency
        , "President" .= president
        , "Wikipedia Entry" .= wikipediaEntry
        , "Took office" .= tookOffice
        , "Left office" .= leftOffice
        , "Party" .= party
        , "Home State" .= homeState
        ]
-- | Lift a strict 'B.ByteString' into a lazy one as a single chunk.
-- (Added the missing top-level type signature.)
fromStrict :: B.ByteString -> BL.ByteString
fromStrict s = BL.fromChunks [s]
-- Shorthand for the named-decode result map keyed by raw header bytes.
type BSHashMap a = HM.HashMap B.ByteString a

-- Orphan NFData instances so the lazy-csv comparison benchmark can be
-- forced with 'nf'.
-- NOTE(review): matching CSVField {} does not force its fields; if they
-- are lazy this may under-measure lazy-csv's work -- confirm.
instance NFData LazyCsv.CSVField where
    rnf LazyCsv.CSVField {} = ()
    rnf LazyCsv.CSVFieldError {} = ()

instance NFData LazyCsv.CSVError where
    rnf (LazyCsv.IncorrectRow !_ !_ !_ xs) = rnf xs
    rnf (LazyCsv.BlankLine _ _ _ field) = rnf field
    rnf (LazyCsv.FieldError field) = rnf field
    rnf (LazyCsv.DuplicateHeader _ _ s) = rnf s
    rnf LazyCsv.NoData = ()
-- | Benchmark harness: decode/encode the presidents fixtures through
-- the positional, named, streaming, and lazy-csv code paths.
main :: IO ()
main = do
  -- Read both fixtures strictly, then wrap as single-chunk lazy
  -- ByteStrings, which is what the decoders consume.
  !csvData <- fromStrict `fmap` B.readFile "benchmarks/presidents.csv"
  !csvDataN <- fromStrict `fmap` B.readFile
               "benchmarks/presidents_with_header.csv"
  -- NOTE(review): these Right patterns assume the fixtures always
  -- decode; a Left surfaces as a pattern-match failure when forced.
  let (Right !presidents) = V.toList <$> decodePresidents csvData
      (Right (!hdr, !presidentsNV)) = decodePresidentsN csvDataN
      !presidentsN = V.toList presidentsNV
  -- Force the decoded data to normal form up front so the encode
  -- benchmarks do not also pay the decode cost.
  evaluate (rnf [presidents, presidentsN])
  defaultMain [
      bgroup "positional"
      [ bgroup "decode"
        [ bench "presidents/without conversion" $ whnf idDecode csvData
        , bench "presidents/with conversion" $ whnf decodePresidents csvData
        , bgroup "streaming"
          [ bench "presidents/without conversion" $ nf idDecodeS csvData
          , bench "presidents/with conversion" $ nf decodePresidentsS csvData
          ]
        ]
      , bgroup "encode"
        [ bench "presidents/with conversion" $ whnf encode presidents
        ]
      ]
    , bgroup "named"
      [ bgroup "decode"
        [ bench "presidents/without conversion" $ whnf idDecodeN csvDataN
        , bench "presidents/with conversion" $ whnf decodePresidentsN csvDataN
        ]
      , bgroup "encode"
        [ bench "presidents/with conversion" $ whnf (encodeByName hdr) presidentsN
        ]
      ]
    , bgroup "comparison"
      [ bench "lazy-csv" $ nf LazyCsv.parseCSV csvData
      ]
    ]
  where
    -- Monomorphic wrappers pin the result type, so each benchmark
    -- measures one specific decode target.
    decodePresidents :: BL.ByteString -> Either String (Vector President)
    decodePresidents = decode NoHeader
    decodePresidentsN :: BL.ByteString -> Either String (Header, Vector President)
    decodePresidentsN = decodeByName
    decodePresidentsS :: BL.ByteString -> Streaming.Records President
    decodePresidentsS = Streaming.decode NoHeader
    idDecode :: BL.ByteString -> Either String (Vector (Vector B.ByteString))
    idDecode = decode NoHeader
    idDecodeN :: BL.ByteString -> Either String (Header, Vector (BSHashMap B.ByteString))
    idDecodeN = decodeByName
    idDecodeS :: BL.ByteString -> Streaming.Records (Vector B.ByteString)
    idDecodeS = Streaming.decode NoHeader
| mikeizbicki/cassava | benchmarks/Benchmarks.hs | bsd-3-clause | 5,432 | 0 | 20 | 1,785 | 1,322 | 682 | 640 | 131 | 1 |
-- | Fake cabal module for local building
module Paths_shake where
import Data.Version
import System.IO.Unsafe
import System.Directory
import Control.Exception
-- We want getDataFileName to be relative to the current directory even if
-- we issue a change directory command. Therefore, first call caches, future ones read.
-- NOINLINE is required for that caching to be sound: without it GHC may
-- inline 'curdir' and re-run 'getCurrentDirectory' at every use site,
-- observing any directory changes made in between.
{-# NOINLINE curdir #-}
curdir :: String
curdir = unsafePerformIO getCurrentDirectory
-- | Resolve a data file name against the directory that was current the
-- first time 'curdir' was demanded.
getDataFileName :: FilePath -> IO FilePath
getDataFileName x = do
    _ <- evaluate curdir
    return (curdir ++ "/" ++ x)
-- | Fake version, mirroring what a cabal-generated Paths module exports.
version :: Version
version = Version [0,0] []
| nh2/shake | Paths.hs | bsd-3-clause | 588 | 0 | 9 | 98 | 114 | 65 | 49 | 13 | 1 |
----------------------------------------------------------------------------
-- THIS is a MODIFIED VERSION of Text.Parsec.Token
-- Modified to support layout combinators by Tim Sheard 7/27/09
-- Further updated to Parsec 3.1 by Vilhelm Sjoberg 2011-01-13
-- Find modified and added lines by searching for "--MOD"
-- |
-- Module : Text.Parsec.Token
-- Copyright : (c) Daan Leijen 1999-2001, (c) Paolo Martini 2007
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : derek.a.elkins@gmail.com
-- Stability : provisional
-- Portability : non-portable (uses local universal quantification: PolymorphicComponents)
--
-- A helper module to parse lexical elements (tokens). See 'makeTokenParser'
-- for a description of how to use it.
--
-----------------------------------------------------------------------------
{-# LANGUAGE PolymorphicComponents, NoMonomorphismRestriction #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing -fno-warn-unused-do-bind -fno-warn-unused-matches #-}
module LayoutToken
( LanguageDef
, GenLanguageDef (..)
, TokenParser
, GenTokenParser (..)
, makeTokenParser
, LayoutFun (..)
) where
import Data.Char ( isAlpha, toLower, toUpper, isSpace, digitToInt )
import Data.List ( nub, sort )
import Control.Monad.Identity
import Text.Parsec (Column,sourceColumn, setSourceColumn)
import Text.Parsec.Prim
import Text.Parsec.Char
import Text.Parsec.Combinator
import Control.Applicative ((<$>), (<*>))
-----------------------------------------------------------
-- Language Definition
-----------------------------------------------------------
-- | 'GenLanguageDef' specialised to 'String' streams in 'Identity'.
type LanguageDef st = GenLanguageDef String st Identity
-- | The @GenLanguageDef@ type is a record that contains all parameterizable
-- features of the 'Text.Parsec.Token' module. The module 'Text.Parsec.Language'
-- contains some default definitions.
data GenLanguageDef s u m
= LanguageDef {
-- | Describes the start of a block comment. Use the empty string if the
-- language doesn't support block comments. For example \"\/*\".
commentStart :: String,
-- | Describes the end of a block comment. Use the empty string if the
-- language doesn't support block comments. For example \"*\/\".
commentEnd :: String,
-- | Describes the start of a line comment. Use the empty string if the
-- language doesn't support line comments. For example \"\/\/\".
commentLine :: String,
-- | Set to 'True' if the language supports nested block comments.
nestedComments :: Bool,
-- | This parser should accept any start characters of identifiers. For
-- example @letter \<|> char \"_\"@.
identStart :: ParsecT s u m Char,
-- | This parser should accept any legal tail characters of identifiers.
-- For example @alphaNum \<|> char \"_\"@.
identLetter :: ParsecT s u m Char,
-- | This parser should accept any start characters of operators. For
-- example @oneOf \":!#$%&*+.\/\<=>?\@\\\\^|-~\"@
opStart :: ParsecT s u m Char,
-- | This parser should accept any legal tail characters of operators.
-- Note that this parser should even be defined if the language doesn't
-- support user-defined operators, or otherwise the 'reservedOp'
-- parser won't work correctly.
opLetter :: ParsecT s u m Char,
-- | The list of reserved identifiers.
reservedNames :: [String],
-- | The list of reserved operators.
reservedOpNames:: [String],
-- | Set to 'True' if the language is case sensitive.
caseSensitive :: Bool
}
-----------------------------------------------------------
-- A first class module: TokenParser
-----------------------------------------------------------
-- | 'GenTokenParser' specialised to 'String' streams in 'Identity'.
type TokenParser st = GenTokenParser String st Identity
-- | The type of the record that holds lexical parsers that work on
-- @s@ streams with state @u@ over a monad @m@.
data GenTokenParser s u m
= TokenParser {
-- | This lexeme parser parses a legal identifier. Returns the identifier
-- string. This parser will fail on identifiers that are reserved
-- words. Legal identifier (start) characters and reserved words are
-- defined in the 'LanguageDef' that is passed to
-- 'makeTokenParser'. An @identifier@ is treated as
-- a single token using 'try'.
identifier :: ParsecT s u m String,
-- | The lexeme parser @reserved name@ parses @symbol
-- name@, but it also checks that the @name@ is not a prefix of a
-- valid identifier. A @reserved@ word is treated as a single token
-- using 'try'.
reserved :: String -> ParsecT s u m (),
-- | This lexeme parser parses a legal operator. Returns the name of the
-- operator. This parser will fail on any operators that are reserved
-- operators. Legal operator (start) characters and reserved operators
-- are defined in the 'LanguageDef' that is passed to
-- 'makeTokenParser'. An @operator@ is treated as a
-- single token using 'try'.
operator :: ParsecT s u m String,
-- |The lexeme parser @reservedOp name@ parses @symbol
-- name@, but it also checks that the @name@ is not a prefix of a
-- valid operator. A @reservedOp@ is treated as a single token using
-- 'try'.
reservedOp :: String -> ParsecT s u m (),
-- | This lexeme parser parses a single literal character. Returns the
-- literal character value. This parsers deals correctly with escape
-- sequences. The literal character is parsed according to the grammar
-- rules defined in the Haskell report (which matches most programming
-- languages quite closely).
charLiteral :: ParsecT s u m Char,
-- | This lexeme parser parses a literal string. Returns the literal
-- string value. This parsers deals correctly with escape sequences and
-- gaps. The literal string is parsed according to the grammar rules
-- defined in the Haskell report (which matches most programming
-- languages quite closely).
stringLiteral :: ParsecT s u m String,
-- | This lexeme parser parses a natural number (a positive whole
-- number). Returns the value of the number. The number can be
-- specified in 'decimal', 'hexadecimal' or
-- 'octal'. The number is parsed according to the grammar
-- rules in the Haskell report.
natural :: ParsecT s u m Integer,
-- | This lexeme parser parses an integer (a whole number). This parser
-- is like 'natural' except that it can be prefixed with
-- sign (i.e. \'-\' or \'+\'). Returns the value of the number. The
-- number can be specified in 'decimal', 'hexadecimal'
-- or 'octal'. The number is parsed according
-- to the grammar rules in the Haskell report.
integer :: ParsecT s u m Integer,
-- | This lexeme parser parses a floating point value. Returns the value
-- of the number. The number is parsed according to the grammar rules
-- defined in the Haskell report.
float :: ParsecT s u m Double,
-- | This lexeme parser parses either 'natural' or a 'float'.
-- Returns the value of the number. This parsers deals with
-- any overlap in the grammar rules for naturals and floats. The number
-- is parsed according to the grammar rules defined in the Haskell report.
naturalOrFloat :: ParsecT s u m (Either Integer Double),
-- | Parses a positive whole number in the decimal system. Returns the
-- value of the number.
decimal :: ParsecT s u m Integer,
-- | Parses a positive whole number in the hexadecimal system. The number
-- should be prefixed with \"0x\" or \"0X\". Returns the value of the
-- number.
hexadecimal :: ParsecT s u m Integer,
-- | Parses a positive whole number in the octal system. The number
-- should be prefixed with \"0o\" or \"0O\". Returns the value of the
-- number.
octal :: ParsecT s u m Integer,
-- | Lexeme parser @symbol s@ parses 'string' @s@ and skips
-- trailing white space.
symbol :: String -> ParsecT s u m String,
-- | @lexeme p@ first applies parser @p@ and than the 'whiteSpace'
-- parser, returning the value of @p@. Every lexical
-- token (lexeme) is defined using @lexeme@, this way every parse
-- starts at a point without white space. Parsers that use @lexeme@ are
-- called /lexeme/ parsers in this document.
--
-- The only point where the 'whiteSpace' parser should be
-- called explicitly is the start of the main parser in order to skip
-- any leading white space.
--
-- > mainParser = do{ whiteSpace
-- > ; ds <- many (lexeme digit)
-- > ; eof
-- > ; return (sum ds)
-- > }
lexeme :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Parses any white space. White space consists of /zero/ or more
-- occurrences of a 'space', a line comment or a block (multi
-- line) comment. Block comments may be nested. How comments are
-- started and ended is defined in the 'LanguageDef'
-- that is passed to 'makeTokenParser'.
whiteSpace :: ParsecT s u m (),
-- | Lexeme parser @parens p@ parses @p@ enclosed in parenthesis,
-- returning the value of @p@.
parens :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Lexeme parser @braces p@ parses @p@ enclosed in braces (\'{\' and
-- \'}\'), returning the value of @p@.
braces :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Lexeme parser @angles p@ parses @p@ enclosed in angle brackets (\'\<\'
-- and \'>\'), returning the value of @p@.
angles :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Lexeme parser @brackets p@ parses @p@ enclosed in brackets (\'[\'
-- and \']\'), returning the value of @p@.
brackets :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | DEPRECATED: Use 'brackets'.
squares :: forall a. ParsecT s u m a -> ParsecT s u m a,
-- | Lexeme parser |semi| parses the character \';\' and skips any
-- trailing white space. Returns the string \";\".
semi :: ParsecT s u m String,
-- | Lexeme parser @comma@ parses the character \',\' and skips any
-- trailing white space. Returns the string \",\".
comma :: ParsecT s u m String,
-- | Lexeme parser @colon@ parses the character \':\' and skips any
-- trailing white space. Returns the string \":\".
colon :: ParsecT s u m String,
-- | Lexeme parser @dot@ parses the character \'.\' and skips any
-- trailing white space. Returns the string \".\".
dot :: ParsecT s u m String,
-- | Lexeme parser @semiSep p@ parses /zero/ or more occurrences of @p@
-- separated by 'semi'. Returns a list of values returned by
-- @p@.
semiSep :: forall a . ParsecT s u m a -> ParsecT s u m [a],
-- | Lexeme parser @semiSep1 p@ parses /one/ or more occurrences of @p@
-- separated by 'semi'. Returns a list of values returned by @p@.
semiSep1 :: forall a . ParsecT s u m a -> ParsecT s u m [a],
-- | Lexeme parser @commaSep p@ parses /zero/ or more occurrences of
-- @p@ separated by 'comma'. Returns a list of values returned
-- by @p@.
commaSep :: forall a . ParsecT s u m a -> ParsecT s u m [a],
-- | Lexeme parser @commaSep1 p@ parses /one/ or more occurrences of
-- @p@ separated by 'comma'. Returns a list of values returned
-- by @p@.
commaSep1 :: forall a . ParsecT s u m a -> ParsecT s u m [a]
}
-----------------------------------------------------------
-- Given a LanguageDef, create a token parser.
-----------------------------------------------------------
-- | The expression @makeTokenParser language@ creates a 'GenTokenParser'
-- record that contains lexical parsers that are
-- defined using the definitions in the @language@ record.
--
-- The use of this function is quite stylized - one imports the
-- appropiate language definition and selects the lexical parsers that
-- are needed from the resulting 'GenTokenParser'.
--
-- > module Main where
-- >
-- > import Text.Parsec
-- > import qualified Text.Parsec.Token as P
-- > import Text.Parsec.Language (haskellDef)
-- >
-- > -- The parser
-- > ...
-- >
-- > expr = parens expr
-- > <|> identifier
-- > <|> ...
-- >
-- >
-- > -- The lexer
-- > lexer = P.makeTokenParser haskellDef
-- >
-- > parens = P.parens lexer
-- > braces = P.braces lexer
-- > identifier = P.identifier lexer
-- > reserved = P.reserved lexer
-- > ...
makeTokenParser :: (Monad m) => GenLanguageDef String [Column] m -> String -> String -> String
-> (GenTokenParser String [Column] m, LayoutFun String m)
-- MOD: add parameters open,sep,close.
makeTokenParser languageDef open sep close
=(TokenParser{ identifier = identifier
, reserved = reserved
, operator = operator
, reservedOp = reservedOp
, charLiteral = charLiteral
, stringLiteral = stringLiteral
, natural = natural
, integer = integer
, float = float
, naturalOrFloat = naturalOrFloat
, decimal = decimal
, hexadecimal = hexadecimal
, octal = octal
, symbol = symbol
, lexeme = lexeme
, whiteSpace = whiteSpace
, parens = parens
, braces = braces
, angles = angles
, brackets = brackets
, squares = brackets
, semi = semi
, comma = comma
, colon = colon
, dot = dot
, semiSep = semiSep
, semiSep1 = semiSep1
, commaSep = commaSep
, commaSep1 = commaSep1
}--MOD also return the layout combinator!
,LayFun layout)
where
-----------------------------------------------------------
-- Bracketing
-----------------------------------------------------------
parens = between (symbol "(") (symbol ")")
braces = between (symbol "{") (symbol "}")
angles = between (symbol "<") (symbol ">")
brackets = between (symbol "[") (symbol "]")
semi = symbol ";"
comma = symbol ","
dot = symbol "."
colon = symbol ":"
commaSep p = sepBy p comma
semiSep p = sepBy p semi
commaSep1 p = sepBy1 p comma
semiSep1 p = sepBy1 p semi
-----------------------------------------------------------
-- Chars & Strings
-----------------------------------------------------------
charLiteral = lexeme (between (char '\'')
(char '\'' <?> "end of character")
characterChar )
<?> "character"
characterChar = charLetter <|> charEscape
<?> "literal character"
charEscape = do{ char '\\'; escapeCode }
charLetter = satisfy (\c -> (c /= '\'') && (c /= '\\') && (c > '\026'))
stringLiteral = lexeme (
do{ str <- between (char '"')
(char '"' <?> "end of string")
(many stringChar)
; return (foldr (maybe id (:)) "" str)
}
<?> "literal string")
stringChar = do{ c <- stringLetter; return (Just c) }
<|> stringEscape
<?> "string character"
stringLetter = satisfy (\c -> (c /= '"') && (c /= '\\') && (c > '\026'))
stringEscape = do{ char '\\'
; do{ escapeGap ; return Nothing }
<|> do{ escapeEmpty; return Nothing }
<|> do{ esc <- escapeCode; return (Just esc) }
}
escapeEmpty = char '&'
escapeGap = do{ many1 space
; char '\\' <?> "end of string gap"
}
-- escape codes
escapeCode = charEsc <|> charNum <|> charAscii <|> charControl
<?> "escape code"
charControl = do{ char '^'
; code <- upper
; return (toEnum (fromEnum code - fromEnum 'A'))
}
charNum = do{ code <- decimal
<|> do{ char 'o'; number 8 octDigit }
<|> do{ char 'x'; number 16 hexDigit }
; return (toEnum (fromInteger code))
}
charEsc = choice (map parseEsc escMap)
where
parseEsc (c,code) = do{ char c; return code }
charAscii = choice (map parseAscii asciiMap)
where
parseAscii (asc,code) = try (do{ string asc; return code })
-- escape code tables
escMap = zip "abfnrtv\\\"\'" "\a\b\f\n\r\t\v\\\"\'"
asciiMap = zip (ascii3codes ++ ascii2codes) (ascii3 ++ ascii2)
ascii2codes = ["BS","HT","LF","VT","FF","CR","SO","SI","EM",
"FS","GS","RS","US","SP"]
ascii3codes = ["NUL","SOH","STX","ETX","EOT","ENQ","ACK","BEL",
"DLE","DC1","DC2","DC3","DC4","NAK","SYN","ETB",
"CAN","SUB","ESC","DEL"]
ascii2 = ['\BS','\HT','\LF','\VT','\FF','\CR','\SO','\SI',
'\EM','\FS','\GS','\RS','\US','\SP']
ascii3 = ['\NUL','\SOH','\STX','\ETX','\EOT','\ENQ','\ACK',
'\BEL','\DLE','\DC1','\DC2','\DC3','\DC4','\NAK',
'\SYN','\ETB','\CAN','\SUB','\ESC','\DEL']
-----------------------------------------------------------
-- Numbers
-----------------------------------------------------------
naturalOrFloat = lexeme natFloat <?> "number"
float = lexeme floating <?> "float"
integer = lexeme int <?> "integer"
natural = lexeme nat <?> "natural"
-- floats
floating = decimal >>= fractExponent
natFloat = (char '0' >> zeroNumFloat) <|> decimalFloat
zeroNumFloat = Left <$> (hexadecimal <|> octal)
<|> decimalFloat
<|> fractFloat 0
<|> return (Left 0)
decimalFloat = do{ n <- decimal
; option (Left n) (fractFloat n)
}
fractFloat n = Right <$> fractExponent n
fractExponent n = do{ fract <- fraction
; expo <- option 1.0 exponent'
; return ((fromInteger n + fract)*expo)
}
<|> (fromInteger n *) <$> exponent'
fraction = do{ char '.'
; digits <- many1 digit <?> "fraction"
; return (foldr op 0.0 digits)
}
<?> "fraction"
where
op d f = (f + fromIntegral (digitToInt d))/10.0
exponent' = do{ oneOf "eE"
; f <- sign
; e <- decimal <?> "exponent"
; return (power (f e))
}
<?> "exponent"
where
power e | e < 0 = 1.0/power(-e)
| otherwise = fromInteger (10^e)
-- integers and naturals
int = do{ f <- lexeme sign
; n <- nat
; return (f n)
}
sign = (char '-' >> return negate)
<|> (char '+' >> return id)
<|> return id
nat = zeroNumber <|> decimal
zeroNumber = do{ char '0'
; hexadecimal <|> octal <|> decimal <|> return 0
}
<?> ""
decimal = number 10 digit
hexadecimal = do{ oneOf "xX"; number 16 hexDigit }
octal = do{ oneOf "oO"; number 8 octDigit }
number base baseDigit
= do{ digits <- many1 baseDigit
; let n = foldl (\x d -> base*x + toInteger (digitToInt d)) 0 digits
; seq n (return n)
}
-----------------------------------------------------------
-- Operators & reserved ops
-----------------------------------------------------------
reservedOp name =
lexeme $ try $
do{ string name
; notFollowedBy (opLetter languageDef) <?> ("end of " ++ show name)
}
operator =
lexeme $ try $
do{ name <- oper
; if isReservedOp name
then unexpected ("reserved operator " ++ show name)
else return name
}
oper = (:) <$> opStart languageDef <*> many (opLetter languageDef)
<?> "operator"
isReservedOp = isReserved (sort (reservedOpNames languageDef))
-----------------------------------------------------------
-- Identifiers & Reserved words
-----------------------------------------------------------
reserved name =
lexeme $ try $
do{ caseString name
; notFollowedBy (identLetter languageDef) <?> ("end of " ++ show name)
}
caseString name
| caseSensitive languageDef = string name
| otherwise = do{ walk name; return name }
where
walk [] = return ()
walk (c:cs) = do{ caseChar c <?> msg; walk cs }
caseChar c | isAlpha c = char (toLower c) <|> char (toUpper c)
| otherwise = char c
msg = show name
identifier =
lexeme $ try $
do{ name <- ident
; if isReservedName name
then unexpected ("reserved word " ++ show name)
else return name
}
ident
= do{ c <- identStart languageDef
; cs <- many (identLetter languageDef)
; return (c:cs)
}
<?> "identifier"
isReservedName name
= isReserved theReservedNames caseName
where
caseName | caseSensitive languageDef = name
| otherwise = map toLower name
isReserved names name
= scan names
where
scan [] = False
scan (r:rs) = case compare r name of
LT -> scan rs
EQ -> True
GT -> False
theReservedNames
| caseSensitive languageDef = sortedNames
| otherwise = map (map toLower) sortedNames
where
sortedNames = sort (reservedNames languageDef)
-----------------------------------------------------------
-- White space & symbols
-----------------------------------------------------------
symbol name
= lexeme (string name)
lexeme p
= do{ x <- p; whiteSpace; return x }
--whiteSpace
-- MOD: this function renamed from whiteSpace to ws, and changed to return the matched string.
ws
| noLine && noMulti = many (simpleSpace <?> "")
| noLine = many (simpleSpace <|> multiLineComment <?> "")
| noMulti = many (simpleSpace <|> oneLineComment <?> "")
| otherwise = many (simpleSpace <|> oneLineComment <|> multiLineComment <?> "")
where
noLine = null (commentLine languageDef)
noMulti = null (commentStart languageDef)
--simpleSpace = skipMany1 (eoln ws <|> satisfy isSpace)
--MOD simpleSpace WAS MODIFIED FOR LAYOUT TOKEN PARSERS by Tim Sheard 7/27/09
simpleSpace =
many1 (satisfy isSpace)
-- MOD return matched string
oneLineComment =
do{ xs <- try (string (commentLine languageDef))
; ys <- many (satisfy (/= '\n'))
; return (xs++ys)
}
-- MOD: return matched string
multiLineComment = (++)
<$> try (string (commentStart languageDef))
<*> inComment
inComment
| nestedComments languageDef = inCommentMulti
| otherwise = inCommentSingle
-- MOD: return matched string
inCommentMulti
= try (string (commentEnd languageDef))
<|> (++) <$> multiLineComment <*> inCommentMulti
<|> (++) <$> many1 (noneOf startEnd) <*> inCommentMulti
<|> (:) <$> oneOf startEnd <*> inCommentMulti
<?> "end of comment"
where
startEnd = nub $ commentEnd languageDef ++ commentStart languageDef
inCommentSingle
= try (string (commentEnd languageDef))
<|> (++) <$> many1 (noneOf startEnd) <*> inCommentSingle
<|> (:) <$> oneOf startEnd <*> inCommentSingle
<?> "end of comment"
where
startEnd = nub $ commentEnd languageDef ++ commentStart languageDef
--MOD --------------------------------------------------------------------
-- THE FOLLOWING WAS ADDED FOR LAYOUT TOKEN PARSERS by Tim Sheard 7/27/09
layoutSep = symbol sep <?> ("inserted layout separator ("++sep++")")
layoutEnd = symbol close <?> ("inserted layout closing symbol("++close++")")
layoutBegin = symbol open <?> ("layout opening symbol ("++open++")")
layout p stop =
(do { try layoutBegin; xs <- sepBy p (symbol ";")
; layoutEnd <?> "explicit layout closing brace"
; stop; return xs}) <|>
(indent >> align p stop)
align p stop = ormore <|> zero
where zero = do { stop; option "" layoutSep; undent; return []}
ormore = do { x <- p
; whiteSpace
; (do { try layoutSep; xs <- align p stop; return (x:xs)}) <|>
(do { try layoutEnd; stop; return [x]}) <|>
-- removing indentation happens automatically
-- in function whiteSpace
(do { stop; undent; return [x]})}
whiteSpace =
do { (col1,_,_) <- getInfo
; wsChars <- ws
; (col2,tabs,more) <- getInfo
; case (wsChars,tabs,more,compare col2 col1) of
([],_,_,_) -> return ()
(_,[],_,_) -> return ()
(_,_,[],_) -> -- No more input, close all the pending layout with '}'
setInfo (col2,[],concatMap (const close) tabs)
(cs,p:ps,_,EQ) -> setInfo (col2-1,tabs,sep++more)
(cs,p:ps,_,LT) -> let adjust (col,[],add) = setInfo(col,[],rev add more)
adjust (col,p:ps,add)
| col2 < p = adjust(col-1,ps,close:add)
| col2== p = setInfo(col-1,p:ps,rev (sep:add) more)
| col2 > p = setInfo(col,p:ps,rev add more)
rev ss xs = foldl (flip (++)) xs ss
in adjust (col2,p:ps,[])
(cs,p:ps,_,GT) -> return ()
}
getInfo = (,,) <$> (sourceColumn <$> getPosition) <*> getState <*> getInput
setInfo (col,tabs,tokens) =
do { p <- getPosition
; setPosition (setSourceColumn p col)
; setState tabs
; setInput tokens }
indent = setState =<< ((:) <$> sourceColumn <$> getPosition <*> getState)
undent =
do { (_:ps) <- getState
; setState ps
}
_eoln whiteSpace =
do { c <- satisfy (=='\n') -- c == '\n' to succeed
; (col,tabs@(p:ps),input) <- getInfo
; whiteSpace -- this may screw up the tabs,
-- but use the old tabs, but the
-- new column (col2) and input (cs)
; (col2,_,cs) <- getInfo
; case cs of
[] -> -- No more input, close all the pending layout with '}'
setInfo (col2,[],map (const '}') tabs)
_ -> case compare col2 p of
EQ -> setInfo (col2-1,tabs,';':cs)
GT -> setInfo (col2,tabs,cs)
LT -> let adjust prefix cs column [] = setInfo (column,[],rev prefix cs)
adjust prefix cs column (tabs @ (p:ps))
| col2==p = setInfo (column-1,tabs,rev (';':prefix) cs)
| col2<p = adjust ('}':prefix) cs (column - 1) ps
| col2>p = setInfo (column,tabs,rev prefix cs)
rev xs ys = foldl (flip (:)) ys xs
in adjust [] cs col2 tabs
; return '\n' }
-- | The layout combinator returned by 'makeTokenParser' alongside the
-- token parser: @layout p stop@ parses a layout-governed block of @p@s,
-- threading the indentation stack through the @[Column]@ user state.
data LayoutFun s m =
  LayFun (forall a t. ParsecT s [Column] m a
          -> ParsecT s [Column] m t
          -> ParsecT s [Column] m [a])
--MOD --------------------------------------------------------------------
| jonsterling/Luitzen | src/LayoutToken.hs | bsd-3-clause | 29,965 | 17 | 30 | 10,496 | 5,584 | 3,048 | 2,536 | 357 | 13 |
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
module Monad where
import Control.Monad.State.Strict
import System.Console.Haskeline
import System.Directory
import System.FilePath
import CSPM
import Util.Exception
import Util.PrettyPrint
-- | State threaded through an interactive checker session.
data ICheckerState = ICheckerState {
        settingsDirectory :: FilePath,      -- ^ Per-user application settings directory.
        cspmSession :: CSPMSession,         -- ^ The active CSPM session.
        currentFilePath :: Maybe FilePath   -- ^ The file currently loaded, if any.
    }
-- | Build the initial checker state, ensuring the per-user settings
-- directory and its @interactive@ subdirectory exist on disk.
initICheckerState :: IO ICheckerState
initICheckerState = do
    dir <- getAppUserDataDirectory "cspmchecker"
    createDirectoryIfMissing True (dir </> "interactive")
    session <- newCSPMSession
    return (ICheckerState dir session Nothing)
-- | Discard the current CSPM session and start a fresh one.
resetCSPM :: IChecker ()
resetCSPM = do
    fresh <- liftIO newCSPMSession
    modify (\st -> st { cspmSession = fresh })
-- | The interactive checker monad: 'ICheckerState' threaded over IO.
type IChecker = StateT ICheckerState IO

-- | Run a checker action from the given initial state, returning its
-- result and discarding the final state. 'evalStateT' is exactly this,
-- replacing the manual @runStateT a st >>= return . fst@ shuffle.
runIChecker :: ICheckerState -> IChecker a -> IO a
runIChecker st a = evalStateT a st
-- | Project a value out of the current checker state.
getState :: (ICheckerState -> a) -> IChecker a
getState f = gets f

-- | Apply a transformation to the current checker state.
modifyState :: (ICheckerState -> ICheckerState) -> IChecker ()
modifyState f = modify f
-- | CSPM operations over the bare checker monad. Warnings cannot be
-- printed here, so reaching 'handleWarnings' in this instance is a bug
-- (they are handled in the 'InputT' instance below instead).
instance CSPMMonad IChecker where
    getSession = gets cspmSession
    setSession s = modify (\ st -> st { cspmSession = s })
    handleWarnings = panic "Cannot handle warnings here in a pure IChecker Monad"
-- | CSPM operations lifted through Haskeline's 'InputT', where terminal
-- output is available, so warnings are pretty-printed to the user.
instance CSPMMonad (InputT IChecker) where
    setSession = lift . setSession
    getSession = lift getSession
    handleWarnings ms = printError $ show $ prettyPrint ms
-- | Print a message in red via ANSI escape codes.
-- NOTE(review): the \STX bytes presumably delimit the zero-width escape
-- sequences for Haskeline's width accounting — confirm.
printError :: String -> InputT IChecker ()
printError msg = outputStrLn ("\ESC[1;31m\STX" ++ msg ++ "\ESC[0m\STX")
| sashabu/libcspm | cspmchecker/src/InteractiveChecker/Monad.hs | bsd-3-clause | 1,607 | 0 | 11 | 292 | 422 | 222 | 200 | 40 | 1 |
module Solr.Query.Filter
( FilterParams
, cache
, cost
, locals
) where
import Solr.Query.Filter.Internal
| Sentenai/solr-query | src/Solr/Query/Filter.hs | bsd-3-clause | 117 | 0 | 4 | 25 | 28 | 19 | 9 | 6 | 0 |
-- | Provides an easy way to combine several pages in a list. The applications
-- are obvious:
--
-- * A post list on a blog
--
-- * An image list in a gallery
--
-- * A sitemap
--
module Hakyll.Web.Page.List
( setFieldPageList
, pageListCompiler
, chronological
, recentFirst
, sortByBaseName
) where
import Control.Arrow ((>>>), arr)
import Data.List (sortBy)
import Data.Monoid (Monoid, mconcat)
import Data.Ord (comparing)
import System.FilePath (takeBaseName)
import Hakyll.Core.Compiler
import Hakyll.Core.Identifier
import Hakyll.Core.Identifier.Pattern
import Hakyll.Web.Page
import Hakyll.Web.Page.Metadata
import Hakyll.Web.Template
-- | Set a field of a page to a listing of pages: the selected pages are
-- ordered, rendered through the template, concatenated, and stored
-- under the given key.
--
setFieldPageList :: ([Page String] -> [Page String])
                 -- ^ Determines list order
                 -> Identifier Template
                 -- ^ Applied to every page
                 -> String
                 -- ^ Key indicating which field should be set
                 -> Pattern (Page String)
                 -- ^ Selects pages to include in the list
                 -> Compiler (Page String) (Page String)
                 -- ^ Compiler that sets the page list in a field
setFieldPageList sort template key pattern =
    requireAllA pattern $ setFieldA key $ pageListCompiler sort template
-- | Create a single rendering of a list of pages: order them, apply the
-- template to each, then concatenate the rendered bodies.
--
pageListCompiler :: ([Page String] -> [Page String]) -- ^ Determine list order
                 -> Identifier Template              -- ^ Applied to pages
                 -> Compiler [Page String] String    -- ^ Compiles page list
pageListCompiler order tpl =
    arr order >>> applyTemplateToList tpl >>> arr concatPages
-- | Apply one template to every page in a list.
--
applyTemplateToList :: Identifier Template
                    -> Compiler [Page String] [Page String]
applyTemplateToList tpl =
    require tpl $ \pages template -> map (applyTemplate template) pages
-- | Concatenate the bodies of a page list into one monoidal value.
--
concatPages :: Monoid m => [Page m] -> m
concatPages pages = mconcat (map pageBody pages)
-- | Sort pages chronologically. Relies on the conventional
-- @year-month-day-title.extension@ naming scheme of Hakyll pages, so
-- lexicographic order of the base file name is date order.
--
chronological :: [Page a] -> [Page a]
chronological = sortBy byDate
  where
    byDate = comparing (takeBaseName . getField "path")
-- | Newest pages first: 'chronological', reversed.
--
recentFirst :: [Page a] -> [Page a]
recentFirst pages = reverse (chronological pages)
-- | Deprecated alias for 'chronological' (the historical name: pages
-- sort by their base file name, which begins with the date).
--
sortByBaseName :: [Page a] -> [Page a]
sortByBaseName = chronological
{-# DEPRECATED sortByBaseName "Use chronological" #-}
| sol/hakyll | src/Hakyll/Web/Page/List.hs | bsd-3-clause | 2,674 | 0 | 11 | 671 | 523 | 295 | 228 | 42 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DataKinds #-}
--------------------------------------------------------------------------------
module Generics.Deriving.Zipper.Trace (
Zipper(..),
ZipperT,
runZipperT,
ZipperM,
runZipperM,
Dir(..),
Path(..),
-- *
enter,
leave,
-- *
up,
down,
downu,
downl,
move,
moveu,
movel,
-- *
get,
set,
modify,
-- *
Loc,
-- *
Generic,
Typeable,
) where
--------------------------------------------------------------------------------
import Generics.Deriving.Util (Dir(..), dir)
import Generics.Deriving.Zipper.Base hiding (enter, leave, up, down, move, get, set, modify)
import qualified Generics.Deriving.Zipper.Base as Base
import Data.Functor.Identity
import Control.Applicative (Applicative)
import Control.Monad (liftM)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Error
import Control.Monad.Trans.State.Lazy hiding (get, modify)
import qualified Control.Monad.Trans.State.Lazy as State (get, modify)
--------------------------------------------------------------------------------
-- | One recorded zipper movement: a descent into a child or a sideways
-- move, tagged with a direction and a human-readable label.
data Step = Down Dir String | Move Dir String
  deriving Show

-- | The trace of movements performed so far.
type Path = [Step]
-- | Difference-list representation of 'Path', for cheap appends.
type PathFun = Path -> Path

-- | 'ErrorT' requires an 'Error' instance for the error type; the
-- "no message" error is the identity on traces.
instance Error (Path -> Path) where
  noMsg = id
--------------------------------------------------------------------------------
-- | A zipper computation that records its movement trace as state and,
-- on failure, aborts carrying the trace accumulated so far.
newtype ZipperT m a = ZipperT { unZipperT :: StateT PathFun (ErrorT PathFun m) a }
  deriving (Functor, Applicative, Monad)

-- | Run a zipper computation: 'Left' the movement trace on failure,
-- 'Right' the result on success. ('liftM' rather than 'fmap' keeps the
-- constraint at @Monad m@ for pre-AMP compilers.)
runZipperT :: Monad m => ZipperT m a -> m (Either Path a)
runZipperT z = liftM changeResults (runErrorT (runStateT (unZipperT z) id))
  where
    -- Failure: materialise the difference list by applying it to [].
    -- Success: keep the result, drop the final trace.
    changeResults = either (Left . ($ [])) (Right . fst)
-- | Zipper computations with no underlying effects.
type ZipperM = ZipperT Identity

-- | Run a pure zipper computation.
runZipperM :: ZipperM a -> Either Path a
runZipperM z = runIdentity (runZipperT z)
--------------------------------------------------------------------------------
-- | Append a step to the trace (state is kept as a difference list).
z_log :: Monad m => Step -> ZipperT m ()
z_log p = ZipperT $ State.modify (. (:) p)
-- | Remove a step from the trace when moving up: composes 'tail' onto
-- the accumulated path function.
-- NOTE(review): this drops the head of the materialised trace, not the
-- last-appended step — confirm that is the intended semantics.
z_log_up :: Monad m => ZipperT m ()
z_log_up = ZipperT $ State.modify (tail .)
-- | Abort, reporting the trace accumulated so far as the error.
z_error :: Monad m => ZipperT m a
z_error = ZipperT $ State.get >>= lift . throwError
-- | Unwrap a 'Just'; fail with the current trace on 'Nothing'.
z_check :: Monad m => Maybe a -> ZipperT m a
z_check = maybe z_error return
--------------------------------------------------------------------------------
-- | Move focus to the parent node, removing the corresponding entry
-- from the movement trace. Total: the context type guarantees a parent.
up :: (Zipper a, Zipper b, Monad m) => Loc a r (b ': c) -> ZipperT m (Loc b r c)
up loc = z_log_up >> return (Base.up loc)
-- | Descend along @dir@, recording the step with an explicit label;
-- fails (reporting the trace) when no such child exists.
downl :: (Zipper a, Zipper b, Monad m) => Dir -> proxy b -> String -> Loc a r c -> ZipperT m (Loc b r (a ': c))
downl dir _ msg loc = do
  z_log (Down dir msg)
  z_check (Base.down dir loc)
-- | Descend, recording the step with an "<unknown>" label.
downu :: (Zipper a, Zipper b, Monad m) => Dir -> proxy b -> Loc a r c -> ZipperT m (Loc b r (a ': c))
downu dir proxy = downl dir proxy "<unknown>"
-- | Descend, recording the step with the proxy's 'show' output.
down :: (Zipper a, Zipper b, Show (proxy b), Monad m) => Dir -> proxy b -> Loc a r c -> ZipperT m (Loc b r (a ': c))
down dir proxy loc = downl dir proxy (show proxy) loc
-- | Sideways move, recording the step with an explicit label; fails
-- (reporting the trace) when no sibling exists in that direction.
movel :: (Zipper a, Zipper b, Monad m) => Dir -> proxy b -> String -> Loc a r (c ': cs) -> ZipperT m (Loc b r (c ': cs))
movel dir _ msg loc = do
  z_log (Move dir msg)
  z_check (Base.move dir loc)

-- | Sideways move, recorded with an "<unknown>" label.
moveu :: (Zipper a, Zipper b, Monad m) => Dir -> proxy b -> Loc a r (c ': cs) -> ZipperT m (Loc b r (c ': cs))
moveu dir proxy = movel dir proxy "<unknown>"

-- | Sideways move, recorded with the proxy's 'show' output.
move :: (Zipper a, Zipper b, Show (proxy b), Monad m) => Dir -> proxy b -> Loc a r (c ': cs) -> ZipperT m (Loc b r (c ': cs))
move dir proxy = movel dir proxy (show proxy)
--------------------------------------------------------------------------------
-- | Begin a zipper session focused on the root of a value.
enter :: (Zipper a, Monad m) => a -> ZipperT m (Loc a a '[])
enter x = return (Base.enter x)

-- | End a zipper session, rebuilding the complete structure.
leave :: (Zipper a, Monad m) => Loc a r c -> ZipperT m r
leave loc = return (Base.leave loc)
--------------------------------------------------------------------------------
-- | Read the value at the current focus.
get :: Monad m => Loc a r c -> ZipperT m a
get loc = return (Base.get loc)

-- | Replace the value at the current focus.
set :: Monad m => a -> Loc a r c -> ZipperT m (Loc a r c)
set x loc = return (Base.set x loc)

-- | Transform the value at the current focus.
modify :: Monad m => (a -> a) -> Loc a r c -> ZipperT m (Loc a r c)
modify f loc = return (Base.modify f loc)
| spl/generic-deriving-extras | src/Generics/Deriving/Zipper/Trace.hs | bsd-3-clause | 4,144 | 0 | 14 | 780 | 1,664 | 886 | 778 | 89 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLists #-}
-- | Static analysis of Lua code; each AST node is analyzed in isolation.
module Static
( staticAnalysis
, StyleGuide(..)
, Suggestion(..)
, showSuggestion
) where
import HasPos
import Trav
import Control.Monad.Reader
import Control.Monad.Writer
import Data.DList (DList)
import qualified Data.DList as DL
import Data.Loc
import Data.Sequence (Seq, ViewL(..), ViewR(..), viewl, viewr)
import qualified Data.Sequence as Seq
import Data.Text (Text)
import qualified Data.Text as T
import Lens.Micro
import Language.Lua.Parser
import Language.Lua.Syntax
import Language.Lua.Token
-- | A suggestion to be output: a severity paired with the message text.
data Suggestion = Suggestion Severity Text

-- | Render a suggestion as @SEVERITY | message@.
showSuggestion :: Suggestion -> Text
showSuggestion (Suggestion s t) = showSeverity s <> " | " <> t
-- | Message severity, ordered from mildest to most serious.
data Severity = Style | Warning | Error
    deriving (Eq, Ord)

-- | Fixed-width (7-character) rendering of a severity, so that the
-- separator bars in suggestion output line up.
showSeverity :: Severity -> Text
showSeverity sev = T.pack $ case sev of
    Style   -> "Style  "
    Warning -> "Warning"
    Error   -> "Error  "
-- | Tunable style parameters consulted by the checks.
data StyleGuide = StyleGuide
    { styleGuideIndent :: !Int  -- ^ Expected indentation width, in spaces.
    }
-- | Static analysis monad. It has a style guide environment and outputs
-- suggestions (accumulated in a 'DList' to keep appends cheap).
type Static a = ReaderT StyleGuide (Writer (DList Suggestion)) a

-- | Run every per-node check over a parsed chunk and collect the
-- resulting suggestions in traversal order.
staticAnalysis :: StyleGuide -> Block NodeInfo -> [Suggestion]
staticAnalysis styleGuide block =
    DL.toList (execWriter (runReaderT (traverseLuaChunk callbacks block) styleGuide))
  where
    -- Only blocks, statements, return statements, and fields carry
    -- checks; every other node kind is accepted as-is.
    callbacks = Callbacks
        { onIdent            = const ok
        , onIdentList        = const ok
        , onIdentList1       = const ok
        , onBlock            = staticBlock
        , onStatement        = staticStatement
        , onReturnStatement  = staticReturnStatement
        , onFunctionName     = const ok
        , onVariable         = const ok
        , onVariableList1    = const ok
        , onExpression       = const ok
        , onExpressionList   = const ok
        , onExpressionList1  = const ok
        , onPrefixExpression = const ok
        , onFunctionCall     = const ok
        , onFunctionArgs     = const ok
        , onFunctionBody     = const ok
        , onTableConstructor = const ok
        , onField            = staticField
        , onFieldList        = const ok
        , onBinop            = const ok
        , onUnop             = const ok
        }
-- | Emit a style-severity suggestion at the node's position.
style :: HasPos a => Text -> a -> Static ()
style = suggest Style
-- | Emit a warning-severity suggestion at the node's position.
warn :: HasPos a => Text -> a -> Static ()
warn = suggest Warning
-- | Emit an error-severity suggestion at the node's position.
err :: HasPos a => Text -> a -> Static ()
err = suggest Error
-- | Emit a suggestion, appending the node's source position to the
-- message. (The list literal is a 'DList' via OverloadedLists.)
suggest :: HasPos a => Severity -> Text -> a -> Static ()
suggest s t p = tell [Suggestion s $ t <> ": " <> pshow p]
  where
    pshow :: HasPos a => a -> Text
    pshow = T.pack . displayPos . firstPos
-- | Accept the node: emit no suggestions.
ok :: Static ()
ok = return ()

-- | The configured indentation width from the style-guide environment.
indent :: Static Int
indent = fmap styleGuideIndent ask
--------------------------------------------------------------------------------
-- Static checks
-- Checks:
-- * WARNING if there are no statements
-- * STYLE if all statements do not begin at the same column.
-- | Block-level checks: every statement (and the trailing return
-- statement, if present) must start at the column of the first one.
staticBlock :: Block NodeInfo -> Static ()
staticBlock (Block info [] Nothing) = warn "Empty block" info
staticBlock (Block _ [] (Just _)) = ok
staticBlock (Block info (s:ss) mr) = do
    let pos = firstPos s
    forM_ ss $ \s' -> do
        let pos' = firstPos s'
        when (posCol pos /= posCol pos') $
            style ("Statement should begin at column " <> tshow (posCol pos)) pos'
    -- Same alignment check for the trailing return statement.
    case mr of
        Just r -> do
            let pos' = firstPos r
            when (posCol pos /= posCol pos') $
                style ("Statement should begin at column " <> tshow (posCol pos)) pos'
        Nothing -> ok
-- | Per-statement checks. Each clause's checks are documented in the
-- comments directly above it; clauses that end in 'ok' are deliberately
-- unchecked. Position arithmetic throughout assumes single-line tokens
-- (column comparisons only).
staticStatement :: Statement NodeInfo -> Static ()
staticStatement (EmptyStmt _) = ok
-- Checks:
-- * ERROR if length vs < length es
-- * STYLE if there is not one space around the '='
staticStatement (Assign info (VariableList1 vinfo vs) (ExpressionList1 einfo es)) = do
    let vlen = length vs
        elen = length es
    when (vlen < elen) $
        err ("Unused expression(s) in assignment: found " <> vstr vlen <> estr elen) info
    asgnStyle (info^.nodeTokens) (vinfo^.nodeTokens) (einfo^.nodeTokens)
  where
    vstr 1 = "1 variable and "
    vstr n = tshow n <> " variables and "
    estr 1 = "1 expression"
    estr n = tshow n <> " expressions"
staticStatement (FunCall _ _) = ok
-- Checks:
-- STYLE if there is any whitespace between either '::' and the identifier
staticStatement (Label info ident) = do
    -- A label is exactly three tokens: '::', the identifier, '::'.
    let [L (Loc _ pos1) _, L (Loc pos2 pos3) _, L (Loc pos4 _) _] = info^.nodeTokens
    when (posCol pos1 + 1 /= posCol pos2 ||
          posCol pos3 + 1 /= posCol pos4) $
        warn "Improper label whitespace (no spaces before/after '::' allowed)" pos2
staticStatement (Break _) = ok
-- Checks:
-- ERROR if this node exists at all
-- STYLE if there is more than one space after 'goto'
staticStatement (Goto info ident) = do
    err "Edsger Dijkstra (March 1968). \"Go To Statement Considered Harmful\". Communications of the ACM 11 (3): 147-148." info
    let pos1 = firstEndPos info
        pos2 = firstPos ident
    when (posCol pos1 + 2 /= posCol pos2) $
        style "Unnecessary whitespace after 'goto'" info
-- Checks:
-- * The block is an inner block
staticStatement (Do info block) = do
    case block of
        Block _ [] Nothing -> ok -- don't bother style checking the "do end"
        _ -> innerBlockStyleCheck (firstPos info) (lastStartPos info) block "do" "end"
-- Checks:
-- * STYLE if there is not one space after 'while'
-- * STYLE if there is not one space before 'do'
-- * STYLE if 'while' and 'do' are not on the same line
-- * The block is an inner block
staticStatement (While info exp block) = do
    let whilePos    = firstPos info
        whileEndPos = firstEndPos info
        condPos     = firstPos exp
        condEndPos  = lastPos exp
        doPos       = firstPos $ (info^.nodeTokens) ! (1 + length (exp^.ann.nodeTokens))
        endPos      = lastStartPos info
    when (posCol whileEndPos + 2 /= posCol condPos) $
        style "Unnecessary whitespace after 'while'" whilePos
    -- Don't want to double-warn when both the line and column of 'do' are incorrect.
    if posLine whilePos /= posLine doPos
        then style "Unnecessary newline before 'do'" doPos
        else when (posCol condEndPos + 2 /= posCol doPos) $
                 style "Unnecessary whitespace before 'do'" doPos
    innerBlockStyleCheck whilePos endPos block "while" "end"
-- Checks:
-- * STYLE if there is not one space after 'until'
-- * STYLE if the condition and 'until' are not on the same line
-- * The block is an inner block
staticStatement (Repeat info block exp) = do
    let repeatPos   = firstPos info
        untilToken  = (info^.nodeTokens) ! (1 + length (block^.ann^.nodeTokens))
        untilPos    = firstPos untilToken
        untilEndPos = lastPos untilToken
        condPos     = firstPos exp
    innerBlockStyleCheck repeatPos untilPos block "repeat" "until"
    -- Don't want to double warn when both the line and column of the expression
    -- are incorrect.
    if posLine untilPos /= posLine condPos
        then style "Unnecessary newline after 'until'" untilPos
        else when (posCol untilEndPos + 2 /= posCol condPos) $
                 style "Unnecessary whitespace after 'until'" untilPos
staticStatement (If info ebs mb) = ok
-- Checks:
-- * STYLE if 'for' and 'do' are not on the same line
-- * The block is an inner block
-- TODO: Check whitespace around '=' and ','
staticStatement (For info x e1 e2 me3 block) = do
    let forPos = firstPos info
        -- Skip 'for', ident, '=', and the expression tokens (plus the
        -- optional step expression and its comma) to find 'do'.
        doPos  = firstPos $
                     (info^.nodeTokens) ! (4 + length (e1^.ann.nodeTokens)
                                             + length (e2^.ann.nodeTokens)
                                             + maybe 0 (\e3 -> 1 + length (e3^.ann.nodeTokens)) me3)
        endPos = lastStartPos info
    when (posLine forPos /= posLine doPos) $
        style "Unnecessary newline before 'do'" doPos
    innerBlockStyleCheck forPos endPos block "for" "end"
-- Checks:
-- * STYLE if 'for' and 'do' are not on the same line
-- * The block is an inner block
-- TODO: Check whitespace around 'in' and ','
staticStatement (ForIn info is es block) = do
    let forPos = firstPos info
        doPos  = firstPos $
                     (info^.nodeTokens) ! (2 + length (is^.ann.nodeTokens)
                                             + length (es^.ann.nodeTokens))
        endPos = lastStartPos info
    when (posLine forPos /= posLine doPos) $
        style "Unnecessary newline before 'do'" doPos
    innerBlockStyleCheck forPos endPos block "for" "end"
-- Checks:
-- * STYLE if there is a space between function name and '('
-- * The body's block is an inner block
staticStatement (FunAssign info name body) = do
    let functionPos = firstPos info
        nameLastPos = lastPos name
        bodyPos     = firstPos body
        FunctionBody _ _ _ block = body
        endPos      = lastStartPos info
    when (posCol nameLastPos + 1 /= posCol bodyPos) $ do
        style "Unnecessary whitespace after function name" nameLastPos
    innerBlockStyleCheck functionPos endPos block "function" "end"
-- Checks:
-- * STYLE if there is a space between function name and '('
-- * The body's block is an inner block
staticStatement (LocalFunAssign info name body) = do
    let localPos    = firstPos info
        nameLastPos = lastPos name
        bodyPos     = firstPos body
        FunctionBody _ _ _ block = body
        endPos      = lastStartPos info
    when (posCol nameLastPos + 1 /= posCol bodyPos) $ do
        style "Unnecessary whitespace after function name" nameLastPos
    innerBlockStyleCheck localPos endPos block "local" "end"
-- Checks:
-- * ERROR if length is < length es
staticStatement (LocalAssign info (IdentList1 _ is) (ExpressionList _ es)) = do
    let ilen = length is
        elen = length es
    when (ilen < elen) $
        err ("Unused expression(s) in local assignment: found " <> istr ilen <> estr elen) info
  where
    istr 1 = "1 identifier and "
    istr n = tshow n <> " identifiers and "
    estr 1 = "1 expression"
    estr n = tshow n <> " expressions"
-- Checks:
-- STYLE if inner block is not one line down and one indent in from beginning location.
-- STYLE if there is a blank line before 'end'
-- STYLE if the 'end' does not align with the corresponding begin token.
-- | Shared layout checks for a block nested between a begin token at
-- @p1@ (named @name1@, e.g. "while") and a closing token at @p2@
-- (named @name2@, e.g. "end").
-- NOTE(review): the "newline before 'end'" message hard-codes 'end'
-- even when the closing token is @name2@ (e.g. "until") — possibly
-- intended to interpolate name2 instead.
innerBlockStyleCheck :: Pos -> Pos -> Block NodeInfo -> Text -> Text -> Static ()
innerBlockStyleCheck p1 p2 block name1 name2 = do
    let Loc blockBeginPos blockEndPos = block^.ann.nodeLoc
    i <- indent
    when (posLine p1 + 1 /= posLine blockBeginPos ||
          posCol p1 + i /= posCol blockBeginPos) $
        style ("Inner block should be on a new line and indented " <> tshow i <> " spaces") blockBeginPos
    when (posLine blockEndPos + 1 /= posLine p2) $
        style "Unnecessary newline before 'end'" p2
    when (posCol p1 /= posCol p2) $
        style ("'" <> name2 <> "' does not align with corresponding '" <> name1 <> "'") p2
-- Checks:
-- STYLE if there is more than one space after 'return'
-- | Return-statement checks: exactly one space between 'return' and the
-- first expression. (The lazy let-patterns are safe: a non-empty
-- expression list guarantees at least two tokens.)
staticReturnStatement :: ReturnStatement NodeInfo -> Static ()
staticReturnStatement (ReturnStatement _ (ExpressionList _ [])) = ok
staticReturnStatement (ReturnStatement info _ ) = do
    let tk1 :< rest = viewl (info^.nodeTokens)
        tk2 :< _    = viewl rest
    when (posCol (lastPos tk1) + 2 /= posCol (firstPos tk2)) $
        style "Unnecessary whitespace after 'return'" tk1
-- | Table-field checks: '=' spacing for both the @[expr] = expr@ and
-- @ident = expr@ field forms; bare expression fields are unchecked.
staticField :: Field NodeInfo -> Static ()
staticField (FieldExp info e1 e2) =
    asgnStyle xs ls rs
  where
    xs, ls, rs :: Seq (L Token)
    xs = info^.nodeTokens
    -- All of the tokens of the expression, plus the '[' ']'
    ls = Seq.take (2 + length (e1^.ann^.nodeTokens)) xs
    rs = e2^.ann.nodeTokens
staticField (FieldIdent info i e) = asgnStyle (info^.nodeTokens) (i^.ann.nodeTokens) (e^.ann.nodeTokens)
staticField _ = ok
--------------------------------------------------------------------------------
-- Style-check a "blah = blah", which occurs in multiple locations in the AST.
-- | Style-check a @lhs = rhs@ assignment shape: exactly one space on
-- each side of the '='. Occurs in several places in the AST, so the
-- caller supplies the token partition.
asgnStyle
    :: Seq (L Token) -- ^ All tokens
    -> Seq (L Token) -- ^ Tokens of LHS
    -> Seq (L Token) -- ^ Tokens of RHS
    -> Static ()
asgnStyle xs ls rs = do
    -- xs == [foobar, =, bazwhat]
    -- ls == [foobar]
    -- rs == [bazwhat]
    --
    --      p1 p3
    --      v  v
    -- foobar = bazwhat
    --        ^
    --        p2
    let p1 = lastPos ls
        p2 = firstPos (Seq.drop (length ls) xs)
        p3 = firstPos rs
    when (posCol p1 + 2 /= posCol p2 ||
          posCol p2 + 2 /= posCol p3) $
        style "Improper whitespace around '='" p2
infixl 9 !

-- | Unsafe positional lookup in a 'Seq', mirroring the list '!!':
-- errors when the index is out of range.
(!) :: Seq a -> Int -> a
s ! n =
    case viewl (Seq.drop n s) of
        EmptyL -> error "(!): index out of range"
        y :< _ -> y
-- | 'show' into strict 'Text'.
tshow :: Show a => a -> Text
tshow x = T.pack (show x)
| mitchellwrosen/llint | src/Static.hs | bsd-3-clause | 13,182 | 0 | 20 | 3,694 | 3,454 | 1,743 | 1,711 | -1 | -1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Machine-dependent assembly language
--
-- (c) The University of Glasgow 1993-2004
--
-----------------------------------------------------------------------------
#include "HsVersions.h"
#include "nativeGen/NCG.h"
module PPC.Instr (
archWordSize,
RI(..),
Instr(..),
maxSpillSlots,
allocMoreStack,
makeFarBranches
)
where
import PPC.Regs
import PPC.Cond
import Instruction
import Size
import TargetReg
import RegClass
import Reg
import CodeGen.Platform
import BlockId
import DynFlags
import Cmm
import CmmInfo
import FastString
import CLabel
import Outputable
import Platform
import FastBool
import UniqFM (listToUFM, lookupUFM)
import UniqSupply
import Control.Monad (replicateM)
import Data.Maybe (fromMaybe)
--------------------------------------------------------------------------------
-- Size of a PPC memory address, in bytes.
--
archWordSize :: Size
archWordSize = II32 -- this backend only targets 32-bit PPC (see the ArchPPC-only cases below)
-- | Instruction instance for powerpc: wires the generic register
-- allocator interface to the ppc_* implementations in this module.
instance Instruction Instr where
        regUsageOfInstr         = ppc_regUsageOfInstr
        patchRegsOfInstr        = ppc_patchRegsOfInstr
        isJumpishInstr          = ppc_isJumpishInstr
        jumpDestsOfInstr        = ppc_jumpDestsOfInstr
        patchJumpInstr          = ppc_patchJumpInstr
        mkSpillInstr            = ppc_mkSpillInstr
        mkLoadInstr             = ppc_mkLoadInstr
        takeDeltaInstr          = ppc_takeDeltaInstr
        isMetaInstr             = ppc_isMetaInstr
        -- the platform argument is not needed on PPC
        mkRegRegMoveInstr _     = ppc_mkRegRegMoveInstr
        takeRegRegMoveInstr     = ppc_takeRegRegMoveInstr
        mkJumpInstr             = ppc_mkJumpInstr
        mkStackAllocInstr       = ppc_mkStackAllocInstr
        mkStackDeallocInstr     = ppc_mkStackDeallocInstr
-- | Grow the C stack by @amount@ bytes (stack grows downward, hence
-- the negated immediate). Panics on any non-PPC32 architecture.
ppc_mkStackAllocInstr :: Platform -> Int -> Instr
ppc_mkStackAllocInstr platform amount
  = case platformArch platform of
      ArchPPC -> -- SUB II32 (OpImm (ImmInt amount)) (OpReg esp)
          ADD sp sp (RIImm (ImmInt (-amount)))
      arch -> panic $ "ppc_mkStackAllocInstr " ++ show arch
-- | Shrink the C stack by @amount@ bytes (inverse of
-- 'ppc_mkStackAllocInstr'). Panics on any non-PPC32 architecture.
ppc_mkStackDeallocInstr :: Platform -> Int -> Instr
ppc_mkStackDeallocInstr platform amount
  = case platformArch platform of
      ArchPPC -> -- ADD II32 (OpImm (ImmInt amount)) (OpReg esp)
          ADD sp sp (RIImm (ImmInt amount))
      arch -> panic $ "ppc_mkStackDeallocInstr " ++ show arch
--
-- See note [extra spill slots] in X86/Instr.hs
--
-- | Insert extra stack allocation when spill slots overflow the default
-- reservation: every entry block is prefixed with an 'alloc', and every
-- outgoing non-local jump is prefixed with the matching 'dealloc'
-- (local jumps are instead retargeted to the new prologue blocks).
allocMoreStack
  :: Platform
  -> Int
  -> NatCmmDecl statics PPC.Instr.Instr
  -> UniqSM (NatCmmDecl statics PPC.Instr.Instr)

allocMoreStack _ _ top@(CmmData _ _) = return top
allocMoreStack platform slots (CmmProc info lbl live (ListGraph code)) = do
    let
        infos   = mapKeys info
        entries = case code of
                    [] -> infos
                    BasicBlock entry _ : _ -- first block is the entry point
                        | entry `elem` infos -> infos
                        | otherwise          -> entry : infos

    uniqs <- replicateM (length entries) getUniqueM

    let
        delta = ((x + stackAlign - 1) `quot` stackAlign) * stackAlign -- round up
            where x = slots * spillSlotSize -- sp delta

        alloc   = mkStackAllocInstr   platform delta
        dealloc = mkStackDeallocInstr platform delta

        new_blockmap :: BlockEnv BlockId
        new_blockmap = mapFromList (zip entries (map mkBlockId uniqs))

        insert_stack_insns (BasicBlock id insns)
            | Just new_blockid <- mapLookup id new_blockmap
            = [ BasicBlock id [alloc, BCC ALWAYS new_blockid]
              , BasicBlock new_blockid block'
              ]
            | otherwise
            = [ BasicBlock id block' ]
            where
              block' = foldr insert_dealloc [] insns

              insert_dealloc insn r
                  -- BCTR might or might not be a non-local jump. For
                  -- "labeled-goto" we use JMP, and for "computed-goto" we
                  -- use MTCTR followed by BCTR. See 'PPC.CodeGen.genJump'.
                  = case insn of
                      JMP _           -> dealloc : insn : r
                      BCTR [] Nothing -> dealloc : insn : r
                      BCTR ids label  -> BCTR (map (fmap retarget) ids) label : r
                      BCCFAR cond b   -> BCCFAR cond (retarget b) : r
                      BCC    cond b   -> BCC    cond (retarget b) : r
                      _               -> insn : r

              -- BL and BCTRL are call-like instructions rather than
              -- jumps, and are used only for C calls.

              retarget :: BlockId -> BlockId
              retarget b
                  = fromMaybe b (mapLookup b new_blockmap)

        new_code
            = concatMap insert_stack_insns code
    -- in
    return (CmmProc info lbl live (ListGraph new_code))
-- -----------------------------------------------------------------------------
-- Machine's assembly language
-- We have a few common "instructions" (nearly all the pseudo-ops) but
-- mostly all of 'Instr' is machine-specific.
-- Register or immediate
-- | A register-or-immediate operand, for instructions that accept both.
data RI
    = RIReg Reg
    | RIImm Imm
-- | PPC instructions, plus the generic pseudo-ops shared with the other
-- native backends (COMMENT, LDATA, NEWBLOCK, DELTA).
data Instr
    -- comment pseudo-op
    = COMMENT FastString

    -- some static data spat out during code
    -- generation.  Will be extracted before
    -- pretty-printing.
    | LDATA   Section CmmStatics

    -- start a new basic block.  Useful during
    -- codegen, removed later.  Preceding
    -- instruction should be a jump, as per the
    -- invariants for a BasicBlock (see Cmm).
    | NEWBLOCK BlockId

    -- specify current stack offset for
    -- benefit of subsequent passes
    | DELTA   Int

    -- Loads and stores.
    | LD      Size Reg AddrMode     -- Load size, dst, src
    | LA      Size Reg AddrMode     -- Load arithmetic size, dst, src
    | ST      Size Reg AddrMode     -- Store size, src, dst
    | STU     Size Reg AddrMode     -- Store with Update size, src, dst
    | LIS     Reg Imm               -- Load Immediate Shifted dst, src
    | LI      Reg Imm               -- Load Immediate dst, src
    | MR      Reg Reg               -- Move Register dst, src -- also for fmr

    | CMP     Size Reg RI           -- size, src1, src2
    | CMPL    Size Reg RI           -- size, src1, src2

    | BCC     Cond BlockId
    | BCCFAR  Cond BlockId
    | JMP     CLabel                -- same as branch,
                                    -- but with CLabel instead of block ID
    | MTCTR   Reg
    | BCTR    [Maybe BlockId] (Maybe CLabel) -- with list of local destinations, and jump table location if necessary
    | BL      CLabel [Reg]          -- with list of argument regs
    | BCTRL   [Reg]

    | ADD     Reg Reg RI            -- dst, src1, src2
    | ADDC    Reg Reg Reg           -- (carrying) dst, src1, src2
    | ADDE    Reg Reg Reg           -- (extend) dst, src1, src2
    | ADDIS   Reg Reg Imm           -- Add Immediate Shifted dst, src1, src2
    | SUBF    Reg Reg Reg           -- dst, src1, src2 ; dst = src2 - src1
    | MULLW   Reg Reg RI
    | DIVW    Reg Reg Reg
    | DIVWU   Reg Reg Reg

    | MULLW_MayOflo Reg Reg Reg
                                    -- dst = 1 if src1 * src2 overflows
                                    -- pseudo-instruction; pretty-printed as:
                                    -- mullwo. dst, src1, src2
                                    -- mfxer dst
                                    -- rlwinm dst, dst, 2, 31,31

    | AND     Reg Reg RI            -- dst, src1, src2
    | OR      Reg Reg RI            -- dst, src1, src2
    | XOR     Reg Reg RI            -- dst, src1, src2
    | XORIS   Reg Reg Imm           -- XOR Immediate Shifted dst, src1, src2

    | EXTS    Size Reg Reg
    | NEG     Reg Reg
    | NOT     Reg Reg

    | SLW     Reg Reg RI            -- shift left word
    | SRW     Reg Reg RI            -- shift right word
    | SRAW    Reg Reg RI            -- shift right arithmetic word
    | RLWINM  Reg Reg Int Int Int   -- Rotate Left Word Immediate then AND with Mask

    | FADD    Size Reg Reg Reg
    | FSUB    Size Reg Reg Reg
    | FMUL    Size Reg Reg Reg
    | FDIV    Size Reg Reg Reg
    | FNEG    Reg Reg               -- negate is the same for single and double prec.

    | FCMP    Reg Reg

    | FCTIWZ  Reg Reg               -- convert to integer word
    | FRSP    Reg Reg               -- reduce to single precision
                                    -- (but destination is a FP register)

    | CRNOR   Int Int Int           -- condition register nor
    | MFCR    Reg                   -- move from condition register
    | MFLR    Reg                   -- move from link register

    | FETCHPC Reg                   -- pseudo-instruction:
                                    -- bcl to next insn, mflr reg

    | LWSYNC                        -- memory barrier
-- | Get the registers that are being used by this instruction.
-- regUsage doesn't need to do any trickery for jumps and such.
-- Just state precisely the regs read and written by that insn.
-- The consequences of control flow transfers, as far as register
-- allocation goes, are taken care of by the register allocator.
--
-- | Precise (read, written) register sets for each instruction, with
-- uninteresting (fixed/unallocatable) registers filtered out.
ppc_regUsageOfInstr :: Platform -> Instr -> RegUsage
ppc_regUsageOfInstr platform instr
 = case instr of
    LD    _ reg addr         -> usage (regAddr addr, [reg])
    LA    _ reg addr         -> usage (regAddr addr, [reg])
    ST    _ reg addr         -> usage (reg : regAddr addr, [])
    STU   _ reg addr         -> usage (reg : regAddr addr, [])
    LIS   reg _              -> usage ([], [reg])
    LI    reg _              -> usage ([], [reg])
    MR    reg1 reg2          -> usage ([reg2], [reg1])
    CMP   _ reg ri           -> usage (reg : regRI ri,[])
    CMPL  _ reg ri           -> usage (reg : regRI ri,[])
    BCC   _ _                -> noUsage
    BCCFAR _ _               -> noUsage
    MTCTR reg                -> usage ([reg],[])
    BCTR  _ _                -> noUsage
    -- calls clobber the caller-saved set
    BL    _ params           -> usage (params, callClobberedRegs platform)
    BCTRL params             -> usage (params, callClobberedRegs platform)
    ADD   reg1 reg2 ri       -> usage (reg2 : regRI ri, [reg1])
    ADDC  reg1 reg2 reg3     -> usage ([reg2,reg3], [reg1])
    ADDE  reg1 reg2 reg3     -> usage ([reg2,reg3], [reg1])
    ADDIS reg1 reg2 _        -> usage ([reg2], [reg1])
    SUBF  reg1 reg2 reg3     -> usage ([reg2,reg3], [reg1])
    MULLW reg1 reg2 ri       -> usage (reg2 : regRI ri, [reg1])
    DIVW  reg1 reg2 reg3     -> usage ([reg2,reg3], [reg1])
    DIVWU reg1 reg2 reg3     -> usage ([reg2,reg3], [reg1])
    MULLW_MayOflo reg1 reg2 reg3
                             -> usage ([reg2,reg3], [reg1])
    AND   reg1 reg2 ri       -> usage (reg2 : regRI ri, [reg1])
    OR    reg1 reg2 ri       -> usage (reg2 : regRI ri, [reg1])
    XOR   reg1 reg2 ri       -> usage (reg2 : regRI ri, [reg1])
    XORIS reg1 reg2 _        -> usage ([reg2], [reg1])
    EXTS  _ reg1 reg2        -> usage ([reg2], [reg1])
    NEG   reg1 reg2          -> usage ([reg2], [reg1])
    NOT   reg1 reg2          -> usage ([reg2], [reg1])
    SLW   reg1 reg2 ri       -> usage (reg2 : regRI ri, [reg1])
    SRW   reg1 reg2 ri       -> usage (reg2 : regRI ri, [reg1])
    SRAW  reg1 reg2 ri       -> usage (reg2 : regRI ri, [reg1])
    RLWINM reg1 reg2 _ _ _   -> usage ([reg2], [reg1])
    FADD  _ r1 r2 r3         -> usage ([r2,r3], [r1])
    FSUB  _ r1 r2 r3         -> usage ([r2,r3], [r1])
    FMUL  _ r1 r2 r3         -> usage ([r2,r3], [r1])
    FDIV  _ r1 r2 r3         -> usage ([r2,r3], [r1])
    FNEG  r1 r2              -> usage ([r2], [r1])
    FCMP  r1 r2              -> usage ([r1,r2], [])
    FCTIWZ r1 r2             -> usage ([r2], [r1])
    FRSP  r1 r2              -> usage ([r2], [r1])
    MFCR  reg                -> usage ([], [reg])
    MFLR  reg                -> usage ([], [reg])
    FETCHPC reg              -> usage ([], [reg])
    _                        -> noUsage
  where
    usage (src, dst) = RU (filter (interesting platform) src)
                          (filter (interesting platform) dst)
    regAddr (AddrRegReg r1 r2) = [r1, r2]
    regAddr (AddrRegImm r1 _)  = [r1]
    regRI   (RIReg r) = [r]
    regRI   _         = []

-- Virtual registers are always interesting; real registers only when
-- they are available to the allocator ('freeReg').
interesting :: Platform -> Reg -> Bool
interesting _        (RegVirtual _) = True
interesting platform (RegReal (RealRegSingle i))
    = isFastTrue (freeReg platform i)
interesting _        (RegReal (RealRegPair{}))
    = panic "PPC.Instr.interesting: no reg pairs on this arch"
-- | Apply a given mapping to all the register references in this
-- instruction.
-- | Apply a register mapping to every register reference in an
-- instruction. Immediates, labels, block ids, and the argument-register
-- lists of calls are left untouched.
ppc_patchRegsOfInstr :: Instr -> (Reg -> Reg) -> Instr
ppc_patchRegsOfInstr instr env
 = case instr of
    LD    sz reg addr       -> LD sz (env reg) (fixAddr addr)
    LA    sz reg addr       -> LA sz (env reg) (fixAddr addr)
    ST    sz reg addr       -> ST sz (env reg) (fixAddr addr)
    STU   sz reg addr       -> STU sz (env reg) (fixAddr addr)
    LIS   reg imm           -> LIS (env reg) imm
    LI    reg imm           -> LI (env reg) imm
    MR    reg1 reg2         -> MR (env reg1) (env reg2)
    CMP   sz reg ri         -> CMP sz (env reg) (fixRI ri)
    CMPL  sz reg ri         -> CMPL sz (env reg) (fixRI ri)
    BCC   cond lbl          -> BCC cond lbl
    BCCFAR cond lbl         -> BCCFAR cond lbl
    MTCTR reg               -> MTCTR (env reg)
    BCTR  targets lbl       -> BCTR targets lbl
    BL    imm argRegs       -> BL imm argRegs       -- argument regs
    BCTRL argRegs           -> BCTRL argRegs        -- cannot be remapped
    ADD   reg1 reg2 ri      -> ADD (env reg1) (env reg2) (fixRI ri)
    ADDC  reg1 reg2 reg3    -> ADDC (env reg1) (env reg2) (env reg3)
    ADDE  reg1 reg2 reg3    -> ADDE (env reg1) (env reg2) (env reg3)
    ADDIS reg1 reg2 imm     -> ADDIS (env reg1) (env reg2) imm
    SUBF  reg1 reg2 reg3    -> SUBF (env reg1) (env reg2) (env reg3)
    MULLW reg1 reg2 ri      -> MULLW (env reg1) (env reg2) (fixRI ri)
    DIVW  reg1 reg2 reg3    -> DIVW (env reg1) (env reg2) (env reg3)
    DIVWU reg1 reg2 reg3    -> DIVWU (env reg1) (env reg2) (env reg3)
    MULLW_MayOflo reg1 reg2 reg3
                            -> MULLW_MayOflo (env reg1) (env reg2) (env reg3)
    AND   reg1 reg2 ri      -> AND (env reg1) (env reg2) (fixRI ri)
    OR    reg1 reg2 ri      -> OR (env reg1) (env reg2) (fixRI ri)
    XOR   reg1 reg2 ri      -> XOR (env reg1) (env reg2) (fixRI ri)
    XORIS reg1 reg2 imm     -> XORIS (env reg1) (env reg2) imm
    EXTS  sz reg1 reg2      -> EXTS sz (env reg1) (env reg2)
    NEG   reg1 reg2         -> NEG (env reg1) (env reg2)
    NOT   reg1 reg2         -> NOT (env reg1) (env reg2)
    SLW   reg1 reg2 ri      -> SLW (env reg1) (env reg2) (fixRI ri)
    SRW   reg1 reg2 ri      -> SRW (env reg1) (env reg2) (fixRI ri)
    SRAW  reg1 reg2 ri      -> SRAW (env reg1) (env reg2) (fixRI ri)
    RLWINM reg1 reg2 sh mb me
                            -> RLWINM (env reg1) (env reg2) sh mb me
    FADD  sz r1 r2 r3       -> FADD sz (env r1) (env r2) (env r3)
    FSUB  sz r1 r2 r3       -> FSUB sz (env r1) (env r2) (env r3)
    FMUL  sz r1 r2 r3       -> FMUL sz (env r1) (env r2) (env r3)
    FDIV  sz r1 r2 r3       -> FDIV sz (env r1) (env r2) (env r3)
    FNEG  r1 r2             -> FNEG (env r1) (env r2)
    FCMP  r1 r2             -> FCMP (env r1) (env r2)
    FCTIWZ r1 r2            -> FCTIWZ (env r1) (env r2)
    FRSP  r1 r2             -> FRSP (env r1) (env r2)
    MFCR  reg               -> MFCR (env reg)
    MFLR  reg               -> MFLR (env reg)
    FETCHPC reg             -> FETCHPC (env reg)
    _                       -> instr
  where
    fixAddr (AddrRegReg r1 r2) = AddrRegReg (env r1) (env r2)
    fixAddr (AddrRegImm r1 i)  = AddrRegImm (env r1) i
    fixRI (RIReg r) = RIReg (env r)
    fixRI other     = other
--------------------------------------------------------------------------------
-- | Whether this instruction can change the flow of control in a way
-- the register allocator needs to worry about.
ppc_isJumpishInstr :: Instr -> Bool
ppc_isJumpishInstr BCC{}    = True
ppc_isJumpishInstr BCCFAR{} = True
ppc_isJumpishInstr BCTR{}   = True
ppc_isJumpishInstr BCTRL{}  = True
ppc_isJumpishInstr BL{}     = True
ppc_isJumpishInstr JMP{}    = True
ppc_isJumpishInstr _        = False
-- | The local block destinations this jump instruction may branch to.
-- (The original comment here was a copy-paste of 'ppc_isJumpishInstr';
-- this function computes destinations, it does not test jumpishness.)
-- Calls ('BL', 'BCTRL') and non-local jumps yield no local targets.
ppc_jumpDestsOfInstr :: Instr -> [BlockId]
ppc_jumpDestsOfInstr (BCC _ bid)      = [bid]
ppc_jumpDestsOfInstr (BCCFAR _ bid)   = [bid]
ppc_jumpDestsOfInstr (BCTR targets _) = [bid | Just bid <- targets]
ppc_jumpDestsOfInstr _                = []
-- | Rewrite the block destinations of a jump instruction. Used in the
-- linear allocator when adding fixup blocks for join points.
ppc_patchJumpInstr :: Instr -> (BlockId -> BlockId) -> Instr
ppc_patchJumpInstr (BCC cc bid)    patchF = BCC cc (patchF bid)
ppc_patchJumpInstr (BCCFAR cc bid) patchF = BCCFAR cc (patchF bid)
ppc_patchJumpInstr (BCTR bids lbl) patchF = BCTR (map (fmap patchF) bids) lbl
ppc_patchJumpInstr insn            _      = insn
-- -----------------------------------------------------------------------------
-- | An instruction to spill a register into a spill slot: a store of
-- the register to @sp + (slot offset - current delta)@, with the store
-- width chosen from the register's class.
ppc_mkSpillInstr
   :: DynFlags
   -> Reg       -- register to spill
   -> Int       -- current stack delta
   -> Int       -- spill slot to use
   -> Instr

ppc_mkSpillInstr dflags reg delta slot =
    ST sz reg (AddrRegImm sp (ImmInt (off - delta)))
  where
    platform = targetPlatform dflags
    off      = spillSlotToOffset slot
    sz = case targetClassOfReg platform reg of
             RcInteger -> II32
             RcDouble  -> FF64
             _         -> panic "PPC.Instr.mkSpillInstr: no match"
-- | Build a load instruction that reloads a register from the given
-- spill slot, relative to the current stack delta.
ppc_mkLoadInstr
   :: DynFlags
   -> Reg       -- register to load
   -> Int       -- current stack delta
   -> Int       -- spill slot to use
   -> Instr
ppc_mkLoadInstr dflags reg delta slot
  = LD sz reg (AddrRegImm sp (ImmInt (off - delta)))
  where
    platform = targetPlatform dflags
    off      = spillSlotToOffset slot
    -- Load width mirrors ppc_mkSpillInstr's choice per register class.
    sz = case targetClassOfReg platform reg of
            RcInteger -> II32
            RcDouble  -> FF64
            _         -> panic "PPC.Instr.mkLoadInstr: no match"
-- | The maximum number of bytes required to spill a register. PPC32
-- has 32-bit GPRs and 64-bit FPRs, while PPC64 has 64-bit GPRs and
-- 64-bit FPRs. So the maximum is 8 regardless of platforms unlike
-- x86. Note that AltiVec's vector registers are 128-bit wide so we
-- must not use this to spill them.
spillSlotSize :: Int
spillSlotSize = 8
-- | The number of spill slots available without allocating more.
maxSpillSlots :: DynFlags -> Int
maxSpillSlots dflags
    = ((rESERVED_C_STACK_BYTES dflags - 64) `div` spillSlotSize) - 1
-- = 0 -- useful for testing allocMoreStack
-- | The number of bytes that the stack pointer should be aligned
-- to. This is 16 both on PPC32 and PPC64 at least for Darwin, but I'm
-- not sure this is correct for other OSes.
stackAlign :: Int
stackAlign = 16
-- | Convert a spill slot number to a *byte* offset, with no sign.
-- The first 64 bytes of the reserved stack area are skipped here and
-- in 'maxSpillSlots' -- presumably reserved for other uses of the C
-- stack area (TODO confirm against the stack layout docs).
spillSlotToOffset :: Int -> Int
spillSlotToOffset slot
   = 64 + spillSlotSize * slot
--------------------------------------------------------------------------------
-- | If this (pseudo-)instruction announces a new C stack delta,
-- return that delta; otherwise Nothing.
ppc_takeDeltaInstr
        :: Instr
        -> Maybe Int
ppc_takeDeltaInstr (DELTA i) = Just i
ppc_takeDeltaInstr _         = Nothing
-- | True for pseudo-instructions that carry information for the
-- compiler only and emit no machine code.
ppc_isMetaInstr
        :: Instr
        -> Bool
ppc_isMetaInstr COMMENT{}  = True
ppc_isMetaInstr LDATA{}    = True
ppc_isMetaInstr NEWBLOCK{} = True
ppc_isMetaInstr DELTA{}    = True
ppc_isMetaInstr _          = False
-- | Copy the value in a register to another one.
-- Must work for all register classes.
-- (On PPC a single MR serves both integer and float moves here.)
ppc_mkRegRegMoveInstr
    :: Reg
    -> Reg
    -> Instr
ppc_mkRegRegMoveInstr src dst
    = MR dst src
-- | Make an unconditional jump instruction.
-- For architectures with branch delay slots, its ok to put
-- a NOP after the jump. Don't fill the delay slot with an
-- instruction that references regs or you'll confuse the
-- linear allocator.
ppc_mkJumpInstr
    :: BlockId
    -> [Instr]
ppc_mkJumpInstr id
    = [BCC ALWAYS id]
-- | Take the source and destination from this reg -> reg move instruction
-- or Nothing if it's not one.
-- Note the operand order: @MR dst src@ unpacks to @(src, dst)@.
ppc_takeRegRegMoveInstr :: Instr -> Maybe (Reg,Reg)
ppc_takeRegRegMoveInstr (MR dst src) = Just (src,dst)
ppc_takeRegRegMoveInstr _ = Nothing
-- -----------------------------------------------------------------------------
-- Making far branches
-- Conditional branches on PowerPC are limited to +-32KB; if our Procs get too
-- big, we have to work around this limitation.
-- Strategy: compute an (approximate) instruction address for every block;
-- if the whole proc is within range, leave it alone, otherwise rewrite
-- each out-of-range conditional branch to the BCCFAR form.
makeFarBranches
        :: BlockEnv CmmStatics
        -> [NatBasicBlock Instr]
        -> [NatBasicBlock Instr]
makeFarBranches info_env blocks
    -- 'last' is safe here: scanl always yields a non-empty list.
    | last blockAddresses < nearLimit = blocks
    | otherwise = zipWith handleBlock blockAddresses blocks
    where
        -- Running instruction counts at the start of each block.
        blockAddresses = scanl (+) 0 $ map blockLen blocks
        blockLen (BasicBlock _ instrs) = length instrs
        handleBlock addr (BasicBlock id instrs)
                = BasicBlock id (zipWith makeFar [addr..] instrs)
        -- Unconditional branches have a much larger range; leave them.
        makeFar _ (BCC ALWAYS tgt) = BCC ALWAYS tgt
        makeFar addr (BCC cond tgt)
            | abs (addr - targetAddr) >= nearLimit
            = BCCFAR cond tgt
            | otherwise
            = BCC cond tgt
            -- NOTE(review): partial match -- assumes every branch target
            -- is a block of this proc; verify callers guarantee that.
            where Just targetAddr = lookupUFM blockAddressMap tgt
        makeFar _ other = other
        -- 8192 instructions are allowed; let's keep some distance, as
        -- we have a few pseudo-insns that are pretty-printed as
        -- multiple instructions, and it's just not worth the effort
        -- to calculate things exactly
        nearLimit = 7000 - mapSize info_env * maxRetInfoTableSizeW
        blockAddressMap = listToUFM $ zip (map blockId blocks) blockAddresses
| jstolarek/ghc | compiler/nativeGen/PPC/Instr.hs | bsd-3-clause | 21,949 | 0 | 18 | 7,296 | 5,735 | 2,961 | 2,774 | 391 | 49 |
-- Exposes definition for Dict between the modules that need it
-- (for safe casting, mostly)
module AO.InnerDict
( AODict(..)
) where
import Control.Arrow (second)
import qualified Data.Map as M
import AO.Code
-- | An AO dictionary is simply a map from words to code plus any
-- metadata for each word.
--
-- However, a dictionary has a few special restrictions:
--
-- * no cyclic definitions
-- * no incompletely defined words
--
-- In this AO library, these attributes are enforced by a smart
-- constructors, 'buildAODict' or 'cleanAODict'. Functions taking
-- an `AODict` type may then depend on a clean (but not necessarily
-- type-safe) dictionary.
--
-- The raw constructor is exported only for safe casting between
-- modules; do not use it to bypass the smart constructors.
newtype AODict meta = AODict (M.Map Word (AO_Code, meta))
-- | Debugging aid: renders only the word count, never the contents.
instance Show (AODict meta) where
    showsPrec _ (AODict d) s =
        "AO dictionary with " ++ shows (M.size d) " words." ++ s
-- | Metadata may be rewritten freely; the code for each word is left
-- untouched, so mapping cannot disturb a clean dictionary.
instance Functor AODict where
    fmap f (AODict d) = AODict (fmap (\(code, meta) -> (code, f meta)) d)
| dmbarbour/awelon | hsrc/AO/InnerDict.hs | bsd-3-clause | 1,106 | 0 | 11 | 225 | 177 | 104 | 73 | 12 | 0 |
{-| The Quasiquoter for the reflex-jsx language
The only import you need is "jsx", which is the quasiquoter. See the README
for more information.
-}
{-# LANGUAGE TemplateHaskell #-}
module ReflexJsx.QQ
( jsx
) where
import qualified Language.Haskell.TH as TH
import Language.Haskell.TH.Quote
import Language.Haskell.Meta (parseExp)
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Reflex.Dom as Dom
import ReflexJsx.Parser
import Prelude hiding (exp)
{-| Quasiquoter for jsx-like expressions
    Used like "[jsx| <div /> |]"
    Only valid in expression position; pattern, declaration, and type
    contexts raise a descriptive error instead of an opaque 'undefined'.
-}
jsx :: QuasiQuoter
jsx = QuasiQuoter
  { quoteExp = quoteJsxExpression
  , quotePat = unsupported "pattern"
  , quoteDec = unsupported "declaration"
  , quoteType = unsupported "type"
  }
  where
    -- Shared error body for the unsupported quotation contexts.
    unsupported :: String -> String -> TH.Q a
    unsupported ctx _ =
      error ("jsx: quasiquoter cannot be used in a " ++ ctx ++ " context")
-- | Parse the quasiquote contents and generate the widget expression.
quoteJsxExpression :: String -> TH.ExpQ
quoteJsxExpression str = parseJsx str >>= outputWidgetCode
-- | Generate the Reflex widget expression for one parsed jsx node:
-- an element, a text literal, or a spliced Haskell expression.
outputWidgetCode :: Node -> TH.ExpQ
outputWidgetCode node =
  case node of
    Node tag attrs children -> outputNode tag attrs children
    Text content -> [| Dom.text content |]
    SplicedNode varName ->
      -- Was an irrefutable @let Right exp = parseExp …@; a malformed
      -- splice now reports the parse error instead of a pattern crash.
      case parseExp varName of
        Right exp -> return exp
        Left err  -> fail ("reflex-jsx: could not parse spliced expression "
                           ++ show varName ++ ": " ++ err)
-- | Generate code for an element node. Static attributes become a
-- literal 'Map' passed to 'Dom.elAttr'; a spliced attribute expression
-- is parsed and passed to 'Dom.elDynAttr'.
outputNode :: String -> Attrs -> [Node] -> TH.ExpQ
outputNode tag attrs children =
  let renderedChildren = TH.listE $ List.map outputWidgetCode children
  in case attrs of
       StaticAttrs staticAttrs ->
         let stringAttrs = TH.listE $ List.map toStringAttr staticAttrs
         in [| Dom.elAttr tag (Map.fromList $(stringAttrs)) $ sequence_ $(renderedChildren) |]
       SplicedAttrs attrExpr ->
         -- Was an irrefutable @let Right exp = …@; report parse errors.
         case parseExp attrExpr of
           Right exp -> [| Dom.elDynAttr tag $(return exp) $ sequence_ $(renderedChildren) |]
           Left err  -> fail ("reflex-jsx: could not parse attribute splice "
                              ++ show attrExpr ++ ": " ++ err)
-- | Generate a @(key, value)@ pair for one attribute. A 'TextVal' is a
-- plain string literal; an 'ExprVal' splices a user expression.
toStringAttr :: (String, AttrValue) -> TH.ExpQ
toStringAttr (key, value) = case value of
  TextVal content -> [| (key, content) |]
  ExprVal exprString ->
    -- Was an irrefutable @let Right exp = …@; report parse errors.
    case parseExp exprString of
      Right exp -> [| (key, $(return exp)) |]
      Left err  -> fail ("reflex-jsx: could not parse attribute expression "
                         ++ show exprString ++ ": " ++ err)
| dackerman/reflex-jsx | src/ReflexJsx/QQ.hs | bsd-3-clause | 1,875 | 0 | 16 | 388 | 443 | 239 | 204 | 45 | 3 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
module ZM.Type.NonEmptyList
( NonEmptyList(..)
, nonEmptyList
)
where
import Control.DeepSeq
import Flat
import Data.Model
-- |A list that contains at least one element
-- 'Elem' is the final (single-element) cell; 'Cons' prepends.
data NonEmptyList a = Elem a
                    | Cons a (NonEmptyList a)
  deriving (Eq, Ord, Show, NFData, Generic, Functor, Foldable, Traversable, Flat)
instance Model a => Model (NonEmptyList a)
-- |Convert a list to a 'NonEmptyList'; raises an error when the list
-- is empty (callers must guarantee non-emptiness).
nonEmptyList :: [a] -> NonEmptyList a
nonEmptyList xs =
  case xs of
    []       -> error "Cannot convert an empty list to NonEmptyList"
    [x]      -> Elem x
    (x : tl) -> Cons x (nonEmptyList tl)
| tittoassini/typed | src/ZM/Type/NonEmptyList.hs | bsd-3-clause | 853 | 0 | 8 | 202 | 189 | 104 | 85 | 19 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module :
-- Copyright : (c) 2012 Boyun Tang
-- License : BSD-style
-- Maintainer : tangboyun@hotmail.com
-- Stability : experimental
-- Portability : ghc
--
--
--
-----------------------------------------------------------------------------
module Bio.Web.GProfiler.GConvert
(
defaultPara
, queryGConvert
)
where
import Bio.Web.GProfiler.Types
import Bio.Web.Internal.Patiently
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (pack,unpack)
import Data.Char
import Data.List.Split
import Network.Shpider
import Text.HTML.TagSoup
-- | Default: change human GeneID from RefSeq to ENSG
-- Fields: organism, target namespace, numeric-ID prefix handling,
-- and whether to submit the query as a chromosomal region (off here).
defaultPara :: ConvertPara
defaultPara = CPara Human ENSG Prefix_PUBMED False
-- | Convert a list of GeneID to another alias by submitting the query
-- to the g:Profiler g:Convert web form and parsing the @txt@ output
-- from the page's @<pre>@ block.
queryGConvert :: ConvertPara -> ByteString -> IO [GeneRecord]
queryGConvert para text = runShpider $ do
  patiently waitTime download gCovertUrl
  f:_ <- currentForms
  (_,p1) <- patiently waitTime sendForm $ myForm f para
  return $ pageToGeneRecord p1
  where
    gCovertUrl = "http://biit.cs.ut.ee/gprofiler/gconvert.cgi"
    waitTime = 30
    myForm form (CPara organ target prefix useRegion) =
      let region = [("region_query", "on") | useRegion]
      in fillOutForm form $ (++) region $ pairs $ do
           "query" =: unpack text -- testGeneID
           "organism" =: toPara organ -- Organism
           "target" =: toPara target -- Target database
           "output" =: "txt" -- Output type: html,txt,gem,xls,mini
           "prefix" =: toPara prefix -- Numeric IDs treated as
    -- BUG FIX: the filter used to be
    --   not (all isSpace e) || not (null e)
    -- which keeps whitespace-only lines (the disjunct is True for any
    -- non-empty string); those lines then crashed the 7-field row
    -- pattern. @all isSpace ""@ is True, so this single predicate
    -- drops both empty and whitespace-only lines.
    pageToGeneRecord = map (toRecord . splitOn "\t") .
                       filter (not . all isSpace) .
                       lines . concatMap fromTagText .
                       filter isTagText .
                       takeWhile (not . ("pre" `isTagCloseName`)) .
                       dropWhile (not . ("pre" `isTagOpenName`)) . tags
    -- Expect exactly 7 tab-separated fields per row; fail loudly with
    -- a diagnostic on anything else instead of an opaque pattern error.
    toRecord [_, ori_id, _, new_id, name, descr, ns] =
      GR (pack ori_id) (pack new_id) (pack name) (pack descr) (pack ns)
    toRecord fields =
      error $ "queryGConvert: unexpected row with "
            ++ show (length fields) ++ " fields"
| tangboyun/bio-web-api | Bio/Web/GProfiler/GConvert.hs | bsd-3-clause | 2,234 | 0 | 27 | 615 | 531 | 286 | 245 | 39 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts, DeriveFunctor, GeneralizedNewtypeDeriving #-}
module HLib
(
Node(..), Conversation(..), SerializableUser(..), EdgeTarget(..),
ComputationResult(..),
ErrorType(..),
processSerializedUserMessageAndUpdateUser,
toEither, toEitherM
)
where
import qualified Data.Map as M
import Control.Monad.Trans
import Control.Arrow ((***))
-- Plain-string aliases for node prompts and replies.
type Question = String
type Answer = String
-- | Everything that can go wrong while driving a conversation.
data ErrorType = InvalidInputError | EdgeNotFoundError | ConversationNotFound | NodeNotFound | APIError
-- | Turn a 'Maybe' into an 'Either', using the given value for 'Nothing'.
toEither :: a -> Maybe b -> Either a b
toEither fallback = maybe (Left fallback) Right
-- | Lift 'toEither' over a monadic 'Maybe'.
toEitherM :: (Monad m) => a -> m (Maybe b) -> m (Either a b)
toEitherM fallback action = fmap (toEither fallback) action
-- | Like 'toEither', but lifted into 'ComputationResult'.
toComputationResult :: a -> Maybe b -> ComputationResult a b
toComputationResult fallback mb =
  case mb of
    Just b  -> return b
    Nothing -> ComputationResult (return (Left fallback))
-- | Apply one monadic function to each first component and another to
-- each second component of a list of pairs (effects run left-to-right,
-- first component before second within each pair).
mapMTuples :: (Monad m) => (a -> m b) -> (c -> m d) -> [(a, c)] -> m [(b, d)]
mapMTuples f g = mapM $ \(a, c) -> do
  b <- f a
  d <- g c
  return (b, d)
-- | An IO action that may fail with an error of type @a@.
-- Essentially @ExceptT a IO b@, hand-rolled.
newtype ComputationResult a b = ComputationResult (IO (Either a b)) deriving (Functor)
instance Applicative (ComputationResult a) where
  pure = ComputationResult . return . Right
  -- Runs both actions (function first); the Either applicative then
  -- short-circuits on the first Left.
  (ComputationResult mf) <*> (ComputationResult ma) = ComputationResult $ do
    f <- mf
    a <- ma
    return $ f <*> a
instance Monad (ComputationResult a) where
  return = pure
  -- A Left result short-circuits; a Right result feeds the next step.
  (ComputationResult ma) >>= f = ComputationResult $ do
    a <- ma
    let b = f <$> a
    case b of
      (Left e) -> return $ Left e
      (Right (ComputationResult x)) -> x
-- Node
-- @nid@ :: NodeId
-- @us@ :: UserState
-- A conversation node either asks a question and processes the reply
-- ('ResponseNode') or runs automatically ('AutoExecNode').
-- NOTE(review): 'question'/'receive' and 'exec' are partial record
-- selectors -- using the wrong one on the wrong constructor crashes.
data Node nid us = ResponseNode {
  nodeId :: nid,
  question :: Question,
  receive :: String -> us -> ComputationResult ErrorType (Answer, us)
} | AutoExecNode { nodeId :: nid, exec :: us -> ComputationResult ErrorType (Answer, us) }
-- Nodes are identified solely by their 'nodeId'.
instance (Eq nid) => Eq (Node nid us) where
  n1 == n2 = nodeId n1 == nodeId n2
instance (Ord nid) => Ord (Node nid us) where
  n1 `compare` n2 = nodeId n1 `compare` nodeId n2
-- | Where an edge leads: end of conversation, another node, or a
-- freshly started sub-conversation.
data EdgeTarget cid nid us = EOC | NodeTarget (Node nid us) | ConversationTarget (Conversation cid nid us)
-- | A conversation: a state-dependent entry node plus an edge map.
data Conversation cid nid us = Conversation {
  conversationId :: cid,
  starter :: us -> Node nid us,
  edges :: M.Map (Node nid us) (us -> EdgeTarget cid nid us)
}
-- | A user holds a stack of active conversations (head = current) and
-- an application-defined state.
data User cid nid us = User {
  conversations :: [(Conversation cid nid us, Node nid us)],
  userState :: us
}
-- | Feed one input message to the user's current conversation node.
-- Returns the node's answer, the updated user state, and the edge
-- target describing where the conversation goes next.
--
-- Was previously partial: @head (conversations user)@ crashed when the
-- user had no active conversation; now it reports 'ConversationNotFound'.
-- NOTE(review): still assumes the current node is a 'ResponseNode'
-- ('receive' is a partial selector) -- verify callers guarantee this.
processMessage :: (Ord nid) => User cid nid us -> String -> ComputationResult ErrorType (Answer, us, EdgeTarget cid nid us)
processMessage user input =
  case conversations user of
    [] -> ComputationResult $ return (Left ConversationNotFound)
    (conv, node) : _ -> do
      let ustate = userState user
      -- Look up the outgoing edge before consuming the input, as before.
      ef <- toComputationResult EdgeNotFoundError (M.lookup node (edges conv))
      (ans, ustate') <- receive node input ustate
      return (ans, ustate', ef ustate')
-- | Advance the user according to the edge target produced by
-- 'processMessage'.
-- End of conversation: pop the finished conversation off the stack.
updateUser :: User cid nid us -> (us, EdgeTarget cid nid us) -> User cid nid us
updateUser user (_, EOC) = user { conversations = drop 1 (conversations user) }
-- Move to the next node within the current conversation.
-- NOTE(review): 'head'/'tail' here are lazily partial -- they only blow
-- up when the field is forced, and only if the stack was empty.
updateUser user (_, NodeTarget node) = let convs = conversations user in
  user { conversations = (fst $ head convs, node) : tail convs }
-- Push a sub-conversation, starting it from the updated state.
-- Note this is the only branch that commits the new user state.
updateUser user (ustate', ConversationTarget nconv) = user {
  conversations = (nconv, starter nconv ustate') : conversations user,
  userState = ustate'
}
-- | Process one message and fold the resulting state/edge back into
-- the user, returning the answer alongside the updated user.
processMessageAndUpdateUser :: (Ord nid) => User cid nid us -> String -> ComputationResult ErrorType (Answer, User cid nid us)
processMessageAndUpdateUser user input =
  fmap repackage (processMessage user input)
  where
    repackage (ans, ustate', et) = (ans, updateUser user (ustate', et))
-- | A persistence-friendly view of 'User': conversations are stored as
-- (conversation id, node id) pairs instead of full closures.
data SerializableUser cid nid us = SerializableUser {
  serializableConversations :: [(cid, nid)],
  serializableUserState :: us
}
-- | Rehydrate a 'SerializableUser' by resolving each stored id pair
-- through the supplied monadic lookups.
defaultUserDeserializer :: (Monad m) => (cid -> m (Conversation cid nid us)) -> (nid -> m (Node nid us)) -> SerializableUser cid nid us -> m (User cid nid us)
defaultUserDeserializer conversationGetter nodeGetter suser = do
  convs <- mapMTuples conversationGetter nodeGetter (serializableConversations suser)
  return $ User convs (serializableUserState suser)
type UserId = String
-- main export
-- | End-to-end entry point: fetch the serialized user, rehydrate it
-- via the pure lookups (failures map to 'ConversationNotFound' /
-- 'NodeNotFound'), then process the message.
processSerializedUserMessageAndUpdateUser :: (Ord nid) =>
  (cid -> Maybe (Conversation cid nid us)) ->
  (nid -> Maybe (Node nid us)) ->
  (UserId -> ComputationResult ErrorType (SerializableUser cid nid us)) ->
  UserId -> String -> ComputationResult ErrorType (Answer, User cid nid us)
processSerializedUserMessageAndUpdateUser conversationGetter nodeGetter userGetter userId input =
  processEitherUserMessageAndUpdateUser (userGetter userId) where
    processEitherUserMessageAndUpdateUser fuser = do
      suser <- fuser
      user <- defaultUserDeserializer (toComputationResult ConversationNotFound . conversationGetter) (toComputationResult NodeNotFound . nodeGetter) suser
      processMessageAndUpdateUser user input
| homam/fsm-conversational-ui | src/HLib.hs | bsd-3-clause | 4,923 | 0 | 15 | 921 | 1,808 | 959 | 849 | 97 | 1 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
module FPNLA.Operations.LAPACK.Strategies.DataTypes (
HMatrixBindSeq,
CholLLVBlocksSeq,
CholLLVBlocksPar_Repa,
CholLLVSeq,
CholLLVPar_Repa,
NullContext,
SqrBlockContext(..)
) where
import FPNLA.Operations.Parameters (StratCtx)
import FPNLA.Operations.Utils (NullContext, SqrBlockContext(..))
-- Empty phantom types naming each solver strategy; they carry no data
-- and exist only to select 'StratCtx' instances.
data HMatrixBindSeq
data CholLLVSeq dots gemvs
data CholLLVPar_Repa dots gemvs
-- NOTE(review): the binder order here (potrfs first) differs from the
-- type instances below (potrfs last). Binders are positional, so this
-- is legal but confusing -- confirm the intended argument order.
data CholLLVBlocksSeq potrfs syrks gemms trsms
data CholLLVBlocksPar_Repa potrfs syrks gemms trsms
-- Each strategy's context bundles the contexts of its sub-strategies.
type instance StratCtx HMatrixBindSeq = NullContext
type instance StratCtx (CholLLVSeq dots gemvs) = (StratCtx dots, StratCtx gemvs)
type instance StratCtx (CholLLVPar_Repa dots gemvs) = (StratCtx dots, StratCtx gemvs)
type instance StratCtx (CholLLVBlocksSeq syrks gemms trsms potrfs) =
    (SqrBlockContext, StratCtx syrks, StratCtx gemms, StratCtx trsms, StratCtx potrfs)
type instance StratCtx (CholLLVBlocksPar_Repa syrks gemms trsms potrfs) =
    (SqrBlockContext, StratCtx syrks, StratCtx gemms, StratCtx trsms, StratCtx potrfs)
| mauroblanco/fpnla-examples | src/FPNLA/Operations/LAPACK/Strategies/DataTypes.hs | bsd-3-clause | 1,197 | 0 | 6 | 215 | 263 | 159 | 104 | -1 | -1 |
{-# LANGUAGE DataKinds, FlexibleContexts, FlexibleInstances,
MultiParamTypeClasses, QuasiQuotes, TemplateHaskell,
TypeOperators, UndecidableInstances #-}
-- | An example of dealing with rows that contain missing data. We may
-- want to fill in the gaps with default values.
import Data.Monoid ((<>), First(..))
import Data.Vinyl (Rec(..), rmap, RecApplicative, rapply)
import Data.Vinyl.Functor (Lift(..))
import Frames hiding ((:&))
import Pipes (cat, Producer, (>->))
import Pipes.Prelude as P
-- An en passant Default class
class Default a where
  def :: a
-- Named column types: the symbol is the column name, the second
-- argument the cell type.
type MyInt = "int" :-> Int
type MyString = "string" :-> String
type MyBool = "bool" :-> Bool
-- Note that we define instances for column types. This lets us have
-- different defaults for different column names.
instance Default MyInt where def = Col 0
instance Default MyString where def = Col ""
instance Default MyBool where def = Col False
-- We can write instances for /all/ 'Rec' values: build the default
-- record pointwise by reifying a Default dictionary for each column.
instance (Applicative f, LAll Default ts, RecApplicative ts)
  => Default (Rec f ts) where
  def = reifyDict [pr|Default|] (pure def)
-- Just to try it out at the 'Identity' functor.
defRec :: Record '[MyString, MyInt, MyBool]
defRec = def
-- A default record at a more interesting 'Functor'.
defFirst :: Rec First '[MyString, MyInt, MyBool]
defFirst = def
-- Real data often has holes. Here we have the 'MyString' column, but
-- not the others.
holyRow :: Rec First '[MyString, MyInt, MyBool]
holyRow = rmap First $ pure (Col "joe") :& Nothing :& Nothing :& RNil
-- We can fill in the holes with our default record.
-- 'First' is left-biased, so present values win over the defaults.
unholy :: Maybe (Record '[MyString, MyInt, MyBool])
unholy = recMaybe . rmap getFirst $ rapply (rmap (Lift . flip (<>)) def) holyRow
-- * Reading a CSV file with missing data
-- Defaults for the columns Template Haskell derives from the CSV below.
instance Default ("col_a" :-> Int) where def = Col 0
instance Default ("col_b" :-> Text) where def = Col mempty
tableTypes "Row" "data/missing.csv"
-- | Fill in missing columns with a default 'Row' value synthesized
-- from 'Default' instances.
holesFilled :: Producer Row IO ()
holesFilled = readTableMaybe "data/missing.csv" >-> P.map (fromJust . holeFiller)
  where holeFiller :: Rec Maybe (RecordColumns Row) -> Maybe Row
        -- Wrap in First, merge with the defaults (present cells win),
        -- then collapse; recMaybe succeeds because def is total.
        holeFiller = recMaybe
                   . rmap getFirst
                   . rapply (rmap (Lift . flip (<>)) def)
                   . rmap First
        fromJust = maybe (error "Frames holesFilled failure") id
-- | Print the first few hole-filled rows.
showFilledHoles :: IO ()
showFilledHoles = pipePreview holesFilled 10 cat
main :: IO ()
main = return ()
| codygman/Frames | demo/MissingData.hs | bsd-3-clause | 2,537 | 0 | 14 | 511 | 649 | 360 | 289 | 43 | 1 |
{-# LANGUAGE TypeFamilies, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, GADTs, ScopedTypeVariables, FunctionalDependencies, RecursiveDo, UndecidableInstances, GeneralizedNewtypeDeriving, StandaloneDeriving, EmptyDataDecls, NoMonomorphismRestriction, TypeOperators, DeriveDataTypeable, PackageImports, TemplateHaskell, LambdaCase, DataKinds, PolyKinds #-}
module Reflex.Dynamic ( Dynamic -- Abstract so we can preserve the law that the current value is always equal to the most recent update
, current
, updated
, constDyn
, holdDyn
, nubDyn
, count
, toggle
, switchPromptlyDyn
, tagDyn
, attachDyn
, attachDynWith
, attachDynWithMaybe
, mapDyn
, forDyn
, mapDynM
, foldDyn
, foldDynM
, combineDyn
, collectDyn
, mconcatDyn
, distributeDMapOverDyn
, joinDyn
, joinDynThroughMap
, traceDyn
, traceDynWith
, splitDyn
, Demux
, demux
, getDemuxed
-- Things that probably aren't very useful:
, HList (..)
, FHList (..)
, distributeFHListOverDyn
-- Unsafe
, unsafeDynamic
) where
import Prelude hiding (mapM, mapM_)
import Reflex.Class
import Data.Functor.Misc
import Control.Monad hiding (mapM, mapM_, forM, forM_)
import Control.Monad.Fix
import Control.Monad.Identity hiding (mapM, mapM_, forM, forM_)
import Data.These
import Data.Traversable (mapM, forM)
import Data.Align
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Dependent.Map (DMap)
import qualified Data.Dependent.Map as DMap
import Data.Dependent.Sum (DSum (..))
import Data.GADT.Compare (GCompare (..), GEq (..), (:~:) (..), GOrdering (..))
import Data.Monoid
--import Data.HList (HList (..), hBuild)
-- | A heterogeneous list indexed by the type-level list of its
-- element types.
data HList (l::[*]) where
  HNil :: HList '[]
  HCons :: e -> HList l -> HList (e ': l)
infixr 2 `HCons`
-- | Type-level reverse-append, mirroring the value-level 'hRevApp'.
type family HRevApp (l1 :: [k]) (l2 :: [k]) :: [k]
type instance HRevApp '[] l = l
type instance HRevApp (e ': l) l' = HRevApp l (e ': l')
-- | Reverse the first list onto the front of the second (accumulator
-- style).
hRevApp :: HList l1 -> HList l2 -> HList (HRevApp l1 l2)
hRevApp HNil l = l
hRevApp (HCons x l) l' = hRevApp l (HCons x l')
hReverse :: HList l -> HList (HRevApp l '[])
hReverse l = hRevApp l HNil
-- | Variadic HList constructor: @hBuild 1 "a" True :: HList …@.
hBuild :: (HBuild' '[] r) => r
hBuild = hBuild' HNil
class HBuild' l r where
    hBuild' :: HList l -> r
-- Base case: no more arguments; reverse the accumulator.
instance (l' ~ HRevApp l '[])
      => HBuild' l (HList l') where
  hBuild' l = hReverse l
-- Inductive case: accept one more argument onto the accumulator.
instance HBuild' (a ': l) r
      => HBuild' l (a->r) where
  hBuild' l x = hBuild' (HCons x l)
-- | A container for a value that can change over time and allows notifications on changes.
-- Basically a combination of a 'Behavior' and an 'Event', with a rule that the Behavior will
-- change if and only if the Event fires.
data Dynamic t a
= Dynamic (Behavior t a) (Event t a)
unsafeDynamic :: Behavior t a -> Event t a -> Dynamic t a
unsafeDynamic = Dynamic
-- | Extract the 'Behavior' of a 'Dynamic'.
current :: Dynamic t a -> Behavior t a
current (Dynamic b _) = b
-- | Extract the 'Event' of the 'Dynamic'.
updated :: Dynamic t a -> Event t a
updated (Dynamic _ e) = e
-- | 'Dynamic' with the constant supplied value.
constDyn :: Reflex t => a -> Dynamic t a
constDyn x = Dynamic (constant x) never
-- | Create a 'Dynamic' using the initial value that changes every
-- time the 'Event' occurs.
holdDyn :: MonadHold t m => a -> Event t a -> m (Dynamic t a)
holdDyn v0 e = do
b <- hold v0 e
return $ Dynamic b e
-- | Create a new 'Dynamic' that only signals changes if the values
-- actually changed.
nubDyn :: (Reflex t, Eq a) => Dynamic t a -> Dynamic t a
nubDyn d =
let e' = attachWithMaybe (\x x' -> if x' == x then Nothing else Just x') (current d) (updated d)
in Dynamic (current d) e' --TODO: Avoid invalidating the outgoing Behavior
{-
instance Reflex t => Functor (Dynamic t) where
fmap f d =
let e' = fmap f $ updated d
eb' = push (\b' -> liftM Just $ constant b') e'
b0 = fmap f $ current d
-}
-- | Map a function over a 'Dynamic'.
mapDyn :: (Reflex t, MonadHold t m) => (a -> b) -> Dynamic t a -> m (Dynamic t b)
mapDyn f = mapDynM $ return . f
-- | Flipped version of 'mapDyn'.
forDyn :: (Reflex t, MonadHold t m) => Dynamic t a -> (a -> b) -> m (Dynamic t b)
forDyn = flip mapDyn
-- | Map a monadic function over a 'Dynamic'. The only monadic action that the given function can
-- perform is 'sample'.
{-# INLINE mapDynM #-}
mapDynM :: forall t m a b. (Reflex t, MonadHold t m) => (forall m'. MonadSample t m' => a -> m' b) -> Dynamic t a -> m (Dynamic t b)
mapDynM f d = do
  -- Push f over updates; hold the (constant) result Behaviors so the
  -- output Behavior always reflects the most recent mapped value.
  let e' = push (liftM Just . f :: a -> PushM t (Maybe b)) $ updated d
      eb' = fmap constant e'
      v0 = pull $ f =<< sample (current d)
  bb' :: Behavior t (Behavior t b) <- hold v0 eb'
  let b' = pull $ sample =<< sample bb'
  return $ Dynamic b' e'
-- | Create a 'Dynamic' using the initial value and change it each
-- time the 'Event' occurs using a folding function on the previous
-- value and the value of the 'Event'.
foldDyn :: (Reflex t, MonadHold t m, MonadFix m) => (a -> b -> b) -> b -> Event t a -> m (Dynamic t b)
foldDyn f = foldDynM (\o v -> return $ f o v)
-- | Create a 'Dynamic' using the initial value and change it each
-- time the 'Event' occurs using a monadic folding function on the
-- previous value and the value of the 'Event'.
foldDynM :: (Reflex t, MonadHold t m, MonadFix m) => (a -> b -> PushM t b) -> b -> Event t a -> m (Dynamic t b)
foldDynM f z e = do
  -- The recursive binding feeds the held accumulator back into the
  -- folding step (hence the MonadFix constraint).
  rec let e' = flip push e $ \o -> do
            v <- sample b'
            liftM Just $ f o v
      b' <- hold z e'
  return $ Dynamic b' e'
-- | Create a new 'Dynamic' that counts the occurences of the 'Event'.
count :: (Reflex t, MonadHold t m, MonadFix m, Num b) => Event t a -> m (Dynamic t b)
count e = holdDyn 0 =<< zipListWithEvent const (iterate (+1) 1) e
-- | Create a new 'Dynamic' using the initial value that flips its
-- value every time the 'Event' occurs.
toggle :: (Reflex t, MonadHold t m, MonadFix m) => Bool -> Event t a -> m (Dynamic t Bool)
toggle = foldDyn (const not)
-- | Switches to the new 'Event' whenever it receives one. Switching
-- occurs *before* the inner 'Event' fires - so if the 'Dynamic' changes and both the old and new
-- inner Events fire simultaneously, the output will fire with the value of the *new* 'Event'.
switchPromptlyDyn :: forall t a. Reflex t => Dynamic t (Event t a) -> Event t a
switchPromptlyDyn de =
  -- eCoincidences handles the simultaneous case and wins via leftmost.
  let eLag = switch $ current de
      eCoincidences = coincidence $ updated de
  in leftmost [eCoincidences, eLag]
{-
mergeEventsWith :: Reflex t m => (a -> a -> a) -> Event t a -> Event t a -> m (Event t a)
mergeEventsWith f ea eb = mapE (mergeThese f) =<< alignEvents ea eb
firstE :: (Reflex t m) => [Event t a] -> m (Event t a)
firstE [] = return never
firstE (h:t) = mergeEventsLeftBiased h =<< firstE t
concatEventsWith :: (Reflex t m) => (a -> a -> a) -> [Event t a] -> m (Event t a)
concatEventsWith _ [] = return never
concatEventsWith _ [e] = return e
concatEventsWith f es = mapEM (liftM (foldl1 f . map (\(Const2 _ :=> v) -> v) . DMap.toList) . sequenceDmap) <=< mergeEventDMap $ DMap.fromList $ map (\(k, v) -> WrapArg (Const2 k) :=> v) $ zip [0 :: Int ..] es
--concatEventsWith f (h:t) = mergeEventsWith f h =<< concatEventsWith f t
mconcatE :: (Reflex t m, Monoid a) => [Event t a] -> m (Event t a)
mconcatE = concatEventsWith mappend
-}
-- | Split the 'Dynamic' into two 'Dynamic's, each taking the
-- respective value of the tuple.
splitDyn :: (Reflex t, MonadHold t m) => Dynamic t (a, b) -> m (Dynamic t a, Dynamic t b)
splitDyn d = liftM2 (,) (mapDyn fst d) (mapDyn snd d)
-- | Merge the 'Dynamic' values using their 'Monoid' instance.
-- Implemented by keying each input with its list index in a 'DMap'.
mconcatDyn :: forall t m a. (Reflex t, MonadHold t m, Monoid a) => [Dynamic t a] -> m (Dynamic t a)
mconcatDyn es = do
  ddm :: Dynamic t (DMap (Const2 Int a)) <- distributeDMapOverDyn $ DMap.fromList $ map (\(k, v) -> WrapArg (Const2 k) :=> v) $ zip [0 :: Int ..] es
  mapDyn (mconcat . map (\(Const2 _ :=> v) -> v) . DMap.toList) ddm
-- | Create a 'Dynamic' with a 'DMap' of values out of a 'DMap' of
-- Dynamic values.
distributeDMapOverDyn :: forall t m k. (Reflex t, MonadHold t m, GCompare k) => DMap (WrapArg (Dynamic t) k) -> m (Dynamic t (DMap k))
distributeDMapOverDyn dm = case DMap.toList dm of
  -- Cheap special cases before the general merge machinery.
  [] -> return $ constDyn DMap.empty
  [WrapArg k :=> v] -> mapDyn (DMap.singleton k) v
  _ -> do
    let edmPre = merge $ rewrapDMap updated dm
        -- On any update, rebuild the full map: changed keys take the
        -- new value, unchanged keys are sampled from their Dynamic.
        edm :: Event t (DMap k) = flip push edmPre $ \o -> return . Just =<< do
          let f _ = \case
                This origDyn -> sample $ current origDyn
                That _ -> error "distributeDMapOverDyn: should be impossible to have an event occurring that is not present in the original DMap"
                These _ (Identity newVal) -> return newVal
          sequenceDmap $ combineDMapsWithKey f dm (wrapDMap Identity o)
        dm0 :: Behavior t (DMap k) = pull $ do
          liftM DMap.fromList $ forM (DMap.toList dm) $ \(WrapArg k :=> dv) -> liftM (k :=>) $ sample $ current dv
    bbdm :: Behavior t (Behavior t (DMap k)) <- hold dm0 $ fmap constant edm
    let bdm = pull $ sample =<< sample bbdm
    return $ Dynamic bdm edm
-- | Merge two 'Dynamic's into a new one using the provided
-- function. The new 'Dynamic' changes its value each time one of the
-- original 'Dynamic's changes its value.
combineDyn :: forall t m a b c. (Reflex t, MonadHold t m) => (a -> b -> c) -> Dynamic t a -> Dynamic t b -> m (Dynamic t c)
combineDyn f da db = do
  -- When only one side fires, the other side's current value is sampled.
  let eab = align (updated da) (updated db)
      ec = flip push eab $ \o -> do
        (a, b) <- case o of
          This a -> do
            b <- sample $ current db
            return (a, b)
          That b -> do
            a <- sample $ current da
            return (a, b)
          These a b -> return (a, b)
        return $ Just $ f a b
      c0 :: Behavior t c = pull $ liftM2 f (sample $ current da) (sample $ current db)
  bbc :: Behavior t (Behavior t c) <- hold c0 $ fmap constant ec
  let bc :: Behavior t c = pull $ sample =<< sample bbc
  return $ Dynamic bc ec
{-
tagInnerDyn :: Reflex t => Event t (Dynamic t a) -> Event t a
tagInnerDyn e =
let eSlow = push (liftM Just . sample . current) e
eFast = coincidence $ fmap updated e
in leftmost [eFast, eSlow]
-}
-- | Join a nested 'Dynamic' into a new 'Dynamic' that has the value
-- of the inner 'Dynamic'.
joinDyn :: forall t a. (Reflex t) => Dynamic t (Dynamic t a) -> Dynamic t a
joinDyn dd =
  -- Three update sources: outer change, inner change, and both at
  -- once; 'leftmost' gives the simultaneous case priority.
  let b' = pull $ sample . current =<< sample (current dd)
      eOuter :: Event t a = pushAlways (sample . current) $ updated dd
      eInner :: Event t a = switch $ fmap updated (current dd)
      eBoth :: Event t a = coincidence $ fmap updated (updated dd)
      e' = leftmost [eBoth, eOuter, eInner]
  in Dynamic b' e'
--TODO: Generalize this to functors other than Maps
-- | Combine a 'Dynamic' of a 'Map' of 'Dynamic's into a 'Dynamic'
-- with the current values of the 'Dynamic's in a map.
joinDynThroughMap :: forall t k a. (Reflex t, Ord k) => Dynamic t (Map k (Dynamic t a)) -> Dynamic t (Map k a)
joinDynThroughMap dd =
  let b' = pull $ mapM (sample . current) =<< sample (current dd)
      eOuter :: Event t (Map k a) = pushAlways (mapM (sample . current)) $ updated dd
      eInner :: Event t (Map k a) = attachWith (flip Map.union) b' $ switch $ fmap (mergeMap . fmap updated) (current dd) --Note: the flip is important because Map.union is left-biased
      readNonFiring :: MonadSample t m => These (Dynamic t a) a -> m a
      readNonFiring = \case
        This d -> sample $ current d
        That a -> return a
        These _ a -> return a
      eBoth :: Event t (Map k a) = coincidence $ fmap (\m -> pushAlways (mapM readNonFiring . align m) $ mergeMap $ fmap updated m) (updated dd)
      e' = leftmost [eBoth, eOuter, eInner]
  in Dynamic b' e'
-- | Print the 'Dynamic''s value, prefixed with the given label, each
-- time it changes. Debugging only.
--
-- Note: Just like Debug.Trace.trace, the value will only be shown if something
-- else in the system is depending on it.
traceDyn :: (Reflex t, Show a) => String -> Dynamic t a -> Dynamic t a
traceDyn label = traceDynWith (\x -> label <> ": " <> show x)
-- | Print the result of applying the provided function to the value
-- of the 'Dynamic' on each change. This should /only/ be used for
-- debugging.
--
-- Note: Just like Debug.Trace.trace, the value will only be shown if something
-- else in the system is depending on it.
-- Only the update 'Event' is traced; sampling the 'Behavior' alone
-- prints nothing.
traceDynWith :: Reflex t => (a -> String) -> Dynamic t a -> Dynamic t a
traceDynWith f d =
  let e' = traceEventWith f $ updated d
  in Dynamic (current d) e'
-- | Replace the value of the 'Event' with the current value of the 'Dynamic'
-- each time the 'Event' occurs.
--
-- Note: `tagDyn d e` differs from `tag (current d) e` in the case that `e` is firing
-- at the same time that `d` is changing. With `tagDyn d e`, the *new* value of `d`
-- will replace the value of `e`, whereas with `tag (current d) e`, the *old* value
-- will be used, since the 'Behavior' won't be updated until the end of the frame.
-- Additionally, this means that the output 'Event' may not be used to directly change
-- the input 'Dynamic', because that would mean its value depends on itself. When creating
-- cyclic data flows, generally `tag (current d) e` is preferred.
tagDyn :: Reflex t => Dynamic t a -> Event t b -> Event t a
tagDyn = attachDynWith const
-- | Attach the current value of the 'Dynamic' to the value of the
-- 'Event' each time it occurs.
--
-- Note: `attachDyn d` is not the same as `attach (current d)`. See 'tagDyn' for details.
attachDyn :: Reflex t => Dynamic t a -> Event t b -> Event t (a, b)
attachDyn = attachDynWith (,)
-- | Combine the current value of the 'Dynamic' with the value of the
-- 'Event' each time it occurs.
--
-- Note: `attachDynWith f d` is not the same as `attachWith f (current d)`. See 'tagDyn' for details.
attachDynWith :: Reflex t => (a -> b -> c) -> Dynamic t a -> Event t b -> Event t c
attachDynWith f = attachDynWithMaybe $ \a b -> Just $ f a b
-- | Create a new 'Event' by combining the value at each occurence
-- with the current value of the 'Dynamic' value and possibly
-- filtering if the combining function returns 'Nothing'.
--
-- Note: `attachDynWithMaybe f d` is not the same as `attachWithMaybe f (current d)`. See 'tagDyn' for details.
--
-- Implementation note: the tagging event (carrying the old value) is aligned
-- with the 'Dynamic''s own update event so that, when both fire in the same
-- frame, the freshly updated value wins over the stale 'Behavior' sample.
attachDynWithMaybe :: Reflex t => (a -> b -> Maybe c) -> Dynamic t a -> Event t b -> Event t c
attachDynWithMaybe f d e =
  let e' = attach (current d) e
  in fforMaybe (align e' $ updated d) $ \case
       This (a, b) -> f a b -- Only the tagging event is firing, so use that
       These (_, b) a -> f a b -- Both events are firing, so use the newer value
       That _ -> Nothing -- The tagging event isn't firing, so don't fire
--------------------------------------------------------------------------------
-- Demux
--------------------------------------------------------------------------------
-- | Represents a time changing value together with an 'EventSelector'
-- that can efficiently detect when the underlying Dynamic has a particular value.
-- This is useful for representing data like the current selection of a long list.
--
-- Semantically,
-- > getDemuxed (demux d) k === mapDyn (== k) d
-- However, when getDemuxed is used multiple times, the complexity is only /O(log(n))/,
-- rather than /O(n)/ for mapDyn.
data Demux t k = Demux { demuxValue :: Behavior t k -- ^ the currently selected key
                       , demuxSelector :: EventSelector t (Const2 k Bool) -- ^ fires True/False per key as the selection moves
                       }
-- | Demultiplex an input value to a 'Demux' with many outputs. At any given time, whichever output is indicated by the given 'Dynamic' will be 'True'.
demux :: (Reflex t, Ord k) => Dynamic t k -> Demux t k
demux k = Demux (current k) (fan changes)
  where
    changes = attachWith toPatch (current k) (updated k)
    -- When the selection really moves, switch the old key off and the new key on.
    toPatch k0 k1
      | k0 == k1  = DMap.empty
      | otherwise = DMap.fromList [Const2 k0 :=> False, Const2 k1 :=> True]
--TODO: The pattern of using hold (sample b0) can be reused in various places as a safe way of building certain kinds of Dynamics; see if we can factor this out
-- | Select a particular output of the 'Demux'; this is equivalent to (but much
-- faster than) mapping over the original 'Dynamic' and checking whether it is
-- equal to the given key.
getDemuxed :: (Reflex t, MonadHold t m, Eq k) => Demux t k -> k -> m (Dynamic t Bool)
getDemuxed d k = do
  let changes = select (demuxSelector d) (Const2 k)
      initially = liftM (==k) $ sample $ demuxValue d
  bb <- hold initially $ fmap return changes
  return $ Dynamic (pull $ join $ sample bb) changes
--------------------------------------------------------------------------------
-- collectDyn
--------------------------------------------------------------------------------
--TODO: This whole section is badly in need of cleanup
-- | A heterogeneous list whose element types are given by the type-level list
-- @l@, with every element wrapped in the functor @f@.
data FHList f l where
  FHNil :: FHList f '[]
  FHCons :: f e -> FHList f l -> FHList f (e ': l)
-- Two pointers are equal iff they address the same position; equal positions
-- imply equal pointed-to types, witnessed by 'Refl'.
instance GEq (HListPtr l) where
  HHeadPtr `geq` HHeadPtr = Just Refl
  HHeadPtr `geq` HTailPtr _ = Nothing
  HTailPtr _ `geq` HHeadPtr = Nothing
  HTailPtr a `geq` HTailPtr b = a `geq` b
-- Positional ordering: head before tail, recursing structurally.
instance GCompare (HListPtr l) where -- Warning: This ordering can't change, dmapTo*HList will break
  HHeadPtr `gcompare` HHeadPtr = GEQ
  HHeadPtr `gcompare` HTailPtr _ = GLT
  HTailPtr _ `gcompare` HHeadPtr = GGT
  HTailPtr a `gcompare` HTailPtr b = a `gcompare` b
-- | A typed index into the type-level list @l@, selecting an element of type @a@.
data HListPtr l a where
  HHeadPtr :: HListPtr (h ': t) h
  HTailPtr :: HListPtr t a -> HListPtr (h ': t) a
-- | Convert a heterogeneous list to a 'DMap' keyed by position.  Each tail
-- element's key is re-pointed through 'HTailPtr' as the recursion unwinds.
fhlistToDMap :: forall f l. FHList f l -> DMap (WrapArg f (HListPtr l))
fhlistToDMap = DMap.fromList . go
  where go :: forall l'. FHList f l' -> [DSum (WrapArg f (HListPtr l'))]
        go = \case
          FHNil -> []
          FHCons h t -> (WrapArg HHeadPtr :=> h) : map (\(WrapArg p :=> v) -> WrapArg (HTailPtr p) :=> v) (go t)
-- | Rebuild heterogeneous lists from position-keyed associations.  Inputs are
-- assumed to be sorted by position (head first), as produced by 'DMap.toList'.
class RebuildSortedHList l where
  rebuildSortedFHList :: [DSum (WrapArg f (HListPtr l))] -> FHList f l
  rebuildSortedHList :: [DSum (HListPtr l)] -> HList l
instance RebuildSortedHList '[] where
  rebuildSortedFHList [] = FHNil
  rebuildSortedHList [] = HNil
-- The partial patterns below are justified by the sortedness invariant: for a
-- non-empty type-level list, the first association must be keyed by HHeadPtr,
-- and the remaining keys all go through HTailPtr and are stripped one level.
instance RebuildSortedHList t => RebuildSortedHList (h ': t) where
  rebuildSortedFHList ((WrapArg HHeadPtr :=> h) : t) = FHCons h $ rebuildSortedFHList $ map (\(WrapArg (HTailPtr p) :=> v) -> WrapArg p :=> v) t
  rebuildSortedHList ((HHeadPtr :=> h) : t) = HCons h $ rebuildSortedHList $ map (\(HTailPtr p :=> v) -> p :=> v) t
-- | Rebuild a heterogeneous list from a position-keyed 'DMap'; 'DMap.toList'
-- yields the associations in ascending (i.e. positional) key order.
dmapToHList :: forall l. RebuildSortedHList l => DMap (HListPtr l) -> HList l
dmapToHList m = rebuildSortedHList (DMap.toList m)
-- | Turn a heterogeneous list of 'Dynamic's into a 'Dynamic' heterogeneous
-- list, by distributing through the positional 'DMap' representation.
distributeFHListOverDyn :: forall t m l. (Reflex t, MonadHold t m, RebuildSortedHList l) => FHList (Dynamic t) l -> m (Dynamic t (HList l))
distributeFHListOverDyn l = mapDyn dmapToHList =<< distributeDMapOverDyn (fhlistToDMap l)
{-
distributeFHListOverDyn l = do
  let ec = undefined
      c0 = pull $ sequenceFHList $ natMap (sample . current) l
  bbc <- hold c0 $ fmap constant ec
  let bc = pull $ sample =<< sample bbc
  return $ Dynamic bc ec
-}
-- | Witness that every element of @l@ can be wrapped in the functor @f@,
-- with conversions between the wrapped 'HList' and the 'FHList' forms.
class AllAreFunctors (f :: a -> *) (l :: [a]) where
  type FunctorList f l :: [*]
  toFHList :: HList (FunctorList f l) -> FHList f l
  fromFHList :: FHList f l -> HList (FunctorList f l)
instance AllAreFunctors f '[] where
  type FunctorList f '[] = '[]
  toFHList HNil = FHNil
  fromFHList FHNil = HNil
instance AllAreFunctors f t => AllAreFunctors f (h ': t) where
  type FunctorList f (h ': t) = f h ': FunctorList f t
  toFHList (a `HCons` b) = a `FHCons` toFHList b
  fromFHList (a `FHCons` b) = a `HCons` fromFHList b
-- | Collect a tuple-like structure of 'Dynamic's (e.g. @(Dynamic t x, Dynamic t y)@)
-- into a single 'Dynamic' of the corresponding plain structure (e.g. @(x, y)@),
-- going through the 'HList' representations of both sides.
collectDyn :: ( RebuildSortedHList (HListElems b)
              , IsHList a, IsHList b
              , AllAreFunctors (Dynamic t) (HListElems b)
              , Reflex t, MonadHold t m
              , HListElems a ~ FunctorList (Dynamic t) (HListElems b)
              ) => a -> m (Dynamic t b)
collectDyn ds =
  mapDyn fromHList =<< distributeFHListOverDyn (toFHList $ toHList ds)
-- Poor man's Generic
-- | Types isomorphic to a heterogeneous list of their components; instances
-- are provided for tuples of the arities used by 'collectDyn' callers.
class IsHList a where
  type HListElems a :: [*]
  toHList :: a -> HList (HListElems a)
  fromHList :: HList (HListElems a) -> a
instance IsHList (a, b) where
  type HListElems (a, b) = [a, b]
  toHList (a, b) = hBuild a b
  fromHList (a `HCons` b `HCons` HNil) = (a, b)
instance IsHList (a, b, c, d) where
  type HListElems (a, b, c, d) = [a, b, c, d]
  toHList (a, b, c, d) = hBuild a b c d
  fromHList (a `HCons` b `HCons` c `HCons` d `HCons` HNil) = (a, b, c, d)
instance IsHList (a, b, c, d, e, f) where
  type HListElems (a, b, c, d, e, f) = [a, b, c, d, e, f]
  toHList (a, b, c, d, e, f) = hBuild a b c d e f
  fromHList (a `HCons` b `HCons` c `HCons` d `HCons` e `HCons` f `HCons` HNil) = (a, b, c, d, e, f)
| phadej/reflex | src/Reflex/Dynamic.hs | bsd-3-clause | 21,307 | 0 | 26 | 5,379 | 6,217 | 3,240 | 2,977 | 283 | 5 |
module HsCmp (
cmp
, cmp'
, compareF
, CmpResult (..)
)
where
import Prelude hiding (lines)
import System.IO.MMap
import Data.ByteString
import Data.Word
import Data.Word8
-- | Outcome of comparing two byte streams: identical, or the 1-based byte and
-- line position at which they first differ.
data CmpResult
  = Same
  | Differ { bytes :: Int, lines :: Int }
-- | 'Same' renders as the empty string (mirroring the silence of the Unix
-- @cmp@ tool on identical inputs); a difference reports its position.
instance Show CmpResult where
  show Same          = ""
  show (Differ b l)  = concat ["differ: byte ", show b, ", line ", show l]
-- | Structural equality over both constructors.
instance Eq CmpResult where
  Same         == Same         = True
  Differ b1 l1 == Differ b2 l2 = b1 == b2 && l1 == l2
  _            == _            = False
-- | Core comparison loop over raw bytes, threading 1-based byte and line
-- counters; a matching newline on both sides advances the line counter.
--
-- Bug fix: the original clauses only covered lists of equal length, so
-- comparing inputs of different lengths fell through every clause and crashed
-- with a pattern-match failure.  The final catch-all now reports a difference
-- at the current position when one input ends before the other (EOF), which
-- matches the behaviour of the Unix @cmp@ tool.
cmp' :: Int -> Int -> [Word8] -> [Word8] -> CmpResult
cmp' _ _ [] [] = Same
cmp' bytes lines (x:xs) (y:ys)
  | x == _lf && x==y = cmp' (bytes+1) (lines+1) xs ys
  | x == y = cmp' (bytes+1) lines xs ys
  | otherwise = Differ bytes lines
cmp' bytes lines _ _ = Differ bytes lines
-- | Compare two strict 'ByteString's, reporting the first differing position.
-- The explicit signature was previously missing (a @-Wmissing-signatures@
-- warning) and is part of the module's export surface.
cmp :: ByteString -> ByteString -> CmpResult
cmp xs ys = cmp' 1 1 (unpack xs) (unpack ys)
-- | Memory-map both files and compare their full contents.
compareF :: FilePath -> FilePath -> IO CmpResult
compareF x y = do
  contentsX <- mmapFileByteString x Nothing
  contentsY <- mmapFileByteString y Nothing
  return (cmp contentsX contentsY)
| yaccz/hscmp | HsCmp.hs | bsd-3-clause | 1,030 | 0 | 11 | 307 | 443 | 226 | 217 | 34 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
module Pack6 where
import Data.Functor.Identity
import Data.Maybe
import Data.Map (Map)
import Data.Set (Set)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Control.Lens
-- | A pair of lists, the running example for the lens exercises below.
s1 :: ([Int], [Bool])
s1 = ([1], [True])
-- | A list of pairs (the "zipped" counterpart shape to 's1').
pl1 :: [(Int, Bool)]
pl1 = [(1, True), (2, False)]
-- | Rewrite only the first component of 's1' to a 'Set' via the '_1' lens.
test11 :: (Set Int, [Bool])
test11 = over _1 Set.fromList s1
-- | Rewrite only the second component of 's1' to a 'Set' via the '_2' lens.
test12 :: ([Int], Set Bool)
test12 = over _2 Set.fromList s1
-- NG: applying the rank-2 function through a single lens only rewrites the
-- first component, so the result type is @(g a, f b)@ rather than @(g a, g b)@;
-- see 'mapTuple' below for the version that transforms both components.
mapTuple1 :: (forall x. f x -> g x) -> (f a, f b) -> (g a, f b)
mapTuple1 f = over _1 f
-- OK: with one monomorphic function per component, no rank-2 type is needed;
-- the first component is rewritten, then the second.
mapTuple2 :: (f a -> g a) -> (f b -> g b) -> (f a, f b) -> (g a, g b)
mapTuple2 f g p = over _2 g (over _1 f p)
-- OK: a single rank-2 function (constrained by 'Ord' so 'Set.fromList' fits)
-- can be threaded through both components one lens at a time.
mapTuple :: (Ord a, Ord b) =>
            (forall x. Ord x => f x -> g x) -> (f a, f b) -> (g a, g b)
mapTuple f = over _2 f . over _1 f
-- | 'mapTuple' applied to the running example.
test1 :: (Set Int, Set Bool)
test1 = mapTuple Set.fromList s1
| notae/haskell-exercise | pack/Pack6.hs | bsd-3-clause | 898 | 0 | 11 | 215 | 481 | 264 | 217 | 27 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE UndecidableInstances #-}
module WeiXin.PublicPlatform.Yesod.Site.Function
( module WeiXin.PublicPlatform.Yesod.Site.Function
, module WeiXin.PublicPlatform.Yesod.Site.Data
)where
-- {{{1 imports
import ClassyPrelude
#if MIN_VERSION_base(4, 13, 0)
-- import Control.Monad (MonadFail(..))
#else
import Control.DeepSeq (($!!))
#endif
import Yesod
import Control.Lens
import qualified Control.Exception.Safe as ExcSafe
import Network.Wreq
import Control.Monad.Logger
import Control.Monad.Trans.Except
import Control.Monad.Trans.Maybe
import qualified Data.ByteString.Lazy as LB
import qualified Data.Conduit.List as CL
import qualified Data.Map.Strict as Map
import Data.Conduit
import Data.Time (addUTCTime, diffUTCTime, NominalDiffTime)
import Database.Persist.Sql
import Database.Persist.TX.Utils
import WeiXin.PublicPlatform.Security
import WeiXin.PublicPlatform.Media
import WeiXin.PublicPlatform.WS
import WeiXin.PublicPlatform.EndUser
import WeiXin.PublicPlatform.InMsgHandler
import WeiXin.PublicPlatform.Yesod.Site.Data
import WeiXin.PublicPlatform.Yesod.Model
import Yesod.Compat
-- }}}1
-- | Handler: persist every raw incoming message (the decrypted form) to the
-- database.
data StoreInMsgToDB m = StoreInMsgToDB
                        WxppDbRunner
                        -- function to run DB actions
                        (Bool -> WxppInMsgRecordId -> WxppBriefMediaID -> m ())
                        -- function to download a media file;
                        -- downloading asynchronously is recommended
type instance WxppInMsgProcessResult (StoreInMsgToDB m) = WxppInMsgHandlerResult
instance JsonConfigable (StoreInMsgToDB m) where
    type JsonConfigableUnconfigData (StoreInMsgToDB m) =
            ( WxppDbRunner
            , Bool -> WxppInMsgRecordId -> WxppBriefMediaID -> m ()
            )
    -- Config key used to select this handler in the JSON configuration.
    isNameOfInMsgHandler _ = ( == "db-store-all" )
    -- No JSON-tunable fields: the handler is built solely from the supplied pair.
    parseWithExtraData _ (x,y) _obj = return $ StoreInMsgToDB x y
-- As a plain message handler this type is a no-op (it only logs a warning);
-- its real work happens in its 'IsWxppInMsgProcMiddleware' instance.
instance (MonadIO m, MonadLogger m)
    => IsWxppInMsgProcessor m (StoreInMsgToDB m) where
    processInMsg (StoreInMsgToDB {}) _cache _app_info _bs _ime = do
        $logWarnS wxppLogSource $
            "StoreInMsgToDB now do nothing when used as incoming message handler"
        return $ Right []
instance (MonadIO m, MonadLoggerIO m)
    => IsWxppInMsgProcMiddleware m (StoreInMsgToDB m) where
    -- Before any handler runs: record the raw message in the DB, then kick off
    -- downloads for any temporary media the message references.  Returning
    -- 'Nothing' (via 'mzero') aborts further processing for duplicates.
    preProcInMsg (StoreInMsgToDB db_runner media_downloader) _cache app_info bs ime = runMaybeT $ do
        now <- liftIO getCurrentTime
        log_func <- askLoggerIO
        (msg_record_id, (is_video, mids)) <- mapMaybeT (liftIO . flip runLoggingT log_func . runWxppDB db_runner) $ do
            let m_to = Just $ wxppInToUserName ime
                m_from = Just $ wxppInFromUserName ime
                m_ctime = Just $ wxppInCreatedTime ime
                m_msg_id = wxppInMessageID ime
            old_or_msg_record_id <- lift $ insertBy $ WxppInMsgRecord
                            (Just app_id)
                            m_to m_from m_ctime m_msg_id
                            (LB.toStrict bs)
                            now
            msg_record_id <- case old_or_msg_record_id of
                -- insertBy returns Left on a unique-constraint hit, i.e. the
                -- platform re-delivered a message we already stored: bail out.
                Left (Entity old_id _) -> do
                    $logWarnS wxppLogSource $
                        "got a duplicate message from WeiXin platform: db id=" <> toPathPiece old_id
                        <>", MsgId=" <> (fromString $ show $ fmap unWxppInMsgID m_msg_id)
                    mzero
                Right x -> return x
            -- save any temporary media data
            (is_video, mids) <-
                case wxppInMessage ime of
                    WxppInMsgImage mid _ -> return (False, [mid])
                    WxppInMsgVoice mid _ _ -> return (False, [mid])
                    WxppInMsgVideo mid mid2 -> return (True, [mid, mid2])
                    _ -> return (False, [])
            return (msg_record_id, (is_video, mids))
        lift $ forM_ mids $ \mid -> do
            media_downloader (not is_video) msg_record_id mid
        return (bs, ime)
      where
        app_id = procAppIdInfoReceiverId app_info
-- | StoreInMsgToDB's preProcInMsg is now only invoked after the message XML
-- has been parsed successfully, so this separate function is provided as the
-- callback for the case where XML parsing fails: the raw payload is still
-- recorded, with all parsed fields left empty.
defaultOnInMsgParseFailed :: forall m. (MonadIO m)
                          => Maybe WxppAppID
                          -- ^ target app id; sometimes this id comes from the
                          -- message itself, so it cannot always be obtained
                          -> LB.ByteString
                          -- ^ the raw message
                          -> ReaderT WxppDbBackend m WxppInMsgRecordId
defaultOnInMsgParseFailed m_app_id lbs = do
    now <- liftIO getCurrentTime
    insert $ WxppInMsgRecord
                m_app_id
                Nothing Nothing Nothing Nothing
                (LB.toStrict lbs)
                now
-- | Handler: refresh the cached WxppOpenIdUnionId (open-id to union-id)
-- mapping records.
data CacheAppOpenIdToUnionId = CacheAppOpenIdToUnionId
                                WxppDbRunner
                                -- ^ function to run DB actions
type instance WxppInMsgProcessResult CacheAppOpenIdToUnionId = WxppInMsgHandlerResult
instance JsonConfigable CacheAppOpenIdToUnionId where
    type JsonConfigableUnconfigData CacheAppOpenIdToUnionId = WxppDbRunner
    -- Config key used to select this handler in the JSON configuration.
    isNameOfInMsgHandler _ = ( == "update-openid-to-unionid" )
    parseWithExtraData _ x _obj = return $ CacheAppOpenIdToUnionId x
-- As a plain message handler this type does nothing (it only logs a warning);
-- the useful behaviour lives in its 'IsWxppInMsgProcMiddleware' instance.
instance (MonadIO m , MonadLogger m) => IsWxppInMsgProcessor m CacheAppOpenIdToUnionId where
    processInMsg (CacheAppOpenIdToUnionId {}) _cache _app_info _bs _ime = runExceptT $ do
        $logWarnS wxppLogSource $
            "CacheAppOpenIdToUnionId now do nothing when used as incoming message handler"
        return []
instance (WxppApiMonad env m
         , ExcSafe.MonadCatch m
         , Functor m
         ) => IsWxppInMsgProcMiddleware m CacheAppOpenIdToUnionId where
    -- On a subscribe event, query the end-user's info from the WeiXin API to
    -- obtain the union-id, and upsert the (app, openid, unionid) cache row.
    -- Errors are logged and swallowed so message processing continues.
    preProcInMsg (CacheAppOpenIdToUnionId db_runner) cache app_info bs ime = do
        let m_subs_or_unsubs = case wxppInMessage ime of
                        (WxppInMsgEvent WxppEvtSubscribe) -> Just True
                        (WxppInMsgEvent (WxppEvtSubscribeAtScene {})) -> Just True
                        (WxppInMsgEvent WxppEvtUnsubscribe) -> Just False
                        _ -> Nothing
        case m_subs_or_unsubs of
            Just True -> do
                err_or <- runExceptT $ do
                    atk <- (tryWxppWsResultE "getting access token" $ liftIO $
                                wxppCacheGetAccessToken cache app_id)
                            >>= maybe (throwE $ "no access token available") (return . fst)
                    let open_id = wxppInFromUserName ime
                    qres <- tryWxppWsResultE "wxppQueryEndUserInfo" $
                                wxppQueryEndUserInfo atk open_id
                    let m_uid = endUserQueryResultUnionID qres
                    now <- liftIO getCurrentTime
                    liftIO $ runWxppDB db_runner $ do
                        void $ insertOrUpdate
                            (WxppUserCachedInfo app_id open_id m_uid now)
                            [ WxppUserCachedInfoUnionId =. m_uid
                            , WxppUserCachedInfoUpdatedTime =. now
                            ]
                case err_or of
                    Left err -> $logErrorS wxppLogSource $ "CacheAppOpenIdToUnionId: " <> err
                    Right () -> return ()
            Just False -> do
                -- On unsubscribe we currently keep the record rather than
                -- deleting it: the openid/unionid pair is presumed stable for
                -- a given user.
                return ()
            _ -> return ()
        return $ Just (bs, ime)
      where
        app_id = procAppIdInfoReceiverId app_info
-- | Per-(app, message) bookkeeping: the time handling started and, once
-- finished, either an error message or the completion time.
type TrackHandledInMsgInnerMap = Map (WxppAppID, WxppInMsgAmostUniqueID)
                                    (UTCTime, Maybe (Either String UTCTime))
-- | Check whether an incoming message has already been handled; if so, do not
-- handle it again.
data TrackHandledInMsg = TrackHandledInMsg
                            NominalDiffTime
                            -- threshold above which handling is reported slow
                            (MVar TrackHandledInMsgInnerMap)
                            -- shared map of in-flight/completed messages
instance JsonConfigable TrackHandledInMsg where
    type JsonConfigableUnconfigData TrackHandledInMsg =
            ( NominalDiffTime
            , MVar TrackHandledInMsgInnerMap
            )
    -- Config key used to select this handler in the JSON configuration.
    isNameOfInMsgHandler _ t = t == "track-handled-in-msg"
    parseWithExtraData _ (x1, x2) _ = return $ TrackHandledInMsg x1 x2
instance (MonadIO m
         , MonadLogger m
         ) => IsWxppInMsgProcMiddleware m TrackHandledInMsg where
    -- Atomically register the message as "in flight".  A message already in
    -- the map is a WeiXin re-delivery: suppress it unless the previous attempt
    -- finished with an error, in which case it is retried.
    preProcInMsg (TrackHandledInMsg _ map_mar) _cache app_info bs ime = do
        let msg_id = almostUniqueIdOfWxppInMsgEntity ime
        now <- liftIO getCurrentTime
        m_prev_rec <- liftIO $ modifyMVar map_mar $
                        \the_map -> do
                            let k = (app_id, msg_id)
                                v = (now, Nothing)
                            -- $!! forces the map to avoid thunk build-up in the MVar
                            return $!!
                                case Map.lookup k the_map of
                                    Nothing -> (Map.insert k v the_map, Nothing)
                                    Just old_v -> (the_map, Just old_v)
        case m_prev_rec of
            Nothing -> do
                -- the normal case: first time we see this message
                return $ Just (bs, ime)
            Just (_prev_start, m_prev_done) -> do
                case m_prev_done of
                    Nothing -> do
                        $logWarnS wxppLogSource $
                            "Duplicate incoming message before previous one could be handled successfully:"
                            <> tshow msg_id
                        return Nothing
                    Just (Left _) -> do
                        -- handled before, but failed
                        $logInfoS wxppLogSource $
                            "Duplicate incoming message with previous one was handled unsuccessfully:"
                            <> tshow msg_id
                        -- retry
                        return $ Just (bs, ime)
                    Just (Right _) -> do
                        -- handled before, and success
                        $logInfoS wxppLogSource $
                            "Duplicate incoming message with previous one was handled successfully:"
                            <> tshow msg_id
                        return Nothing
      where
        app_id = procAppIdInfoReceiverId app_info
    -- Record success (no error) once all handlers have run.
    postProcInMsg (TrackHandledInMsg slow_threshold map_mar) app_info _bs ime res = do
        trackHandleInMsgSaveResult slow_threshold app_id map_mar ime Nothing
        return res
      where
        app_id = procAppIdInfoReceiverId app_info
    -- Record the failure so a re-delivery of this message will be retried.
    onProcInMsgError (TrackHandledInMsg slow_threshold map_mar) app_info _bs ime err = do
        trackHandleInMsgSaveResult slow_threshold app_id map_mar ime (Just err)
      where
        app_id = procAppIdInfoReceiverId app_info
-- | Record the outcome (error message, or success with completion time) of
-- handling a message in the tracking map, prune entries older than 30 minutes,
-- and log a warning when handling took longer than the given threshold.
trackHandleInMsgSaveResult :: (MonadIO m, MonadLogger m)
                           => NominalDiffTime
                           -> WxppAppID
                           -> MVar TrackHandledInMsgInnerMap
                           -> WxppInMsgEntity
                           -> Maybe String
                           -> m ()
trackHandleInMsgSaveResult slow_threshold app_id map_mvar ime m_err = do
    let msg_id = almostUniqueIdOfWxppInMsgEntity ime
    now <- liftIO getCurrentTime
    -- cut-off point: entries started more than 1800 seconds ago are dropped
    let dt = addUTCTime (negate $ fromIntegral (1800 :: Int)) now
    m_val <- liftIO $ modifyMVar map_mvar $
                \the_map -> do
                    let (m_val, new_map) = Map.updateLookupWithKey
                                            (\_ -> Just . second
                                                    (const $ Just $ maybe (Right now) Left m_err)
                                            )
                                            (app_id, msg_id)
                                            the_map
                        -- remove histories that are long ago
                        new_map' = Map.filter ((> dt) . fst) new_map
                    return $!! (new_map', m_val)
    case m_val of
        Nothing -> do
            $logErrorS wxppLogSource $
                "Previous handling info was not found: " <> tshow msg_id
        Just (start_time, _) -> do
            let time_used = diffUTCTime now start_time
            when (time_used > slow_threshold) $ do
                $logWarnS wxppLogSource $
                    "Too slow to handle message " <> tshow msg_id
                    <> ", time used: "
                    <> tshow (realToFrac time_used :: Float) <> " seconds."
-- | Download a media file referenced by an incoming message and store its
-- bytes (plus content type) in the database.  Download failures are logged
-- and swallowed; duplicate inserts are logged with the existing record id.
downloadSaveMediaToDB ::
    ( MonadLogger m
#if MIN_VERSION_classy_prelude(1, 5, 0)
    -- , MonadIO m
    , RunSqlBaseMonad m
#else
    , MonadIO m
#endif
    , ExcSafe.MonadCatch m
#if MIN_VERSION_persistent(2, 0, 0)
    , PersistUnique backend
    , backend ~ PersistEntityBackend WxppStoredMedia
#else
    , PersistUnique m
    , PersistMonadBackend m ~ PersistEntityBackend WxppStoredMedia
#endif
    ) =>
    WxppApiEnv
    -> AccessToken
    -> WxppInMsgRecordId
    -> WxppBriefMediaID
#if MIN_VERSION_persistent(2, 0, 0)
    -> ReaderT backend m ()
#else
    -> m ()
#endif
downloadSaveMediaToDB api_env atk msg_id media_id = do
    err_or_rb <- tryWxppWsResult $ flip runReaderT api_env $ wxppDownloadMedia atk media_id
    case err_or_rb of
        Left err -> do
            $(logErrorS) wxppLogSource $ "Failed to download media '" <> unWxppBriefMediaID media_id
                        <> "': " <> (fromString $ show err)
        Right rb -> do
            now <- liftIO getCurrentTime
            -- insertBy returns Left when a unique constraint already matches,
            -- i.e. the media was stored previously.
            old_or_id <- insertBy $ WxppStoredMedia
                                (accessTokenApp atk)
                                media_id
                                msg_id
                                (LB.toStrict $ rb ^. responseBody)
                                (rb ^. responseHeader "Content-Type")
                                now
            case old_or_id of
                Left (Entity old_id _) -> do
                    $(logWarnS) wxppLogSource $ "Media '" <> unWxppBriefMediaID media_id
                                    <> "' already in DB, record id: "
                                    <> toPathPiece old_id
                Right _ -> return ()
-- | Find the users who have sent messages to the system recently, streaming
-- each open-id together with the time of that user's latest incoming message.
wxppUserLatestActiveTime :: (MonadIO m, MonadResource m) =>
                            UTCTime      -- ^ only examine message history since this time
                            -> WxppAppID
                            -> SourceC (ReaderT WxppDbBackend m) (WxppOpenID, UTCTime)
-- {{{1
wxppUserLatestActiveTime start_time app_id = do
    -- Resolve the actual column/table names so the raw SQL stays in sync with
    -- the persistent schema definitions.
    open_id_fn <- lift $ getFieldName WxppInMsgRecordFrom
    created_time_fn <- lift $ getFieldName WxppInMsgRecordCreatedTime
    app_fn <- lift $ getFieldName WxppInMsgRecordApp
    table_name <- lift $ getTableName (error "WxppInMsgRecord forced" :: WxppInMsgRecord)
    let query = "SELECT "
                <> open_id_fn
                <> ",MAX(" <> created_time_fn <> ")"
                <> " FROM "
                <> table_name
                <> " WHERE "
                <> app_fn <> "= ?"
                <> " AND "
                <> created_time_fn <> ">= ?"
                <> " GROUP BY " <> open_id_fn
    rawQuery query [ toPersistValue app_id, toPersistValue start_time]
        .| CL.mapM (\x -> case x of
                        [v1, v2] -> return $
                                        (,) <$> fromPersistValue v1
                                            <*> fromPersistValue v2
                        _ -> liftIO $ throwIO $ PersistMarshalError $
                                        "Expecting 2 columns, but got "
                                            <> (fromString $ show $ length x)
                    )
        .| CL.mapM (either (liftIO . throwIO . PersistMarshalError) return)
-- }}}1
-- vim: set foldmethod=marker:
| yoo-e/weixin-mp-sdk | WeiXin/PublicPlatform/Yesod/Site/Function.hs | mit | 16,130 | 0 | 24 | 5,837 | 3,148 | 1,591 | 1,557 | -1 | -1 |
-- | Stability: Experimental
module Mahjong.Riichi.FourPlayer where
import Mahjong.Board
-- import Mahjong.Player
import Mahjong.Riichi
import Mahjong.Tile
-- | The state of a four-player riichi game: a 'Board' whose tile type pairs
-- each 'RiichiTile' with its 'Dora' information.
data FourPlayer
  = FourPlayer (Board (Dora, RiichiTile))
  deriving (Show)
-- | House-rule configuration for a four-player game.
data FourPlayerOptions
  = FourPlayerOptions
  { allowNotenRiichi :: Bool
    -- ^ Declaring riichi while you don't have a hand that's in tenpai.
  , optionalYakus    :: YakuOptions }
  deriving (Show)
-- | Settings for yaku that are optional under some rule sets.
data YakuOptions
  = YakuOptions
  { renhou :: RenhouOption
    -- ^ Optional yaku, that is obtained by calling ron before the dealer's
    -- second turn or before any called tile. Only non-dealers may get this
    -- yaku.
  }
  deriving (Show)
-- | How (and whether) the optional renhou yaku is scored.
data RenhouOption
  = RenhouMangan
  | RenhouYakuman
  | RenhouYaku Int
  -- ^ The yaku value is cumulative and is generally set to 5 for the hand to
  -- be at least a mangan.
  | Nashi
  -- ^ Renhou is not awarded at all (Japanese "nashi" = none).
  deriving (Eq, Show)
-- | Errors that can terminate a game prematurely.
data MahjongError
  = AbandonedGame
  deriving (Eq, Show)
| TakSuyu/mahsjong | src/Mahjong/Riichi/FourPlayer.hs | mit | 974 | 0 | 9 | 219 | 155 | 95 | 60 | 25 | 0 |
{-# LANGUAGE OverloadedStrings, DeriveGeneric #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module Dampf.Provision (ProvisionType(..), goProvision) where
import Shelly
import Data.Text (Text)
import qualified Data.Text as T
import Dampf.Types
import Control.Monad.Catch (MonadThrow)
import Control.Monad.IO.Class (MonadIO, liftIO)
import GHC.Generics
default (Text)
-- | Which kind of machine is being provisioned.  'Development' machines skip
-- the firewall (ufw) and TLS (certbot) steps; see 'goProvision'.
data ProvisionType =
    SingleServer | Development | CI
    deriving (Show, Read, Eq, Generic)
-- | Run the provisioning steps for the given target.  Development gets only
-- the core tools, docker, nginx and postgresql; all other targets also get
-- the ufw firewall and certbot.
goProvision :: (MonadIO m, MonadThrow m) => ProvisionType -> m ()
goProvision Development = shelly $ core >> docker >> nginx >> postgresql
goProvision _ = shelly $ core >> ufw >> docker >> nginx >> certbot >> postgresql
-- | Install the baseline tool set every target needs.
core :: Sh ()
core = do
  aptUpdate
  aptInstall ["git", "zile", "curl", "lsb-core",
              "software-properties-common", "fail2ban"]
-- | Install docker via the get.docker.com convenience script, if not present.
-- NOTE(review): the "| sh" pipe is passed inside the argument list — this
-- relies on Shelly's 'bash_' handing the whole string to a shell; verify
-- against Shelly's bash_ semantics.
docker :: Sh ()
docker = unlessExistsCmd "docker" $
  bash_ "curl" ["https://get.docker.com/ | sh"]
-- | Install the nginx web server.
nginx :: Sh ()
nginx = aptInstall ["nginx"]
-- | Install PostgreSQL 10 from the official PGDG apt repository, if psql is
-- not already present.
postgresql :: Sh ()
postgresql = unlessExistsCmd "psql" $ do
  -- add the distribution-specific PGDG repository and its signing key
  bash_ "add-apt-repository" ["\"deb http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main\""]
  bash_ "wget" ["--quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add"]
  aptUpdate
  aptInstall ["postgresql-10"]
-- | Install and configure the UFW firewall: deny inbound by default, allow
-- outbound, open SSH/HTTP/HTTPS, then enable it.
ufw :: Sh ()
ufw = unlessExistsCmd "ufw" $ do
  aptInstall ["ufw"]
  mapM_ (run_ "ufw")
    [ ["default","deny","incoming"]
    , ["default","allow","outgoing"]
    , ["allow","ssh"]
    , ["allow","80"]
    , ["allow","443"]
    , ["enable"]
    ]
-- | Install certbot-auto, register the account, and schedule twice-daily
-- certificate renewal via cron, if certbot-auto is not already present.
certbot :: Sh ()
certbot = unlessExistsCmd "certbot-auto" $ do
  run_ "wget" ["-O","/usr/local/bin/certbot-auto","https://dl.eff.org/certbot-auto"]
  run_ "chmod" ["+x","/usr/local/bin/certbot-auto"]
  run_ "certbot-auto" ["register","--email","dampf@diffusionkinetics.com","--agree-tos","--noninteractive"]
  -- append a renewal entry to the existing crontab (if any)
  bash_ "" ["(crontab -l 2>/dev/null ; echo \"42 */12 * * * certbot renew --allow-subset-of-names\") | crontab"]
------------------------------------------------------
-- TOOLS
------------------------------------------------------
-- | Does an executable with the given name exist on the PATH?  Uses @which@
-- with error-exit disabled and inspects the exit code.
existsCmd :: Text -> Sh Bool
existsCmd cmd = do
  errExit False $ run_ "which" [cmd]
  code <- lastExitCode
  return (code == 0)
-- | Run the action only when the given executable is missing from the PATH.
unlessExistsCmd :: Text -> Sh () -> Sh ()
unlessExistsCmd cmd action = do
  present <- existsCmd cmd
  when (not present) action
-- | Refresh the apt package index quietly.
aptUpdate :: Sh ()
aptUpdate = run_ "apt-get" ["-yqq", "update"]
-- | Quietly install the given apt packages non-interactively.
aptInstall :: [Text] -> Sh ()
aptInstall pkgs = run_ "apt-get" ("-yqq" : "install" : pkgs)
{-
sudo apt-get -yqq update
sudo apt-get -yqq install curl lsb-core
curl https://get.docker.com/ | sudo sh
sudo apt-get -yqq install nginx
sudo add-apt-repository "deb http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main"
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add
sudo apt-get -yqq update
sudo apt-get -yqq install postgresql-10
-} | filopodia/open | dampf/lib/Dampf/Provision.hs | mit | 2,937 | 0 | 10 | 449 | 736 | 386 | 350 | 64 | 1 |
module Timing ( getTick
, timeIt
) where
import Control.Exception
import Data.Time.Clock
import Control.Monad.IO.Class
import System.IO.Unsafe
-- Timing functions
-- TODO: Consider just using the criterion package for all performance measurements
-- http://hackage.haskell.org/package/criterion
-- | Program start reference point, captured once.  NOINLINE is required so the
-- 'unsafePerformIO' is shared (evaluated at most once) rather than inlined and
-- re-executed at every use site, which would yield different times.
{-# NOINLINE startTime #-}
startTime :: UTCTime
startTime = unsafePerformIO getCurrentTime
-- | Seconds elapsed since 'startTime' was first forced.
-- In seconds
getTick :: IO Double
getTick = do
  -- Make sure startTime has been evaluated, otherwise the getCurrentTime in the
  -- unsafePerformIO might be evaluated after the getCurrentTime here, returning a
  -- negative tick on the first call to getTick
  st <- evaluate startTime
  (realToFrac . flip diffUTCTime st) <$> getCurrentTime
-- | Run an action and return the wall-clock seconds it took together with its
-- result.
timeIt :: MonadIO m => m a -> m (Double, a)
timeIt action = do
  before <- liftIO getCurrentTime
  result <- action
  after  <- liftIO getCurrentTime
  return (realToFrac (diffUTCTime after before), result)
| blitzcode/rust-exp | hs-src/Timing.hs | mit | 969 | 0 | 10 | 208 | 183 | 98 | 85 | 19 | 1 |
{-# LANGUAGE QuasiQuotes, TemplateHaskell #-}
module Language.Java.Paragon.QuasiQuoter where
--import Language.Haskell.TH.Syntax
--import qualified Language.Haskell.TH as TH
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Lift
--import Language.Haskell.TH.Lib
import Language.Haskell.Meta.Parse
--import Data.Generics (extQ)
--import Data.Data
import Language.Java.Paragon.Parser
-- import Language.Java.Paragon.Syntax (ambName)
import Language.Java.Paragon.Lexer
import Language.Java.Paragon.Interaction
import Language.Java.Paragon.QuasiQuoter.Lift ()
import Prelude hiding(exp)
import Text.ParserCombinators.Parsec
quasiQuoterModule :: String
quasiQuoterModule = libraryBase ++ ".QuasiQuoter"
-- | Extract the 'Right' value, calling 'panic' on 'Left'.  Only applied to
-- parse results that the quasi-quoters below expect to succeed.
fromRight :: Either a b -> b
fromRight (Right res) = res
fromRight _ = panic (quasiQuoterModule ++ ".fromRight") ""
-- | Build a 'QuasiQuoter' from a Paragon parser.  Expression contexts lift the
-- parse result directly; pattern contexts re-parse the 'show'n result as a
-- Haskell pattern.  Type and declaration contexts are unsupported and 'panic'.
parserQQ :: (Lift a,Show a) => GenParser (L Token) () a -> QuasiQuoter
parserQQ f = QuasiQuoter{
      quoteExp = lift . fromRight . parser f,
      quotePat = return .fromRight . parsePat .show . fromRight . parser f,
      quoteType = panic (quasiQuoterModule ++ ".parserQQ: quoteType") "",
      quoteDec = panic (quasiQuoterModule ++ ".parserQQ: quoteDec" ) ""
    }
-- | Quasi-quoters for the basic syntactic categories (names, expressions,
-- types, statements and left-hand sides).
nameQQ, expQQ, typeQQ, stmtQQ, lhsQQ :: QuasiQuoter
nameQQ = parserQQ name
expQQ = parserQQ exp
typeQQ = parserQQ ttype
stmtQQ = parserQQ stmt
lhsQQ = parserQQ lhs
-- | Quasi-quoters for declaration-level constructs (imports, members,
-- modifiers, parameters, statements, classes and lock properties).
impDeclQQ,
  varDeclQQ, methodBodyQQ, memberDeclQQ, fieldDeclQQ, methodDeclQQ,
  modifiersQQ, formalParamQQ, blockStmtQQ, classDeclQQ, lockPropQQ :: QuasiQuoter
varDeclQQ = parserQQ varDecl
methodBodyQQ = parserQQ methodBody
memberDeclQQ = parserQQ memberDecl
fieldDeclQQ = parserQQ fieldDecl
methodDeclQQ = parserQQ methodDecl
modifiersQQ = parserQQ (list modifier)
formalParamQQ = parserQQ formalParam
blockStmtQQ = parserQQ blockStmt
classDeclQQ = parserQQ classDecl
lockPropQQ = parserQQ lockProperties
| bvdelft/parac2 | src/Language/Java/Paragon/QuasiQuoter.hs | bsd-3-clause | 1,996 | 2 | 10 | 344 | 450 | 263 | 187 | 42 | 1 |
{-# Language PatternGuards #-}
module Blub
( blub
, foo
, bar
) where
import Control.Applicative
(r, t, z)
import Ugah.Blub
( a
, b
, c
)
import Data.Text (Text())
-- NOTE(review): this file appears to be an hsimport golden-test fixture, so
-- its exact content may be intentional; confirm before reformatting.
f :: Int -> Int
f = (+ 3)
r :: Int -> Int
r =
| dan-t/hsimport | tests/goldenFiles/SymbolTest47.hs | bsd-3-clause | 238 | 1 | 7 | 75 | 91 | 57 | 34 | -1 | -1 |
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file.
-- | Internal utilities only.
--
module Haxl.Core.Util
( compose
, textShow
) where
import Data.Text (Text)
import qualified Data.Text as Text
-- | Composes a list of endofunctions, outermost first:
-- @compose [f, g, h] x == f (g (h x))@.
compose :: [a -> a] -> a -> a
compose fs x = foldr ($) x fs
-- | Render a 'Show'able value as strict 'Text'.
textShow :: (Show a) => a -> Text
textShow x = Text.pack (show x)
| simonmar/Haxl | Haxl/Core/Util.hs | bsd-3-clause | 480 | 0 | 7 | 97 | 102 | 64 | 38 | 9 | 1 |
-- {-# LANGUAGE Haskell201x #-}
module Maybe2 where
-- | A re-implementation of 'Maybe', used to write the class instances by hand.
data Maybe' a = Nothing' | Just' a
-- | Map over the payload, if any; 'Nothing'' is preserved.
instance Functor Maybe' where
  fmap _ Nothing'  = Nothing'
  fmap g (Just' x) = Just' (g x)
-- | 'pure' wraps with 'Just''; application short-circuits on a missing
-- function, otherwise maps the function over the argument.
instance Applicative Maybe' where
  pure = Just'
  Nothing' <*> _ = Nothing'
  Just' g  <*> m = fmap g m
-- | Bind feeds the payload to the continuation; 'Nothing'' short-circuits.
instance Monad Maybe' where
  Just' x  >>= k = k x
  Nothing' >>= _ = Nothing'
| alanz/Hs2010To201x | testcases/H201x/Maybe2.hs | bsd-3-clause | 376 | 0 | 10 | 104 | 141 | 72 | 69 | 11 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.