code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
import Hangman
import System.Environment
-- | Entry point: pick a word list from the command line (or the default)
-- and start the game.
main :: IO ()
main = getArgs >>= parseArguments >>= startGame
-- If the user supplies at least one argument, the first one probably names
-- their own word list; otherwise we fall back to the internal word list.
parseArguments :: [String] -> IO String
parseArguments args =
  case args of
    []             -> return "../data/wordlist"
    (wordList : _) -> return wordList
|
skejserjensen/Hangman
|
haskell/src/Main.hs
|
gpl-3.0
| 374
| 0
| 7
| 62
| 71
| 38
| 33
| 6
| 1
|
module Accelerate
( run
) where
import Data.Array.Accelerate.LLVM.Native
|
Qinka/reimagined-pancake
|
toy-backend/toy-backend-classify/src/knn/llvm/native/Accelerate.hs
|
gpl-3.0
| 98
| 0
| 4
| 33
| 17
| 12
| 5
| 3
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidPublisher.Edits.ExpansionFiles.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the APK\'s Expansion File configuration to reference another
-- APK\'s Expansion Files. To add a new Expansion File use the Upload
-- method.
--
-- /See:/ <https://developers.google.com/android-publisher Google Play Developer API Reference> for @androidpublisher.edits.expansionfiles.update@.
module Network.Google.Resource.AndroidPublisher.Edits.ExpansionFiles.Update
(
-- * REST Resource
EditsExpansionFilesUpdateResource
-- * Creating a Request
, editsExpansionFilesUpdate
, EditsExpansionFilesUpdate
-- * Request Lenses
, eefuPackageName
, eefuAPKVersionCode
, eefuPayload
, eefuExpansionFileType
, eefuEditId
) where
import Network.Google.AndroidPublisher.Types
import Network.Google.Prelude
-- | A resource alias for @androidpublisher.edits.expansionfiles.update@ method which the
-- 'EditsExpansionFilesUpdate' request conforms to.
type EditsExpansionFilesUpdateResource =
"androidpublisher" :>
"v2" :>
"applications" :>
Capture "packageName" Text :>
"edits" :>
Capture "editId" Text :>
"apks" :>
Capture "apkVersionCode" (Textual Int32) :>
"expansionFiles" :>
Capture "expansionFileType"
EditsExpansionFilesUpdateExpansionFileType
:>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] ExpansionFile :>
Put '[JSON] ExpansionFile
-- | Updates the APK\'s Expansion File configuration to reference another
-- APK\'s Expansion Files. To add a new Expansion File use the Upload
-- method.
--
-- /See:/ 'editsExpansionFilesUpdate' smart constructor.
data EditsExpansionFilesUpdate = EditsExpansionFilesUpdate'
{ _eefuPackageName :: !Text
, _eefuAPKVersionCode :: !(Textual Int32)
, _eefuPayload :: !ExpansionFile
, _eefuExpansionFileType :: !EditsExpansionFilesUpdateExpansionFileType
, _eefuEditId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'EditsExpansionFilesUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eefuPackageName'
--
-- * 'eefuAPKVersionCode'
--
-- * 'eefuPayload'
--
-- * 'eefuExpansionFileType'
--
-- * 'eefuEditId'
editsExpansionFilesUpdate
:: Text -- ^ 'eefuPackageName'
-> Int32 -- ^ 'eefuAPKVersionCode'
-> ExpansionFile -- ^ 'eefuPayload'
-> EditsExpansionFilesUpdateExpansionFileType -- ^ 'eefuExpansionFileType'
-> Text -- ^ 'eefuEditId'
-> EditsExpansionFilesUpdate
editsExpansionFilesUpdate pEefuPackageName_ pEefuAPKVersionCode_ pEefuPayload_ pEefuExpansionFileType_ pEefuEditId_ =
EditsExpansionFilesUpdate'
{ _eefuPackageName = pEefuPackageName_
, _eefuAPKVersionCode = _Coerce # pEefuAPKVersionCode_
, _eefuPayload = pEefuPayload_
, _eefuExpansionFileType = pEefuExpansionFileType_
, _eefuEditId = pEefuEditId_
}
-- | Unique identifier for the Android app that is being updated; for
-- example, \"com.spiffygame\".
eefuPackageName :: Lens' EditsExpansionFilesUpdate Text
eefuPackageName
= lens _eefuPackageName
(\ s a -> s{_eefuPackageName = a})
-- | The version code of the APK whose Expansion File configuration is being
-- read or modified.
eefuAPKVersionCode :: Lens' EditsExpansionFilesUpdate Int32
eefuAPKVersionCode
= lens _eefuAPKVersionCode
(\ s a -> s{_eefuAPKVersionCode = a})
. _Coerce
-- | Multipart request metadata.
eefuPayload :: Lens' EditsExpansionFilesUpdate ExpansionFile
eefuPayload
= lens _eefuPayload (\ s a -> s{_eefuPayload = a})
-- | The expansion-file type being updated — presumably \"main\" or
-- \"patch\"; confirm against 'EditsExpansionFilesUpdateExpansionFileType'.
eefuExpansionFileType :: Lens' EditsExpansionFilesUpdate EditsExpansionFilesUpdateExpansionFileType
eefuExpansionFileType
  = lens _eefuExpansionFileType
      (\ s a -> s{_eefuExpansionFileType = a})
-- | Unique identifier for this edit.
eefuEditId :: Lens' EditsExpansionFilesUpdate Text
eefuEditId
= lens _eefuEditId (\ s a -> s{_eefuEditId = a})
instance GoogleRequest EditsExpansionFilesUpdate
where
type Rs EditsExpansionFilesUpdate = ExpansionFile
type Scopes EditsExpansionFilesUpdate =
'["https://www.googleapis.com/auth/androidpublisher"]
requestClient EditsExpansionFilesUpdate'{..}
= go _eefuPackageName _eefuEditId _eefuAPKVersionCode
_eefuExpansionFileType
(Just AltJSON)
_eefuPayload
androidPublisherService
where go
= buildClient
(Proxy :: Proxy EditsExpansionFilesUpdateResource)
mempty
|
rueshyna/gogol
|
gogol-android-publisher/gen/Network/Google/Resource/AndroidPublisher/Edits/ExpansionFiles/Update.hs
|
mpl-2.0
| 5,582
| 0
| 19
| 1,285
| 645
| 381
| 264
| 104
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Buyongoogleprograms.Onboard
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Onboards the BoG program in your Merchant Center account. By using this
-- method, you agree to the [Terms of
-- Service](https:\/\/merchants.google.com\/mc\/termsofservice\/transactions\/US\/latest).
-- Calling this method is only possible if the authenticated account is the
-- same as the merchant id in the request. Calling this method multiple
-- times will only accept Terms of Service if the latest version is not
-- currently signed.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.buyongoogleprograms.onboard@.
module Network.Google.Resource.Content.Buyongoogleprograms.Onboard
(
-- * REST Resource
BuyongoogleprogramsOnboardResource
-- * Creating a Request
, buyongoogleprogramsOnboard
, BuyongoogleprogramsOnboard
-- * Request Lenses
, boXgafv
, boMerchantId
, boUploadProtocol
, boRegionCode
, boAccessToken
, boUploadType
, boPayload
, boCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.buyongoogleprograms.onboard@ method which the
-- 'BuyongoogleprogramsOnboard' request conforms to.
type BuyongoogleprogramsOnboardResource =
"content" :>
"v2.1" :>
Capture "merchantId" (Textual Int64) :>
"buyongoogleprograms" :>
Capture "regionCode" Text :>
"onboard" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] OnboardBuyOnGoogleProgramRequest :>
Post '[JSON] ()
-- | Onboards the BoG program in your Merchant Center account. By using this
-- method, you agree to the [Terms of
-- Service](https:\/\/merchants.google.com\/mc\/termsofservice\/transactions\/US\/latest).
-- Calling this method is only possible if the authenticated account is the
-- same as the merchant id in the request. Calling this method multiple
-- times will only accept Terms of Service if the latest version is not
-- currently signed.
--
-- /See:/ 'buyongoogleprogramsOnboard' smart constructor.
data BuyongoogleprogramsOnboard =
BuyongoogleprogramsOnboard'
{ _boXgafv :: !(Maybe Xgafv)
, _boMerchantId :: !(Textual Int64)
, _boUploadProtocol :: !(Maybe Text)
, _boRegionCode :: !Text
, _boAccessToken :: !(Maybe Text)
, _boUploadType :: !(Maybe Text)
, _boPayload :: !OnboardBuyOnGoogleProgramRequest
, _boCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'BuyongoogleprogramsOnboard' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'boXgafv'
--
-- * 'boMerchantId'
--
-- * 'boUploadProtocol'
--
-- * 'boRegionCode'
--
-- * 'boAccessToken'
--
-- * 'boUploadType'
--
-- * 'boPayload'
--
-- * 'boCallback'
buyongoogleprogramsOnboard
:: Int64 -- ^ 'boMerchantId'
-> Text -- ^ 'boRegionCode'
-> OnboardBuyOnGoogleProgramRequest -- ^ 'boPayload'
-> BuyongoogleprogramsOnboard
buyongoogleprogramsOnboard pBoMerchantId_ pBoRegionCode_ pBoPayload_ =
BuyongoogleprogramsOnboard'
{ _boXgafv = Nothing
, _boMerchantId = _Coerce # pBoMerchantId_
, _boUploadProtocol = Nothing
, _boRegionCode = pBoRegionCode_
, _boAccessToken = Nothing
, _boUploadType = Nothing
, _boPayload = pBoPayload_
, _boCallback = Nothing
}
-- | V1 error format.
boXgafv :: Lens' BuyongoogleprogramsOnboard (Maybe Xgafv)
boXgafv = lens _boXgafv (\ s a -> s{_boXgafv = a})
-- | Required. The ID of the account.
boMerchantId :: Lens' BuyongoogleprogramsOnboard Int64
boMerchantId
= lens _boMerchantId (\ s a -> s{_boMerchantId = a})
. _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
boUploadProtocol :: Lens' BuyongoogleprogramsOnboard (Maybe Text)
boUploadProtocol
= lens _boUploadProtocol
(\ s a -> s{_boUploadProtocol = a})
-- | The program region code [ISO 3166-1
-- alpha-2](https:\/\/en.wikipedia.org\/wiki\/ISO_3166-1_alpha-2).
-- Currently only US is available.
boRegionCode :: Lens' BuyongoogleprogramsOnboard Text
boRegionCode
= lens _boRegionCode (\ s a -> s{_boRegionCode = a})
-- | OAuth access token.
boAccessToken :: Lens' BuyongoogleprogramsOnboard (Maybe Text)
boAccessToken
= lens _boAccessToken
(\ s a -> s{_boAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
boUploadType :: Lens' BuyongoogleprogramsOnboard (Maybe Text)
boUploadType
= lens _boUploadType (\ s a -> s{_boUploadType = a})
-- | Multipart request metadata.
boPayload :: Lens' BuyongoogleprogramsOnboard OnboardBuyOnGoogleProgramRequest
boPayload
= lens _boPayload (\ s a -> s{_boPayload = a})
-- | JSONP
boCallback :: Lens' BuyongoogleprogramsOnboard (Maybe Text)
boCallback
= lens _boCallback (\ s a -> s{_boCallback = a})
instance GoogleRequest BuyongoogleprogramsOnboard
where
type Rs BuyongoogleprogramsOnboard = ()
type Scopes BuyongoogleprogramsOnboard =
'["https://www.googleapis.com/auth/content"]
requestClient BuyongoogleprogramsOnboard'{..}
= go _boMerchantId _boRegionCode _boXgafv
_boUploadProtocol
_boAccessToken
_boUploadType
_boCallback
(Just AltJSON)
_boPayload
shoppingContentService
where go
= buildClient
(Proxy :: Proxy BuyongoogleprogramsOnboardResource)
mempty
|
brendanhay/gogol
|
gogol-shopping-content/gen/Network/Google/Resource/Content/Buyongoogleprograms/Onboard.hs
|
mpl-2.0
| 6,677
| 0
| 20
| 1,491
| 899
| 526
| 373
| 127
| 1
|
module Model.Permission (PermissionLevel (..), permissionLevelField) where
import Import
import Model.Permission.Internal
import qualified Data.Text as T
-- | A radio-button form field offering every 'PermissionLevel'
-- constructor (from 'minBound' up), labelled via 'permissionLevelLabel'.
permissionLevelField :: (RenderMessage (HandlerSite m) FormMessage, m ~ HandlerT site IO) => Field m PermissionLevel
permissionLevelField = (radioField' . optionsPairs) $ map (permissionLevelLabel &&& id) [minBound ..]
-- | Render a 'PermissionLevel' as user-facing text via its 'Show' instance.
permissionLevelLabel :: PermissionLevel -> Text
permissionLevelLabel level = T.pack (show level)
|
Happy0/snowdrift
|
Model/Permission.hs
|
agpl-3.0
| 465
| 0
| 8
| 59
| 127
| 73
| 54
| -1
| -1
|
module Git.Command.SymbolicRef (run) where
-- | Placeholder for the @git symbolic-ref@ command: currently ignores
-- its arguments and does nothing.
run :: [String] -> IO ()
run _ = return ()
|
wereHamster/yag
|
Git/Command/SymbolicRef.hs
|
unlicense
| 89
| 0
| 7
| 15
| 42
| 23
| 19
| 3
| 1
|
module HaskellSetup where
{- This is the low-level stuff that hooks into the ncurses library, together
with the Haskell versions of the Agda types. You should not need to bother
reading or modifying this file. -}
import Debug.Trace
import Foreign
import Foreign.C (CInt(..))
import ANSIEscapes
import System.IO
import System.Environment
import Control.Applicative
import Control.Concurrent
foreign import ccall
initscr :: IO ()
foreign import ccall "endwin"
endwin :: IO CInt
foreign import ccall "refresh"
refresh :: IO CInt
foreign import ccall "&LINES"
linesPtr :: Ptr CInt
foreign import ccall "&COLS"
colsPtr :: Ptr CInt
-- | Read the current terminal size from ncurses' LINES/COLS globals.
-- NOTE: the result is @(columns, lines)@, i.e. (width, height).
scrSize :: IO (Int, Int)
scrSize = do
  lnes <- peek linesPtr
  cols <- peek colsPtr
  return (fromIntegral cols, fromIntegral lnes)
data Modifier = Normal | Shift | Control deriving Show
data Key = Char Char | Arrow Modifier Dir | Enter | Backspace | Delete | Escape | Tab deriving Show
data Event = Key Key | Resize Integer Integer
{-
data Nat = Zero | Suc Nat
toNat :: Int -> Nat
toNat 0 = Zero
toNat n = Suc (toNat (n - 1))
fromNat :: Nat -> Int
fromNat Zero = 0
fromNat (Suc n) = 1 + fromNat n
-}
data EQ a b c = Refl
data Change = AllQuiet | CursorMove | LineEdit | BigChange
data Colour
= Black | Red | Green | Yellow
| Blue | Magenta | Cyan | White
data Action
= GoRowCol Integer Integer
| SendText [Char]
| Move Dir Integer
| FgText Colour
| BgText Colour
-- | Perform a screen 'Action' by emitting the corresponding cursor
-- movements and ANSI escape sequences.
act :: Action -> IO ()
act command = case command of
  GoRowCol y x -> do
    resetCursor
    forward (fromIntegral x)
    down (fromIntegral y)
  SendText s -> putStr s
  Move d n   -> moveCursor d (fromIntegral n)
  FgText col -> escape (fg col)
  BgText col -> escape (bg col)
  where
    -- Foreground colour escape codes.
    fg Black   = "0;30m"
    fg Red     = "1;31m"
    fg Green   = "1;32m"
    fg Yellow  = "1;33m"
    fg Blue    = "1;34m"
    fg Magenta = "1;35m"
    fg Cyan    = "1;36m"
    fg White   = "1;37m"
    -- Background colour escape codes.
    bg Black   = "40m"
    bg Red     = "41m"
    bg Green   = "42m"
    bg Yellow  = "43m"
    bg Blue    = "44m"
    bg Magenta = "45m"
    bg Cyan    = "46m"
    bg White   = "47m"
-- | Resolve an escape sequence: each step reads one character and keeps
-- only the (suffix, key) candidates that match it; an empty remaining
-- suffix means the sequence is complete.
getEscapeKey :: [(String, Key)] -> IO (Maybe Key)
getEscapeKey [] = return Nothing
getEscapeKey sks = case lookup "" sks of
  Just k -> return (Just k)
  _ -> do
    c <- getChar
    getEscapeKey [(cs, k) | (d : cs, k) <- sks, d == c]
directions :: [(Char, Dir)]
directions = [('A', DU), ('B', DD),
('C', DR), ('D', DL)]
escapeKeys :: [(String, Key)]
escapeKeys =
[([c], Arrow Normal d) | (c, d) <- directions] ++
[("1;2" ++ [c], Arrow Shift d) | (c, d) <- directions] ++
[("1;5" ++ [c], Arrow Control d) | (c, d) <- directions] ++
[("3~", Delete)]
-- | Poll stdin without blocking: 'Nothing' when no byte is pending,
-- otherwise decode one key press (following arrow/function escape
-- sequences via 'getEscapeKey').
keyReady :: IO (Maybe Key)
keyReady = do
  b <- hReady stdin
  if not b then return Nothing else do
    c <- getChar
    case c of
      '\n' -> return $ Just Enter
      '\r' -> return $ Just Enter
      '\b' -> return $ Just Backspace
      '\DEL' -> return $ Just Backspace
      '\t' -> return $ Just Tab
      -- NOTE: this guarded wildcard deliberately precedes the '\ESC'
      -- branch: when the guard fails (c < ' ', which includes '\ESC'),
      -- pattern matching falls through to the alternatives below.
      _ | c >= ' ' -> return $ Just (Char c)
      '\ESC' -> do
        -- A lone ESC with nothing pending is the Escape key itself;
        -- otherwise '[' introduces a CSI escape sequence.
        b <- hReady stdin
        if not b then return $ Just Escape else do
          c <- getChar
          case c of
            '[' -> getEscapeKey escapeKeys
            _ -> return $ Just Escape
      _ -> return $ Nothing
-- | Widen an @(Int, Int)@ pair to @(Integer, Integer)@.
pni :: (Int, Int) -> (Integer, Integer)
pni (row, col) = (toInteger row, toInteger col)
-- | Editor-style main loop.  @initBuf@ builds the initial buffer from
-- the lines of an optional file argument, @keystroke@ folds a key into
-- the buffer, and @render@ maps (screen size, scroll position, change)
-- to screen 'Action's plus a new scroll position.
mainLoop ::
  ([[Char]] -> b) ->
  (Key -> b -> (Change, b)) ->
  ((Integer, Integer) -> (Integer, Integer) -> (Change, b) -> ([Action], (Integer, Integer))) ->
  IO ()
mainLoop initBuf keystroke render = do
  hSetBuffering stdout NoBuffering
  hSetBuffering stdin NoBuffering
  xs <- getArgs
  -- With no argument, start from an empty buffer; otherwise load the file.
  buf <- case xs of
    [] -> return (initBuf [])
    (x : _) -> (initBuf . lines) <$> readFile x
  initscr
  innerLoop (0, 0) (0, 0) (BigChange, buf)
  endwin
  return ()
  where
  innerLoop oldSize topLeft (c, b) = do
    refresh
    size <- scrSize
    -- Force a full redraw when the terminal was resized.
    (acts, topLeft) <- return $
      if size /= oldSize
        then render (pni size) topLeft (BigChange, b)
        else render (pni size) topLeft (c, b)
    mapM_ act acts
    mc <- keyReady
    case mc of
      -- No input pending: idle briefly, then poll again.
      Nothing -> threadDelay 100 >> innerLoop size topLeft (AllQuiet, b)
      Just k -> innerLoop size topLeft (keystroke k b)
-- | Event-driven main loop: @reactor@ consumes 'Resize' and 'Key'
-- events, producing a new state plus screen 'Action's to perform.
mainAppLoop ::
  s -> (Event -> s -> (s, [Action])) ->
  IO ()
mainAppLoop start reactor = do
  hSetBuffering stdout NoBuffering
  hSetBuffering stdin NoBuffering
  initscr
  innerLoop (0, 0) start
  endwin
  return ()
  where
  innerLoop oldSize state0 = do
    refresh
    size@(w, h) <- scrSize
    -- A terminal size change is delivered to the app as a 'Resize' event.
    let (state1, acts) = if size /= oldSize
          then reactor (Resize (toInteger w) (toInteger h)) state0
          else (state0, [])
    mapM_ act acts
    mc <- keyReady
    case mc of
      -- No key pending: idle briefly, then poll again.
      Nothing -> threadDelay 100 >> innerLoop size state1
      Just k -> do
        let (state2, acts) = reactor (Key k) state1
        mapM_ act acts
        innerLoop size state2
|
haroldcarr/learn-haskell-coq-ml-etc
|
agda/course/2017-conor_mcbride_cs410/CS410-17-master/lectures/HaskellSetup.hs
|
unlicense
| 5,090
| 0
| 22
| 1,343
| 2,000
| 1,029
| 971
| 151
| 11
|
module Standard.Parser(parseToplevel) where
import Text.Parsec
import Text.Parsec.String (Parser)
import qualified Text.Parsec.Char as Ch
import ParserSupport (range, contents)
import Lexer
import Standard.Syntax
toplevel :: Parser Expr
toplevel = do
Ch.string "evendistr"
whitespace
count <- range
whitespace
values <- range
whitespace
return $ EvenDistr count values
<?> "distribution"
-- | Parse a complete input string into an 'Expr', consuming the whole
-- input (via 'contents'); errors are reported against \"\<stdin\>\".
parseToplevel :: String -> Either ParseError Expr
parseToplevel = parse (contents toplevel) "<stdin>"
|
eigengo/hwsexp
|
core/main/Standard/Parser.hs
|
apache-2.0
| 516
| 0
| 9
| 83
| 150
| 79
| 71
| 19
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Network.Ldap.SSHA
(
slappasswd
) where
import qualified Data.Digest.Pure.SHA as SHA
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Text.Lazy.Encoding (encodeUtf8)
import Data.ByteString.Base64 (encode)
import Data.ByteString.Lazy.Char8 (ByteString)
import Data.Text.Lazy (Text)
import System.IO
import System.Random
-- | Raw SHA-1 digest of a lazy 'ByteString'.
ssha'' :: ByteString -> ByteString
ssha'' input = SHA.bytestringDigest (SHA.sha1 input)
-- | LDAP specific SSHA implementation
-- Takes a password and a salt and returns the LDAP conform sha1-hashed and salted password with
-- the appended salt
--
-- 1. The salt is appended to the raw unhashed password
-- 2. The password ++ salt will be hashed with sha1
-- 3. The hashed value will be appended with the salt
-- in Haskell pseudocode:
-- > ssha = sha1 (password ++ salt) ++ salt
ssha' :: ByteString -> ByteString -> ByteString
ssha' pass salt = L.append (ssha'' passSalt) salt
  where passSalt = L.append pass salt
-- | Base64-encode @sha1 (password ++ salt) ++ salt@, converting the
-- lazy result to a strict 'B.ByteString' first.
ldapssha :: Text -> L.ByteString -> B.ByteString
ldapssha password hexSalt =
  encode (B.concat (L.toChunks (ssha' (encodeUtf8 password) hexSalt)))
-- | Like the OpenLDAP slappasswd tool
-- currently only with ssha support
slappasswd :: Text -> IO B.ByteString
slappasswd pass = do
  salt <- genSalt
  return $ B.append "{SSHA}" (ldapssha pass salt)
-- | Inspired by 'Crypto.PasswordStore.genSaltDevURandom'.
-- Type signature added: every top-level binding should carry one.
genSalt :: IO L.ByteString
genSalt = genSaltDevURandom
-- | Generate an 8-byte salt from @\/dev\/urandom@.
-- (Simplified: the redundant @return $ rawSalt@ binding is gone.)
genSaltDevURandom :: IO L.ByteString
genSaltDevURandom = withFile "/dev/urandom" ReadMode $ \h -> L.hGet h 8
|
MaxDaten/ldap-simple
|
Network/Ldap/SSHA.hs
|
bsd-2-clause
| 1,733
| 0
| 11
| 328
| 347
| 199
| 148
| 29
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TupleSections #-}
module Language.Indescript.AST where
import Control.IxFix
-- ####
-- # Type Indexes and Proxies
data AstIx = AstDecl | AstExpr | AstPatt | AstType
-- ## Type Synonyms for Promoted Term Constructors
type AstDecl = 'AstDecl
type AstExpr = 'AstExpr
type AstPatt = 'AstPatt
type AstType = 'AstType
-- ## Proxies
data AstIxProxy (i :: AstIx) where
DeclProxy :: AstIxProxy AstDecl
ExprProxy :: AstIxProxy AstExpr
PattProxy :: AstIxProxy AstPatt
TypeProxy :: AstIxProxy AstType
-- # AST Base Functor
data AstF (f :: AstIx -> *) (i :: AstIx) where
-- ## Atomic AST Nodes (Expr, Patt, Decl)
VarF :: Var -> AstIxProxy i -> AstF f i
ConF :: Var -> AstIxProxy i -> AstF f i
LitF :: Lit -> AstIxProxy i -> AstF f i
ParenF :: f i -> AstF f i
-- ## Complex AST Expression
AppF :: f i -> [f i] -> AstF f i
InfixF :: f i -> f i -> f i -> AstF f i
NegF :: f AstExpr -> AstF f AstExpr
LOSecF :: f AstExpr -> f AstExpr -> AstF f AstExpr
ROSecF :: f AstExpr -> f AstExpr -> AstF f AstExpr
TyAnnF :: f AstExpr -> f AstType -> AstF f AstExpr
IfF :: f AstExpr -> f AstExpr -> f AstExpr -> AstF f AstExpr
LamF :: [f AstPatt] -> f AstExpr -> AstF f AstExpr
LetF :: f AstDecl -> f AstExpr -> AstF f AstExpr
AltF :: f AstPatt -> f AstExpr -> f AstDecl -> AstF f AstExpr
CaseF :: f AstExpr -> [f AstExpr] -> AstF f AstExpr
-- ## Complex AST Type
-- Generalized types, also known as “schemes”.
TyGenF :: [f AstType] -> f AstType -> AstF f AstType
-- ## Complex AST Pattern
BindF :: f AstPatt -> f AstPatt -> AstF f AstPatt
HoleF :: AstF f AstPatt
-- ## Complex AST Declarations
-- An equation, e.g. add 2 2 = 5
EqtF :: FnLhsF f AstExpr -> f AstExpr -> f AstDecl -> AstF f AstDecl
-- A function definition, namely a group of equations with same “name”.
FnDefF :: Var -> [f AstDecl] -> AstF f AstDecl
-- Declartion groups, used for
-- 1. A container of each group of where-clauses.
-- 2. Dependency group for most generalized types during type reconstruction.
DeclsF :: [f AstDecl] -> AstF f AstDecl
-- Operator fixity declarations.
-- Removed after adjusting AST.
OpFixF :: AssocType -> Int -> [f AstExpr] -> AstF f AstDecl
TySigF :: [f AstExpr] -> f AstType -> AstF f AstDecl
TyAlsF :: f AstType -> f AstType -> AstF f AstDecl
NewTyF :: f AstType -> f AstType -> f AstType -> AstF f AstDecl
DatTyF :: f AstType -> [f AstType] -> AstF f AstDecl
instance IxFunctor AstF where imap = imapDefault
-- TODO: use Template Haskell to automatically derive the following instance.
instance IxTraversable AstF where
  itraverse f = \case
    (VarF var i)     -> VarF <$> pure var <*> pure i
    (ConF con i)     -> ConF <$> pure con <*> pure i
    (LitF lit i)     -> LitF <$> pure lit <*> pure i
    (ParenF e)       -> ParenF <$> f e
    (AppF fn args)   -> AppF <$> f fn <*> traverse f args
    (InfixF l o r)   -> InfixF <$> f l <*> f o <*> f r
    (NegF e)         -> NegF <$> f e
    (LOSecF o x)     -> LOSecF <$> f o <*> f x
    -- BUG FIX: this case previously rebuilt a 'LOSecF', silently turning
    -- right operator sections into left ones during traversal.
    (ROSecF o x)     -> ROSecF <$> f o <*> f x
    (TyAnnF e t)     -> TyAnnF <$> f e <*> f t
    (IfF e1 e2 e3)   -> IfF <$> f e1 <*> f e2 <*> f e3
    (LamF patts e)   -> LamF <$> traverse f patts <*> f e
    (LetF decls e)   -> LetF <$> f decls <*> f e
    (AltF p e ds)    -> AltF <$> f p <*> f e <*> f ds
    (CaseF e alts)   -> CaseF <$> f e <*> traverse f alts
    (TyGenF ts ty)   -> TyGenF <$> traverse f ts <*> f ty
    (BindF as src)   -> BindF <$> f as <*> f src
    (HoleF)          -> pure HoleF
    -- Declarations
    (EqtF lhs e ds)  -> EqtF <$> itraverse f lhs <*> f e <*> f ds
    (FnDefF fn alts) -> FnDefF <$> pure fn <*> traverse f alts
    (DeclsF decls)   -> DeclsF <$> traverse f decls
    (OpFixF d l ops) -> OpFixF d l <$> traverse f ops
    (TySigF xs ty)   -> TySigF <$> traverse f xs <*> f ty
    (TyAlsF t' t)    -> TyAlsF <$> f t' <*> f t
    (NewTyF t' c t)  -> NewTyF <$> f t' <*> f c <*> f t
    (DatTyF t cs)    -> DatTyF <$> f t <*> traverse f cs
newtype AnnotAstF a f i = Annot { unAnnot :: (a, AstF f i) }
instance IxFunctor (AnnotAstF a) where imap = imapDefault
instance IxTraversable (AnnotAstF x) where
itraverse f (Annot (x, t)) = (Annot . (x,)) <$> itraverse f t
data Lit = LInt Int
| LFloat Float
| LChar Char
| LString String
| LUnit
deriving (Eq, Show)
data Var = VarId String
| ConId String
| VarSym String
| ConSym String
deriving (Eq, Show, Ord)
data AssocType = Infix | InfixL | InfixR deriving (Eq, Show)
data FnLhsF (f :: AstIx -> *) (i :: AstIx) where
FnArgsF :: f AstExpr -> [f AstExpr] -> FnLhsF f AstExpr
FnOpF :: f AstExpr -> f AstExpr -> f AstExpr -> FnLhsF f AstExpr
instance IxFunctor FnLhsF where imap = imapDefault
-- TODO: use Template Haskell to automatically derive the following instance.
-- Traverses every sub-expression of a function left-hand side.
instance IxTraversable FnLhsF where
  itraverse f = \case
    FnArgsF fn xs -> FnArgsF <$> f fn <*> traverse f xs
    FnOpF l o r -> FnOpF <$> f l <*> f o <*> f r
removeAnnot :: IxFix (AnnotAstF a) i -> IxFix AstF i
removeAnnot = cata (In . snd . unAnnot)
|
notcome/indescript
|
src/Language/Indescript/AST.hs
|
bsd-3-clause
| 5,292
| 0
| 12
| 1,413
| 1,994
| 994
| 1,000
| 100
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveGeneric #-}
module Server where
import Prelude ()
import Prelude.Compat
import Control.Monad.Except
import Control.Monad.Reader
import Data.Maybe (catMaybes)
import Database.Persist.Sqlite
import Data.Aeson.Types
import GHC.Generics
import Servant
import Models
data PostSuccess = PostSuccess { userId :: Int }
deriving (Generic, Show)
instance ToJSON PostSuccess
type UserGet = "user"
:> QueryParam "first_name" String
:> QueryParam "last_name" String
:> QueryParam "age" Int
:> QueryParam "email" String
:> Get '[JSON] [User]
type UserPost = "user" :> ReqBody '[JSON] User :> Post '[JSON] PostSuccess
type UserApi = UserGet :<|> UserPost
-- | Handlers for 'UserApi': a filtered user listing plus a user-creation
-- endpoint, both backed by the @sqlite.db@ database file.
server :: Server UserApi
server = getHandler :<|> postHandler
  where
    getHandler
      :: Maybe String
      -> Maybe String
      -> Maybe Int
      -> Maybe String
      -> Handler [User]
    getHandler fname lname age email = do
      -- Only the query parameters actually supplied become filters;
      -- absent ones are dropped by 'catMaybes'.
      res <- runSqlite "sqlite.db" $ selectList
        (catMaybes
          [ fmap (UserFirstName ==.) fname
          , fmap (UserLastName ==.) lname
          , fmap (UserAge ==.) age
          , fmap (UserEmail ==.) email
          ]) []
      return $ map entityVal res
    postHandler :: User -> Handler PostSuccess
    postHandler user = do
      res <- runSqlite "sqlite.db" $ insert user
      -- Report the freshly-inserted row's key back to the client.
      return PostSuccess { userId = fromIntegral $ fromSqlKey res }
userApi :: Proxy UserApi
userApi = Proxy
|
sgeop/micro
|
src/Server.hs
|
bsd-3-clause
| 1,702
| 0
| 16
| 438
| 433
| 231
| 202
| 52
| 1
|
{-# OPTIONS -Wall #-}
{-# LANGUAGE BangPatterns #-}
{-|
Module : Esge.Core
Description : Most lowlevel edge module.
Copyright : (c) Simon Goller, 2015
License : BSD
Maintainer : neosam@posteo.de
Esge.Core provides the low level data types and functions to the user.
It defines the core Ingame type, what Action, Meta and Storage, methods
to access Ingame e.g. to schedule Actions and run one iteration.
This module does not know anything about Individuals or Rooms and so on.
-}
module Esge.Core (
-- * Data types
Meta,
MetaList,
Storage (Storage),
Action,
Ingame,
-- * Storage primitive values
StoreString (StoreString),
StoreBool (StoreBool),
StoreInt (StoreInt),
StoreFloat (StoreFloat),
-- * Data classes
Storageable (toStorage, fromStorage),
Actionable (toAction),
-- * Functions
defaultIngame,
defaultIngameWithStorage,
storageGet,
storage,
allOfType,
scheduleAction,
getIngameResponse,
step,
-- * Action
-- ** Action helpers
action,
getIngame,
setIngame,
replaceIngame,
-- ** Ingame reader
storageGetA,
storageA,
allOfTypeA,
scheduleActionA,
getIngameResponseA,
-- ** Ingame manipulators
storageInsertA,
storageRemoveA,
insertStoragesA,
setIngameResponseA
) where
import Text.Read (readMaybe)
import Data.Maybe (catMaybes)
import Control.Monad (liftM, ap)
-- | Key value pair using strings
type Meta = (String, String)
-- | List of of key value pairs mainly used as storage
type MetaList = [Meta]
-- | Used to hold an element of the game like a person, room, action.
-- Add custom elements by using the Storageable class
--
-- @
-- Storage key type metas
-- @
data Storage = Storage String String MetaList
deriving (Show, Read, Eq)
-- | Add a new kind of storage or element to the game by adding these
-- transformer functions.
class Storageable a where
    -- | Transform the custom type to storage
    toStorage :: a -> Storage
    -- | Try to transform storage back to the type
    fromStorage :: Storage -> Maybe a
-- | To convert a String to 'Storage' - to store the key
newtype StoreString = StoreString (String, String)
deriving (Show, Read)
-- | To convert a Bool to 'Storage' - to store the key
newtype StoreBool = StoreBool (String, Bool)
deriving (Show, Read)
-- | To convert an Int to 'Storage' - to store the key
newtype StoreInt = StoreInt (String, Int)
deriving (Show, Read)
-- | To convert a Float to 'Storage' - to store the key
newtype StoreFloat = StoreFloat (String, Float)
deriving (Show, Read)
-- | Make String storageable
instance Storageable StoreString where
toStorage (StoreString (key, str)) = Storage key "string" [("val", str)]
fromStorage = stringFromStorage "string"
-- | Make Bool storageable
instance Storageable StoreBool where
    toStorage (StoreBool (key, a)) = Storage key "bool" [("val", show a)]
    -- 'fmap' replaces the manual case analysis on the Maybe result.
    fromStorage = fmap StoreBool . primitiveFromStorage "bool"
-- | Make Int storageable
instance Storageable StoreInt where
    toStorage (StoreInt (key, a)) = Storage key "int" [("val", show a)]
    -- 'fmap' replaces the manual case analysis on the Maybe result.
    fromStorage = fmap StoreInt . primitiveFromStorage "int"
-- | Make Float storageable
instance Storageable StoreFloat where
    toStorage (StoreFloat (key, a)) = Storage key "float" [("val", show a)]
    -- 'fmap' replaces the manual case analysis on the Maybe result.
    fromStorage = fmap StoreFloat . primitiveFromStorage "float"
-- | Make a 'Storage' able to convert into itself,
-- so a 'Storage' can also be used with 'Storageable' functions.
instance Storageable Storage where
    toStorage sto = sto
    fromStorage sto = Just sto
-- | Intended to help to make String storageable
stringFromStorage :: String -> Storage -> Maybe StoreString
stringFromStorage typeStr1 (Storage key typeStr2 metas) =
if typeStr1 /= typeStr2 then Nothing
else case lookup "val" metas of
Just str -> Just $ StoreString (key, str)
Nothing -> Nothing
-- | Intended to help to make all types storageable which implement
-- the Read class, e.g. primitives like Int and Bool.
-- Succeeds only when the storage carries the expected type tag and its
-- \"val\" meta parses via 'readMaybe'.
primitiveFromStorage :: Read a => String -> Storage -> Maybe (String, a)
primitiveFromStorage typeStr sto = do
    -- Maybe-do notation short-circuits the two nested cases of the
    -- original version; the newtype pattern match cannot fail.
    StoreString (key, str) <- stringFromStorage typeStr sto
    val <- readMaybe str
    return (key, val)
-- | In ingame modifying function.
-- Can be added as Action in ingame to modify the game in an iteration
newtype Action a = Action (Ingame -> (a, Ingame))
-- | Turn the internal used 'Action' function to an 'Action'
action :: (Ingame -> (a, Ingame)) -> Action a
action fn = Action fn
-- | Get the current ingame state
getIngame :: Action Ingame
getIngame = action $ \ingame -> (ingame, ingame)
-- | Set new 'Ingame' state
setIngame :: Ingame -> Action ()
setIngame ingame = action $ \_ ->
((), ingame)
-- | Run an 'Action' against an 'Ingame' state, keeping the updated
-- state and discarding the action's result.
runAction :: Action a -> Ingame -> Ingame
runAction (Action fn) ingame = snd (fn ingame)
-- | Lift a pure 'Ingame' -> 'Ingame' function into an @Action ()@.
replaceIngame :: (Ingame -> Ingame) -> Action ()
replaceIngame fn = getIngame >>= setIngame . fn

-- | Sequencing: run the first action, feed its result into the
-- continuation, threading the state through both.
applyActions :: Action a -> (a -> Action b) -> Action b
applyActions (Action firstStep) continue = Action chained
  where chained st =
            let (result, st') = firstStep st
                Action nextStep = continue result
            in nextStep st'

-- | Inject a value without touching the state.
returnAction :: a -> Action a
returnAction x = action $ \st -> (x, st)
-- | State-threading monad built from 'applyActions' / 'returnAction'.
instance Monad Action where
    return = returnAction
    (>>=) = applyActions

-- | Derived from the 'Monad' instance via 'liftM'.
instance Functor Action where
    fmap = liftM

-- | Derived from the 'Monad' instance via 'ap'.
instance Applicative Action where
    pure = return
    (<*>) = ap
-- | Typeclass for things that can be turned into an 'Action' --
-- for example a special storage that is used as a trigger in the
-- game.
class Actionable a where
    toAction :: a -> Action ()
-- | Represents the whole game state.  It contains:
--
-- * all items like rooms, individuals, triggers and so on as
--   'Storage' -- this can be extended by other Haskell modules,
-- * the list of 'Action's scheduled for the next iteration,
-- * the response back to the caller (for output and so on); there
--   can be unlimited \"channels\" and values are always Strings.
data Ingame = Ingame
        [Storage]   -- all items
        [Action ()] -- actions to run on the next step
        MetaList    -- engine response

-- | Empty state, used to initialize.
defaultIngame :: Ingame
defaultIngame = Ingame [] [] []
-- | Default ingame state carrying the given 'Storage's.
-- Mainly used to initialize the 'Ingame' state.
defaultIngameWithStorage :: [Storage] -> Ingame
defaultIngameWithStorage sto = setStorage sto defaultIngame

-- | Replace all 'Storage's in the state.
setStorage :: [Storage] -> Ingame -> Ingame
setStorage sto (Ingame _ acts resp) = Ingame sto acts resp

-- | Get all 'Storage's from the state.
storage :: Ingame -> [Storage]
storage (Ingame sto _ _) = sto

-- | Get all 'Storage's inside an 'Action'.
storageA :: Action [Storage]
storageA = fmap storage getIngame

-- | Replace the list of scheduled 'Action's.
setActions :: [Action ()] -> Ingame -> Ingame
setActions acts (Ingame sto _ resp) = Ingame sto acts resp

-- | Replace the response list.
setResponse :: MetaList -> Ingame -> Ingame
setResponse resp (Ingame sto acts _) = Ingame sto acts resp
-- | Set a single ingame response.
setIngameResponseA :: String    -- ^ Response channel
                   -> String    -- ^ Response text
                   -> Action () -- ^ Modification action
setIngameResponseA key val = replaceIngame (setIngameResponse_ key val)

-- | Prepend a single response pair to the state's response list.
setIngameResponse_ :: String -- ^ Response channel
                   -> String -- ^ Response text
                   -> Ingame -- ^ Ingame before
                   -> Ingame -- ^ Modified Ingame
setIngameResponse_ key val ing@(Ingame _ _ old) =
    setResponse ((key, val) : old) ing

-- | Get a single ingame response; returns \"\" if the channel is
-- not present.
getIngameResponse :: String -> Ingame -> String
getIngameResponse key (Ingame _ _ resp) =
    case lookup key resp of
        Just val -> val
        Nothing -> ""

-- | Get a single ingame response inside an 'Action'.
getIngameResponseA :: String -> Action String
getIngameResponseA key = fmap (getIngameResponse key) getIngame
-- | Register an 'Action' to be run in the next iteration.
scheduleAction :: Action () -> Ingame -> Ingame
scheduleAction act ing@(Ingame _ pending _) = setActions (act : pending) ing

-- | Register an 'Action' for the next iteration, from within an
-- 'Action'.
scheduleActionA :: Action () -> Action ()
scheduleActionA = replaceIngame . scheduleAction

-- | Do one iteration: clear pending actions and responses, then run
-- every previously scheduled 'Action' on the state.
step :: Ingame -> Ingame
step (Ingame sto actions _) = foldr runAction (Ingame sto [] []) actions
-- | Get the id of a 'Storage'.
storageId :: Storage -> String
storageId (Storage key _ _) = key

-- | Add a value to the storage list.  The key argument is currently
-- unused; items are simply prepended.
insertInMetaList :: String -> Storage -> [Storage] -> [Storage]
insertInMetaList _ = (:)

-- | Insert a 'Storageable' item into the state's storage list.
storageInsert_ :: Storageable a => a -> Ingame -> Ingame
storageInsert_ a ing@(Ingame sto _ _) =
    let item = toStorage a
    in setStorage (insertInMetaList (storageId item) item sto) ing

-- | Insert a 'Storageable' item, as an 'Action'.
storageInsertA :: Storageable a => a -> Action ()
storageInsertA = replaceIngame . storageInsert_

-- | Get the 'Storage' with the given key from the state.
storageGet :: String -> Ingame -> Maybe Storage
storageGet key = storageLookup key . storage

-- | Get the 'Storage' with the given key, as an 'Action'.
storageGetA :: String -> Action (Maybe Storage)
storageGetA k = fmap (storageGet k) getIngame

-- | Look up the 'Storage' with the given key in a storage list.
storageLookup :: String -> [Storage] -> Maybe Storage
storageLookup key = lookup key . map storageToTuple
-- | Translate a 'Storage' to a (key, storage) tuple, used for lookup.
storageToTuple :: Storage -> (String, Storage)
storageToTuple sto = (storageId sto, sto)

-- | Get all 'Storage's that can be converted to the requested type.
allOfType :: Storageable a => Ingame -> [a]
allOfType = catMaybes . map fromStorage . storage

-- | Get all 'Storage's convertible to the requested type, as an
-- 'Action'.
allOfTypeA :: Storageable a => Action [a]
allOfTypeA = fmap allOfType getIngame

-- | Insert a list of 'Storageable' items, as an 'Action'.
insertStoragesA :: Storageable a => [a] -> Action ()
insertStoragesA = replaceIngame . insertStorages_

-- | Insert a list of 'Storageable' items into the state.
insertStorages_ :: Storageable a => [a] -> Ingame -> Ingame
insertStorages_ xs ingame = foldr storageInsert_ ingame xs

-- | Remove the element with the given key from storage, as an
-- 'Action'.
storageRemoveA :: String -> Action ()
storageRemoveA = replaceIngame . storageRemove_

-- | Remove every 'Storage' whose key equals the given one.
storageRemove_ :: String -- ^ Storage key to remove
               -> Ingame -- ^ State to edit
               -> Ingame -- ^ Modified state
storageRemove_ key ingame = setStorage (filter keep (storage ingame)) ingame
    where keep (Storage k _ _) = k /= key
|
neosam/esge
|
src/Esge/Core.hs
|
bsd-3-clause
| 11,812
| 0
| 12
| 3,022
| 2,620
| 1,413
| 1,207
| 223
| 3
|
module Problem102 where
import Data.List.Split
main :: IO ()
main = do
    -- Each input line holds six comma-separated coordinates of one
    -- triangle; count the triangles that contain the origin.
    contents <- readFile "txt/102.txt"
    let triangles = map (map read . wordsBy (== ',')) (lines contents)
    print (length (filter containsOrigin triangles))
containsOrigin :: [Double] -> Bool
-- A point lies inside a triangle iff, for each edge, it is on the
-- same side of the edge's supporting line as the opposite vertex.
containsOrigin [x1, y1, x2, y2, x3, y3] =
    sameSide (x1, y1) (x2, y2) (x3, y3) (0, 0)
        && sameSide (x1, y1) (x3, y3) (x2, y2) (0, 0)
        && sameSide (x2, y2) (x3, y3) (x1, y1) (0, 0)
-- Fail loudly on malformed rows instead of with an opaque
-- non-exhaustive-pattern error.
containsOrigin coords =
    error ("containsOrigin: expected 6 coordinates, got " ++ show (length coords))
-- | @sameSide p1 p2 a b@: are points @a@ and @b@ on the same side of
-- the line through @p1@ and @p2@?
--
-- Implemented with the cross product of the edge vector and the
-- point vector, which avoids the slope formula's division: the
-- original @(y2-y1)\/(x2-x1)@ form produced NaN for vertical edges
-- (x1 == x2) and then always answered 'False'.  Multiplying both
-- side tests by the same factor (x2 - x1) leaves the comparison of
-- signs unchanged for non-vertical edges, so behavior there is
-- identical.
sameSide
    :: (Double, Double)
    -> (Double, Double)
    -> (Double, Double)
    -> (Double, Double)
    -> Bool
sameSide (x1, y1) (x2, y2) (x3, y3) (x4, y4) =
    signum (cross (x3, y3)) == signum (cross (x4, y4))
  where
    -- Sign of the z-component of (p2 - p1) x (p - p1).
    cross (a, b) = (x2 - x1) * (b - y1) - (y2 - y1) * (a - x1)
|
adityagupta1089/Project-Euler-Haskell
|
src/problems/Problem102.hs
|
bsd-3-clause
| 1,012
| 0
| 11
| 274
| 398
| 230
| 168
| 24
| 1
|
{-# LANGUAGE ConstraintKinds, DeriveDataTypeable, FlexibleContexts,
FlexibleInstances, OverloadedStrings, RecordWildCards,
ScopedTypeVariables #-}
-- | handle cards
module Web.MangoPay.Cards where
import Web.MangoPay.Documents
import Web.MangoPay.Monad
import Web.MangoPay.Types
import Web.MangoPay.Users
import Data.Text
import Data.Typeable (Typeable)
import Data.Aeson
import Control.Applicative
import qualified Data.HashMap.Lazy as HM
-- | card registration Id
type CardRegistrationId=Text
-- | create a card registration
createCardRegistration :: (MPUsableMonad m) => CardRegistration -> AccessToken -> MangoPayT m CardRegistration
createCardRegistration = createGeneric "/cardregistrations"

-- | modify a card registration
-- NOTE(review): only the "RegistrationData" key survives the
-- 'HM.filterWithKey' filter here — presumably the API accepts only
-- that field on update; confirm against the MangoPay docs.
modifyCardRegistration :: (MPUsableMonad m) => CardRegistration -> AccessToken -> MangoPayT m CardRegistration
modifyCardRegistration cr = modifyGGeneric
  (Just $ HM.filterWithKey (\k _->k=="RegistrationData")) "/cardregistrations/" cr crId
-- | credit card information
data CardInfo = CardInfo {
ciNumber :: Text
,ciExpire :: CardExpiration
,ciCSC :: Text
} deriving (Show,Read,Eq,Ord,Typeable)
-- | helper function to create a new card registration for the given
-- user and currency; every optional field starts out as 'Nothing'
mkCardRegistration :: AnyUserId -> Currency -> CardRegistration
mkCardRegistration uid currency=CardRegistration Nothing Nothing Nothing uid currency Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
-- | a card registration
data CardRegistration = CardRegistration {
crId :: Maybe CardRegistrationId -- ^ The Id of the object
,crCreationDate :: Maybe MpTime -- ^ The creation date of the object
,crTag :: Maybe Text -- ^ Custom data
,crUserId :: AnyUserId -- ^ The Id of the author
,crCurrency :: Currency -- ^ The currency of the card registrated
,crAccessKey :: Maybe Text -- ^ This key has to be sent with the card details and the PreregistrationData
,crPreregistrationData :: Maybe Text -- ^ This passphrase has to be sent with the card details and the AccessKey
,crCardRegistrationURL :: Maybe Text -- ^ The URL where to POST the card details, the AccessKey and PreregistrationData
,crRegistrationData :: Maybe Text -- ^ You get the CardRegistrationData once you posted the card details, the AccessKey and PreregistrationData
,crCardType :: Maybe Text -- ^ « CB_VISA_MASTERCARD » is the only value available yet
,crCardId :: Maybe CardId -- ^ You get the CardId (to process payments) once you edited the CardRegistration Object with the RegistrationData
,crResultCode :: Maybe Text -- ^ The result code of the object
,crResultMessage :: Maybe Text -- ^ The message explaining the result code
,crStatus :: Maybe DocumentStatus -- ^ The status of the object.
} deriving (Show,Eq,Ord,Typeable)
-- | to json as per MangoPay format
instance ToJSON CardRegistration where
toJSON cr=objectSN ["Id".= crId cr -- we store the Id, because in the registration workflow we may need to hang on to the registration object for a while, so let's use JSON serialization to keep it!
, "Tag" .= crTag cr,"UserId" .= crUserId cr
,"Currency" .= crCurrency cr,"RegistrationData" .= crRegistrationData cr
,"CardRegistrationURL" .= crCardRegistrationURL cr]
-- | from json as per MangoPay format
instance FromJSON CardRegistration where
parseJSON (Object v) =CardRegistration <$>
v .: "Id" <*>
v .:? "CreationDate" <*>
v .:? "Tag" <*>
v .: "UserId" <*>
v .: "Currency" <*>
v .:? "AccessKey" <*>
v .:? "PreregistrationData" <*>
v .:? "CardRegistrationURL" <*>
v .:? "RegistrationData" <*>
v .:? "CardType" <*>
v .:? "CardId" <*>
v .:? "ResultCode" <*>
v .:? "ResultMessage" <*>
v .:? "Status"
parseJSON _=fail "CardRegistration"
-- | fetch a card from its Id
fetchCard :: (MPUsableMonad m) => CardId -> AccessToken -> MangoPayT m Card
fetchCard = fetchGeneric "/cards/"

-- | list all cards for a given user, with the given sort order and
-- optional pagination
listCards :: (MPUsableMonad m) => AnyUserId -> GenericSort -> Maybe Pagination -> AccessToken -> MangoPayT m (PagedList Card)
listCards uid gs = genericListExtra (sortAttributes gs) ["/users/",uid,"/cards"]
-- | validity of a card
data CardValidity=UNKNOWN | VALID | INVALID
deriving (Show,Read,Eq,Ord,Bounded,Enum,Typeable)
-- | to json as per MangoPay format
instance ToJSON CardValidity where
toJSON =toJSON . show
-- | from json as per MangoPay format
instance FromJSON CardValidity where
parseJSON = jsonRead "CardValidity"
-- | a registered card
data Card=Card {
cId :: CardId
,cCreationDate :: MpTime
,cTag :: Maybe Text
,cExpirationDate :: CardExpiration -- ^ MMYY
,cAlias :: Text -- ^ Example: 497010XXXXXX4414
,cCardProvider :: Text -- ^ The card provider, it could be « CB », « VISA », « MASTERCARD », etc.
,cCardType :: Text -- ^ « CB_VISA_MASTERCARD » is the only value available yet
,cProduct :: Maybe Text
,cBankCode :: Maybe Text
,cActive :: Bool
,cCurrency :: Currency
,cValidity :: CardValidity -- ^ Once we proceed (or attempted to process) a payment with the card we are able to indicate if it is « valid » or « invalid ». If we didn’t process a payment yet the « Validity » stay at « unknown ».
,cCountry :: Text
,cUserId :: AnyUserId
} deriving (Show,Eq,Ord,Typeable)
-- | from json as per MangoPay format
instance FromJSON Card where
parseJSON (Object v) =Card <$>
v .: "Id" <*>
v .: "CreationDate" <*>
v .:? "Tag" <*>
v .: "ExpirationDate" <*>
v .: "Alias" <*>
v .: "CardProvider" <*>
v .: "CardType" <*>
v .:? "Product" <*>
v .:? "BankCode" <*>
v .: "Active" <*>
v .: "Currency" <*>
v .: "Validity" <*>
v .: "Country" <*>
v .: "UserId"
parseJSON _=fail "Card"
-- | to json as per MangoPay format
instance ToJSON Card where
toJSON Card {..} = objectSN
[ "Id" .= cId
, "CreationDate" .= cCreationDate
, "Tag" .= cTag
, "ExpirationDate" .= cExpirationDate
, "Alias" .= cAlias
, "CardProvider" .= cCardProvider
, "CardType" .= cCardType
, "Product" .= cProduct
, "BankCode" .= cBankCode
, "Active" .= cActive
, "Currency" .= cCurrency
, "Validity" .= cValidity
, "Country" .= cCountry
, "UserId" .= cUserId ]
|
prowdsponsor/mangopay
|
mangopay/src/Web/MangoPay/Cards.hs
|
bsd-3-clause
| 7,249
| 1
| 33
| 2,209
| 1,263
| 699
| 564
| 123
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-| Minimalist implementation of type-safe formatted strings, borrowing heavily
from the implementation of the @formatting@ package.
Example use of this module:
>>> :set -XOverloadedStrings
>>> import Turtle.Format
>>> format ("This is a "%s%" string that takes "%d%" arguments") "format" 2
"This is a format string that takes 2 arguments"
A `Format` string that takes no arguments has this type:
> "I take 0 arguments" :: Format r r
>
> format "I take 0 arguments" :: Text
>>> format "I take 0 arguments"
"I take 0 arguments"
A `Format` string that takes one argument has this type:
> "I take "%d%" arguments" :: Format r (Int -> r)
>
> format ("I take "%d%" argument") :: Int -> Text
>>> format ("I take "%d%" argument") 1
"I take 1 argument"
A `Format` string that takes two arguments has this type:
> "I "%s%" "%d%" arguments" :: Format r (Text -> Int -> r)
>
> format ("I "%s%" "%d%" arguments") :: Text -> Int -> Text
>>> format ("I "%s%" "%d%" arguments") "take" 2
"I take 2 arguments"
-}
{-# LANGUAGE TypeFamilies #-}
module Turtle.Format (
-- * Format
Format
, (%)
, format
, makeFormat
-- * Parameters
, w
, d
, u
, o
, x
, f
, e
, g
, s
, fp
-- * Utilities
, repr
) where
import Control.Category (Category(..))
import Data.Monoid ((<>))
import Data.String (IsString(..))
import Data.Text (Text, pack)
import Data.Word (Word)
import Filesystem.Path.CurrentOS (FilePath, toText)
import Numeric (showEFloat, showFFloat, showGFloat, showHex, showOct)
import Prelude hiding ((.), id, FilePath)
-- | A `Format` string, encoded in continuation-passing style: @(>>-)@
-- hands the rendered `Text` to a continuation @Text -> a@ and yields
-- @b@, which may still demand further formatting arguments.
newtype Format a b = Format { (>>-) :: (Text -> a) -> b }

instance Category Format where
    -- The identity format renders to the empty string.
    id = Format (\return_ -> return_ "")

    -- Composition renders both halves and concatenates the results.
    fmt1 . fmt2 = Format (\return_ ->
        fmt1 >>- \str1 ->
        fmt2 >>- \str2 ->
        return_ (str1 <> str2) )
-- | Concatenate two `Format` strings (category composition).
(%) :: Format b c -> Format a b -> Format a c
fmtL % fmtR = fmtL . fmtR
instance (a ~ b) => IsString (Format a b) where
    -- A string literal is a format taking no arguments; the (a ~ b)
    -- constraint nudges type inference toward this instance.
    fromString str = Format (\return_ -> return_ (pack str))

{-| Convert a `Format` string to a print function that takes zero or more typed
    arguments and returns a `Text` string
-}
format :: Format Text r -> r
format fmt = fmt >>- id
-- | Create your own format specifier from a rendering function.
makeFormat :: (a -> Text) -> Format r (a -> r)
makeFormat render = Format (\done arg -> done (render arg))
{-| `Format` any `Show`able value

>>> format w True
"True"
-}
w :: Show a => Format r (a -> r)
w = makeFormat (pack . show)

{-| `Format` an `Int` value as a signed decimal

>>> format d 25
"25"

>>> format d (-25)
"-25"
-}
d :: Format r (Int -> r)
d = w

{-| `Format` a `Word` value as an unsigned decimal

>>> format u 25
"25"
-}
u :: Format r (Word -> r)
u = w
{-| `Format` a `Word` value as an unsigned octal number

>>> format o 25
"31"
-}
o :: Format r (Word -> r)
o = makeFormat (pack . flip showOct "")

{-| `Format` a `Word` value as an unsigned hexadecimal number (without a
    leading \"0x\")

>>> format x 25
"19"
-}
x :: Format r (Word -> r)
x = makeFormat (pack . flip showHex "")
{-| `Format` a `Double` using decimal notation with 6 digits of precision

>>> format f 25.1
"25.100000"
-}
f :: Format r (Double -> r)
f = makeFormat (pack . flip (showFFloat (Just 6)) "")

{-| `Format` a `Double` using scientific notation with 6 digits of precision

>>> format e 25.1
"2.510000e1"
-}
e :: Format r (Double -> r)
e = makeFormat (pack . flip (showEFloat (Just 6)) "")

{-| `Format` a `Double` using decimal notation for small exponents and
    scientific notation for large exponents

>>> format g 25.1
"25.100000"

>>> format g 123456789
"1.234568e8"

>>> format g 0.00000000001
"1.000000e-11"
-}
g :: Format r (Double -> r)
g = makeFormat (pack . flip (showGFloat (Just 6)) "")
{-| `Format` that inserts `Text` verbatim

>>> format s "ABC"
"ABC"
-}
s :: Format r (Text -> r)
s = makeFormat (\txt -> txt)

{-| `Format` a `Filesystem.Path.CurrentOS.FilePath` into `Text`

>>> import Filesystem.Path.CurrentOS((</>))
>>> format fp ("usr" </> "lib")
"usr/lib"
-}
fp :: Format r (FilePath -> r)
fp = makeFormat (either id id . toText)

{-| Convert a `Show`able value to `Text`

    Short-hand for @(format w)@

>>> repr (1,2)
"(1,2)"
-}
repr :: Show a => a -> Text
repr = format w
|
maoe/Haskell-Turtle-Library
|
src/Turtle/Format.hs
|
bsd-3-clause
| 4,353
| 6
| 12
| 972
| 886
| 500
| 386
| 63
| 1
|
-- | This is a pretty-printer for turning the internal representation
-- of generic structured XML documents into the Doc type (which can
-- later be rendered using Text.ParserCombinators.HughesPJ.render).
-- Essentially there is one pp function for each type in
-- Text.Xml.HaXml.Types, so you can pretty-print as much or as little
-- of the document as you wish.
module Text.XML.HaXml.Pretty
(
-- * Pretty-print a whole document
document
-- ** Just one content
, content
-- ** Just one tagged element
, element
-- * Pretty-print just a DTD
, doctypedecl
-- ** The prolog
, prolog
-- ** A content particle description
, cp
) where
import Prelude hiding (maybe,either)
import Maybe hiding (maybe)
import List (intersperse)
--import Char (isSpace)
import Text.PrettyPrint.HughesPJ
import Text.XML.HaXml.Types
-- Local replacements for the hidden Prelude combinators: 'maybe'
-- here renders 'Nothing' as the empty document.
either f _ (Left x)  = f x
either _ g (Right x) = g x
maybe _ Nothing  = empty
maybe f (Just x) = f x
--peref p = text "%" <> text p <> text ";"
----
document :: Document i -> Doc
prolog :: Prolog -> Doc
xmldecl :: XMLDecl -> Doc
misc :: Misc -> Doc
sddecl :: Bool -> Doc
doctypedecl :: DocTypeDecl -> Doc
markupdecl :: MarkupDecl -> Doc
extsubset :: ExtSubset -> Doc
extsubsetdecl :: ExtSubsetDecl -> Doc
cp :: CP -> Doc
element :: Element i -> Doc
attribute :: Attribute -> Doc --etc
content :: Content i -> Doc
----
document (Document p _ e m)= prolog p $$ element e $$ vcat (map misc m)
prolog (Prolog x m1 dtd m2)= maybe xmldecl x $$
vcat (map misc m1) $$
maybe doctypedecl dtd $$
vcat (map misc m2)
xmldecl (XMLDecl v e sd) = text "<?xml version='" <> text v <> text "'" <+>
maybe encodingdecl e <+>
maybe sddecl sd <+>
text "?>"
misc (Comment s) = text "<!--" <+> text s <+> text "-->"
misc (PI (n,s)) = text "<?" <> text n <+> text s <+> text "?>"
sddecl sd | sd = text "standalone='yes'"
| otherwise = text "standalone='no'"
doctypedecl (DTD n eid ds) = if null ds then
hd <> text ">"
else hd <+> text " [" $$
vcat (map markupdecl ds) $$ text "]>"
where hd = text "<!DOCTYPE" <+> text n <+>
maybe externalid eid
markupdecl (Element e) = elementdecl e
markupdecl (AttList a) = attlistdecl a
markupdecl (Entity e) = entitydecl e
markupdecl (Notation n) = notationdecl n
markupdecl (MarkupMisc m) = misc m
--markupdecl (MarkupPE p m) = peref p
extsubset (ExtSubset t ds) = maybe textdecl t $$
vcat (map extsubsetdecl ds)
extsubsetdecl (ExtMarkupDecl m) = markupdecl m
extsubsetdecl (ExtConditionalSect c) = conditionalsect c
--extsubsetdecl (ExtPEReference p e) = peref p
element (Elem n as []) = text "<" <> text n <+>
fsep (map attribute as) <> text "/>"
element e@(Elem n as cs)
-- | any isText cs = text "<" <> text n <+> fsep (map attribute as) <>
-- text ">" <> hcat (map content cs) <>
-- text "</" <> text n <> text ">"
| isText (head cs) = text "<" <> text n <+> fsep (map attribute as) <>
text ">" <> hcat (map content cs) <>
text "</" <> text n <> text ">"
| otherwise = let (d,c) = carryelem e empty
in d <> c
-- Character data and references are "text" content; everything else
-- (elements, misc) is structural.
isText c = case c of
             CString {} -> True
             CRef {}    -> True
             _          -> False
carryelem (Elem n as []) c
= ( c <>
text "<" <> text n <+> fsep (map attribute as)
, text "/>")
carryelem e@(Elem n as cs) c
-- | any isText cs = ( c <> element e, empty)
| otherwise = let (cs0,d0) = carryscan carrycontent cs (text ">")
in
( c <>
text "<" <> text n <+> fsep (map attribute as) $$
nest 2 (vcat cs0) <> --- $$
d0 <> text "</" <> text n
, text ">")
carrycontent (CElem e _) c = carryelem e c
carrycontent (CString False s _) c = (c <> chardata s, empty)
carrycontent (CString True s _) c = (c <> cdsect s, empty)
carrycontent (CRef r _) c = (c <> reference r, empty)
carrycontent (CMisc m _) c = (c <> misc m, empty)
-- | Accumulating map: threads a carry value @c@ through the list
-- left-to-right, collecting each element's result and returning the
-- final carry.
carryscan :: (a->c->(b,c)) -> [a] -> c -> ([b],c)
carryscan f [] c = ([],c)
carryscan f (a:as) c = let (b, c0) = f a c
                           (bs,c1) = carryscan f as c0
                       in (b:bs, c1)
--carryelem e@(Elem n as cs) c
-- | isText (head cs) =
-- ( start <>
-- text ">" <> hcat (map content cs) <> text "</" <> text n
-- , text ">")
-- | otherwise =
-- let (d,c0) = foldl carrycontent (start, text ">") cs in
-- ( d <> c0 <> text "</" <> text n
-- , text ">")
-- where start = c <> text "<" <> text n <+> fsep (map attribute as)
--
--carrycontent (d,c) (CElem e) = let (d',c') = carryelem e c in
-- (d $$ nest 2 d', c')
--carrycontent (d,c) (CString _ s) = (d <> c <> chardata s, empty)
--carrycontent (d,c) (CRef r) = (d <> c <> reference r,empty)
--carrycontent (d,c) (CMisc m) = (d $$ c <> misc m, empty)
attribute (n,v) = text n <> text "=" <> attvalue v
content (CElem e _) = element e
content (CString False s _) = chardata s
content (CString True s _) = cdsect s
content (CRef r _) = reference r
content (CMisc m _) = misc m
elementdecl (ElementDecl n cs) = text "<!ELEMENT" <+> text n <+>
contentspec cs <> text ">"
contentspec EMPTY = text "EMPTY"
contentspec ANY = text "ANY"
contentspec (Mixed m) = mixed m
contentspec (ContentSpec c) = cp c
--contentspec (ContentPE p cs) = peref p
cp (TagName n m) = text n <> modifier m
cp (Choice cs m) = parens (hcat (intersperse (text "|") (map cp cs))) <>
modifier m
cp (Seq cs m) = parens (hcat (intersperse (text ",") (map cp cs))) <>
modifier m
--cp (CPPE p c) = peref p
modifier None = empty
modifier Query = text "?"
modifier Star = text "*"
modifier Plus = text "+"
mixed PCDATA = text "(#PCDATA)"
mixed (PCDATAplus ns) = text "(#PCDATA |" <+>
hcat (intersperse (text "|") (map text ns)) <>
text ")*"
attlistdecl (AttListDecl n ds) = text "<!ATTLIST" <+> text n <+>
fsep (map attdef ds) <> text ">"
attdef (AttDef n t d) = text n <+> atttype t <+> defaultdecl d
atttype StringType = text "CDATA"
atttype (TokenizedType t) = tokenizedtype t
atttype (EnumeratedType t) = enumeratedtype t
tokenizedtype ID = text "ID"
tokenizedtype IDREF = text "IDREF"
tokenizedtype IDREFS = text "IDREFS"
tokenizedtype ENTITY = text "ENTITY"
tokenizedtype ENTITIES = text "ENTITIES"
tokenizedtype NMTOKEN = text "NMTOKEN"
tokenizedtype NMTOKENS = text "NMTOKENS"
enumeratedtype (NotationType n)= notationtype n
enumeratedtype (Enumeration e) = enumeration e
notationtype ns = text "NOTATION" <+>
parens (hcat (intersperse (text "|") (map text ns)))
enumeration ns = parens (hcat (intersperse (text "|") (map nmtoken ns)))
defaultdecl REQUIRED = text "#REQUIRED"
defaultdecl IMPLIED = text "#IMPLIED"
defaultdecl (DefaultTo a f) = maybe (const (text "#FIXED")) f <+> attvalue a
conditionalsect (IncludeSect i)= text "<![INCLUDE [" <+>
vcat (map extsubsetdecl i) <+> text "]]>"
conditionalsect (IgnoreSect i) = text "<![IGNORE [" <+>
fsep (map ignoresectcontents i) <+> text "]]>"
ignore (Ignore) = empty
ignoresectcontents (IgnoreSectContents i is)
= ignore i <+> vcat (map internal is)
where internal (ics,i) = text "<![[" <+>
ignoresectcontents ics <+>
text "]]>" <+> ignore i
reference (RefEntity er) = entityref er
reference (RefChar cr) = charref cr
entityref n = text "&" <> text n <> text ";"
charref c = text "&#" <> text (show c) <> text ";"
entitydecl (EntityGEDecl d) = gedecl d
entitydecl (EntityPEDecl d) = pedecl d
gedecl (GEDecl n ed) = text "<!ENTITY" <+> text n <+> entitydef ed <>
text ">"
pedecl (PEDecl n pd) = text "<!ENTITY %" <> text n <+> pedef pd <>
text ">"
entitydef (DefEntityValue ev) = entityvalue ev
entitydef (DefExternalID i nd) = externalid i <+> maybe ndatadecl nd
pedef (PEDefEntityValue ev) = entityvalue ev
pedef (PEDefExternalID eid) = externalid eid
externalid (SYSTEM sl) = text "SYSTEM" <+> systemliteral sl
externalid (PUBLIC i sl) = text "PUBLIC" <+> pubidliteral i <+>
systemliteral sl
ndatadecl (NDATA n) = text "NDATA" <+> text n
textdecl (TextDecl vi ed) = text "<?xml" <+> maybe text vi <+>
encodingdecl ed <> text "?>"
extparsedent (ExtParsedEnt t c)= maybe textdecl t <+> content c
extpe (ExtPE t esd) = maybe textdecl t <+>
vcat (map extsubsetdecl esd)
notationdecl (NOTATION n e) = text "<!NOTATION" <+> text n <+>
either externalid publicid e <>
text ">"
publicid (PUBLICID p) = text "PUBLICID" <+> pubidliteral p
encodingdecl (EncodingDecl s) = text "encoding='" <> text s <> text "'"
nmtoken s = text s
attvalue (AttValue esr) = text "\"" <>
hcat (map (either text reference) esr) <>
text "\""
entityvalue (EntityValue evs)
| containsDoubleQuote evs = text "'" <> hcat (map ev evs) <> text "'"
| otherwise = text "\"" <> hcat (map ev evs) <> text "\""
ev (EVString s) = text s
--ev (EVPERef p e) = peref p
ev (EVRef r) = reference r
pubidliteral (PubidLiteral s)
| '"' `elem` s = text "'" <> text s <> text "'"
| otherwise = text "\"" <> text s <> text "\""
systemliteral (SystemLiteral s)
| '"' `elem` s = text "'" <> text s <> text "'"
| otherwise = text "\"" <> text s <> text "\""
chardata s = {-if all isSpace s then empty else-} text s
cdsect c = text "<![CDATA[" <> chardata c <> text "]]>"
----
-- True when any literal fragment of the entity value contains a
-- double quote (used above to pick single-quote delimiters instead).
containsDoubleQuote evs = any hasQuote evs
  where hasQuote (EVString str) = '"' `elem` str
        hasQuote _              = False
|
FranklinChen/hugs98-plus-Sep2006
|
packages/HaXml/src/Text/XML/HaXml/Pretty.hs
|
bsd-3-clause
| 11,378
| 0
| 16
| 4,234
| 3,520
| 1,704
| 1,816
| 194
| 2
|
{-# LANGUAGE RankNTypes #-}
module NAT
( NAT(..)
, z'
, s'
, zero'
, mult'
, plus'
, pred'
) where
import ORD
import BOOL
-----------------------------------------
-- Primitive Recursive Type
--
-- Church-numeral encoding: a number is a function that applies the
-- successor @s@ to the base case @z@ some number of times.
-----------------------------------------
type PRType =
  forall r. r -> (r -> r) -> r

-- Despite the name, this encodes ZERO: it applies @s@ zero times
-- (see @z' = NAT oneN@ below).
oneN :: PRType
oneN = \z s -> z

-- Successor: one extra application of @s@.
uN :: PRType -> PRType
uN n = \z s -> s (n z s)
-----------------------------------------
-- Predecessor via the classic Church-numeral trick (pred of zero is
-- zero); used by @pred'@ below.
eN :: PRType -> PRType
eN n = \z s -> let f k = \m -> m (k s)
               in n (\x -> z) f id
-----------------------------------------
-- Wrapper so a polymorphic PRType can be stored inside a tuple
-- without impredicative types.
newtype PRTW = PRTW PRType

-- | Compare two encoded numbers by stepping @m@ times: the carried
-- pair holds the current verdict and a progressively decremented
-- copy of @n@ (via 'eN').
-- NOTE(review): the base case @n eq (const lt)@ settles 0-vs-n;
-- each step either fixes GT (when the copy of n hit zero) or
-- re-tests -- confirm against the ORD module's semantics.
compareN :: PRType -> PRType -> ORD
compareN m n = fst $ m (n eq (const lt), PRTW n) comp
  where comp :: (ORD, PRTW) -> (ORD, PRTW)
        comp r@((ORD ord), x@(PRTW xx)) =
          ord (gt, x) (f (eN xx)) r
        f :: PRType -> (ORD, PRTW)
        f n = n (eq, PRTW n) (const (lt, PRTW n))

-- | Equality, derived from the three-way comparison.
eqN :: PRType -> PRType -> BOOL
eqN m n = ordToEq $ compareN m n
----------------------------------------------------------
-- Natural Number Type
----------------------------------------------------------
-- | Natural numbers as wrapped Church numerals.
newtype NAT = NAT { unNat :: PRType }
----------------------------------------------------------
-- Constructors
----------------------------------------------------------
-- | Zero.
z' :: NAT
z' = NAT oneN

-- | Successor.
s' :: NAT -> NAT
s' (NAT n) = NAT (uN n)
----------------------------------------------------------
-- Operators
----------------------------------------------------------
-- | Test for zero: the base case fires only when @s@ is never
-- applied.
zero' :: NAT -> BOOL
zero' (NAT n) = n true (const false)

-- | Multiplication: use "add n" as the successor step of @m@.
mult' :: NAT -> NAT -> NAT
mult' (NAT m) (NAT n) =
  NAT $ \zv sv -> m zv (\acc -> n acc sv)

-- | Addition: apply @m@'s successors on top of @n@.
plus' :: NAT -> NAT -> NAT
plus' (NAT m) (NAT n) =
  NAT $ \zv sv -> m (n zv sv) sv

-- | Predecessor (predecessor of zero is zero).
pred' :: NAT -> NAT
pred' (NAT num) = NAT (eN num)
----------------------------------------------------------
-- Type Class Instances
----------------------------------------------------------
-- | Render by folding the Church numeral into an ordinary Int.
instance Show NAT where
  show (NAT n) = show $ n 0 (1+)
-- | Structural equality via 'eqN'.
instance Eq NAT where
  (NAT m) == (NAT n) = toBool $ eqN m n
-- | Ordering via 'compareN'.
instance Ord NAT where
  compare (NAT m) (NAT n) = toOrdering (compareN m n)
|
godsme/haskell-adt
|
src/NAT.hs
|
bsd-3-clause
| 2,070
| 0
| 13
| 431
| 784
| 424
| 360
| 51
| 1
|
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
import System.Environment
import Control.Applicative
import qualified Data.ByteString as BS
import Data.X509
import Data.X509.File
import Data.X509.CertificateStore
import Network
import TlsClient
import Data.HandleLike
import Control.Monad
import System.Exit
import System.Console.GetOpt
import Basic
import ReadEcPrivateKey
-- | Command-line flags: hash choice for the cipher-suite list, and
-- whether to present the ECDSA client certificate.
data Option
	= SHA1
	| SHA256
	| ECDSA
	deriving (Show, Eq)

-- | getopt descriptors for the three long options above.
optDescr :: [OptDescr Option]
optDescr = [
	Option "" ["sha1"] (NoArg SHA1) "Use SHA1",
	Option "" ["sha256"] (NoArg SHA256) "Use SHA256",
	Option "" ["ecdsa"] (NoArg ECDSA) "Use ECDSA for client certification"
 ]
-- | Build the offered cipher-suite list from the flags: exactly one
-- of --sha1/--sha256 narrows the ECDSA suites to that hash; any
-- other combination offers the full preference list.  Plain
-- RSA/AES-128-CBC-SHA is always appended as the fallback.
getCipherSuites :: [Option] -> [CipherSuite]
getCipherSuites opts = (++ [CipherSuite RSA AES_128_CBC_SHA]) $
	case (SHA1 `elem` opts, SHA256 `elem` opts) of
		(True, False) -> [CipherSuite ECDHE_ECDSA AES_128_CBC_SHA]
		(False, True) -> [CipherSuite ECDHE_ECDSA AES_128_CBC_SHA256]
		_ -> [
			CipherSuite ECDHE_ECDSA AES_128_CBC_SHA256,
			CipherSuite ECDHE_ECDSA AES_128_CBC_SHA,
			CipherSuite ECDHE_RSA AES_128_CBC_SHA256,
			CipherSuite ECDHE_RSA AES_128_CBC_SHA,
			CipherSuite DHE_RSA AES_128_CBC_SHA256,
			CipherSuite DHE_RSA AES_128_CBC_SHA,
			CipherSuite RSA AES_128_CBC_SHA256]
-- | Infix shorthand for strict 'BS.append', used to assemble the
-- HTTP request below.
(+++) :: BS.ByteString -> BS.ByteString -> BS.ByteString
(+++) = BS.append
main :: IO ()
main = do
	-- NOTE(review): both patterns below are partial -- fewer than two
	-- positional arguments, or a key file that is not a single RSA
	-- key, aborts with a pattern-match failure rather than a usage
	-- message.  TODO: consider an explicit usage error.
	(opts, svpna : name : _, errs) <- getOpt Permute optDescr <$> getArgs
	unless (null errs) $ do
		mapM_ putStr errs
		exitFailure
	[PrivKeyRSA pkys] <- readKeyFile "yoshikuni.key"
	certChain <- CertificateChain <$> readSignedObject "yoshikuni.crt"
	pkysEc <- readEcPrivKey "client_ecdsa.key"
	certChainEc <- CertificateChain <$> readSignedObject "client_ecdsa.cert"
	-- Trust store for verifying the server certificate.
	certStore <- makeCertificateStore . concat <$> mapM readSignedObject [
		"cacert.pem",
		"../verisign/rsa/veri_test_root_3.pem"
		]
	-- First positional argument is the server port.
	sv <- connectTo "localhost" . PortNumber . fromIntegral =<<
		(readIO svpna :: IO Int)
	let suit = getCipherSuites opts
	-- Pick the ECDSA or RSA client credential depending on --ecdsa.
	tls <- if ECDSA `elem` opts
		then openTlsServer name [(pkysEc, certChainEc)] certStore sv suit
		else openTlsServer name [(pkys, certChain)] certStore sv suit
	-- Fire a fixed GET request and dump the response.
	hlPut tls $
		"GET / HTTP/1.1\r\n" +++
		"Host: localhost:4492\r\n" +++
		"User-Agent: Mozilla/5.0 (X11; Linux i686; rv:24.0) " +++
		"Gecko/20140415 Firefox/24.0\r\n" +++
		"Accept: text/html,application/xhtml+xml,application/xml;" +++
		"q=0.9,*/*;q=0.8\r\n" +++
		"Accept-Language: ja,en-us;q=0.7,en;q=0.3\r\n" +++
		"Accept-Encoding: gzip, deflate\r\n" +++
		"Connection: keep-alive\r\n" +++
		"Cache-Control: max-age=0\r\n\r\n"
	hlGetHeaders tls >>= print
	hlGetContent tls >>= print
--	tGet tls 10 >>= print
--	tGet tls 10 >>= print
	{-
	tGetByte tls >>= print
	tGetByte tls >>= print
	tGetByte tls >>= print
	tGetByte tls >>= print
	-}
	tClose tls
-- | Read response lines up to and including the blank line that
-- terminates the HTTP header block.
hlGetHeaders :: TlsServer -> IO [BS.ByteString]
hlGetHeaders tls = do
	line <- hlGetLine tls
	if BS.null line
		then return [line]
		else do
			rest <- hlGetHeaders tls
			return (line : rest)
|
YoshikuniJujo/forest
|
subprojects/tls-analysis/client/mkClient.hs
|
bsd-3-clause
| 2,948
| 59
| 17
| 460
| 830
| 434
| 396
| 77
| 3
|
{-# LANGUAGE BangPatterns , RankNTypes, GADTs, DataKinds #-}
{- | The 'Numerical.HBLAS.BLAS.Level1' module provides a fully general
yet type safe Level1 BLAS API.
When in doubt about the semantics of an operation,
consult your system's BLAS api documentation, or just read the documentation
for
<https://software.intel.com/sites/products/documentation/hpc/mkl/mklman/index.htm the Intel MKL BLAS distribution>
A few basic notes about how to invoke BLAS routines.
Many BLAS operations take one or more arguments of type 'Transpose'.
'Tranpose' has the following different constructors, which tell BLAS
routines what transformation to implicitly apply to an input matrix @mat@ with dimension @n x m@.
* 'NoTranspose' leaves the matrix @mat@ as is.
* 'Transpose' treats the @mat@ as being implicitly transposed, with dimension
@m x n@. Entry @mat(i,j)@ being treated as actually being the entry
@mat(j,i)@. For Real matrices this is also the matrix adjoint operation.
ie @Tranpose(mat)(i,j)=mat(j,i)@
* 'ConjNoTranspose' will implicitly conjugate @mat@, which is a no op for Real ('Float' or 'Double') matrices, but for
'Complex Float' and 'Complex Double' matrices, a given matrix entry @mat(i,j)==x':+'y@
will be treated as actually being @conjugate(mat)(i,j)=y':+'x@.
* 'ConjTranpose' will implicitly transpose and conjugate the input matrix.
ConjugateTranpose acts as matrix adjoint for both real and complex matrices.
The *gemm operations work as follows (using 'sgemm' as an example):
* @'sgemm trLeft trRight alpha beta left right result'@, where @trLeft@ and @trRight@
are values of type 'Transpose' that respectively act on the matrices @left@ and @right@.
* the generalized matrix computation thusly formed can be viewed as being
@result = alpha * trLeft(left) * trRight(right) + beta * result@
the *gemv operations are akin to the *gemm operations, but with @right@ and @result@
being vectors rather than matrices.
the *trsv operations solve for @x@ in the equation @A x = y@ given @A@ and @y@.
The 'MatUpLo' argument determines if the matrix should be treated as upper or
lower triangular and 'MatDiag' determines if the triangular solver should treat
the diagonal of the matrix as being all 1's or not. A general pattern of invocation
would be @'strsv' matuplo tranposeMatA matdiag matrixA xVector@.
A key detail to note is that the input vector is ALSO the result vector,
ie 'strsv' and friends updates the vector place.
-}
module Numerical.HBLAS.BLAS.Level1(
sasum
,dasum
,scasum
,dzasum
,saxpy
,daxpy
,caxpy
,zaxpy
,scopy
,dcopy
,ccopy
,zcopy
,sdot
,ddot
--,sdsdot
,dsdot
,cdotu
,cdotc
,zdotu
,zdotc
,snrm2
,dnrm2
,scnrm2
,dznrm2
,srot
,drot
--,srotg
--,drotg
,srotm
,drotm
--,srotmg
--,drotmg
,sscal
,dscal
,cscal
,zscal
,csscal
,zdscal
,sswap
,dswap
,cswap
,zswap
,isamax
,idamax
,icamax
,izamax
) where
import Numerical.HBLAS.UtilsFFI
import Numerical.HBLAS.BLAS.FFI.Level1
import Numerical.HBLAS.BLAS.Internal.Level1
import Control.Monad.Primitive
import Data.Complex
-- * ?asum family: absolute-value sum of a vector (see the CBLAS docs
-- for the exact semantics of the complex variants).
sasum :: PrimMonad m => AsumFun Float (PrimState m) m Float
sasum = asumAbstraction "sasum" cblas_sasum_safe cblas_sasum_unsafe
dasum :: PrimMonad m => AsumFun Double (PrimState m) m Double
dasum = asumAbstraction "dasum" cblas_dasum_safe cblas_dasum_unsafe
scasum :: PrimMonad m => AsumFun (Complex Float) (PrimState m) m Float
scasum = asumAbstraction "scasum" cblas_scasum_safe cblas_scasum_unsafe
dzasum :: PrimMonad m => AsumFun (Complex Double) (PrimState m) m Double
dzasum = asumAbstraction "dzasum" cblas_dzasum_safe cblas_dzasum_unsafe
-- * ?axpy family (the BLAS a*x + y update).  The final argument tells
-- the abstraction how to hand the scalar to the FFI call: real scalars
-- are passed by value ((\x f -> f x)); complex scalars are marshalled
-- by reference via 'withRStorable_'.
saxpy :: PrimMonad m => AxpyFun Float (PrimState m) m
saxpy = axpyAbstraction "saxpy" cblas_saxpy_safe cblas_saxpy_unsafe (\x f -> f x)
daxpy :: PrimMonad m => AxpyFun Double (PrimState m) m
daxpy = axpyAbstraction "daxpy" cblas_daxpy_safe cblas_daxpy_unsafe (\x f -> f x)
caxpy :: PrimMonad m => AxpyFun (Complex Float) (PrimState m) m
caxpy = axpyAbstraction "caxpy" cblas_caxpy_safe cblas_caxpy_unsafe withRStorable_
zaxpy :: PrimMonad m => AxpyFun (Complex Double) (PrimState m) m
zaxpy = axpyAbstraction "zaxpy" cblas_zaxpy_safe cblas_zaxpy_unsafe withRStorable_
scopy :: PrimMonad m => CopyFun Float (PrimState m) m
scopy = copyAbstraction "scopy" cblas_scopy_safe cblas_scopy_unsafe
dcopy :: PrimMonad m => CopyFun Double (PrimState m) m
dcopy = copyAbstraction "dcopy" cblas_dcopy_safe cblas_dcopy_unsafe
ccopy :: PrimMonad m => CopyFun (Complex Float) (PrimState m) m
ccopy = copyAbstraction "ccopy" cblas_ccopy_safe cblas_ccopy_unsafe
zcopy :: PrimMonad m => CopyFun (Complex Double) (PrimState m) m
zcopy = copyAbstraction "zcopy" cblas_zcopy_safe cblas_zcopy_unsafe
sdot :: PrimMonad m => NoScalarDotFun Float (PrimState m) m Float
sdot = noScalarDotAbstraction "sdot" cblas_sdot_safe cblas_sdot_unsafe
ddot :: PrimMonad m => NoScalarDotFun Double (PrimState m) m Double
ddot = noScalarDotAbstraction "ddot" cblas_ddot_safe cblas_ddot_unsafe
--sdsdot :: PrimMonad m => ScalarDotFun Float (PrimState m) m Float
--sdsdot = scalarDotAbstraction "sdsdot" cblas_sdsdot_safe cblas_sdsdot_unsafe withRStorable withRStorable
dsdot :: PrimMonad m => NoScalarDotFun Float (PrimState m) m Double
dsdot = noScalarDotAbstraction "dsdot" cblas_dsdot_safe cblas_dsdot_unsafe
cdotu :: PrimMonad m => ComplexDotFun (Complex Float) (PrimState m) m
cdotu = complexDotAbstraction "cdotu" cblas_cdotu_safe cblas_cdotu_unsafe
cdotc :: PrimMonad m => ComplexDotFun (Complex Float) (PrimState m) m
cdotc = complexDotAbstraction "cdotc" cblas_cdotc_safe cblas_cdotc_unsafe
zdotu :: PrimMonad m => ComplexDotFun (Complex Double) (PrimState m) m
zdotu = complexDotAbstraction "zdotu" cblas_zdotu_safe cblas_zdotu_unsafe
zdotc :: PrimMonad m => ComplexDotFun (Complex Double) (PrimState m) m
zdotc = complexDotAbstraction "zdotc" cblas_zdotc_safe cblas_zdotc_unsafe
snrm2 :: PrimMonad m => Nrm2Fun Float (PrimState m) m Float
snrm2 = norm2Abstraction "snrm2" cblas_snrm2_safe cblas_snrm2_unsafe
dnrm2 :: PrimMonad m => Nrm2Fun Double (PrimState m) m Double
dnrm2 = norm2Abstraction "dnrm2" cblas_dnrm2_safe cblas_dnrm2_unsafe
scnrm2 :: PrimMonad m => Nrm2Fun (Complex Float) (PrimState m) m Float
scnrm2 = norm2Abstraction "scnrm2" cblas_scnrm2_safe cblas_scnrm2_unsafe
dznrm2 :: PrimMonad m => Nrm2Fun (Complex Double) (PrimState m) m Double
dznrm2 = norm2Abstraction "dznrm2" cblas_dznrm2_safe cblas_dznrm2_unsafe
srot :: PrimMonad m => RotFun Float (PrimState m) m
srot = rotAbstraction "srot" cblas_srot_safe cblas_srot_unsafe
drot :: PrimMonad m => RotFun Double (PrimState m) m
drot = rotAbstraction "drot" cblas_drot_safe cblas_drot_unsafe
--srotg :: PrimMonad m => RotgFun Float (PrimState m) m
--srotg = rotgAbstraction "srotg" cblas_srotg_safe cblas_srotg_unsafe
--drotg :: PrimMonad m => RotgFun Double (PrimState m) m
--drotg = rotgAbstraction "drotg" cblas_drotg_safe cblas_drotg_unsafe
srotm :: PrimMonad m => RotmFun Float (PrimState m) m
srotm = rotmAbstraction "srotm" cblas_srotm_safe cblas_srotm_unsafe
drotm :: PrimMonad m => RotmFun Double (PrimState m) m
drotm = rotmAbstraction "drotm" cblas_drotm_safe cblas_drotm_unsafe
--srotmg :: PrimMonad m => RotmgFun Float (PrimState m) m
--srotmg = rotmgAbstraction "srotmg" cblas_srotmg_safe cblas_srotmg_unsafe
--drotmg :: PrimMonad m => RotmgFun Double (PrimState m) m
--drotmg = rotmgAbstraction "drotmg" cblas_drotmg_safe cblas_drotmg_unsafe
sscal :: PrimMonad m => ScalFun Float Float (PrimState m) m
sscal = scalAbstraction "sscal" cblas_sscal_safe cblas_sscal_unsafe (\x f -> f x )
dscal :: PrimMonad m => ScalFun Double Double (PrimState m) m
dscal = scalAbstraction "dscal" cblas_dscal_safe cblas_dscal_unsafe (\x f -> f x )
cscal :: PrimMonad m => ScalFun (Complex Float) (Complex Float) (PrimState m) m
cscal = scalAbstraction "cscal" cblas_cscal_safe cblas_cscal_unsafe withRStorable_
zscal :: PrimMonad m => ScalFun (Complex Double) (Complex Double) (PrimState m) m
zscal = scalAbstraction "zscal" cblas_zscal_safe cblas_zscal_unsafe withRStorable_
csscal :: PrimMonad m => ScalFun Float (Complex Float) (PrimState m) m
csscal = scalAbstraction "csscal" cblas_csscal_safe cblas_csscal_unsafe (\x f -> f x )
zdscal :: PrimMonad m => ScalFun Double (Complex Double) (PrimState m) m
zdscal = scalAbstraction "zdscal" cblas_zdscal_safe cblas_zdscal_unsafe (\x f -> f x )
sswap :: PrimMonad m => SwapFun Float (PrimState m) m
sswap = swapAbstraction "sswap" cblas_sswap_safe cblas_sswap_unsafe
dswap :: PrimMonad m => SwapFun Double (PrimState m) m
dswap = swapAbstraction "dswap" cblas_dswap_safe cblas_dswap_unsafe
cswap :: PrimMonad m => SwapFun (Complex Float) (PrimState m) m
cswap = swapAbstraction "cswap" cblas_cswap_safe cblas_cswap_unsafe
zswap :: PrimMonad m => SwapFun (Complex Double) (PrimState m) m
zswap = swapAbstraction "zswap" cblas_zswap_safe cblas_zswap_unsafe
isamax :: PrimMonad m => IamaxFun Float (PrimState m) m
isamax = iamaxAbstraction "isamax" cblas_isamax_safe cblas_isamax_unsafe
idamax :: PrimMonad m => IamaxFun Double (PrimState m) m
idamax = iamaxAbstraction "idamax" cblas_idamax_safe cblas_idamax_unsafe
icamax :: PrimMonad m => IamaxFun (Complex Float) (PrimState m) m
icamax = iamaxAbstraction "icamax" cblas_icamax_safe cblas_icamax_unsafe
izamax :: PrimMonad m => IamaxFun (Complex Double)(PrimState m) m
izamax = iamaxAbstraction "izamax" cblas_izamax_safe cblas_izamax_unsafe
{-
isamin :: PrimMonad m => IaminFun Float (PrimState m) m
isamin = iaminAbstraction "isamin" cblas_isamin_safe cblas_isamin_unsafe
idamin :: PrimMonad m => IaminFun Double (PrimState m) m
idamin = iaminAbstraction "idamin" cblas_idamin_safe cblas_idamin_unsafe
icamin :: PrimMonad m => IaminFun (Complex Float) (PrimState m) m
icamin = iaminAbstraction "icamin" cblas_icamin_safe cblas_icamin_unsafe
izamin :: PrimMonad m => IaminFun (Complex Double)(PrimState m) m
izamin = iaminAbstraction "izamin" cblas_izamin_safe cblas_izamin_unsafe
-}
|
wellposed/hblas
|
src/Numerical/HBLAS/BLAS/Level1.hs
|
bsd-3-clause
| 10,337
| 0
| 8
| 1,795
| 1,965
| 1,003
| 962
| 130
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
--------------------------------------------------------------------------------
-- |
-- Module HipChat.AddOn.Registration
--
-- HipChat add on registration, sent as part of the add-on installation flow.
-- See: https://developer.atlassian.com/hipchat/guide/installation-flow
--------------------------------------------------------------------------------
module HipChat.AddOn.Registration where
import HipChat.Types (RoomId, URL)
import HipChat.Util (ToFromJSON)
import Control.Lens.TH (camelCaseFields, makeLensesWith)
import Data.Text (Text)
import GHC.Generics
-- | The installation payload HipChat POSTs to an add-on's installation
-- callback URL (see the installation-flow guide linked in the module
-- header).
data Registration = Registration
  { registrationOauthId :: Text
  -- ^ OAuth client ID
  , registrationOauthSecret :: Text
  -- ^ OAuth shared secret
  , registrationCapabilitiesUrl :: URL
  -- ^ URL for a capabilities document which lists the URL for endpoints you can
  -- use to make REST calls to this installation
  , registrationRoomId :: Maybe RoomId
  -- ^ (optional, for room installations only): ID for the room the add-on was installed in
  , registrationGroupId :: Int
  -- ^ ID for the HipChat group the add-on was installed in
  } deriving (Show, Eq, Generic)
instance ToFromJSON Registration
-- Generates classy lenses by stripping the @registration@ prefix from
-- the field names (e.g. an @oauthId@ lens) -- see 'camelCaseFields'.
makeLensesWith camelCaseFields ''Registration
|
mjhopkins/hipchat
|
src/HipChat/AddOn/Registration.hs
|
bsd-3-clause
| 1,519
| 0
| 9
| 306
| 151
| 96
| 55
| 20
| 0
|
{-# LANGUAGE CPP #-}
-- | A module for 'FilePath' operations, to be used instead of "System.FilePath"
-- when writing build systems. In build systems, when using the file name
-- as a key for indexing rules, it is important that two different strings do
-- not refer to the same on-disk file. We therefore follow the conventions:
--
-- * Always use @\/@ as the directory separator, even on Windows.
--
-- * When combining 'FilePath' values with '</>' we squash any @\/.\/@ components.
module Development.Shake.FilePath(
module System.FilePath.Posix,
dropDirectory1, takeDirectory1, normalise,
(-<.>),
toNative, (</>), combine,
exe
) where
import System.FilePath.Posix hiding (normalise, (</>), combine)
import qualified System.FilePath as Native
import Data.List
infixr 5 </>
infixr 7 -<.>
-- | Drop the first directory from a 'FilePath'. Should only be used on
-- relative paths.
--
-- > dropDirectory1 "aaa/bbb" == "bbb"
-- > dropDirectory1 "aaa/" == ""
-- > dropDirectory1 "aaa" == ""
-- > dropDirectory1 "" == ""
dropDirectory1 :: FilePath -> FilePath
dropDirectory1 path = case break Native.isPathSeparator path of
    (_, _sep:rest) -> rest   -- discard everything up to and including the first separator
    (_, [])        -> []     -- no separator at all: the whole path is the first directory
-- | Take the first component of a 'FilePath'. Should only be used on
-- relative paths.
--
-- > takeDirectory1 "aaa/bbb" == "aaa"
-- > takeDirectory1 "aaa/" == "aaa"
-- > takeDirectory1 "aaa" == "aaa"
takeDirectory1 :: FilePath -> FilePath
takeDirectory1 = fst . break Native.isPathSeparator
-- | Normalise a 'FilePath', applying the standard 'FilePath' normalisation, plus
-- translating any path separators to @\/@ and removing @foo\/..@ components where possible.
normalise :: FilePath -> FilePath
normalise = intercalate "/" . dropDots . split . Native.normalise
    where
        -- Walk the components right-to-left, counting pending ".."
        -- markers and cancelling each against the next plain component.
        dropDots = reverse . f 0 . reverse
            where
                f i ("..":xs) = f (i+1) xs          -- remember a ".." to cancel later
                f 0 (x:xs) = x : f 0 xs             -- nothing pending: keep the component
                f i (x:xs) = f (i-1) xs             -- cancel one pending ".." against x
                f i [] = replicate i ".."           -- leftover ".."s escape past the start
        -- Split on native path separators, discarding the separators.
        split xs = a : if null b then [] else split $ tail b
            where (a,b) = break Native.isPathSeparator xs
-- | Convert to native path separators, namely @\\@ on Windows.
toNative :: FilePath -> FilePath
toNative = map swapSep
    where
        swapSep c
            | Native.isPathSeparator c = Native.pathSeparator
            | otherwise                = c
-- | Combine two file paths, an alias for 'combine'.
(</>) :: FilePath -> FilePath -> FilePath
x </> y = combine x y
-- | Remove the current extension and add another, an alias for 'replaceExtension'.
(-<.>) :: FilePath -> String -> FilePath
path -<.> ext = replaceExtension path ext
-- | Combine two file paths. Any leading @.\/@ or @..\/@ components in the right file
-- are eliminated.
--
-- > combine "aaa/bbb" "ccc" == "aaa/bbb/ccc"
-- > combine "aaa/bbb" "./ccc" == "aaa/bbb/ccc"
-- > combine "aaa/bbb" "../ccc" == "aaa/ccc"
combine :: FilePath -> FilePath -> FilePath
combine x y
    | x == "."                          = y
    | Just rest <- stripPrefix "../" y  = combine (takeDirectory x) rest
    | Just rest <- stripPrefix "./" y   = combine x rest
    | otherwise                         = normalise $ Native.combine (toNative x) (toNative y)
-- | The extension of executables, @\"exe\"@ on Windows and @\"\"@ otherwise.
exe :: String
#ifdef mingw32_HOST_OS
exe = "exe"
#else
exe = ""
#endif
|
nh2/shake
|
Development/Shake/FilePath.hs
|
bsd-3-clause
| 3,171
| 0
| 11
| 663
| 600
| 347
| 253
| 38
| 5
|
{-# LANGUAGE OverloadedStrings #-}
module Bot
( openConnection,
startBot
) where
import Network.Socket
import Data.Maybe (mapMaybe)
import qualified Data.Text as T
import qualified Types as IT
import Actions
-- | Connect to the IRC server, perform the registration handshake
-- (NICK, USER) for the given user, and JOIN the given channel.
-- Returns the live connection (socket + channel).
openConnection :: IT.Server -> IT.User -> IT.Channel -> IO IT.Connection
openConnection server user channel =
  do
    let address = IT.address server
    let port = show . IT.port $ server
    sock <- initSocket address port
    -- IRC commands are terminated with CRLF.
    send sock $ "NICK " ++ IT.nick user ++ "\r\n"
    send sock $ "USER " ++ IT.nick user ++ " " ++ address ++ " arb: " ++ IT.realName user ++ "\r\n"
    send sock $ "JOIN " ++ IT.channelName channel ++ "\r\n"
    return $ IT.Connection sock channel
-- | Run every action over one incoming message and send whatever
-- replies the actions produce back over the connection's socket.
handleMessage :: IT.Connection -> [IT.IRCAction] -> T.Text -> IO ()
handleMessage conn actions msg =
    mapM_ (send (IT.socket conn)) replies
  where
    -- Each action may decline (Nothing); keep only actual replies,
    -- unpacked to String for 'send'.
    replies = [ T.unpack reply
              | action <- actions
              , Just reply <- [action (IT.connectionChannel conn) msg]
              ]
-- | Main receive loop: process every line currently in the buffer,
-- read more bytes from the socket, and recurse with the unconsumed
-- tail (the text after the last CRLF) prepended to the new data.
--
-- NOTE(review): 'T.lines' also yields the trailing partial line, which
-- is then carried over in @unhandled@ and seen again next iteration --
-- looks like partial lines may be handled twice; confirm against the
-- actions' matching logic.
startBot :: IT.Connection -> [IT.IRCAction] -> T.Text -> IO ()
startBot connection actions buffer =
  do
    print buffer
    let sock = IT.socket connection
    let buf_lines = T.lines buffer
    mapM_ (handleMessage connection actions) buf_lines
    rd <- recv sock 1024
    -- Keep only the incomplete fragment after the last "\r\n".
    let (_,unhandled) = T.breakOnEnd "\r\n" buffer
    let new_buffer = T.concat [unhandled, T.pack rd]
    startBot connection actions new_buffer
-- | Resolve @hostname:port@ and return a connected TCP socket with
-- keep-alive enabled.
--
-- Fixes: adds the missing top-level type signature, and replaces the
-- partial @head addrinfos@ (which crashed with an opaque
-- \"Prelude.head: empty list\" on empty results) with an explicit
-- 'IOError' naming the host and port.
initSocket :: HostName -> ServiceName -> IO Socket
initSocket hostname port =
  do
    addrinfos <- getAddrInfo Nothing (Just hostname) (Just port)
    serveraddr <- case addrinfos of
      (a:_) -> return a
      []    -> ioError . userError $
                 "initSocket: no address info for " ++ hostname ++ ":" ++ port
    sock <- socket (addrFamily serveraddr) Stream defaultProtocol
    setSocketOption sock KeepAlive 1
    connect sock (addrAddress serveraddr)
    return sock
|
weeezes/Hik
|
src/Bot.hs
|
bsd-3-clause
| 1,907
| 0
| 14
| 552
| 613
| 291
| 322
| 45
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE BangPatterns #-}
-- This module is based on the implementation from Falko Peters'
-- 'scrypt' package and used under the terms of the BSD3 license.
module Tinfoil.KDF.Scrypt.Internal(
ScryptParams(..)
, scryptParams
, scrypt
, encodeScryptParams
, decodeScryptParams
, combine
, separate
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base64 as Base64
import Data.Char (ord)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Read as TR
import Foreign (Ptr, Word8, Word32, Word64, allocaBytes, castPtr)
import Foreign.C
import P
import Prelude (String)
import System.IO
import Tinfoil.Data
data ScryptParams =
ScryptParams {
scryptLogN :: !Int
, scryptR :: !Int
, scryptP :: !Int
} deriving (Eq, Show)
-- | Render the parameters as the ASCII text @logN|r|p@, the form
-- expected back by 'decodeScryptParams'.
encodeScryptParams :: ScryptParams -> ByteString
encodeScryptParams (ScryptParams logN r p) =
  T.encodeUtf8 . T.intercalate "|" $ map (T.pack . show) [logN, r, p]
-- | Inverse of 'encodeScryptParams': parse @logN|r|p@.  Yields
-- 'Nothing'' unless there are exactly three @|@-separated fields and
-- each is a decimal that consumes its field completely.
decodeScryptParams :: ByteString -> Maybe' ScryptParams
decodeScryptParams bs = case BS.split (fromIntegral $ ord '|') bs of
  [logN', r', p'] -> do
    logN <- maybeRead . TR.decimal $ T.decodeUtf8 logN'
    r <- maybeRead . TR.decimal $ T.decodeUtf8 r'
    p <- maybeRead . TR.decimal $ T.decodeUtf8 p'
    pure $ ScryptParams logN r p
  _ -> Nothing'
  where
    -- Accept a decimal parse only when no unparsed text remains.
    maybeRead :: Either String (Int, Text) -> Maybe' Int
    maybeRead (Left _) = Nothing'
    maybeRead (Right (x, "")) = Just' x
    maybeRead (Right (_, _)) = Nothing'
scryptParams :: Int -> Int -> Int -> ScryptParams
scryptParams logN r p =
ScryptParams logN r p
combine :: ScryptParams -> Entropy -> ByteString -> CredentialHash
combine params (Entropy salt) passHash =
CredentialHash . BS.intercalate "|" $
encodeScryptParams params
: [Base64.encode salt, Base64.encode passHash]
separate :: CredentialHash -> Maybe' (ScryptParams, Entropy, ByteString)
separate = go . BS.split (fromIntegral $ ord '|') . unCredentialHash
where
go [logN', r', p', salt', hash'] = do
[salt, hash] <- mapM decodeBase64 [salt', hash']
params <- decodeScryptParams $ BS.intercalate "|" [logN', r', p']
pure (params, Entropy salt, hash)
go _ = Nothing'
decodeBase64 = either (const Nothing') Just' . Base64.decode
-- This implementation originally from the `scrypt` package; modified to
-- run in IO.
scrypt :: ScryptParams -> Entropy -> Credential -> IO ByteString
scrypt (ScryptParams logN r p) (Entropy salt) (Credential pass) =
let bufLen = 64 :: Int in
BS.useAsCStringLen salt $ \(saltPtr, saltLen) ->
BS.useAsCStringLen pass $ \(passPtr, passLen) ->
allocaBytes (fromIntegral bufLen) $ \bufPtr -> do
throwErrnoIfMinus1_ "crypto_scrypt" $ crypto_scrypt
(castPtr passPtr) (fromIntegral passLen)
(castPtr saltPtr) (fromIntegral saltLen)
(2^logN) (fromIntegral r) (fromIntegral p)
bufPtr (fromIntegral bufLen)
BS.packCStringLen (castPtr bufPtr, fromIntegral bufLen)
foreign import ccall safe crypto_scrypt
:: Ptr Word8 -> CSize -- password
-> Ptr Word8 -> CSize -- salt
-> Word64 -> Word32 -> Word32 -- N, r, p
-> Ptr Word8 -> CSize -- result buffer
-> IO CInt
|
ambiata/tinfoil
|
src/Tinfoil/KDF/Scrypt/Internal.hs
|
bsd-3-clause
| 3,565
| 0
| 18
| 834
| 1,064
| 570
| 494
| 88
| 4
|
module GoogleApi where
import Data.Text (Text)
import qualified Data.Text as T
geocodeUrl = "maps.googleapis.com/maps/api/geocode/json"
-- | https://developers.google.com/maps/documentation/geocoding/#ComponentFiltering
--
data Component = Route -- ^ matches long or short name of a route.
| Locality -- ^ matches against both locality and sublocality types.
| AdministrativeArea -- ^ matches all the administrative_area levels.
| PostalCode -- ^ matches postal_code and postal_code_prefix.
| Country -- ^ matches a country name or a two letter ISO 3166-1 country code.
-- | https://developers.google.com/maps/documentation/geocoding/#ReverseGeocoding
--
-- | Options for a reverse-geocoding request.
-- Fixed: the file previously ended with two stray top-level scratch
-- lines (a bare list literal and a dangling @getByAddress...@) that
-- made the module fail to parse; they are preserved below as comments.
data Options = Options { key :: Maybe Text -- ^ Your application's API key, obtained from the Google Developers Console.
                       --, bounds ::
                       , language :: Text -- ^ The language in which to return results. https://developers.google.com/maps/faq#languagesupport
                       , resultType :: Text
                       , components :: [(Component,String)] -- ^ Component filters, e.g. @[(Route,"TX"), (Country,"US")]@
                       }
-- TODO: implement the request function, e.g. @getByAddress@.
|
Romer4ig/geodb
|
src/GoogleApi/Reverse.hs
|
bsd-3-clause
| 1,090
| 1
| 11
| 203
| 129
| 85
| 44
| -1
| -1
|
{-# LANGUAGE TypeFamilies, TypeOperators, FlexibleInstances #-}
-- | Index space transformation between arrays and slices.
module Data.Array.Repa.Slice
( All (..)
, Any (..)
, FullShape
, SliceShape
, Slice (..))
where
import Data.Array.Repa.Index
import Prelude hiding (replicate, drop)
-- | Select all indices at a certain position.
data All = All
-- | Place holder for any possible shape.
data Any sh = Any
-- | Map a type of the index in the full shape, to the type of the index in the slice.
type family FullShape ss
type instance FullShape Z = Z
type instance FullShape (Any sh) = sh
type instance FullShape (sl :. Int) = FullShape sl :. Int
type instance FullShape (sl :. All) = FullShape sl :. Int
-- | Map the type of an index in the slice, to the type of the index in the full shape.
type family SliceShape ss
type instance SliceShape Z = Z
type instance SliceShape (Any sh) = sh
type instance SliceShape (sl :. Int) = SliceShape sl
type instance SliceShape (sl :. All) = SliceShape sl :. Int
-- | Class of index types that can map to slices.
class Slice ss where
-- | Map an index of a full shape onto an index of some slice.
sliceOfFull :: ss -> FullShape ss -> SliceShape ss
-- | Map an index of a slice onto an index of the full shape.
fullOfSlice :: ss -> SliceShape ss -> FullShape ss
instance Slice Z where
{-# INLINE [1] sliceOfFull #-}
sliceOfFull _ _ = Z
{-# INLINE [1] fullOfSlice #-}
fullOfSlice _ _ = Z
instance Slice (Any sh) where
{-# INLINE [1] sliceOfFull #-}
sliceOfFull _ sh = sh
{-# INLINE [1] fullOfSlice #-}
fullOfSlice _ sh = sh
instance Slice sl => Slice (sl :. Int) where
{-# INLINE [1] sliceOfFull #-}
sliceOfFull (fsl :. _) (ssl :. _)
= sliceOfFull fsl ssl
{-# INLINE [1] fullOfSlice #-}
fullOfSlice (fsl :. n) ssl
= fullOfSlice fsl ssl :. n
instance Slice sl => Slice (sl :. All) where
{-# INLINE [1] sliceOfFull #-}
sliceOfFull (fsl :. All) (ssl :. s)
= sliceOfFull fsl ssl :. s
{-# INLINE [1] fullOfSlice #-}
fullOfSlice (fsl :. All) (ssl :. s)
= fullOfSlice fsl ssl :. s
|
kairne/repa-lts
|
Data/Array/Repa/Slice.hs
|
bsd-3-clause
| 2,091
| 67
| 9
| 459
| 555
| 323
| 232
| 48
| 0
|
module Playlistach.Util.List where
-- | Flatten the grouped lists into a single list.
-- NOTE(review): despite the name, this is currently just 'concat' --
-- no proportional interleaving is performed; confirm intended
-- semantics with callers before changing.
proportionate :: [[a]] -> [a]
proportionate xss = [ x | xs <- xss, x <- xs ]
|
aemxdp/playlistach
|
backend/Playlistach/Util/List.hs
|
bsd-3-clause
| 88
| 0
| 7
| 11
| 30
| 19
| 11
| 3
| 1
|
{-# LINE 1 "Foreign.Ptr.hs" #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, MagicHash, GeneralizedNewtypeDeriving,
StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.Ptr
-- Copyright : (c) The FFI task force 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : ffi@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- This module provides typed pointers to foreign data. It is part
-- of the Foreign Function Interface (FFI) and will normally be
-- imported via the "Foreign" module.
--
-----------------------------------------------------------------------------
module Foreign.Ptr (
-- * Data pointers
Ptr,
nullPtr,
castPtr,
plusPtr,
alignPtr,
minusPtr,
-- * Function pointers
FunPtr,
nullFunPtr,
castFunPtr,
castFunPtrToPtr,
castPtrToFunPtr,
freeHaskellFunPtr,
-- Free the function pointer created by foreign export dynamic.
-- * Integral types with lossless conversion to and from pointers
IntPtr,
ptrToIntPtr,
intPtrToPtr,
WordPtr,
ptrToWordPtr,
wordPtrToPtr
) where
import GHC.Ptr
import GHC.Base
import GHC.Num
import GHC.Read
import GHC.Real
import GHC.Show
import GHC.Enum
import Data.Bits
import Foreign.Storable ( Storable(..) )
-- | Release the storage associated with the given 'FunPtr', which
-- must have been obtained from a wrapper stub. This should be called
-- whenever the return value from a foreign import wrapper function is
-- no longer required; otherwise, the storage it uses will leak.
foreign import ccall unsafe "freeHaskellFunctionPtr"
freeHaskellFunPtr :: FunPtr a -> IO ()
{- --------------------------------------------------------------------------
// Dirty CPP hackery for CTypes/CTypesISO
//
// (c) The FFI task force, 2000
// --------------------------------------------------------------------------
-}
{-
// As long as there is no automatic derivation of classes for newtypes we resort
// to extremely dirty cpp-hackery. :-P Some care has to be taken when the
// macros below are modified, otherwise the layout rule will bite you.
-}
-- // GHC can derive any class for a newtype, so we make use of that here...
-- | An unsigned integral type that can be losslessly converted to and from
-- @Ptr@. This type is also compatible with the C99 type @uintptr_t@, and
-- can be marshalled to and from that type safely.
newtype WordPtr = WordPtr Word deriving (Eq,Ord,Num,Enum,Storable,Real, Bounded,Integral,Bits,FiniteBits); instance Read WordPtr where { readsPrec = unsafeCoerce# (readsPrec :: Int -> ReadS Word); readList = unsafeCoerce# (readList :: ReadS [Word]); }; instance Show WordPtr where { showsPrec = unsafeCoerce# (showsPrec :: Int -> Word -> ShowS); show = unsafeCoerce# (show :: Word -> String); showList = unsafeCoerce# (showList :: [Word] -> ShowS); };
-- Word and Int are guaranteed pointer-sized in GHC
-- | A signed integral type that can be losslessly converted to and from
-- @Ptr@. This type is also compatible with the C99 type @intptr_t@, and
-- can be marshalled to and from that type safely.
newtype IntPtr = IntPtr Int deriving (Eq,Ord,Num,Enum,Storable,Real, Bounded,Integral,Bits,FiniteBits); instance Read IntPtr where { readsPrec = unsafeCoerce# (readsPrec :: Int -> ReadS Int); readList = unsafeCoerce# (readList :: ReadS [Int]); }; instance Show IntPtr where { showsPrec = unsafeCoerce# (showsPrec :: Int -> Int -> ShowS); show = unsafeCoerce# (show :: Int -> String); showList = unsafeCoerce# (showList :: [Int] -> ShowS); };
-- Word and Int are guaranteed pointer-sized in GHC
-- | casts a @Ptr@ to a @WordPtr@
ptrToWordPtr :: Ptr a -> WordPtr
ptrToWordPtr (Ptr a#) = WordPtr (W# (int2Word# (addr2Int# a#)))
-- | casts a @WordPtr@ to a @Ptr@
wordPtrToPtr :: WordPtr -> Ptr a
wordPtrToPtr (WordPtr (W# w#)) = Ptr (int2Addr# (word2Int# w#))
-- | casts a @Ptr@ to an @IntPtr@
ptrToIntPtr :: Ptr a -> IntPtr
ptrToIntPtr (Ptr a#) = IntPtr (I# (addr2Int# a#))
-- | casts an @IntPtr@ to a @Ptr@
intPtrToPtr :: IntPtr -> Ptr a
intPtrToPtr (IntPtr (I# i#)) = Ptr (int2Addr# i#)
|
phischu/fragnix
|
builtins/base/Foreign.Ptr.hs
|
bsd-3-clause
| 4,911
| 0
| 11
| 1,428
| 721
| 420
| 301
| 43
| 1
|
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2013-15 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Succinct.Tree.Types
( Binary(..)
, Labelled(..)
, labelledToTree
, drawLabelled
, Rose(..)
) where
import Control.Applicative
import Data.Bifunctor
import Data.Bifoldable
import Data.Bitraversable
import Data.Tree
import qualified Data.Foldable as F
import qualified Data.Traversable as T
data Binary = Bin Binary Binary | Tip deriving (Eq,Show)
data Labelled b a = LabelledBin b (Labelled b a) (Labelled b a) | LabelledTip a deriving (Eq,Show,Functor,F.Foldable,T.Traversable)
-- | View a labelled binary tree as a rose 'Tree', tagging internal
-- (branch) labels with 'Left' and leaf labels with 'Right'.
labelledToTree :: Labelled b a -> Tree (Either b a)
labelledToTree (LabelledTip x) = Node (Right x) []
labelledToTree (LabelledBin x l r) = Node (Left x) [labelledToTree l, labelledToTree r]
-- | Pretty-print a labelled tree via 'drawTree', showing every label.
drawLabelled :: (Show a, Show b) => Labelled b a -> String
drawLabelled = drawTree . fmap show . labelledToTree
instance Bifunctor Labelled where
bimap f g (LabelledBin x a b) = LabelledBin (f x) (bimap f g a) (bimap f g b)
bimap _ g (LabelledTip x) = LabelledTip $ g x
instance Bifoldable Labelled where
bifoldMap = bifoldMapDefault
instance Bitraversable Labelled where
bitraverse _ g (LabelledTip x) = LabelledTip <$> g x
bitraverse f g (LabelledBin x a b) = LabelledBin <$> f x <*> bitraverse f g a <*> bitraverse f g b
newtype Rose = Rose [Rose] deriving (Eq,Show)
|
ekmett/succinct
|
src/Succinct/Tree/Types.hs
|
bsd-2-clause
| 1,713
| 0
| 9
| 281
| 520
| 281
| 239
| 30
| 1
|
{-# LANGUAGE DeriveDataTypeable, TypeFamilies, TemplateHaskell, RankNTypes, NamedFieldPuns, RecordWildCards, DoRec, BangPatterns, CPP #-}
module Distribution.Server.Features.HoogleData (
initHoogleDataFeature,
HoogleDataFeature(..),
) where
import Distribution.Server.Framework hiding (path)
import Distribution.Server.Framework.BlobStorage (BlobId)
import qualified Distribution.Server.Framework.BlobStorage as BlobStorage
import Distribution.Server.Features.Core
import Distribution.Server.Features.Documentation
import Distribution.Server.Features.TarIndexCache
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Data.TarIndex as TarIndex
import Distribution.Package
import Distribution.Text
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Compression.GZip as GZip
import qualified Codec.Compression.Zlib.Internal as Zlib
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.ByteString.Lazy as BS
import Data.Serialize (runGetLazy, runPutLazy)
import Data.SafeCopy (SafeCopy, safeGet, safePut)
import Data.Maybe
import Control.Monad.State
import System.IO
import System.IO.Unsafe (unsafeInterleaveIO)
import System.Directory
import System.FilePath
import Control.Concurrent.MVar
import Control.Concurrent.Async
import Control.Exception
import qualified System.IO.Error as IOError
-- | A feature to serve up a tarball of hoogle files, for the hoogle client.
--
data HoogleDataFeature = HoogleDataFeature {
hoogleDataFeatureInterface :: HackageFeature
}
instance IsHackageFeature HoogleDataFeature where
getFeatureInterface = hoogleDataFeatureInterface
----------------------------------------
-- Feature definition & initialisation
--
initHoogleDataFeature :: ServerEnv
-> IO (CoreFeature
-> DocumentationFeature
-> TarIndexCacheFeature
-> IO HoogleDataFeature)
initHoogleDataFeature env@ServerEnv{ serverCacheDelay,
serverVerbosity = verbosity } = do
-- Ephemeral state
docsUpdatedState <- newMemStateWHNF Set.empty
hoogleBundleUpdateJob <- newAsyncUpdate serverCacheDelay verbosity
"hoogle.tar.gz"
return $ \core docs tarIndexCache -> do
let feature = hoogleDataFeature docsUpdatedState
hoogleBundleUpdateJob
env core docs tarIndexCache
return feature
-- | Build the hoogle-data feature: maintains a single @hoogle.tar.gz@
-- bundle containing the hoogle @.txt@ file of (the best version of) every
-- package that has one, and serves it at @/packages/hoogle.tar.gz@.
-- Rebuilds are triggered asynchronously whenever any package's
-- documentation changes.
hoogleDataFeature :: MemState (Set PackageId)
-> AsyncUpdate
-> ServerEnv
-> CoreFeature
-> DocumentationFeature
-> TarIndexCacheFeature
-> HoogleDataFeature
hoogleDataFeature docsUpdatedState hoogleBundleUpdateJob
ServerEnv{serverBlobStore = store, serverStateDir}
CoreFeature{..} DocumentationFeature{..}
TarIndexCacheFeature{..}
= HoogleDataFeature {..}
where
hoogleDataFeatureInterface = (emptyHackageFeature "hoogle-data") {
featureDesc = "Provide a tarball of all package's hoogle files"
, featureResources = [hoogleBundleResource]
, featureState = []
, featureCaches = []
, featurePostInit = postInit
}
-- Resources
--
hoogleBundleResource =
(resourceAt "/packages/hoogle.tar.gz") {
resourceDesc = [ (GET, "get the tarball of hoogle files for all packages")
]
, resourceGet = [ ("tarball", serveHoogleData) ]
}
-- Request handlers
--
-- On-disk layout: the bundle itself plus a cache of per-package lookup
-- results (see 'updateTarBundle'), both under the feature's state dir.
featureStateDir = serverStateDir </> "db" </> "HoogleData"
bundleTarGzFile = featureStateDir </> "hoogle.tar.gz"
bundleCacheFile = featureStateDir </> "cache"
serveHoogleData :: DynamicPath -> ServerPartE Response
serveHoogleData _ =
-- serve the cached hoogle.tar.gz file
serveFile (asContentType "application/x-gzip") bundleTarGzFile
postInit :: IO ()
postInit = do
createDirectoryIfMissing False featureStateDir
prodFileCacheUpdate
-- Every doc change records the pkgid and kicks off a (coalesced)
-- background rebuild of the bundle.
registerHook documentationChangeHook $ \pkgid -> do
modifyMemState docsUpdatedState (Set.insert pkgid)
prodFileCacheUpdate
prodFileCacheUpdate :: IO ()
prodFileCacheUpdate =
asyncUpdate hoogleBundleUpdateJob updateHoogleBundle
-- Actually do the update. Here we are guaranteed that we're only doing
-- one update at once, no concurrent updates.
updateHoogleBundle :: IO ()
updateHoogleBundle = do
-- Take (and reset) the set of packages whose docs changed since the
-- last rebuild; these invalidate cached entries below.
docsUpdated <- readMemState docsUpdatedState
writeMemState docsUpdatedState Set.empty
updated <- maybeWithFile bundleTarGzFile $ \mhOldTar -> do
mcache <- readCacheFile bundleCacheFile
let docEntryCache = maybe Map.empty fst mcache
oldTarPkgids = maybe Set.empty snd mcache
tmpdir = featureStateDir
updateTarBundle mhOldTar tmpdir
docEntryCache oldTarPkgids
docsUpdated
case updated of
Nothing -> return ()
-- Atomically publish the new bundle, then persist the cache.
Just (docEntryCache', newTarPkgids, newTarFile) -> do
renameFile newTarFile bundleTarGzFile
writeCacheFile bundleCacheFile (docEntryCache', newTarPkgids)
updateTarBundle :: Maybe Handle -> FilePath
-> Map PackageId (Maybe (BlobId, TarEntryOffset))
-> Set PackageId
-> Set PackageId
-> IO (Maybe (Map PackageId (Maybe (BlobId, TarEntryOffset))
,Set PackageId, FilePath))
updateTarBundle mhOldTar tmpdir docEntryCache oldTarPkgids docsUpdated = do
-- Invalidate cached info about any package docs that have been updated
let docEntryCache' = docEntryCache `Map.difference` fromSet docsUpdated
cachedPkgids = fromSet (oldTarPkgids `Set.difference` docsUpdated)
-- get the package & docs index
pkgindex <- queryGetPackageIndex
docindex <- queryDocumentationIndex
-- Select the package ids that have corresponding docs that contain a
-- hoogle .txt file.
-- We prefer later package versions, but if a later one is missing the
-- hoogle .txt file then we fall back to older ones.
--
-- For the package ids we pick we keep the associated doc tarball blobid
-- and the offset of the hoogle .txt file within that tarball.
--
-- Looking up if a package's docs contains the hoogle .txt file is
-- expensive (have to read the doc tarball's index) so we maintain a
-- cache of that information.
(selectedPkgids, docEntryCache'') <-
-- use a state monad for access to and updating the cache
flip runStateT docEntryCache' $
fmap (Map.fromList . catMaybes) $
sequence
[ findFirstCached (lookupHoogleEntry docindex)
(reverse (map packageId pkgs))
| pkgs <- PackageIndex.allPackagesByName pkgindex ]
-- the set of pkgids to try to reuse from the existing tar file
let reusePkgs :: Map PackageId ()
reusePkgs = cachedPkgids `Map.intersection` selectedPkgids
-- the packages where we need to read it fresh
readFreshPkgs :: Map PackageId (BlobId, TarEntryOffset)
readFreshPkgs = selectedPkgids `Map.difference` reusePkgs
-- Nothing to do if nothing fresh and the old bundle's content matches.
if Map.null readFreshPkgs && Map.keysSet reusePkgs == oldTarPkgids
then return Nothing
else liftM Just $
withTempFile tmpdir "newtar" $ \hNewTar newTarFile ->
withWriter (tarWriter hNewTar) $ \putEntry -> do
-- We truncate on tar format errors. This works for the empty case
-- and should be self-correcting for real errors. It just means we
-- miss a few entries from the tarball 'til next time its updated.
oldEntries <- case mhOldTar of
Nothing -> return []
Just hOldTar ->
return . Tar.foldEntries (:) [] (const [])
. Tar.read
. BS.fromChunks
. Zlib.foldDecompressStream (:) [] (\_ _ -> [])
. Zlib.decompressWithErrors
Zlib.gzipFormat
Zlib.defaultDecompressParams
=<< BS.hGetContents hOldTar
-- Write out the cached ones
sequence_
[ putEntry entry
| entry <- oldEntries
, pkgid <- maybeToList (entryPkgId entry)
, pkgid `Map.member` reusePkgs ]
-- Write out the new/changed ones
sequence_
[ withFile doctarfile ReadMode $ \hDocTar -> do
mentry <- newCacheTarEntry pkgid hDocTar taroffset
maybe (return ()) putEntry mentry
| (pkgid, (doctarblobid, taroffset)) <- Map.toList readFreshPkgs
, let doctarfile = BlobStorage.filepath store doctarblobid ]
return (docEntryCache'', Map.keysSet selectedPkgids, newTarFile)
-- | Find the doc tarball blob and the offset of the hoogle .txt entry
-- within it, for the given package, if both exist.
lookupHoogleEntry :: Map PackageId BlobId -> PackageId -> IO (Maybe (BlobId, TarEntryOffset))
lookupHoogleEntry docindex pkgid
| Just doctarblobid <- Map.lookup pkgid docindex
= do doctarindex <- cachedTarIndex doctarblobid
case lookupPkgDocHoogleFile pkgid doctarindex of
Nothing -> return Nothing
Just offset -> return (Just (doctarblobid, offset))
| otherwise = return Nothing
-- | Turn a 'Set' into a 'Map' in which every element maps to unit.
-- Useful for the 'Map.intersection' \/ 'Map.difference' tricks above.
fromSet :: Ord a => Set a -> Map a ()
fromSet s = Map.fromDistinctAscList [ (x, ()) | x <- Set.toAscList s ]
-- | Like list 'find', except that the lookup function is monadic and
-- every lookup result (hit or miss) is memoised in the 'Map' carried by
-- the 'StateT' layer, so repeated calls never re-run the lookup.
--
-- Returns the first element for which the lookup succeeds, paired with
-- its result.
findFirstCached :: (Ord a, Monad m)
                => (a -> m (Maybe b))
                -> [a] -> StateT (Map a (Maybe b)) m (Maybe (a, b))
findFirstCached _ [] = return Nothing
findFirstCached lookupFn (k:ks) = do
    cache  <- get
    result <- case Map.lookup k cache of
                Just hit -> return hit       -- previously computed
                Nothing  -> do
                  miss <- lift (lookupFn k)
                  put (Map.insert k miss cache)
                  return miss
    case result of
      Nothing -> findFirstCached lookupFn ks -- keep scanning
      Just v  -> return (Just (k, v))
-- | Run an action with a freshly created temporary file in the given
-- directory. The handle is always closed afterwards; if the action throws,
-- the file is also deleted. On success the file is deliberately left in
-- place (callers rename it into its final location).
withTempFile :: FilePath -> String -> (Handle -> FilePath -> IO a) -> IO a
withTempFile tmpdir template action =
-- mask so an async exception cannot slip in between creating the file
-- and installing the cleanup handler
mask $ \restore -> do
(fname, hnd) <- openTempFile tmpdir template
x <- restore (action hnd fname)
`onException` (hClose hnd >> removeFile fname)
hClose hnd
return x
-- | Like 'withFile' in ReadMode, but if the file does not exist the action
-- is run with 'Nothing' instead of failing. Any other IO error (or a
-- does-not-exist error for a *different* file raised inside openFile) is
-- rethrown.
maybeWithFile :: FilePath -> (Maybe Handle -> IO a) -> IO a
maybeWithFile file action =
-- mask between opening and installing the 'finally', so the handle
-- cannot be leaked by an async exception
mask $ \unmask -> do
mhnd <- try $ openFile file ReadMode
case mhnd of
Right hnd -> unmask (action (Just hnd)) `finally` hClose hnd
Left e | IOError.isDoesNotExistError e
, Just file == IOError.ioeGetFileName e
-> unmask (action Nothing)
Left e -> throw e
-- | Read a SafeCopy-serialised value from a file. A missing file or a
-- file that fails to deserialise both yield 'Nothing' — corruption is
-- treated as a cache miss, never as a fatal error.
readCacheFile :: SafeCopy a => FilePath -> IO (Maybe a)
readCacheFile file =
maybeWithFile file $ \mhnd ->
case mhnd of
Nothing -> return Nothing
Just hnd -> do
content <- BS.hGetContents hnd
case runGetLazy safeGet content of
Left _ -> return Nothing
Right x -> return (Just x)
-- | Serialise a value to a file with SafeCopy.
-- NOTE(review): this write is not atomic (no write-to-temp-then-rename);
-- a crash mid-write leaves a corrupt cache file. That appears acceptable
-- here because 'readCacheFile' treats corruption as a cache miss — confirm.
writeCacheFile :: SafeCopy a => FilePath -> a -> IO ()
writeCacheFile file x =
BS.writeFile file (runPutLazy (safePut x))
-- | Find the offset of the hoogle @.txt@ file inside a package's
-- documentation tarball index, looking for the conventional path
-- @\<pkgid\>-docs\/\<pkgname\>.txt@.
lookupPkgDocHoogleFile :: PackageId -> TarIndex -> Maybe TarEntryOffset
lookupPkgDocHoogleFile pkgid index = do
-- the pattern match fails (to Nothing) if the entry is a directory
TarFileEntry offset <- TarIndex.lookup index path
return offset
where
path = (display pkgid ++ "-docs") </> display (packageName pkgid) <.> "txt"
-- | Read the hoogle .txt entry at the given offset in a doc tarball and
-- repackage its content as a fresh tar entry under the bundle's path
-- layout (see 'hoogleDataTarPath'), preserving the original timestamp.
-- Returns 'Nothing' if the entry cannot be read or the path cannot be
-- represented in tar format.
newCacheTarEntry :: PackageId -> Handle -> TarEntryOffset -> IO (Maybe Tar.Entry)
newCacheTarEntry pkgid htar offset
| Just entrypath <- hoogleDataTarPath pkgid = do
morigEntry <- readTarEntryAt htar offset
case morigEntry of
Nothing -> return Nothing
Just origEntry ->
return $ Just
(Tar.simpleEntry entrypath (Tar.entryContent origEntry)) {
Tar.entryTime = Tar.entryTime origEntry
}
| otherwise = return Nothing
-- | The path at which a package's hoogle file lives inside the bundle
-- tarball, e.g. @zlib\/0.5.4.1\/doc\/html\/zlib.txt@. Returns 'Nothing'
-- if the path cannot be represented in the tar format.
hoogleDataTarPath :: PackageId -> Maybe Tar.TarPath
hoogleDataTarPath pkgid =
    case Tar.toTarPath False entrypath of
      Left  _  -> Nothing
      Right tp -> Just tp
  where
    name      = display (packageName pkgid)
    entrypath = joinPath [ name
                         , display (packageVersion pkgid)
                         , "doc", "html"
                         , name <.> "txt" ]
-- | Recover the 'PackageId' a bundle tar entry belongs to by parsing
-- its path (the inverse of 'hoogleDataTarPath').
entryPkgId :: Tar.Entry -> Maybe PackageId
entryPkgId entry = parseEntryPath (Tar.entryPath entry)

-- | Parse a bundle entry path of the shape
-- @name\/version\/doc\/html\/name.txt@, checking that the two name
-- components agree.
parseEntryPath :: FilePath -> Maybe PackageId
parseEntryPath filename =
    case splitDirectories filename of
      [namestr, verstr, "doc", "html", filestr]
        | Just pkgname <- simpleParse namestr
        , Just pkgver  <- simpleParse verstr
        , (basename, ".txt") <- splitExtension filestr
        , simpleParse basename == Just pkgname
        -> Just (PackageIdentifier pkgname pkgver)
      _ -> Nothing
-- | Read the single tar entry starting at the given block offset of an
-- (uncompressed) tar file. Only plain-file entries are returned; anything
-- else — or a malformed header — yields 'Nothing'.
readTarEntryAt :: Handle -> TarEntryOffset -> IO (Maybe Tar.Entry)
readTarEntryAt htar off = do
-- tar offsets are counted in 512-byte blocks; the header is one block
hSeek htar AbsoluteSeek (fromIntegral (off * 512))
header <- BS.hGet htar 512
case Tar.read header of
(Tar.Next entry@Tar.Entry{Tar.entryContent = Tar.NormalFile _ size} _) -> do
-- the header told us the payload size; read it and substitute it in,
-- since Tar.read only saw the (content-free) header block
content <- BS.hGet htar (fromIntegral size)
return $ Just entry { Tar.entryContent = Tar.NormalFile content size }
_ -> return Nothing
-- | A sink of values: 'wWrite' pushes one value, 'wClose' flushes and
-- releases any underlying resource.
data Writer a = Writer { wWrite :: a -> IO (), wClose :: IO () }
-- | Bracketed use of a 'Writer': the writer is closed even if the action
-- throws.
withWriter :: IO (Writer b) -> ((b -> IO ()) -> IO a) -> IO a
withWriter mkwriter action = bracket mkwriter wClose (action . wWrite)
-- | A 'Writer' that streams tar entries, gzip-compressed, to a handle.
-- Entries are handed over a 'BChan' to a background 'async' thread that
-- performs the actual (lazy, incremental) write; 'wClose' ends the
-- stream and waits for the thread, propagating any exception it raised.
tarWriter :: Handle -> IO (Writer Tar.Entry)
tarWriter hnd = do
chan <- newBChan
awriter <- async $ do
entries <- getBChanContents chan
BS.hPut hnd ((GZip.compress . Tar.write) entries)
return Writer {
wWrite = writeBChan chan,
wClose = do closeBChan chan
wait awriter
}
-- | A tiny one-slot bounded channel: 'Just' carries an element,
-- 'Nothing' marks end-of-stream. Writer and reader proceed in lock-step.
newtype BChan a = BChan (MVar (Maybe a))
newBChan :: IO (BChan a)
newBChan = liftM BChan newEmptyMVar
-- | Push one element; blocks until the reader has taken the previous one.
writeBChan :: BChan a -> a -> IO ()
writeBChan (BChan c) = putMVar c . Just
-- | Signal end-of-stream; no further writes may follow.
closeBChan :: BChan a -> IO ()
closeBChan (BChan c) = putMVar c Nothing
-- | Return the channel's contents as a lazy list: each element is pulled
-- on demand via 'unsafeInterleaveIO', so the consumer drives the writer.
-- The list must be consumed from a single thread.
getBChanContents :: BChan a -> IO [a]
getBChanContents (BChan c) = do
res <- takeMVar c
case res of
Nothing -> return []
Just x -> do xs <- unsafeInterleaveIO (getBChanContents (BChan c))
return (x : xs)
|
mpickering/hackage-server
|
Distribution/Server/Features/HoogleData.hs
|
bsd-3-clause
| 14,896
| 0
| 29
| 4,233
| 3,631
| 1,850
| 1,781
| 288
| 5
|
module Language.Mecha.Visual
( visual
) where
import Graphics.UI.GLUT
import Language.Mecha.Assembly
-- | Open a GLUT window and run the visualisation main loop.
-- The assembly argument is currently ignored (nothing is rendered yet;
-- see the XXX in 'redraw').
visual :: Asm -> IO ()
visual _ = do
initialize "Mecha" []
initialWindowSize $= Size 800 600
initialDisplayMode $= [RGBAMode, WithDepthBuffer, DoubleBuffered]
actionOnWindowClose $= MainLoopReturns
createWindow "Mecha Visual"
setView 800 600
displayCallback $= redraw
reshapeCallback $= (Just $ \ (Size w h) -> setView (fromIntegral w) (fromIntegral h))
-- input callbacks currently just log events for debugging
keyboardMouseCallback $= (Just $ \ key keyState mods pos -> do
print key
print keyState
print mods
print pos)
-- a single white light at (1,1,0)
position (Light 0) $= Vertex4 1 1 0 1
ambient (Light 0) $= Color4 0 0 0 1
diffuse (Light 0) $= Color4 1 1 1 1
specular (Light 0) $= Color4 1 1 1 1
lightModelAmbient $= Color4 0.2 0.2 0.2 1
lighting $= Enabled
light (Light 0) $= Enabled
-- let glColor drive the material's ambient+diffuse components
colorMaterial $= Just (FrontAndBack, AmbientAndDiffuse)
materialSpecular FrontAndBack $= Color4 1 1 1 1
materialEmission FrontAndBack $= Color4 0 0 0 1
normalize $= Enabled
clearColor $= Color4 0.4 0.4 0.4 1
clearDepth $= 1
depthFunc $= Just Less
depthMask $= Enabled
cullFace $= Nothing
shadeModel $= Smooth
mainLoop
-- | Set up the projection matrix and viewport for a window of the given
-- pixel size, preserving the aspect ratio in the view frustum.
setView :: Int -> Int -> IO ()
setView w h = do
matrixMode $= Projection
loadIdentity
let r = (fromIntegral w / fromIntegral h)
-- near plane 0.1, far plane 100000; horizontal extent scaled by aspect
frustum (-r * 0.1) (r * 0.1) (-0.1) 0.1 0.1 100000
-- switch back to the modelview matrix for subsequent drawing
matrixMode $= Modelview 0
viewport $= (Position 0 0, Size (fromIntegral w) (fromIntegral h))
-- | Display callback: clears the buffers and swaps. No geometry is
-- drawn yet (see XXX).
redraw :: IO ()
redraw = do
clear [ColorBuffer, DepthBuffer]
loadIdentity
-- XXX
flush
swapBuffers
|
tomahawkins/mecha
|
attic/Visual.hs
|
bsd-3-clause
| 1,605
| 0
| 13
| 357
| 641
| 297
| 344
| 51
| 1
|
-- (c) The University of Glasgow 2006
--
-- FamInstEnv: Type checked family instance declarations
{-# LANGUAGE CPP, GADTs, ScopedTypeVariables #-}
module FamInstEnv (
FamInst(..), FamFlavor(..), famInstAxiom, famInstTyCon, famInstRHS,
famInstsRepTyCons, famInstRepTyCon_maybe, dataFamInstRepTyCon,
pprFamInst, pprFamInsts,
mkImportedFamInst,
FamInstEnvs, FamInstEnv, emptyFamInstEnv, emptyFamInstEnvs,
extendFamInstEnv, deleteFromFamInstEnv, extendFamInstEnvList,
identicalFamInstHead, famInstEnvElts, familyInstances, orphNamesOfFamInst,
-- * CoAxioms
mkCoAxBranch, mkBranchedCoAxiom, mkUnbranchedCoAxiom, mkSingleCoAxiom,
computeAxiomIncomps,
FamInstMatch(..),
lookupFamInstEnv, lookupFamInstEnvConflicts,
isDominatedBy,
-- Normalisation
topNormaliseType, topNormaliseType_maybe,
normaliseType, normaliseTcApp,
reduceTyFamApp_maybe, chooseBranch,
-- Flattening
flattenTys
) where
#include "HsVersions.h"
import InstEnv
import Unify
import Type
import TcType ( orphNamesOfTypes )
import TypeRep
import TyCon
import Coercion
import CoAxiom
import VarSet
import VarEnv
import Name
import UniqFM
import Outputable
import Maybes
import TrieMap
import Unique
import Util
import Var
import Pair
import SrcLoc
import NameSet
import FastString
{-
************************************************************************
* *
Type checked family instance heads
* *
************************************************************************
Note [FamInsts and CoAxioms]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* CoAxioms and FamInsts are just like
DFunIds and ClsInsts
* A CoAxiom is a System-FC thing: it can relate any two types
* A FamInst is a Haskell source-language thing, corresponding
to a type/data family instance declaration.
- The FamInst contains a CoAxiom, which is the evidence
for the instance
- The LHS of the CoAxiom is always of form F ty1 .. tyn
where F is a type family
-}
-- | A type-checked family instance declaration; the source-language
-- counterpart of a 'CoAxiom'.
data FamInst -- See Note [FamInsts and CoAxioms]
= FamInst { fi_axiom :: CoAxiom Unbranched -- The new coercion axiom introduced
-- by this family instance
, fi_flavor :: FamFlavor
-- Everything below here is a redundant,
-- cached version of the two things above
-- except that the TyVars are freshened
, fi_fam :: Name -- Family name
-- Used for "rough matching"; same idea as for class instances
-- See Note [Rough-match field] in InstEnv
, fi_tcs :: [Maybe Name] -- Top of type args
-- INVARIANT: fi_tcs = roughMatchTcs fi_tys
-- Used for "proper matching"; ditto
, fi_tvs :: [TyVar] -- Template tyvars for full match
-- Like ClsInsts, these variables are always
-- fresh. See Note [Template tyvars are fresh]
-- in InstEnv
, fi_tys :: [Type] -- and its arg types
-- INVARIANT: fi_tvs = coAxiomTyVars fi_axiom
, fi_rhs :: Type -- the RHS, with its freshened vars
}
-- | Whether the instance is for a type synonym family or a data family.
data FamFlavor
= SynFamilyInst -- A synonym family
| DataFamilyInst TyCon -- A data family, with its representation TyCon
-- | Obtain the axiom of a family instance
famInstAxiom :: FamInst -> CoAxiom Unbranched
famInstAxiom = fi_axiom
-- | Split the left-hand side of the FamInst into the family 'TyCon'
-- and its argument types.
famInstSplitLHS :: FamInst -> (TyCon, [Type])
famInstSplitLHS (FamInst { fi_axiom = axiom, fi_tys = lhs })
= (coAxiomTyCon axiom, lhs)
-- | Get the RHS of the FamInst (with freshened tyvars)
famInstRHS :: FamInst -> Type
famInstRHS = fi_rhs
-- | Get the family TyCon of the FamInst
famInstTyCon :: FamInst -> TyCon
famInstTyCon = coAxiomTyCon . famInstAxiom
-- | Return the representation TyCons introduced by data family instances,
-- if any; synonym-family instances contribute nothing.
famInstsRepTyCons :: [FamInst] -> [TyCon]
famInstsRepTyCons fis = [tc | FamInst { fi_flavor = DataFamilyInst tc } <- fis]
-- | Extracts the TyCon for this *data* (or newtype) instance
famInstRepTyCon_maybe :: FamInst -> Maybe TyCon
famInstRepTyCon_maybe fi
= case fi_flavor fi of
DataFamilyInst tycon -> Just tycon
SynFamilyInst -> Nothing
-- | Like 'famInstRepTyCon_maybe', but panics on a synonym-family
-- instance; only call when the instance is known to be a data family.
dataFamInstRepTyCon :: FamInst -> TyCon
dataFamInstRepTyCon fi
= case fi_flavor fi of
DataFamilyInst tycon -> tycon
SynFamilyInst -> pprPanic "dataFamInstRepTyCon" (ppr fi)
{-
************************************************************************
* *
Pretty printing
* *
************************************************************************
-}
-- A FamInst is named after its axiom.
instance NamedThing FamInst where
getName = coAxiomName . fi_axiom
instance Outputable FamInst where
ppr = pprFamInst
-- Prints the FamInst as a family instance declaration
-- NB: FamInstEnv.pprFamInst is used only for internal, debug printing
-- See pprTyThing.pprFamInst for printing for the user
-- | Prints the FamInst as a family instance declaration, with the
-- underlying axiom and RHS shown only under -dppr-debug.
pprFamInst :: FamInst -> SDoc
pprFamInst famInst
= hang (pprFamInstHdr famInst)
2 (vcat [ ifPprDebug (ptext (sLit "Coercion axiom:") <+> ppr ax)
, ifPprDebug (ptext (sLit "RHS:") <+> ppr (famInstRHS famInst)) ])
where
ax = fi_axiom famInst
-- | Print just the instance head, e.g. @type instance T Int = ...@.
pprFamInstHdr :: FamInst -> SDoc
pprFamInstHdr fi@(FamInst {fi_flavor = flavor})
= pprTyConSort <+> pp_instance <+> pp_head
where
-- For *associated* types, say "type T Int = blah"
-- For *top level* type instances, say "type instance T Int = blah"
pp_instance
| isTyConAssoc fam_tc = empty
| otherwise = ptext (sLit "instance")
(fam_tc, etad_lhs_tys) = famInstSplitLHS fi
vanilla_pp_head = pprTypeApp fam_tc etad_lhs_tys
-- Data family axioms may be eta-reduced (see Note [Eta reduction for
-- data family axioms] in TcInstDcls); re-apply the dropped tyvars so
-- the printed head matches what the user wrote.
pp_head | DataFamilyInst rep_tc <- flavor
, isAlgTyCon rep_tc
, let extra_tvs = dropList etad_lhs_tys (tyConTyVars rep_tc)
, not (null extra_tvs)
= getPprStyle $ \ sty ->
if debugStyle sty
then vanilla_pp_head -- With -dppr-debug just show it as-is
else pprTypeApp fam_tc (etad_lhs_tys ++ mkTyVarTys extra_tvs)
-- Without -dppr-debug, eta-expand
-- See Trac #8674
-- (This is probably over the top now that we use this
-- only for internal debug printing; PprTyThing.pprFamInst
-- is used for user-level printing.)
| otherwise
= vanilla_pp_head
pprTyConSort = case flavor of
SynFamilyInst -> ptext (sLit "type")
DataFamilyInst tycon
| isDataTyCon tycon -> ptext (sLit "data")
| isNewTyCon tycon -> ptext (sLit "newtype")
| isAbstractTyCon tycon -> ptext (sLit "data")
| otherwise -> ptext (sLit "WEIRD") <+> ppr tycon
-- | Pretty-print a list of family instances, stacked vertically.
pprFamInsts :: [FamInst] -> SDoc
pprFamInsts = vcat . map pprFamInst
{-
Note [Lazy axiom match]
~~~~~~~~~~~~~~~~~~~~~~~
It is Vitally Important that mkImportedFamInst is *lazy* in its axiom
parameter. The axiom is loaded lazily, via a forkM, in TcIface. Sometime
later, mkImportedFamInst is called using that axiom. However, the axiom
may itself depend on entities which are not yet loaded as of the time
of the mkImportedFamInst. Thus, if mkImportedFamInst eagerly looks at the
axiom, a dependency loop spontaneously appears and GHC hangs. The solution
is simply for mkImportedFamInst never, ever to look inside of the axiom
until everything else is good and ready to do so. We can assume that this
readiness has been achieved when some other code pulls on the axiom in the
FamInst. Thus, we pattern match on the axiom lazily (in the where clause,
not in the parameter list) and we assert the consistency of names there
also.
-}
-- Make a family instance representation from the information found in an
-- interface file. In particular, we get the rough match info from the iface
-- (instead of computing it here).
-- | Make a family instance representation from the information found in an
-- interface file. In particular, we get the rough match info from the iface
-- (instead of computing it here).
--
-- CRUCIAL: this function must stay lazy in the axiom argument; see
-- Note [Lazy axiom match]. Do not move the axiom pattern match into the
-- parameter list.
mkImportedFamInst :: Name -- Name of the family
-> [Maybe Name] -- Rough match info
-> CoAxiom Unbranched -- Axiom introduced
-> FamInst -- Resulting family instance
mkImportedFamInst fam mb_tcs axiom
= FamInst {
fi_fam = fam,
fi_tcs = mb_tcs,
fi_tvs = tvs,
fi_tys = tys,
fi_rhs = rhs,
fi_axiom = axiom,
fi_flavor = flavor }
where
-- See Note [Lazy axiom match]
~(CoAxiom { co_ax_branches =
~(FirstBranch ~(CoAxBranch { cab_lhs = tys
, cab_tvs = tvs
, cab_rhs = rhs })) }) = axiom
-- Derive the flavor for an imported FamInst rather disgustingly
-- Maybe we should store it in the IfaceFamInst?
flavor = case splitTyConApp_maybe rhs of
Just (tc, _)
| Just ax' <- tyConFamilyCoercion_maybe tc
, ax' == axiom
-> DataFamilyInst tc
_ -> SynFamilyInst
{-
************************************************************************
* *
FamInstEnv
* *
************************************************************************
Note [FamInstEnv]
~~~~~~~~~~~~~~~~~
A FamInstEnv maps a family name to the list of known instances for that family.
The same FamInstEnv includes both 'data family' and 'type family' instances.
Type families are reduced during type inference, but not data families;
the user explains when to use a data family instance by using constructors
and pattern matching.
Nevertheless it is still useful to have data families in the FamInstEnv:
- For finding overlaps and conflicts
- For finding the representation type...see FamInstEnv.topNormaliseType
and its call site in Simplify
- In standalone deriving instance Eq (T [Int]) we need to find the
representation type for T [Int]
Note [Varying number of patterns for data family axioms]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For data families, the number of patterns may vary between instances.
For example
data family T a b
data instance T Int a = T1 a | T2
data instance T Bool [a] = T3 a
Then we get a data type for each instance, and an axiom:
data TInt a = T1 a | T2
data TBoolList a = T3 a
axiom ax7 :: T Int ~ TInt -- Eta-reduced
axiom ax8 a :: T Bool [a] ~ TBoolList a
These two axioms for T, one with one pattern, one with two. The reason
for this eta-reduction is described in TcInstDcls
Note [Eta reduction for data family axioms]
-}
type FamInstEnv = UniqFM FamilyInstEnv -- Maps a family to its instances
-- See Note [FamInstEnv]
type FamInstEnvs = (FamInstEnv, FamInstEnv)
-- External package inst-env, Home-package inst-env
-- | The instances of one particular family, in no particular order.
newtype FamilyInstEnv
= FamIE [FamInst] -- The instances for a particular family, in any order
instance Outputable FamilyInstEnv where
ppr (FamIE fs) = ptext (sLit "FamIE") <+> vcat (map ppr fs)
-- INVARIANTS:
-- * The fs_tvs are distinct in each FamInst
-- of a range value of the map (so we can safely unify them)
-- | A pair of empty environments (external package, home package).
emptyFamInstEnvs :: (FamInstEnv, FamInstEnv)
emptyFamInstEnvs = (emptyFamInstEnv, emptyFamInstEnv)
emptyFamInstEnv :: FamInstEnv
emptyFamInstEnv = emptyUFM
-- | All instances in the environment, across all families.
famInstEnvElts :: FamInstEnv -> [FamInst]
famInstEnvElts fi = [elt | FamIE elts <- eltsUFM fi, elt <- elts]
-- | All the instances of the given family, drawn from both the home
-- package and the external package environments (home package first).
familyInstances :: (FamInstEnv, FamInstEnv) -> TyCon -> [FamInst]
familyInstances (pkg_fie, home_fie) fam
  = lookup_in home_fie ++ lookup_in pkg_fie
  where
    lookup_in env = maybe [] (\(FamIE insts) -> insts) (lookupUFM env fam)
-- | Collects the names of the concrete types and type constructors that
-- make up the LHS of a type family instance, including the family
-- name itself.
--
-- For instance, given `type family Foo a b`:
-- `type instance Foo (F (G (H a))) b = ...` would yield [Foo,F,G,H]
--
-- Used in the implementation of ":info" in GHCi.
orphNamesOfFamInst :: FamInst -> NameSet
orphNamesOfFamInst fam_inst
-- collect names over *all* branches of the axiom, plus the family
-- tycon's own name
= orphNamesOfTypes (concat (brListMap cab_lhs (coAxiomBranches axiom)))
`extendNameSet` getName (coAxiomTyCon axiom)
where
axiom = fi_axiom fam_inst
-- | Add a batch of family instances to an environment.
--
-- The fold is made strict in the accumulated environment: the original
-- lazy 'foldl' built a chain of 'extendFamInstEnv' thunks when loading a
-- long list of instances (the classic lazy-foldl space leak).
extendFamInstEnvList :: FamInstEnv -> [FamInst] -> FamInstEnv
extendFamInstEnvList inst_env fis = go inst_env fis
  where
    go env []        = env
    go env (fi:rest) = let env' = extendFamInstEnv env fi
                       in env' `seq` go env' rest
-- | Add a single family instance, prepending it to its family's list
-- (creating the family's entry if absent).
extendFamInstEnv :: FamInstEnv -> FamInst -> FamInstEnv
extendFamInstEnv inst_env
ins_item@(FamInst {fi_fam = cls_nm})
= addToUFM_C add inst_env cls_nm (FamIE [ins_item])
where
add (FamIE items) _ = FamIE (ins_item:items)
-- | Remove any instance with an identical LHS (see 'identicalFamInstHead').
deleteFromFamInstEnv :: FamInstEnv -> FamInst -> FamInstEnv
-- Used only for overriding in GHCi
deleteFromFamInstEnv inst_env fam_inst@(FamInst {fi_fam = fam_nm})
= adjustUFM adjust inst_env fam_nm
where
adjust :: FamilyInstEnv -> FamilyInstEnv
adjust (FamIE items)
= FamIE (filterOut (identicalFamInstHead fam_inst) items)
identicalFamInstHead :: FamInst -> FamInst -> Bool
-- ^ True when the LHSs are identical
-- Used for overriding in GHCi
identicalFamInstHead (FamInst { fi_axiom = ax1 }) (FamInst { fi_axiom = ax2 })
= coAxiomTyCon ax1 == coAxiomTyCon ax2
&& brListLength brs1 == brListLength brs2
&& and (brListZipWith identical_branch brs1 brs2)
where
brs1 = coAxiomBranches ax1
brs2 = coAxiomBranches ax2
-- Two branches are "identical" when each LHS matches the other,
-- i.e. they are alpha-equivalent up to the branch's tyvars.
identical_branch br1 br2
= isJust (tcMatchTys tvs1 lhs1 lhs2)
&& isJust (tcMatchTys tvs2 lhs2 lhs1)
where
tvs1 = mkVarSet (coAxBranchTyVars br1)
tvs2 = mkVarSet (coAxBranchTyVars br2)
lhs1 = coAxBranchLHS br1
lhs2 = coAxBranchLHS br2
{-
************************************************************************
* *
Compatibility
* *
************************************************************************
Note [Apartness]
~~~~~~~~~~~~~~~~
In dealing with closed type families, we must be able to check that one type
will never reduce to another. This check is called /apartness/. The check
is always between a target (which may be an arbitrary type) and a pattern.
Here is how we do it:
apart(target, pattern) = not (unify(flatten(target), pattern))
where flatten (implemented in flattenTys, below) converts all type-family
applications into fresh variables. (See Note [Flattening].)
Note [Compatibility]
~~~~~~~~~~~~~~~~~~~~
Two patterns are /compatible/ if either of the following conditions hold:
1) The patterns are apart.
2) The patterns unify with a substitution S, and their right hand sides
equal under that substitution.
For open type families, only compatible instances are allowed. For closed
type families, the story is slightly more complicated. Consider the following:
type family F a where
F Int = Bool
F a = Int
g :: Show a => a -> F a
g x = length (show x)
Should that type-check? No. We need to allow for the possibility that 'a'
might be Int and therefore 'F a' should be Bool. We can simplify 'F a' to Int
only when we can be sure that 'a' is not Int.
To achieve this, after finding a possible match within the equations, we have to
go back to all previous equations and check that, under the
substitution induced by the match, other branches are surely apart. (See
Note [Apartness].) This is similar to what happens with class
instance selection, when we need to guarantee that there is only a match and
no unifiers. The exact algorithm is different here because the
potentially-overlapping group is closed.
As another example, consider this:
type family G x
type instance where
G Int = Bool
G a = Double
type family H y
-- no instances
Now, we want to simplify (G (H Char)). We can't, because (H Char) might later
simplify to be Int. So, (G (H Char)) is stuck, for now.
While everything above is quite sound, it isn't as expressive as we'd like.
Consider this:
type family J a where
J Int = Int
J a = a
Can we simplify (J b) to b? Sure we can. Yes, the first equation matches if
b is instantiated with Int, but the RHSs coincide there, so it's all OK.
So, the rule is this: when looking up a branch in a closed type family, we
find a branch that matches the target, but then we make sure that the target
is apart from every previous *incompatible* branch. We don't check the
branches that are compatible with the matching branch, because they are either
irrelevant (clause 1 of compatible) or benign (clause 2 of compatible).
-}
-- See Note [Compatibility]
-- | Two branches are compatible when they are either surely apart, or
-- they unify and their RHSs coincide under the unifier.
compatibleBranches :: CoAxBranch -> CoAxBranch -> Bool
compatibleBranches (CoAxBranch { cab_lhs = lhs1, cab_rhs = rhs1 })
(CoAxBranch { cab_lhs = lhs2, cab_rhs = rhs2 })
= case tcUnifyTysFG instanceBindFun lhs1 lhs2 of
SurelyApart -> True
Unifiable subst
| Type.substTy subst rhs1 `eqType` Type.substTy subst rhs2
-> True
_ -> False
-- takes a CoAxiom with unknown branch incompatibilities and computes
-- the compatibilities
-- See Note [Storing compatibility] in CoAxiom
computeAxiomIncomps :: CoAxiom br -> CoAxiom br
computeAxiomIncomps ax@(CoAxiom { co_ax_branches = branches })
= ax { co_ax_branches = go [] branches }
where
-- Walk the branches front to back, accumulating the already-processed
-- branches so each branch records which *earlier* ones it is
-- incompatible with.
go :: [CoAxBranch] -> BranchList CoAxBranch br -> BranchList CoAxBranch br
go prev_branches (FirstBranch br)
= FirstBranch (br { cab_incomps = mk_incomps br prev_branches })
go prev_branches (NextBranch br tail)
= let br' = br { cab_incomps = mk_incomps br prev_branches } in
NextBranch br' (go (br' : prev_branches) tail)
mk_incomps :: CoAxBranch -> [CoAxBranch] -> [CoAxBranch]
mk_incomps br = filter (not . compatibleBranches br)
{-
************************************************************************
* *
Constructing axioms
These functions are here because tidyType / tcUnifyTysFG
are not available in CoAxiom
* *
************************************************************************
Note [Tidy axioms when we build them]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We print out axioms and don't want to print stuff like
F k k a b = ...
Instead we must tidy those kind variables. See Trac #7524.
-}
-- all axiom roles are Nominal, as this is only used with type families
-- | Build one branch of a coercion axiom from a type-family equation,
-- tidying the tyvars first (see Note [Tidy axioms when we build them]).
-- Incompatibilities are left as a placeholder; see 'computeAxiomIncomps'.
mkCoAxBranch :: [TyVar] -- original, possibly stale, tyvars
-> [Type] -- LHS patterns
-> Type -- RHS
-> SrcSpan
-> CoAxBranch
mkCoAxBranch tvs lhs rhs loc
= CoAxBranch { cab_tvs = tvs1
, cab_lhs = tidyTypes env lhs
, cab_roles = map (const Nominal) tvs1
, cab_rhs = tidyType env rhs
, cab_loc = loc
, cab_incomps = placeHolderIncomps }
where
(env, tvs1) = tidyTyVarBndrs emptyTidyEnv tvs
-- See Note [Tidy axioms when we build them]
-- all of the following code is here to avoid mutual dependencies with
-- Coercion
-- | Build a closed-family axiom from its branches, computing
-- branch incompatibilities as we go.
mkBranchedCoAxiom :: Name -> TyCon -> [CoAxBranch] -> CoAxiom Branched
mkBranchedCoAxiom ax_name fam_tc branches
= computeAxiomIncomps $
CoAxiom { co_ax_unique = nameUnique ax_name
, co_ax_name = ax_name
, co_ax_tc = fam_tc
, co_ax_role = Nominal
, co_ax_implicit = False
, co_ax_branches = toBranchList branches }
-- | Build a single-branch axiom from an already-constructed branch;
-- with only one branch there are no incompatibilities to compute.
mkUnbranchedCoAxiom :: Name -> TyCon -> CoAxBranch -> CoAxiom Unbranched
mkUnbranchedCoAxiom ax_name fam_tc branch
= CoAxiom { co_ax_unique = nameUnique ax_name
, co_ax_name = ax_name
, co_ax_tc = fam_tc
, co_ax_role = Nominal
, co_ax_implicit = False
, co_ax_branches = FirstBranch (branch { cab_incomps = [] }) }
mkSingleCoAxiom :: Role -> Name
-> [TyVar] -> TyCon -> [Type] -> Type
-> CoAxiom Unbranched
-- Make a single-branch CoAxiom, including making the branch itself
-- Used for both type family (Nominal) and data family (Representational)
-- axioms, hence passing in the Role
mkSingleCoAxiom role ax_name tvs fam_tc lhs_tys rhs_ty
= CoAxiom { co_ax_unique = nameUnique ax_name
, co_ax_name = ax_name
, co_ax_tc = fam_tc
, co_ax_role = role
, co_ax_implicit = False
, co_ax_branches = FirstBranch (branch { cab_incomps = [] }) }
where
branch = mkCoAxBranch tvs lhs_tys rhs_ty (getSrcSpan ax_name)
{-
************************************************************************
* *
Looking up a family instance
* *
************************************************************************
@lookupFamInstEnv@ looks up in a @FamInstEnv@, using a one-way match.
Multiple matches are only possible in case of type families (not data
families), and then, it doesn't matter which match we choose (as the
instances are guaranteed confluent).
We return the matching family instances and the type instance at which it
matches. For example, if we lookup 'T [Int]' and have a family instance
data instance T [a] = ..
desugared to
data :R42T a = ..
coe :Co:R42T a :: T [a] ~ :R42T a
we return the matching instance '(FamInst{.., fi_tycon = :R42T}, Int)'.
-}
-- when matching a type family application, we get a FamInst,
-- and the list of types the axiom should be applied to
-- | A successful lookup result: the instance plus the instantiating
-- types (template variables substituted, plus any leftover
-- over-saturation arguments).
data FamInstMatch = FamInstMatch { fim_instance :: FamInst
, fim_tys :: [Type]
}
-- See Note [Over-saturated matches]
instance Outputable FamInstMatch where
ppr (FamInstMatch { fim_instance = inst
, fim_tys = tys })
= ptext (sLit "match with") <+> parens (ppr inst) <+> ppr tys
-- | One-way matching lookup: find all instances whose LHS matches the
-- given saturated (or over-saturated) family application.
lookupFamInstEnv
:: FamInstEnvs
-> TyCon -> [Type] -- What we are looking for
-> [FamInstMatch] -- Successful matches
-- Precondition: the tycon is saturated (or over-saturated)
lookupFamInstEnv
= lookup_fam_inst_env match
where
match _ tpl_tvs tpl_tys tys = tcMatchTys tpl_tvs tpl_tys tys
-- | Find existing instances that *conflict* with a putative new one,
-- i.e. whose branch unifies with it without the RHSs agreeing
-- (see Note [Family instance overlap conflicts]).
lookupFamInstEnvConflicts
:: FamInstEnvs
-> FamInst -- Putative new instance
-> [FamInstMatch] -- Conflicting matches (don't look at the fim_tys field)
-- E.g. when we are about to add
-- f : type instance F [a] = a->a
-- we do (lookupFamInstConflicts f [b])
-- to find conflicting matches
--
-- Precondition: the tycon is saturated (or over-saturated)
lookupFamInstEnvConflicts envs fam_inst@(FamInst { fi_axiom = new_axiom })
= lookup_fam_inst_env my_unify envs fam tys
where
(fam, tys) = famInstSplitLHS fam_inst
-- In example above, fam tys' = F [b]
my_unify (FamInst { fi_axiom = old_axiom }) tpl_tvs tpl_tys _
= ASSERT2( tyVarsOfTypes tys `disjointVarSet` tpl_tvs,
(ppr fam <+> ppr tys) $$
(ppr tpl_tvs <+> ppr tpl_tys) )
-- Unification will break badly if the variables overlap
-- They shouldn't because we allocate separate uniques for them
if compatibleBranches (coAxiomSingleBranch old_axiom) new_branch
then Nothing
else Just noSubst
-- Note [Family instance overlap conflicts]
-- noSubst is never inspected by callers, hence the panic placeholder
noSubst = panic "lookupFamInstEnvConflicts noSubst"
new_branch = coAxiomSingleBranch new_axiom
{-
Note [Family instance overlap conflicts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- In the case of data family instances, any overlap is fundamentally a
conflict (as these instances imply injective type mappings).
- In the case of type family instances, overlap is admitted as long as
the right-hand sides of the overlapping rules coincide under the
overlap substitution. eg
type instance F a Int = a
type instance F Int b = b
These two overlap on (F Int Int) but then both RHSs are Int,
so all is well. We require that they are syntactically equal;
anything else would be difficult to test for at this stage.
-}
------------------------------------------------------------
-- Might be a one-way match or a unifier
-- | A 'MatchFun' decides whether a candidate 'FamInst' is relevant to the
-- target types (one-way matching for lookup, unification-style for conflict
-- checking), returning the witnessing substitution on success.
type MatchFun =  FamInst                -- The FamInst template
              -> TyVarSet -> [Type]     -- fi_tvs, fi_tys of that FamInst
              -> [Type]                 -- Target to match against
              -> Maybe TvSubst

lookup_fam_inst_env'          -- The worker, local to this module
    :: MatchFun
    -> FamInstEnv
    -> TyCon -> [Type]        -- What we are looking for
    -> [FamInstMatch]
lookup_fam_inst_env' match_fun ie fam match_tys
  | isOpenFamilyTyCon fam
  , Just (FamIE insts) <- lookupUFM ie fam
  = find insts    -- The common case
  | otherwise = []
  where

    find [] = []
    find (item@(FamInst { fi_tcs = mb_tcs, fi_tvs = tpl_tvs,
                          fi_tys = tpl_tys }) : rest)
        -- Fast check for no match, uses the "rough match" fields
      | instanceCantMatch rough_tcs mb_tcs
      = find rest

        -- Proper check
      | Just subst <- match_fun item (mkVarSet tpl_tvs) tpl_tys match_tys1
      = (FamInstMatch { fim_instance = item
                      , fim_tys      = substTyVars subst tpl_tvs `chkAppend` match_tys2 })
        : find rest

        -- No match => try next
      | otherwise
      = find rest
      where
        (rough_tcs, match_tys1, match_tys2) = split_tys tpl_tys

    -- Precondition: the tycon is saturated (or over-saturated)

    -- Deal with over-saturation
    -- See Note [Over-saturated matches]
    split_tys tpl_tys
      | isTypeFamilyTyCon fam
      = pre_rough_split_tys

      | otherwise
      = let (match_tys1, match_tys2) = splitAtList tpl_tys match_tys
            rough_tcs = roughMatchTcs match_tys1
        in (rough_tcs, match_tys1, match_tys2)

    -- For type families the arity is fixed, so the split between matched
    -- and overflow arguments can be computed once, up front
    (pre_match_tys1, pre_match_tys2) = splitAt (tyConArity fam) match_tys
    pre_rough_split_tys
      = (roughMatchTcs pre_match_tys1, pre_match_tys1, pre_match_tys2)
lookup_fam_inst_env           -- The worker, local to this module
    :: MatchFun
    -> FamInstEnvs
    -> TyCon -> [Type]        -- What we are looking for
    -> [FamInstMatch]         -- Successful matches
-- Precondition: the tycon is saturated (or over-saturated)
lookup_fam_inst_env match_fun (pkg_ie, home_ie) fam tys
  = home_matches ++ pkg_matches
  where
    -- Search the home-package environment first, then the external one
    home_matches = lookup_fam_inst_env' match_fun home_ie fam tys
    pkg_matches  = lookup_fam_inst_env' match_fun pkg_ie  fam tys
{-
Note [Over-saturated matches]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's ok to look up an over-saturated type constructor. E.g.
type family F a :: * -> *
type instance F (a,b) = Either (a->b)
The type instance gives rise to a newtype TyCon (at a higher kind
which you can't do in Haskell!):
newtype FPair a b = FP (Either (a->b))
Then looking up (F (Int,Bool) Char) will return a FamInstMatch
(FPair, [Int,Bool,Char])
The "extra" type argument [Char] just stays on the end.
We handle data families and type families separately here:
* For type families, all instances of a type family must have the
same arity, so we can precompute the split between the match_tys
and the overflow tys. This is done in pre_rough_split_tys.
* For data family instances, though, we need to re-split for each
instance, because the breakdown might be different for each
instance. Why? Because of eta reduction; see Note [Eta reduction
for data family axioms] in TcInstDcls.
-}
-- | Check whether one LHS is dominated by a list of other branches: in
-- other words, whether any application matching the first LHS is
-- guaranteed to match at least one of the others.  The RHSs are ignored.
--
-- The algorithm is conservative:
--   True  -> the LHS is definitely covered by the others
--   False -> no information
--
-- It is currently (Oct 2012) used only for generating errors for
-- inaccessible branches.  If these errors go unreported, no harm done.
--
-- This is defined here to avoid a dependency from CoAxiom to Unify.
isDominatedBy :: CoAxBranch -> [CoAxBranch] -> Bool
isDominatedBy branch branches
  = any covers branches
  where
    lhs = coAxBranchLHS branch
    covers (CoAxBranch { cab_tvs = tvs, cab_lhs = tys })
      = isJust (tcMatchTys (mkVarSet tvs) tys lhs)
{-
************************************************************************
* *
Choosing an axiom application
* *
************************************************************************
The lookupFamInstEnv function does a nice job for *open* type families,
but we also need to handle closed ones when normalising a type:
-}
reduceTyFamApp_maybe :: FamInstEnvs
                     -> Role              -- Desired role of result coercion
                     -> TyCon -> [Type]
                     -> Maybe (Coercion, Type)
-- Attempt to do a *one-step* reduction of a type-family application
--    but *not* newtypes
-- Works on type-synonym families always; data-families only if
--     the role we seek is representational
-- It does *not* normalise the type arguments first, so this may not
--     go as far as you want. If you want normalised type arguments,
--     use normaliseTcArgs first.
--
-- The TyCon can be oversaturated.
-- Works on both open and closed families
reduceTyFamApp_maybe envs role tc tys
  | Phantom <- role
  = Nothing             -- No reduction is ever attempted at Phantom role

  -- Open families: look the application up in the instance environments
  | case role of
      Representational -> isOpenFamilyTyCon     tc
      _                -> isOpenTypeFamilyTyCon tc
       -- If we seek a representational coercion
       -- (e.g. the call in topNormaliseType_maybe) then we can
       -- unwrap data families as well as type-synonym families;
       -- otherwise only type-synonym families
  , FamInstMatch { fim_instance = fam_inst
                 , fim_tys      = inst_tys } : _ <- lookupFamInstEnv envs tc tys
      -- NB: Allow multiple matches because of compatible overlap
  = let ax = famInstAxiom fam_inst
        co = mkUnbranchedAxInstCo role ax inst_tys
        ty = pSnd (coercionKind co)
    in Just (co, ty)

  -- Closed synonym families: pick the applicable axiom branch
  | Just ax <- isClosedSynFamilyTyConWithAxiom_maybe tc
  , Just (ind, inst_tys) <- chooseBranch ax tys
  = let co = mkAxInstCo role ax ind inst_tys
        ty = pSnd (coercionKind co)
    in Just (co, ty)

  -- Built-in families (compiler-magic reductions)
  | Just ax <- isBuiltInSynFamTyCon_maybe tc
  , Just (coax,ts,ty) <- sfMatchFam ax tys
  = let co = mkAxiomRuleCo coax ts []
    in Just (co, ty)

  | otherwise
  = Nothing
-- | Select the branch of a closed-family axiom that the target types
-- match (and that is apart from all of the branch's incompatible
-- predecessors -- see 'findBranch').
-- The axiom can be oversaturated. (Closed families only.)
chooseBranch :: CoAxiom Branched -> [Type] -> Maybe (BranchIndex, [Type])
chooseBranch axiom tys
  = do { let num_pats = coAxiomNumPats axiom
             (target_tys, extra_tys) = splitAt num_pats tys
             branches = coAxiomBranches axiom
       ; (ind, inst_tys) <- findBranch (fromBranchList branches) 0 target_tys
         -- Over-saturating arguments are passed through unchanged
       ; return (ind, inst_tys ++ extra_tys) }
-- | Try each branch in order; a branch is chosen when the target types
-- match its LHS and are surely apart from every branch recorded as
-- incompatible with it (cab_incomps).
-- The axiom must *not* be oversaturated
findBranch :: [CoAxBranch]             -- branches to check
           -> BranchIndex              -- index of current branch
           -> [Type]                   -- target types
           -> Maybe (BranchIndex, [Type])
findBranch (CoAxBranch { cab_tvs = tpl_tvs, cab_lhs = tpl_lhs, cab_incomps = incomps }
              : rest) ind target_tys
  = case tcMatchTys (mkVarSet tpl_tvs) tpl_lhs target_tys of
      Just subst -- matching worked. now, check for apartness.
        | all (isSurelyApart
               . tcUnifyTysFG instanceBindFun flattened_target
               . coAxBranchLHS) incomps
        -> -- matching worked & we're apart from all incompatible branches. success
           Just (ind, substTyVars subst tpl_tvs)

      -- failure. keep looking
      _ -> findBranch rest (ind+1) target_tys

  where isSurelyApart SurelyApart = True
        isSurelyApart _           = False

        -- See Note [Flattening] below
        flattened_target = flattenTys in_scope target_tys
        in_scope = mkInScopeSet (unionVarSets $
                                 map (tyVarsOfTypes . coAxBranchLHS) incomps)

-- fail if no branches left
findBranch [] _ _ = Nothing
{-
************************************************************************
* *
Looking up a family instance
* *
************************************************************************
-}
-- | Like 'topNormaliseType_maybe', but returns the input type unchanged
-- when no top-level reduction applies.
topNormaliseType :: FamInstEnvs -> Type -> Type
topNormaliseType env ty = maybe ty snd (topNormaliseType_maybe env ty)
topNormaliseType_maybe :: FamInstEnvs -> Type -> Maybe (Coercion, Type)

-- ^ Get rid of *outermost* (or toplevel)
--      * type function redex
--      * data family redex
--      * newtypes
-- returning an appropriate Representational coercion.  Specifically, if
--   topNormaliseType_maybe env ty = Maybe (co, ty')
-- then
--   (a) co :: ty ~R ty'
--   (b) ty' is not a newtype, and is not a type-family or data-family redex
--
-- However, ty' can be something like (Maybe (F ty)), where
-- (F ty) is a redex.
--
-- It's a bit like Type.repType, but handles type families too

topNormaliseType_maybe env ty
  = topNormaliseTypeX_maybe stepper ty
  where
    -- Alternate between unwrapping newtypes and reducing family redexes
    stepper = unwrapNewTypeStepper `composeSteppers` tyFamStepper

    tyFamStepper rec_nts tc tys  -- Try to step a type/data family
      = let (args_co, ntys) = normaliseTcArgs env Representational tc tys in
        case reduceTyFamApp_maybe env Representational tc ntys of
          Just (co, rhs) -> NS_Step rec_nts rhs (args_co `mkTransCo` co)
          Nothing        -> NS_Done
---------------
normaliseTcApp :: FamInstEnvs -> Role -> TyCon -> [Type] -> (Coercion, Type)
-- See comments on normaliseType for the arguments of this function
normaliseTcApp env role tc tys
  -- Type synonym: expand it (with normalised arguments) and normalise
  -- the right-hand side
  | isTypeSynonymTyCon tc
  , Just (tenv, rhs, ntys') <- expandSynTyCon_maybe tc ntys
  , (co2, ninst_rhs) <- normaliseType env role (Type.substTy (mkTopTvSubst tenv) rhs)
  = if isReflCo co2 then (args_co,                 mkTyConApp tc ntys)
                    else (args_co `mkTransCo` co2, mkAppTys ninst_rhs ntys')

  -- Family application: take one reduction step, then keep normalising
  | Just (first_co, ty') <- reduceTyFamApp_maybe env role tc ntys
  , (rest_co,nty) <- normaliseType env role ty'
  = (args_co `mkTransCo` first_co `mkTransCo` rest_co, nty)

  | otherwise   -- No unique matching family instance exists;
                -- we do not do anything
  = (args_co, mkTyConApp tc ntys)

  where
    -- Normalise the arguments first, so the cases above see normalised types
    (args_co, ntys) = normaliseTcArgs env role tc tys
---------------
normaliseTcArgs :: FamInstEnvs            -- environment with family instances
                -> Role                   -- desired role of output coercion
                -> TyCon -> [Type]        -- tc tys
                -> (Coercion, [Type])     -- (co, new_tys), where
                                          -- co :: tc tys ~ tc new_tys
normaliseTcArgs env role tc tys
  = (mkTyConAppCo role tc cois, ntys)
  where
    -- Each argument is normalised at the role the tycon prescribes
    -- for that position
    (cois, ntys) = zipWithAndUnzip (normaliseType env) (tyConRolesX role tc) tys
---------------
normaliseType :: FamInstEnvs            -- environment with family instances
              -> Role                   -- desired role of output coercion
              -> Type                   -- old type
              -> (Coercion, Type)       -- (coercion,new type), where
                                        -- co :: old-type ~ new_type
-- Normalise the input type, by eliminating *all* type-function redexes
-- but *not* newtypes (which are visible to the programmer)
-- Returns with Refl if nothing happens
-- Try to not to disturb type synonyms if possible

normaliseType env role (TyConApp tc tys)
  = normaliseTcApp env role tc tys
normaliseType _env role ty@(LitTy {}) = (mkReflCo role ty, ty)
normaliseType env role (AppTy ty1 ty2)
  = let (coi1,nty1) = normaliseType env role ty1
        -- NOTE(review): the argument is normalised at Nominal role;
        -- presumably a requirement of mkAppCo -- confirm
        (coi2,nty2) = normaliseType env Nominal ty2
    in (mkAppCo coi1 coi2, mkAppTy nty1 nty2)
normaliseType env role (FunTy ty1 ty2)
  = let (coi1,nty1) = normaliseType env role ty1
        (coi2,nty2) = normaliseType env role ty2
    in (mkFunCo role coi1 coi2, mkFunTy nty1 nty2)
normaliseType env role (ForAllTy tyvar ty1)
  = let (coi,nty1) = normaliseType env role ty1
    in (mkForAllCo tyvar coi, ForAllTy tyvar nty1)
normaliseType _  role ty@(TyVarTy _)
  = (mkReflCo role ty,ty)
{-
************************************************************************
* *
Flattening
* *
************************************************************************
Note [Flattening]
~~~~~~~~~~~~~~~~~
As described in "Closed type families with overlapping equations"
http://research.microsoft.com/en-us/um/people/simonpj/papers/ext-f/axioms-extended.pdf
we need to flatten core types before unifying them, when checking for "surely-apart"
against earlier equations of a closed type family.
Flattening means replacing all top-level uses of type functions with
fresh variables, *taking care to preserve sharing*. That is, the type
(Either (F a b) (F a b)) should flatten to (Either c c), never (Either
c d).
Here is a nice example of why it's all necessary:
type family F a b where
F Int Bool = Char
F a b = Double
type family G a -- open, no instances
How do we reduce (F (G Float) (G Float))? The first equation clearly doesn't match,
while the second equation does. But, before reducing, we must make sure that the
target can never become (F Int Bool). Well, no matter what G Float becomes, it
certainly won't become *both* Int and Bool, so indeed we're safe reducing
(F (G Float) (G Float)) to Double.
This is necessary not only to get more reductions (which we might be
willing to give up on), but for substitutivity. If we have (F x x), we
can see that (F x x) can reduce to Double. So, it had better be the
case that (F blah blah) can reduce to Double, no matter what (blah)
is! Flattening as done below ensures this.
flattenTys is defined here because of module dependencies.
-}
-- | Maps each type-family application already seen to the variable that
-- replaced it, so repeated occurrences flatten to the *same* variable.
type FlattenMap = TypeMap TyVar

-- See Note [Flattening]
flattenTys :: InScopeSet -> [Type] -> [Type]
flattenTys in_scope tys = snd $ coreFlattenTys all_in_scope emptyTypeMap tys
  where
    -- when we hit a type function, we replace it with a fresh variable
    -- but, we need to make sure that this fresh variable isn't mentioned
    -- *anywhere* in the types we're flattening, even if locally-bound in
    -- a forall. That way, we can ensure consistency both within and outside
    -- of that forall.
    all_in_scope = in_scope `extendInScopeSetSet` allTyVarsInTys tys
-- | Flatten a list of types left-to-right, threading the sharing map
-- through so that repeated family applications get the same variable.
coreFlattenTys :: InScopeSet -> FlattenMap -> [Type] -> (FlattenMap, [Type])
coreFlattenTys in_scope = flatten_all
  where
    flatten_all m []         = (m, [])
    flatten_all m (ty : tys) = let (m1, ty')  = coreFlattenTy in_scope m ty
                                   (m2, tys') = flatten_all m1 tys
                               in (m2, ty' : tys')
-- | Flatten a single type: replace every non-generative tycon application
-- (i.e. type-family redex) with a variable, preserving sharing via the map.
coreFlattenTy :: InScopeSet -> FlattenMap -> Type -> (FlattenMap, Type)
coreFlattenTy in_scope = go
  where
    -- Look through type synonyms first
    go m ty | Just ty' <- coreView ty = go m ty'

    go m ty@(TyVarTy {}) = (m, ty)
    go m (AppTy ty1 ty2) = let (m1, ty1') = go m  ty1
                               (m2, ty2') = go m1 ty2 in
                           (m2, AppTy ty1' ty2')
    go m (TyConApp tc tys)
         -- NB: Don't just check if isFamilyTyCon: this catches *data* families,
         -- which are generative and thus can be preserved during flattening
      | not (isGenerativeTyCon tc Nominal)
      = let (m', tv) = coreFlattenTyFamApp in_scope m tc tys in
        (m', mkTyVarTy tv)

      | otherwise
      = let (m', tys') = coreFlattenTys in_scope m tys in
        (m', mkTyConApp tc tys')

    go m (FunTy ty1 ty2) = let (m1, ty1') = go m  ty1
                               (m2, ty2') = go m1 ty2 in
                           (m2, FunTy ty1' ty2')

      -- Note to RAE: this will have to be changed with kind families
    go m (ForAllTy tv ty) = let (m', ty') = go m ty in
                            (m', ForAllTy tv ty')

    go m ty@(LitTy {}) = (m, ty)
-- | Replace one type-family application with a type variable, reusing the
-- variable if the very same application was seen before (see Note [Flattening]).
coreFlattenTyFamApp :: InScopeSet -> FlattenMap
                    -> TyCon         -- type family tycon
                    -> [Type]        -- args
                    -> (FlattenMap, TyVar)
coreFlattenTyFamApp in_scope m fam_tc fam_args
  = case lookupTypeMap m fam_ty of
      Just tv -> (m, tv)
              -- we need fresh variables here, but this is called far from
              -- any good source of uniques. So, we just use the fam_tc's unique
              -- and trust uniqAway to avoid clashes. Recall that the in_scope set
              -- contains *all* tyvars, even locally bound ones elsewhere in the
              -- overall type, so this really is fresh.
      Nothing -> let tyvar_name = mkSysTvName (getUnique fam_tc) (fsLit "fl")
                     tv = uniqAway in_scope $ mkTyVar tyvar_name (typeKind fam_ty)
                     m' = extendTypeMap m fam_ty tv in
                 (m', tv)
  where fam_ty = TyConApp fam_tc fam_args
-- | Union of 'allTyVarsInTy' over a list of types.
allTyVarsInTys :: [Type] -> VarSet
allTyVarsInTys = foldr (\ty acc -> allTyVarsInTy ty `unionVarSet` acc) emptyVarSet
-- | Collect *all* type variables in a type, including ones bound by a
-- forall (the binder itself and the variables of its kind).
allTyVarsInTy :: Type -> VarSet
allTyVarsInTy = collect
  where
    collect (TyVarTy tv)     = unitVarSet tv
    collect (AppTy t1 t2)    = collect t1 `unionVarSet` collect t2
    collect (TyConApp _ ts)  = allTyVarsInTys ts
    collect (FunTy t1 t2)    = collect t1 `unionVarSet` collect t2
    collect (ForAllTy tv t)  = collect (tyVarKind tv) `unionVarSet`
                               unitVarSet tv `unionVarSet`
                               collect t    -- deliberately keep the binder tv
    collect (LitTy {})       = emptyVarSet
|
TomMD/ghc
|
compiler/types/FamInstEnv.hs
|
bsd-3-clause
| 42,766
| 0
| 19
| 12,019
| 6,348
| 3,426
| 2,922
| -1
| -1
|
{-# LANGUAGE Rank2Types, ScopedTypeVariables #-}
module STMatrix
where
import Data.Elem.BLAS
import Data.Vector.Dense
import Data.Matrix.Dense
import Data.Matrix.Dense.ST
import qualified Test.Matrix.Dense as Test
import Driver
-------------------------- Creating Matrices --------------------------------

-- Convention throughout this module: each @foo_S@ binding is the pure
-- *specification* that the corresponding mutable operation @foo@ is
-- tested against.  Specs for in-place operations return a tuple of
-- (result, matrix-after) so 'commutes' can compare both.

newMatrix_S = matrix
prop_NewMatrix (Assocs2 mn ijes) =
    newMatrix mn ijes `equivalent` newMatrix_S mn ijes

newListMatrix_S = listMatrix
prop_NewListMatrix (Nat2 mn) es =
    newListMatrix mn es `equivalent` newListMatrix_S mn es

---------------------- Reading and Writing Elements --------------------------

getSize_S a = ( size a, a )
prop_GetSize = getSize `implements` getSize_S

readElem_S a ij = ( a!ij, a )
prop_ReadElem (Index2 mn ij) =
    implementsFor mn (`readElem` ij) (`readElem_S` ij)

-- Every element of a dense matrix is modifiable, hence the constant True
canModifyElem_S a ij = ( True, a )
prop_CanModifyElem ij = (`canModifyElem` ij) `implements` (`canModifyElem_S` ij)

writeElem_S a ij e = ( (), a // [(ij,e)] )
prop_WriteElem (Index2 mn ij) e =
    implementsFor mn (\a -> writeElem a ij e) (\a -> writeElem_S a ij e)

modifyElem_S a ij f = writeElem_S a ij $ f (a!ij)
prop_ModifyElem (Index2 mn ij) f =
    implementsFor mn (\a -> modifyElem a ij f) (\a -> modifyElem_S a ij f)

getIndices_S a = ( indices a, a )
prop_GetIndicesLazy   = getIndices  `implements` getIndices_S
prop_GetIndicesStrict = getIndices' `implements` getIndices_S

getElems_S a = ( elems a, a )
prop_GetElemsLazy   = getElems  `implements` getElems_S
prop_GetElemsStrict = getElems' `implements` getElems_S

-- Per the spec, the lazily-fetched elements observe a later in-place
-- modification (the spec uses the *modified* matrix a') ...
getElemsLazyModifyWith_S f a = ( elems a', a' ) where a' = tmap f a
prop_GetElemsLazyModifyWith f =
    (\a -> do { es <- getElems a ; modifyWith f a ; return es })
        `implements `
    (getElemsLazyModifyWith_S f)

-- ... whereas the strict variant snapshots the elements at call time
-- (the spec uses the *original* matrix a).
getElemsStrictModifyWith_S f a = ( elems a, a' ) where a' = tmap f a
prop_GetElemsStrictModifyWith f =
    (\a -> do { es <- getElems' a ; modifyWith f a ; return es })
        `implements `
    (getElemsStrictModifyWith_S f)

getAssocsLazyModifyWith_S f a = ( assocs a', a' ) where a' = tmap f a
prop_GetAssocsLazyModifyWith f =
    (\a -> do { ijes <- getAssocs a ; modifyWith f a ; return ijes })
        `implements`
    getAssocsLazyModifyWith_S f

getAssocsStrictModifyWith_S f a = ( assocs a, a' ) where a' = tmap f a
prop_GetAssocsStrictModifyWith f =
    (\a -> do { ijes <- getAssocs' a ; modifyWith f a ; return ijes })
        `implements`
    getAssocsStrictModifyWith_S f

----------------------------- Special Matrices --------------------------------

newZeroMatrix_S = zeroMatrix
prop_NewZeroMatrix (Nat2 mn) =
    newZeroMatrix mn `equivalent` newZeroMatrix_S mn

setZeroMatrix_S a = ( (), newZeroMatrix_S (shape a) )
prop_SetZeroMatrix = setZeroMatrix `implements` setZeroMatrix_S

newConstantMatrix_S mn e = constantMatrix mn e
prop_NewConstantMatrix (Nat2 mn) e =
    newConstantMatrix mn e `equivalent` newConstantMatrix_S mn e

setConstantMatrix_S e a = ( (), newConstantMatrix_S (shape a) e )
prop_SetConstantMatrix e = setConstantMatrix e `implements` setConstantMatrix_S e

newIdentityMatrix_S = identityMatrix
prop_NewIdentityMatrix (Nat2 mn) =
    newIdentityMatrix mn `equivalent` newIdentityMatrix_S mn

setIdentityMatrix_S a = ( (), newIdentityMatrix_S (shape a) )
prop_SetIdentityMatrix =
    setIdentityMatrix `implements` setIdentityMatrix_S

---------------------------- Copying Matrices --------------------------------

newCopyMatrix_S a = ( a, a )
prop_NewCopyMatrix =
    (\a -> newCopyMatrix a >>= abstract) `implements` newCopyMatrix_S

-- copyMatrix a b: the first argument receives the second's contents
copyMatrix_S a b = ( (), b, b )
prop_CopyMatrix = copyMatrix `implements2` copyMatrix_S

swapMatrix_S a b = ( (), b, a )
prop_SwapMatrix = swapMatrix `implements2` swapMatrix_S
-------------------------- Unary Matrix Operations ---------------------------

doConj_S x = ( (), tmap conjugate x )
prop_DoConj = doConj `implements` doConj_S

scaleBy_S k x = ( (), tmap (k*) x )
prop_ScaleBy k = scaleBy k `implements` scaleBy_S k

shiftBy_S k x = ( (), tmap (k+) x )
prop_ShiftBy k = shiftBy k `implements` shiftBy_S k

modifyWith_S f x = ( (), tmap f x )
prop_ModifyWith f = modifyWith f `implements` modifyWith_S f

-- Spec: a := a + (col x) <**> (row (alpha *> conj y)), an in-place rank-1 update
rank1UpdateMatrix_S a alpha x y =
    let a' = a + matrixFromCol x <**> matrixFromRow (alpha *> (conj y))
    in ( (), a')
prop_Rank1UpdateMatrix alpha x y =
    implementsFor (dim x, dim y)
        (\a -> rank1UpdateMatrix a alpha x y)
        (\a -> rank1UpdateMatrix_S a alpha x y)

getConjMatrix_S x = ( tmap conjugate x, x )
prop_GetConjMatrix =
    (\x -> getConjMatrix x >>= abstract) `implements` getConjMatrix_S

getScaledMatrix_S k x = ( tmap (k*) x, x )
prop_GetScaledMatrix k =
    (\x -> getScaledMatrix k x >>= abstract) `implements` (getScaledMatrix_S k)

getShiftedMatrix_S k x = ( tmap (k+) x, x )
prop_GetShiftedMatrix k =
    (\x -> getShiftedMatrix k x >>= abstract) `implements` (getShiftedMatrix_S k)

------------------------- Binary Matrix Operations ---------------------------

-- In-place binary ops store the result in their *first* argument

addMatrix_S x y = ( (), x + y, y )
prop_AddMatrix = addMatrix `implements2` addMatrix_S

subMatrix_S x y = ( (), x - y, y )
prop_SubMatrix = subMatrix `implements2` subMatrix_S

-- axpy stores into the *second* argument: y := alpha*x + y
axpyMatrix_S alpha x y = ( (), x, alpha *> x + y )
prop_AxpyMatrix alpha = axpyMatrix alpha `implements2` axpyMatrix_S alpha

mulMatrix_S x y = ( (), x * y, y )
prop_MulMatrix = mulMatrix `implements2` mulMatrix_S

divMatrix_S x y = ( (), x / y, y )
prop_DivMatrix = divMatrix `implements2` divMatrix_S

getAddMatrix_S x y = ( x + y, x, y )
prop_GetAddMatrix =
    (\x y -> getAddMatrix x y >>= abstract) `implements2` getAddMatrix_S

getSubMatrix_S x y = ( x - y, x, y )
prop_GetSubMatrix =
    (\x y -> getSubMatrix x y >>= abstract) `implements2` getSubMatrix_S

getMulMatrix_S x y = ( x * y, x, y )
prop_GetMulMatrix =
    (\x y -> getMulMatrix x y >>= abstract) `implements2` getMulMatrix_S

getDivMatrix_S x y = ( x / y, x, y )
prop_GetDivMatrix =
    (\x y -> getDivMatrix x y >>= abstract) `implements2` getDivMatrix_S

------------------------------------------------------------------------
--
-- The specification language
--

-- | Freeze a mutable matrix into its pure (abstract) value.
abstract :: (BLAS3 e) => STMatrix s e -> ST s (Matrix e)
abstract = freezeMatrix
-- | Run an ST action on a matrix and check that it commutes with its
-- pure specification: the spec maps the matrix's old abstract value to
-- the expected (result, new abstract value) pair.
commutes :: (AEq a, Show a, AEq e, BLAS3 e) =>
    STMatrix s e -> (STMatrix s e -> ST s a) ->
        (Matrix e -> (a,Matrix e)) -> ST s Bool
commutes x a f = do
    before <- abstract x
    r      <- a x
    after  <- abstract x
    let expected = f before
        actual   = (r, after)
        ok       = expected ~== actual
    when (not ok) $
        trace (printf ("expected `%s' but got `%s'") (show expected) (show actual))
            return ()
    return ok
-- | Two-matrix analogue of 'commutes': run the ST action on both
-- matrices and compare against the pure specification.
commutes2 :: (AEq a, Show a, AEq e, BLAS3 e) =>
    STMatrix s e -> STMatrix s e ->
    (STMatrix s e -> STMatrix s e -> ST s a) ->
        (Matrix e -> Matrix e -> (a,Matrix e,Matrix e)) -> ST s Bool
commutes2 x y a f = do
    beforeX <- abstract x
    beforeY <- abstract y
    r       <- a x y
    afterX  <- abstract x
    afterY  <- abstract y
    let expected = f beforeX beforeY
        actual   = (r, afterX, afterY)
        ok       = expected ~== actual
    when (not ok) $
        trace (printf ("expected `%s' but got `%s'") (show expected) (show actual))
            return ()
    return ok
-- | Check that building a mutable matrix and freezing it yields exactly
-- the given pure matrix, tracing a diagnostic on mismatch.
equivalent :: (forall s . ST s (STMatrix s E)) -> Matrix E -> Bool
equivalent x s = runST $ do
    x' <- x >>= abstract
    let same = x' === s
    when (not same) $
        trace (printf ("expected `%s' but got `%s'") (show s) (show x'))
            return ()
    return same
-- | Lift a one-matrix ST action and its pure specification into a
-- QuickCheck property quantified over arbitrary shapes.
implements :: (AEq a, Show a) =>
    (forall s . STMatrix s E -> ST s a) ->
    (Matrix E -> (a,Matrix E)) ->
        Property
a `implements` f =
    forAll arbitrary $ \(Nat2 mn) ->
        implementsFor mn a f

-- | Two-matrix version of 'implements'.
implements2 :: (AEq a, Show a) =>
    (forall s . STMatrix s E -> STMatrix s E -> ST s a) ->
    (Matrix E -> Matrix E -> (a,Matrix E,Matrix E)) ->
        Property
a `implements2` f =
    forAll arbitrary $ \(Nat2 mn) ->
        implementsFor2 mn a f

-- | Like 'implements', but for a fixed shape @mn@.
implementsFor :: (AEq a, Show a) =>
    (Int,Int) ->
    (forall s . STMatrix s E -> ST s a) ->
    (Matrix E -> (a,Matrix E)) ->
        Property
implementsFor mn a f =
    forAll (Test.matrix mn) $ \x ->
        runST $ do
            x' <- unsafeThawMatrix x
            commutes x' a f

-- | Like 'implements2', but for a fixed shape @mn@.
implementsFor2 :: (AEq a, Show a) =>
    (Int,Int) ->
    (forall s . STMatrix s E -> STMatrix s E -> ST s a) ->
    (Matrix E -> Matrix E -> (a,Matrix E,Matrix E)) ->
        Property
implementsFor2 mn a f =
    forAll (Test.matrix mn) $ \x ->
    forAll (Test.matrix mn) $ \y ->
        runST $ do
            x' <- unsafeThawMatrix x
            y' <- unsafeThawMatrix y
            commutes2 x' y' a f

-- | Conditional property: check the specification only when the
-- ST precondition holds for the generated matrix.
implementsIf :: (AEq a, Show a) =>
    (forall s . STMatrix s E -> ST s Bool) ->
    (forall s . STMatrix s E -> ST s a) ->
    (Matrix E -> (a,Matrix E)) ->
        Property
implementsIf pre a f =
    forAll arbitrary $ \(Nat2 mn) ->
    forAll (Test.matrix mn) $ \x ->
        runST ( do
            x' <- thawMatrix x
            pre x') ==>
        runST ( do
            x' <- unsafeThawMatrix x
            commutes x' a f )

-- | Two-matrix version of 'implementsIf'.
implementsIf2 :: (AEq a, Show a) =>
    (forall s . STMatrix s E -> STMatrix s E -> ST s Bool) ->
    (forall s . STMatrix s E -> STMatrix s E -> ST s a) ->
    (Matrix E -> Matrix E -> (a,Matrix E,Matrix E)) ->
        Property
implementsIf2 pre a f =
    forAll arbitrary $ \(Nat2 mn) ->
    forAll (Test.matrix mn) $ \x ->
    forAll (Test.matrix mn) $ \y ->
        runST ( do
            x' <- thawMatrix x
            y' <- thawMatrix y
            pre x' y') ==>
        runST ( do
            x' <- unsafeThawMatrix x
            y' <- unsafeThawMatrix y
            commutes2 x' y' a f )

------------------------------------------------------------------------

-- All properties in this module, collected for the test driver.
tests_STMatrix =
    [ testProperty "newMatrix" prop_NewMatrix
    , testProperty "newListMatrix" prop_NewListMatrix
    , testProperty "getSize" prop_GetSize
    , testProperty "readElem" prop_ReadElem
    , testProperty "canModifyElem" prop_CanModifyElem
    , testProperty "writeElem" prop_WriteElem
    , testProperty "modifyElem" prop_ModifyElem
    , testProperty "getIndices" prop_GetIndicesLazy
    , testProperty "getIndices'" prop_GetIndicesStrict
    , testProperty "getElems" prop_GetElemsLazy
    , testProperty "getElems'" prop_GetElemsStrict
    , testProperty "getElems . modifyWith" prop_GetElemsLazyModifyWith
    , testProperty "getElems' . modifyWith" prop_GetElemsStrictModifyWith
    , testProperty "getAssocs . modifyWith" prop_GetAssocsLazyModifyWith
    , testProperty "getAssocs' . modifyWith" prop_GetAssocsStrictModifyWith
    , testProperty "newZeroMatrix" prop_NewZeroMatrix
    , testProperty "setZeroMatrix" prop_SetZeroMatrix
    , testProperty "newConstantMatrix" prop_NewConstantMatrix
    , testProperty "setConstantMatrix" prop_SetConstantMatrix
    , testProperty "newIdentityMatrix" prop_NewIdentityMatrix
    , testProperty "setIdentityMatrix" prop_SetIdentityMatrix
    , testProperty "newCopyMatrix" prop_NewCopyMatrix
    , testProperty "copyMatrix" prop_CopyMatrix
    , testProperty "swapMatrix" prop_SwapMatrix
    , testProperty "doConj" prop_DoConj
    , testProperty "scaleBy" prop_ScaleBy
    , testProperty "shiftBy" prop_ShiftBy
    , testProperty "modifyWith" prop_ModifyWith
    , testProperty "rank1UpdateMatrix" prop_Rank1UpdateMatrix
    , testProperty "getConjMatrix" prop_GetConjMatrix
    , testProperty "getScaledMatrix" prop_GetScaledMatrix
    , testProperty "getShiftedMatrix" prop_GetShiftedMatrix
    , testProperty "axpyMatrix" prop_AxpyMatrix
    , testProperty "addMatrix" prop_AddMatrix
    , testProperty "subMatrix" prop_SubMatrix
    , testProperty "mulMatrix" prop_MulMatrix
    , testProperty "divMatrix" prop_DivMatrix
    , testProperty "getAddMatrix" prop_GetAddMatrix
    , testProperty "getSubMatrix" prop_GetSubMatrix
    , testProperty "getMulMatrix" prop_GetMulMatrix
    , testProperty "getDivMatrix" prop_GetDivMatrix
    ]
|
patperry/hs-linear-algebra
|
tests-old/STMatrix.hs
|
bsd-3-clause
| 12,020
| 0
| 17
| 2,807
| 4,084
| 2,123
| 1,961
| -1
| -1
|
module Main where
import System.FilePath.Glob (glob)
import Test.DocTest (doctest)
-- | Run doctest over every Haskell source file under @src/@.
main :: IO ()
main = glob "src/**/*.hs" >>= doctest
|
TomRegan/HaskellStarter
|
test/DocTest.hs
|
mit
| 123
| 0
| 6
| 17
| 37
| 22
| 15
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : Network.MPD.Applicative.Output
Copyright : (c) Joachim Fasting 2012
License : MIT
Maintainer : joachifm@fastmail.fm
Stability : stable
Portability : unportable
Audio output devices.
-}
module Network.MPD.Applicative.Output
( disableOutput
, enableOutput
, toggleOutput
, outputs
) where
import Network.MPD.Applicative.Internal
import Network.MPD.Commands.Arg hiding (Command)
import Network.MPD.Commands.Parse
import Network.MPD.Commands.Types
-- | Turn off output.
--
-- Sends the MPD @disableoutput@ command for the output with the given id.
disableOutput :: Int -> Command ()
disableOutput n = Command emptyResponse ["disableoutput" <@> n]

-- | Turn on output.
--
-- Sends the MPD @enableoutput@ command for the output with the given id.
enableOutput :: Int -> Command ()
enableOutput n = Command emptyResponse ["enableoutput" <@> n]

-- | Toggle output.
--
-- Sends the MPD @toggleoutput@ command for the output with the given id.
toggleOutput :: Int -> Command ()
toggleOutput n = Command emptyResponse ["toggleoutput" <@> n]

-- | Get information about all available output devices.
outputs :: Command [Device]
outputs = Command (liftParser parseOutputs) ["outputs"]
|
matthewleon/libmpd-haskell
|
src/Network/MPD/Applicative/Output.hs
|
mit
| 1,071
| 0
| 7
| 211
| 194
| 112
| 82
| 18
| 1
|
module ShrdliteGrammar where
import CombinatorParser
-- | Parser over token lists of strings.
type SParser = Parser String

-- Data types

-- | Top-level utterances the grammar accepts.
data Command = Take Entity | Put Location | Move Entity Location
  deriving (Eq, Ord, Show)

-- | A location is always expressed relative to some entity.
data Location = Relative Relation Entity
  deriving (Eq, Ord, Show)

data Entity = Floor | BasicEntity Quantifier Object | RelativeEntity Quantifier Object Location
  deriving (Eq, Ord, Show)

-- | An object description; size and color may be the wildcards
-- 'AnySize' / 'AnyColor'.
data Object = Object Size Color Form
  deriving (Eq, Ord, Show)

data Quantifier = The | Any | All
  deriving (Eq, Ord, Show)

data Relation = Beside | Leftof | Rightof | Above | Ontop | Under | Inside
  deriving (Eq, Ord, Show)

data Size = AnySize | Small | Large
  deriving (Eq, Ord, Show)

data Color = AnyColor | Black | White | Blue | Green | Yellow | Red
  deriving (Eq, Ord, Show)

data Form = AnyForm | Brick | Plank | Ball | Pyramid | Box | Table
  deriving (Eq, Ord, Show)
-- Grammar rules

-- | Parse a full command (take / put-it / move), wrapped with the
-- optional politeness phrases handled by 'mkCommand'.
command :: SParser Command
command = mkCommand $
    Take <$> (takeVerb *> entity)
    <|>
    Put <$> (moveVerb *> itPron *> location)
    <|>
    Move <$> (moveVerb *> entity) <*> location

location :: SParser Location
location = Relative <$> relation <*> entity

-- | Parse an entity: the floor, a quantified object, or a quantified
-- object restricted by a relative clause ("... that is <location>").
entity :: SParser Entity
entity = Floor <$ theFloor
         <|>
         numberAgreement (liftA2 BasicEntity <$> quantifier <*> object)
         <|>
         numberAgreement (liftA3 RelativeEntity <$> quantifier <*> object <*> relative_clause)
  where
    relative_clause n = thatIs n *> location

-- | Parse an object description: either optional-size then optional-color,
-- or color then size (both explicit), followed by a form noun.
object :: Number -> SParser Object
object n = Object <$> (size <|> pure AnySize) <*> (color <|> pure AnyColor) <*> form n
           <|>
           flip Object <$> color <*> size <*> form n
-- Lexical rules

-- Each lexical category maps surface words to their semantic value;
-- 'lexicon' (from CombinatorParser) pairs values with their word forms.

quantifier :: Number -> SParser Quantifier
quantifier Sg = lexicon [(The, ["the"]),
                         (Any, ["a", "an", "any"]),
                         (All, ["every"])]
quantifier Pl = lexicon [(All, ["all"])]

relation :: SParser Relation
relation = lexicon [(Beside,  ["beside"]),
                    (Leftof,  ["left of", "to the left of"]),
                    (Rightof, ["right of", "to the right of"]),
                    (Above,   ["above"]),
                    (Ontop,   ["on top of", "on"]),
                    (Under,   ["under"]),
                    (Inside,  ["inside", "in", "into"])]

size :: SParser Size
size = lexicon [(Small, ["small", "tiny"]),
                (Large, ["large", "big"])]

color :: SParser Color
color = lexicon [(Black,  ["black"]),
                 (White,  ["white"]),
                 (Blue,   ["blue"]),
                 (Green,  ["green"]),
                 (Yellow, ["yellow"]),
                 (Red,    ["red"])]

-- | Form nouns, inflected for number ("box"/"boxes" is irregular).
form :: Number -> SParser Form
form n = lexicon [(AnyForm, [regNoun n "object", regNoun n "thing", regNoun n "form"]),
                  (Brick,   [regNoun n "brick"]),
                  (Plank,   [regNoun n "plank"]),
                  (Ball,    [regNoun n "ball"]),
                  (Pyramid, [regNoun n "pyramid"]),
                  (Box,     [mkNoun n "box" "boxes"]),
                  (Table,   [regNoun n "table"])]
-- Lexicon

-- | Grammatical number, used to pick singular vs plural word forms.
data Number = Sg | Pl
  deriving (Eq, Ord, Show)

-- | Try a number-sensitive parser in both singular and plural agreement.
numberAgreement :: (Number -> SParser a) -> SParser a
numberAgreement p = p Sg <|> p Pl

-- | Regular noun: the plural is formed by appending "s".
regNoun :: Number -> String -> String
regNoun n s = mkNoun n s (s ++ "s")

-- | Choose between explicitly given singular and plural forms.
-- The unused form is matched with @_@ to avoid unused-binding warnings.
mkNoun :: Number -> String -> String -> String
mkNoun Sg sg _  = sg
mkNoun Pl _  pl = pl
-- | Wrap a command parser with optional politeness phrases
-- ("will you", "please", ...) before and after the command proper.
-- The empty string alternative makes every phrase optional.
mkCommand :: SParser Command -> SParser Command
mkCommand prs = lexicon [((), ["", "will you", "can you", "could you"])] *>
                lexicon [((), ["", "please"])] *>
                prs <*
                lexicon [((), ["", "please"])]

theFloor :: SParser ()
theFloor = lexicon [((), ["the floor"])]

-- | Optional relative-clause introducer, agreeing in number.
thatIs :: Number -> SParser ()
thatIs Sg = lexicon [((), ["", "that is"])]
thatIs Pl = lexicon [((), ["", "that are"])]

moveVerb :: SParser ()
moveVerb = lexicon [((), ["move", "put", "drop"])]

takeVerb :: SParser ()
takeVerb = lexicon [((), ["take", "grasp", "pick up"])]

itPron :: SParser ()
itPron = lexicon [((), ["it"])]
|
gautsson/elitresse
|
haskell/ShrdliteGrammar.hs
|
gpl-3.0
| 4,171
| 0
| 14
| 1,314
| 1,519
| 859
| 660
| 97
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Slovak locale): declares the map, table of
     contents, index, full-text search and favorites views for this bundle. -->
<helpset version="2.0" xml:lang="sk-SK">
  <title>Active Scan Rules - Alpha | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/ascanrulesAlpha/resources/help_sk_SK/helpset_sk_SK.hs
|
apache-2.0
| 987
| 80
| 67
| 163
| 422
| 213
| 209
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Danish locale): declares the map, table of
     contents, index, full-text search and favorites views for this bundle. -->
<helpset version="2.0" xml:lang="da-DK">
  <title>Technology detection | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Søg</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/wappalyzer/resources/help_da_DK/helpset_da_DK.hs
|
apache-2.0
| 980
| 80
| 66
| 160
| 418
| 211
| 207
| -1
| -1
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[Specialise]{Stamping out overloading, and (optionally) polymorphism}
-}
{-# LANGUAGE CPP #-}
module Specialise ( specProgram, specUnfolding ) where
#include "HsVersions.h"
import Id
import TcType hiding( substTy, extendTvSubstList )
import Type hiding( substTy, extendTvSubstList )
import Coercion( Coercion )
import Module( Module )
import CoreMonad
import qualified CoreSubst
import CoreUnfold
import VarSet
import VarEnv
import CoreSyn
import Rules
import PprCore ( pprParendExpr )
import CoreUtils ( exprIsTrivial, applyTypeToArgs )
import CoreFVs ( exprFreeVars, exprsFreeVars, idFreeVars )
import UniqSupply
import Name
import MkId ( voidArgId, voidPrimId )
import Maybes ( catMaybes, isJust )
import BasicTypes
import HscTypes
import Bag
import DynFlags
import Util
import Outputable
import FastString
import State
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative (Applicative(..))
#endif
import Control.Monad
import Data.Map (Map)
import qualified Data.Map as Map
import qualified FiniteMap as Map
{-
************************************************************************
* *
\subsection[notes-Specialise]{Implementation notes [SLPJ, Aug 18 1993]}
* *
************************************************************************
These notes describe how we implement specialisation to eliminate
overloading.
The specialisation pass works on Core
syntax, complete with all the explicit dictionary application,
abstraction and construction as added by the type checker. The
existing type checker remains largely as it is.
One important thought: the {\em types} passed to an overloaded
function, and the {\em dictionaries} passed are mutually redundant.
If the same function is applied to the same type(s) then it is sure to
be applied to the same dictionary(s)---or rather to the same {\em
values}. (The arguments might look different but they will evaluate
to the same value.)
Second important thought: we know that we can make progress by
treating dictionary arguments as static and worth specialising on. So
we can do without binding-time analysis, and instead specialise on
dictionary arguments and no others.
The basic idea
~~~~~~~~~~~~~~
Suppose we have
let f = <f_rhs>
in <body>
and suppose f is overloaded.
STEP 1: CALL-INSTANCE COLLECTION
We traverse <body>, accumulating all applications of f to types and
dictionaries.
(Might there be partial applications, to just some of its types and
dictionaries? In principle yes, but in practice the type checker only
builds applications of f to all its types and dictionaries, so partial
applications could only arise as a result of transformation, and even
then I think it's unlikely. In any case, we simply don't accumulate such
partial applications.)
STEP 2: EQUIVALENCES
So now we have a collection of calls to f:
f t1 t2 d1 d2
f t3 t4 d3 d4
...
Notice that f may take several type arguments. To avoid ambiguity, we
say that f is called at type t1/t2 and t3/t4.
We take equivalence classes using equality of the *types* (ignoring
the dictionary args, which as mentioned previously are redundant).
STEP 3: SPECIALISATION
For each equivalence class, choose a representative (f t1 t2 d1 d2),
and create a local instance of f, defined thus:
f@t1/t2 = <f_rhs> t1 t2 d1 d2
f_rhs presumably has some big lambdas and dictionary lambdas, so lots
of simplification will now result. However we don't actually *do* that
simplification. Rather, we leave it for the simplifier to do. If we
*did* do it, though, we'd get more call instances from the specialised
RHS. We can work out what they are by instantiating the call-instance
set from f's RHS with the types t1, t2.
Add this new id to f's IdInfo, to record that f has a specialised version.
Before doing any of this, check that f's IdInfo doesn't already
tell us about an existing instance of f at the required type/s.
(This might happen if specialisation was applied more than once, or
it might arise from user SPECIALIZE pragmas.)
Recursion
~~~~~~~~~
Wait a minute! What if f is recursive? Then we can't just plug in
its right-hand side, can we?
But it's ok. The type checker *always* creates non-recursive definitions
for overloaded recursive functions. For example:
f x = f (x+x) -- Yes I know its silly
becomes
f a (d::Num a) = let p = +.sel a d
in
letrec fl (y::a) = fl (p y y)
in
fl
We still have recursion for non-overloaded functions which we
specialise, but the recursive call should get specialised to the
same recursive version.
Polymorphism 1
~~~~~~~~~~~~~~
All this is crystal clear when the function is applied to *constant
types*; that is, types which have no type variables inside. But what if
it is applied to non-constant types? Suppose we find a call of f at type
t1/t2. There are two possibilities:
(a) The free type variables of t1, t2 are in scope at the definition point
of f. In this case there's no problem, we proceed just as before. A common
example is as follows. Here's the Haskell:
g y = let f x = x+x
in f y + f y
After typechecking we have
g a (d::Num a) (y::a) = let f b (d'::Num b) (x::b) = +.sel b d' x x
in +.sel a d (f a d y) (f a d y)
Notice that the call to f is at type type "a"; a non-constant type.
Both calls to f are at the same type, so we can specialise to give:
g a (d::Num a) (y::a) = let f@a (x::a) = +.sel a d x x
in +.sel a d (f@a y) (f@a y)
(b) The other case is when the type variables in the instance types
are *not* in scope at the definition point of f. The example we are
working with above is a good case. There are two instances of (+.sel a d),
but "a" is not in scope at the definition of +.sel. Can we do anything?
Yes, we can "common them up", a sort of limited common sub-expression deal.
This would give:
g a (d::Num a) (y::a) = let +.sel@a = +.sel a d
f@a (x::a) = +.sel@a x x
in +.sel@a (f@a y) (f@a y)
This can save work, and can't be spotted by the type checker, because
the two instances of +.sel weren't originally at the same type.
Further notes on (b)
* There are quite a few variations here. For example, the defn of
+.sel could be floated outside the \y, to attempt to gain laziness.
It certainly mustn't be floated outside the \d because the d has to
be in scope too.
* We don't want to inline f_rhs in this case, because
that will duplicate code. Just commoning up the call is the point.
* Nothing gets added to +.sel's IdInfo.
* Don't bother unless the equivalence class has more than one item!
Not clear whether this is all worth it. It is of course OK to
simply discard call-instances when passing a big lambda.
Polymorphism 2 -- Overloading
~~~~~~~~~~~~~~
Consider a function whose most general type is
f :: forall a b. Ord a => [a] -> b -> b
There is really no point in making a version of g at Int/Int and another
at Int/Bool, because it's only instancing the type variable "a" which
buys us any efficiency. Since g is completely polymorphic in b there
ain't much point in making separate versions of g for the different
b types.
That suggests that we should identify which of g's type variables
are constrained (like "a") and which are unconstrained (like "b").
Then when taking equivalence classes in STEP 2, we ignore the type args
corresponding to unconstrained type variable. In STEP 3 we make
polymorphic versions. Thus:
f@t1/ = /\b -> <f_rhs> t1 b d1 d2
We do this.
Dictionary floating
~~~~~~~~~~~~~~~~~~~
Consider this
f a (d::Num a) = let g = ...
in
...(let d1::Ord a = Num.Ord.sel a d in g a d1)...
Here, g is only called at one type, but the dictionary isn't in scope at the
definition point for g. Usually the type checker would build a
definition for d1 which enclosed g, but the transformation system
might have moved d1's defn inward. Solution: float dictionary bindings
outwards along with call instances.
Consider
f x = let g p q = p==q
h r s = (r+s, g r s)
in
h x x
Before specialisation, leaving out type abstractions we have
f df x = let g :: Eq a => a -> a -> Bool
g dg p q = == dg p q
h :: Num a => a -> a -> (a, Bool)
h dh r s = let deq = eqFromNum dh
in (+ dh r s, g deq r s)
in
h df x x
After specialising h we get a specialised version of h, like this:
h' r s = let deq = eqFromNum df
in (+ df r s, g deq r s)
But we can't naively make an instance for g from this, because deq is not in scope
at the defn of g. Instead, we have to float out the (new) defn of deq
to widen its scope. Notice that this floating can't be done in advance -- it only
shows up when specialisation is done.
User SPECIALIZE pragmas
~~~~~~~~~~~~~~~~~~~~~~~
Specialisation pragmas can be digested by the type checker, and implemented
by adding extra definitions along with that of f, in the same way as before
f@t1/t2 = <f_rhs> t1 t2 d1 d2
Indeed the pragmas *have* to be dealt with by the type checker, because
only it knows how to build the dictionaries d1 and d2! For example
g :: Ord a => [a] -> [a]
{-# SPECIALIZE f :: [Tree Int] -> [Tree Int] #-}
Here, the specialised version of g is an application of g's rhs to the
Ord dictionary for (Tree Int), which only the type checker can conjure
up. There might not even *be* one, if (Tree Int) is not an instance of
Ord! (All the other specialisation has suitable dictionaries to hand
from actual calls.)
Problem. The type checker doesn't have to hand a convenient <f_rhs>, because
it is buried in a complex (as-yet-un-desugared) binding group.
Maybe we should say
f@t1/t2 = f* t1 t2 d1 d2
where f* is the Id f with an IdInfo which says "inline me regardless!".
Indeed all the specialisation could be done in this way.
That in turn means that the simplifier has to be prepared to inline absolutely
any in-scope let-bound thing.
Again, the pragma should permit polymorphism in unconstrained variables:
h :: Ord a => [a] -> b -> b
{-# SPECIALIZE h :: [Int] -> b -> b #-}
We *insist* that all overloaded type variables are specialised to ground types,
(and hence there can be no context inside a SPECIALIZE pragma).
We *permit* unconstrained type variables to be specialised to
- a ground type
- or left as a polymorphic type variable
but nothing in between. So
{-# SPECIALIZE h :: [Int] -> [c] -> [c] #-}
is *illegal*. (It can be handled, but it adds complication, and gains the
programmer nothing.)
SPECIALISING INSTANCE DECLARATIONS
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
instance Foo a => Foo [a] where
...
{-# SPECIALIZE instance Foo [Int] #-}
The original instance decl creates a dictionary-function
definition:
dfun.Foo.List :: forall a. Foo a -> Foo [a]
The SPECIALIZE pragma just makes a specialised copy, just as for
ordinary function definitions:
dfun.Foo.List@Int :: Foo [Int]
dfun.Foo.List@Int = dfun.Foo.List Int dFooInt
The information about what instance of the dfun exist gets added to
the dfun's IdInfo in the same way as a user-defined function too.
Automatic instance decl specialisation?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Can instance decls be specialised automatically? It's tricky.
We could collect call-instance information for each dfun, but
then when we specialised their bodies we'd get new call-instances
for ordinary functions; and when we specialised their bodies, we might get
new call-instances of the dfuns, and so on. This all arises because of
the unrestricted mutual recursion between instance decls and value decls.
Still, there's no actual problem; it just means that we may not do all
the specialisation we could theoretically do.
Furthermore, instance decls are usually exported and used non-locally,
so we'll want to compile enough to get those specialisations done.
Lastly, there's no such thing as a local instance decl, so we can
survive solely by spitting out *usage* information, and then reading that
back in as a pragma when next compiling the file. So for now,
we only specialise instance decls in response to pragmas.
SPITTING OUT USAGE INFORMATION
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To spit out usage information we need to traverse the code collecting
call-instance information for all imported (non-prelude?) functions
and data types. Then we equivalence-class it and spit it out.
This is done at the top-level when all the call instances which escape
must be for imported functions and data types.
*** Not currently done ***
Partial specialisation by pragmas
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What about partial specialisation:
k :: (Ord a, Eq b) => [a] -> b -> b -> [a]
{-# SPECIALIZE k :: Eq b => [Int] -> b -> b -> [a] #-}
or even
{-# SPECIALIZE k :: Eq b => [Int] -> [b] -> [b] -> [a] #-}
Seems quite reasonable. Similar things could be done with instance decls:
instance (Foo a, Foo b) => Foo (a,b) where
...
{-# SPECIALIZE instance Foo a => Foo (a,Int) #-}
{-# SPECIALIZE instance Foo b => Foo (Int,b) #-}
Ho hum. Things are complex enough without this. I pass.
Requirements for the simplifier
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The simplifier has to be able to take advantage of the specialisation.
* When the simplifier finds an application of a polymorphic f, it looks in
f's IdInfo in case there is a suitable instance to call instead. This converts
f t1 t2 d1 d2 ===> f_t1_t2
Note that the dictionaries get eaten up too!
* Dictionary selection operations on constant dictionaries must be
short-circuited:
+.sel Int d ===> +Int
The obvious way to do this is in the same way as other specialised
calls: +.sel has inside it some IdInfo which tells that if it's applied
to the type Int then it should eat a dictionary and transform to +Int.
In short, dictionary selectors need IdInfo inside them for constant
methods.
* Exactly the same applies if a superclass dictionary is being
extracted:
Eq.sel Int d ===> dEqInt
* Something similar applies to dictionary construction too. Suppose
dfun.Eq.List is the function taking a dictionary for (Eq a) to
one for (Eq [a]). Then we want
dfun.Eq.List Int d ===> dEq.List_Int
Where does the Eq [Int] dictionary come from? It is built in
response to a SPECIALIZE pragma on the Eq [a] instance decl.
In short, dfun Ids need IdInfo with a specialisation for each
constant instance of their instance declaration.
All this uses a single mechanism: the SpecEnv inside an Id
What does the specialisation IdInfo look like?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The SpecEnv of an Id maps a list of types (the template) to an expression
[Type] |-> Expr
For example, if f has this SpecInfo:
[Int, a] -> \d:Ord Int. f' a
it means that we can replace the call
f Int t ===> (\d. f' t)
This chucks one dictionary away and proceeds with the
specialised version of f, namely f'.
What can't be done this way?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There is no way, post-typechecker, to get a dictionary for (say)
Eq a from a dictionary for Eq [a]. So if we find
==.sel [t] d
we can't transform to
eqList (==.sel t d')
where
eqList :: (a->a->Bool) -> [a] -> [a] -> Bool
Of course, we currently have no way to automatically derive
eqList, nor to connect it to the Eq [a] instance decl, but you
can imagine that it might somehow be possible. Taking advantage
of this is permanently ruled out.
Still, this is no great hardship, because we intend to eliminate
overloading altogether anyway!
A note about non-tyvar dictionaries
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some Ids have types like
forall a,b,c. Eq a -> Ord [a] -> tau
This seems curious at first, because we usually only have dictionary
args whose types are of the form (C a) where a is a type variable.
But this doesn't hold for the functions arising from instance decls,
which sometimes get arguments with types of form (C (T a)) for some
type constructor T.
Should we specialise wrt this compound-type dictionary? We used to say
"no", saying:
"This is a heuristic judgement, as indeed is the fact that we
specialise wrt only dictionaries. We choose *not* to specialise
wrt compound dictionaries because at the moment the only place
they show up is in instance decls, where they are simply plugged
into a returned dictionary. So nothing is gained by specialising
wrt them."
But it is simpler and more uniform to specialise wrt these dicts too;
and in future GHC is likely to support full fledged type signatures
like
f :: Eq [(a,b)] => ...
************************************************************************
* *
\subsubsection{The new specialiser}
* *
************************************************************************
Our basic game plan is this. For let(rec) bound function
f :: (C a, D c) => (a,b,c,d) -> Bool
* Find any specialised calls of f, (f ts ds), where
ts are the type arguments t1 .. t4, and
ds are the dictionary arguments d1 .. d2.
* Add a new definition for f1 (say):
f1 = /\ b d -> (..body of f..) t1 b t3 d d1 d2
Note that we abstract over the unconstrained type arguments.
* Add the mapping
[t1,b,t3,d] |-> \d1 d2 -> f1 b d
to the specialisations of f. This will be used by the
simplifier to replace calls
(f t1 t2 t3 t4) da db
by
(\d1 d1 -> f1 t2 t4) da db
All the stuff about how many dictionaries to discard, and what types
to apply the specialised function to, are handled by the fact that the
SpecEnv contains a template for the result of the specialisation.
We don't build *partial* specialisations for f. For example:
f :: Eq a => a -> a -> Bool
{-# SPECIALISE f :: (Eq b, Eq c) => (b,c) -> (b,c) -> Bool #-}
Here, little is gained by making a specialised copy of f.
There's a distinct danger that the specialised version would
first build a dictionary for (Eq b, Eq c), and then select the (==)
method from it! Even if it didn't, not a great deal is saved.
We do, however, generate polymorphic, but not overloaded, specialisations:
f :: Eq a => [a] -> b -> b -> b
... SPECIALISE f :: [Int] -> b -> b -> b ...
Hence, the invariant is this:
*** no specialised version is overloaded ***
************************************************************************
* *
\subsubsection{The exported function}
* *
************************************************************************
-}
-- | Entry point for the specialisation pass.  Specialises the bindings
-- of this module, then specialises any imported functions (e.g. ones
-- with INLINABLE unfoldings) for which overloaded calls were collected,
-- and appends the newly generated RULES to the module's rule list.
specProgram :: ModGuts -> CoreM ModGuts
specProgram guts@(ModGuts { mg_module = this_mod
                          , mg_rules = local_rules
                          , mg_binds = binds })
  = do { dflags <- getDynFlags

             -- Specialise the bindings of this module
       ; (binds', uds) <- runSpecM dflags (go binds)

             -- Specialise imported functions
       ; hpt_rules <- getRuleBase
       ; let rule_base = extendRuleBaseList hpt_rules local_rules
       ; (new_rules, spec_binds) <- specImports dflags this_mod emptyVarSet rule_base uds

       ; let final_binds | null spec_binds = binds'
                         | otherwise = Rec (flattenBinds spec_binds) : binds'
             -- The import-specialisation bindings are glommed into a single
             -- Rec so that cross-references introduced by RULE rewriting
             -- cannot produce out-of-scope occurrences.
             -- Note [Glom the bindings if imported functions are specialised]

       ; return (guts { mg_binds = final_binds
                      , mg_rules = new_rules ++ local_rules }) }
  where
        -- We need to start with a Subst that knows all the things
        -- that are in scope, so that the substitution engine doesn't
        -- accidentally re-use a unique that's already in use
        -- Easiest thing is to do it all at once, as if all the top-level
        -- decls were mutually recursive
    top_subst = SE { se_subst = CoreSubst.mkEmptySubst $ mkInScopeSet $ mkVarSet $
                                bindersOfBinds binds
                   , se_interesting = emptyVarSet }

    -- Walk the top-level bindings back-to-front, accumulating usage
    -- details so that calls in later bindings can drive specialisation
    -- of earlier ones.
    go [] = return ([], emptyUDs)
    go (bind:binds) = do (binds', uds) <- go binds
                         (bind', uds') <- specBind top_subst bind uds
                         return (bind' ++ binds', uds')
-- | Specialise each imported function mentioned in the usage details,
-- threading newly created rules through so that a rule generated for one
-- import can fire while specialising the next.
specImports :: DynFlags
            -> Module
            -> VarSet          -- Don't specialise these ones
                               -- See Note [Avoiding recursive specialisation]
            -> RuleBase        -- Rules from this module and the home package
                               -- (but not external packages, which can change)
            -> UsageDetails    -- Calls for imported things, and floating bindings
            -> CoreM ( [CoreRule]   -- New rules
                     , [CoreBind] ) -- Specialised bindings and floating bindings
specImports dflags this_mod done rule_base uds
  = do { let import_calls = varEnvElts (ud_calls uds)
       ; (rules, spec_binds) <- go rule_base import_calls
       ; return (rules, wrapDictBinds (ud_binds uds) spec_binds) }
         -- Floated dictionary bindings are wrapped around the specialised
         -- bindings so the dictionaries the specialisations mention stay
         -- in scope.
  where
    go _ [] = return ([], [])
    go rb (CIS fn calls_for_fn : other_calls)
       = do { (rules1, spec_binds1) <- specImport dflags this_mod done rb fn $
                                       Map.toList calls_for_fn
              -- Extend the rule base with the freshly minted rules before
              -- handling the remaining imports (see comment on the type).
            ; (rules2, spec_binds2) <- go (extendRuleBaseList rb rules1) other_calls
            ; return (rules1 ++ rules2, spec_binds1 ++ spec_binds2) }
-- | Specialise one imported function for the given calls, provided it has
-- a usable unfolding (see 'wantSpecImport').  Also recursively specialises
-- any further imported calls exposed by the new right-hand sides, guarding
-- against re-specialising the same function via the 'done' set.
specImport :: DynFlags
           -> Module
           -> VarSet                -- Don't specialise these
                                    -- See Note [Avoiding recursive specialisation]
           -> RuleBase              -- Rules from this module
           -> Id -> [CallInfo]      -- Imported function and calls for it
           -> CoreM ( [CoreRule]    -- New rules
                    , [CoreBind] )  -- Specialised bindings
specImport dflags this_mod done rb fn calls_for_fn
  | fn `elemVarSet` done
  = return ([], [])     -- No warning.  This actually happens all the time
                        -- when specialising a recursive function, because
                        -- the RHS of the specialised function contains a recursive
                        -- call to the original function
  | null calls_for_fn   -- We filtered out all the calls in deleteCallsMentioning
  = return ([], [])
  | wantSpecImport dflags unfolding
  , Just rhs <- maybeUnfoldingTemplate unfolding
  = do {     -- Get rules from the external package state
             -- We keep doing this in case we "page-fault in"
             -- more rules as we go along
       ; hsc_env <- getHscEnv
       ; eps <- liftIO $ hscEPS hsc_env
       ; let full_rb = unionRuleBase rb (eps_rule_base eps)
             rules_for_fn = getRules full_rb fn

       ; (rules1, spec_pairs, uds) <- runSpecM dflags $
              specCalls (Just this_mod) emptySpecEnv rules_for_fn calls_for_fn fn rhs
       ; let spec_binds1 = [NonRec b r | (b,r) <- spec_pairs]
             -- After the rules kick in we may get recursion, but
             -- we rely on a global GlomBinds to sort that out later
             -- See Note [Glom the bindings if imported functions are specialised]

              -- Now specialise any cascaded calls
       ; (rules2, spec_binds2) <- -- pprTrace "specImport" (ppr fn $$ ppr uds $$ ppr rhs) $
                                  specImports dflags this_mod (extendVarSet done fn)
                                              (extendRuleBaseList rb rules1)
                                              uds

       ; return (rules2 ++ rules1, spec_binds2 ++ spec_binds1) }

  | otherwise
  -- Nothing we can do with this call: emit a debug warning (compiled out
  -- in non-DEBUG builds) and return no rules or bindings.
  = WARN( True, hang (ptext (sLit "specImport discarding:") <+> ppr fn <+> dcolon <+> ppr (idType fn))
                   2 (  (text "want:" <+> ppr (wantSpecImport dflags unfolding))
                     $$ (text "stable:" <+> ppr (isStableUnfolding unfolding))
                     $$ (text "calls:" <+> vcat (map (pprCallInfo fn) calls_for_fn)) ) )
    return ([], [])
  where
    unfolding = realIdUnfolding fn   -- We want to see the unfolding even for loop breakers
-- | Should we specialise this imported function?  True for DFuns and for
-- stable (INLINE/INLINABLE) unfoldings; with -fspecialise-aggressively,
-- true for anything that has an unfolding template at all.
wantSpecImport :: DynFlags -> Unfolding -> Bool
-- See Note [Specialise imported INLINABLE things]
wantSpecImport dflags unf
 = case unf of
     NoUnfolding -> False      -- No template to specialise
     OtherCon {} -> False
     DFunUnfolding {} -> True  -- Always specialise dictionary functions
     CoreUnfolding { uf_src = src, uf_guidance = _guidance }
       | gopt Opt_SpecialiseAggressively dflags -> True
       | isStableSource src -> True
               -- Specialise even INLINE things; it hasn't inlined yet,
               -- so perhaps it never will.  Moreover it may have calls
               -- inside it that we want to specialise
       | otherwise -> False    -- Stable, not INLINE, hence INLINEABLE
{-
Note [Specialise imported INLINABLE things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What imported functions do we specialise? The basic set is
* DFuns and things with INLINABLE pragmas.
but with -fspecialise-aggressively we add
* Anything with an unfolding template
Trac #8874 has a good example of why we want to auto-specialise DFuns.
We have the -fspecialise-aggressively flag (usually off), because we
risk lots of orphan modules from over-vigorous specialisation.
However it's not a big deal: anything non-recursive with an
unfolding-template will probably have been inlined already.
Note [Glom the bindings if imported functions are specialised]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have an imported, *recursive*, INLINABLE function
f :: Eq a => a -> a
f = /\a \d x. ...(f a d)...
In the module being compiled we have
g x = f (x::Int)
Now we'll make a specialised function
f_spec :: Int -> Int
f_spec = \x -> ...(f Int dInt)...
{-# RULE f Int _ = f_spec #-}
g = \x. f Int dInt x
Note that f_spec doesn't look recursive
After rewriting with the RULE, we get
f_spec = \x -> ...(f_spec)...
BUT since f_spec was non-recursive before it'll *stay* non-recursive.
The occurrence analyser never turns a NonRec into a Rec. So we must
make sure that f_spec is recursive. Easiest thing is to make all
the specialisations for imported bindings recursive.
Note [Avoiding recursive specialisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we specialise 'f' we may find new overloaded calls to 'g', 'h' in
'f's RHS. So we want to specialise g,h. But we don't want to
specialise f any more! It's possible that f's RHS might have a
recursive yet-more-specialised call, so we'd diverge in that case.
And if the call is to the same type, one specialisation is enough.
Avoiding this recursive specialisation loop is the reason for the
'done' VarSet passed to specImports and specImport.
************************************************************************
* *
\subsubsection{@specExpr@: the main function}
* *
************************************************************************
-}
-- | Environment threaded through the specialiser: the in-flight Core
-- substitution plus the set of dictionary Ids considered "interesting"
-- (i.e. worth generating specialisations for).
data SpecEnv
  = SE { se_subst :: CoreSubst.Subst
             -- We carry a substitution down:
             -- a) we must clone any binding that might float outwards,
             --    to avoid name clashes
             -- b) we carry a type substitution to use when analysing
             --    the RHS of specialised bindings (no type-let!)

       , se_interesting :: VarSet
             -- Dict Ids that we know something about
             -- and hence may be worth specialising against
             -- See Note [Interesting dictionary arguments]
     }
-- | A 'SpecEnv' with an empty substitution and no interesting
-- dictionary Ids: the starting environment for specialisation.
emptySpecEnv :: SpecEnv
emptySpecEnv
  = SE { se_subst       = CoreSubst.emptySubst
       , se_interesting = emptyVarSet }
-- | Look up an Id in the environment's substitution; an unsubstituted
-- Id comes back unchanged (as a Var expression).
specVar :: SpecEnv -> Id -> CoreExpr
specVar env v = CoreSubst.lookupIdSubst (text "specVar") (se_subst env) v
-- | Specialise an expression: apply the current substitution, collect
-- call instances of overloaded functions into the returned UsageDetails,
-- and dump usage details that mention binders going out of scope.
specExpr :: SpecEnv -> CoreExpr -> SpecM (CoreExpr, UsageDetails)

---------------- First the easy cases --------------------
specExpr env (Type ty) = return (Type (substTy env ty), emptyUDs)
specExpr env (Coercion co) = return (Coercion (substCo env co), emptyUDs)
specExpr env (Var v) = return (specVar env v, emptyUDs)
specExpr _ (Lit lit) = return (Lit lit, emptyUDs)
specExpr env (Cast e co)
  = do { (e', uds) <- specExpr env e
       ; return ((Cast e' (substCo env co)), uds) }
specExpr env (Tick tickish body)
  = do { (body', uds) <- specExpr env body
       ; return (Tick (specTickish env tickish) body', uds) }

---------------- Applications might generate a call instance --------------------
-- Walk down the application spine accumulating the (already-specialised)
-- arguments, so that when we reach the head Var we can record a call
-- instance for it via mkCallUDs.
specExpr env expr@(App {})
  = go expr []
  where
    go (App fun arg) args = do (arg', uds_arg) <- specExpr env arg
                               (fun', uds_app) <- go fun (arg':args)
                               return (App fun' arg', uds_arg `plusUDs` uds_app)
    go (Var f) args = case specVar env f of
                        Var f' -> return (Var f', mkCallUDs env f' args)
                        e' -> return (e', emptyUDs) -- I don't expect this!
    go other _ = specExpr env other

---------------- Lambda/case require dumping of usage details --------------------
specExpr env e@(Lam _ _) = do
    (body', uds) <- specExpr env' body
    let (free_uds, dumped_dbs) = dumpUDs bndrs' uds
    return (mkLams bndrs' (wrapDictBindsE dumped_dbs body'), free_uds)
  where
    (bndrs, body) = collectBinders e
    (env', bndrs') = substBndrs env bndrs
        -- More efficient to collect a group of binders together all at once
        -- and we don't want to split a lambda group with dumped bindings

specExpr env (Case scrut case_bndr ty alts)
  = do { (scrut', scrut_uds) <- specExpr env scrut
       ; (scrut'', case_bndr', alts', alts_uds)
             <- specCase env scrut' case_bndr alts
       ; return (Case scrut'' case_bndr' (substTy env ty) alts'
                , scrut_uds `plusUDs` alts_uds) }

---------------- Finally, let is the interesting case --------------------
specExpr env (Let bind body)
  = do { -- Clone binders
         (rhs_env, body_env, bind') <- cloneBindSM env bind

         -- Deal with the body
       ; (body', body_uds) <- specExpr body_env body

         -- Deal with the bindings
       ; (binds', uds) <- specBind rhs_env bind' body_uds

         -- All done
       ; return (foldr Let body' binds', uds) }
-- | Apply the current substitution to the Ids carried by a tickish.
-- Only breakpoints carry Ids; every other tickish passes through
-- unchanged.
specTickish :: SpecEnv -> Tickish Id -> Tickish Id
specTickish env (Breakpoint ix ids)
  = Breakpoint ix [ v | Var v <- map (specVar env) ids ]
    -- Any Id whose substitution is not a plain variable is silently
    -- dropped.  That should never happen, and dropping is harmless.
specTickish _ other_tickish = other_tickish
-- | Specialise the alternatives of a case expression.  The first equation
-- implements Note [Floating dictionaries out of cases]: a single-alternative
-- case whose binder is an interesting dictionary is inverted, floating the
-- scrutinee and superclass selections out as bindings so that calls on the
-- selected components can still be specialised.
specCase :: SpecEnv
         -> CoreExpr            -- Scrutinee, already done
         -> Id -> [CoreAlt]
         -> SpecM ( CoreExpr    -- New scrutinee
                  , Id
                  , [CoreAlt]
                  , UsageDetails)
specCase env scrut' case_bndr [(con, args, rhs)]
  | isDictId case_bndr           -- See Note [Floating dictionaries out of cases]
  , interestingDict env scrut'
  , not (isDeadBinder case_bndr && null sc_args')
  = do { (case_bndr_flt : sc_args_flt) <- mapM clone_me (case_bndr' : sc_args')

         -- Each floated superclass argument is rebuilt by re-scrutinising
         -- the floated case binder and selecting just that component.
       ; let sc_rhss = [ Case (Var case_bndr_flt) case_bndr' (idType sc_arg')
                              [(con, args', Var sc_arg')]
                       | sc_arg' <- sc_args' ]

             -- Extend the substitution for RHS to map the *original* binders
             -- to their floated versions.
             mb_sc_flts :: [Maybe DictId]
             mb_sc_flts = map (lookupVarEnv clone_env) args'
             clone_env = zipVarEnv sc_args' sc_args_flt
             subst_prs = (case_bndr, Var case_bndr_flt)
                       : [ (arg, Var sc_flt)
                         | (arg, Just sc_flt) <- args `zip` mb_sc_flts ]
             env_rhs' = env_rhs { se_subst = CoreSubst.extendIdSubstList (se_subst env_rhs) subst_prs
                                , se_interesting = se_interesting env_rhs `extendVarSetList`
                                                   (case_bndr_flt : sc_args_flt) }

       ; (rhs', rhs_uds) <- specExpr env_rhs' rhs
       ; let scrut_bind = mkDB (NonRec case_bndr_flt scrut')
             case_bndr_set = unitVarSet case_bndr_flt
             -- Each floated selection binding depends on the floated
             -- case binder, hence the dependency set alongside it.
             sc_binds = [(NonRec sc_arg_flt sc_rhs, case_bndr_set)
                        | (sc_arg_flt, sc_rhs) <- sc_args_flt `zip` sc_rhss ]
             flt_binds = scrut_bind : sc_binds
             (free_uds, dumped_dbs) = dumpUDs (case_bndr':args') rhs_uds
             all_uds = flt_binds `addDictBinds` free_uds
             alt' = (con, args', wrapDictBindsE dumped_dbs rhs')
       ; return (Var case_bndr_flt, case_bndr', [alt'], all_uds) }
  where
    (env_rhs, (case_bndr':args')) = substBndrs env (case_bndr:args)
    sc_args' = filter is_flt_sc_arg args'

    -- Make a fresh Id with the same name, type and source location.
    clone_me bndr = do { uniq <- getUniqueM
                       ; return (mkUserLocal occ uniq ty loc) }
      where
        name = idName bndr
        ty = idType bndr
        occ = nameOccName name
        loc = getSrcSpan name

    arg_set = mkVarSet args'
    -- Float only live dictionary-typed args whose types do not mention
    -- type variables bound by this case alternative.
    is_flt_sc_arg var = isId var
                     && not (isDeadBinder var)
                     && isDictTy var_ty
                     && not (tyVarsOfType var_ty `intersectsVarSet` arg_set)
       where
         var_ty = idType var

-- Ordinary (non-floating) case: just specialise each alternative,
-- dumping usage details that mention the alternative's binders.
specCase env scrut case_bndr alts
  = do { (alts', uds_alts) <- mapAndCombineSM spec_alt alts
       ; return (scrut, case_bndr', alts', uds_alts) }
  where
    (env_alt, case_bndr') = substBndr env case_bndr
    spec_alt (con, args, rhs) = do
          (rhs', uds) <- specExpr env_rhs rhs
          let (free_uds, dumped_dbs) = dumpUDs (case_bndr' : args') uds
          return ((con, args', wrapDictBindsE dumped_dbs rhs'), free_uds)
        where
          (env_rhs, args') = substBndrs env_alt args
{-
Note [Floating dictionaries out of cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
g = \d. case d of { MkD sc ... -> ...(f sc)... }
Naively we can't float d2's binding out of the case expression,
because 'sc' is bound by the case, and that in turn means we can't
specialise f, which seems a pity.
So we invert the case, by floating out a binding
for 'sc_flt' thus:
sc_flt = case d of { MkD sc ... -> sc }
Now we can float the call instance for 'f'. Indeed this is just
what'll happen if 'sc' was originally bound with a let binding,
but case is more efficient, and necessary with equalities. So it's
good to work with both.
You might think that this won't make any difference, because the
call instance will only get nuked by the \d. BUT if 'g' itself is
specialised, then transitively we should be able to specialise f.
In general, given
case e of cb { MkD sc ... -> ...(f sc)... }
we transform to
let cb_flt = e
sc_flt = case cb_flt of { MkD sc ... -> sc }
in
case cb_flt of bg { MkD sc ... -> ....(f sc_flt)... }
The "_flt" things are the floated binds; we use the current substitution
to substitute sc -> sc_flt in the RHS
************************************************************************
* *
Dealing with a binding
* *
************************************************************************
-}
specBind :: SpecEnv                     -- Use this for RHSs
         -> CoreBind                    -- The binding to specialise
         -> UsageDetails                -- Info on how the scope of the binding
                                        -- uses the bound variables
         -> SpecM ([CoreBind],          -- New bindings
                   UsageDetails)        -- And info to pass upstream

-- Returned UsageDetails:
--    No calls for binders of this bind
specBind rhs_env (NonRec fn rhs) body_uds
  = do { (rhs', rhs_uds) <- specExpr rhs_env rhs
       ; (fn', spec_defns, body_uds1) <- specDefn rhs_env body_uds fn rhs

       ; let pairs = spec_defns ++ [(fn', rhs')]
                        -- fn' mentions the spec_defns in its rules,
                        -- so put the latter first

             combined_uds = body_uds1 `plusUDs` rhs_uds
                -- This way round a call in rhs_uds of a function f
                -- at type T will override a call of f at T in body_uds1; and
                -- that is good because it'll tend to keep "earlier" calls
                -- See Note [Specialisation of dictionary functions]

             (free_uds, dump_dbs, float_all) = dumpBindUDs [fn] combined_uds
                -- See Note [From non-recursive to recursive]

             -- If any floated dict binds had to be dumped here, the result
             -- may be (mutually) recursive; wrap everything in a Rec.
             final_binds | isEmptyBag dump_dbs = [NonRec b r | (b,r) <- pairs]
                         | otherwise = [Rec (flattenDictBinds dump_dbs pairs)]

         ; if float_all then
             -- Rather than discard the calls mentioning the bound variables
             -- we float this binding along with the others
              return ([], free_uds `snocDictBinds` final_binds)
           else
             -- No call in final_uds mentions bound variables,
             -- so we can just leave the binding here
              return (final_binds, free_uds) }


specBind rhs_env (Rec pairs) body_uds
       -- Note [Specialising a recursive group]
  = do { let (bndrs,rhss) = unzip pairs
       ; (rhss', rhs_uds) <- mapAndCombineSM (specExpr rhs_env) rhss
       ; let scope_uds = body_uds `plusUDs` rhs_uds
                       -- Includes binds and calls arising from rhss

       -- First sweep over the group (later bindings first)
       ; (bndrs1, spec_defns1, uds1) <- specDefns rhs_env scope_uds pairs

       ; (bndrs3, spec_defns3, uds3)
             <- if null spec_defns1  -- Common case: no specialisation
                then return (bndrs1, [], uds1)
                else do {            -- Specialisation occurred; do it again
                          -- (second sweep, feeding call info from the RHSs
                          -- back in; see Note [Specialising a recursive group])
                          (bndrs2, spec_defns2, uds2)
                              <- specDefns rhs_env uds1 (bndrs1 `zip` rhss)
                        ; return (bndrs2, spec_defns2 ++ spec_defns1, uds2) }

       ; let (final_uds, dumped_dbs, float_all) = dumpBindUDs bndrs uds3
             bind = Rec (flattenDictBinds dumped_dbs $
                         spec_defns3 ++ zip bndrs3 rhss')

       ; if float_all then
              return ([], final_uds `snocDictBind` bind)
         else
              return ([bind], final_uds) }
---------------------------
specDefns :: SpecEnv
          -> UsageDetails               -- Info on how it is used in its scope
          -> [(Id,CoreExpr)]            -- The things being bound and their un-processed RHS
          -> SpecM ([Id],               -- Original Ids with RULES added
                    [(Id,CoreExpr)],    -- Extra, specialised bindings
                    UsageDetails)       -- Stuff to fling upwards from the specialised versions

-- Specialise a list of bindings (the contents of a Rec), but flowing usages
-- upwards binding by binding.  Example: { f = ...g ...; g = ...f .... }
-- Then if the input CallDetails has a specialised call for 'g', whose specialisation
-- in turn generates a specialised call for 'f', we catch that in this one sweep.
-- But not vice versa (it's a fixpoint problem).

specDefns _env uds [] = return ([], [], uds)
specDefns env uds ((bndr, rhs) : rest) = do
  -- Process the later bindings first, so that usage info arising from
  -- their specialised versions is in hand when we specialise 'bndr'.
  (rest_bndrs, rest_defns, uds_after_rest) <- specDefns env uds rest
  (bndr', my_defns, uds') <- specDefn env uds_after_rest bndr rhs
  return (bndr' : rest_bndrs, rest_defns ++ my_defns, uds')
---------------------------
specDefn :: SpecEnv
         -> UsageDetails               -- Info on how it is used in its scope
         -> Id -> CoreExpr             -- The thing being bound and its un-processed RHS
         -> SpecM (Id,                 -- Original Id with added RULES
                   [(Id,CoreExpr)],    -- Extra, specialised bindings
                   UsageDetails)       -- Stuff to fling upwards from the specialised versions

specDefn env body_uds fn rhs = do
  -- Split off the calls that are for 'fn' itself; those drive the
  -- specialisation, and the remainder flows upwards untouched.
  let (uds_minus_fn, my_calls) = callsForMe fn body_uds
      existing_rules           = idCoreRules fn
  (new_rules, spec_defns, spec_uds)
      <- specCalls Nothing env existing_rules my_calls fn rhs
  -- Keep this `plusUDs` the same way round: uds_minus_fn may bind
  -- dictionaries that are used in the calls passed to specCalls, so the
  -- dictionary bindings in spec_uds may mention dictionaries bound in
  -- uds_minus_fn.
  return ( fn `addIdSpecialisations` new_rules
         , spec_defns
         , uds_minus_fn `plusUDs` spec_uds )
---------------------------
specCalls :: Maybe Module               -- Just this_mod => specialising imported fn
                                        -- Nothing => specialising local fn
          -> SpecEnv
          -> [CoreRule]                 -- Existing RULES for the fn
          -> [CallInfo]                 -- Calls we may want to specialise for
          -> Id -> CoreExpr             -- The function and its (unprocessed) RHS
          -> SpecM ([CoreRule],         -- New RULES for the fn
                    [(Id,CoreExpr)],    -- Extra, specialised bindings
                    UsageDetails)       -- New usage details from the specialised RHSs

-- This function checks existing rules, and does not create
-- duplicate ones. So the caller does not need to do this filtering.
-- See 'already_covered'

specCalls mb_mod env rules_for_me calls_for_me fn rhs
        -- The first case is the interesting one
  |  rhs_tyvars `lengthIs` n_tyvars      -- Rhs of fn's defn has right number of big lambdas
  && rhs_ids `lengthAtLeast` n_dicts     -- and enough dict args
  && notNull calls_for_me                -- And there are some calls to specialise
  && not (isNeverActive (idInlineActivation fn))
        -- Don't specialise NOINLINE things
        -- See Note [Auto-specialisation and RULES]

--   && not (certainlyWillInline (idUnfolding fn))      -- And it's not small
--      See Note [Inline specialisation] for why we do not
--      switch off specialisation for inline functions

  = -- pprTrace "specDefn: some" (ppr fn $$ ppr calls_for_me $$ ppr rules_for_me) $
    do { stuff <- mapM spec_call calls_for_me
       ; let (spec_defns, spec_uds, spec_rules) = unzip3 (catMaybes stuff)
       ; return (spec_rules, spec_defns, plusUDList spec_uds) }

  | otherwise   -- No calls or RHS doesn't fit our preconceptions
  = WARN( not (exprIsTrivial rhs) && notNull calls_for_me,
          ptext (sLit "Missed specialisation opportunity for")
                                 <+> ppr fn $$ _trace_doc )
          -- Note [Specialisation shape]
    -- pprTrace "specDefn: none" (ppr fn <+> ppr calls_for_me) $
    return ([], [], emptyUDs)
  where
    _trace_doc = sep [ ppr rhs_tyvars, ppr n_tyvars
                     , ppr rhs_ids, ppr n_dicts
                     , ppr (idInlineActivation fn) ]

    fn_type   = idType fn
    fn_arity  = idArity fn
    fn_unf    = realIdUnfolding fn  -- Ignore loop-breaker-ness here
    (tyvars, theta, _) = tcSplitSigmaTy fn_type
    n_tyvars  = length tyvars
    n_dicts   = length theta
    inl_prag  = idInlinePragma fn
    inl_act   = inlinePragmaActivation inl_prag
    is_local  = isLocalId fn

        -- Figure out whether the function has an INLINE pragma
        -- See Note [Inline specialisations]

    (rhs_tyvars, rhs_ids, rhs_body) = collectTyAndValBinders rhs

    rhs_dict_ids = take n_dicts rhs_ids
    body         = mkLams (drop n_dicts rhs_ids) rhs_body
                -- Glue back on the non-dict lambdas

    -- True if an existing RULE already covers this argument pattern;
    -- if so we must not make a second, duplicate specialisation.
    already_covered :: DynFlags -> [CoreExpr] -> Bool
    already_covered dflags args      -- Note [Specialisations already covered]
       = isJust (lookupRule dflags
                            (CoreSubst.substInScope (se_subst env), realIdUnfolding)
                            (const True)
                            fn args rules_for_me)

    -- Zip the call's type arguments against the remaining polymorphic
    -- tyvars: Nothing slots stay polymorphic, Just slots are fixed.
    mk_ty_args :: [Maybe Type] -> [TyVar] -> [CoreExpr]
    mk_ty_args [] poly_tvs
      = ASSERT( null poly_tvs ) []
    mk_ty_args (Nothing : call_ts) (poly_tv : poly_tvs)
      = Type (mkTyVarTy poly_tv) : mk_ty_args call_ts poly_tvs
    mk_ty_args (Just ty : call_ts) poly_tvs
      = Type ty : mk_ty_args call_ts poly_tvs
    mk_ty_args (Nothing : _) [] = panic "mk_ty_args"

    ----------------------------------------------------------
        -- Specialise to one particular call pattern
    spec_call :: CallInfo                         -- Call instance
              -> SpecM (Maybe ((Id,CoreExpr),     -- Specialised definition
                               UsageDetails,      -- Usage details from specialised body
                               CoreRule))         -- Info for the Id's SpecEnv
    spec_call (CallKey call_ts, (call_ds, _))
      = ASSERT( call_ts `lengthIs` n_tyvars && call_ds `lengthIs` n_dicts )

        -- Suppose f's defn is  f = /\ a b c -> \ d1 d2 -> rhs
        -- Suppose the call is for f [Just t1, Nothing, Just t3] [dx1, dx2]

        -- Construct the new binding
        --      f1 = SUBST[a->t1,c->t3, d1->d1', d2->d2'] (/\ b -> rhs)
        -- PLUS the usage-details
        --      { d1' = dx1; d2' = dx2 }
        -- where d1', d2' are cloned versions of d1,d2, with the type substitution
        -- applied.  These auxiliary bindings just avoid duplication of dx1, dx2
        --
        -- Note that the substitution is applied to the whole thing.
        -- This is convenient, but just slightly fragile.  Notably:
        --      * There had better be no name clashes in a/b/c
        do { let
                -- poly_tyvars = [b] in the example above
                -- spec_tyvars = [a,c]
                -- ty_args     = [t1,b,t3]
                spec_tv_binds = [(tv,ty) | (tv, Just ty) <- rhs_tyvars `zip` call_ts]
                env1          = extendTvSubstList env spec_tv_binds
                (rhs_env, poly_tyvars) = substBndrs env1
                                            [tv | (tv, Nothing) <- rhs_tyvars `zip` call_ts]

             -- Clone rhs_dicts, including instantiating their types
           ; inst_dict_ids <- mapM (newDictBndr rhs_env) rhs_dict_ids
           ; let (rhs_env2, dx_binds, spec_dict_args)
                            = bindAuxiliaryDicts rhs_env rhs_dict_ids call_ds inst_dict_ids
                 ty_args    = mk_ty_args call_ts poly_tyvars
                 rule_args  = ty_args ++ map Var inst_dict_ids
                 rule_bndrs = poly_tyvars ++ inst_dict_ids

           ; dflags <- getDynFlags
           ; if already_covered dflags rule_args then
                return Nothing
             else do
           {    -- Figure out the type of the specialised function
             let body_ty = applyTypeToArgs rhs fn_type rule_args
                 (lam_args, app_args)           -- Add a dummy argument if body_ty is unlifted
                   | isUnLiftedType body_ty     -- C.f. WwLib.mkWorkerArgs
                   = (poly_tyvars ++ [voidArgId], poly_tyvars ++ [voidPrimId])
                   | otherwise = (poly_tyvars, poly_tyvars)
                 spec_id_ty = mkPiTypes lam_args body_ty

           ; spec_f <- newSpecIdSM fn spec_id_ty
           ; (spec_rhs, rhs_uds) <- specExpr rhs_env2 (mkLams lam_args body)
           ; let
                -- The rule to put in the function's specialisation is:
                --      forall b, d1',d2'.  f t1 b t3 d1' d2' = f1 b
                herald = case mb_mod of
                           Nothing        -- Specialising local fn
                               -> ptext (sLit "SPEC")
                           Just this_mod  -- Specialising imported fn
                               -> ptext (sLit "SPEC/") <> ppr this_mod

                rule_name = mkFastString $ showSDocForUser dflags neverQualify $
                            herald <+> ppr fn <+> hsep (map ppr_call_key_ty call_ts)
                            -- This name ends up in interface files, so use showSDocForUser,
                            -- otherwise uniques end up there, making builds
                            -- less deterministic (See #4012 comment:61 ff)

                spec_env_rule = mkRule True {- Auto generated -} is_local
                                  rule_name
                                  inl_act       -- Note [Auto-specialisation and RULES]
                                  (idName fn)
                                  rule_bndrs
                                  rule_args
                                  (mkVarApps (Var spec_f) app_args)

                -- Add the { d1' = dx1; d2' = dx2 } usage stuff
                final_uds = foldr consDictBind rhs_uds dx_binds

                --------------------------------------
                -- Add a suitable unfolding if the spec_inl_prag says so
                -- See Note [Inline specialisations]
                (spec_inl_prag, spec_unf)
                  | not is_local && isStrongLoopBreaker (idOccInfo fn)
                  = (neverInlinePragma, noUnfolding)
                        -- See Note [Specialising imported functions] in OccurAnal

                  | InlinePragma { inl_inline = Inlinable } <- inl_prag
                  = (inl_prag { inl_inline = EmptyInlineSpec }, noUnfolding)

                  | otherwise
                  = (inl_prag, specUnfolding dflags (se_subst env)
                                             poly_tyvars (ty_args ++ spec_dict_args)
                                             fn_unf)

                --------------------------------------
                -- Adding arity information just propagates it a bit faster
                --      See Note [Arity decrease] in Simplify
                -- Copy InlinePragma information from the parent Id.
                -- So if f has INLINE[1] so does spec_f
                spec_f_w_arity = spec_f `setIdArity`      max 0 (fn_arity - n_dicts)
                                        `setInlinePragma` spec_inl_prag
                                        `setIdUnfolding`  spec_unf

           ; return (Just ((spec_f_w_arity, spec_rhs), final_uds, spec_env_rule)) } }
bindAuxiliaryDicts
        :: SpecEnv
        -> [DictId] -> [CoreExpr]   -- Original dict bndrs, and the witnessing expressions
        -> [DictId]                 -- A cloned dict-id for each dict arg
        -> (SpecEnv,                -- Substitute for all orig_dicts
            [CoreBind],             -- Auxiliary dict bindings
            [CoreExpr])             -- Witnessing expressions (all trivial)
-- Bind any dictionary arguments to fresh names, to preserve sharing
bindAuxiliaryDicts env@(SE { se_subst = subst, se_interesting = interesting })
                   orig_dict_ids call_ds inst_dict_ids
  = (env', dx_binds, spec_dict_args)
  where
    (dx_binds, spec_dict_args) = go call_ds inst_dict_ids

    -- Substitute the *original* dict ids by the (trivial) witnessing
    -- expressions, and record newly-bound interesting dictionaries.
    env' = env { se_subst = CoreSubst.extendIdSubstList subst
                              (orig_dict_ids `zip` spec_dict_args)
               , se_interesting = interesting `unionVarSet` interesting_dicts }

    interesting_dicts = mkVarSet [ dx_id | NonRec dx_id dx <- dx_binds
                                 , interestingDict env dx ]
                  -- See Note [Make the new dictionaries interesting]

    -- Walk the witnessing expressions in step with the cloned ids:
    -- a trivial witness is used directly, a non-trivial one gets an
    -- auxiliary NonRec binding so the work is shared.
    go [] _  = ([], [])
    go (dx:dxs) (dx_id:dx_ids)
      | exprIsTrivial dx = (dx_binds, dx:args)
      | otherwise        = (NonRec dx_id dx : dx_binds, Var dx_id : args)
      where
        (dx_binds, args) = go dxs dx_ids
        -- In the first case extend the substitution but not bindings;
        -- in the latter extend the bindings but not the substitution.
        -- For the former, note that we bind the *original* dict in the substitution,
        -- overriding any d->dx_id binding put there by substBndrs

    -- The two lists should be the same length; anything else is a bug.
    go _ _ = pprPanic "bindAuxiliaryDicts" (ppr orig_dict_ids $$ ppr call_ds $$ ppr inst_dict_ids)
{-
Note [Make the new dictionaries interesting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Important! We're going to substitute dx_id1 for d
and we want it to look "interesting", else we won't gather *any*
consequential calls. E.g.
f d = ...g d....
If we specialise f for a call (f (dfun dNumInt)), we'll get
a consequent call (g d') with an auxiliary definition
d' = df dNumInt
We want that consequent call to look interesting
Note [From non-recursive to recursive]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Even in the non-recursive case, if any dict-binds depend on 'fn' we might
have built a recursive knot
f a d x = <blah>
MkUD { ud_binds = d7 = MkD ..f..
, ud_calls = ...(f T d7)... }
Then we generate
Rec { fs x = <blah>[T/a, d7/d]
f a d x = <blah>
RULE f T _ = fs
d7 = ...f... }
Here the recursion is only through the RULE.
Note [Specialisation of dictionary functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is a nasty example that bit us badly: see Trac #3591
class Eq a => C a
instance Eq [a] => C [a]
---------------
dfun :: Eq [a] -> C [a]
dfun a d = MkD a d (meth d)
d4 :: Eq [T] = <blah>
d2 :: C [T] = dfun T d4
d1 :: Eq [T] = $p1 d2
d3 :: C [T] = dfun T d1
None of these definitions is recursive. What happened was that we
generated a specialisation:
RULE forall d. dfun T d = dT :: C [T]
dT = (MkD a d (meth d)) [T/a, d1/d]
= MkD T d1 (meth d1)
But now we use the RULE on the RHS of d2, to get
d2 = dT = MkD d1 (meth d1)
d1 = $p1 d2
and now d1 is bottom! The problem is that when specialising 'dfun' we
should first dump "below" the binding all floated dictionary bindings
that mention 'dfun' itself. So d2 and d3 (and hence d1) must be
placed below 'dfun', and thus unavailable to it when specialising
'dfun'. That in turn means that the call (dfun T d1) must be
discarded. On the other hand, the call (dfun T d4) is fine, assuming
d4 doesn't mention dfun.
But look at this:
class C a where { foo,bar :: [a] -> [a] }
instance C Int where
foo x = r_bar x
bar xs = reverse xs
r_bar :: C a => [a] -> [a]
r_bar xs = bar (xs ++ xs)
That translates to:
r_bar a (c::C a) (xs::[a]) = bar a d (xs ++ xs)
Rec { $fCInt :: C Int = MkC foo_help reverse
foo_help (xs::[Int]) = r_bar Int $fCInt xs }
The call (r_bar $fCInt) mentions $fCInt,
which mentions foo_help,
which mentions r_bar
But we DO want to specialise r_bar at Int:
Rec { $fCInt :: C Int = MkC foo_help reverse
foo_help (xs::[Int]) = r_bar Int $fCInt xs
r_bar a (c::C a) (xs::[a]) = bar a d (xs ++ xs)
RULE r_bar Int _ = r_bar_Int
r_bar_Int xs = bar Int $fCInt (xs ++ xs)
}
Note that, because of its RULE, r_bar joins the recursive
group. (In this case it'll unravel a short moment later.)
Conclusion: we catch the nasty case using filter_dfuns in
callsForMe. To be honest I'm not 100% certain that this is 100%
right, but it works. Sigh.
Note [Specialising a recursive group]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
let rec { f x = ...g x'...
; g y = ...f y'.... }
in f 'a'
Here we specialise 'f' at Char; but that is very likely to lead to
a specialisation of 'g' at Char. We must do the latter, else the
whole point of specialisation is lost.
But we do not want to keep iterating to a fixpoint, because in the
presence of polymorphic recursion we might generate an infinite number
of specialisations.
So we use the following heuristic:
* Arrange the rec block in dependency order, so far as possible
(the occurrence analyser already does this)
* Specialise it much like a sequence of lets
* Then go through the block a second time, feeding call-info from
the RHSs back in the bottom, as it were
In effect, the ordering maximises the effectiveness of each sweep,
and we do just two sweeps. This should catch almost every case of
monomorphic recursion -- the exception could be a very knotted-up
recursion with multiple cycles tied up together.
This plan is implemented in the Rec case of specBindItself.
Note [Specialisations already covered]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We obviously don't want to generate two specialisations for the same
argument pattern. There are two wrinkles
1. We do the already-covered test in specDefn, not when we generate
the CallInfo in mkCallUDs. We used to test in the latter place, but
we now iterate the specialiser somewhat, and the Id at the call site
might therefore not have all the RULES that we can see in specDefn
2. What about two specialisations where the second is an *instance*
of the first? If the more specific one shows up first, we'll generate
specialisations for both. If the *less* specific one shows up first,
we *don't* currently generate a specialisation for the more specific
one. (See the call to lookupRule in already_covered.) Reasons:
(a) lookupRule doesn't say which matches are exact (bad reason)
(b) if the earlier specialisation is user-provided, it's
far from clear that we should auto-specialise further
Note [Auto-specialisation and RULES]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider:
g :: Num a => a -> a
g = ...
f :: (Int -> Int) -> Int
f w = ...
{-# RULE f g = 0 #-}
Suppose that auto-specialisation makes a specialised version of
g::Int->Int That version won't appear in the LHS of the RULE for f.
So if the specialisation rule fires too early, the rule for f may
never fire.
It might be possible to add new rules, to "complete" the rewrite system.
Thus when adding
RULE forall d. g Int d = g_spec
also add
RULE f g_spec = 0
But that's a bit complicated. For now we ask the programmer's help,
by *copying the INLINE activation pragma* to the auto-specialised
rule. So if g says {-# NOINLINE[2] g #-}, then the auto-spec rule
will also not be active until phase 2. And that's what programmers
should jolly well do anyway, even aside from specialisation, to ensure
that g doesn't inline too early.
This in turn means that the RULE would never fire for a NOINLINE
thing so not much point in generating a specialisation at all.
Note [Specialisation shape]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We only specialise a function if it has visible top-level lambdas
corresponding to its overloading. E.g. if
f :: forall a. Eq a => ....
then its body must look like
f = /\a. \d. ...
Reason: when specialising the body for a call (f ty dexp), we want to
substitute dexp for d, and pick up specialised calls in the body of f.
This doesn't always work. One example I came across was this:
newtype Gen a = MkGen{ unGen :: Int -> a }
choose :: Eq a => a -> Gen a
choose n = MkGen (\r -> n)
oneof = choose (1::Int)
It's a silly example, but we get
choose = /\a. g `cast` co
where choose doesn't have any dict arguments. Thus far I have not
tried to fix this (wait till there's a real example).
Mind you, then 'choose' will be inlined (since RHS is trivial) so
it doesn't matter. This comes up with single-method classes
class C a where { op :: a -> a }
instance C a => C [a] where ....
==>
$fCList :: C a => C [a]
$fCList = $copList |> (...coercion>...)
....(uses of $fCList at particular types)...
So we suppress the WARN if the rhs is trivial.
Note [Inline specialisations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is what we do with the InlinePragma of the original function
* Activation/RuleMatchInfo: both transferred to the
specialised function
* InlineSpec:
(a) An INLINE pragma is transferred
(b) An INLINABLE pragma is *not* transferred
Why (a): transfer INLINE pragmas? The point of INLINE was precisely to
specialise the function at its call site, and arguably that's not so
important for the specialised copies. BUT *pragma-directed*
specialisation now takes place in the typechecker/desugarer, with
manually specified INLINEs. The specialisation here is automatic.
It'd be very odd if a function marked INLINE was specialised (because
of some local use), and then forever after (including importing
modules) the specialised version wasn't INLINEd. After all, the
programmer said INLINE!
You might wonder why we specialise INLINE functions at all. After
all they should be inlined, right? Two reasons:
* Even INLINE functions are sometimes not inlined, when they aren't
applied to interesting arguments. But perhaps the type arguments
alone are enough to specialise (even though the args are too boring
to trigger inlining), and it's certainly better to call the
specialised version.
* The RHS of an INLINE function might call another overloaded function,
and we'd like to generate a specialised version of that function too.
This actually happens a lot. Consider
replicateM_ :: (Monad m) => Int -> m a -> m ()
{-# INLINABLE replicateM_ #-}
replicateM_ d x ma = ...
The strictness analyser may transform to
replicateM_ :: (Monad m) => Int -> m a -> m ()
{-# INLINE replicateM_ #-}
replicateM_ d x ma = case x of I# x' -> $wreplicateM_ d x' ma
$wreplicateM_ :: (Monad m) => Int# -> m a -> m ()
{-# INLINABLE $wreplicateM_ #-}
$wreplicateM_ = ...
Now an importing module has a specialised call to replicateM_, say
(replicateM_ dMonadIO). We certainly want to specialise $wreplicateM_!
This particular example had a huge effect on the call to replicateM_
in nofib/shootout/n-body.
Why (b): discard INLINEABLE pragmas? See Trac #4874 for persuasive examples.
Suppose we have
{-# INLINABLE f #-}
f :: Ord a => [a] -> Int
f xs = letrec f' = ...f'... in f'
Then, when f is specialised and optimised we might get
wgo :: [Int] -> Int#
wgo = ...wgo...
f_spec :: [Int] -> Int
f_spec xs = case wgo xs of { r -> I# r }
and we clearly want to inline f_spec at call sites. But if we still
have the big, un-optimised of f (albeit specialised) captured in an
INLINABLE pragma for f_spec, we won't get that optimisation.
So we simply drop INLINABLE pragmas when specialising. It's not really
a complete solution; ignoring specalisation for now, INLINABLE functions
don't get properly strictness analysed, for example. But it works well
for examples involving specialisation, which is the dominant use of
INLINABLE. See Trac #4874.
************************************************************************
* *
\subsubsection{UsageDetails and suchlike}
* *
************************************************************************
-}
-- | Information flowing upwards during specialisation: the dictionary
-- bindings we have floated, plus the calls we have seen and may want to
-- specialise for.
data UsageDetails
  = MkUD {
        ud_binds :: !(Bag DictBind),
               -- Floated dictionary bindings
               -- The order is important;
               -- in ds1 `union` ds2, bindings in ds2 can depend on those in ds1
               -- (Remember, Bags preserve order in GHC.)

        ud_calls :: !CallDetails

        -- INVARIANT: suppose bs = bindersOf ud_binds
        -- Then 'calls' may *mention* 'bs',
        -- but there should be no calls *for* bs
    }
-- Debug-printing only: show the floated binds and recorded calls.
instance Outputable UsageDetails where
  ppr (MkUD { ud_binds = dbs, ud_calls = calls })
        = ptext (sLit "MkUD") <+> braces (sep (punctuate comma
                [ptext (sLit "binds") <+> equals <+> ppr dbs,
                 ptext (sLit "calls") <+> equals <+> ppr calls]))
-- | A floated dictionary binding, paired with its cached free variables.
type DictBind = (CoreBind, VarSet)
        -- The set is the free vars of the binding
        -- both tyvars and dicts

-- | An expression witnessing a dictionary argument at a call site.
type DictExpr = CoreExpr
-- | No floated bindings and no recorded calls.
emptyUDs :: UsageDetails
emptyUDs = MkUD { ud_binds = emptyBag, ud_calls = emptyVarEnv }
------------------------------------------------------------
-- | Map from a function to the set of calls of it that we have seen.
type CallDetails  = IdEnv CallInfoSet

newtype CallKey   = CallKey [Maybe Type]
        -- Nothing => unconstrained type argument

-- CallInfo uses a Map, thereby ensuring that
-- we record only one call instance for any key
--
-- The list of types and dictionaries is guaranteed to
-- match the type of f
data CallInfoSet = CIS Id (Map CallKey ([DictExpr], VarSet))
        -- Range is dict args and the vars of the whole
        -- call (including tyvars)
        -- [*not* include the main id itself, of course]

-- | One entry of a 'CallInfoSet': the key plus its range.
type CallInfo = (CallKey, ([DictExpr], VarSet))
instance Outputable CallInfoSet where
  -- Renamed the pattern variable so it no longer shadows Prelude.map.
  ppr (CIS fn calls_map)
    = hang (ptext (sLit "CIS") <+> ppr fn) 2 (ppr calls_map)
-- | Pretty-print one call instance: the function, its type arguments,
-- then its dictionary arguments.
pprCallInfo :: Id -> CallInfo -> SDoc
pprCallInfo fn (CallKey mb_tys, (dxs, _))
  = hang (ppr fn)
       2 (fsep (map ppr_call_key_ty mb_tys ++ map pprParendExpr dxs))
-- | Print one slot of a CallKey: '_' for an unconstrained type argument,
-- '@ ty' for a fixed one.
ppr_call_key_ty :: Maybe Type -> SDoc
ppr_call_key_ty = maybe (char '_') (\ty -> char '@' <+> pprParendType ty)
-- Just show the underlying list of (Maybe Type)s.
instance Outputable CallKey where
  ppr (CallKey ts) = ppr ts
-- Type isn't an instance of Ord, so that we can control which
-- instance we use.  That's tiresome here.  Oh well
instance Eq CallKey where
  -- Defined via the Ord instance below, so the two always agree.
  k1 == k2 = (k1 `compare` k2) == EQ
instance Ord CallKey where
  -- Lexicographic comparison; an unconstrained slot (Nothing) sorts
  -- before any fixed type (Just), and types are compared with cmpType.
  compare (CallKey k1) (CallKey k2) = cmpList cmp k1 k2
                where
                  cmp Nothing   Nothing   = EQ
                  cmp Nothing   (Just _)  = LT
                  cmp (Just _)  Nothing   = GT
                  cmp (Just t1) (Just t2) = cmpType t1 t2
-- | Combine two call maps, merging the CallInfoSets of any Id that
-- appears in both.
unionCalls :: CallDetails -> CallDetails -> CallDetails
unionCalls = plusVarEnv_C unionCallInfoSet
-- | Merge call sets for the same Id.  Map.union is left-biased, so on a
-- duplicate CallKey the first argument's entry wins.
unionCallInfoSet :: CallInfoSet -> CallInfoSet -> CallInfoSet
unionCallInfoSet (CIS f calls1) (CIS _ calls2) = CIS f (calls1 `Map.union` calls2)
-- | Free variables of every call recorded in the map.
callDetailsFVs :: CallDetails -> VarSet
callDetailsFVs = foldVarEnv (unionVarSet . callInfoFVs) emptyVarSet
-- | Free variables of all calls in one CallInfoSet: union the cached
-- free-var sets stored in the map's range.
callInfoFVs :: CallInfoSet -> VarSet
-- Data.Map exports 'foldr' (not 'foldRight'), so use Map.foldr here.
callInfoFVs (CIS _ call_info) = Map.foldr (\(_,fv) vs -> unionVarSet fv vs) emptyVarSet call_info
------------------------------------------------------------
-- | Build the UsageDetails for a single call of 'id' with the given type
-- and dictionary arguments: no floated binds, one recorded call.
singleCall :: Id -> [Maybe Type] -> [DictExpr] -> UsageDetails
singleCall id tys dicts
  = MkUD {ud_binds = emptyBag,
          ud_calls = unitVarEnv id $ CIS id $
                     Map.singleton (CallKey tys) (dicts, call_fvs) }
  where
    call_fvs = exprsFreeVars dicts `unionVarSet` tys_fvs
    tys_fvs  = tyVarsOfTypes (catMaybes tys)
        -- The type args (tys) are guaranteed to be part of the dictionary
        -- types, because they are just the constrained types,
        -- and the dictionary is therefore sure to be bound
        -- inside the binding for any type variables free in the type;
        -- hence it's safe to neglect tyvars free in tys when making
        -- the free-var set for this call
        -- BUT I don't trust this reasoning; play safe and include tys_fvs
        --
        -- We don't include the 'id' itself.
mkCallUDs, mkCallUDs' :: SpecEnv -> Id -> [CoreExpr] -> UsageDetails
-- mkCallUDs is a thin wrapper round mkCallUDs'; it exists only to make
-- the commented-out trace easy to re-enable when debugging.
mkCallUDs env f args
  = -- pprTrace "mkCallUDs" (vcat [ ppr f, ppr args, ppr res ])
    res
  where
    res = mkCallUDs' env f args
-- Decide whether a call (f args) is worth recording for specialisation,
-- and if so build the UsageDetails for it via 'singleCall'.
mkCallUDs' env f args
  | not (want_calls_for f)  -- Imported from elsewhere
  || null theta             -- Not overloaded
  = emptyUDs

  |  not (all type_determines_value theta)
  || not (spec_tys `lengthIs` n_tyvars)
  || not ( dicts   `lengthIs` n_dicts)
  || not (any (interestingDict env) dicts)    -- Note [Interesting dictionary arguments]
  -- See also Note [Specialisations already covered]
  = -- pprTrace "mkCallUDs: discarding" _trace_doc
    emptyUDs    -- Not overloaded, or no specialisation wanted

  | otherwise
  = -- pprTrace "mkCallUDs: keeping" _trace_doc
    singleCall f spec_tys dicts
  where
    _trace_doc = vcat [ppr f, ppr args, ppr n_tyvars, ppr n_dicts
                      , ppr (map (interestingDict env) dicts)]
    (tyvars, theta, _) = tcSplitSigmaTy (idType f)
    constrained_tyvars = closeOverKinds (tyVarsOfTypes theta)
    n_tyvars           = length tyvars
    n_dicts            = length theta

    -- spec_tys: one slot per tyvar; Just ty for tyvars mentioned in the
    -- constraints (so worth fixing), Nothing for the rest.
    spec_tys = [mk_spec_ty tv ty | (tv, Type ty) <- tyvars `zip` args]
    dicts    = [dict_expr | (_, dict_expr) <- theta `zip` (drop n_tyvars args)]

    mk_spec_ty tyvar ty
        | tyvar `elemVarSet` constrained_tyvars = Just ty
        | otherwise                             = Nothing

    want_calls_for f = isLocalId f || isJust (maybeUnfoldingTemplate (realIdUnfolding f))
         -- For imported things, we gather call instances if
         -- there is an unfolding that we could in principle specialise
         -- We might still decide not to use it (consulting dflags)
         -- in specImports
         -- Use 'realIdUnfolding' to ignore the loop-breaker flag!

    type_determines_value pred    -- See Note [Type determines value]
        = case classifyPredType pred of
            ClassPred cls _ -> not (isIPClass cls)  -- Superclasses can't be IPs
            EqPred {}       -> True
            IrredPred {}    -> True   -- Things like (D []) where D is a
                                      -- Constraint-ranged family; Trac #7785
{-
Note [Type determines value]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Only specialise if all overloading is on non-IP *class* params,
because these are the ones whose *type* determines their *value*. In
particular, with implicit params, the type args *don't* say what the
value of the implicit param is! See Trac #7101
However, consider
type family D (v::*->*) :: Constraint
type instance D [] = ()
f :: D v => v Char -> Int
If we see a call (f "foo"), we'll pass a "dictionary"
() |> (g :: () ~ D [])
and it's good to specialise f at this dictionary.
So the question is: can an implicit parameter "hide inside" a
type-family constraint like (D a). Well, no. We don't allow
type instance D Maybe = ?x:Int
Hence the IrredPred case in type_determines_value.
See Trac #7785.
Note [Interesting dictionary arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
\a.\d:Eq a. let f = ... in ...(f d)...
There really is not much point in specialising f wrt the dictionary d,
because the code for the specialised f is not improved at all, because
d is lambda-bound. We simply get junk specialisations.
What is "interesting"? Just that it has *some* structure. But what about
variables?
* A variable might be imported, in which case its unfolding
will tell us whether it has useful structure
* Local variables are cloned on the way down (to avoid clashes when
we float dictionaries), and cloning drops the unfolding
(cloneIdBndr). Moreover, we make up some new bindings, and it's a
nuisance to give them unfoldings. So we keep track of the
"interesting" dictionaries as a VarSet in SpecEnv.
We have to take care to put any new interesting dictionary
bindings in the set.
We accidentally lost accurate tracking of local variables for a long
time, because cloned variables don't have unfoldings. But makes a
massive difference in a few cases, eg Trac #5113. For nofib as a
whole it's only a small win: 2.2% improvement in allocation for ansi,
1.2% for bspt, but mostly 0.0! Average 0.1% increase in binary size.
-}
interestingDict :: SpecEnv -> CoreExpr -> Bool
-- A dictionary argument is interesting if it has *some* structure:
-- a variable with a known unfolding, a data-constructor worker, or a
-- variable the environment has recorded as interesting.  Type and
-- coercion arguments never count; wrappers (ticks, casts, type/coercion
-- applications) are looked through.
interestingDict env arg
  = case arg of
      Var v               -> hasSomeUnfolding (idUnfolding v)
                             || isDataConWorkId v
                             || v `elemVarSet` se_interesting env
      Type _              -> False
      Coercion _          -> False
      App fn (Type _)     -> interestingDict env fn
      App fn (Coercion _) -> interestingDict env fn
      Tick _ e            -> interestingDict env e
      Cast e _            -> interestingDict env e
      _                   -> True
plusUDs :: UsageDetails -> UsageDetails -> UsageDetails
-- Combine two sets of usage details pointwise: union the floated
-- dictionary bindings and merge the recorded calls.
plusUDs (MkUD { ud_binds = binds1, ud_calls = cs1 })
        (MkUD { ud_binds = binds2, ud_calls = cs2 })
  = MkUD { ud_binds = binds1 `unionBags` binds2
         , ud_calls = cs1 `unionCalls` cs2 }

plusUDList :: [UsageDetails] -> UsageDetails
-- Merge a whole list of usage details, starting from the empty set.
plusUDList uds = foldr plusUDs emptyUDs uds
-----------------------------
-----------------------------
-- | All binders of all bindings in the bag (debugging helper; the leading
-- underscore silences the unused-binding warning).
_dictBindBndrs :: Bag DictBind -> [Id]
_dictBindBndrs dbs = foldrBag ((++) . bindersOf . fst) [] dbs

-- | Pair a binding with its free-variable set, forming a 'DictBind'.
mkDB :: CoreBind -> DictBind
mkDB bind = (bind, bind_fvs bind)

-- | Free variables of a binding; for a Rec group the binders themselves
-- are removed from the result.
bind_fvs :: CoreBind -> VarSet
bind_fvs (NonRec bndr rhs) = pair_fvs (bndr,rhs)
bind_fvs (Rec prs)         = foldl delVarSet rhs_fvs bndrs
                           where
                             bndrs = map fst prs
                             rhs_fvs = unionVarSets (map pair_fvs prs)

-- | Free variables of one (binder, rhs) pair, including the binder's own
-- free variables (rules, type variables).
pair_fvs :: (Id, CoreExpr) -> VarSet
pair_fvs (bndr, rhs) = exprFreeVars rhs `unionVarSet` idFreeVars bndr
        -- Don't forget variables mentioned in the
        -- rules of the bndr.  C.f. OccAnal.addRuleUsage
        -- Also tyvars mentioned in its type; they may not appear in the RHS
        --      type T a = Int
        --      x :: T a = 3
flattenDictBinds :: Bag DictBind -> [(Id,CoreExpr)] -> [(Id,CoreExpr)]
-- Prepend the (binder, rhs) pairs of every binding in the bag onto the
-- given list, discarding the cached free-variable sets.
flattenDictBinds dbs pairs = foldrBag add_bind pairs dbs
  where
    add_bind (NonRec b r, _) rest = (b, r) : rest
    add_bind (Rec prs,    _) rest = prs ++ rest
-- | Add several bindings at the tail end of the floated bindings in 'uds'.
snocDictBinds :: UsageDetails -> [CoreBind] -> UsageDetails
-- Add ud_binds to the tail end of the bindings in uds
snocDictBinds uds dbs
  = uds { ud_binds = ud_binds uds `unionBags`
                     foldr (consBag . mkDB) emptyBag dbs }

-- | Add one binding at the front of the floated bindings.
consDictBind :: CoreBind -> UsageDetails -> UsageDetails
consDictBind bind uds = uds { ud_binds = mkDB bind `consBag` ud_binds uds }

-- | Add already-paired 'DictBind's to the floated bindings.
addDictBinds :: [DictBind] -> UsageDetails -> UsageDetails
addDictBinds binds uds = uds { ud_binds = listToBag binds `unionBags` ud_binds uds }

-- | Add one binding at the tail end of the floated bindings.
snocDictBind :: UsageDetails -> CoreBind -> UsageDetails
snocDictBind uds bind = uds { ud_binds = ud_binds uds `snocBag` mkDB bind }
wrapDictBinds :: Bag DictBind -> [CoreBind] -> [CoreBind]
-- Put the bag's bindings (free-variable info discarded) in front of the
-- given top-level bindings.
wrapDictBinds dbs binds
  = foldrBag (\(bind, _) rest -> bind : rest) binds dbs

wrapDictBindsE :: Bag DictBind -> CoreExpr -> CoreExpr
-- Wrap the bag's bindings around an expression as nested Lets.
wrapDictBindsE dbs expr
  = foldrBag (\(bind, _) body -> Let bind body) expr dbs
----------------------
-- | Float out ("dump") every dictionary binding that mentions one of the
-- given binders, and remove any calls that mention or target them.
dumpUDs :: [CoreBndr] -> UsageDetails -> (UsageDetails, Bag DictBind)
-- Used at a lambda or case binder; just dump anything mentioning the binder
dumpUDs bndrs uds@(MkUD { ud_binds = orig_dbs, ud_calls = orig_calls })
  | null bndrs = (uds, emptyBag)  -- Common in case alternatives
  | otherwise  = -- pprTrace "dumpUDs" (ppr bndrs $$ ppr free_uds $$ ppr dump_dbs) $
                 (free_uds, dump_dbs)
  where
    free_uds = MkUD { ud_binds = free_dbs, ud_calls = free_calls }
    bndr_set = mkVarSet bndrs
    -- Transitively: a binding mentioning a dumped binding is dumped too.
    (free_dbs, dump_dbs, dump_set) = splitDictBinds orig_dbs bndr_set
    free_calls = deleteCallsMentioning dump_set $   -- Drop calls mentioning bndr_set on the floor
                 deleteCallsFor bndrs orig_calls    -- Discard calls for bndr_set; there should be
                                                    -- no calls for any of the dicts in dump_dbs

-- | Like 'dumpUDs', used when passing a binding group; additionally returns
-- whether any remaining call mentions a dumped binding, in which case the
-- caller must float everything.
dumpBindUDs :: [CoreBndr] -> UsageDetails -> (UsageDetails, Bag DictBind, Bool)
-- Used at a let(rec) binding group; dump anything mentioning the binders
dumpBindUDs bndrs (MkUD { ud_binds = orig_dbs, ud_calls = orig_calls })
  = -- pprTrace "dumpBindUDs" (ppr bndrs $$ ppr free_uds $$ ppr dump_dbs) $
    (free_uds, dump_dbs, float_all)
  where
    free_uds = MkUD { ud_binds = free_dbs, ud_calls = free_calls }
    bndr_set = mkVarSet bndrs
    (free_dbs, dump_dbs, dump_set) = splitDictBinds orig_dbs bndr_set
    free_calls = deleteCallsFor bndrs orig_calls
    -- True <=> some kept call mentions a dumped binding
    float_all = dump_set `intersectsVarSet` callDetailsFVs free_calls
-- | Extract the call instances recorded for 'fn', returning the usage
-- details without them.  For dictionary functions, calls whose arguments
-- depend (transitively, via the floated bindings) on 'fn' itself are
-- dropped; see Note [Specialisation of dictionary functions].
callsForMe :: Id -> UsageDetails -> (UsageDetails, [CallInfo])
callsForMe fn (MkUD { ud_binds = orig_dbs, ud_calls = orig_calls })
  = -- pprTrace ("callsForMe")
    --          (vcat [ppr fn,
    --                 text "Orig dbs ="     <+> ppr (_dictBindBndrs orig_dbs),
    --                 text "Orig calls ="   <+> ppr orig_calls,
    --                 text "Dep set ="      <+> ppr dep_set,
    --                 text "Calls for me =" <+> ppr calls_for_me]) $
    (uds_without_me, calls_for_me)
  where
    uds_without_me = MkUD { ud_binds = orig_dbs, ud_calls = delVarEnv orig_calls fn }
    calls_for_me = case lookupVarEnv orig_calls fn of
                     Nothing -> []
                     Just (CIS _ calls) -> filter_dfuns (Map.toList calls)
    -- Everything reachable from fn through the floated bindings
    dep_set = foldlBag go (unitVarSet fn) orig_dbs
    go dep_set (db,fvs) | fvs `intersectsVarSet` dep_set
                        = extendVarSetList dep_set (bindersOf db)
                        | otherwise = dep_set
    -- Note [Specialisation of dictionary functions]
    filter_dfuns | isDFunId fn = filter ok_call
                 | otherwise   = \cs -> cs
    ok_call (_, (_,fvs)) = not (fvs `intersectsVarSet` dep_set)
----------------------
-- | Partition a bag of bindings into those free of 'bndr_set' and those
-- that (transitively) mention it; also returns the grown set of dumped ids.
splitDictBinds :: Bag DictBind -> IdSet -> (Bag DictBind, Bag DictBind, IdSet)
-- Returns (free_dbs, dump_dbs, dump_set)
splitDictBinds dbs bndr_set
  = foldlBag split_db (emptyBag, emptyBag, bndr_set) dbs
                -- Important that it's foldl not foldr;
                -- we're accumulating the set of dumped ids in dump_set
  where
    split_db (free_dbs, dump_dbs, dump_idset) db@(bind, fvs)
        | dump_idset `intersectsVarSet` fvs     -- Dump it
        = (free_dbs, dump_dbs `snocBag` db,
           extendVarSetList dump_idset (bindersOf bind))
        | otherwise     -- Don't dump it
        = (free_dbs `snocBag` db, dump_dbs, dump_idset)
----------------------
-- | Drop every recorded call whose free variables intersect 'bs'.
deleteCallsMentioning :: VarSet -> CallDetails -> CallDetails
-- Remove calls *mentioning* bs
deleteCallsMentioning bs calls
  = mapVarEnv filter_calls calls
  where
    filter_calls :: CallInfoSet -> CallInfoSet
    filter_calls (CIS f calls) = CIS f (Map.filter keep_call calls)
    keep_call (_, fvs) = not (fvs `intersectsVarSet` bs)

-- | Drop the call records whose *target* is one of 'bs'.
deleteCallsFor :: [Id] -> CallDetails -> CallDetails
-- Remove calls *for* bs
deleteCallsFor bs calls = delVarEnvList calls bs
{-
************************************************************************
* *
\subsubsection{Boring helper functions}
* *
************************************************************************
-}
-- | The specialiser monad: a state monad carrying a unique supply and the
-- dynamic flags.
newtype SpecM a = SpecM (State SpecState a)

data SpecState = SpecState {
                     spec_uniq_supply :: UniqSupply,  -- fresh uniques for new binders
                     spec_dflags :: DynFlags          -- compiler flags, read-only here
                 }

instance Functor SpecM where
    fmap = liftM

instance Applicative SpecM where
    pure = return
    (<*>) = ap
instance Monad SpecM where
    -- Bind by unwrapping the newtype, sequencing in the underlying
    -- State monad, and re-unwrapping the continuation's result.
    SpecM x >>= f = SpecM (x >>= \y -> let SpecM z = f y in z)
    return = SpecM . return
    fail = SpecM . fail
instance MonadUnique SpecM where
    -- Split off half the stored supply and keep the other half.
    getUniqueSupplyM
        = SpecM $ do st <- get
                     let (us1, us2) = splitUniqSupply $ spec_uniq_supply st
                     put $ st { spec_uniq_supply = us2 }
                     return us1

    -- Take a single unique, storing the depleted supply back.
    getUniqueM
        = SpecM $ do st <- get
                     let (u,us') = takeUniqFromSupply $ spec_uniq_supply st
                     put $ st { spec_uniq_supply = us' }
                     return u

instance HasDynFlags SpecM where
    getDynFlags = SpecM $ liftM spec_dflags get
runSpecM :: DynFlags -> SpecM a -> CoreM a
-- Run a specialiser computation inside CoreM, seeding it with a fresh
-- unique supply and the given flags.
runSpecM dflags (SpecM spec)
  = do us <- getUniqueSupplyM
       return $ evalState spec SpecState { spec_uniq_supply = us
                                         , spec_dflags      = dflags }
mapAndCombineSM :: (a -> SpecM (b, UsageDetails)) -> [a] -> SpecM ([b], UsageDetails)
-- Map a usage-collecting action over a list, returning all results and
-- the combined usage details.
mapAndCombineSM f = go
  where
    go []     = return ([], emptyUDs)
    go (x:xs) = do { (y,  uds1) <- f x
                   ; (ys, uds2) <- go xs
                   ; return (y:ys, uds1 `plusUDs` uds2) }
-- | Extend the environment's substitution with type-variable bindings.
extendTvSubstList :: SpecEnv -> [(TyVar,Type)] -> SpecEnv
extendTvSubstList env tv_binds
  = env { se_subst = CoreSubst.extendTvSubstList (se_subst env) tv_binds }

-- | Apply the environment's substitution to a type.
substTy :: SpecEnv -> Type -> Type
substTy env ty = CoreSubst.substTy (se_subst env) ty

-- | Apply the environment's substitution to a coercion.
substCo :: SpecEnv -> Coercion -> Coercion
substCo env co = CoreSubst.substCo (se_subst env) co

-- | Substitute a binder, threading the updated substitution back into
-- the environment.
substBndr :: SpecEnv -> CoreBndr -> (SpecEnv, CoreBndr)
substBndr env bs = case CoreSubst.substBndr (se_subst env) bs of
                      (subst', bs') -> (env { se_subst = subst' }, bs')

-- | Like 'substBndr', for a list of binders.
substBndrs :: SpecEnv -> [CoreBndr] -> (SpecEnv, [CoreBndr])
substBndrs env bs = case CoreSubst.substBndrs (se_subst env) bs of
                      (subst', bs') -> (env { se_subst = subst' }, bs')

cloneBindSM :: SpecEnv -> CoreBind -> SpecM (SpecEnv, SpecEnv, CoreBind)
-- Clone the binders of the bind; return new bind with the cloned binders
-- Return the substitution to use for RHSs, and the one to use for the body
cloneBindSM env@(SE { se_subst = subst, se_interesting = interesting }) (NonRec bndr rhs)
  = do { us <- getUniqueSupplyM
       ; let (subst', bndr') = CoreSubst.cloneIdBndr subst us bndr
             -- Record the cloned binder as interesting if its RHS has structure
             interesting' | interestingDict env rhs
                          = interesting `extendVarSet` bndr'
                          | otherwise = interesting
         -- NonRec: the RHS uses the *old* environment (first component);
         -- only the body sees the cloned binder.
       ; return (env, env { se_subst = subst', se_interesting = interesting' }
                , NonRec bndr' rhs) }
cloneBindSM env@(SE { se_subst = subst, se_interesting = interesting }) (Rec pairs)
  = do { us <- getUniqueSupplyM
       ; let (subst', bndrs') = CoreSubst.cloneRecIdBndrs subst us (map fst pairs)
             env' = env { se_subst = subst'
                        , se_interesting = interesting `extendVarSetList`
                                           [ v | (v,r) <- pairs, interestingDict env r ] }
         -- Rec: RHSs and body both see the cloned binders.
       ; return (env', env', Rec (bndrs' `zip` map snd pairs)) }

newDictBndr :: SpecEnv -> CoreBndr -> SpecM CoreBndr
-- Make up completely fresh binders for the dictionaries
-- Their bindings are going to float outwards
newDictBndr env b = do { uniq <- getUniqueM
                       ; let n  = idName b
                             ty' = substTy env (idType b)
                       ; return (mkUserLocal (nameOccName n) uniq ty' (getSrcSpan n)) }

newSpecIdSM :: Id -> Type -> SpecM Id
-- Give the new Id a similar occurrence name to the old one
newSpecIdSM old_id new_ty
  = do { uniq <- getUniqueM
       ; let name   = idName old_id
             new_occ = mkSpecOcc (nameOccName name)
             new_id = mkUserLocal new_occ uniq new_ty (getSrcSpan name)
       ; return new_id }
{-
Old (but interesting) stuff about unboxed bindings
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What should we do when a value is specialised to a *strict* unboxed value?
map_*_* f (x:xs) = let h = f x
t = map f xs
in h:t
Could convert let to case:
map_*_Int# f (x:xs) = case f x of h# ->
let t = map f xs
in h#:t
This may be undesirable since it forces evaluation here, but the value
may not be used in all branches of the body. In the general case this
transformation is impossible since the mutual recursion in a letrec
cannot be expressed as a case.
There is also a problem with top-level unboxed values, since our
implementation cannot handle unboxed values at the top level.
Solution: Lift the binding of the unboxed value and extract it when it
is used:
map_*_Int# f (x:xs) = let h = case (f x) of h# -> _Lift h#
t = map f xs
in case h of
_Lift h# -> h#:t
Now give it to the simplifier and the _Lifting will be optimised away.
The benefit is that we have given the specialised "unboxed" values a
very simple lifted semantics and then leave it up to the simplifier to
optimise it --- knowing that the overheads will be removed in nearly
all cases.
In particular, the value will only be evaluated in the branches of the
program which use it, rather than being forced at the point where the
value is bound. For example:
filtermap_*_* p f (x:xs)
= let h = f x
t = ...
in case p x of
True -> h:t
False -> t
==>
filtermap_*_Int# p f (x:xs)
= let h = case (f x) of h# -> _Lift h#
t = ...
in case p x of
True -> case h of _Lift h#
-> h#:t
False -> t
The binding for h can still be inlined in the one branch and the
_Lifting eliminated.
Question: When won't the _Lifting be eliminated?
Answer: When they are at the top-level (where it is necessary) or when
inlining would duplicate work (or possibly code depending on
options). However, the _Lifting will still be eliminated if the
strictness analyser deems the lifted binding strict.
-}
|
fmthoma/ghc
|
compiler/specialise/Specialise.hs
|
bsd-3-clause
| 86,510
| 1
| 22
| 25,210
| 10,520
| 5,690
| 4,830
| -1
| -1
|
{-# LANGUAGE TypeFamilies, PatternGuards, CPP #-}
module Yesod.Core.Internal.LiteApp where
#if !(MIN_VERSION_base(4,11,0))
import Data.Semigroup (Semigroup(..))
#endif
import Yesod.Routes.Class
import Yesod.Core.Class.Yesod
import Yesod.Core.Class.Dispatch
import Yesod.Core.Types
import Yesod.Core.Content
import Data.Text (Text)
import Web.PathPieces
import Network.Wai
import Yesod.Core.Handler
import Yesod.Core.Internal.Run
import Network.HTTP.Types (Method)
import Data.Maybe (fromMaybe)
import Control.Applicative ((<|>))
import Control.Monad.Trans.Writer
-- | A minimal Yesod application: a routing function from request method
-- and path pieces to an optional handler.  'Nothing' means "no route".
newtype LiteApp = LiteApp
    { unLiteApp :: Method -> [Text] -> Maybe (LiteHandler TypedContent)
    }

instance Yesod LiteApp

instance YesodDispatch LiteApp where
    -- Run the matched handler, or 'notFound' when no route matches.
    yesodDispatch yre req =
        yesodRunner
            (fromMaybe notFound $ f (requestMethod req) (pathInfo req))
            yre
            (Just $ LiteAppRoute $ pathInfo req)
            req
      where
        LiteApp f = yreSite yre

instance RenderRoute LiteApp where
    -- Routes are just the raw path pieces; there is no query string.
    data Route LiteApp = LiteAppRoute [Text]
        deriving (Show, Eq, Read, Ord)
    renderRoute (LiteAppRoute x) = (x, [])

instance ParseRoute LiteApp where
    parseRoute (x, _) = Just $ LiteAppRoute x

instance Semigroup LiteApp where
    -- Try the left app first; fall back to the right one.
    LiteApp x <> LiteApp y = LiteApp $ \m ps -> x m ps <|> y m ps

instance Monoid LiteApp where
    -- The empty app matches nothing.
    mempty = LiteApp $ \_ _ -> Nothing
#if !(MIN_VERSION_base(4,11,0))
    mappend = (<>)
#endif

type LiteHandler = HandlerFor LiteApp
type LiteWidget = WidgetFor LiteApp
liteApp :: Writer LiteApp () -> LiteApp
-- Collapse a Writer of route fragments into one application.
liteApp routes = execWriter routes

dispatchTo :: ToTypedContent a => LiteHandler a -> Writer LiteApp ()
-- Register a handler for the current (fully consumed) path.  It only
-- fires when no path pieces remain.
dispatchTo handler = tell $ LiteApp $ \_ remaining ->
    case remaining of
        [] -> Just (fmap toTypedContent handler)
        _  -> Nothing
onMethod :: Method -> Writer LiteApp () -> Writer LiteApp ()
-- Restrict the nested routes to a single request method.
onMethod method f = tell $ LiteApp go
  where
    go m ps
      | method == m = unLiteApp (liteApp f) m ps
      | otherwise   = Nothing
onStatic :: Text -> Writer LiteApp () -> Writer LiteApp ()
-- Match and consume one literal path piece, delegating the rest of the
-- path to the nested routes.
onStatic p0 f = tell $ LiteApp go
  where
    go m (p:ps) | p == p0 = unLiteApp (liteApp f) m ps
    go _ _                = Nothing
withDynamic :: PathPiece p => (p -> Writer LiteApp ()) -> Writer LiteApp ()
-- Parse one path piece into a value and hand it to the nested routes;
-- fails when the path is empty or the piece does not parse.
withDynamic f = tell $ LiteApp go
  where
    go m (p:ps) = case fromPathPiece p of
        Just v  -> unLiteApp (liteApp (f v)) m ps
        Nothing -> Nothing
    go _ [] = Nothing
withDynamicMulti :: PathMultiPiece ps => (ps -> Writer LiteApp ()) -> Writer LiteApp ()
-- Parse *all* remaining path pieces at once; the nested routes then see
-- an empty path.
withDynamicMulti f = tell $ LiteApp $ \m ps ->
    fromPathMultiPiece ps >>= \v -> unLiteApp (liteApp (f v)) m []
|
geraldus/yesod
|
yesod-core/src/Yesod/Core/Internal/LiteApp.hs
|
mit
| 2,644
| 0
| 14
| 608
| 923
| 483
| 440
| 68
| 2
|
module Ho.Library(
LibDesc(..),
collectLibraries,
libModMap,
libHash,
libMgHash,
libProvides,
libName,
libBaseName,
libHoLib,
preprocess,
listLibraries
) where
import Util.Std
import Data.Version
import System.Directory
import Text.Printf
import qualified Data.Map as Map
import qualified Data.Set as Set
import Ho.Binary
import Ho.ReadSource
import Ho.Type
import Name.Name(Module)
import Options
import PackedString(PackedString,packString,unpackPS)
import Util.Gen
import Util.YAML
import qualified Support.MD5 as MD5
-- Small accessors over a loaded 'Library'.  The lazy (~) patterns on
-- hohName assume the header of a library file always carries
-- Right (name, version).
libModMap = hoModuleMap . libHoLib
libHash = hohHash . libHoHeader
-- | Hash identifying a particular module group within a library.
libMgHash mg lib = MD5.md5String $ show (libHash lib,mg)
-- | Modules belonging to the given module group.
libProvides mg lib = [ m | (m,mg') <- Map.toList (libModMap lib), mg == mg']
-- | "name-version" display string.
libName lib = let HoHeader { hohName = ~(Right (name,vers)) } = libHoHeader lib in unpackPS name ++ "-" ++ showVersion vers
libVersion lib = let HoHeader { hohName = ~(Right (_name,vers)) } = libHoHeader lib in vers
libBaseName lib = let HoHeader { hohName = ~(Right (name,_vers)) } = libHoHeader lib in name
-- | (directly exported modules, re-exported modules with their origins)
libModules l = let lib = libHoLib l in ([ m | (m,_) <- Map.toList (hoModuleMap lib)],Map.toList (hoReexports lib))
libVersionCompare l1 l2 = compare (libVersion l1) (libVersion l2)
--------------------------------
-- finding and listing libraries
--------------------------------

instance ToNode Module where
    toNode m = toNode $ show m
instance ToNode HoHash where
    toNode m = toNode $ show m
instance ToNode PackedString where
    toNode m = toNode $ unpackPS m

-- | Print all libraries found on the search path as YAML: just the names
-- normally, or full per-library details when verbose.
listLibraries :: IO ()
listLibraries = do
    (_,byhashes) <- fetchAllLibraries
    let libs = Map.toList byhashes
    if not verbose then putStr $ showYAML (sort $ map (libName . snd) libs) else do
    let f (h,l) = (show h,[
            ("Name",toNode (libName l)),
            ("BaseName",toNode (libBaseName l)),
            ("Version",toNode (showVersion $ libVersion l)),
            ("FilePath",toNode (libFileName l)),
            ("LibDeps",toNode [ h | (_,h) <- hohLibDeps (libHoHeader l)]),
            ("Exported-Modules",toNode $ mod ++ fsts rmod)
            ]) where
            (mod,rmod) = libModules l
    putStr $ showYAML (map f libs)
-- Collect all libraries and return those which are explicitly and implicitly imported.
--
-- The basic process is:
-- - Find all libraries and create two indexes, a map of named libraries to
-- the newest version of them, and a map of library hashes to the libraries
-- themselves.
--
-- - For all the libraries listed on the command line, find the newest
-- version of each of them, flag these as the explicitly imported libraries.
--
-- - recursively find the dependencies by the hashes listed in the library deps. if the names
-- match a library already loaded, ensure the hash matches up. flag these libraries as 'implicit' unless
-- already flagged 'explicit'
--
-- - perform sanity checks on final lists of implicit and explicit libraries.
--
-- Library Checks needed:
-- - We have found versions of all libraries listed on the command line
-- - We have all dependencies of all libraries and the hash matches the proper library name
-- - no libraries directly export the same modules, (but re-exporting the same module is fine)
-- - conflicting versions of any particular library are not required due to dependencies
-- | Scan every directory on the library search path for *.hl files and
-- index them two ways: by base name (versions newest-first) and by hash.
-- Unreadable directories and files are silently skipped (best effort).
fetchAllLibraries :: IO (Map.Map PackedString [Library],Map.Map HoHash Library)
fetchAllLibraries = ans where
    ans = do
        (bynames',byhashes') <- unzip `fmap` concatMapM f (optHlPath options)
        let bynames = Map.map (reverse . sortBy libVersionCompare) $ Map.unionsWith (++) bynames'
            byhashes = Map.unions byhashes'
        return (bynames,byhashes)
    f fp = do
        fs <- flip iocatch (\_ -> return [] ) $ getDirectoryContents fp
        forM fs $ \e -> case reverse e of
            -- matches a ".hl" suffix; 'r' (the reversed basename) is unused
            ('l':'h':'.':r) -> flip iocatch (\_ -> return mempty) $ do
                lib <- readHlFile (fp ++ "/" ++ e)
                return (Map.singleton (libBaseName lib) [lib], Map.singleton (libHash lib) lib)
            _ -> return mempty
splitOn' :: (a -> Bool) -> [a] -> [[a]]
-- Split a list on elements satisfying the predicate.  Separators are
-- dropped; the result always has at least one (possibly empty) chunk.
splitOn' p = go
  where
    go ys = case break p ys of
        (chunk, [])       -> [chunk]
        (chunk, _ : rest) -> chunk : go rest
-- | Split a "name-1.2.3" string into the name and its version.  Only the
-- final '-'-separated chunk is tried as a version; if it does not parse,
-- the whole string is the name and the version is empty.
splitVersion :: String -> (String,Data.Version.Version)
splitVersion s = ans where
    ans = case reverse (splitOn' ('-' ==) s) of
        (vrs:bs@(_:_)) | Just vrs <- runReadP parseVersion vrs -> (intercalate "-" (reverse bs),vrs)
        _ -> (s,Data.Version.Version [] [])

-- returns (explicitly imported libraries, implicitly imported libraries, full library map)
collectLibraries :: [String] -> IO ([Library],[Library],Map.Map PackedString [Library])
collectLibraries libs = ans where
    ans = do
        (bynames,byhashes) <- fetchAllLibraries
        -- Resolve one command-line spec: first by name (+version prefix),
        -- then, when no version was given, by literal hash.
        let f (pn,vrs) = lname pn vrs `mplus` lhash pn vrs where
                lname pn vrs = do
                    xs <- Map.lookup (packString pn) bynames
                    (x:_) <- return $ filter isGood xs
                    return x
                isGood lib = versionBranch vrs `isPrefixOf` versionBranch (libVersion lib)
                lhash pn vrs = do
                    [] <- return $ versionBranch vrs
                    Map.lookup pn byhashes'
                byhashes' = Map.fromList [ (show x,y) | (x,y) <- Map.toList byhashes]
        let es' = [ (x,f $ splitVersion x) | x <- libs ]
            es = [ l | (_,Just l) <- es' ]
            bad = [ n | (n,Nothing) <- es' ]
        unless (null bad) $ do
            putErrLn "Libraries not found:"
            forM_ bad $ \b -> putErrLn ("  " ++ b)
            exitFailure
        checkForModuleConficts es
        -- Breadth-first dependency closure.  lmap: base name -> (explicit?,
        -- library); lset: hashes already visited.  Conflicting versions of
        -- the same base name abort with an error.
        let f lmap _ [] = return lmap
            f lmap lset ((ei,l):ls)
                | libHash l `Set.member` lset = f lmap lset ls
                | otherwise = case Map.lookup (libBaseName l) lmap of
                    Nothing -> f (Map.insert (libBaseName l) (ei,l) lmap) (Set.insert (libHash l) lset) (ls ++ newdeps)
                    Just (ei',l') | libHash l == libHash l' -> f (Map.insert (libBaseName l) (ei || ei',l) lmap) lset ls
                    Just (_,l') -> putErrDie $ printf "Conflicting versions of library '%s' are required. [%s]\n" (libName l) (show (libHash l,libHash l'))
              where newdeps = [ (False,fromMaybe (error $ printf "Dependency '%s' with hash '%s' needed by '%s' was not found" (unpackPS p) (show h) (libName l)) (Map.lookup h byhashes)) | let HoHeader { hohLibDeps = ldeps } = libHoHeader l , (p,h) <- ldeps ]
        finalmap <- f Map.empty Set.empty [ (True,l) | l <- es ]
        checkForModuleConficts [ l | (_,l) <- Map.elems finalmap ]
        when verbose $ forM_ (Map.toList finalmap) $ \ (n,(e,l)) ->
            printf "-- Base: %s Exported: %s Hash: %s Name: %s\n" (unpackPS n) (show e) (show $ libHash l) (libName l)
        return ([ l | (True,l) <- Map.elems finalmap ],[ l | (False,l) <- Map.elems finalmap ],bynames)
    -- Die if any module is directly exported by more than one library.
    -- (Name kept as-is -- "Conficts" -- to avoid touching other call sites.)
    checkForModuleConficts ms = do
        let mbad = Map.toList $ Map.filter (\c -> case c of [_] -> False; _ -> True) $ Map.fromListWith (++) [ (m,[l]) | l <- ms, m <- fst $ libModules l]
        forM_ mbad $ \ (m,l) -> putErrLn $ printf "Module '%s' is exported by multiple libraries: %s" (show m) (show $ map libName l)
        unless (null mbad) $ putErrDie "There were conflicting modules!"
|
hvr/jhc
|
src/Ho/Library.hs
|
mit
| 7,464
| 0
| 22
| 1,954
| 2,541
| 1,322
| 1,219
| -1
| -1
|
module Let1 where
import Control.Parallel.Strategies (rpar, runEval)
-- Parallel fibonacci: spark both recursive calls with 'rpar' inside the
-- Eval monad, then sum the results.  (This file is a refactoring-tool
-- fixture, so the shape of the let/do is deliberate.)
fib n | n <= 1 = 1
      | otherwise = let n1 = fib (n-1)
                        n2 = fib (n-2)
                        (n1_2, n2_2)
                          = runEval
                              (do n1_2 <- rpar n1
                                  n2_2 <- rpar n2
                                  return (n1_2, n2_2)) in n1_2 + n2_2 + 1
|
RefactoringTools/HaRe
|
old/testing/evalAddEvalMon/Let1_TokOut.hs
|
bsd-3-clause
| 461
| 0
| 15
| 275
| 139
| 70
| 69
| 10
| 1
|
module Multi1 where
import Control.Parallel.Strategies
-- Parallel fibonacci: spark both recursive calls with 'rpar' inside the
-- Eval monad, then sum the results.
fib n
  | n <= 1 = 1
  | otherwise = n1_2 + n2_2 + 1
  where
    n1 = fib (n-1)
    n2 = fib (n-2)
    (n1_2, n2_2)
      = runEval
          (do n1_2 <- rpar n1
              n2_2 <- rpar n2
              return (n1_2, n2_2))
    -- NOTE(review): removed a stray 'n1_2 = fib 42' binding that conflicted
    -- with the tuple pattern above; GHC rejects two definitions of the same
    -- name in one binding group.
|
RefactoringTools/HaRe
|
old/testing/introThreshold/Multi1.hs
|
bsd-3-clause
| 364
| 0
| 12
| 180
| 135
| 68
| 67
| 14
| 1
|
{-# LANGUAGE CPP #-}
module Distribution.Simple.HaskellSuite where
import Control.Monad
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Data.Maybe
import Data.Version
import qualified Data.Map as M (empty)
import Distribution.Simple.Program
import Distribution.Simple.Compiler as Compiler
import Distribution.Simple.Utils
import Distribution.Simple.BuildPaths
import Distribution.Verbosity
import Distribution.Text
import Distribution.Package
import Distribution.InstalledPackageInfo hiding (includeDirs)
import Distribution.Simple.PackageIndex as PackageIndex
import Distribution.PackageDescription
import Distribution.Simple.LocalBuildInfo
import Distribution.System (Platform)
import Distribution.Compat.Exception
import Language.Haskell.Extension
import Distribution.Simple.Program.Builtin
(haskellSuiteProgram, haskellSuitePkgProgram)
-- | Locate and configure a haskell-suite compiler and its package tool.
-- The compiler path must come from the user (-w PATH); any --with-hc-pkg
-- value is ignored because the pkg tool is the same executable invoked
-- with a leading "pkg" argument.
configure
  :: Verbosity -> Maybe FilePath -> Maybe FilePath
  -> ProgramConfiguration -> IO (Compiler, Maybe Platform, ProgramConfiguration)
configure verbosity mbHcPath hcPkgPath conf0 = do

  -- We have no idea how a haskell-suite tool is named, so we require at
  -- least some information from the user.
  hcPath <-
    let msg = "You have to provide name or path of a haskell-suite tool (-w PATH)"
    in maybe (die msg) return mbHcPath

  when (isJust hcPkgPath) $
    warn verbosity "--with-hc-pkg option is ignored for haskell-suite"

  (comp, confdCompiler, conf1) <- configureCompiler hcPath conf0

  -- Update our pkg tool. It uses the same executable as the compiler, but
  -- all command start with "pkg"
  (confdPkg, _) <- requireProgram verbosity haskellSuitePkgProgram conf1
  let conf2 =
        updateProgram
          confdPkg
            { programLocation = programLocation confdCompiler
            , programDefaultArgs = ["pkg"]
            }
          conf1

  return (comp, Nothing, conf2)

  where
    configureCompiler hcPath conf0' = do
      let
        haskellSuiteProgram' =
          haskellSuiteProgram
            { programFindLocation = \v _p -> findProgramLocation v hcPath }

      -- NB: cannot call requireProgram right away — it'd think that
      -- the program is already configured and won't reconfigure it again.
      -- Instead, call configureProgram directly first.
      conf1 <- configureProgram verbosity haskellSuiteProgram' conf0'
      (confdCompiler, conf2) <- requireProgram verbosity haskellSuiteProgram' conf1

      -- Interrogate the configured tool for its capabilities.
      extensions <- getExtensions verbosity confdCompiler
      languages <- getLanguages verbosity confdCompiler
      (compName, compVersion) <-
        getCompilerVersion verbosity confdCompiler

      let
        comp = Compiler {
          compilerId = CompilerId (HaskellSuite compName) compVersion,
          compilerAbiTag = Compiler.NoAbiTag,
          compilerCompat = [],
          compilerLanguages = languages,
          compilerExtensions = extensions,
          compilerProperties = M.empty
        }

      return (comp, confdCompiler, conf2)

-- | Version of the haskell-suite packaging protocol (--hspkg-version).
hstoolVersion :: Verbosity -> FilePath -> IO (Maybe Version)
hstoolVersion = findProgramVersion "--hspkg-version" id

-- | Version of the compiler itself; the tool prints "<name> <version>",
-- so take the last word.  NOTE(review): 'last . words' is partial on
-- empty output — confirm findProgramVersion tolerates that.
numericVersion :: Verbosity -> FilePath -> IO (Maybe Version)
numericVersion = findProgramVersion "--compiler-version" (last . words)
-- | Ask the tool for its name and version via --compiler-version.
-- Expected output shape: "<name words...> <version>"; the last word is
-- the version, everything before it (concatenated) is the name.
getCompilerVersion :: Verbosity -> ConfiguredProgram -> IO (String, Version)
getCompilerVersion verbosity prog = do
  output <- rawSystemStdout verbosity (programPath prog) ["--compiler-version"]
  case words output of
    -- Previously 'init'/'last' were applied unconditionally, so an empty
    -- output crashed with "Prelude.last: empty list" instead of a
    -- diagnostic; fail cleanly instead.
    [] -> die "haskell-suite: couldn't determine compiler version"
    parts -> do
      let
        name = concat $ init parts -- there shouldn't be any spaces in the name anyway
        versionStr = last parts
      version <-
        maybe (die "haskell-suite: couldn't determine compiler version") return $
          simpleParse versionStr
      return (name, version)
-- | Ask the compiler which extensions it supports and pair each with the
-- "-X<ext>" flag used to enable it.  Lines that fail to parse are dropped.
getExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Compiler.Flag)]
getExtensions verbosity prog = do
  out <- rawSystemStdout verbosity (programPath prog) ["--supported-extensions"]
  return [ (ext, "-X" ++ display ext) | Just ext <- map simpleParse (lines out) ]

-- | Ask the compiler which languages it supports and pair each with the
-- "-G<lang>" flag used to select it.  Lines that fail to parse are dropped.
getLanguages :: Verbosity -> ConfiguredProgram -> IO [(Language, Compiler.Flag)]
getLanguages verbosity prog = do
  out <- rawSystemStdout verbosity (programPath prog) ["--supported-languages"]
  return [ (lang, "-G" ++ display lang) | Just lang <- map simpleParse (lines out) ]
-- Other compilers do some kind of a packagedb stack check here. Not sure
-- if we need something like that as well.
-- | Dump every package database on the stack via "pkg dump" and parse the
-- concatenated InstalledPackageInfo records (separated by "---" lines).
getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramConfiguration
                     -> IO InstalledPackageIndex
getInstalledPackages verbosity packagedbs conf =
  liftM (PackageIndex.fromList . concat) $ forM packagedbs $ \packagedb ->
    do str <-
        getDbProgramOutput verbosity haskellSuitePkgProgram conf
                ["dump", packageDbOpt packagedb]
         `catchExit` \_ -> die $ "pkg dump failed"
       case parsePackages str of
         Right ok -> return ok
         _       -> die "failed to parse output of 'pkg dump'"

  where
    -- All records must parse; a single failure fails the whole dump.
    parsePackages str =
      let parsed = map parseInstalledPackageInfo (splitPkgs str)
       in case [ msg | ParseFailed msg <- parsed ] of
            []   -> Right [ pkg | ParseOk _ pkg <- parsed ]
            msgs -> Left msgs

    -- Records are separated by lines consisting solely of "---".
    splitPkgs :: String -> [String]
    splitPkgs = map unlines . splitWith ("---" ==) . lines
      where
        splitWith :: (a -> Bool) -> [a] -> [[a]]
        splitWith p xs = ys : case zs of
                           []   -> []
                           _:ws -> splitWith p ws
          where (ys,zs) = break p xs
-- | Compile a library component by handing the haskell-suite tool the
-- source dirs, include dirs, package databases, dependencies, language,
-- extensions, cpp options and module list on its command line.
buildLib
  :: Verbosity -> PackageDescription -> LocalBuildInfo
  -> Library -> ComponentLocalBuildInfo -> IO ()
buildLib verbosity pkg_descr lbi lib clbi = do
  -- In future, there should be a mechanism for the compiler to request any
  -- number of the above parameters (or their parts) — in particular,
  -- pieces of PackageDescription.
  --
  -- For now, we only pass those that we know are used.

  let odir = buildDir lbi
      bi = libBuildInfo lib
      srcDirs = hsSourceDirs bi ++ [odir]
      dbStack = withPackageDB lbi
      language = fromMaybe Haskell98 (defaultLanguage bi)
      conf = withPrograms lbi
      pkgid = packageId pkg_descr

  runDbProgram verbosity haskellSuiteProgram conf $
    [ "compile", "--build-dir", odir ] ++
    concat [ ["-i", d] | d <- srcDirs ] ++
    concat [ ["-I", d] | d <- [autogenModulesDir lbi, odir] ++ includeDirs bi ] ++
    [ packageDbOpt pkgDb | pkgDb <- dbStack ] ++
    [ "--package-name", display pkgid ] ++
    concat [ ["--package-id", display ipkgid ]
           | (ipkgid, _) <- componentPackageDeps clbi ] ++
    ["-G", display language] ++
    concat [ ["-X", display ex] | ex <- usedExtensions bi ] ++
    cppOptions (libBuildInfo lib) ++
    [ display modu | modu <- libModules lib ]

-- | Install a built library by delegating to the tool's "install-library"
-- command with the build/target directories and module list.
installLib
  :: Verbosity
  -> LocalBuildInfo
  -> FilePath  -- ^install location
  -> FilePath  -- ^install location for dynamic libraries
  -> FilePath  -- ^Build location
  -> PackageDescription
  -> Library
  -> IO ()
installLib verbosity lbi targetDir dynlibTargetDir builtDir pkg lib = do
  let conf = withPrograms lbi
  runDbProgram verbosity haskellSuitePkgProgram conf $
    [ "install-library"
    , "--build-dir", builtDir
    , "--target-dir", targetDir
    , "--dynlib-target-dir", dynlibTargetDir
    , "--package-id", display $ packageId pkg
    ] ++ map display (libModules lib)

-- | Register the package by piping its InstalledPackageInfo to the pkg
-- tool's "update" command, targeting the last (topmost) database.
registerPackage
  :: Verbosity
  -> InstalledPackageInfo
  -> PackageDescription
  -> LocalBuildInfo
  -> Bool
  -> PackageDBStack
  -> IO ()
registerPackage verbosity installedPkgInfo _pkg lbi _inplace packageDbs = do
  (hspkg, _) <- requireProgram verbosity haskellSuitePkgProgram (withPrograms lbi)

  runProgramInvocation verbosity $
    (programInvocation hspkg
      ["update", packageDbOpt $ last packageDbs])
      { progInvokeInput = Just $ showInstalledPackageInfo installedPkgInfo }

-- | Create a fresh, empty package database at 'dbPath'.
initPackageDB :: Verbosity -> ProgramConfiguration -> FilePath -> IO ()
initPackageDB verbosity conf dbPath =
  runDbProgram verbosity haskellSuitePkgProgram conf
    ["init", dbPath]
packageDbOpt :: PackageDB -> String
-- Render a package-database selection as the pkg tool's command-line flag.
packageDbOpt pdb = case pdb of
  GlobalPackageDB      -> "--global"
  UserPackageDB        -> "--user"
  SpecificPackageDB db -> "--package-db=" ++ db
|
DavidAlphaFox/ghc
|
libraries/Cabal/Cabal/Distribution/Simple/HaskellSuite.hs
|
bsd-3-clause
| 8,345
| 0
| 21
| 1,869
| 2,008
| 1,044
| 964
| 176
| 4
|
{-# LANGUAGE GADTs, KindSignatures #-}
-- Test a couple of trivial things:
-- explicit layout
-- trailing semicolons
-- kind signatures
module ShouldCompile where
-- | A GADT indexed by the type its expression evaluates to, written with
-- an explicit kind signature and explicit layout (braces/semicolons),
-- which is what this test exercises.
data Expr :: * -> * where {
      EInt :: Int -> Expr Int ;
      EBool :: Bool -> Expr Bool ;
      EIf :: (Expr Bool) -> (Expr a) -> (Expr a) -> Expr a ;
        -- Note trailing semicolon, should be ok
  }
|
urbanslug/ghc
|
testsuite/tests/gadt/gadt8.hs
|
bsd-3-clause
| 437
| 0
| 10
| 161
| 88
| 52
| 36
| 6
| 0
|
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses #-}
module ShouldFail where
-- must fail: defaults have no patterns
-- Deliberately ill-formed (per the "must fail" note above): the default
-- for an associated type may not pattern-match on its arguments, so
-- 'type S2 Int = Char' must be rejected by the compiler.
class C2 a b where
  type S2 a :: *
  type S2 Int = Char
|
siddhanathan/ghc
|
testsuite/tests/indexed-types/should_fail/SimpleFail4.hs
|
bsd-3-clause
| 176
| 0
| 6
| 36
| 33
| 20
| 13
| 5
| 0
|
module HsPredictor.ExportCSV where
-- standard
import Control.Monad (liftM)
import Data.Text (pack)
import System.IO (appendFile)
-- 3rd party
import Database.Esqueleto ((^.))
import qualified Database.Esqueleto as E
import Database.Persist.Sql
import Database.Persist.Sqlite (runSqlite)
-- own
import HsPredictor.LoadCSV (getFileContents, insertMatch)
import HsPredictor.Models
import HsPredictor.ParserCSV (readMatches)
import HsPredictor.Queries
import HsPredictor.Types
{-| Scale a value from the integer range [lo, hi] to the real range
[-1, 1].  A degenerate range (hi == lo) maps everything to -1.

The parameters were previously named @min@/@max@, shadowing the Prelude
functions; they are renamed here (no behavioural change). -}
normalize :: Int -- ^ current value
          -> Int -- ^ minimum of the range
          -> Int -- ^ maximum of the range
          -> Double
normalize val lo hi = scaled / width - 1
  where
    scaled = fromIntegral $ 2*(val - lo) :: Double
    -- a zero-width range would divide by zero; treat it as width 1
    width = case hi - lo of
      0 -> 1 :: Double
      v -> fromIntegral v :: Double
-- | Decide a match outcome from the goals scored by each side.
outcome :: Int -- ^ goals scored by home team
        -> Int -- ^ goals scored by away team
        -> Outcome
outcome home away = case compare home away of
  GT -> HomeWin
  LT -> AwayWin
  EQ -> NoWinner
{-| Split a sorted list of matches into runs of equal elements
(per the original docstring, matches sharing the same date end up in
the same sub-list). -}
genListsByDate :: [Match] -- ^ list of matches (must be sorted)
               -> [[Match]] -- ^ matches grouped by date
genListsByDate [] = []
genListsByDate (x:xs) = (x : same) : genListsByDate rest
  where
    -- 'span' replaces the original takeWhile/dropWhile pair (one pass
    -- instead of two).  The original also pattern-matched on
    -- 'otherwise' inside a case alternative, which merely shadows the
    -- Prelude binding and acts as a wildcard — a well-known pitfall.
    (same, rest) = span sameAsHead xs
    sameAsHead y = x `compare` y == EQ
{-| Fetch a team's win/draw/loss stats, each already scaled to the
range [-1,1]. -}
getScaledStats :: String -- ^ name of database file
               -> String -- ^ name of a team
               -> IO Scaled
getScaledStats dbname team = do
  wins   <- getScaledWin dbname team
  draws  <- getScaledDraw dbname team
  losses <- getScaledLoss dbname team
  return $ Scaled wins draws losses
{-| Fetch one stat column for a team and scale it to [-1,1].
A value below the recorded minimum clamps to -1. -}
getScaledStat :: EntityField StatsTable Int -- ^ StatsTable column
              -> String -- ^ database name
              -> String -- ^ team name
              -> IO Double
getScaledStat stat dbname team = do
  current <- getStat dbname team stat
  upper   <- getMaxStat dbname stat
  lower   <- getMinStat dbname stat
  return $ if current < lower
             then -1
             else normalize current lower upper
-- | Win count scaled to [-1,1].
getScaledWin :: String -> String -> IO Double
getScaledWin = getScaledStat StatsTableWin
-- | Draw count scaled to [-1,1].
getScaledDraw :: String -> String -> IO Double
getScaledDraw = getScaledStat StatsTableDraw
-- | Loss count scaled to [-1,1].
getScaledLoss :: String -> String -> IO Double
getScaledLoss = getScaledStat StatsTableLoss
{-| Build one block of export data: both teams' scaled stats on one
line, the match outcome on the next. -}
prepareLine :: String -- ^ path to Database
            -> String -- ^ home team name
            -> String -- ^ away team name
            -> Outcome -> IO String
prepareLine dbpath home away out = do
  homeStats <- getScaledStats dbpath home
  awayStats <- getScaledStats dbpath away
  return $ concat [show homeStats, show awayStats, "\n", show out, "\n"]
{-| Run the prepared-line action and append its result to the export
file. -}
writeExport :: String -- ^ path to export file
            -> IO String -- ^ prepared line
            -> IO ()
writeExport fpath prepared = prepared >>= appendFile fpath
{-| Insert matches to database. Write data to export file -}
processRound :: String -- ^ path to database
             -> String -- ^ path to export file
             -> [Match] -- ^ list of matches with the same date
             -> IO ()
processRound dbPath fpath m = do
  -- ensure the schema exists before any stats queries run
  runSqlite (pack dbPath) $ runMigrationSilent migrateAll
  -- matches with ghM < 0 are skipped (presumably fixtures without a
  -- recorded result — confirm against the CSV parser)
  let matches = filter (\x -> ghM x >= 0) m
  let lines = map prepare matches
  -- export first, insert afterwards: the exported stats therefore
  -- reflect the state of the tables *before* this round
  mapM_ (writeExport fpath) lines
  insertRound m dbPath
  where
    prepare x = let home = homeM x
                    away = awayM x
                    out = outcome (ghM x) (gaM x)
                in prepareLine dbPath home away out
{-| Persist a round's matches into the database (migrating first). -}
insertRound :: [Match] -> String -> IO ()
insertRound xs dbPath =
  runSqlite (pack dbPath) $
    runMigrationSilent migrateAll >> mapM_ insertMatch xs
{-| Prefix the export file with its header: number of exported matches,
number of input neurons, number of output neurons.

The previous implementation used 'head' and @(!! 1)@, which crash on a
file with fewer than two lines; such files are now left untouched. -}
addHeader :: String -- ^ path to export file
          -> IO ()
addHeader path = do
  contents <- lines `liftM` getFileContents path
  case contents of
    (inputLine : outputLine : _) -> do
      -- neuron counts are read off the first two lines; every pair of
      -- lines in the body is one exported match
      let input   = show . length . words $ inputLine
          output  = show . length . words $ outputLine
          matches = show $ length contents `div` 2
          header  = matches ++ " " ++ input ++ " " ++ output ++ "\n"
      writeFile path $ header ++ unlines contents
    -- fewer than two lines: nothing to measure, leave the file alone
    _ -> return ()
{-| Insert a CSV file into the database and write training data to the
export file, processing the matches round by round in date order. -}
export :: String -- ^ path to database
       -> String -- ^ path to export file
       -> String -- ^ path to csv file
       -> IO ()
export dbPath expPath csvPath = do
  ms <- getFileContents csvPath
  let matches = readMatches $ lines ms
  let rounds = genListsByDate matches
  mapM_ (processRound dbPath expPath) rounds
  -- the trailing 'return ()' of the original was redundant:
  -- 'addHeader' already has type IO ()
  addHeader expPath
|
Taketrung/HsPredictor
|
library/HsPredictor/ExportCSV.hs
|
mit
| 5,105
| 0
| 14
| 1,494
| 1,308
| 661
| 647
| 115
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module Y2017.M10.D24.Exercise where
{--
So, yesterday, we loaded in a slice of the NYT archive then saved it off as
JSON (the articles, proper) and haskell lists (the subject-linking information).
So, what are we going to do today?
Load back in that slice, obviously.
--}
import qualified Codec.Compression.GZip as GZ
import Data.Aeson
import qualified Data.ByteString.Lazy.Char8 as BL
-- below import available via 1HaskellADay git repository
import Y2017.M10.D23.Exercise
-- first up: load in the subject linking information:
-- (every body below is intentionally 'undefined' — this is an exercise
-- file for the reader to complete)
artIdsBySubj :: FilePath -> IO [Integer]
artIdsBySubj file = undefined
-- Now load in the articles, themselves, from the compressed archive
-- (my compressed archive is around the topic of hurricanes and floods)
instance FromJSON Article where
   parseJSON art = undefined
articlesFromFile :: FilePath -> IO [Article]
articlesFromFile file = undefined
{--
Now that you have the articles by the topic you chose, partition the article
by subtopic, ... a bit of triage. For me, I choose "Hurricanes" so I'm
partitioning articles by "Maria" "Harvey" "Irma" and none of those. For the
topic you choose you 'may' (and for this exercise 'may' means 'shall') choose
to partition your topic into subtopics (one of which being a catch-all.
How do we do this?
I'm thinking applicative functors, ... or something like them ...
--}
type Subcategory a = [a]
seed :: Subcategory Int
seed = [0,0,0,0] -- Maria, Harvey, Irma, none
-- For each article that mentions Maria, increment the Maria-count.
-- For each article that mentions Harvey, ...
-- etc, etc, and you get me
hasSubcategory :: String -> Article -> (Int -> Int)
hasSubcategory hurr art = undefined
-- from that, we get an applicative functor of hurricanes that we can <*>
-- How many articles do you have? How many of each subtopic do you have?
-- How many catchalls do you have (that is, no categorization?)
categorizor :: [Article] -> Subcategory Int
categorizor arts = undefined
-- And, finally, write out a report in human readable form of your subcategories
reportSubcategories :: Show a => Subcategory a -> IO ()
reportSubcategories cats = undefined
|
geophf/1HaskellADay
|
exercises/HAD/Y2017/M10/D24/Exercise.hs
|
mit
| 2,191
| 0
| 8
| 376
| 236
| 141
| 95
| 21
| 1
|
module ReflexExtensions where
import Reflex
import Reflex.Dom
import Data.Monoid ((<>))
-- | @\<meta name="viewport" content=...\>@ tag.
metaViewport :: MonadWidget t m => String -> m ()
metaViewport s = elAttr "meta" ("name" =: "viewport" <> "content" =: s) blank
-- | @\<meta charset=...\>@ tag.
metaCharSet :: MonadWidget t m => String -> m ()
metaCharSet s = elAttr "meta" ("charset" =: s) blank
-- | Shortcut for a UTF-8 charset meta tag.
metaUtf8 :: MonadWidget t m => m ()
metaUtf8 = metaCharSet "utf-8"
-- | External stylesheet @\<link\>@ element.
stylesheet :: MonadWidget t m => String -> m ()
stylesheet s = elAttr "link" ("rel" =: "stylesheet" <> "href" =: s) blank
-- | Inline @\<style\>@ element containing the given CSS text.
styleInline :: MonadWidget t m => String -> m ()
styleInline s = el "style" $ text s
-- | External @\<script src=...\>@ element.
scriptSrc :: MonadWidget t m => String -> m ()
scriptSrc s = elAttr "script" ("src" =: s) blank
-- | Label for a form element, without an extra CSS class.
label :: MonadWidget t m => String -> String -> m ()
label l f = labelClass l f ""
-- | l - label text, f - target element id, c - CSS class.
labelClass :: MonadWidget t m => String -> String -> String -> m ()
labelClass l f c = elAttr "label" ("for" =: f <> "class" =: c) $ text l
-- | s - button text, c - button class
buttonClass :: MonadWidget t m => String -> String -> m (Event t ())
buttonClass s c = do
  (e, _) <- elAttr' "button" ("class" =: c) $ text s
  return $ domEvent Click e
-- | Empty @\<form\>@ element with the given CSS class.
formClass :: MonadWidget t m => String -> m ()
formClass c = elAttr "form" ("class" =: c) blank
|
muhbaasu/pfennig-client-reflex
|
src/ReflexExtensions.hs
|
mit
| 1,231
| 0
| 11
| 257
| 537
| 265
| 272
| 26
| 1
|
module Watcher.Action (
exec
, textAction
, cmdAction
, printChangedAction
, actionsList
, Action
) where
import System.Process
import Control.Exception
import Data.List
import Watcher.Arquivo
import Help.Command
import Utils.JSON
-- | An action pairs a human-readable tag with the effect to run on the
-- changed items.  The tag drives both 'Show' and 'Eq'.
newtype Action a = Action (Tag, [a] -> IO())
-- | Identifying label of an action.
type Tag = String
instance Show (Action a) where
    show (Action (tag, _)) = tag
-- | Actions compare by tag only; the wrapped function is ignored.
instance Eq (Action a) where
    Action (t1, _) == Action (t2, _) = t1 == t2
-- | Extract the effect to execute.
exec :: Action a -> ([a] -> IO())
exec (Action (_, a)) = a
-- | Print the given words when a change fires; ignores the changed items.
textAction :: [String] -> Action a
textAction str = Action ("textAction: " ++ show str, \_ -> print (unwords str))
-- | Run each shell command on change; an 'IOException' is reported
-- (user-facing message in Portuguese) instead of crashing the watcher.
cmdAction :: [String] -> Action a
cmdAction cmd = Action ("cmdAction: " ++ show cmd
                       , \_ -> catch (mapM_ callCommand cmd)
                         (\e -> putStrLn $ "\n-> Erro ao executar o comando \'"
                                        ++ concat cmd ++ "\':\n"
                                        ++ show (e :: IOException)
                                        ++ "\nPressione CTRL+C para interromper a execução..."))
-- | Print every file that changed.
printChangedAction :: [String] -> Action Arquivo
printChangedAction _ = Action ("printChangedAction", mapM_ print)
-- | Like 'cmdAction', but each command additionally receives the list
-- of changed files, JSON-encoded, appended as an argument
-- (see 'formatCmd').
cmdWithParametersAction :: [String] -> Action Arquivo
cmdWithParametersAction cmd = Action ("cmdWithParametersAction: " ++ show cmd,
      (\fs -> catch (mapM_ callCommand (formatCmd (jStringfyList fs) cmd))
                (\e -> putStrLn $ "\n-> Erro ao executar o comando \'"
                               ++ concat cmd ++ "\':\n"
                               ++ show (e :: IOException)
                               ++ "\nPressione CTRL+C para interromper a execução...")))
-- | Convenience action: run @stack test@ on every change.
stackTestAction :: [String] -> Action a
stackTestAction _ = cmdAction ["stack test"]
-- | Append the parameter string to every command.  Note: like the
-- original left fold with prepend, the result is in reverse input order.
formatCmd :: String -> [String] -> [String]
formatCmd params = reverse . map withParams
  where withParams cmd = cmd ++ " " ++ params
-- | Registry mapping command-line options to action constructors.
-- The help texts are user-facing and intentionally in Portuguese.
actionsList :: [(Option, [String] -> Action Arquivo)]
actionsList = [(Extended ["--p", "--print"]
                 "Imprime o texto indicado quando mudanças forem identificadas. O argumento de entrada é o texto a ser impresso. Ex: hs-file-watcher --p \"Alterações!\""
               , textAction)
              ,(Extended ["--pc", "--print-changed"]
                 "Exibe lista de arquivos que sofreram alterações. Não há argumentos de entrada.\nEx: hs-file-watcher --pc"
               , printChangedAction)
              ,(Extended ["--cmd", "--command"]
                 "Executa um conjunto de comandos a cada modificação detectada. Os argumentos de entrada são os comandos à executar separados por espaços (Usar \" para comandos que contenham espaços).\nEx: hs-file-watcher --cmd \"stack build\" \"stack install\" "
               , cmdAction)
              ,(Extended ["--cmd-p", "--command-with-params"]
                 "Executa um conjunto de comandos a cada modificação detectada. O comando receberá como parâmetro uma lista dos arquivos alterados no formato JSON. Os argumentos de entrada são os comandos à executar separados por espaços.\nEx: hs-file-watcher --cmd-p echo ==> executará ==> echo [{\"nome\": \"arquivo.hs\" ...}]"
               , cmdWithParametersAction)
              ,(Extended ["--st", "--stack-test"]
                 "Executa o comando stack test. Não há argumentos de entrada. Ex: hs-file-watcher --st"
               , stackTestAction)]
|
Miguel-Fontes/hs-file-watcher
|
src/Watcher/Action.hs
|
mit
| 3,655
| 0
| 16
| 1,219
| 735
| 400
| 335
| 60
| 1
|
module Main where
import Hastron.Server.Types
import Hastron.Game.Engine
-- | Placeholder entry point until the game server is wired up.
main :: IO()
main = putStrLn "Hello World"
|
abhin4v/hastron
|
src/Main.hs
|
mit
| 118
| 0
| 6
| 17
| 34
| 20
| 14
| 5
| 1
|
{-# htermination (==) :: (Eq a, Eq k) => (a, k) -> (a, k) -> Bool #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_EQEQ_12.hs
|
mit
| 70
| 0
| 2
| 17
| 3
| 2
| 1
| 1
| 0
|
{-# htermination compare :: Float -> Float -> Ordering #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_compare_6.hs
|
mit
| 59
| 0
| 2
| 10
| 3
| 2
| 1
| 1
| 0
|
module PayPal.Vault
( createCreditCard
, getCreditCard
, deleteCreditCard
) where
-- All Vault operations below are unimplemented placeholders.
createCreditCard = undefined
getCreditCard = undefined
deleteCreditCard = undefined
|
AndrewRademacher/hs-paypal-rest
|
src/PayPal/Vault.hs
|
mit
| 185
| 0
| 4
| 38
| 32
| 20
| 12
| 7
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
module Phb.Db.TimeLog where
import BasePrelude hiding (on)
import Prelude ()
import qualified Control.Lens as L
import Control.Monad.Trans (MonadIO)
import qualified Data.Function as F
import qualified Data.Map as M
import Data.Time (Day)
import Database.Esqueleto
import Phb.Dates
import Phb.Db.Esqueleto
import Phb.Db.Internal
import Phb.Db.Task
import Phb.Types.Task
import Phb.Types.TimeLog
import Phb.Types.TimeSummary
-- | Fetch time logs, newest day first, optionally restricted to a
-- period and/or a set of people, with optional (limit, offset) paging.
queryTimeLogs
  :: (MonadIO m, Applicative m)
  => Maybe Period
  -> [Key Person]
  -> Maybe (Int64,Int64)
  -> Db m [TimeLogWhole]
queryTimeLogs pp ups pgs = do
  twes <- select $ from $ \ tl -> do
    where_ (mkFilter tl)
    traverse_ (\ (l,o) -> limit l >> offset o) pgs
    orderBy [ desc $ tl ^. TimeLogDay ]
    return tl
  traverse loadTimeLogWhole twes
  where
    -- conjunction of whichever filters were supplied; an empty person
    -- list means "no person filter" rather than "match nobody"
    mkFilter tl = foldl (&&.) (val True) . catMaybes $
      [ fmap (mkPeriodFilter tl) pp
      , mfilter (const (not . null $ ups)) . Just $ tl ^. TimeLogPerson `in_` valList ups
      ]
    mkPeriodFilter tl (ForMonth m) =
      withinPeriod tl TimeLogDay (startOfMonth m) (endOfMonth m)
    mkPeriodFilter tl (ForWeek w) =
      withinPeriod tl TimeLogDay (startOfWeek w) (endOfWeek w)
    mkPeriodFilter tl (ForDay d) = (tl ^. TimeLogDay ==. val d)
-- | Attach the fully-loaded task to a raw time-log row.
-- NOTE(review): 'getEntityJust' trusts the task foreign key to resolve.
loadTimeLogWhole
  :: (MonadIO m, Applicative m)
  => Entity TimeLog
  -> Db m TimeLogWhole
loadTimeLogWhole twe = do
  t <- getEntityJust $ twe L.^.eVal.timeLogTask
  tw <- loadTaskWhole t
  pure (TimeLogWhole twe tw)
-- | All time logs whose day lies in the inclusive range [s, f],
-- each expanded to a 'TimeLogWhole'.
loadTimeLogsForPeriod
  :: (MonadIO m, Applicative m)
  => Day
  -> Day
  -> Db m [TimeLogWhole]
loadTimeLogsForPeriod s f = logs >>= traverse loadTimeLogWhole
  where
    logs =
      select $ from $ \ (tl) -> do
        where_ (tl ^. TimeLogDay >=. val s &&. tl ^. TimeLogDay <=. val f)
        return tl
-- | Sum the hours logged against one task on one day into a
-- 'TimeSummary' labelled by the task's link name.
summaryForTaskForDay
  :: (MonadIO m, Applicative m)
  => TaskWhole
  -> Day
  -> Db m TimeSummary
summaryForTaskForDay t cd = summary . fmap timeLogHours <$> logs
  where
    logs =
      select $ from $ \ (tl) -> do
        where_ (tl ^. TimeLogDay ==. val cd
                &&. tl ^. TimeLogTask ==. val (t L.^.taskWholeTask.eKey))
        return tl
    -- the task's single person is carried along for attribution
    summary hs = TimeSummary
      (t L.^.taskWholeLink.L._Just.taskLinkName)
      (sum hs)
      [t L.^.taskWholePerson]
-- | Aggregate whole time logs per link name: total hours plus the
-- people involved, ordered by descending total hours.
summariseTimeLogs :: [TimeLogWhole] -> [TimeSummary]
summariseTimeLogs =
  fmap (\ (k,(hs,ps)) -> TimeSummary k (getSum hs) ps)
  . reverse
  . sortBy (compare `F.on` (fst . snd))
  . M.toList
  . foldl' accumTimeLogSummaryMap M.empty
  where
    -- (Sum hours, [person]) pairs combine with the map's (<>)
    accumTimeLogSummaryMap m tls =
      M.insertWith (<>) (summaryLabel tls) (summaryVal tls) m
    summaryVal tl =
      ( Sum (timeLogHours $ tl L.^.timeLogWholeLog )
      , [tl L.^.timeLogWholeTask.taskWholePerson]
      )
    summaryLabel = (L.^.timeLogWholeTask.taskWholeLink.L._Just.taskLinkName)
-- | Minutes stored on the row, converted to fractional hours.
timeLogHours :: Entity TimeLog -> Double
timeLogHours =
  (L.^.eVal.timeLogMinutes.L.to(fromIntegral >>> (/60.0)))
|
benkolera/phb
|
hs/Phb/Db/TimeLog.hs
|
mit
| 3,253
| 3
| 18
| 869
| 1,096
| 576
| 520
| -1
| -1
|
module Optimizer.Constants(optimize) where
import Datatypes
import Data.Maybe
import Data.Bits
import Control.Monad.Writer
import Control.Monad.State
import Optimizer.Dataflow(fixedPoint)
import qualified Data.Map as Map
-- | Register number -> currently-known constant value.
type Constants = Map.Map Int Int
-- | Run constant propagation to a fixed point, then rewrite zero
-- immediates back into reads of register 0 (the state is seeded with
-- register 0 holding the constant 0).
optimize :: [IR] -> Writer [String] [IR]
optimize ir = do
  let optimizedIR =
        fixedPoint
          (\ir -> evalState (mapM propagateConstant ir) (Map.fromList [(0, 0)]))
          ir
  return $ map cleanZeros optimizedIR
-- | Rewrite an immediate 0 operand into a read of register 0 (which
-- 'optimize' seeds with the constant 0).  Only the first matching
-- operand is rewritten, exactly as before.
-- The unused @ir\@@ as-patterns of the original (dead bindings that
-- trigger compiler warnings) have been removed; behaviour is unchanged.
cleanZeros :: IR -> IR
cleanZeros (ThreeIR op rd (I i) rt m)
  | i == 0 = ThreeIR op rd (R 0) rt m
cleanZeros (ThreeIR op rd rs (I i) m)
  | i == 0 = ThreeIR op rd rs (R 0) m
cleanZeros other = other
-- | One constant-propagation step over a single instruction, threading
-- the register->constant table through 'State'.
propagateConstant :: IR -> State Constants IR
propagateConstant ir@(TwoIR (R r1) (I i1) m) = do
  constants <- get
  -- when m is set the destination becomes unknown, otherwise record i1
  -- (presumably m marks a masked/conditional write — TODO confirm)
  put (if m
       then Map.delete r1 constants
       else Map.insert r1 i1 constants)
  return ir
propagateConstant (ThreeIR op (R r1) (I i1) (I i2) m) = do
  -- both operands constant: fold the operation into an immediate move
  let newImmediate = operatorFor op i1 i2
      newIR = TwoIR (R r1) (I newImmediate) m
  constants <- get
  put (if m
       then Map.delete r1 constants
       else Map.insert r1 newImmediate constants)
  return newIR
propagateConstant original@(ThreeIR op (R r1) r2 r3 mask) = do
  -- substitute known constants for register operands where possible
  r2' <- getFor r2
  r3' <- getFor r3
  let patched = ThreeIR op (R r1) r2' r3' mask
  constants <- get
  case (r2', r3') of
    -- both became constant: re-run to fold them (previous equation)
    (I i1, I i2) -> propagateConstant patched
    -- only certain op/operand shapes may keep an immediate operand
    (I i1, R _)
      | op == Plus -> do
        put $ Map.delete r1 constants
        return patched
    (R _, I i1)
      | op `elem` [Plus, ShiftLeft, ShiftRight, ShiftRightArithmetic] -> do
        put $ Map.delete r1 constants
        return patched
    -- otherwise leave the instruction untouched; r1 is now unknown
    _ -> do
      put $ Map.delete r1 constants
      return original
propagateConstant other = do return other
-- | Replace a register operand with its known constant value, if any;
-- anything else passes through unchanged.
getFor :: IRItem -> State Constants IRItem
getFor (R reg) = do
  table <- get
  case Map.lookup reg table of
    Just c  -> return (I c)
    Nothing -> return (R reg)
getFor other = return other
-- | Interpret a binary operator over constant operands, for folding.
operatorFor :: BinaryOp -> Int -> Int -> Int
operatorFor op =
  case op of
    BitwiseAnd -> (.&.)
    BitwiseOr -> (.|.)
    BitwiseXor -> xor
    Plus -> (+)
    Minus -> (-)
    Multiply -> (*)
    ShiftLeft -> shiftL
    -- NOTE(review): logical shift-right is folded as 'rotateR', which
    -- is a rotation, not a shift; left unchanged pending confirmation
    -- of the target's shift semantics.
    ShiftRight -> rotateR
    ShiftRightArithmetic -> shiftR
    EqualTo -> \a b -> fromEnum (a == b)
    LessThan -> \a b -> fromEnum (a < b)
    -- was '(a >= b)', which contradicts both the constructor name and
    -- the strict 'LessThan' sibling above
    GreaterThan -> \a b -> fromEnum (a > b)
|
aleksanb/hdc
|
src/Optimizer/Constants.hs
|
mit
| 2,441
| 0
| 17
| 654
| 1,034
| 515
| 519
| 78
| 12
|
import Test.Hspec
import Test.QuickCheck
import Test.Server.Environment
-- | Every spec group the suite runs.
describes :: [Spec]
describes = serverEnvironmentSpecs
-- | Run each spec group through hspec in turn.
main :: IO ()
main = mapM_ hspec describes
|
yulii/mdslide
|
test/spec.hs
|
mit
| 174
| 0
| 6
| 25
| 50
| 28
| 22
| 7
| 1
|
{-# LANGUAGE ConstraintKinds #-}
module Station.Types.Implementation where
import Import
import Station.Types.Card
import Station.Types.Deck
import Station.Types.Version
import Station.Types.VersionContext
-- | 'n' is what you code your 'Implementation' to. This is how station
-- knows how to store bytes, get the current time, get random numbers, etc.
-- It has no frilly requirements other than being a monad. For example,
-- the 'plainFilesystem' implementation uses 'IO'.
--
-- 'm' is what the functions in the station API run in. It just exists to
-- make them more readable. For instance, without it 'Station.new' would have
-- to take two more arguments: @StationDetails n@ and @Deck@, and well as
-- returning a more complicated error type along with a modified @Deck@
-- (in addition to what it already returns).
type Station m n = ( MonadBase n m
                   , Monad n
                   , MonadReader (StationDetails n) m
                   , MonadState Deck m
                   , MonadThrow m
                   )
-- | Runtime configuration handed to every station computation.
data StationDetails n = StationDetails
  { _stationImplementation :: Implementation n
  , _stationAuthors :: [AuthorLink]
    -- ^ For decks with a single owner this will have one item.
    -- It defines who will be credited with each new version
    -- added to the deck.
  }
-- | Record of capabilities the backing store must supply; see the
-- module comment above for the role of 'n'.
data Implementation n = Implementation
  { _imWriteBytes :: Hash -> ByteString -> n ()
    -- ^ Store a blob of bytes under its hash.
  , _imWriteVersion :: VersionHash -> ByteString -> n ()
    -- ^ Store serialized version data under its version hash.
  , _imBuildBytes :: n (HashMap Hash ByteString)
  , _imBuildVersions :: n (HashMap VersionLocation (Set VersionHash))
  , _imNewId :: n Id
  , _imGetTAI :: n TAI
  }
|
seagreen/station
|
src/Station/Types/Implementation.hs
|
mit
| 1,733
| 0
| 13
| 482
| 232
| 138
| 94
| 22
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-georestriction.html
module Stratosphere.ResourceProperties.CloudFrontDistributionGeoRestriction where
import Stratosphere.ResourceImports
-- | Full data type definition for CloudFrontDistributionGeoRestriction. See
-- 'cloudFrontDistributionGeoRestriction' for a more convenient constructor.
-- (Generated module: fields map 1:1 onto CloudFormation properties.)
data CloudFrontDistributionGeoRestriction =
  CloudFrontDistributionGeoRestriction
  { _cloudFrontDistributionGeoRestrictionLocations :: Maybe (ValList Text)
  , _cloudFrontDistributionGeoRestrictionRestrictionType :: Val Text
  } deriving (Show, Eq)
instance ToJSON CloudFrontDistributionGeoRestriction where
  toJSON CloudFrontDistributionGeoRestriction{..} =
    object $
    catMaybes
    [ fmap (("Locations",) . toJSON) _cloudFrontDistributionGeoRestrictionLocations
    , (Just . ("RestrictionType",) . toJSON) _cloudFrontDistributionGeoRestrictionRestrictionType
    ]
-- | Constructor for 'CloudFrontDistributionGeoRestriction' containing
-- required fields as arguments.
cloudFrontDistributionGeoRestriction
  :: Val Text -- ^ 'cfdgrRestrictionType'
  -> CloudFrontDistributionGeoRestriction
cloudFrontDistributionGeoRestriction restrictionTypearg =
  CloudFrontDistributionGeoRestriction
  { _cloudFrontDistributionGeoRestrictionLocations = Nothing
  , _cloudFrontDistributionGeoRestrictionRestrictionType = restrictionTypearg
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-georestriction.html#cfn-cloudfront-distribution-georestriction-locations
cfdgrLocations :: Lens' CloudFrontDistributionGeoRestriction (Maybe (ValList Text))
cfdgrLocations = lens _cloudFrontDistributionGeoRestrictionLocations (\s a -> s { _cloudFrontDistributionGeoRestrictionLocations = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-georestriction.html#cfn-cloudfront-distribution-georestriction-restrictiontype
cfdgrRestrictionType :: Lens' CloudFrontDistributionGeoRestriction (Val Text)
cfdgrRestrictionType = lens _cloudFrontDistributionGeoRestrictionRestrictionType (\s a -> s { _cloudFrontDistributionGeoRestrictionRestrictionType = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/CloudFrontDistributionGeoRestriction.hs
|
mit
| 2,392
| 0
| 13
| 212
| 265
| 151
| 114
| 28
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dax-subnetgroup.html
module Stratosphere.Resources.DAXSubnetGroup where
import Stratosphere.ResourceImports
-- | Full data type definition for DAXSubnetGroup. See 'daxSubnetGroup' for a
-- more convenient constructor.
-- (Generated module: fields map 1:1 onto CloudFormation properties.)
data DAXSubnetGroup =
  DAXSubnetGroup
  { _dAXSubnetGroupDescription :: Maybe (Val Text)
  , _dAXSubnetGroupSubnetGroupName :: Maybe (Val Text)
  , _dAXSubnetGroupSubnetIds :: ValList Text
  } deriving (Show, Eq)
instance ToResourceProperties DAXSubnetGroup where
  toResourceProperties DAXSubnetGroup{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::DAX::SubnetGroup"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ fmap (("Description",) . toJSON) _dAXSubnetGroupDescription
        , fmap (("SubnetGroupName",) . toJSON) _dAXSubnetGroupSubnetGroupName
        , (Just . ("SubnetIds",) . toJSON) _dAXSubnetGroupSubnetIds
        ]
    }
-- | Constructor for 'DAXSubnetGroup' containing required fields as arguments.
daxSubnetGroup
  :: ValList Text -- ^ 'daxsgSubnetIds'
  -> DAXSubnetGroup
daxSubnetGroup subnetIdsarg =
  DAXSubnetGroup
  { _dAXSubnetGroupDescription = Nothing
  , _dAXSubnetGroupSubnetGroupName = Nothing
  , _dAXSubnetGroupSubnetIds = subnetIdsarg
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dax-subnetgroup.html#cfn-dax-subnetgroup-description
daxsgDescription :: Lens' DAXSubnetGroup (Maybe (Val Text))
daxsgDescription = lens _dAXSubnetGroupDescription (\s a -> s { _dAXSubnetGroupDescription = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dax-subnetgroup.html#cfn-dax-subnetgroup-subnetgroupname
daxsgSubnetGroupName :: Lens' DAXSubnetGroup (Maybe (Val Text))
daxsgSubnetGroupName = lens _dAXSubnetGroupSubnetGroupName (\s a -> s { _dAXSubnetGroupSubnetGroupName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dax-subnetgroup.html#cfn-dax-subnetgroup-subnetids
daxsgSubnetIds :: Lens' DAXSubnetGroup (ValList Text)
daxsgSubnetIds = lens _dAXSubnetGroupSubnetIds (\s a -> s { _dAXSubnetGroupSubnetIds = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/Resources/DAXSubnetGroup.hs
|
mit
| 2,341
| 0
| 15
| 300
| 369
| 210
| 159
| 35
| 1
|
{- |
Module : $Header$
Description : abstract syntax for Relational Schemes
Copyright : Dominik Luecke, Uni Bremen 2008
License : GPLv2 or higher, see LICENSE.txt or LIZENZ.txt
Maintainer : luecke@informatik.uni-bremen.de
Stability : provisional
Portability : portable
Abstract syntax for Relational Schemes
-}
module RelationalScheme.AS
(
RSRelType(..)
, RSQualId(..)
, RSRel(..)
, RSRelationships(..)
, RSScheme(..)
, Sentence
, map_rel
, getRels
, getSignature
)
where
import Common.Id
import Common.AS_Annotation
import Common.Doc
import Common.DocUtils
import RelationalScheme.Keywords
import RelationalScheme.Sign
import qualified Data.Map as Map
import Common.Result
-- DrIFT command
{-! global: GetRange !-}
-- | Multiplicity of a relationship between table columns.
data RSRelType = RSone_to_one | RSone_to_many | RSmany_to_one | RSmany_to_many
                 deriving (Eq, Ord)
-- first Id is TableId, second is columnId
data RSQualId = RSQualId
        {
            table :: Id
          , column :: Id
          , q_pos :: Range
        }
        deriving (Eq, Ord, Show)
-- | A relation between two column lists, with its multiplicity.
data RSRel = RSRel
        {
            r_lhs :: [RSQualId]
          , r_rhs :: [RSQualId]
          , r_type :: RSRelType
          , r_pos :: Range
        }
        deriving (Eq, Ord, Show)
-- | All relationships of a scheme, annotated.
data RSRelationships = RSRelationships [Annoted RSRel] Range
                        deriving (Eq, Ord, Show)
-- | A full relational scheme: tables plus relationships.
data RSScheme = RSScheme RSTables RSRelationships Range
                deriving (Eq, Ord, Show)
-- | Sentences of this logic are the relations themselves.
type Sentence = RSRel
-- Pretty printing stuff (Common.Doc combinators)
instance Pretty RSScheme where
    pretty (RSScheme t r _) = pretty t $++$ pretty r
instance Pretty RSRelationships where
    pretty (RSRelationships rs _) = if null rs then empty else
        keyword rsRelationships $+$ vcat (map pretty rs)
instance Pretty RSRel where
    pretty (RSRel i1 i2 tp _) =
       -- render "table[c1, c2, ...]" from the table of the first id
       let tbl is = case is of
             [] -> empty
             t : _ -> pretty (table t)
               <> brackets (ppWithCommas is)
       in fsep [tbl i1, funArrow, tbl i2, keyword (show tp)]
instance Pretty RSQualId where
    pretty = pretty . column
instance Show RSRelType where
    show r = case r of
      RSone_to_one -> rs1to1
      RSone_to_many -> rs1tom
      RSmany_to_one -> rsmto1
      RSmany_to_many -> rsmtom
-- | Translate a qualified column id through a morphism, renaming both
-- the table and the column; 'fail's if the morphism has no mapping.
map_qualId :: RSMorphism -> RSQualId -> Result RSQualId
map_qualId mor qid =
    let
        (tid, rid, rn) = case qid of
            RSQualId i1 i2 rn1 -> (i1, i2,rn1)
    in maybe (fail "map_qualId") return $ do
        mtid <- Map.lookup tid $ table_map mor
        rmor <- Map.lookup tid $ column_map mor
        mrid <- Map.lookup rid $ col_map rmor
        return $ RSQualId mtid mrid rn
-- | Translate a relation through a morphism by mapping every qualified
-- id on both sides; the relation type and range are preserved.
map_rel :: RSMorphism -> RSRel -> Result RSRel
map_rel mor (RSRel lhs rhs relType rng) = do
    mappedLhs <- mapM (map_qualId mor) lhs
    mappedRhs <- mapM (map_qualId mor) rhs
    return $ RSRel mappedLhs mappedRhs relType rng
{-
map_arel :: RSMorphism -> (Annoted RSRel) -> Result (Annoted RSRel)
map_arel mor arel =
let
rel = item arel
(q1, q2, rt, rn) = case rel of
RSRel qe1 qe2 rte rne -> (qe1, qe2, rte, rne)
in
do
mq1 <- mapM (map_qualId mor) q1
mq2 <- mapM (map_qualId mor) q2
return $ arel
{
item = RSRel mq1 mq2 rt rn
}
map_relships :: RSMorphism -> RSRelationships -> Result RSRelationships
map_relships mor rsh =
let
(arel, rn) = case rsh of
RSRelationships arel1 rn1 -> (arel1, rn1)
in
do
orel <- mapM (map_arel mor) arel
return $ RSRelationships orel rn
-}
-- | Extract the annotated relations from a scheme.
getRels :: RSScheme -> [Annoted RSRel]
getRels (RSScheme _ (RSRelationships rels _) _) = rels
-- | Extract the table signature from a scheme.
getSignature :: RSScheme -> RSTables
getSignature (RSScheme tbls _ _) = tbls
-- Generated by DrIFT, look but don't touch!
-- NOTE(review): DrIFT-generated instances reproduced verbatim;
-- regenerate with DrIFT rather than editing by hand.
instance GetRange RSRelType where
  getRange = const nullRange
  rangeSpan x = case x of
    RSone_to_one -> []
    RSone_to_many -> []
    RSmany_to_one -> []
    RSmany_to_many -> []
instance GetRange RSQualId where
  getRange x = case x of
    RSQualId _ _ p -> p
  rangeSpan x = case x of
    RSQualId a b c -> joinRanges [rangeSpan a, rangeSpan b,
                                  rangeSpan c]
instance GetRange RSRel where
  getRange x = case x of
    RSRel _ _ _ p -> p
  rangeSpan x = case x of
    RSRel a b c d -> joinRanges [rangeSpan a, rangeSpan b, rangeSpan c,
                                 rangeSpan d]
instance GetRange RSRelationships where
  getRange x = case x of
    RSRelationships _ p -> p
  rangeSpan x = case x of
    RSRelationships a b -> joinRanges [rangeSpan a, rangeSpan b]
instance GetRange RSScheme where
  getRange x = case x of
    RSScheme _ _ p -> p
  rangeSpan x = case x of
    RSScheme a b c -> joinRanges [rangeSpan a, rangeSpan b,
                                  rangeSpan c]
|
nevrenato/Hets_Fork
|
RelationalScheme/AS.hs
|
gpl-2.0
| 5,285
| 0
| 16
| 1,778
| 1,312
| 678
| 634
| 112
| 1
|
module CSP.Fail.Quiz where
import CSP.Roll
import CSP.Step
import qualified CSP.STS.Roll
import CSP.Fail.Compute
import CSP.STS.Type
import CSP.STS.Dot
import CSP.STS.Semantics.Trace
import Autolib.NFA hiding ( symdiff, cross, alphabet )
import qualified Autolib.NFA
import Autolib.NFA.Ops ( cross )
import Autolib.NFA.Det
import Autolib.NFA.Shortest
import qualified Data.Set as S
import Data.List ( maximumBy )
import Data.Ord ( comparing )
import Control.Monad ( forM )
import Data.Either
-- | Roll several candidate (system, mutant) pairs and keep the "best":
-- sorted first on whether the trace difference is empty, then on the
-- length of the shortest failure witness (-1 when there is none).
roll sigma s vis hid mut tries = do
    its <- forM [ 1 .. tries ] $ \ k -> do
        out @ ( a, b, (st, sf) ) <-
            single sigma s vis hid mut
        let quality = if null sf then -1
                      else minimum $ map length sf
        return ( (null st, quality), out )
    return $ snd $ maximumBy ( comparing fst ) its
-- | Produce one quiz instance: a random reachable STS, a mutant of it,
-- and the shortest witnesses separating their partial traces and their
-- failures.  Sanity-checks that a failure witness really discriminates
-- the two systems (erroring out if both or neither exhibit it).
single sigma s vis hid mut = do
    -- a <- fmap sts $ roll_guarded_rightlinear sigma s
    -- b <- fmap sts $ roll_guarded_rightlinear sigma s
    a <- CSP.STS.Roll.roll_reachable [ 1 .. s ] sigma vis hid
    b <- CSP.STS.Roll.mutate mut a
    let dt = symdiff ( partial_traces a )
                     ( partial_traces b )
        st = some_shortest dt
    let df = symdiff ( failures a ) ( failures b )
        sf = some_shortest df
    case sf of
        s : _ -> let ([r], w) = partitionEithers s in
                 case ( failure_trace a (w,r), failure_trace b (w,r)) of
                     c @ ( Left msg1, Left msg2 ) -> error $ show c
                     c @ ( Right msg1, Right msg2 ) -> error $ show c
                     _ -> return ()
        _ -> return ()
    return ( a, b, (st, sf) )
-- | Automaton for the symmetric difference of two NFAs' languages:
-- determinise both over the joint alphabet, form the product, and
-- accept exactly where the two acceptance statuses disagree.
-- Note: 'dd' is knot-tied — its 'finals' filter ranges over its own
-- state set, relying on lazy record fields.
symdiff a b =
    let co = S.union ( Autolib.NFA.alphabet a ) ( Autolib.NFA.alphabet b )
        da = det0 $ normalize $ a { Autolib.NFA.alphabet = co }
        db = det0 $ normalize $ b { Autolib.NFA.alphabet = co }
        dd = ( cross da db )
             { finals =
                   S.filter ( \ (p,q) -> S.member p ( finals da )
                                      /= S.member q ( finals db )
                            ) $ Autolib.NFA.states dd }
    in  dd
|
marcellussiegburg/autotool
|
collection/src/CSP/Fail/Quiz.hs
|
gpl-2.0
| 2,158
| 3
| 18
| 755
| 766
| 413
| 353
| 52
| 4
|
module Scryptic.Language.LayoutScrypt where
import Scryptic.Language.LexScrypt
import Data.Maybe (isNothing, fromJust)
-- Generated by the BNF Converter
-- local parameters
-- no layout-inducing keywords in this grammar; only top-level layout
topLayout = True
layoutWords = []
layoutStopWords = []
-- layout separators
layoutOpen = "{"
layoutClose = "}"
layoutSep = ";"
-- | Replace layout syntax with explicit layout tokens.
--
-- Walks the token stream once, keeping a stack of currently-open layout
-- blocks, and inserts explicit 'layoutOpen' \/ 'layoutClose' \/
-- 'layoutSep' tokens so that the parser never needs to know about
-- indentation.
resolveLayout :: Bool    -- ^ Whether to use top-level layout.
              -> [Token] -> [Token]
resolveLayout tp = res Nothing [if tl then Implicit 1 else Explicit]
  where
  -- Do top-level layout if the function parameter and the grammar say so.
  tl = tp && topLayout

  res :: Maybe Token -- ^ The previous token, if any.
      -> [Block]     -- ^ A stack of layout blocks.
      -> [Token] -> [Token]

  -- The stack should never be empty.
  res _ [] ts = error $ "Layout error: stack empty. Tokens: " ++ show ts

  res _ st (t0:ts)
    -- We found an open brace in the input,
    -- put an explicit layout block on the stack.
    -- This is done even if there was no layout word,
    -- to keep opening and closing braces.
    | isLayoutOpen t0 = moveAlong (Explicit:st) [t0] ts

  res _ st (t0:ts)
    -- Start a new layout block if the first token is a layout word
    | isLayout t0 =
        case ts of
          -- Explicit layout, just move on. The case above
          -- will push an explicit layout block.
          t1:_ | isLayoutOpen t1 -> moveAlong st [t0] ts
          -- at end of file, the start column doesn't matter
          _ -> let col = if null ts then column t0 else column (head ts)
                   -- insert an open brace after the layout word
                   b:ts' = addToken (nextPos t0) layoutOpen ts
                   -- save the start column
                   st' = Implicit col:st
               in moveAlong st' [t0,b] ts'

    -- If we encounter a closing brace, exit the first explicit layout block.
    | isLayoutClose t0 =
        let st' = drop 1 (dropWhile isImplicit st)
        in if null st'
             then error $ "Layout error: Found " ++ layoutClose ++ " at ("
                          ++ show (line t0) ++ "," ++ show (column t0)
                          ++ ") without an explicit layout block."
             else moveAlong st' [t0] ts

  -- We are in an implicit layout block
  res pt st@(Implicit n:ns) (t0:ts)
    -- End of implicit block by a layout stop word
    | isStop t0 =
        -- Exit the current block and all implicit blocks
        -- more indented than the current token
        let (ebs,ns') = span (`moreIndent` column t0) ns
            moreIndent (Implicit x) y = x > y
            moreIndent Explicit _ = False
            -- the number of blocks exited
            b = 1 + length ebs
            bs = replicate b layoutClose
            -- Insert closing braces after the previous token.
            (ts1,ts2) = splitAt (1+b) $ addTokens (afterPrev pt) bs (t0:ts)
        in moveAlong ns' ts1 ts2

    -- End of an implicit layout block
    | newLine && column t0 < n =
        -- Insert a closing brace after the previous token.
        let b:t0':ts' = addToken (afterPrev pt) layoutClose (t0:ts)
        -- Repeat, with the current block removed from the stack
        in moveAlong ns [b] (t0':ts')

    -- Encounted a new line in an implicit layout block.
    | newLine && column t0 == n =
        -- Insert a semicolon after the previous token.
        -- unless we are the beginning of the file,
        -- or the previous token is a semicolon or open brace.
        if isNothing pt || isTokenIn [layoutSep,layoutOpen] (fromJust pt)
          then moveAlong st [t0] ts
          else let b:t0':ts' = addToken (afterPrev pt) layoutSep (t0:ts)
               in moveAlong st [b,t0'] ts'
    where
      -- True when the current token starts a new source line
      -- (or when there is no previous token at all).
      newLine = case pt of
                  Nothing -> True
                  Just t  -> line t /= line t0

  -- Nothing to see here, move along.
  res _ st (t:ts) = moveAlong st [t] ts

  -- At EOF: skip explicit blocks.
  res (Just t) (Explicit:bs) [] | null bs = []
                                | otherwise = res (Just t) bs []

  -- If we are using top-level layout, insert a semicolon after
  -- the last token, if there isn't one already
  res (Just t) [Implicit n] []
    | isTokenIn [layoutSep] t = []
    | otherwise = addToken (nextPos t) layoutSep []

  -- At EOF in an implicit, non-top-level block: close the block
  res (Just t) (Implicit n:bs) [] =
      let c = addToken (nextPos t) layoutClose []
      in moveAlong bs c []

  -- This should only happen if the input is empty.
  res Nothing st [] = []

  -- | Move on to the next token.
  moveAlong :: [Block] -- ^ The layout stack.
            -> [Token] -- ^ Any tokens just processed.
            -> [Token] -- ^ the rest of the tokens.
            -> [Token]
  moveAlong st [] ts = error $ "Layout error: moveAlong got [] as old tokens"
  moveAlong st ot ts = ot ++ res (Just $ last ot) st ts
-- | A layout block on the stack: either implicit (tracked by the column
-- its items start at) or explicit (delimited by real braces in the input).
data Block = Implicit Int -- ^ An implicit layout block with its start column.
           | Explicit
  deriving Show

-- | Positions are the lexer's 'Posn' values.
type Position = Posn
-- | Test whether a block on the layout stack is implicit.
isImplicit :: Block -> Bool
isImplicit block =
  case block of
    Implicit _ -> True
    Explicit   -> False
-- | Insert a number of symbol tokens at the beginning of a token list.
addTokens :: Position -- ^ Position of the first new token.
          -> [String] -- ^ Token symbols.
          -> [Token]  -- ^ The rest of the tokens. These will have their
                      --   positions updated to make room for the new tokens.
          -> [Token]
addTokens _   []         rest = rest
addTokens pos (sym:syms) rest = addToken pos sym (addTokens pos syms rest)

-- | Insert one new symbol token at the beginning of a token list,
-- shifting the positions of the following tokens to make room for it.
addToken :: Position -- ^ Position of the new token.
         -> String   -- ^ Symbol in the new token.
         -> [Token]  -- ^ The rest of the tokens. These will have their
                     --   positions updated to make room for the new token.
         -> [Token]
addToken pos sym rest = fresh : shifted
  where
    fresh   = sToken pos sym
    shifted = map (incrGlobal pos (length sym)) rest
-- | Get the position immediately to the right of the given token,
-- or the first position in the file when there is no previous token.
afterPrev :: Maybe Token -> Position
afterPrev Nothing  = Pn 0 1 1
afterPrev (Just t) = nextPos t

-- | Get the position immediately to the right of the given token.
nextPos :: Token -> Position
nextPos tok = Pn (g + w) l (c + w + 1)
  where
    Pn g l c = position tok
    w        = tokenLength tok
-- | Add to the global and column positions of a token.
-- The column position is only changed if the token is on
-- the same line as the given position.
incrGlobal :: Position -- ^ If the token is on the same line
                       --   as this position, update the column position.
           -> Int      -- ^ Number of characters to add to the position.
           -> Token -> Token
incrGlobal (Pn _ baseLine _) delta (PT (Pn g l c) tok)
  | l == baseLine = PT (Pn (g + delta) l (c + delta)) tok
  | otherwise     = PT (Pn (g + delta) l c) tok
incrGlobal _ _ other = error $ "cannot add token at " ++ show other
-- | Create a symbol token.
--
-- NOTE(review): the numeric tag paired with each symbol must stay in
-- sync with the token numbering used by the generated lexer/parser;
-- do not renumber these by hand.
sToken :: Position -> String -> Token
sToken p s = PT p (TS s i)
  where
  i = case s of
    "&&" -> 1
    "(" -> 2
    ")" -> 3
    "." -> 4
    "/=" -> 5
    ";" -> 6
    "<" -> 7
    "<=" -> 8
    "==" -> 9
    ">" -> 10
    ">=" -> 11
    "opt" -> 12
    "sleep" -> 13
    "sync" -> 14
    "title" -> 15
    "unwatch" -> 16
    "wait" -> 17
    "watch" -> 18
    "write" -> 19
    "{" -> 20
    "||" -> 21
    "}" -> 22
    -- Any other string is not part of the grammar's reserved vocabulary.
    _ -> error $ "not a reserved word: " ++ show s
-- | Get the position of a token.
position :: Token -> Position
position (PT p _) = p
position (Err p)  = p

-- | Get the line number of a token.
line :: Token -> Int
line tok = let Pn _ l _ = position tok in l

-- | Get the column number of a token.
column :: Token -> Int
column tok = let Pn _ _ c = position tok in c
-- | Check if a token is one of the given symbols.
isTokenIn :: [String] -> Token -> Bool
isTokenIn syms (PT _ (TS r _)) = r `elem` syms
isTokenIn _    _               = False

-- | Check if a word is a layout start token.
isLayout :: Token -> Bool
isLayout tok = isTokenIn layoutWords tok

-- | Check if a token is a layout stop token.
isStop :: Token -> Bool
isStop tok = isTokenIn layoutStopWords tok

-- | Check if a token is the layout open token.
isLayoutOpen :: Token -> Bool
isLayoutOpen tok = isTokenIn [layoutOpen] tok

-- | Check if a token is the layout close token.
isLayoutClose :: Token -> Bool
isLayoutClose tok = isTokenIn [layoutClose] tok

-- | Get the number of characters in the token.
tokenLength :: Token -> Int
tokenLength = length . prToken
|
JohnLato/scryptic
|
src/Scryptic/Language/LayoutScrypt.hs
|
gpl-3.0
| 8,679
| 0
| 18
| 2,760
| 2,150
| 1,118
| 1,032
| 148
| 23
|
{-# LANGUAGE CPP #-}
-- | Command line options interface for user
module UserOptions
( UserOptions (..)
, UserCommand (..)
, DumpCommand (..)
, getUserOptions
) where
import Control.Applicative (optional)
import Data.Int (Int64)
import Data.Monoid ((<>))
import Data.Text (Text)
import Options.Applicative (Parser, argument, auto, command,
execParser, fullDesc, help, helper,
info, long, many, metavar, option,
progDesc, short, showDefault, some,
subparser, switch, value)
import System.FilePath ((</>))
import Serokell.Util.OptParse (strOption)
import RSCoin.Core (MintetteId, PeriodId, Severity (Info),
configDirectory, defaultAccountsNumber,
defaultConfigurationPath,
defaultSecretKeyPath)
-- | Command that describes single action from command-line interface
-- POV
data UserCommand
    -- | List all addresses in wallet, starting with 1
    = ListAddresses
    -- | Query bank to update wallet state according to blockchain
    -- status
    | UpdateBlockchain
    -- | First argument represents inputs -- pairs (a,b,c), where a is
    -- index (starting from 1) of address in wallet, b is positive
    -- integer representing value to send. c is color. Second
    -- argument represents the address to send, and amount.
    | FormTransaction [(Word, Int64, Int)] Text [(Int64, Int)]
    -- | Initialize multisignature address allocation.
    -- 1. Number m of required signatures from addr;
    -- 2. List of user parties in addresses;
    -- 3. List of trust parties in addresses;
    -- 4. Master public key;
    -- 5. Signature of slave key with master key.
    | CreateMultisigAddress Int
                            [Text]
                            [Text]
                            (Maybe Text)
                            (Maybe Text)
    -- | Query notary to get list of pending transactions
    | ListPendingTransactions
    -- | Sign and send transaction from the pending list by id ∈ [1..list.length]
    | SendPendingTransaction Int
    -- | Get a pending transaction and dump it to the file
    | PendingToCold Int FilePath
    -- | List all addresses in which current user acts like party.
    -- Specify trust public key if you also want to receive MS addresses
    -- with trust as party.
    | ListAllocations (Maybe Text)
    -- | List all allocations in the blacklist
    | ListAllocationsBlacklist (Maybe Text)
    -- | For a request #N in local list send confirmation to a Notary.
    -- 1. #N in user list;
    -- 2. @Just (pathToHot, partyAddr)@ : if we want to sign as a 'TrustParty';
    -- 3. Master public key;
    -- 4. Signature of slave key with master key.
    | ConfirmAllocation Int (Maybe String) (Maybe Text) (Maybe Text)
    -- | Put an allocation into blacklist and ignore it
    | BlacklistAllocation Int
    -- | Unignore the allocation
    | WhitelistAllocation Int
    -- | Form a transaction in the same way it's done in FormTransaction,
    -- but dump the transaction and empty signature bundle to the file.
    | ColdFormTransaction [(Word, Int64, Int)] Text [(Int64, Int)] FilePath
    -- | Parse a transaction and full bundle from the file and process it
    | ColdSendTransaction FilePath
    -- | Given a file with transaction and empty signature bundle,
    -- sign everything we can
    | ColdSignTransaction FilePath
    -- | Add a local address to storage (filepaths to sk and pk, then
    -- blockchain heights to query -- minimum and maximum)
    | ImportAddress (Maybe FilePath) FilePath Int
    -- | Export keypair of address #N (as numbered in the @list@ command
    -- output) to the given file.
    | ExportAddress Int FilePath
    -- | Delete address #N from the wallet; the 'Bool' skips the
    -- confirmation prompt when set (the @--force@ flag).
    | DeleteAddress Int Bool
    -- | Dump some part of the bank\/mintette state (see 'DumpCommand')
    | Dump DumpCommand
#if GtkGui
    -- | Start graphical user interface
    | StartGUI
#endif
    deriving (Show)
-- | Sub-commands of 'Dump': which piece of bank\/mintette state to dump
-- (corresponds to the @dump-*@ CLI commands).
data DumpCommand
    = DumpHBlocks PeriodId PeriodId        -- ^ High-level blocks, from one period id to another
    | DumpHBlock PeriodId                  -- ^ A single high-level block
    | DumpMintettes                        -- ^ The list of mintettes
    | DumpPeriod                           -- ^ The last period
    | DumpMintetteUtxo MintetteId          -- ^ Utxo of the given mintette
    | DumpMintetteLogs MintetteId PeriodId -- ^ Logs of a mintette for a period
    | DumpAddress Word                     -- ^ The address with the given index
    deriving (Show)
-- | Datatype describing user command line options
data UserOptions = UserOptions
    { userCommand    :: UserCommand -- ^ Command for the program to process
    , isBankMode     :: Bool        -- ^ Whether to start in bank-mode; only needed
                                    --   on wallet initialization (loads bank's
                                    --   secret key)
    , bankModePath   :: FilePath    -- ^ Path to bank's secret key
    , addressesNum   :: Int         -- ^ Number of addresses to create initially
#if GtkGui
    , guidbPath      :: FilePath    -- ^ Path to the gui database.
#endif
    , walletPath     :: FilePath    -- ^ Path to the wallet
    , logSeverity    :: Severity    -- ^ Logging severity
    , configPath     :: FilePath    -- ^ Configuration file path
    , defaultContext :: Bool        -- ^ Use defaultNodeContext
    , rebuildDB      :: Bool        -- ^ Rebuild User DB (erase if it already exists)
    } deriving (Show)
-- | Parser for the user sub-commands (first positional argument of the
-- CLI).  Each @command@ alternative below builds one constructor of
-- 'UserCommand'; option-parsing helpers shared between several commands
-- live in the @where@ clause.
userCommandParser :: Parser UserCommand
userCommandParser =
    subparser
        (command
             "list"
             (info
                  (pure ListAddresses)
                  (progDesc
                       ("List all available addresses from wallet " <>
                        "and information about them."))) <>
#if GtkGui
         command
             "start-gui"
             (info (pure StartGUI) (progDesc "Start graphical user interface.")) <>
#endif
         command
             "update"
             (info
                  (pure UpdateBlockchain)
                  (progDesc "Query bank to sync local state with blockchain.")) <>
         command
             "send"
             (info formTransactionOpts (progDesc "Form and send transaction.")) <>
         command
             "create-multisig"
             (info
                  createMultisigOpts
                  (progDesc "Create multisignature address allocation")) <>
         command
             "pending-list"
             (info
                  (pure ListPendingTransactions)
                  (progDesc "List transactions that are pending to be signed")) <>
         command
             "pending-send"
             (info sendPendingOpts
                  (progDesc "Send a pending transaction from list-pending by index")) <>
         command
             "pending-to-cold"
             (info pendingToColdOpts
                  (progDesc $ "Download pending transaction and dump it into " <>
                              "the file to sign it with cold key")) <>
         command
             "alloc-list"
             (info
                  (listAllocOpts ListAllocations)
                  (progDesc
                       "List all multisignature address allocations you need to confirm")) <>
         command "alloc-list-blacklisted"
             (info
                  (listAllocOpts ListAllocationsBlacklist)
                  (progDesc $
                   "List all multisignature address allocations that " <>
                   "are blacklisted (ignored).")) <>
         command
             "alloc-confirm"
             (info
                  confirmOpts
                  (progDesc
                       "Confirm MS address allocation from `rscoin-user list-alloc`")) <>
         command "alloc-blacklist"
             (info blacklistAllocationOpts (progDesc "Blacklist an allocation")) <>
         command "alloc-whitelist"
             (info whitelistAllocationOpts
                  (progDesc "Restore an allocation from the blacklist.")) <>
         command "cold-form"
             (info coldFormOpts
                  (progDesc "Form a transaction and write it to disk to be signed by cold key.")) <>
         command "cold-send"
             (info coldSendOpts
                  (progDesc "Read a signed transaction from file and process/send it.")) <>
         command "cold-sign"
             (info coldSignOpts
                  (progDesc "Read non-signed transaction from file and sign it.")) <>
         command
             "address-import"
             (info
                  importAddressOpts
                  (progDesc
                       "Import address to storage given a (secretKey,publicKey) pair")) <>
         command
             "address-export"
             (info
                  exportAddressOpts
                  (progDesc "Export address' keypair to the file.")) <>
         command
             "address-delete"
             (info
                  deleteAddressOpts
                  (progDesc $
                   "Delete all information about address from " <>
                   "the wallet (can't be returned back if not exported before).")) <>
         command
             "dump-blocks"
             (info
                  (fmap Dump $
                   DumpHBlocks <$>
                   argument
                       auto
                       (metavar "FROM" <> help "Dump from which block") <*>
                   argument auto (metavar "TO" <> help "Dump to which block"))
                  (progDesc "Dump Bank high level blocks.")) <>
         command
             "dump-block"
             (info
                  (fmap Dump $
                   DumpHBlock <$>
                   argument
                       auto
                       (metavar "ID" <>
                        help "Dump block with specific periodId"))
                  (progDesc "Dump Bank high level block.")) <>
         command
             "dump-mintettes"
             (info
                  (pure $ Dump DumpMintettes)
                  (progDesc "Dump list of mintettes.")) <>
         command
             "dump-period"
             (info (pure $ Dump DumpPeriod) (progDesc "Dump last period.")) <>
         command
             "dump-mintette-utxo"
             (info
                  (fmap Dump $
                   DumpMintetteUtxo <$>
                   argument
                       auto
                       (metavar "MINTETTE_ID" <>
                        help "Dump utxo of mintette with this id."))
                  (progDesc "Dump utxo of corresponding mintette.")) <>
         command
             "dump-mintette-logs"
             (info
                  (fmap Dump . DumpMintetteLogs <$>
                   argument
                       auto
                       (metavar "MINTETTE_ID" <>
                        help "Dump logs of mintette with this id.") <*>
                   argument
                       auto
                       (metavar "PERIOD_ID" <>
                        help "Dump logs with this period id."))
                  (progDesc "Dump logs of corresponding mintette and periodId.")) <>
         command
             "dump-address"
             (info
                  (fmap Dump $
                   DumpAddress <$>
                   argument
                       auto
                       (metavar "INDEX" <> help "Index of address to dump"))
                  (progDesc "Dump address with given index.")))
  where
    -- Options shared by the `send` and `cold-form` commands.
    formTxFrom =
        option auto
            (long "from" <>
             help
                 ("Tuples (a,b,c) where " <>
                  "'a' is id of address as numbered in list-wallets output, " <>
                  "'b' is integer -- amount of coins to send, " <>
                  "'c' is the color (0 for uncolored), any uncolored ~ colored.") <>
             metavar "(INT,INT,INT)")
    formTxToAddr = strOption (long "toaddr" <> help "Address to send coins to.")
    formTxToCoin =
        option auto
            (long "tocoin" <>
             help
                 ("Pairs (a,b) where " <>
                  "'a' is amount of coins to send, " <>
                  "'b' is the color of that coin") <>
             metavar "(INT,INT)")
    formTransactionOpts =
        FormTransaction <$> some formTxFrom <*> formTxToAddr <*> many formTxToCoin
    createMultisigOpts =
        CreateMultisigAddress <$>
        option auto (short 'm' <> metavar "INT" <> help "Number m from m/n") <*>
        many
            (strOption $
             long "uaddr" <> metavar "ADDRESS" <>
             help "User party Addresses that would own this MS address") <*>
        many
            (strOption $
             long "taddr" <> metavar "ADDRESS" <>
             help "Trust party Addresses that would own this MS address") <*>
        optional
            (strOption $
             long "master-pk" <> metavar "ADDRESS" <>
             help "Public key of master for party") <*>
        optional
            (strOption $
             long "slave-sig" <> metavar "SIGNATURE" <>
             help "Signature of slave with master public key")
    -- Shared by `alloc-list` and `alloc-list-blacklisted` (the
    -- constructor is passed in as `allocCtor`).
    listAllocOpts allocCtor =
        allocCtor <$>
        optional
            (strOption $
             long "trust-party" <> metavar "PUBLIC KEY" <>
             help "Trust address as party")
    confirmOpts =
        ConfirmAllocation <$>
        option
            auto
            (short 'i' <> long "index" <> metavar "INT" <>
             help "Index starting from 1 in `list-alloc`") <*>
        optional
            (strOption $
             long "hot-trust" <> metavar "(SKPATH, ADDRESS)" <>
             help
                 "Pair of hot sk path and party pk if we want to confirm as Trust)") <*>
        optional
            (strOption $
             long "master-pk" <> metavar "ADDRESS" <>
             help "Public key of master for party") <*>
        optional
            (strOption $
             long "slave-sig" <> metavar "SIGNATURE" <>
             help "Signature of slave with master public key")
    importAddressOpts =
        ImportAddress <$>
        (optional $
         strOption $
         long "sk" <> help "Path to file with binary-encoded secret key" <>
         metavar "FILEPATH") <*>
        (strOption $
         long "pk" <> help "Path to file with base64-encoded public key" <>
         metavar "FILEPATH") <*>
        (option auto $
         long "query-from" <> help "Height to query blockchain from" <> value 0 <>
         metavar "INT")
    exportAddressOpts =
        ExportAddress <$>
        option
            auto
            (short 'i' <> long "index" <> help "Id of address in `list` command output." <>
             metavar "INT") <*>
        strOption
            (long "path" <> help "Path to export address' keys to." <>
             metavar "FILEPATH")
    deleteAddressOpts =
        DeleteAddress <$>
        option
            auto
            (short 'i' <> long "index" <> help "Id of address in `list` command output." <>
             metavar "INT") <*>
        switch
            (long "force" <> short 'f' <>
             help "Don't ask confirmation for deletion")
    blacklistAllocationOpts = BlacklistAllocation <$> option
        auto (short 'i' <> long "index" <> metavar "INT" <>
              help "Index of allocation, starting from 1 in `list-alloc`")
    whitelistAllocationOpts = WhitelistAllocation <$> option
        auto (short 'i' <> long "index" <> metavar "INT" <>
              help "Index of allocation, starting from 1 in `list-alloc`")
    -- Shared by `cold-form` and `pending-to-cold`.
    coldToWritePath =
        strOption
            (long "path" <>
             help "Path to file for non-signed transaction to write into" <>
             metavar "FILEPATH")
    coldFormOpts =
        ColdFormTransaction <$>
        some formTxFrom <*> formTxToAddr <*> many formTxToCoin <*> coldToWritePath
    coldSendOpts =
        ColdSendTransaction <$>
        strOption
            (long "path" <>
             help "Path to file with signed transaction" <>
             metavar "FILEPATH")
    coldSignOpts =
        ColdSignTransaction <$>
        strOption
            (long "path" <>
             help "Path to file with transaction to sign" <>
             metavar "FILEPATH")
    -- Shared by `pending-send` and `pending-to-cold`.
    pendingTxId =
        option auto
            (short 'i' <> long "index" <>
             help "Id of transaction in list-pending list" <>
             metavar "INT")
    sendPendingOpts = SendPendingTransaction <$> pendingTxId
    pendingToColdOpts = PendingToCold <$> pendingTxId <*> coldToWritePath
-- | Parser for the global (non-subcommand) options, wrapping
-- 'userCommandParser'.  The applicative chain below must supply fields
-- in exactly the order they are declared in 'UserOptions'.
--
-- BUGFIX: previously the @wallet-path@ option was parsed before the
-- GtkGui-conditional @guidb-path@ option, while the record declares
-- 'guidbPath' before 'walletPath' — so with @GtkGui@ enabled the two
-- paths ended up in each other's fields.  The @guidb-path@ option is
-- now parsed first, matching the field order.
userOptionsParser :: FilePath -> FilePath -> FilePath -> Parser UserOptions
userOptionsParser dskp configDir defaultConfigPath =
    UserOptions <$> userCommandParser <*>
    switch
        (long "bank-mode" <>
         help
             ("Start the client in bank-mode. " <>
              "Is needed only on wallet initialization. " <>
              "Will load bank's secret key.")) <*>
    strOption
        (long "bank-sk-path" <> help "Path to bank's secret key." <> value dskp <>
         showDefault <>
         metavar "FILEPATH") <*>
    option
        auto
        (long "addresses-num" <>
         help
             ("The number of addresses to create " <>
              "initially with the wallet") <>
         value defaultAccountsNumber <>
         showDefault <>
         metavar "INT") <*>
#if GtkGui
    strOption
        (long "guidb-path" <> help "Path to gui database" <>
         value "gui-db" <>
         showDefault <>
         metavar "FILEPATH") <*>
#endif
    strOption
        (long "wallet-path" <> help "Path to wallet database." <>
         value (configDir </> "wallet-db") <>
         showDefault <>
         metavar "FILEPATH") <*>
    option auto
        (long "log-severity" <> value Info <> showDefault <>
         help "Logging severity" <>
         metavar "SEVERITY") <*>
    strOption
        (long "config-path" <> help "Path to configuration file" <>
         value defaultConfigPath <>
         showDefault <>
         metavar "FILEPATH") <*>
    switch (mconcat [short 'd',
                     long "default-context",
                     help ("Use default NodeContext. "
                           <> "Intended to be used for local deployment")
                    ]) <*>
    switch
        (mconcat
             [ short 'r'
             , long "rebuild-db"
             , help
                   ("Erase database if it already exists")])
-- | IO call that retrieves command line options
getUserOptions :: IO UserOptions
getUserOptions = do
    skPath   <- defaultSecretKeyPath
    confDir  <- configDirectory
    confPath <- defaultConfigurationPath
    -- Wrap the options parser with --help support before running it.
    let parser = helper <*> userOptionsParser skPath confDir confPath
    execParser $ info parser (fullDesc <> progDesc "RSCoin user client")
|
input-output-hk/rscoin-haskell
|
src/User/UserOptions.hs
|
gpl-3.0
| 18,513
| 0
| 38
| 7,079
| 2,920
| 1,489
| 1,431
| 397
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeSeller.Accounts.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List all accounts available to this Ad Exchange account.
--
-- /See:/ <https://developers.google.com/ad-exchange/seller-rest/ Ad Exchange Seller API Reference> for @adexchangeseller.accounts.list@.
module Network.Google.Resource.AdExchangeSeller.Accounts.List
(
-- * REST Resource
AccountsListResource
-- * Creating a Request
, accountsList
, AccountsList
-- * Request Lenses
, alPageToken
, alMaxResults
) where
import Network.Google.AdExchangeSeller.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangeseller.accounts.list@ method which the
-- 'AccountsList' request conforms to.
--
-- Resolves to @GET \/adexchangeseller\/v2.0\/accounts@ with optional
-- @pageToken@ and @maxResults@ query parameters.
type AccountsListResource =
     "adexchangeseller" :>
       "v2.0" :>
         "accounts" :>
           QueryParam "pageToken" Text :>
             QueryParam "maxResults" (Textual Int32) :>
               QueryParam "alt" AltJSON :> Get '[JSON] Accounts
-- | List all accounts available to this Ad Exchange account.
--
-- /See:/ 'accountsList' smart constructor.
data AccountsList = AccountsList'
    { _alPageToken  :: !(Maybe Text)            -- ^ continuation token, see 'alPageToken'
    , _alMaxResults :: !(Maybe (Textual Int32)) -- ^ page size, see 'alMaxResults'
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AccountsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alPageToken'
--
-- * 'alMaxResults'
accountsList
    :: AccountsList
accountsList =
    AccountsList' {_alMaxResults = Nothing, _alPageToken = Nothing}
-- | A continuation token, used to page through accounts. To retrieve the
-- next page, set this parameter to the value of \"nextPageToken\" from the
-- previous response.
alPageToken :: Lens' AccountsList (Maybe Text)
alPageToken = lens _alPageToken setter
  where
    setter record v = record {_alPageToken = v}
-- | The maximum number of accounts to include in the response, used for
-- paging.
alMaxResults :: Lens' AccountsList (Maybe Int32)
alMaxResults = rawLens . mapping _Coerce
  where
    -- Raw lens over the Textual-wrapped field; the composed mapping
    -- exposes it as a plain Int32.
    rawLens = lens _alMaxResults (\record v -> record {_alMaxResults = v})
instance GoogleRequest AccountsList where
        type Rs AccountsList = Accounts
        type Scopes AccountsList =
             '["https://www.googleapis.com/auth/adexchange.seller",
               "https://www.googleapis.com/auth/adexchange.seller.readonly"]
        -- NOTE: the argument order passed to 'go' must match the query
        -- parameters declared in 'AccountsListResource'.
        requestClient AccountsList'{..}
          = go _alPageToken _alMaxResults (Just AltJSON)
              adExchangeSellerService
          where go
                  = buildClient (Proxy :: Proxy AccountsListResource)
                      mempty
|
rueshyna/gogol
|
gogol-adexchange-seller/gen/Network/Google/Resource/AdExchangeSeller/Accounts/List.hs
|
mpl-2.0
| 3,349
| 0
| 13
| 736
| 411
| 245
| 166
| 60
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Books.MyLibrary.Bookshelves.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of bookshelves belonging to the authenticated user.
--
-- /See:/ <https://developers.google.com/books/docs/v1/getting_started Books API Reference> for @books.mylibrary.bookshelves.list@.
module Network.Google.Resource.Books.MyLibrary.Bookshelves.List
(
-- * REST Resource
MyLibraryBookshelvesListResource
-- * Creating a Request
, myLibraryBookshelvesList
, MyLibraryBookshelvesList
-- * Request Lenses
, mlblSource
) where
import Network.Google.Books.Types
import Network.Google.Prelude
-- | A resource alias for @books.mylibrary.bookshelves.list@ method which the
-- 'MyLibraryBookshelvesList' request conforms to.
--
-- Resolves to @GET \/books\/v1\/mylibrary\/bookshelves@ with an optional
-- @source@ query parameter.
type MyLibraryBookshelvesListResource =
     "books" :>
       "v1" :>
         "mylibrary" :>
           "bookshelves" :>
             QueryParam "source" Text :>
               QueryParam "alt" AltJSON :> Get '[JSON] Bookshelves
-- | Retrieves a list of bookshelves belonging to the authenticated user.
--
-- /See:/ 'myLibraryBookshelvesList' smart constructor.
newtype MyLibraryBookshelvesList = MyLibraryBookshelvesList'
    { _mlblSource :: Maybe Text -- ^ request originator, see 'mlblSource'
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MyLibraryBookshelvesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mlblSource'
myLibraryBookshelvesList
    :: MyLibraryBookshelvesList
myLibraryBookshelvesList = MyLibraryBookshelvesList' {_mlblSource = Nothing}
-- | String to identify the originator of this request.
mlblSource :: Lens' MyLibraryBookshelvesList (Maybe Text)
mlblSource = lens _mlblSource setter
  where
    setter record v = record {_mlblSource = v}
instance GoogleRequest MyLibraryBookshelvesList where
        type Rs MyLibraryBookshelvesList = Bookshelves
        type Scopes MyLibraryBookshelvesList =
             '["https://www.googleapis.com/auth/books"]
        -- NOTE: the argument order passed to 'go' must match the query
        -- parameters declared in 'MyLibraryBookshelvesListResource'.
        requestClient MyLibraryBookshelvesList'{..}
          = go _mlblSource (Just AltJSON) booksService
          where go
                  = buildClient
                      (Proxy :: Proxy MyLibraryBookshelvesListResource)
                      mempty
|
rueshyna/gogol
|
gogol-books/gen/Network/Google/Resource/Books/MyLibrary/Bookshelves/List.hs
|
mpl-2.0
| 2,977
| 0
| 13
| 631
| 305
| 187
| 118
| 48
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Orderinvoices.Createrefundinvoice
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a refund invoice for one or more shipment groups, and triggers a
-- refund for orderinvoice enabled orders. This can only be used for line
-- items that have previously been charged using \`createChargeInvoice\`.
-- All amounts (except for the summary) are incremental with respect to the
-- previous invoice.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.orderinvoices.createrefundinvoice@.
module Network.Google.Resource.Content.Orderinvoices.Createrefundinvoice
(
-- * REST Resource
OrderinvoicesCreaterefundinvoiceResource
-- * Creating a Request
, orderinvoicesCreaterefundinvoice
, OrderinvoicesCreaterefundinvoice
-- * Request Lenses
, ocXgafv
, ocMerchantId
, ocUploadProtocol
, ocAccessToken
, ocUploadType
, ocPayload
, ocOrderId
, ocCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.orderinvoices.createrefundinvoice@ method which the
-- 'OrderinvoicesCreaterefundinvoice' request conforms to.
--
-- Resolves to
-- @POST \/content\/v2.1\/{merchantId}\/orderinvoices\/{orderId}\/createRefundInvoice@
-- with a JSON request body.
type OrderinvoicesCreaterefundinvoiceResource =
     "content" :>
       "v2.1" :>
         Capture "merchantId" (Textual Word64) :>
           "orderinvoices" :>
             Capture "orderId" Text :>
               "createRefundInvoice" :>
                 QueryParam "$.xgafv" Xgafv :>
                   QueryParam "upload_protocol" Text :>
                     QueryParam "access_token" Text :>
                       QueryParam "uploadType" Text :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :>
                             ReqBody '[JSON]
                               OrderinvoicesCreateRefundInvoiceRequest
                               :>
                               Post '[JSON]
                                 OrderinvoicesCreateRefundInvoiceResponse
-- | Creates a refund invoice for one or more shipment groups, and triggers a
-- refund for orderinvoice enabled orders. This can only be used for line
-- items that have previously been charged using \`createChargeInvoice\`.
-- All amounts (except for the summary) are incremental with respect to the
-- previous invoice.
--
-- /See:/ 'orderinvoicesCreaterefundinvoice' smart constructor.
data OrderinvoicesCreaterefundinvoice =
  OrderinvoicesCreaterefundinvoice'
    { _ocXgafv :: !(Maybe Xgafv)         -- ^ V1 error format, see 'ocXgafv'
    , _ocMerchantId :: !(Textual Word64) -- ^ managing account id, see 'ocMerchantId'
    , _ocUploadProtocol :: !(Maybe Text) -- ^ see 'ocUploadProtocol'
    , _ocAccessToken :: !(Maybe Text)    -- ^ OAuth token, see 'ocAccessToken'
    , _ocUploadType :: !(Maybe Text)     -- ^ see 'ocUploadType'
    , _ocPayload :: !OrderinvoicesCreateRefundInvoiceRequest -- ^ request body, see 'ocPayload'
    , _ocOrderId :: !Text                -- ^ order id, see 'ocOrderId'
    , _ocCallback :: !(Maybe Text)       -- ^ JSONP callback, see 'ocCallback'
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrderinvoicesCreaterefundinvoice' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ocXgafv'
--
-- * 'ocMerchantId'
--
-- * 'ocUploadProtocol'
--
-- * 'ocAccessToken'
--
-- * 'ocUploadType'
--
-- * 'ocPayload'
--
-- * 'ocOrderId'
--
-- * 'ocCallback'
orderinvoicesCreaterefundinvoice
    :: Word64 -- ^ 'ocMerchantId'
    -> OrderinvoicesCreateRefundInvoiceRequest -- ^ 'ocPayload'
    -> Text -- ^ 'ocOrderId'
    -> OrderinvoicesCreaterefundinvoice
orderinvoicesCreaterefundinvoice merchantId payload orderId =
    OrderinvoicesCreaterefundinvoice'
        { _ocMerchantId = _Coerce # merchantId
        , _ocPayload = payload
        , _ocOrderId = orderId
        , _ocXgafv = Nothing
        , _ocUploadProtocol = Nothing
        , _ocAccessToken = Nothing
        , _ocUploadType = Nothing
        , _ocCallback = Nothing
        }
-- | V1 error format.
ocXgafv :: Lens' OrderinvoicesCreaterefundinvoice (Maybe Xgafv)
ocXgafv = lens _ocXgafv (\record v -> record {_ocXgafv = v})

-- | The ID of the account that manages the order. This cannot be a
-- multi-client account.
ocMerchantId :: Lens' OrderinvoicesCreaterefundinvoice Word64
ocMerchantId = rawLens . _Coerce
  where
    -- Raw lens over the Textual-wrapped field; composing with _Coerce
    -- exposes it as a plain Word64.
    rawLens = lens _ocMerchantId (\record v -> record {_ocMerchantId = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ocUploadProtocol :: Lens' OrderinvoicesCreaterefundinvoice (Maybe Text)
ocUploadProtocol =
    lens _ocUploadProtocol (\record v -> record {_ocUploadProtocol = v})

-- | OAuth access token.
ocAccessToken :: Lens' OrderinvoicesCreaterefundinvoice (Maybe Text)
ocAccessToken =
    lens _ocAccessToken (\record v -> record {_ocAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ocUploadType :: Lens' OrderinvoicesCreaterefundinvoice (Maybe Text)
ocUploadType = lens _ocUploadType (\record v -> record {_ocUploadType = v})

-- | Multipart request metadata.
ocPayload :: Lens' OrderinvoicesCreaterefundinvoice OrderinvoicesCreateRefundInvoiceRequest
ocPayload = lens _ocPayload (\record v -> record {_ocPayload = v})

-- | The ID of the order.
ocOrderId :: Lens' OrderinvoicesCreaterefundinvoice Text
ocOrderId = lens _ocOrderId (\record v -> record {_ocOrderId = v})

-- | JSONP
ocCallback :: Lens' OrderinvoicesCreaterefundinvoice (Maybe Text)
ocCallback = lens _ocCallback (\record v -> record {_ocCallback = v})
instance GoogleRequest
           OrderinvoicesCreaterefundinvoice
         where
        type Rs OrderinvoicesCreaterefundinvoice =
             OrderinvoicesCreateRefundInvoiceResponse
        type Scopes OrderinvoicesCreaterefundinvoice =
             '["https://www.googleapis.com/auth/content"]
        -- NOTE: the argument order passed to 'go' (captures first, then
        -- query parameters, then the body) must match
        -- 'OrderinvoicesCreaterefundinvoiceResource'.
        requestClient OrderinvoicesCreaterefundinvoice'{..}
          = go _ocMerchantId _ocOrderId _ocXgafv
              _ocUploadProtocol
              _ocAccessToken
              _ocUploadType
              _ocCallback
              (Just AltJSON)
              _ocPayload
              shoppingContentService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy OrderinvoicesCreaterefundinvoiceResource)
                      mempty
|
brendanhay/gogol
|
gogol-shopping-content/gen/Network/Google/Resource/Content/Orderinvoices/Createrefundinvoice.hs
|
mpl-2.0
| 6,766
| 0
| 20
| 1,608
| 890
| 519
| 371
| 133
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.OperatingSystemVersions.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets one operating system version by ID.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.operatingSystemVersions.get@.
module Network.Google.Resource.DFAReporting.OperatingSystemVersions.Get
(
-- * REST Resource
OperatingSystemVersionsGetResource
-- * Creating a Request
, operatingSystemVersionsGet
, OperatingSystemVersionsGet
-- * Request Lenses
, osvgProFileId
, osvgId
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.operatingSystemVersions.get@ method which the
-- 'OperatingSystemVersionsGet' request conforms to.
--
-- Path shape: @dfareporting\/v2.7\/userprofiles\/{profileId}\/operatingSystemVersions\/{id}?alt=json@,
-- returning a single 'OperatingSystemVersion' decoded from JSON.
type OperatingSystemVersionsGetResource =
     "dfareporting" :>
       "v2.7" :>
         "userprofiles" :>
           Capture "profileId" (Textual Int64) :>
             "operatingSystemVersions" :>
               Capture "id" (Textual Int64) :>
                 QueryParam "alt" AltJSON :>
                   Get '[JSON] OperatingSystemVersion
-- | Gets one operating system version by ID.
--
-- /See:/ 'operatingSystemVersionsGet' smart constructor.
data OperatingSystemVersionsGet = OperatingSystemVersionsGet'
    { _osvgProFileId :: !(Textual Int64) -- ^ User profile ID (strict; textually encoded on the wire).
    , _osvgId :: !(Textual Int64) -- ^ Operating system version ID (strict; textually encoded on the wire).
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperatingSystemVersionsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'osvgProFileId'
--
-- * 'osvgId'
operatingSystemVersionsGet
    :: Int64 -- ^ 'osvgProFileId'
    -> Int64 -- ^ 'osvgId'
    -> OperatingSystemVersionsGet
operatingSystemVersionsGet profileId_ versionId_ =
    OperatingSystemVersionsGet'
        { _osvgProFileId = _Coerce # profileId_
        , _osvgId = _Coerce # versionId_
        }
-- | User profile ID associated with this request.
osvgProFileId :: Lens' OperatingSystemVersionsGet Int64
osvgProFileId = fieldLens . _Coerce
  where
    fieldLens = lens _osvgProFileId setField
    setField s v = s{_osvgProFileId = v}
-- | Operating system version ID.
osvgId :: Lens' OperatingSystemVersionsGet Int64
osvgId = fieldLens . _Coerce
  where
    fieldLens = lens _osvgId setField
    setField s v = s{_osvgId = v}
-- Wires the request record onto the REST resource: declares the response
-- type, the OAuth scope required, and the positional mapping of record
-- fields onto the captures of 'OperatingSystemVersionsGetResource'.
instance GoogleRequest OperatingSystemVersionsGet
         where
        type Rs OperatingSystemVersionsGet =
             OperatingSystemVersion
        type Scopes OperatingSystemVersionsGet =
             '["https://www.googleapis.com/auth/dfatrafficking"]
        -- NOTE: argument order must match the resource type's captures;
        -- @Just AltJSON@ pins the wire format to JSON.
        requestClient OperatingSystemVersionsGet'{..}
          = go _osvgProFileId _osvgId (Just AltJSON)
              dFAReportingService
          where go
                  = buildClient
                      (Proxy :: Proxy OperatingSystemVersionsGetResource)
                      mempty
|
rueshyna/gogol
|
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/OperatingSystemVersions/Get.hs
|
mpl-2.0
| 3,600
| 0
| 14
| 804
| 421
| 249
| 172
| 66
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.RDS.Types
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
module Network.AWS.RDS.Types
(
-- * Service
RDS
-- ** Error
, RESTError
-- ** XML
, ns
-- * PendingMaintenanceAction
, PendingMaintenanceAction
, pendingMaintenanceAction
, pmaAction
, pmaAutoAppliedAfterDate
, pmaCurrentApplyDate
, pmaDescription
, pmaForcedApplyDate
, pmaOptInStatus
-- * OptionGroup
, OptionGroup
, optionGroup
, ogAllowsVpcAndNonVpcInstanceMemberships
, ogEngineName
, ogMajorEngineVersion
, ogOptionGroupDescription
, ogOptionGroupName
, ogOptions
, ogVpcId
-- * DBParameterGroupStatus
, DBParameterGroupStatus
, dbparameterGroupStatus
, dbpgsDBParameterGroupName
, dbpgsParameterApplyStatus
-- * Event
, Event
, event
, eDate
, eEventCategories
, eMessage
, eSourceIdentifier
, eSourceType
-- * DBSecurityGroup
, DBSecurityGroup
, dbsecurityGroup
, dbsgDBSecurityGroupDescription
, dbsgDBSecurityGroupName
, dbsgEC2SecurityGroups
, dbsgIPRanges
, dbsgOwnerId
, dbsgVpcId
-- * Tag
, Tag
, tag
, tagKey
, tagValue
-- * DBEngineVersion
, DBEngineVersion
, dbengineVersion
, dbevDBEngineDescription
, dbevDBEngineVersionDescription
, dbevDBParameterGroupFamily
, dbevDefaultCharacterSet
, dbevEngine
, dbevEngineVersion
, dbevSupportedCharacterSets
-- * DBSnapshot
, DBSnapshot
, dbsnapshot
, dbsAllocatedStorage
, dbsAvailabilityZone
, dbsDBInstanceIdentifier
, dbsDBSnapshotIdentifier
, dbsEncrypted
, dbsEngine
, dbsEngineVersion
, dbsInstanceCreateTime
, dbsIops
, dbsKmsKeyId
, dbsLicenseModel
, dbsMasterUsername
, dbsOptionGroupName
, dbsPercentProgress
, dbsPort
, dbsSnapshotCreateTime
, dbsSnapshotType
, dbsSourceRegion
, dbsStatus
, dbsStorageType
, dbsTdeCredentialArn
, dbsVpcId
-- * DBSecurityGroupMembership
, DBSecurityGroupMembership
, dbsecurityGroupMembership
, dbsgmDBSecurityGroupName
, dbsgmStatus
-- * EC2SecurityGroup
, EC2SecurityGroup
, ec2SecurityGroup
, ecsgEC2SecurityGroupId
, ecsgEC2SecurityGroupName
, ecsgEC2SecurityGroupOwnerId
, ecsgStatus
-- * SourceType
, SourceType (..)
-- * ResourcePendingMaintenanceActions
, ResourcePendingMaintenanceActions
, resourcePendingMaintenanceActions
, rpmaPendingMaintenanceActionDetails
, rpmaResourceIdentifier
-- * DBParameterGroup
, DBParameterGroup
, dbparameterGroup
, dbpgDBParameterGroupFamily
, dbpgDBParameterGroupName
, dbpgDescription
-- * ReservedDBInstancesOffering
, ReservedDBInstancesOffering
, reservedDBInstancesOffering
, rdbioCurrencyCode
, rdbioDBInstanceClass
, rdbioDuration
, rdbioFixedPrice
, rdbioMultiAZ
, rdbioOfferingType
, rdbioProductDescription
, rdbioRecurringCharges
, rdbioReservedDBInstancesOfferingId
, rdbioUsagePrice
-- * ApplyMethod
, ApplyMethod (..)
-- * CharacterSet
, CharacterSet
, characterSet
, csCharacterSetDescription
, csCharacterSetName
-- * Subnet
, Subnet
, subnet
, sSubnetAvailabilityZone
, sSubnetIdentifier
, sSubnetStatus
-- * ReservedDBInstance
, ReservedDBInstance
, reservedDBInstance
, rdbiCurrencyCode
, rdbiDBInstanceClass
, rdbiDBInstanceCount
, rdbiDuration
, rdbiFixedPrice
, rdbiMultiAZ
, rdbiOfferingType
, rdbiProductDescription
, rdbiRecurringCharges
, rdbiReservedDBInstanceId
, rdbiReservedDBInstancesOfferingId
, rdbiStartTime
, rdbiState
, rdbiUsagePrice
-- * EngineDefaults
, EngineDefaults
, engineDefaults
, edDBParameterGroupFamily
, edMarker
, edParameters
-- * DBParameterGroupNameMessage
, DBParameterGroupNameMessage
, dbparameterGroupNameMessage
, dbpgnmDBParameterGroupName
-- * OptionGroupOption
, OptionGroupOption
, optionGroupOption
, ogoDefaultPort
, ogoDescription
, ogoEngineName
, ogoMajorEngineVersion
, ogoMinimumRequiredMinorEngineVersion
, ogoName
, ogoOptionGroupOptionSettings
, ogoOptionsDependedOn
, ogoPermanent
, ogoPersistent
, ogoPortRequired
-- * DBInstance
, DBInstance
, dbinstance
, dbiAllocatedStorage
, dbiAutoMinorVersionUpgrade
, dbiAvailabilityZone
, dbiBackupRetentionPeriod
, dbiCACertificateIdentifier
, dbiCharacterSetName
, dbiDBInstanceClass
, dbiDBInstanceIdentifier
, dbiDBInstanceStatus
, dbiDBName
, dbiDBParameterGroups
, dbiDBSecurityGroups
, dbiDBSubnetGroup
, dbiDbiResourceId
, dbiEndpoint
, dbiEngine
, dbiEngineVersion
, dbiInstanceCreateTime
, dbiIops
, dbiKmsKeyId
, dbiLatestRestorableTime
, dbiLicenseModel
, dbiMasterUsername
, dbiMultiAZ
, dbiOptionGroupMemberships
, dbiPendingModifiedValues
, dbiPreferredBackupWindow
, dbiPreferredMaintenanceWindow
, dbiPubliclyAccessible
, dbiReadReplicaDBInstanceIdentifiers
, dbiReadReplicaSourceDBInstanceIdentifier
, dbiSecondaryAvailabilityZone
, dbiStatusInfos
, dbiStorageEncrypted
, dbiStorageType
, dbiTdeCredentialArn
, dbiVpcSecurityGroups
-- * AccountQuota
, AccountQuota
, accountQuota
, aqAccountQuotaName
, aqMax
, aqUsed
-- * AvailabilityZone
, AvailabilityZone
, availabilityZone
, azName
-- * EventSubscription
, EventSubscription
, eventSubscription
, esCustSubscriptionId
, esCustomerAwsId
, esEnabled
, esEventCategoriesList
, esSnsTopicArn
, esSourceIdsList
, esSourceType
, esStatus
, esSubscriptionCreationTime
-- * DBSubnetGroup
, DBSubnetGroup
, dbsubnetGroup
, dbsg1DBSubnetGroupDescription
, dbsg1DBSubnetGroupName
, dbsg1SubnetGroupStatus
, dbsg1Subnets
, dbsg1VpcId
-- * Certificate
, Certificate
, certificate
, cCertificateIdentifier
, cCertificateType
, cThumbprint
, cValidFrom
, cValidTill
-- * DBInstanceStatusInfo
, DBInstanceStatusInfo
, dbinstanceStatusInfo
, dbisiMessage
, dbisiNormal
, dbisiStatus
, dbisiStatusType
-- * OptionSetting
, OptionSetting
, optionSetting
, osAllowedValues
, osApplyType
, osDataType
, osDefaultValue
, osDescription
, osIsCollection
, osIsModifiable
, osName
, osValue
-- * DescribeDBLogFilesDetails
, DescribeDBLogFilesDetails
, describeDBLogFilesDetails
, ddblfdLastWritten
, ddblfdLogFileName
, ddblfdSize
-- * OrderableDBInstanceOption
, OrderableDBInstanceOption
, orderableDBInstanceOption
, odbioAvailabilityZones
, odbioDBInstanceClass
, odbioEngine
, odbioEngineVersion
, odbioLicenseModel
, odbioMultiAZCapable
, odbioReadReplicaCapable
, odbioStorageType
, odbioSupportsIops
, odbioSupportsStorageEncryption
, odbioVpc
-- * Filter
, Filter
, filter'
, fName
, fValues
-- * RecurringCharge
, RecurringCharge
, recurringCharge
, rcRecurringChargeAmount
, rcRecurringChargeFrequency
-- * Endpoint
, Endpoint
, endpoint
, eAddress
, ePort
-- * OptionConfiguration
, OptionConfiguration
, optionConfiguration
, ocDBSecurityGroupMemberships
, ocOptionName
, ocOptionSettings
, ocPort
, ocVpcSecurityGroupMemberships
-- * Option
, Option
, option
, oDBSecurityGroupMemberships
, oOptionDescription
, oOptionName
, oOptionSettings
, oPermanent
, oPersistent
, oPort
, oVpcSecurityGroupMemberships
-- * IPRange
, IPRange
, iprange
, iprCIDRIP
, iprStatus
-- * OptionGroupMembership
, OptionGroupMembership
, optionGroupMembership
, ogmOptionGroupName
, ogmStatus
-- * EventCategoriesMap
, EventCategoriesMap
, eventCategoriesMap
, ecmEventCategories
, ecmSourceType
-- * PendingModifiedValues
, PendingModifiedValues
, pendingModifiedValues
, pmvAllocatedStorage
, pmvBackupRetentionPeriod
, pmvCACertificateIdentifier
, pmvDBInstanceClass
, pmvDBInstanceIdentifier
, pmvEngineVersion
, pmvIops
, pmvMasterUserPassword
, pmvMultiAZ
, pmvPort
, pmvStorageType
-- * VpcSecurityGroupMembership
, VpcSecurityGroupMembership
, vpcSecurityGroupMembership
, vsgmStatus
, vsgmVpcSecurityGroupId
-- * Parameter
, Parameter
, parameter
, pAllowedValues
, pApplyMethod
, pApplyType
, pDataType
, pDescription
, pIsModifiable
, pMinimumEngineVersion
, pParameterName
, pParameterValue
, pSource
-- * OptionGroupOptionSetting
, OptionGroupOptionSetting
, optionGroupOptionSetting
, ogosAllowedValues
, ogosApplyType
, ogosDefaultValue
, ogosIsModifiable
, ogosSettingDescription
, ogosSettingName
) where
import Network.AWS.Prelude
import Network.AWS.Signing
import qualified GHC.Exts
-- | Version @2014-10-31@ of the Amazon Relational Database Service service.
-- Phantom type tagging all requests/responses for the RDS service; never
-- constructed at runtime.
data RDS

-- Signing algorithm (SigV4), error representation (REST XML errors) and
-- exponential-backoff retry policy for the RDS service endpoint.
instance AWSService RDS where
    type Sg RDS = V4
    type Er RDS = RESTError

    service = service'
      where
        service' :: Service RDS
        service' = Service
            { _svcAbbrev = "RDS"
            , _svcPrefix = "rds"
            , _svcVersion = "2014-10-31"
            , _svcTargetPrefix = Nothing
            , _svcJSONVersion = Nothing
            , _svcHandle = handle
            , _svcRetry = retry
            }

        handle :: Status
               -> Maybe (LazyByteString -> ServiceError RESTError)
        handle = restError statusSuccess service'

        -- Exponential backoff: 50ms base, doubling, up to 5 attempts.
        retry :: Retry RDS
        retry = Exponential
            { _retryBase = 0.05
            , _retryGrowth = 2
            , _retryAttempts = 5
            , _retryCheck = check
            }

        -- Retry on throttling (400 + "Throttling") and on transient
        -- server-side failures (500/509/503); everything else is fatal.
        check :: Status
              -> RESTError
              -> Bool
        check (statusCode -> s) (awsErrorCode -> e)
            | s == 400 && "Throttling" == e = True -- Throttling
            | s == 500 = True -- General Server Error
            | s == 509 = True -- Limit Exceeded
            | s == 503 = True -- Service Unavailable
            | otherwise = False
-- | XML namespace for the @2014-10-31@ revision of the RDS API; applied to
-- all elements parsed/rendered by the instances in this module.
ns :: Text
ns = "http://rds.amazonaws.com/doc/2014-10-31/"
{-# INLINE ns #-}
-- A single maintenance action that is pending for an RDS resource. All
-- fields are optional because the wire format omits absent elements.
data PendingMaintenanceAction = PendingMaintenanceAction
    { _pmaAction :: Maybe Text
    , _pmaAutoAppliedAfterDate :: Maybe ISO8601
    , _pmaCurrentApplyDate :: Maybe ISO8601
    , _pmaDescription :: Maybe Text
    , _pmaForcedApplyDate :: Maybe ISO8601
    , _pmaOptInStatus :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'PendingMaintenanceAction' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pmaAction' @::@ 'Maybe' 'Text'
--
-- * 'pmaAutoAppliedAfterDate' @::@ 'Maybe' 'UTCTime'
--
-- * 'pmaCurrentApplyDate' @::@ 'Maybe' 'UTCTime'
--
-- * 'pmaDescription' @::@ 'Maybe' 'Text'
--
-- * 'pmaForcedApplyDate' @::@ 'Maybe' 'UTCTime'
--
-- * 'pmaOptInStatus' @::@ 'Maybe' 'Text'
--
pendingMaintenanceAction :: PendingMaintenanceAction
pendingMaintenanceAction = PendingMaintenanceAction
    { _pmaAction = Nothing
    , _pmaAutoAppliedAfterDate = Nothing
    , _pmaForcedApplyDate = Nothing
    , _pmaOptInStatus = Nothing
    , _pmaCurrentApplyDate = Nothing
    , _pmaDescription = Nothing
    }

-- | The type of pending maintenance action that is available for the resource.
pmaAction :: Lens' PendingMaintenanceAction (Maybe Text)
pmaAction = lens _pmaAction (\s a -> s { _pmaAction = a })

-- | The date of the maintenance window when the action will be applied. The
-- maintenance action will be applied to the resource during its first
-- maintenance window after this date. If this date is specified, any 'next-maintenance' opt-in requests are ignored.
pmaAutoAppliedAfterDate :: Lens' PendingMaintenanceAction (Maybe UTCTime)
pmaAutoAppliedAfterDate =
    lens _pmaAutoAppliedAfterDate (\s a -> s { _pmaAutoAppliedAfterDate = a })
        . mapping _Time

-- | The effective date when the pending maintenance action will be applied to the
-- resource. This date takes into account opt-in requests received from the 'ApplyPendingMaintenanceAction' API, the 'AutoAppliedAfterDate', and the 'ForcedApplyDate'. This value is blank
-- if an opt-in request has not been received and nothing has been specified as 'AutoAppliedAfterDate' or 'ForcedApplyDate'.
pmaCurrentApplyDate :: Lens' PendingMaintenanceAction (Maybe UTCTime)
pmaCurrentApplyDate =
    lens _pmaCurrentApplyDate (\s a -> s { _pmaCurrentApplyDate = a })
        . mapping _Time

-- | A description providing more detail about the maintenance action.
pmaDescription :: Lens' PendingMaintenanceAction (Maybe Text)
pmaDescription = lens _pmaDescription (\s a -> s { _pmaDescription = a })

-- | The date when the maintenance action will be automatically applied. The
-- maintenance action will be applied to the resource on this date regardless of
-- the maintenance window for the resource. If this date is specified, any 'immediate' opt-in requests are ignored.
pmaForcedApplyDate :: Lens' PendingMaintenanceAction (Maybe UTCTime)
pmaForcedApplyDate =
    lens _pmaForcedApplyDate (\s a -> s { _pmaForcedApplyDate = a })
        . mapping _Time

-- | Indicates the type of opt-in request that has been received for the resource.
pmaOptInStatus :: Lens' PendingMaintenanceAction (Maybe Text)
pmaOptInStatus = lens _pmaOptInStatus (\s a -> s { _pmaOptInStatus = a })

-- NOTE: the applicative chain must stay in the positional order of the
-- record constructor's fields (alphabetical by element name here).
instance FromXML PendingMaintenanceAction where
    parseXML x = PendingMaintenanceAction
        <$> x .@? "Action"
        <*> x .@? "AutoAppliedAfterDate"
        <*> x .@? "CurrentApplyDate"
        <*> x .@? "Description"
        <*> x .@? "ForcedApplyDate"
        <*> x .@? "OptInStatus"

instance ToQuery PendingMaintenanceAction where
    toQuery PendingMaintenanceAction{..} = mconcat
        [ "Action" =? _pmaAction
        , "AutoAppliedAfterDate" =? _pmaAutoAppliedAfterDate
        , "CurrentApplyDate" =? _pmaCurrentApplyDate
        , "Description" =? _pmaDescription
        , "ForcedApplyDate" =? _pmaForcedApplyDate
        , "OptInStatus" =? _pmaOptInStatus
        ]
-- An RDS option group: a named collection of engine options that can be
-- attached to DB instances. '_ogOptions' is a wrapped \"member\"-tagged list.
data OptionGroup = OptionGroup
    { _ogAllowsVpcAndNonVpcInstanceMemberships :: Maybe Bool
    , _ogEngineName :: Maybe Text
    , _ogMajorEngineVersion :: Maybe Text
    , _ogOptionGroupDescription :: Maybe Text
    , _ogOptionGroupName :: Maybe Text
    , _ogOptions :: List "member" Option
    , _ogVpcId :: Maybe Text
    } deriving (Eq, Read, Show)

-- | 'OptionGroup' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ogAllowsVpcAndNonVpcInstanceMemberships' @::@ 'Maybe' 'Bool'
--
-- * 'ogEngineName' @::@ 'Maybe' 'Text'
--
-- * 'ogMajorEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'ogOptionGroupDescription' @::@ 'Maybe' 'Text'
--
-- * 'ogOptionGroupName' @::@ 'Maybe' 'Text'
--
-- * 'ogOptions' @::@ ['Option']
--
-- * 'ogVpcId' @::@ 'Maybe' 'Text'
--
optionGroup :: OptionGroup
optionGroup = OptionGroup
    { _ogOptionGroupName = Nothing
    , _ogOptionGroupDescription = Nothing
    , _ogEngineName = Nothing
    , _ogMajorEngineVersion = Nothing
    , _ogOptions = mempty
    , _ogAllowsVpcAndNonVpcInstanceMemberships = Nothing
    , _ogVpcId = Nothing
    }

-- | Indicates whether this option group can be applied to both VPC and non-VPC
-- instances. The value 'true' indicates the option group can be applied to both
-- VPC and non-VPC instances.
ogAllowsVpcAndNonVpcInstanceMemberships :: Lens' OptionGroup (Maybe Bool)
ogAllowsVpcAndNonVpcInstanceMemberships =
    lens _ogAllowsVpcAndNonVpcInstanceMemberships
        (\s a -> s { _ogAllowsVpcAndNonVpcInstanceMemberships = a })

-- | Engine name that this option group can be applied to.
ogEngineName :: Lens' OptionGroup (Maybe Text)
ogEngineName = lens _ogEngineName (\s a -> s { _ogEngineName = a })

-- | Indicates the major engine version associated with this option group.
ogMajorEngineVersion :: Lens' OptionGroup (Maybe Text)
ogMajorEngineVersion =
    lens _ogMajorEngineVersion (\s a -> s { _ogMajorEngineVersion = a })

-- | Provides a description of the option group.
ogOptionGroupDescription :: Lens' OptionGroup (Maybe Text)
ogOptionGroupDescription =
    lens _ogOptionGroupDescription
        (\s a -> s { _ogOptionGroupDescription = a })

-- | Specifies the name of the option group.
ogOptionGroupName :: Lens' OptionGroup (Maybe Text)
ogOptionGroupName =
    lens _ogOptionGroupName (\s a -> s { _ogOptionGroupName = a })

-- | Indicates what options are available in the option group.
ogOptions :: Lens' OptionGroup [Option]
ogOptions = lens _ogOptions (\s a -> s { _ogOptions = a }) . _List

-- | If AllowsVpcAndNonVpcInstanceMemberships is 'false', this field is blank. If AllowsVpcAndNonVpcInstanceMemberships
-- is 'true' and this field is blank, then this option group can be applied to
-- both VPC and non-VPC instances. If this field contains a value, then this
-- option group can only be applied to instances that are in the VPC indicated
-- by this field.
ogVpcId :: Lens' OptionGroup (Maybe Text)
ogVpcId = lens _ogVpcId (\s a -> s { _ogVpcId = a })

-- NOTE: the applicative chain must stay in the positional order of the
-- record constructor's fields; the list field defaults to 'mempty'.
instance FromXML OptionGroup where
    parseXML x = OptionGroup
        <$> x .@? "AllowsVpcAndNonVpcInstanceMemberships"
        <*> x .@? "EngineName"
        <*> x .@? "MajorEngineVersion"
        <*> x .@? "OptionGroupDescription"
        <*> x .@? "OptionGroupName"
        <*> x .@? "Options" .!@ mempty
        <*> x .@? "VpcId"

instance ToQuery OptionGroup where
    toQuery OptionGroup{..} = mconcat
        [ "AllowsVpcAndNonVpcInstanceMemberships" =? _ogAllowsVpcAndNonVpcInstanceMemberships
        , "EngineName" =? _ogEngineName
        , "MajorEngineVersion" =? _ogMajorEngineVersion
        , "OptionGroupDescription" =? _ogOptionGroupDescription
        , "OptionGroupName" =? _ogOptionGroupName
        , "Options" =? _ogOptions
        , "VpcId" =? _ogVpcId
        ]
-- Status of a DB parameter group attached to a DB instance.
data DBParameterGroupStatus = DBParameterGroupStatus
    { _dbpgsDBParameterGroupName :: Maybe Text
    , _dbpgsParameterApplyStatus :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'DBParameterGroupStatus' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbpgsDBParameterGroupName' @::@ 'Maybe' 'Text'
--
-- * 'dbpgsParameterApplyStatus' @::@ 'Maybe' 'Text'
--
dbparameterGroupStatus :: DBParameterGroupStatus
dbparameterGroupStatus = DBParameterGroupStatus
    { _dbpgsDBParameterGroupName = Nothing
    , _dbpgsParameterApplyStatus = Nothing
    }

-- | The name of the DP parameter group.
dbpgsDBParameterGroupName :: Lens' DBParameterGroupStatus (Maybe Text)
dbpgsDBParameterGroupName =
    lens _dbpgsDBParameterGroupName
        (\s a -> s { _dbpgsDBParameterGroupName = a })

-- | The status of parameter updates.
dbpgsParameterApplyStatus :: Lens' DBParameterGroupStatus (Maybe Text)
dbpgsParameterApplyStatus =
    lens _dbpgsParameterApplyStatus
        (\s a -> s { _dbpgsParameterApplyStatus = a })

-- NOTE: parse order must match the record constructor's field order.
instance FromXML DBParameterGroupStatus where
    parseXML x = DBParameterGroupStatus
        <$> x .@? "DBParameterGroupName"
        <*> x .@? "ParameterApplyStatus"

instance ToQuery DBParameterGroupStatus where
    toQuery DBParameterGroupStatus{..} = mconcat
        [ "DBParameterGroupName" =? _dbpgsDBParameterGroupName
        , "ParameterApplyStatus" =? _dbpgsParameterApplyStatus
        ]
-- An RDS event (e.g. a DB instance state change) with its source and
-- category metadata. '_eEventCategories' is a wrapped \"member\" list.
data Event = Event
    { _eDate :: Maybe ISO8601
    , _eEventCategories :: List "member" Text
    , _eMessage :: Maybe Text
    , _eSourceIdentifier :: Maybe Text
    , _eSourceType :: Maybe SourceType
    } deriving (Eq, Read, Show)

-- | 'Event' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'eDate' @::@ 'Maybe' 'UTCTime'
--
-- * 'eEventCategories' @::@ ['Text']
--
-- * 'eMessage' @::@ 'Maybe' 'Text'
--
-- * 'eSourceIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'eSourceType' @::@ 'Maybe' 'SourceType'
--
event :: Event
event = Event
    { _eSourceIdentifier = Nothing
    , _eSourceType = Nothing
    , _eMessage = Nothing
    , _eEventCategories = mempty
    , _eDate = Nothing
    }

-- | Specifies the date and time of the event.
eDate :: Lens' Event (Maybe UTCTime)
eDate = lens _eDate (\s a -> s { _eDate = a }) . mapping _Time

-- | Specifies the category for the event.
eEventCategories :: Lens' Event [Text]
eEventCategories = lens _eEventCategories (\s a -> s { _eEventCategories = a }) . _List

-- | Provides the text of this event.
eMessage :: Lens' Event (Maybe Text)
eMessage = lens _eMessage (\s a -> s { _eMessage = a })

-- | Provides the identifier for the source of the event.
eSourceIdentifier :: Lens' Event (Maybe Text)
eSourceIdentifier =
    lens _eSourceIdentifier (\s a -> s { _eSourceIdentifier = a })

-- | Specifies the source type for this event.
eSourceType :: Lens' Event (Maybe SourceType)
eSourceType = lens _eSourceType (\s a -> s { _eSourceType = a })

-- NOTE: parse order must match the record constructor's field order;
-- the list field defaults to 'mempty'.
instance FromXML Event where
    parseXML x = Event
        <$> x .@? "Date"
        <*> x .@? "EventCategories" .!@ mempty
        <*> x .@? "Message"
        <*> x .@? "SourceIdentifier"
        <*> x .@? "SourceType"

instance ToQuery Event where
    toQuery Event{..} = mconcat
        [ "Date" =? _eDate
        , "EventCategories" =? _eEventCategories
        , "Message" =? _eMessage
        , "SourceIdentifier" =? _eSourceIdentifier
        , "SourceType" =? _eSourceType
        ]
-- An RDS DB security group with its EC2 security group and IP range
-- ingress rules; both list fields are wrapped \"member\" lists.
data DBSecurityGroup = DBSecurityGroup
    { _dbsgDBSecurityGroupDescription :: Maybe Text
    , _dbsgDBSecurityGroupName :: Maybe Text
    , _dbsgEC2SecurityGroups :: List "member" EC2SecurityGroup
    , _dbsgIPRanges :: List "member" IPRange
    , _dbsgOwnerId :: Maybe Text
    , _dbsgVpcId :: Maybe Text
    } deriving (Eq, Read, Show)

-- | 'DBSecurityGroup' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbsgDBSecurityGroupDescription' @::@ 'Maybe' 'Text'
--
-- * 'dbsgDBSecurityGroupName' @::@ 'Maybe' 'Text'
--
-- * 'dbsgEC2SecurityGroups' @::@ ['EC2SecurityGroup']
--
-- * 'dbsgIPRanges' @::@ ['IPRange']
--
-- * 'dbsgOwnerId' @::@ 'Maybe' 'Text'
--
-- * 'dbsgVpcId' @::@ 'Maybe' 'Text'
--
dbsecurityGroup :: DBSecurityGroup
dbsecurityGroup = DBSecurityGroup
    { _dbsgOwnerId = Nothing
    , _dbsgDBSecurityGroupName = Nothing
    , _dbsgDBSecurityGroupDescription = Nothing
    , _dbsgVpcId = Nothing
    , _dbsgEC2SecurityGroups = mempty
    , _dbsgIPRanges = mempty
    }

-- | Provides the description of the DB security group.
dbsgDBSecurityGroupDescription :: Lens' DBSecurityGroup (Maybe Text)
dbsgDBSecurityGroupDescription =
    lens _dbsgDBSecurityGroupDescription
        (\s a -> s { _dbsgDBSecurityGroupDescription = a })

-- | Specifies the name of the DB security group.
dbsgDBSecurityGroupName :: Lens' DBSecurityGroup (Maybe Text)
dbsgDBSecurityGroupName =
    lens _dbsgDBSecurityGroupName (\s a -> s { _dbsgDBSecurityGroupName = a })

-- | Contains a list of 'EC2SecurityGroup' elements.
dbsgEC2SecurityGroups :: Lens' DBSecurityGroup [EC2SecurityGroup]
dbsgEC2SecurityGroups =
    lens _dbsgEC2SecurityGroups (\s a -> s { _dbsgEC2SecurityGroups = a })
        . _List

-- | Contains a list of 'IPRange' elements.
dbsgIPRanges :: Lens' DBSecurityGroup [IPRange]
dbsgIPRanges = lens _dbsgIPRanges (\s a -> s { _dbsgIPRanges = a }) . _List

-- | Provides the AWS ID of the owner of a specific DB security group.
dbsgOwnerId :: Lens' DBSecurityGroup (Maybe Text)
dbsgOwnerId = lens _dbsgOwnerId (\s a -> s { _dbsgOwnerId = a })

-- | Provides the VpcId of the DB security group.
dbsgVpcId :: Lens' DBSecurityGroup (Maybe Text)
dbsgVpcId = lens _dbsgVpcId (\s a -> s { _dbsgVpcId = a })

-- NOTE: parse order must match the record constructor's field order;
-- list fields default to 'mempty'.
instance FromXML DBSecurityGroup where
    parseXML x = DBSecurityGroup
        <$> x .@? "DBSecurityGroupDescription"
        <*> x .@? "DBSecurityGroupName"
        <*> x .@? "EC2SecurityGroups" .!@ mempty
        <*> x .@? "IPRanges" .!@ mempty
        <*> x .@? "OwnerId"
        <*> x .@? "VpcId"

instance ToQuery DBSecurityGroup where
    toQuery DBSecurityGroup{..} = mconcat
        [ "DBSecurityGroupDescription" =? _dbsgDBSecurityGroupDescription
        , "DBSecurityGroupName" =? _dbsgDBSecurityGroupName
        , "EC2SecurityGroups" =? _dbsgEC2SecurityGroups
        , "IPRanges" =? _dbsgIPRanges
        , "OwnerId" =? _dbsgOwnerId
        , "VpcId" =? _dbsgVpcId
        ]
-- A key\/value metadata tag attached to an RDS resource.
data Tag = Tag
    { _tagKey :: Maybe Text
    , _tagValue :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'Tag' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'tagKey' @::@ 'Maybe' 'Text'
--
-- * 'tagValue' @::@ 'Maybe' 'Text'
--
tag :: Tag
tag = Tag
    { _tagKey = Nothing
    , _tagValue = Nothing
    }

-- | A key is the required name of the tag. The string value can be from 1 to 128
-- Unicode characters in length and cannot be prefixed with "aws:" or "rds:".
-- The string may only contain only the set of Unicode letters, digits,
-- white-space, '_', '.', '/', '=', '+', '-' (Java regex:
-- "^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$").
tagKey :: Lens' Tag (Maybe Text)
tagKey = lens _tagKey (\s a -> s { _tagKey = a })

-- | A value is the optional value of the tag. The string value can be from 1 to
-- 256 Unicode characters in length and cannot be prefixed with "aws:" or
-- "rds:". The string may only contain only the set of Unicode letters, digits,
-- white-space, '_', '.', '/', '=', '+', '-' (Java regex:
-- "^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$").
tagValue :: Lens' Tag (Maybe Text)
tagValue = lens _tagValue (\s a -> s { _tagValue = a })

-- NOTE: parse order must match the record constructor's field order.
instance FromXML Tag where
    parseXML x = Tag
        <$> x .@? "Key"
        <*> x .@? "Value"

instance ToQuery Tag where
    toQuery Tag{..} = mconcat
        [ "Key" =? _tagKey
        , "Value" =? _tagValue
        ]
-- A database engine version supported by RDS, including its parameter
-- group family and character-set support.
data DBEngineVersion = DBEngineVersion
    { _dbevDBEngineDescription :: Maybe Text
    , _dbevDBEngineVersionDescription :: Maybe Text
    , _dbevDBParameterGroupFamily :: Maybe Text
    , _dbevDefaultCharacterSet :: Maybe CharacterSet
    , _dbevEngine :: Maybe Text
    , _dbevEngineVersion :: Maybe Text
    , _dbevSupportedCharacterSets :: List "member" CharacterSet
    } deriving (Eq, Read, Show)

-- | 'DBEngineVersion' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbevDBEngineDescription' @::@ 'Maybe' 'Text'
--
-- * 'dbevDBEngineVersionDescription' @::@ 'Maybe' 'Text'
--
-- * 'dbevDBParameterGroupFamily' @::@ 'Maybe' 'Text'
--
-- * 'dbevDefaultCharacterSet' @::@ 'Maybe' 'CharacterSet'
--
-- * 'dbevEngine' @::@ 'Maybe' 'Text'
--
-- * 'dbevEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'dbevSupportedCharacterSets' @::@ ['CharacterSet']
--
dbengineVersion :: DBEngineVersion
dbengineVersion = DBEngineVersion
    { _dbevEngine = Nothing
    , _dbevEngineVersion = Nothing
    , _dbevDBParameterGroupFamily = Nothing
    , _dbevDBEngineDescription = Nothing
    , _dbevDBEngineVersionDescription = Nothing
    , _dbevDefaultCharacterSet = Nothing
    , _dbevSupportedCharacterSets = mempty
    }

-- | The description of the database engine.
dbevDBEngineDescription :: Lens' DBEngineVersion (Maybe Text)
dbevDBEngineDescription =
    lens _dbevDBEngineDescription (\s a -> s { _dbevDBEngineDescription = a })

-- | The description of the database engine version.
dbevDBEngineVersionDescription :: Lens' DBEngineVersion (Maybe Text)
dbevDBEngineVersionDescription =
    lens _dbevDBEngineVersionDescription
        (\s a -> s { _dbevDBEngineVersionDescription = a })

-- | The name of the DB parameter group family for the database engine.
dbevDBParameterGroupFamily :: Lens' DBEngineVersion (Maybe Text)
dbevDBParameterGroupFamily =
    lens _dbevDBParameterGroupFamily
        (\s a -> s { _dbevDBParameterGroupFamily = a })

-- | The default character set for new instances of this engine version, if the 'CharacterSetName' parameter of the CreateDBInstance API is not specified.
dbevDefaultCharacterSet :: Lens' DBEngineVersion (Maybe CharacterSet)
dbevDefaultCharacterSet =
    lens _dbevDefaultCharacterSet (\s a -> s { _dbevDefaultCharacterSet = a })

-- | The name of the database engine.
dbevEngine :: Lens' DBEngineVersion (Maybe Text)
dbevEngine = lens _dbevEngine (\s a -> s { _dbevEngine = a })

-- | The version number of the database engine.
dbevEngineVersion :: Lens' DBEngineVersion (Maybe Text)
dbevEngineVersion =
    lens _dbevEngineVersion (\s a -> s { _dbevEngineVersion = a })

-- | A list of the character sets supported by this engine for the 'CharacterSetName' parameter of the CreateDBInstance API.
dbevSupportedCharacterSets :: Lens' DBEngineVersion [CharacterSet]
dbevSupportedCharacterSets =
    lens _dbevSupportedCharacterSets
        (\s a -> s { _dbevSupportedCharacterSets = a })
        . _List

-- NOTE: parse order must match the record constructor's field order;
-- the list field defaults to 'mempty'.
instance FromXML DBEngineVersion where
    parseXML x = DBEngineVersion
        <$> x .@? "DBEngineDescription"
        <*> x .@? "DBEngineVersionDescription"
        <*> x .@? "DBParameterGroupFamily"
        <*> x .@? "DefaultCharacterSet"
        <*> x .@? "Engine"
        <*> x .@? "EngineVersion"
        <*> x .@? "SupportedCharacterSets" .!@ mempty

instance ToQuery DBEngineVersion where
    toQuery DBEngineVersion{..} = mconcat
        [ "DBEngineDescription" =? _dbevDBEngineDescription
        , "DBEngineVersionDescription" =? _dbevDBEngineVersionDescription
        , "DBParameterGroupFamily" =? _dbevDBParameterGroupFamily
        , "DefaultCharacterSet" =? _dbevDefaultCharacterSet
        , "Engine" =? _dbevEngine
        , "EngineVersion" =? _dbevEngineVersion
        , "SupportedCharacterSets" =? _dbevSupportedCharacterSets
        ]
data DBSnapshot = DBSnapshot
{ _dbsAllocatedStorage :: Maybe Int
, _dbsAvailabilityZone :: Maybe Text
, _dbsDBInstanceIdentifier :: Maybe Text
, _dbsDBSnapshotIdentifier :: Maybe Text
, _dbsEncrypted :: Maybe Bool
, _dbsEngine :: Maybe Text
, _dbsEngineVersion :: Maybe Text
, _dbsInstanceCreateTime :: Maybe ISO8601
, _dbsIops :: Maybe Int
, _dbsKmsKeyId :: Maybe Text
, _dbsLicenseModel :: Maybe Text
, _dbsMasterUsername :: Maybe Text
, _dbsOptionGroupName :: Maybe Text
, _dbsPercentProgress :: Maybe Int
, _dbsPort :: Maybe Int
, _dbsSnapshotCreateTime :: Maybe ISO8601
, _dbsSnapshotType :: Maybe Text
, _dbsSourceRegion :: Maybe Text
, _dbsStatus :: Maybe Text
, _dbsStorageType :: Maybe Text
, _dbsTdeCredentialArn :: Maybe Text
, _dbsVpcId :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'DBSnapshot' constructor.  Every field starts out as 'Nothing'; use the
-- corresponding lenses to populate a value.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbsAllocatedStorage' @::@ 'Maybe' 'Int'
--
-- * 'dbsAvailabilityZone' @::@ 'Maybe' 'Text'
--
-- * 'dbsDBInstanceIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'dbsDBSnapshotIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'dbsEncrypted' @::@ 'Maybe' 'Bool'
--
-- * 'dbsEngine' @::@ 'Maybe' 'Text'
--
-- * 'dbsEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'dbsInstanceCreateTime' @::@ 'Maybe' 'UTCTime'
--
-- * 'dbsIops' @::@ 'Maybe' 'Int'
--
-- * 'dbsKmsKeyId' @::@ 'Maybe' 'Text'
--
-- * 'dbsLicenseModel' @::@ 'Maybe' 'Text'
--
-- * 'dbsMasterUsername' @::@ 'Maybe' 'Text'
--
-- * 'dbsOptionGroupName' @::@ 'Maybe' 'Text'
--
-- * 'dbsPercentProgress' @::@ 'Maybe' 'Int'
--
-- * 'dbsPort' @::@ 'Maybe' 'Int'
--
-- * 'dbsSnapshotCreateTime' @::@ 'Maybe' 'UTCTime'
--
-- * 'dbsSnapshotType' @::@ 'Maybe' 'Text'
--
-- * 'dbsSourceRegion' @::@ 'Maybe' 'Text'
--
-- * 'dbsStatus' @::@ 'Maybe' 'Text'
--
-- * 'dbsStorageType' @::@ 'Maybe' 'Text'
--
-- * 'dbsTdeCredentialArn' @::@ 'Maybe' 'Text'
--
-- * 'dbsVpcId' @::@ 'Maybe' 'Text'
--
dbsnapshot :: DBSnapshot
dbsnapshot = DBSnapshot
    -- Initialisers listed in record-declaration order for readability;
    -- record-construction order has no semantic effect.
    { _dbsAllocatedStorage     = Nothing
    , _dbsAvailabilityZone     = Nothing
    , _dbsDBInstanceIdentifier = Nothing
    , _dbsDBSnapshotIdentifier = Nothing
    , _dbsEncrypted            = Nothing
    , _dbsEngine               = Nothing
    , _dbsEngineVersion        = Nothing
    , _dbsInstanceCreateTime   = Nothing
    , _dbsIops                 = Nothing
    , _dbsKmsKeyId             = Nothing
    , _dbsLicenseModel         = Nothing
    , _dbsMasterUsername       = Nothing
    , _dbsOptionGroupName      = Nothing
    , _dbsPercentProgress      = Nothing
    , _dbsPort                 = Nothing
    , _dbsSnapshotCreateTime   = Nothing
    , _dbsSnapshotType         = Nothing
    , _dbsSourceRegion         = Nothing
    , _dbsStatus               = Nothing
    , _dbsStorageType          = Nothing
    , _dbsTdeCredentialArn     = Nothing
    , _dbsVpcId                = Nothing
    }
-- | The allocated storage size, in gigabytes (GB).
dbsAllocatedStorage :: Lens' DBSnapshot (Maybe Int)
dbsAllocatedStorage =
    lens _dbsAllocatedStorage (\d v -> d { _dbsAllocatedStorage = v })

-- | The Availability Zone the DB instance was located in at the time the
-- snapshot was taken.
dbsAvailabilityZone :: Lens' DBSnapshot (Maybe Text)
dbsAvailabilityZone =
    lens _dbsAvailabilityZone (\d v -> d { _dbsAvailabilityZone = v })

-- | The identifier of the DB instance this DB snapshot was created from.
dbsDBInstanceIdentifier :: Lens' DBSnapshot (Maybe Text)
dbsDBInstanceIdentifier =
    lens _dbsDBInstanceIdentifier (\d v -> d { _dbsDBInstanceIdentifier = v })

-- | The identifier for the DB snapshot.
dbsDBSnapshotIdentifier :: Lens' DBSnapshot (Maybe Text)
dbsDBSnapshotIdentifier =
    lens _dbsDBSnapshotIdentifier (\d v -> d { _dbsDBSnapshotIdentifier = v })

-- | Whether the DB snapshot is encrypted.
dbsEncrypted :: Lens' DBSnapshot (Maybe Bool)
dbsEncrypted = lens _dbsEncrypted (\d v -> d { _dbsEncrypted = v })

-- | The name of the database engine.
dbsEngine :: Lens' DBSnapshot (Maybe Text)
dbsEngine = lens _dbsEngine (\d v -> d { _dbsEngine = v })

-- | The version of the database engine.
dbsEngineVersion :: Lens' DBSnapshot (Maybe Text)
dbsEngineVersion = lens _dbsEngineVersion (\d v -> d { _dbsEngineVersion = v })

-- | The time (UTC) when the snapshot was taken.  The underlying field is
-- stored as 'ISO8601'; 'mapping' '_Time' exposes it as 'UTCTime'.
dbsInstanceCreateTime :: Lens' DBSnapshot (Maybe UTCTime)
dbsInstanceCreateTime =
    lens _dbsInstanceCreateTime (\d v -> d { _dbsInstanceCreateTime = v })
        . mapping _Time

-- | The Provisioned IOPS (I/O operations per second) value of the DB
-- instance at the time of the snapshot.
dbsIops :: Lens' DBSnapshot (Maybe Int)
dbsIops = lens _dbsIops (\d v -> d { _dbsIops = v })

-- | If 'Encrypted' is true, the KMS key identifier for the encrypted DB snapshot.
dbsKmsKeyId :: Lens' DBSnapshot (Maybe Text)
dbsKmsKeyId = lens _dbsKmsKeyId (\d v -> d { _dbsKmsKeyId = v })

-- | License model information for the restored DB instance.
dbsLicenseModel :: Lens' DBSnapshot (Maybe Text)
dbsLicenseModel = lens _dbsLicenseModel (\d v -> d { _dbsLicenseModel = v })

-- | The master username for the DB snapshot.
dbsMasterUsername :: Lens' DBSnapshot (Maybe Text)
dbsMasterUsername =
    lens _dbsMasterUsername (\d v -> d { _dbsMasterUsername = v })

-- | The option group name for the DB snapshot.
dbsOptionGroupName :: Lens' DBSnapshot (Maybe Text)
dbsOptionGroupName =
    lens _dbsOptionGroupName (\d v -> d { _dbsOptionGroupName = v })

-- | The percentage of the estimated data that has been transferred.
dbsPercentProgress :: Lens' DBSnapshot (Maybe Int)
dbsPercentProgress =
    lens _dbsPercentProgress (\d v -> d { _dbsPercentProgress = v })

-- | The port the database engine was listening on at the time of the
-- snapshot.
dbsPort :: Lens' DBSnapshot (Maybe Int)
dbsPort = lens _dbsPort (\d v -> d { _dbsPort = v })

-- | The time (UTC) when the snapshot was taken.  Stored as 'ISO8601' and
-- exposed as 'UTCTime' via 'mapping' '_Time'.
dbsSnapshotCreateTime :: Lens' DBSnapshot (Maybe UTCTime)
dbsSnapshotCreateTime =
    lens _dbsSnapshotCreateTime (\d v -> d { _dbsSnapshotCreateTime = v })
        . mapping _Time

-- | The type of the DB snapshot.
dbsSnapshotType :: Lens' DBSnapshot (Maybe Text)
dbsSnapshotType = lens _dbsSnapshotType (\d v -> d { _dbsSnapshotType = v })

-- | The region that the DB snapshot was created in or copied from.
dbsSourceRegion :: Lens' DBSnapshot (Maybe Text)
dbsSourceRegion = lens _dbsSourceRegion (\d v -> d { _dbsSourceRegion = v })

-- | The status of this DB snapshot.
dbsStatus :: Lens' DBSnapshot (Maybe Text)
dbsStatus = lens _dbsStatus (\d v -> d { _dbsStatus = v })

-- | The storage type associated with the DB snapshot.
dbsStorageType :: Lens' DBSnapshot (Maybe Text)
dbsStorageType = lens _dbsStorageType (\d v -> d { _dbsStorageType = v })

-- | The ARN from the Key Store with which to associate the instance for TDE
-- encryption.
dbsTdeCredentialArn :: Lens' DBSnapshot (Maybe Text)
dbsTdeCredentialArn =
    lens _dbsTdeCredentialArn (\d v -> d { _dbsTdeCredentialArn = v })

-- | The VPC ID associated with the DB snapshot.
dbsVpcId :: Lens' DBSnapshot (Maybe Text)
dbsVpcId = lens _dbsVpcId (\d v -> d { _dbsVpcId = v })
instance FromXML DBSnapshot where
    -- Element parsers are applied positionally via '<$>'/'<*>': the order
    -- of the '.@?' lookups below must exactly match the field order of the
    -- 'DBSnapshot' record declaration.
    parseXML x = DBSnapshot
        <$> x .@? "AllocatedStorage"
        <*> x .@? "AvailabilityZone"
        <*> x .@? "DBInstanceIdentifier"
        <*> x .@? "DBSnapshotIdentifier"
        <*> x .@? "Encrypted"
        <*> x .@? "Engine"
        <*> x .@? "EngineVersion"
        <*> x .@? "InstanceCreateTime"
        <*> x .@? "Iops"
        <*> x .@? "KmsKeyId"
        <*> x .@? "LicenseModel"
        <*> x .@? "MasterUsername"
        <*> x .@? "OptionGroupName"
        <*> x .@? "PercentProgress"
        <*> x .@? "Port"
        <*> x .@? "SnapshotCreateTime"
        <*> x .@? "SnapshotType"
        <*> x .@? "SourceRegion"
        <*> x .@? "Status"
        <*> x .@? "StorageType"
        <*> x .@? "TdeCredentialArn"
        <*> x .@? "VpcId"
instance ToQuery DBSnapshot where
    -- Serialises each field under the same key names the 'FromXML'
    -- instance reads; the list order determines query-string order, so it
    -- is kept stable.
    toQuery DBSnapshot{..} = mconcat
        [ "AllocatedStorage"     =? _dbsAllocatedStorage
        , "AvailabilityZone"     =? _dbsAvailabilityZone
        , "DBInstanceIdentifier" =? _dbsDBInstanceIdentifier
        , "DBSnapshotIdentifier" =? _dbsDBSnapshotIdentifier
        , "Encrypted"            =? _dbsEncrypted
        , "Engine"               =? _dbsEngine
        , "EngineVersion"        =? _dbsEngineVersion
        , "InstanceCreateTime"   =? _dbsInstanceCreateTime
        , "Iops"                 =? _dbsIops
        , "KmsKeyId"             =? _dbsKmsKeyId
        , "LicenseModel"         =? _dbsLicenseModel
        , "MasterUsername"       =? _dbsMasterUsername
        , "OptionGroupName"      =? _dbsOptionGroupName
        , "PercentProgress"      =? _dbsPercentProgress
        , "Port"                 =? _dbsPort
        , "SnapshotCreateTime"   =? _dbsSnapshotCreateTime
        , "SnapshotType"         =? _dbsSnapshotType
        , "SourceRegion"         =? _dbsSourceRegion
        , "Status"               =? _dbsStatus
        , "StorageType"          =? _dbsStorageType
        , "TdeCredentialArn"     =? _dbsTdeCredentialArn
        , "VpcId"                =? _dbsVpcId
        ]
-- | A DB security group membership entry: a group name and its status.
data DBSecurityGroupMembership = DBSecurityGroupMembership
    { _dbsgmDBSecurityGroupName :: Maybe Text
    , _dbsgmStatus              :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'DBSecurityGroupMembership' constructor.  Both fields default to
-- 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbsgmDBSecurityGroupName' @::@ 'Maybe' 'Text'
--
-- * 'dbsgmStatus' @::@ 'Maybe' 'Text'
--
dbsecurityGroupMembership :: DBSecurityGroupMembership
dbsecurityGroupMembership = DBSecurityGroupMembership
    { _dbsgmDBSecurityGroupName = Nothing
    , _dbsgmStatus              = Nothing
    }

-- | The name of the DB security group.
dbsgmDBSecurityGroupName :: Lens' DBSecurityGroupMembership (Maybe Text)
dbsgmDBSecurityGroupName =
    lens _dbsgmDBSecurityGroupName (\m v -> m { _dbsgmDBSecurityGroupName = v })

-- | The status of the DB security group.
dbsgmStatus :: Lens' DBSecurityGroupMembership (Maybe Text)
dbsgmStatus = lens _dbsgmStatus (\m v -> m { _dbsgmStatus = v })

-- Positional parse: keep the '.@?' order in sync with the record fields.
instance FromXML DBSecurityGroupMembership where
    parseXML x = DBSecurityGroupMembership
        <$> x .@? "DBSecurityGroupName"
        <*> x .@? "Status"

instance ToQuery DBSecurityGroupMembership where
    toQuery DBSecurityGroupMembership{..} = mconcat
        [ "DBSecurityGroupName" =? _dbsgmDBSecurityGroupName
        , "Status"              =? _dbsgmStatus
        ]
-- | An EC2 security group reference: id, name, owner account id and status.
data EC2SecurityGroup = EC2SecurityGroup
    { _ecsgEC2SecurityGroupId      :: Maybe Text
    , _ecsgEC2SecurityGroupName    :: Maybe Text
    , _ecsgEC2SecurityGroupOwnerId :: Maybe Text
    , _ecsgStatus                  :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'EC2SecurityGroup' constructor.  All fields default to 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ecsgEC2SecurityGroupId' @::@ 'Maybe' 'Text'
--
-- * 'ecsgEC2SecurityGroupName' @::@ 'Maybe' 'Text'
--
-- * 'ecsgEC2SecurityGroupOwnerId' @::@ 'Maybe' 'Text'
--
-- * 'ecsgStatus' @::@ 'Maybe' 'Text'
--
ec2SecurityGroup :: EC2SecurityGroup
ec2SecurityGroup = EC2SecurityGroup
    -- Initialisers listed in record-declaration order; construction order
    -- has no semantic effect.
    { _ecsgEC2SecurityGroupId      = Nothing
    , _ecsgEC2SecurityGroupName    = Nothing
    , _ecsgEC2SecurityGroupOwnerId = Nothing
    , _ecsgStatus                  = Nothing
    }

-- | The id of the EC2 security group.
ecsgEC2SecurityGroupId :: Lens' EC2SecurityGroup (Maybe Text)
ecsgEC2SecurityGroupId =
    lens _ecsgEC2SecurityGroupId (\g v -> g { _ecsgEC2SecurityGroupId = v })

-- | The name of the EC2 security group.
ecsgEC2SecurityGroupName :: Lens' EC2SecurityGroup (Maybe Text)
ecsgEC2SecurityGroupName =
    lens _ecsgEC2SecurityGroupName (\g v -> g { _ecsgEC2SecurityGroupName = v })

-- | The AWS ID of the owner of the EC2 security group specified in the
-- 'EC2SecurityGroupName' field.
ecsgEC2SecurityGroupOwnerId :: Lens' EC2SecurityGroup (Maybe Text)
ecsgEC2SecurityGroupOwnerId =
    lens _ecsgEC2SecurityGroupOwnerId
        (\g v -> g { _ecsgEC2SecurityGroupOwnerId = v })

-- | The status of the EC2 security group.  Status can be "authorizing",
-- "authorized", "revoking", and "revoked".
ecsgStatus :: Lens' EC2SecurityGroup (Maybe Text)
ecsgStatus = lens _ecsgStatus (\g v -> g { _ecsgStatus = v })

-- Positional parse: keep the '.@?' order in sync with the record fields.
instance FromXML EC2SecurityGroup where
    parseXML x = EC2SecurityGroup
        <$> x .@? "EC2SecurityGroupId"
        <*> x .@? "EC2SecurityGroupName"
        <*> x .@? "EC2SecurityGroupOwnerId"
        <*> x .@? "Status"

instance ToQuery EC2SecurityGroup where
    toQuery EC2SecurityGroup{..} = mconcat
        [ "EC2SecurityGroupId"      =? _ecsgEC2SecurityGroupId
        , "EC2SecurityGroupName"    =? _ecsgEC2SecurityGroupName
        , "EC2SecurityGroupOwnerId" =? _ecsgEC2SecurityGroupOwnerId
        , "Status"                  =? _ecsgStatus
        ]
-- | The kind of RDS resource an event or subscription refers to.  The
-- textual forms used on the wire are shown next to each constructor.
data SourceType
    = DbInstance       -- ^ db-instance
    | DbParameterGroup -- ^ db-parameter-group
    | DbSecurityGroup  -- ^ db-security-group
    | DbSnapshot       -- ^ db-snapshot
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable SourceType

-- Parsing is case-insensitive ('takeLowerText'); unknown input fails with
-- a descriptive message.
instance FromText SourceType where
    parser = takeLowerText >>= \case
        "db-instance"        -> pure DbInstance
        "db-parameter-group" -> pure DbParameterGroup
        "db-security-group"  -> pure DbSecurityGroup
        "db-snapshot"        -> pure DbSnapshot
        e                    -> fail $
            "Failure parsing SourceType from " ++ show e

-- Rendering is the exact inverse of the parser above.
instance ToText SourceType where
    toText = \case
        DbInstance       -> "db-instance"
        DbParameterGroup -> "db-parameter-group"
        DbSecurityGroup  -> "db-security-group"
        DbSnapshot       -> "db-snapshot"

instance ToByteString SourceType
instance ToHeader     SourceType
instance ToQuery      SourceType

instance FromXML SourceType where
    parseXML = parseXMLText "SourceType"
-- | Pending maintenance actions for a single resource, identified by ARN.
data ResourcePendingMaintenanceActions = ResourcePendingMaintenanceActions
    { _rpmaPendingMaintenanceActionDetails :: List "member" PendingMaintenanceAction
    , _rpmaResourceIdentifier              :: Maybe Text
    } deriving (Eq, Read, Show)

-- | 'ResourcePendingMaintenanceActions' constructor.  The action list
-- starts empty and the identifier starts as 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rpmaPendingMaintenanceActionDetails' @::@ ['PendingMaintenanceAction']
--
-- * 'rpmaResourceIdentifier' @::@ 'Maybe' 'Text'
--
resourcePendingMaintenanceActions :: ResourcePendingMaintenanceActions
resourcePendingMaintenanceActions = ResourcePendingMaintenanceActions
    { _rpmaPendingMaintenanceActionDetails = mempty
    , _rpmaResourceIdentifier              = Nothing
    }

-- | Details about the pending maintenance actions for the resource.
rpmaPendingMaintenanceActionDetails :: Lens' ResourcePendingMaintenanceActions [PendingMaintenanceAction]
rpmaPendingMaintenanceActionDetails =
    lens _rpmaPendingMaintenanceActionDetails
        (\r v -> r { _rpmaPendingMaintenanceActionDetails = v })
        . _List

-- | The ARN of the resource that has pending maintenance actions.
rpmaResourceIdentifier :: Lens' ResourcePendingMaintenanceActions (Maybe Text)
rpmaResourceIdentifier =
    lens _rpmaResourceIdentifier (\r v -> r { _rpmaResourceIdentifier = v })

-- Positional parse; a missing detail list defaults to 'mempty' via '.!@'.
instance FromXML ResourcePendingMaintenanceActions where
    parseXML x = ResourcePendingMaintenanceActions
        <$> x .@? "PendingMaintenanceActionDetails" .!@ mempty
        <*> x .@? "ResourceIdentifier"

instance ToQuery ResourcePendingMaintenanceActions where
    toQuery ResourcePendingMaintenanceActions{..} = mconcat
        [ "PendingMaintenanceActionDetails" =? _rpmaPendingMaintenanceActionDetails
        , "ResourceIdentifier"              =? _rpmaResourceIdentifier
        ]
-- | A DB parameter group: family, name and description.
data DBParameterGroup = DBParameterGroup
    { _dbpgDBParameterGroupFamily :: Maybe Text
    , _dbpgDBParameterGroupName   :: Maybe Text
    , _dbpgDescription            :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'DBParameterGroup' constructor.  All fields default to 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbpgDBParameterGroupFamily' @::@ 'Maybe' 'Text'
--
-- * 'dbpgDBParameterGroupName' @::@ 'Maybe' 'Text'
--
-- * 'dbpgDescription' @::@ 'Maybe' 'Text'
--
dbparameterGroup :: DBParameterGroup
dbparameterGroup = DBParameterGroup
    { _dbpgDBParameterGroupFamily = Nothing
    , _dbpgDBParameterGroupName   = Nothing
    , _dbpgDescription            = Nothing
    }

-- | The name of the DB parameter group family that this DB parameter group
-- is compatible with.
dbpgDBParameterGroupFamily :: Lens' DBParameterGroup (Maybe Text)
dbpgDBParameterGroupFamily =
    lens _dbpgDBParameterGroupFamily
        (\g v -> g { _dbpgDBParameterGroupFamily = v })

-- | The name of the DB parameter group.
dbpgDBParameterGroupName :: Lens' DBParameterGroup (Maybe Text)
dbpgDBParameterGroupName =
    lens _dbpgDBParameterGroupName (\g v -> g { _dbpgDBParameterGroupName = v })

-- | The customer-specified description for this DB parameter group.
dbpgDescription :: Lens' DBParameterGroup (Maybe Text)
dbpgDescription = lens _dbpgDescription (\g v -> g { _dbpgDescription = v })

-- Positional parse: keep the '.@?' order in sync with the record fields.
instance FromXML DBParameterGroup where
    parseXML x = DBParameterGroup
        <$> x .@? "DBParameterGroupFamily"
        <*> x .@? "DBParameterGroupName"
        <*> x .@? "Description"

instance ToQuery DBParameterGroup where
    toQuery DBParameterGroup{..} = mconcat
        [ "DBParameterGroupFamily" =? _dbpgDBParameterGroupFamily
        , "DBParameterGroupName"   =? _dbpgDBParameterGroupName
        , "Description"            =? _dbpgDescription
        ]
-- | A purchasable reserved DB instance offering.  All scalar fields are
-- optional; the recurring-charge list defaults to empty.
--
-- NOTE(review): field order is load-bearing -- the positional 'FromXML'
-- parser below must match it exactly.
data ReservedDBInstancesOffering = ReservedDBInstancesOffering
    { _rdbioCurrencyCode                  :: Maybe Text
    , _rdbioDBInstanceClass               :: Maybe Text
    , _rdbioDuration                      :: Maybe Int
    , _rdbioFixedPrice                    :: Maybe Double
    , _rdbioMultiAZ                       :: Maybe Bool
    , _rdbioOfferingType                  :: Maybe Text
    , _rdbioProductDescription            :: Maybe Text
    , _rdbioRecurringCharges              :: List "member" RecurringCharge
    , _rdbioReservedDBInstancesOfferingId :: Maybe Text
    , _rdbioUsagePrice                    :: Maybe Double
    } deriving (Eq, Read, Show)

-- | 'ReservedDBInstancesOffering' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rdbioCurrencyCode' @::@ 'Maybe' 'Text'
--
-- * 'rdbioDBInstanceClass' @::@ 'Maybe' 'Text'
--
-- * 'rdbioDuration' @::@ 'Maybe' 'Int'
--
-- * 'rdbioFixedPrice' @::@ 'Maybe' 'Double'
--
-- * 'rdbioMultiAZ' @::@ 'Maybe' 'Bool'
--
-- * 'rdbioOfferingType' @::@ 'Maybe' 'Text'
--
-- * 'rdbioProductDescription' @::@ 'Maybe' 'Text'
--
-- * 'rdbioRecurringCharges' @::@ ['RecurringCharge']
--
-- * 'rdbioReservedDBInstancesOfferingId' @::@ 'Maybe' 'Text'
--
-- * 'rdbioUsagePrice' @::@ 'Maybe' 'Double'
--
reservedDBInstancesOffering :: ReservedDBInstancesOffering
reservedDBInstancesOffering = ReservedDBInstancesOffering
    { _rdbioReservedDBInstancesOfferingId = Nothing
    , _rdbioDBInstanceClass               = Nothing
    , _rdbioDuration                      = Nothing
    , _rdbioFixedPrice                    = Nothing
    , _rdbioUsagePrice                    = Nothing
    , _rdbioCurrencyCode                  = Nothing
    , _rdbioProductDescription            = Nothing
    , _rdbioOfferingType                  = Nothing
    , _rdbioMultiAZ                       = Nothing
    , _rdbioRecurringCharges              = mempty
    }

-- | The currency code for the reserved DB instance offering.
rdbioCurrencyCode :: Lens' ReservedDBInstancesOffering (Maybe Text)
rdbioCurrencyCode =
    lens _rdbioCurrencyCode (\s a -> s { _rdbioCurrencyCode = a })

-- | The DB instance class for the reserved DB instance.
rdbioDBInstanceClass :: Lens' ReservedDBInstancesOffering (Maybe Text)
rdbioDBInstanceClass =
    lens _rdbioDBInstanceClass (\s a -> s { _rdbioDBInstanceClass = a })

-- | The duration of the offering in seconds.
rdbioDuration :: Lens' ReservedDBInstancesOffering (Maybe Int)
rdbioDuration = lens _rdbioDuration (\s a -> s { _rdbioDuration = a })

-- | The fixed price charged for this offering.
rdbioFixedPrice :: Lens' ReservedDBInstancesOffering (Maybe Double)
rdbioFixedPrice = lens _rdbioFixedPrice (\s a -> s { _rdbioFixedPrice = a })

-- | Indicates if the offering applies to Multi-AZ deployments.
rdbioMultiAZ :: Lens' ReservedDBInstancesOffering (Maybe Bool)
rdbioMultiAZ = lens _rdbioMultiAZ (\s a -> s { _rdbioMultiAZ = a })

-- | The offering type.
rdbioOfferingType :: Lens' ReservedDBInstancesOffering (Maybe Text)
rdbioOfferingType =
    lens _rdbioOfferingType (\s a -> s { _rdbioOfferingType = a })

-- | The database engine used by the offering.
rdbioProductDescription :: Lens' ReservedDBInstancesOffering (Maybe Text)
rdbioProductDescription =
    lens _rdbioProductDescription (\s a -> s { _rdbioProductDescription = a })

-- | The recurring price charged to run this reserved DB instance.
-- '_List' unwraps the @member@-tagged wire list to a plain list.
rdbioRecurringCharges :: Lens' ReservedDBInstancesOffering [RecurringCharge]
rdbioRecurringCharges =
    lens _rdbioRecurringCharges (\s a -> s { _rdbioRecurringCharges = a })
        . _List

-- | The offering identifier.
rdbioReservedDBInstancesOfferingId :: Lens' ReservedDBInstancesOffering (Maybe Text)
rdbioReservedDBInstancesOfferingId =
    lens _rdbioReservedDBInstancesOfferingId
        (\s a -> s { _rdbioReservedDBInstancesOfferingId = a })

-- | The hourly price charged for this offering.
rdbioUsagePrice :: Lens' ReservedDBInstancesOffering (Maybe Double)
rdbioUsagePrice = lens _rdbioUsagePrice (\s a -> s { _rdbioUsagePrice = a })

instance FromXML ReservedDBInstancesOffering where
    -- Positional: the '.@?' order below must match the record declaration.
    -- A missing RecurringCharges list defaults to 'mempty' via '.!@'.
    parseXML x = ReservedDBInstancesOffering
        <$> x .@? "CurrencyCode"
        <*> x .@? "DBInstanceClass"
        <*> x .@? "Duration"
        <*> x .@? "FixedPrice"
        <*> x .@? "MultiAZ"
        <*> x .@? "OfferingType"
        <*> x .@? "ProductDescription"
        <*> x .@? "RecurringCharges" .!@ mempty
        <*> x .@? "ReservedDBInstancesOfferingId"
        <*> x .@? "UsagePrice"

instance ToQuery ReservedDBInstancesOffering where
    toQuery ReservedDBInstancesOffering{..} = mconcat
        [ "CurrencyCode"                  =? _rdbioCurrencyCode
        , "DBInstanceClass"               =? _rdbioDBInstanceClass
        , "Duration"                      =? _rdbioDuration
        , "FixedPrice"                    =? _rdbioFixedPrice
        , "MultiAZ"                       =? _rdbioMultiAZ
        , "OfferingType"                  =? _rdbioOfferingType
        , "ProductDescription"            =? _rdbioProductDescription
        , "RecurringCharges"              =? _rdbioRecurringCharges
        , "ReservedDBInstancesOfferingId" =? _rdbioReservedDBInstancesOfferingId
        , "UsagePrice"                    =? _rdbioUsagePrice
        ]
-- | When a parameter change is applied to a DB instance.  The wire forms
-- are shown next to each constructor.
data ApplyMethod
    = Immediate     -- ^ immediate
    | PendingReboot -- ^ pending-reboot
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable ApplyMethod

-- Parsing is case-insensitive ('takeLowerText'); unknown input fails with
-- a descriptive message.
instance FromText ApplyMethod where
    parser = takeLowerText >>= \case
        "immediate"      -> pure Immediate
        "pending-reboot" -> pure PendingReboot
        e                -> fail $
            "Failure parsing ApplyMethod from " ++ show e

-- Rendering is the exact inverse of the parser above.
instance ToText ApplyMethod where
    toText = \case
        Immediate     -> "immediate"
        PendingReboot -> "pending-reboot"

instance ToByteString ApplyMethod
instance ToHeader     ApplyMethod
instance ToQuery      ApplyMethod

instance FromXML ApplyMethod where
    parseXML = parseXMLText "ApplyMethod"
-- | A database character set: a name plus a human-readable description.
data CharacterSet = CharacterSet
    { _csCharacterSetDescription :: Maybe Text
    , _csCharacterSetName        :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'CharacterSet' constructor.  Both fields default to 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'csCharacterSetDescription' @::@ 'Maybe' 'Text'
--
-- * 'csCharacterSetName' @::@ 'Maybe' 'Text'
--
characterSet :: CharacterSet
characterSet = CharacterSet
    { _csCharacterSetDescription = Nothing
    , _csCharacterSetName        = Nothing
    }

-- | The description of the character set.
csCharacterSetDescription :: Lens' CharacterSet (Maybe Text)
csCharacterSetDescription =
    lens _csCharacterSetDescription
        (\c v -> c { _csCharacterSetDescription = v })

-- | The name of the character set.
csCharacterSetName :: Lens' CharacterSet (Maybe Text)
csCharacterSetName =
    lens _csCharacterSetName (\c v -> c { _csCharacterSetName = v })

-- Positional parse: keep the '.@?' order in sync with the record fields.
instance FromXML CharacterSet where
    parseXML x = CharacterSet
        <$> x .@? "CharacterSetDescription"
        <*> x .@? "CharacterSetName"

instance ToQuery CharacterSet where
    toQuery CharacterSet{..} = mconcat
        [ "CharacterSetDescription" =? _csCharacterSetDescription
        , "CharacterSetName"        =? _csCharacterSetName
        ]
-- | A subnet of a DB subnet group: identifier, Availability Zone and status.
data Subnet = Subnet
    { _sSubnetAvailabilityZone :: Maybe AvailabilityZone
    , _sSubnetIdentifier       :: Maybe Text
    , _sSubnetStatus           :: Maybe Text
    } deriving (Eq, Read, Show)

-- | 'Subnet' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'sSubnetAvailabilityZone' @::@ 'Maybe' 'AvailabilityZone'
--
-- * 'sSubnetIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'sSubnetStatus' @::@ 'Maybe' 'Text'
--
subnet :: Subnet
subnet = Subnet
    { _sSubnetIdentifier       = Nothing
    , _sSubnetAvailabilityZone = Nothing
    , _sSubnetStatus           = Nothing
    }

-- | The Availability Zone of the subnet.
-- (NOTE(review): this haddock was missing -- every sibling lens is
-- documented; description inferred from the field name/type.)
sSubnetAvailabilityZone :: Lens' Subnet (Maybe AvailabilityZone)
sSubnetAvailabilityZone =
    lens _sSubnetAvailabilityZone (\s a -> s { _sSubnetAvailabilityZone = a })

-- | Specifies the identifier of the subnet.
sSubnetIdentifier :: Lens' Subnet (Maybe Text)
sSubnetIdentifier =
    lens _sSubnetIdentifier (\s a -> s { _sSubnetIdentifier = a })

-- | Specifies the status of the subnet.
sSubnetStatus :: Lens' Subnet (Maybe Text)
sSubnetStatus = lens _sSubnetStatus (\s a -> s { _sSubnetStatus = a })

instance FromXML Subnet where
    -- Positional: the '.@?' order below must match the record declaration.
    parseXML x = Subnet
        <$> x .@? "SubnetAvailabilityZone"
        <*> x .@? "SubnetIdentifier"
        <*> x .@? "SubnetStatus"

instance ToQuery Subnet where
    toQuery Subnet{..} = mconcat
        [ "SubnetAvailabilityZone" =? _sSubnetAvailabilityZone
        , "SubnetIdentifier"       =? _sSubnetIdentifier
        , "SubnetStatus"           =? _sSubnetStatus
        ]
-- | A purchased reserved DB instance.  All scalar fields are optional; the
-- recurring-charge list defaults to empty.
--
-- NOTE(review): field order is load-bearing -- the positional 'FromXML'
-- parser below must match it exactly.
data ReservedDBInstance = ReservedDBInstance
    { _rdbiCurrencyCode                  :: Maybe Text
    , _rdbiDBInstanceClass               :: Maybe Text
    , _rdbiDBInstanceCount               :: Maybe Int
    , _rdbiDuration                      :: Maybe Int
    , _rdbiFixedPrice                    :: Maybe Double
    , _rdbiMultiAZ                       :: Maybe Bool
    , _rdbiOfferingType                  :: Maybe Text
    , _rdbiProductDescription            :: Maybe Text
    , _rdbiRecurringCharges              :: List "member" RecurringCharge
    , _rdbiReservedDBInstanceId          :: Maybe Text
    , _rdbiReservedDBInstancesOfferingId :: Maybe Text
    , _rdbiStartTime                     :: Maybe ISO8601  -- exposed as 'UTCTime' via 'mapping' '_Time'
    , _rdbiState                         :: Maybe Text
    , _rdbiUsagePrice                    :: Maybe Double
    } deriving (Eq, Read, Show)

-- | 'ReservedDBInstance' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rdbiCurrencyCode' @::@ 'Maybe' 'Text'
--
-- * 'rdbiDBInstanceClass' @::@ 'Maybe' 'Text'
--
-- * 'rdbiDBInstanceCount' @::@ 'Maybe' 'Int'
--
-- * 'rdbiDuration' @::@ 'Maybe' 'Int'
--
-- * 'rdbiFixedPrice' @::@ 'Maybe' 'Double'
--
-- * 'rdbiMultiAZ' @::@ 'Maybe' 'Bool'
--
-- * 'rdbiOfferingType' @::@ 'Maybe' 'Text'
--
-- * 'rdbiProductDescription' @::@ 'Maybe' 'Text'
--
-- * 'rdbiRecurringCharges' @::@ ['RecurringCharge']
--
-- * 'rdbiReservedDBInstanceId' @::@ 'Maybe' 'Text'
--
-- * 'rdbiReservedDBInstancesOfferingId' @::@ 'Maybe' 'Text'
--
-- * 'rdbiStartTime' @::@ 'Maybe' 'UTCTime'
--
-- * 'rdbiState' @::@ 'Maybe' 'Text'
--
-- * 'rdbiUsagePrice' @::@ 'Maybe' 'Double'
--
reservedDBInstance :: ReservedDBInstance
reservedDBInstance = ReservedDBInstance
    { _rdbiReservedDBInstanceId          = Nothing
    , _rdbiReservedDBInstancesOfferingId = Nothing
    , _rdbiDBInstanceClass               = Nothing
    , _rdbiStartTime                     = Nothing
    , _rdbiDuration                      = Nothing
    , _rdbiFixedPrice                    = Nothing
    , _rdbiUsagePrice                    = Nothing
    , _rdbiCurrencyCode                  = Nothing
    , _rdbiDBInstanceCount               = Nothing
    , _rdbiProductDescription            = Nothing
    , _rdbiOfferingType                  = Nothing
    , _rdbiMultiAZ                       = Nothing
    , _rdbiState                         = Nothing
    , _rdbiRecurringCharges              = mempty
    }

-- | The currency code for the reserved DB instance.
rdbiCurrencyCode :: Lens' ReservedDBInstance (Maybe Text)
rdbiCurrencyCode = lens _rdbiCurrencyCode (\s a -> s { _rdbiCurrencyCode = a })

-- | The DB instance class for the reserved DB instance.
rdbiDBInstanceClass :: Lens' ReservedDBInstance (Maybe Text)
rdbiDBInstanceClass =
    lens _rdbiDBInstanceClass (\s a -> s { _rdbiDBInstanceClass = a })

-- | The number of reserved DB instances.
rdbiDBInstanceCount :: Lens' ReservedDBInstance (Maybe Int)
rdbiDBInstanceCount =
    lens _rdbiDBInstanceCount (\s a -> s { _rdbiDBInstanceCount = a })

-- | The duration of the reservation in seconds.
rdbiDuration :: Lens' ReservedDBInstance (Maybe Int)
rdbiDuration = lens _rdbiDuration (\s a -> s { _rdbiDuration = a })

-- | The fixed price charged for this reserved DB instance.
rdbiFixedPrice :: Lens' ReservedDBInstance (Maybe Double)
rdbiFixedPrice = lens _rdbiFixedPrice (\s a -> s { _rdbiFixedPrice = a })

-- | Indicates if the reservation applies to Multi-AZ deployments.
rdbiMultiAZ :: Lens' ReservedDBInstance (Maybe Bool)
rdbiMultiAZ = lens _rdbiMultiAZ (\s a -> s { _rdbiMultiAZ = a })

-- | The offering type of this reserved DB instance.
rdbiOfferingType :: Lens' ReservedDBInstance (Maybe Text)
rdbiOfferingType = lens _rdbiOfferingType (\s a -> s { _rdbiOfferingType = a })

-- | The description of the reserved DB instance.
rdbiProductDescription :: Lens' ReservedDBInstance (Maybe Text)
rdbiProductDescription =
    lens _rdbiProductDescription (\s a -> s { _rdbiProductDescription = a })

-- | The recurring price charged to run this reserved DB instance.
-- '_List' unwraps the @member@-tagged wire list to a plain list.
rdbiRecurringCharges :: Lens' ReservedDBInstance [RecurringCharge]
rdbiRecurringCharges =
    lens _rdbiRecurringCharges (\s a -> s { _rdbiRecurringCharges = a })
        . _List

-- | The unique identifier for the reservation.
rdbiReservedDBInstanceId :: Lens' ReservedDBInstance (Maybe Text)
rdbiReservedDBInstanceId =
    lens _rdbiReservedDBInstanceId
        (\s a -> s { _rdbiReservedDBInstanceId = a })

-- | The offering identifier.
rdbiReservedDBInstancesOfferingId :: Lens' ReservedDBInstance (Maybe Text)
rdbiReservedDBInstancesOfferingId =
    lens _rdbiReservedDBInstancesOfferingId
        (\s a -> s { _rdbiReservedDBInstancesOfferingId = a })

-- | The time the reservation started.  Stored as 'ISO8601' and exposed as
-- 'UTCTime' via 'mapping' '_Time'.
rdbiStartTime :: Lens' ReservedDBInstance (Maybe UTCTime)
rdbiStartTime = lens _rdbiStartTime (\s a -> s { _rdbiStartTime = a }) . mapping _Time

-- | The state of the reserved DB instance.
rdbiState :: Lens' ReservedDBInstance (Maybe Text)
rdbiState = lens _rdbiState (\s a -> s { _rdbiState = a })

-- | The hourly price charged for this reserved DB instance.
rdbiUsagePrice :: Lens' ReservedDBInstance (Maybe Double)
rdbiUsagePrice = lens _rdbiUsagePrice (\s a -> s { _rdbiUsagePrice = a })

instance FromXML ReservedDBInstance where
    -- Positional: the '.@?' order below must match the record declaration.
    -- A missing RecurringCharges list defaults to 'mempty' via '.!@'.
    parseXML x = ReservedDBInstance
        <$> x .@? "CurrencyCode"
        <*> x .@? "DBInstanceClass"
        <*> x .@? "DBInstanceCount"
        <*> x .@? "Duration"
        <*> x .@? "FixedPrice"
        <*> x .@? "MultiAZ"
        <*> x .@? "OfferingType"
        <*> x .@? "ProductDescription"
        <*> x .@? "RecurringCharges" .!@ mempty
        <*> x .@? "ReservedDBInstanceId"
        <*> x .@? "ReservedDBInstancesOfferingId"
        <*> x .@? "StartTime"
        <*> x .@? "State"
        <*> x .@? "UsagePrice"

instance ToQuery ReservedDBInstance where
    toQuery ReservedDBInstance{..} = mconcat
        [ "CurrencyCode"                  =? _rdbiCurrencyCode
        , "DBInstanceClass"               =? _rdbiDBInstanceClass
        , "DBInstanceCount"               =? _rdbiDBInstanceCount
        , "Duration"                      =? _rdbiDuration
        , "FixedPrice"                    =? _rdbiFixedPrice
        , "MultiAZ"                       =? _rdbiMultiAZ
        , "OfferingType"                  =? _rdbiOfferingType
        , "ProductDescription"            =? _rdbiProductDescription
        , "RecurringCharges"              =? _rdbiRecurringCharges
        , "ReservedDBInstanceId"          =? _rdbiReservedDBInstanceId
        , "ReservedDBInstancesOfferingId" =? _rdbiReservedDBInstancesOfferingId
        , "StartTime"                     =? _rdbiStartTime
        , "State"                         =? _rdbiState
        , "UsagePrice"                    =? _rdbiUsagePrice
        ]
-- | Engine default parameters for a DB parameter group family, with an
-- optional pagination marker.
data EngineDefaults = EngineDefaults
    { _edDBParameterGroupFamily :: Maybe Text
    , _edMarker                 :: Maybe Text
    , _edParameters             :: List "member" Parameter
    } deriving (Eq, Read, Show)

-- | 'EngineDefaults' constructor.  Scalar fields default to 'Nothing'; the
-- parameter list starts empty.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'edDBParameterGroupFamily' @::@ 'Maybe' 'Text'
--
-- * 'edMarker' @::@ 'Maybe' 'Text'
--
-- * 'edParameters' @::@ ['Parameter']
--
engineDefaults :: EngineDefaults
engineDefaults = EngineDefaults
    { _edDBParameterGroupFamily = Nothing
    , _edMarker                 = Nothing
    , _edParameters             = mempty
    }

-- | The name of the DB parameter group family which the engine default
-- parameters apply to.
edDBParameterGroupFamily :: Lens' EngineDefaults (Maybe Text)
edDBParameterGroupFamily =
    lens _edDBParameterGroupFamily (\e v -> e { _edDBParameterGroupFamily = v })

-- | An optional pagination token provided by a previous EngineDefaults
-- request.  If this parameter is specified, the response includes only
-- records beyond the marker, up to the value specified by 'MaxRecords'.
edMarker :: Lens' EngineDefaults (Maybe Text)
edMarker = lens _edMarker (\e v -> e { _edMarker = v })

-- | A list of engine default parameters.
edParameters :: Lens' EngineDefaults [Parameter]
edParameters = lens _edParameters (\e v -> e { _edParameters = v }) . _List

-- Positional parse; a missing parameter list defaults to 'mempty' via '.!@'.
instance FromXML EngineDefaults where
    parseXML x = EngineDefaults
        <$> x .@? "DBParameterGroupFamily"
        <*> x .@? "Marker"
        <*> x .@? "Parameters" .!@ mempty

instance ToQuery EngineDefaults where
    toQuery EngineDefaults{..} = mconcat
        [ "DBParameterGroupFamily" =? _edDBParameterGroupFamily
        , "Marker"                 =? _edMarker
        , "Parameters"             =? _edParameters
        ]
-- | A message carrying only a DB parameter group name.
newtype DBParameterGroupNameMessage = DBParameterGroupNameMessage
    { _dbpgnmDBParameterGroupName :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)

-- | 'DBParameterGroupNameMessage' constructor.  The name defaults to
-- 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbpgnmDBParameterGroupName' @::@ 'Maybe' 'Text'
--
dbparameterGroupNameMessage :: DBParameterGroupNameMessage
dbparameterGroupNameMessage =
    DBParameterGroupNameMessage { _dbpgnmDBParameterGroupName = Nothing }

-- | The name of the DB parameter group.
dbpgnmDBParameterGroupName :: Lens' DBParameterGroupNameMessage (Maybe Text)
dbpgnmDBParameterGroupName =
    lens _dbpgnmDBParameterGroupName
        (\m v -> m { _dbpgnmDBParameterGroupName = v })

instance FromXML DBParameterGroupNameMessage where
    parseXML x = DBParameterGroupNameMessage
        <$> x .@? "DBParameterGroupName"

instance ToQuery DBParameterGroupNameMessage where
    toQuery DBParameterGroupNameMessage{..} = mconcat
        [ "DBParameterGroupName" =? _dbpgnmDBParameterGroupName
        ]
-- | Describes an option that is available for an option group; every field is
-- optional and decoded positionally from XML by 'parseXML' below.
data OptionGroupOption = OptionGroupOption
    { _ogoDefaultPort                       :: Maybe Int
    , _ogoDescription                       :: Maybe Text
    , _ogoEngineName                        :: Maybe Text
    , _ogoMajorEngineVersion                :: Maybe Text
    , _ogoMinimumRequiredMinorEngineVersion :: Maybe Text
    , _ogoName                              :: Maybe Text
    , _ogoOptionGroupOptionSettings         :: List "member" OptionGroupOptionSetting
    , _ogoOptionsDependedOn                 :: List "member" Text
    , _ogoPermanent                         :: Maybe Bool
    , _ogoPersistent                        :: Maybe Bool
    , _ogoPortRequired                      :: Maybe Bool
    } deriving (Eq, Read, Show)

-- | 'OptionGroupOption' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ogoDefaultPort' @::@ 'Maybe' 'Int'
--
-- * 'ogoDescription' @::@ 'Maybe' 'Text'
--
-- * 'ogoEngineName' @::@ 'Maybe' 'Text'
--
-- * 'ogoMajorEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'ogoMinimumRequiredMinorEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'ogoName' @::@ 'Maybe' 'Text'
--
-- * 'ogoOptionGroupOptionSettings' @::@ ['OptionGroupOptionSetting']
--
-- * 'ogoOptionsDependedOn' @::@ ['Text']
--
-- * 'ogoPermanent' @::@ 'Maybe' 'Bool'
--
-- * 'ogoPersistent' @::@ 'Maybe' 'Bool'
--
-- * 'ogoPortRequired' @::@ 'Maybe' 'Bool'
--
optionGroupOption :: OptionGroupOption
optionGroupOption = OptionGroupOption
    { _ogoName                              = Nothing
    , _ogoDescription                       = Nothing
    , _ogoEngineName                        = Nothing
    , _ogoMajorEngineVersion                = Nothing
    , _ogoMinimumRequiredMinorEngineVersion = Nothing
    , _ogoPortRequired                      = Nothing
    , _ogoDefaultPort                       = Nothing
    , _ogoOptionsDependedOn                 = mempty
    , _ogoPersistent                        = Nothing
    , _ogoPermanent                         = Nothing
    , _ogoOptionGroupOptionSettings         = mempty
    }

-- | If the option requires a port, specifies the default port for the option.
ogoDefaultPort :: Lens' OptionGroupOption (Maybe Int)
ogoDefaultPort = lens _ogoDefaultPort (\s a -> s { _ogoDefaultPort = a })

-- | The description of the option.
ogoDescription :: Lens' OptionGroupOption (Maybe Text)
ogoDescription = lens _ogoDescription (\s a -> s { _ogoDescription = a })

-- | The name of the engine that this option can be applied to.
ogoEngineName :: Lens' OptionGroupOption (Maybe Text)
ogoEngineName = lens _ogoEngineName (\s a -> s { _ogoEngineName = a })

-- | Indicates the major engine version that the option is available for.
ogoMajorEngineVersion :: Lens' OptionGroupOption (Maybe Text)
ogoMajorEngineVersion =
    lens _ogoMajorEngineVersion (\s a -> s { _ogoMajorEngineVersion = a })

-- | The minimum required engine version for the option to be applied.
ogoMinimumRequiredMinorEngineVersion :: Lens' OptionGroupOption (Maybe Text)
ogoMinimumRequiredMinorEngineVersion =
    lens _ogoMinimumRequiredMinorEngineVersion
        (\s a -> s { _ogoMinimumRequiredMinorEngineVersion = a })

-- | The name of the option.
ogoName :: Lens' OptionGroupOption (Maybe Text)
ogoName = lens _ogoName (\s a -> s { _ogoName = a })

-- | Specifies the option settings that are available (and the default value) for
-- each option in an option group.
ogoOptionGroupOptionSettings :: Lens' OptionGroupOption [OptionGroupOptionSetting]
ogoOptionGroupOptionSettings =
    lens _ogoOptionGroupOptionSettings
        (\s a -> s { _ogoOptionGroupOptionSettings = a })
            . _List

-- | List of all options that are prerequisites for this option.
ogoOptionsDependedOn :: Lens' OptionGroupOption [Text]
ogoOptionsDependedOn =
    lens _ogoOptionsDependedOn (\s a -> s { _ogoOptionsDependedOn = a })
        . _List

-- | A permanent option cannot be removed from the option group once the option
-- group is used, and it cannot be removed from the db instance after assigning
-- an option group with this permanent option.
ogoPermanent :: Lens' OptionGroupOption (Maybe Bool)
ogoPermanent = lens _ogoPermanent (\s a -> s { _ogoPermanent = a })

-- | A persistent option cannot be removed from the option group once the option
-- group is used, but this option can be removed from the db instance while
-- modifying the related data and assigning another option group without this
-- option.
ogoPersistent :: Lens' OptionGroupOption (Maybe Bool)
ogoPersistent = lens _ogoPersistent (\s a -> s { _ogoPersistent = a })

-- | Specifies whether the option requires a port.
ogoPortRequired :: Lens' OptionGroupOption (Maybe Bool)
ogoPortRequired = lens _ogoPortRequired (\s a -> s { _ogoPortRequired = a })

-- NOTE: the applicative chain constructs the record positionally, so the parse
-- order below must match the field order of the 'OptionGroupOption'
-- declaration exactly.
instance FromXML OptionGroupOption where
    parseXML x = OptionGroupOption
        <$> x .@? "DefaultPort"
        <*> x .@? "Description"
        <*> x .@? "EngineName"
        <*> x .@? "MajorEngineVersion"
        <*> x .@? "MinimumRequiredMinorEngineVersion"
        <*> x .@? "Name"
        <*> x .@? "OptionGroupOptionSettings" .!@ mempty
        <*> x .@? "OptionsDependedOn" .!@ mempty
        <*> x .@? "Permanent"
        <*> x .@? "Persistent"
        <*> x .@? "PortRequired"

instance ToQuery OptionGroupOption where
    toQuery OptionGroupOption{..} = mconcat
        [ "DefaultPort"                       =? _ogoDefaultPort
        , "Description"                       =? _ogoDescription
        , "EngineName"                        =? _ogoEngineName
        , "MajorEngineVersion"                =? _ogoMajorEngineVersion
        , "MinimumRequiredMinorEngineVersion" =? _ogoMinimumRequiredMinorEngineVersion
        , "Name"                              =? _ogoName
        , "OptionGroupOptionSettings"         =? _ogoOptionGroupOptionSettings
        , "OptionsDependedOn"                 =? _ogoOptionsDependedOn
        , "Permanent"                         =? _ogoPermanent
        , "Persistent"                        =? _ogoPersistent
        , "PortRequired"                      =? _ogoPortRequired
        ]
-- | Details of an Amazon RDS DB instance as decoded from the service's XML
-- response; every field is optional and is filled positionally by 'parseXML'
-- below.
data DBInstance = DBInstance
    { _dbiAllocatedStorage                      :: Maybe Int
    , _dbiAutoMinorVersionUpgrade               :: Maybe Bool
    , _dbiAvailabilityZone                      :: Maybe Text
    , _dbiBackupRetentionPeriod                 :: Maybe Int
    , _dbiCACertificateIdentifier               :: Maybe Text
    , _dbiCharacterSetName                      :: Maybe Text
    , _dbiDBInstanceClass                       :: Maybe Text
    , _dbiDBInstanceIdentifier                  :: Maybe Text
    , _dbiDBInstanceStatus                      :: Maybe Text
    , _dbiDBName                                :: Maybe Text
    , _dbiDBParameterGroups                     :: List "member" DBParameterGroupStatus
    , _dbiDBSecurityGroups                      :: List "member" DBSecurityGroupMembership
    , _dbiDBSubnetGroup                         :: Maybe DBSubnetGroup
    , _dbiDbiResourceId                         :: Maybe Text
    , _dbiEndpoint                              :: Maybe Endpoint
    , _dbiEngine                                :: Maybe Text
    , _dbiEngineVersion                         :: Maybe Text
    , _dbiInstanceCreateTime                    :: Maybe ISO8601
    , _dbiIops                                  :: Maybe Int
    , _dbiKmsKeyId                              :: Maybe Text
    , _dbiLatestRestorableTime                  :: Maybe ISO8601
    , _dbiLicenseModel                          :: Maybe Text
    , _dbiMasterUsername                        :: Maybe Text
    , _dbiMultiAZ                               :: Maybe Bool
    , _dbiOptionGroupMemberships                :: List "member" OptionGroupMembership
    , _dbiPendingModifiedValues                 :: Maybe PendingModifiedValues
    , _dbiPreferredBackupWindow                 :: Maybe Text
    , _dbiPreferredMaintenanceWindow            :: Maybe Text
    , _dbiPubliclyAccessible                    :: Maybe Bool
    , _dbiReadReplicaDBInstanceIdentifiers      :: List "member" Text
    , _dbiReadReplicaSourceDBInstanceIdentifier :: Maybe Text
    , _dbiSecondaryAvailabilityZone             :: Maybe Text
    , _dbiStatusInfos                           :: List "member" DBInstanceStatusInfo
    , _dbiStorageEncrypted                      :: Maybe Bool
    , _dbiStorageType                           :: Maybe Text
    , _dbiTdeCredentialArn                      :: Maybe Text
    , _dbiVpcSecurityGroups                     :: List "member" VpcSecurityGroupMembership
    } deriving (Eq, Read, Show)

-- | 'DBInstance' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dbiAllocatedStorage' @::@ 'Maybe' 'Int'
--
-- * 'dbiAutoMinorVersionUpgrade' @::@ 'Maybe' 'Bool'
--
-- * 'dbiAvailabilityZone' @::@ 'Maybe' 'Text'
--
-- * 'dbiBackupRetentionPeriod' @::@ 'Maybe' 'Int'
--
-- * 'dbiCACertificateIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'dbiCharacterSetName' @::@ 'Maybe' 'Text'
--
-- * 'dbiDBInstanceClass' @::@ 'Maybe' 'Text'
--
-- * 'dbiDBInstanceIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'dbiDBInstanceStatus' @::@ 'Maybe' 'Text'
--
-- * 'dbiDBName' @::@ 'Maybe' 'Text'
--
-- * 'dbiDBParameterGroups' @::@ ['DBParameterGroupStatus']
--
-- * 'dbiDBSecurityGroups' @::@ ['DBSecurityGroupMembership']
--
-- * 'dbiDBSubnetGroup' @::@ 'Maybe' 'DBSubnetGroup'
--
-- * 'dbiDbiResourceId' @::@ 'Maybe' 'Text'
--
-- * 'dbiEndpoint' @::@ 'Maybe' 'Endpoint'
--
-- * 'dbiEngine' @::@ 'Maybe' 'Text'
--
-- * 'dbiEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'dbiInstanceCreateTime' @::@ 'Maybe' 'UTCTime'
--
-- * 'dbiIops' @::@ 'Maybe' 'Int'
--
-- * 'dbiKmsKeyId' @::@ 'Maybe' 'Text'
--
-- * 'dbiLatestRestorableTime' @::@ 'Maybe' 'UTCTime'
--
-- * 'dbiLicenseModel' @::@ 'Maybe' 'Text'
--
-- * 'dbiMasterUsername' @::@ 'Maybe' 'Text'
--
-- * 'dbiMultiAZ' @::@ 'Maybe' 'Bool'
--
-- * 'dbiOptionGroupMemberships' @::@ ['OptionGroupMembership']
--
-- * 'dbiPendingModifiedValues' @::@ 'Maybe' 'PendingModifiedValues'
--
-- * 'dbiPreferredBackupWindow' @::@ 'Maybe' 'Text'
--
-- * 'dbiPreferredMaintenanceWindow' @::@ 'Maybe' 'Text'
--
-- * 'dbiPubliclyAccessible' @::@ 'Maybe' 'Bool'
--
-- * 'dbiReadReplicaDBInstanceIdentifiers' @::@ ['Text']
--
-- * 'dbiReadReplicaSourceDBInstanceIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'dbiSecondaryAvailabilityZone' @::@ 'Maybe' 'Text'
--
-- * 'dbiStatusInfos' @::@ ['DBInstanceStatusInfo']
--
-- * 'dbiStorageEncrypted' @::@ 'Maybe' 'Bool'
--
-- * 'dbiStorageType' @::@ 'Maybe' 'Text'
--
-- * 'dbiTdeCredentialArn' @::@ 'Maybe' 'Text'
--
-- * 'dbiVpcSecurityGroups' @::@ ['VpcSecurityGroupMembership']
--
dbinstance :: DBInstance
dbinstance = DBInstance
    { _dbiDBInstanceIdentifier                  = Nothing
    , _dbiDBInstanceClass                       = Nothing
    , _dbiEngine                                = Nothing
    , _dbiDBInstanceStatus                      = Nothing
    , _dbiMasterUsername                        = Nothing
    , _dbiDBName                                = Nothing
    , _dbiEndpoint                              = Nothing
    , _dbiAllocatedStorage                      = Nothing
    , _dbiInstanceCreateTime                    = Nothing
    , _dbiPreferredBackupWindow                 = Nothing
    , _dbiBackupRetentionPeriod                 = Nothing
    , _dbiDBSecurityGroups                      = mempty
    , _dbiVpcSecurityGroups                     = mempty
    , _dbiDBParameterGroups                     = mempty
    , _dbiAvailabilityZone                      = Nothing
    , _dbiDBSubnetGroup                         = Nothing
    , _dbiPreferredMaintenanceWindow            = Nothing
    , _dbiPendingModifiedValues                 = Nothing
    , _dbiLatestRestorableTime                  = Nothing
    , _dbiMultiAZ                               = Nothing
    , _dbiEngineVersion                         = Nothing
    , _dbiAutoMinorVersionUpgrade               = Nothing
    , _dbiReadReplicaSourceDBInstanceIdentifier = Nothing
    , _dbiReadReplicaDBInstanceIdentifiers      = mempty
    , _dbiLicenseModel                          = Nothing
    , _dbiIops                                  = Nothing
    , _dbiOptionGroupMemberships                = mempty
    , _dbiCharacterSetName                      = Nothing
    , _dbiSecondaryAvailabilityZone             = Nothing
    , _dbiPubliclyAccessible                    = Nothing
    , _dbiStatusInfos                           = mempty
    , _dbiStorageType                           = Nothing
    , _dbiTdeCredentialArn                      = Nothing
    , _dbiStorageEncrypted                      = Nothing
    , _dbiKmsKeyId                              = Nothing
    , _dbiDbiResourceId                         = Nothing
    , _dbiCACertificateIdentifier               = Nothing
    }

-- | Specifies the allocated storage size specified in gigabytes.
dbiAllocatedStorage :: Lens' DBInstance (Maybe Int)
dbiAllocatedStorage =
    lens _dbiAllocatedStorage (\s a -> s { _dbiAllocatedStorage = a })

-- | Indicates that minor version patches are applied automatically.
dbiAutoMinorVersionUpgrade :: Lens' DBInstance (Maybe Bool)
dbiAutoMinorVersionUpgrade =
    lens _dbiAutoMinorVersionUpgrade
        (\s a -> s { _dbiAutoMinorVersionUpgrade = a })

-- | Specifies the name of the Availability Zone the DB instance is located in.
dbiAvailabilityZone :: Lens' DBInstance (Maybe Text)
dbiAvailabilityZone =
    lens _dbiAvailabilityZone (\s a -> s { _dbiAvailabilityZone = a })

-- | Specifies the number of days for which automatic DB snapshots are retained.
dbiBackupRetentionPeriod :: Lens' DBInstance (Maybe Int)
dbiBackupRetentionPeriod =
    lens _dbiBackupRetentionPeriod
        (\s a -> s { _dbiBackupRetentionPeriod = a })

-- | The identifier of the CA certificate for this DB instance.
dbiCACertificateIdentifier :: Lens' DBInstance (Maybe Text)
dbiCACertificateIdentifier =
    lens _dbiCACertificateIdentifier
        (\s a -> s { _dbiCACertificateIdentifier = a })

-- | If present, specifies the name of the character set that this instance is
-- associated with.
dbiCharacterSetName :: Lens' DBInstance (Maybe Text)
dbiCharacterSetName =
    lens _dbiCharacterSetName (\s a -> s { _dbiCharacterSetName = a })

-- | Contains the name of the compute and memory capacity class of the DB
-- instance.
dbiDBInstanceClass :: Lens' DBInstance (Maybe Text)
dbiDBInstanceClass =
    lens _dbiDBInstanceClass (\s a -> s { _dbiDBInstanceClass = a })

-- | Contains a user-supplied database identifier. This is the unique key that
-- identifies a DB instance.
dbiDBInstanceIdentifier :: Lens' DBInstance (Maybe Text)
dbiDBInstanceIdentifier =
    lens _dbiDBInstanceIdentifier (\s a -> s { _dbiDBInstanceIdentifier = a })

-- | Specifies the current state of this database.
dbiDBInstanceStatus :: Lens' DBInstance (Maybe Text)
dbiDBInstanceStatus =
    lens _dbiDBInstanceStatus (\s a -> s { _dbiDBInstanceStatus = a })

-- | The meaning of this parameter differs according to the database engine you
-- use. For example, this value returns either MySQL or PostgreSQL information
-- when returning values from CreateDBInstanceReadReplica since Read Replicas
-- are only supported for MySQL and PostgreSQL.
--
-- MySQL, SQL Server, PostgreSQL
--
-- Contains the name of the initial database of this instance that was
-- provided at create time, if one was specified when the DB instance was
-- created. This same name is returned for the life of the DB instance.
--
-- Type: String
--
-- Oracle
--
-- Contains the Oracle System ID (SID) of the created DB instance. Not shown
-- when the returned parameters do not apply to an Oracle DB instance.
dbiDBName :: Lens' DBInstance (Maybe Text)
dbiDBName = lens _dbiDBName (\s a -> s { _dbiDBName = a })

-- | Provides the list of DB parameter groups applied to this DB instance.
dbiDBParameterGroups :: Lens' DBInstance [DBParameterGroupStatus]
dbiDBParameterGroups =
    lens _dbiDBParameterGroups (\s a -> s { _dbiDBParameterGroups = a })
        . _List

-- | Provides List of DB security group elements containing only 'DBSecurityGroup.Name' and 'DBSecurityGroup.Status' subelements.
dbiDBSecurityGroups :: Lens' DBInstance [DBSecurityGroupMembership]
dbiDBSecurityGroups =
    lens _dbiDBSecurityGroups (\s a -> s { _dbiDBSecurityGroups = a })
        . _List

-- | Specifies information on the subnet group associated with the DB instance,
-- including the name, description, and subnets in the subnet group.
dbiDBSubnetGroup :: Lens' DBInstance (Maybe DBSubnetGroup)
dbiDBSubnetGroup = lens _dbiDBSubnetGroup (\s a -> s { _dbiDBSubnetGroup = a })

-- | If 'StorageEncrypted' is true, the region-unique, immutable identifier for the
-- encrypted DB instance. This identifier is found in AWS CloudTrail log entries
-- whenever the KMS key for the DB instance is accessed.
dbiDbiResourceId :: Lens' DBInstance (Maybe Text)
dbiDbiResourceId = lens _dbiDbiResourceId (\s a -> s { _dbiDbiResourceId = a })

-- | Specifies the connection endpoint.
dbiEndpoint :: Lens' DBInstance (Maybe Endpoint)
dbiEndpoint = lens _dbiEndpoint (\s a -> s { _dbiEndpoint = a })

-- | Provides the name of the database engine to be used for this DB instance.
dbiEngine :: Lens' DBInstance (Maybe Text)
dbiEngine = lens _dbiEngine (\s a -> s { _dbiEngine = a })

-- | Indicates the database engine version.
dbiEngineVersion :: Lens' DBInstance (Maybe Text)
dbiEngineVersion = lens _dbiEngineVersion (\s a -> s { _dbiEngineVersion = a })

-- | Provides the date and time the DB instance was created.
dbiInstanceCreateTime :: Lens' DBInstance (Maybe UTCTime)
dbiInstanceCreateTime =
    lens _dbiInstanceCreateTime (\s a -> s { _dbiInstanceCreateTime = a })
        . mapping _Time

-- | Specifies the Provisioned IOPS (I/O operations per second) value.
dbiIops :: Lens' DBInstance (Maybe Int)
dbiIops = lens _dbiIops (\s a -> s { _dbiIops = a })

-- | If 'StorageEncrypted' is true, the KMS key identifier for the encrypted DB
-- instance.
dbiKmsKeyId :: Lens' DBInstance (Maybe Text)
dbiKmsKeyId = lens _dbiKmsKeyId (\s a -> s { _dbiKmsKeyId = a })

-- | Specifies the latest time to which a database can be restored with
-- point-in-time restore.
dbiLatestRestorableTime :: Lens' DBInstance (Maybe UTCTime)
dbiLatestRestorableTime =
    lens _dbiLatestRestorableTime (\s a -> s { _dbiLatestRestorableTime = a })
        . mapping _Time

-- | License model information for this DB instance.
dbiLicenseModel :: Lens' DBInstance (Maybe Text)
dbiLicenseModel = lens _dbiLicenseModel (\s a -> s { _dbiLicenseModel = a })

-- | Contains the master username for the DB instance.
dbiMasterUsername :: Lens' DBInstance (Maybe Text)
dbiMasterUsername =
    lens _dbiMasterUsername (\s a -> s { _dbiMasterUsername = a })

-- | Specifies if the DB instance is a Multi-AZ deployment.
dbiMultiAZ :: Lens' DBInstance (Maybe Bool)
dbiMultiAZ = lens _dbiMultiAZ (\s a -> s { _dbiMultiAZ = a })

-- | Provides the list of option group memberships for this DB instance.
dbiOptionGroupMemberships :: Lens' DBInstance [OptionGroupMembership]
dbiOptionGroupMemberships =
    lens _dbiOptionGroupMemberships
        (\s a -> s { _dbiOptionGroupMemberships = a })
            . _List

-- | Specifies that changes to the DB instance are pending. This element is only
-- included when changes are pending. Specific changes are identified by
-- subelements.
dbiPendingModifiedValues :: Lens' DBInstance (Maybe PendingModifiedValues)
dbiPendingModifiedValues =
    lens _dbiPendingModifiedValues
        (\s a -> s { _dbiPendingModifiedValues = a })

-- | Specifies the daily time range during which automated backups are created if
-- automated backups are enabled, as determined by the 'BackupRetentionPeriod'.
dbiPreferredBackupWindow :: Lens' DBInstance (Maybe Text)
dbiPreferredBackupWindow =
    lens _dbiPreferredBackupWindow
        (\s a -> s { _dbiPreferredBackupWindow = a })

-- | Specifies the weekly time range (in UTC) during which system maintenance can
-- occur.
dbiPreferredMaintenanceWindow :: Lens' DBInstance (Maybe Text)
dbiPreferredMaintenanceWindow =
    lens _dbiPreferredMaintenanceWindow
        (\s a -> s { _dbiPreferredMaintenanceWindow = a })

-- | Specifies the accessibility options for the DB instance. A value of true
-- specifies an Internet-facing instance with a publicly resolvable DNS name,
-- which resolves to a public IP address. A value of false specifies an internal
-- instance with a DNS name that resolves to a private IP address.
--
-- Default: The default behavior varies depending on whether a VPC has been
-- requested or not. The following list shows the default behavior in each case.
--
-- Default VPC:true  VPC:false   If no DB subnet group has been specified as
-- part of the request and the PubliclyAccessible value has not been set, the DB
-- instance will be publicly accessible. If a specific DB subnet group has been
-- specified as part of the request and the PubliclyAccessible value has not
-- been set, the DB instance will be private.
dbiPubliclyAccessible :: Lens' DBInstance (Maybe Bool)
dbiPubliclyAccessible =
    lens _dbiPubliclyAccessible (\s a -> s { _dbiPubliclyAccessible = a })

-- | Contains one or more identifiers of the Read Replicas associated with this
-- DB instance.
dbiReadReplicaDBInstanceIdentifiers :: Lens' DBInstance [Text]
dbiReadReplicaDBInstanceIdentifiers =
    lens _dbiReadReplicaDBInstanceIdentifiers
        (\s a -> s { _dbiReadReplicaDBInstanceIdentifiers = a })
            . _List

-- | Contains the identifier of the source DB instance if this DB instance is a
-- Read Replica.
dbiReadReplicaSourceDBInstanceIdentifier :: Lens' DBInstance (Maybe Text)
dbiReadReplicaSourceDBInstanceIdentifier =
    lens _dbiReadReplicaSourceDBInstanceIdentifier
        (\s a -> s { _dbiReadReplicaSourceDBInstanceIdentifier = a })

-- | If present, specifies the name of the secondary Availability Zone for a DB
-- instance with multi-AZ support.
dbiSecondaryAvailabilityZone :: Lens' DBInstance (Maybe Text)
dbiSecondaryAvailabilityZone =
    lens _dbiSecondaryAvailabilityZone
        (\s a -> s { _dbiSecondaryAvailabilityZone = a })

-- | The status of a Read Replica. If the instance is not a Read Replica, this
-- will be blank.
dbiStatusInfos :: Lens' DBInstance [DBInstanceStatusInfo]
dbiStatusInfos = lens _dbiStatusInfos (\s a -> s { _dbiStatusInfos = a }) . _List

-- | Specifies whether the DB instance is encrypted.
dbiStorageEncrypted :: Lens' DBInstance (Maybe Bool)
dbiStorageEncrypted =
    lens _dbiStorageEncrypted (\s a -> s { _dbiStorageEncrypted = a })

-- | Specifies the storage type associated with DB instance.
dbiStorageType :: Lens' DBInstance (Maybe Text)
dbiStorageType = lens _dbiStorageType (\s a -> s { _dbiStorageType = a })

-- | The ARN from the Key Store with which the instance is associated for TDE
-- encryption.
dbiTdeCredentialArn :: Lens' DBInstance (Maybe Text)
dbiTdeCredentialArn =
    lens _dbiTdeCredentialArn (\s a -> s { _dbiTdeCredentialArn = a })

-- | Provides List of VPC security group elements that the DB instance belongs
-- to.
dbiVpcSecurityGroups :: Lens' DBInstance [VpcSecurityGroupMembership]
dbiVpcSecurityGroups =
    lens _dbiVpcSecurityGroups (\s a -> s { _dbiVpcSecurityGroups = a })
        . _List

-- NOTE: the applicative chain constructs the record positionally, so the parse
-- order below must match the field order of the 'DBInstance' declaration
-- exactly (37 fields).
instance FromXML DBInstance where
    parseXML x = DBInstance
        <$> x .@? "AllocatedStorage"
        <*> x .@? "AutoMinorVersionUpgrade"
        <*> x .@? "AvailabilityZone"
        <*> x .@? "BackupRetentionPeriod"
        <*> x .@? "CACertificateIdentifier"
        <*> x .@? "CharacterSetName"
        <*> x .@? "DBInstanceClass"
        <*> x .@? "DBInstanceIdentifier"
        <*> x .@? "DBInstanceStatus"
        <*> x .@? "DBName"
        <*> x .@? "DBParameterGroups" .!@ mempty
        <*> x .@? "DBSecurityGroups" .!@ mempty
        <*> x .@? "DBSubnetGroup"
        <*> x .@? "DbiResourceId"
        <*> x .@? "Endpoint"
        <*> x .@? "Engine"
        <*> x .@? "EngineVersion"
        <*> x .@? "InstanceCreateTime"
        <*> x .@? "Iops"
        <*> x .@? "KmsKeyId"
        <*> x .@? "LatestRestorableTime"
        <*> x .@? "LicenseModel"
        <*> x .@? "MasterUsername"
        <*> x .@? "MultiAZ"
        <*> x .@? "OptionGroupMemberships" .!@ mempty
        <*> x .@? "PendingModifiedValues"
        <*> x .@? "PreferredBackupWindow"
        <*> x .@? "PreferredMaintenanceWindow"
        <*> x .@? "PubliclyAccessible"
        <*> x .@? "ReadReplicaDBInstanceIdentifiers" .!@ mempty
        <*> x .@? "ReadReplicaSourceDBInstanceIdentifier"
        <*> x .@? "SecondaryAvailabilityZone"
        <*> x .@? "StatusInfos" .!@ mempty
        <*> x .@? "StorageEncrypted"
        <*> x .@? "StorageType"
        <*> x .@? "TdeCredentialArn"
        <*> x .@? "VpcSecurityGroups" .!@ mempty

instance ToQuery DBInstance where
    toQuery DBInstance{..} = mconcat
        [ "AllocatedStorage"                      =? _dbiAllocatedStorage
        , "AutoMinorVersionUpgrade"               =? _dbiAutoMinorVersionUpgrade
        , "AvailabilityZone"                      =? _dbiAvailabilityZone
        , "BackupRetentionPeriod"                 =? _dbiBackupRetentionPeriod
        , "CACertificateIdentifier"               =? _dbiCACertificateIdentifier
        , "CharacterSetName"                      =? _dbiCharacterSetName
        , "DBInstanceClass"                       =? _dbiDBInstanceClass
        , "DBInstanceIdentifier"                  =? _dbiDBInstanceIdentifier
        , "DBInstanceStatus"                      =? _dbiDBInstanceStatus
        , "DBName"                                =? _dbiDBName
        , "DBParameterGroups"                     =? _dbiDBParameterGroups
        , "DBSecurityGroups"                      =? _dbiDBSecurityGroups
        , "DBSubnetGroup"                         =? _dbiDBSubnetGroup
        , "DbiResourceId"                         =? _dbiDbiResourceId
        , "Endpoint"                              =? _dbiEndpoint
        , "Engine"                                =? _dbiEngine
        , "EngineVersion"                         =? _dbiEngineVersion
        , "InstanceCreateTime"                    =? _dbiInstanceCreateTime
        , "Iops"                                  =? _dbiIops
        , "KmsKeyId"                              =? _dbiKmsKeyId
        , "LatestRestorableTime"                  =? _dbiLatestRestorableTime
        , "LicenseModel"                          =? _dbiLicenseModel
        , "MasterUsername"                        =? _dbiMasterUsername
        , "MultiAZ"                               =? _dbiMultiAZ
        , "OptionGroupMemberships"                =? _dbiOptionGroupMemberships
        , "PendingModifiedValues"                 =? _dbiPendingModifiedValues
        , "PreferredBackupWindow"                 =? _dbiPreferredBackupWindow
        , "PreferredMaintenanceWindow"            =? _dbiPreferredMaintenanceWindow
        , "PubliclyAccessible"                    =? _dbiPubliclyAccessible
        , "ReadReplicaDBInstanceIdentifiers"      =? _dbiReadReplicaDBInstanceIdentifiers
        , "ReadReplicaSourceDBInstanceIdentifier" =? _dbiReadReplicaSourceDBInstanceIdentifier
        , "SecondaryAvailabilityZone"             =? _dbiSecondaryAvailabilityZone
        , "StatusInfos"                           =? _dbiStatusInfos
        , "StorageEncrypted"                      =? _dbiStorageEncrypted
        , "StorageType"                           =? _dbiStorageType
        , "TdeCredentialArn"                      =? _dbiTdeCredentialArn
        , "VpcSecurityGroups"                     =? _dbiVpcSecurityGroups
        ]
-- | A per-account Amazon RDS quota: its name, its ceiling, and current usage.
data AccountQuota = AccountQuota
    { _aqAccountQuotaName :: Maybe Text
    , _aqMax              :: Maybe Integer
    , _aqUsed             :: Maybe Integer
    } deriving (Eq, Ord, Read, Show)

-- | Construct an 'AccountQuota' with every field unset.
--
-- Fields are reachable through these lenses:
--
-- * 'aqAccountQuotaName' @::@ 'Maybe' 'Text'
--
-- * 'aqMax' @::@ 'Maybe' 'Integer'
--
-- * 'aqUsed' @::@ 'Maybe' 'Integer'
--
accountQuota :: AccountQuota
accountQuota = AccountQuota Nothing Nothing Nothing

-- | The name of the Amazon RDS quota for this AWS account.
aqAccountQuotaName :: Lens' AccountQuota (Maybe Text)
aqAccountQuotaName =
    lens _aqAccountQuotaName (\quota name -> quota { _aqAccountQuotaName = name })

-- | The maximum allowed value for the quota.
aqMax :: Lens' AccountQuota (Maybe Integer)
aqMax = lens _aqMax (\quota limit -> quota { _aqMax = limit })

-- | The amount currently used toward the quota maximum.
aqUsed :: Lens' AccountQuota (Maybe Integer)
aqUsed = lens _aqUsed (\quota used -> quota { _aqUsed = used })

-- Parse order mirrors the record's field order (positional construction).
instance FromXML AccountQuota where
    parseXML x =
        AccountQuota
            <$> x .@? "AccountQuotaName"
            <*> x .@? "Max"
            <*> x .@? "Used"

instance ToQuery AccountQuota where
    toQuery q = mconcat
        [ "AccountQuotaName" =? _aqAccountQuotaName q
        , "Max"              =? _aqMax q
        , "Used"             =? _aqUsed q
        ]
-- | Wraps an optional availability-zone name.
newtype AvailabilityZone = AvailabilityZone
    { _azName :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)

-- | Construct an 'AvailabilityZone' with no name set.
--
-- Fields are reachable through these lenses:
--
-- * 'azName' @::@ 'Maybe' 'Text'
--
availabilityZone :: AvailabilityZone
availabilityZone = AvailabilityZone Nothing

-- | The name of the availability zone.
azName :: Lens' AvailabilityZone (Maybe Text)
azName = lens _azName (\zone name -> zone { _azName = name })

instance FromXML AvailabilityZone where
    parseXML x = fmap AvailabilityZone (x .@? "Name")

instance ToQuery AvailabilityZone where
    toQuery z = "Name" =? _azName z
-- | An RDS event notification subscription; every field is optional and is
-- filled positionally from XML by 'parseXML' below.
data EventSubscription = EventSubscription
    { _esCustSubscriptionId       :: Maybe Text
    , _esCustomerAwsId            :: Maybe Text
    , _esEnabled                  :: Maybe Bool
    , _esEventCategoriesList      :: List "member" Text
    , _esSnsTopicArn              :: Maybe Text
    , _esSourceIdsList            :: List "member" Text
    , _esSourceType               :: Maybe Text
    , _esStatus                   :: Maybe Text
    , _esSubscriptionCreationTime :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | 'EventSubscription' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'esCustSubscriptionId' @::@ 'Maybe' 'Text'
--
-- * 'esCustomerAwsId' @::@ 'Maybe' 'Text'
--
-- * 'esEnabled' @::@ 'Maybe' 'Bool'
--
-- * 'esEventCategoriesList' @::@ ['Text']
--
-- * 'esSnsTopicArn' @::@ 'Maybe' 'Text'
--
-- * 'esSourceIdsList' @::@ ['Text']
--
-- * 'esSourceType' @::@ 'Maybe' 'Text'
--
-- * 'esStatus' @::@ 'Maybe' 'Text'
--
-- * 'esSubscriptionCreationTime' @::@ 'Maybe' 'Text'
--
eventSubscription :: EventSubscription
eventSubscription = EventSubscription
    { _esCustomerAwsId            = Nothing
    , _esCustSubscriptionId       = Nothing
    , _esSnsTopicArn              = Nothing
    , _esStatus                   = Nothing
    , _esSubscriptionCreationTime = Nothing
    , _esSourceType               = Nothing
    , _esSourceIdsList            = mempty
    , _esEventCategoriesList      = mempty
    , _esEnabled                  = Nothing
    }

-- | The RDS event notification subscription Id.
esCustSubscriptionId :: Lens' EventSubscription (Maybe Text)
esCustSubscriptionId =
    lens _esCustSubscriptionId (\s a -> s { _esCustSubscriptionId = a })

-- | The AWS customer account associated with the RDS event notification
-- subscription.
esCustomerAwsId :: Lens' EventSubscription (Maybe Text)
esCustomerAwsId = lens _esCustomerAwsId (\s a -> s { _esCustomerAwsId = a })

-- | A Boolean value indicating if the subscription is enabled. True indicates the
-- subscription is enabled.
esEnabled :: Lens' EventSubscription (Maybe Bool)
esEnabled = lens _esEnabled (\s a -> s { _esEnabled = a })

-- | A list of event categories for the RDS event notification subscription.
esEventCategoriesList :: Lens' EventSubscription [Text]
esEventCategoriesList =
    lens _esEventCategoriesList (\s a -> s { _esEventCategoriesList = a })
        . _List

-- | The topic ARN of the RDS event notification subscription.
esSnsTopicArn :: Lens' EventSubscription (Maybe Text)
esSnsTopicArn = lens _esSnsTopicArn (\s a -> s { _esSnsTopicArn = a })

-- | A list of source IDs for the RDS event notification subscription.
esSourceIdsList :: Lens' EventSubscription [Text]
esSourceIdsList = lens _esSourceIdsList (\s a -> s { _esSourceIdsList = a }) . _List

-- | The source type for the RDS event notification subscription.
esSourceType :: Lens' EventSubscription (Maybe Text)
esSourceType = lens _esSourceType (\s a -> s { _esSourceType = a })

-- | The status of the RDS event notification subscription.
--
-- Constraints:
--
-- Can be one of the following: creating | modifying | deleting | active |
-- no-permission | topic-not-exist
--
-- The status "no-permission" indicates that RDS no longer has permission to
-- post to the SNS topic. The status "topic-not-exist" indicates that the topic
-- was deleted after the subscription was created.
esStatus :: Lens' EventSubscription (Maybe Text)
esStatus = lens _esStatus (\s a -> s { _esStatus = a })

-- | The time the RDS event notification subscription was created.
esSubscriptionCreationTime :: Lens' EventSubscription (Maybe Text)
esSubscriptionCreationTime =
    lens _esSubscriptionCreationTime
        (\s a -> s { _esSubscriptionCreationTime = a })

-- NOTE: the applicative chain constructs the record positionally, so the parse
-- order below must match the field order of the 'EventSubscription'
-- declaration exactly.
instance FromXML EventSubscription where
    parseXML x = EventSubscription
        <$> x .@? "CustSubscriptionId"
        <*> x .@? "CustomerAwsId"
        <*> x .@? "Enabled"
        <*> x .@? "EventCategoriesList" .!@ mempty
        <*> x .@? "SnsTopicArn"
        <*> x .@? "SourceIdsList" .!@ mempty
        <*> x .@? "SourceType"
        <*> x .@? "Status"
        <*> x .@? "SubscriptionCreationTime"

instance ToQuery EventSubscription where
    toQuery EventSubscription{..} = mconcat
        [ "CustSubscriptionId"       =? _esCustSubscriptionId
        , "CustomerAwsId"            =? _esCustomerAwsId
        , "Enabled"                  =? _esEnabled
        , "EventCategoriesList"      =? _esEventCategoriesList
        , "SnsTopicArn"              =? _esSnsTopicArn
        , "SourceIdsList"            =? _esSourceIdsList
        , "SourceType"               =? _esSourceType
        , "Status"                   =? _esStatus
        , "SubscriptionCreationTime" =? _esSubscriptionCreationTime
        ]
-- | A DB subnet group: its name, description, status, member subnets, and VPC.
data DBSubnetGroup = DBSubnetGroup
    { _dbsg1DBSubnetGroupDescription :: Maybe Text
    , _dbsg1DBSubnetGroupName        :: Maybe Text
    , _dbsg1SubnetGroupStatus        :: Maybe Text
    , _dbsg1Subnets                  :: List "member" Subnet
    , _dbsg1VpcId                    :: Maybe Text
    } deriving (Eq, Read, Show)

-- | Construct a 'DBSubnetGroup' with no fields set and an empty subnet list.
--
-- Fields are reachable through these lenses:
--
-- * 'dbsg1DBSubnetGroupDescription' @::@ 'Maybe' 'Text'
--
-- * 'dbsg1DBSubnetGroupName' @::@ 'Maybe' 'Text'
--
-- * 'dbsg1SubnetGroupStatus' @::@ 'Maybe' 'Text'
--
-- * 'dbsg1Subnets' @::@ ['Subnet']
--
-- * 'dbsg1VpcId' @::@ 'Maybe' 'Text'
--
dbsubnetGroup :: DBSubnetGroup
dbsubnetGroup = DBSubnetGroup Nothing Nothing Nothing mempty Nothing

-- | Provides the description of the DB subnet group.
dbsg1DBSubnetGroupDescription :: Lens' DBSubnetGroup (Maybe Text)
dbsg1DBSubnetGroupDescription =
    lens _dbsg1DBSubnetGroupDescription
        (\g d -> g { _dbsg1DBSubnetGroupDescription = d })

-- | Specifies the name of the DB subnet group.
dbsg1DBSubnetGroupName :: Lens' DBSubnetGroup (Maybe Text)
dbsg1DBSubnetGroupName =
    lens _dbsg1DBSubnetGroupName (\g n -> g { _dbsg1DBSubnetGroupName = n })

-- | Provides the status of the DB subnet group.
dbsg1SubnetGroupStatus :: Lens' DBSubnetGroup (Maybe Text)
dbsg1SubnetGroupStatus =
    lens _dbsg1SubnetGroupStatus (\g st -> g { _dbsg1SubnetGroupStatus = st })

-- | Contains a list of 'Subnet' elements.
dbsg1Subnets :: Lens' DBSubnetGroup [Subnet]
dbsg1Subnets = lens _dbsg1Subnets (\g xs -> g { _dbsg1Subnets = xs }) . _List

-- | Provides the VpcId of the DB subnet group.
dbsg1VpcId :: Lens' DBSubnetGroup (Maybe Text)
dbsg1VpcId = lens _dbsg1VpcId (\g v -> g { _dbsg1VpcId = v })

-- Parse order mirrors the record's field order (positional construction).
instance FromXML DBSubnetGroup where
    parseXML x =
        DBSubnetGroup
            <$> x .@? "DBSubnetGroupDescription"
            <*> x .@? "DBSubnetGroupName"
            <*> x .@? "SubnetGroupStatus"
            <*> x .@? "Subnets" .!@ mempty
            <*> x .@? "VpcId"

instance ToQuery DBSubnetGroup where
    toQuery g = mconcat
        [ "DBSubnetGroupDescription" =? _dbsg1DBSubnetGroupDescription g
        , "DBSubnetGroupName"        =? _dbsg1DBSubnetGroupName g
        , "SubnetGroupStatus"        =? _dbsg1SubnetGroupStatus g
        , "Subnets"                  =? _dbsg1Subnets g
        , "VpcId"                    =? _dbsg1VpcId g
        ]
-- | A CA certificate: identifier, type, thumbprint, and validity window.
data Certificate = Certificate
    { _cCertificateIdentifier :: Maybe Text
    , _cCertificateType       :: Maybe Text
    , _cThumbprint            :: Maybe Text
    , _cValidFrom             :: Maybe ISO8601
    , _cValidTill             :: Maybe ISO8601
    } deriving (Eq, Ord, Read, Show)

-- | Construct a 'Certificate' with every field unset.
--
-- Fields are reachable through these lenses:
--
-- * 'cCertificateIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'cCertificateType' @::@ 'Maybe' 'Text'
--
-- * 'cThumbprint' @::@ 'Maybe' 'Text'
--
-- * 'cValidFrom' @::@ 'Maybe' 'UTCTime'
--
-- * 'cValidTill' @::@ 'Maybe' 'UTCTime'
--
certificate :: Certificate
certificate = Certificate Nothing Nothing Nothing Nothing Nothing

-- | The unique key that identifies a certificate.
cCertificateIdentifier :: Lens' Certificate (Maybe Text)
cCertificateIdentifier =
    lens _cCertificateIdentifier
        (\cert ident -> cert { _cCertificateIdentifier = ident })

-- | The type of the certificate.
cCertificateType :: Lens' Certificate (Maybe Text)
cCertificateType =
    lens _cCertificateType (\cert ty -> cert { _cCertificateType = ty })

-- | The thumbprint of the certificate.
cThumbprint :: Lens' Certificate (Maybe Text)
cThumbprint = lens _cThumbprint (\cert tp -> cert { _cThumbprint = tp })

-- | The starting date from which the certificate is valid.
cValidFrom :: Lens' Certificate (Maybe UTCTime)
cValidFrom =
    lens _cValidFrom (\cert t -> cert { _cValidFrom = t }) . mapping _Time

-- | The final date that the certificate continues to be valid.
cValidTill :: Lens' Certificate (Maybe UTCTime)
cValidTill =
    lens _cValidTill (\cert t -> cert { _cValidTill = t }) . mapping _Time

-- Parse order mirrors the record's field order (positional construction).
instance FromXML Certificate where
    parseXML x =
        Certificate
            <$> x .@? "CertificateIdentifier"
            <*> x .@? "CertificateType"
            <*> x .@? "Thumbprint"
            <*> x .@? "ValidFrom"
            <*> x .@? "ValidTill"

instance ToQuery Certificate where
    toQuery c = mconcat
        [ "CertificateIdentifier" =? _cCertificateIdentifier c
        , "CertificateType"       =? _cCertificateType c
        , "Thumbprint"            =? _cThumbprint c
        , "ValidFrom"             =? _cValidFrom c
        , "ValidTill"             =? _cValidTill c
        ]
-- | Per-instance status information, including an optional error message.
data DBInstanceStatusInfo = DBInstanceStatusInfo
    { _dbisiMessage    :: Maybe Text
    , _dbisiNormal     :: Maybe Bool
    , _dbisiStatus     :: Maybe Text
    , _dbisiStatusType :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Build a 'DBInstanceStatusInfo' with every field unset.
--
-- Individual fields can then be populated through these lenses:
--
-- * 'dbisiMessage' @::@ 'Maybe' 'Text'
--
-- * 'dbisiNormal' @::@ 'Maybe' 'Bool'
--
-- * 'dbisiStatus' @::@ 'Maybe' 'Text'
--
-- * 'dbisiStatusType' @::@ 'Maybe' 'Text'
--
dbinstanceStatusInfo :: DBInstanceStatusInfo
dbinstanceStatusInfo = DBInstanceStatusInfo Nothing Nothing Nothing Nothing

-- | Details of the error if there is an error for the instance. If the
-- instance is not in an error state, this value is blank.
dbisiMessage :: Lens' DBInstanceStatusInfo (Maybe Text)
dbisiMessage = lens _dbisiMessage (\rec v -> rec { _dbisiMessage = v })

-- | Boolean value that is true if the instance is operating normally, or
-- false if the instance is in an error state.
dbisiNormal :: Lens' DBInstanceStatusInfo (Maybe Bool)
dbisiNormal = lens _dbisiNormal (\rec v -> rec { _dbisiNormal = v })

-- | Status of the DB instance. For a StatusType of read replica, the values
-- can be replicating, error, stopped, or terminated.
dbisiStatus :: Lens' DBInstanceStatusInfo (Maybe Text)
dbisiStatus = lens _dbisiStatus (\rec v -> rec { _dbisiStatus = v })

-- | This value is currently "read replication."
dbisiStatusType :: Lens' DBInstanceStatusInfo (Maybe Text)
dbisiStatusType = lens _dbisiStatusType (\rec v -> rec { _dbisiStatusType = v })

-- NOTE: the parse order below must line up with the field order of the
-- 'DBInstanceStatusInfo' record declaration above.
instance FromXML DBInstanceStatusInfo where
    parseXML xml = DBInstanceStatusInfo
        <$> xml .@? "Message"
        <*> xml .@? "Normal"
        <*> xml .@? "Status"
        <*> xml .@? "StatusType"

instance ToQuery DBInstanceStatusInfo where
    toQuery DBInstanceStatusInfo{..} = mconcat
        [ "Message"    =? _dbisiMessage
        , "Normal"     =? _dbisiNormal
        , "Status"     =? _dbisiStatus
        , "StatusType" =? _dbisiStatusType
        ]
-- | A single modifiable setting belonging to an option (name/value pair plus
-- metadata such as the allowed values and whether it can be changed).
data OptionSetting = OptionSetting
    { _osAllowedValues :: Maybe Text
    , _osApplyType     :: Maybe Text
    , _osDataType      :: Maybe Text
    , _osDefaultValue  :: Maybe Text
    , _osDescription   :: Maybe Text
    , _osIsCollection  :: Maybe Bool
    , _osIsModifiable  :: Maybe Bool
    , _osName          :: Maybe Text
    , _osValue         :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'OptionSetting' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'osAllowedValues' @::@ 'Maybe' 'Text'
--
-- * 'osApplyType' @::@ 'Maybe' 'Text'
--
-- * 'osDataType' @::@ 'Maybe' 'Text'
--
-- * 'osDefaultValue' @::@ 'Maybe' 'Text'
--
-- * 'osDescription' @::@ 'Maybe' 'Text'
--
-- * 'osIsCollection' @::@ 'Maybe' 'Bool'
--
-- * 'osIsModifiable' @::@ 'Maybe' 'Bool'
--
-- * 'osName' @::@ 'Maybe' 'Text'
--
-- * 'osValue' @::@ 'Maybe' 'Text'
--
-- Every field starts as 'Nothing'; populate via the lenses below.
optionSetting :: OptionSetting
optionSetting = OptionSetting
    { _osName          = Nothing
    , _osValue         = Nothing
    , _osDefaultValue  = Nothing
    , _osDescription   = Nothing
    , _osApplyType     = Nothing
    , _osDataType      = Nothing
    , _osAllowedValues = Nothing
    , _osIsModifiable  = Nothing
    , _osIsCollection  = Nothing
    }
-- | The allowed values of the option setting.
osAllowedValues :: Lens' OptionSetting (Maybe Text)
osAllowedValues = lens _osAllowedValues (\s a -> s { _osAllowedValues = a })
-- | The DB engine specific parameter type.
osApplyType :: Lens' OptionSetting (Maybe Text)
osApplyType = lens _osApplyType (\s a -> s { _osApplyType = a })
-- | The data type of the option setting.
osDataType :: Lens' OptionSetting (Maybe Text)
osDataType = lens _osDataType (\s a -> s { _osDataType = a })
-- | The default value of the option setting.
osDefaultValue :: Lens' OptionSetting (Maybe Text)
osDefaultValue = lens _osDefaultValue (\s a -> s { _osDefaultValue = a })
-- | The description of the option setting.
osDescription :: Lens' OptionSetting (Maybe Text)
osDescription = lens _osDescription (\s a -> s { _osDescription = a })
-- | Indicates if the option setting is part of a collection.
osIsCollection :: Lens' OptionSetting (Maybe Bool)
osIsCollection = lens _osIsCollection (\s a -> s { _osIsCollection = a })
-- | A Boolean value that, when true, indicates the option setting can be
-- modified from the default.
osIsModifiable :: Lens' OptionSetting (Maybe Bool)
osIsModifiable = lens _osIsModifiable (\s a -> s { _osIsModifiable = a })
-- | The name of the option that has settings that you can set.
osName :: Lens' OptionSetting (Maybe Text)
osName = lens _osName (\s a -> s { _osName = a })
-- | The current value of the option setting.
osValue :: Lens' OptionSetting (Maybe Text)
osValue = lens _osValue (\s a -> s { _osValue = a })
-- NOTE: the applicative chain below is positionally coupled to the field
-- order of the 'OptionSetting' record declaration. Seven of the nine fields
-- share the type 'Maybe' 'Text', so a reordering here would still typecheck
-- but silently attach the wrong XML elements to the wrong fields.
instance FromXML OptionSetting where
    parseXML x = OptionSetting
        <$> x .@? "AllowedValues"
        <*> x .@? "ApplyType"
        <*> x .@? "DataType"
        <*> x .@? "DefaultValue"
        <*> x .@? "Description"
        <*> x .@? "IsCollection"
        <*> x .@? "IsModifiable"
        <*> x .@? "Name"
        <*> x .@? "Value"
instance ToQuery OptionSetting where
    toQuery OptionSetting{..} = mconcat
        [ "AllowedValues" =? _osAllowedValues
        , "ApplyType"     =? _osApplyType
        , "DataType"      =? _osDataType
        , "DefaultValue"  =? _osDefaultValue
        , "Description"   =? _osDescription
        , "IsCollection"  =? _osIsCollection
        , "IsModifiable"  =? _osIsModifiable
        , "Name"          =? _osName
        , "Value"         =? _osValue
        ]
-- | Metadata about one DB log file: name, size and last-written timestamp.
data DescribeDBLogFilesDetails = DescribeDBLogFilesDetails
    { _ddblfdLastWritten :: Maybe Integer
    , _ddblfdLogFileName :: Maybe Text
    , _ddblfdSize        :: Maybe Integer
    } deriving (Eq, Ord, Read, Show)

-- | Build a 'DescribeDBLogFilesDetails' with every field unset.
--
-- Individual fields can then be populated through these lenses:
--
-- * 'ddblfdLastWritten' @::@ 'Maybe' 'Integer'
--
-- * 'ddblfdLogFileName' @::@ 'Maybe' 'Text'
--
-- * 'ddblfdSize' @::@ 'Maybe' 'Integer'
--
describeDBLogFilesDetails :: DescribeDBLogFilesDetails
describeDBLogFilesDetails = DescribeDBLogFilesDetails Nothing Nothing Nothing

-- | A POSIX timestamp when the last log entry was written.
ddblfdLastWritten :: Lens' DescribeDBLogFilesDetails (Maybe Integer)
ddblfdLastWritten =
    lens _ddblfdLastWritten (\rec v -> rec { _ddblfdLastWritten = v })

-- | The name of the log file for the specified DB instance.
ddblfdLogFileName :: Lens' DescribeDBLogFilesDetails (Maybe Text)
ddblfdLogFileName =
    lens _ddblfdLogFileName (\rec v -> rec { _ddblfdLogFileName = v })

-- | The size, in bytes, of the log file for the specified DB instance.
ddblfdSize :: Lens' DescribeDBLogFilesDetails (Maybe Integer)
ddblfdSize = lens _ddblfdSize (\rec v -> rec { _ddblfdSize = v })

-- NOTE: the parse order below must line up with the field order of the
-- 'DescribeDBLogFilesDetails' record declaration above.
instance FromXML DescribeDBLogFilesDetails where
    parseXML xml = DescribeDBLogFilesDetails
        <$> xml .@? "LastWritten"
        <*> xml .@? "LogFileName"
        <*> xml .@? "Size"

instance ToQuery DescribeDBLogFilesDetails where
    toQuery DescribeDBLogFilesDetails{..} = mconcat
        [ "LastWritten" =? _ddblfdLastWritten
        , "LogFileName" =? _ddblfdLogFileName
        , "Size"        =? _ddblfdSize
        ]
-- | A purchasable DB instance configuration: engine/version/class plus
-- capability flags (Multi-AZ, read replicas, IOPS, storage encryption, VPC)
-- and the availability zones it can be launched in.
data OrderableDBInstanceOption = OrderableDBInstanceOption
    { _odbioAvailabilityZones         :: List "member" AvailabilityZone
    , _odbioDBInstanceClass           :: Maybe Text
    , _odbioEngine                    :: Maybe Text
    , _odbioEngineVersion             :: Maybe Text
    , _odbioLicenseModel              :: Maybe Text
    , _odbioMultiAZCapable            :: Maybe Bool
    , _odbioReadReplicaCapable        :: Maybe Bool
    , _odbioStorageType               :: Maybe Text
    , _odbioSupportsIops              :: Maybe Bool
    , _odbioSupportsStorageEncryption :: Maybe Bool
    , _odbioVpc                       :: Maybe Bool
    } deriving (Eq, Read, Show)
-- | 'OrderableDBInstanceOption' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'odbioAvailabilityZones' @::@ ['AvailabilityZone']
--
-- * 'odbioDBInstanceClass' @::@ 'Maybe' 'Text'
--
-- * 'odbioEngine' @::@ 'Maybe' 'Text'
--
-- * 'odbioEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'odbioLicenseModel' @::@ 'Maybe' 'Text'
--
-- * 'odbioMultiAZCapable' @::@ 'Maybe' 'Bool'
--
-- * 'odbioReadReplicaCapable' @::@ 'Maybe' 'Bool'
--
-- * 'odbioStorageType' @::@ 'Maybe' 'Text'
--
-- * 'odbioSupportsIops' @::@ 'Maybe' 'Bool'
--
-- * 'odbioSupportsStorageEncryption' @::@ 'Maybe' 'Bool'
--
-- * 'odbioVpc' @::@ 'Maybe' 'Bool'
--
-- All optional fields start as 'Nothing'; the availability-zone list starts
-- empty ('mempty').
orderableDBInstanceOption :: OrderableDBInstanceOption
orderableDBInstanceOption = OrderableDBInstanceOption
    { _odbioEngine                    = Nothing
    , _odbioEngineVersion             = Nothing
    , _odbioDBInstanceClass           = Nothing
    , _odbioLicenseModel              = Nothing
    , _odbioAvailabilityZones         = mempty
    , _odbioMultiAZCapable            = Nothing
    , _odbioReadReplicaCapable        = Nothing
    , _odbioVpc                       = Nothing
    , _odbioSupportsStorageEncryption = Nothing
    , _odbioStorageType               = Nothing
    , _odbioSupportsIops              = Nothing
    }
-- | A list of availability zones for the orderable DB instance.
-- The '_List' suffix unwraps the @List "member"@ newtype so callers see a
-- plain Haskell list.
odbioAvailabilityZones :: Lens' OrderableDBInstanceOption [AvailabilityZone]
odbioAvailabilityZones =
    lens _odbioAvailabilityZones (\s a -> s { _odbioAvailabilityZones = a })
        . _List
-- | The DB instance Class for the orderable DB instance
odbioDBInstanceClass :: Lens' OrderableDBInstanceOption (Maybe Text)
odbioDBInstanceClass =
    lens _odbioDBInstanceClass (\s a -> s { _odbioDBInstanceClass = a })
-- | The engine type of the orderable DB instance.
odbioEngine :: Lens' OrderableDBInstanceOption (Maybe Text)
odbioEngine = lens _odbioEngine (\s a -> s { _odbioEngine = a })
-- | The engine version of the orderable DB instance.
odbioEngineVersion :: Lens' OrderableDBInstanceOption (Maybe Text)
odbioEngineVersion =
    lens _odbioEngineVersion (\s a -> s { _odbioEngineVersion = a })
-- | The license model for the orderable DB instance.
odbioLicenseModel :: Lens' OrderableDBInstanceOption (Maybe Text)
odbioLicenseModel =
    lens _odbioLicenseModel (\s a -> s { _odbioLicenseModel = a })
-- | Indicates whether this orderable DB instance is multi-AZ capable.
odbioMultiAZCapable :: Lens' OrderableDBInstanceOption (Maybe Bool)
odbioMultiAZCapable =
    lens _odbioMultiAZCapable (\s a -> s { _odbioMultiAZCapable = a })
-- | Indicates whether this orderable DB instance can have a Read Replica.
odbioReadReplicaCapable :: Lens' OrderableDBInstanceOption (Maybe Bool)
odbioReadReplicaCapable =
    lens _odbioReadReplicaCapable (\s a -> s { _odbioReadReplicaCapable = a })
-- | The storage type for this orderable DB instance.
odbioStorageType :: Lens' OrderableDBInstanceOption (Maybe Text)
odbioStorageType = lens _odbioStorageType (\s a -> s { _odbioStorageType = a })
-- | Indicates whether this orderable DB instance supports provisioned IOPS.
odbioSupportsIops :: Lens' OrderableDBInstanceOption (Maybe Bool)
odbioSupportsIops =
    lens _odbioSupportsIops (\s a -> s { _odbioSupportsIops = a })
-- | Indicates whether this orderable DB instance supports encrypted storage.
odbioSupportsStorageEncryption :: Lens' OrderableDBInstanceOption (Maybe Bool)
odbioSupportsStorageEncryption =
    lens _odbioSupportsStorageEncryption
        (\s a -> s { _odbioSupportsStorageEncryption = a })
-- | Indicates whether this is a VPC orderable DB instance.
odbioVpc :: Lens' OrderableDBInstanceOption (Maybe Bool)
odbioVpc = lens _odbioVpc (\s a -> s { _odbioVpc = a })
-- NOTE: the applicative chain below is positionally coupled to the field
-- order of the record declaration; a missing "AvailabilityZones" element
-- falls back to an empty list via '.!@' 'mempty'. Several fields share the
-- same type, so a reordering here would typecheck yet misparse.
instance FromXML OrderableDBInstanceOption where
    parseXML x = OrderableDBInstanceOption
        <$> x .@? "AvailabilityZones" .!@ mempty
        <*> x .@? "DBInstanceClass"
        <*> x .@? "Engine"
        <*> x .@? "EngineVersion"
        <*> x .@? "LicenseModel"
        <*> x .@? "MultiAZCapable"
        <*> x .@? "ReadReplicaCapable"
        <*> x .@? "StorageType"
        <*> x .@? "SupportsIops"
        <*> x .@? "SupportsStorageEncryption"
        <*> x .@? "Vpc"
instance ToQuery OrderableDBInstanceOption where
    toQuery OrderableDBInstanceOption{..} = mconcat
        [ "AvailabilityZones"         =? _odbioAvailabilityZones
        , "DBInstanceClass"           =? _odbioDBInstanceClass
        , "Engine"                    =? _odbioEngine
        , "EngineVersion"             =? _odbioEngineVersion
        , "LicenseModel"              =? _odbioLicenseModel
        , "MultiAZCapable"            =? _odbioMultiAZCapable
        , "ReadReplicaCapable"        =? _odbioReadReplicaCapable
        , "StorageType"               =? _odbioStorageType
        , "SupportsIops"              =? _odbioSupportsIops
        , "SupportsStorageEncryption" =? _odbioSupportsStorageEncryption
        , "Vpc"                       =? _odbioVpc
        ]
-- | A name plus a list of values used to constrain describe-style requests.
data Filter = Filter
    { _fName   :: Text
    , _fValues :: List "member" Text
    } deriving (Eq, Ord, Read, Show)

-- | Build a 'Filter' from its required name; the value list starts empty.
--
-- Fields reachable through lenses:
--
-- * 'fName' @::@ 'Text'
--
-- * 'fValues' @::@ ['Text']
--
filter' :: Text -- ^ 'fName'
        -> Filter
filter' name = Filter name mempty

-- | This parameter is not currently supported.
fName :: Lens' Filter Text
fName = lens _fName (\rec v -> rec { _fName = v })

-- | This parameter is not currently supported.
fValues :: Lens' Filter [Text]
fValues = lens _fValues (\rec v -> rec { _fValues = v }) . _List

-- NOTE: "Name" is mandatory ('.@'); a missing "Values" element becomes the
-- empty list.
instance FromXML Filter where
    parseXML xml = Filter
        <$> xml .@  "Name"
        <*> xml .@? "Values" .!@ mempty

instance ToQuery Filter where
    toQuery Filter{..} = mconcat
        [ "Name"   =? _fName
        , "Values" =? _fValues
        ]
-- | A recurring charge: an amount paired with a billing frequency.
data RecurringCharge = RecurringCharge
    { _rcRecurringChargeAmount    :: Maybe Double
    , _rcRecurringChargeFrequency :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Build a 'RecurringCharge' with both fields unset.
--
-- Individual fields can then be populated through these lenses:
--
-- * 'rcRecurringChargeAmount' @::@ 'Maybe' 'Double'
--
-- * 'rcRecurringChargeFrequency' @::@ 'Maybe' 'Text'
--
recurringCharge :: RecurringCharge
recurringCharge = RecurringCharge Nothing Nothing

-- | The amount of the recurring charge.
rcRecurringChargeAmount :: Lens' RecurringCharge (Maybe Double)
rcRecurringChargeAmount =
    lens _rcRecurringChargeAmount
         (\rec v -> rec { _rcRecurringChargeAmount = v })

-- | The frequency of the recurring charge.
rcRecurringChargeFrequency :: Lens' RecurringCharge (Maybe Text)
rcRecurringChargeFrequency =
    lens _rcRecurringChargeFrequency
         (\rec v -> rec { _rcRecurringChargeFrequency = v })

-- NOTE: the parse order below must line up with the field order of the
-- 'RecurringCharge' record declaration above.
instance FromXML RecurringCharge where
    parseXML xml = RecurringCharge
        <$> xml .@? "RecurringChargeAmount"
        <*> xml .@? "RecurringChargeFrequency"

instance ToQuery RecurringCharge where
    toQuery RecurringCharge{..} = mconcat
        [ "RecurringChargeAmount"    =? _rcRecurringChargeAmount
        , "RecurringChargeFrequency" =? _rcRecurringChargeFrequency
        ]
-- | A network endpoint: DNS address plus port.
data Endpoint = Endpoint
    { _eAddress :: Maybe Text
    , _ePort    :: Maybe Int
    } deriving (Eq, Ord, Read, Show)

-- | Build an 'Endpoint' with both fields unset.
--
-- Individual fields can then be populated through these lenses:
--
-- * 'eAddress' @::@ 'Maybe' 'Text'
--
-- * 'ePort' @::@ 'Maybe' 'Int'
--
endpoint :: Endpoint
endpoint = Endpoint Nothing Nothing

-- | Specifies the DNS address of the DB instance.
eAddress :: Lens' Endpoint (Maybe Text)
eAddress = lens _eAddress (\rec v -> rec { _eAddress = v })

-- | Specifies the port that the database engine is listening on.
ePort :: Lens' Endpoint (Maybe Int)
ePort = lens _ePort (\rec v -> rec { _ePort = v })

-- NOTE: the parse order below must line up with the field order of the
-- 'Endpoint' record declaration above.
instance FromXML Endpoint where
    parseXML xml = Endpoint
        <$> xml .@? "Address"
        <*> xml .@? "Port"

instance ToQuery Endpoint where
    toQuery Endpoint{..} = mconcat
        [ "Address" =? _eAddress
        , "Port"    =? _ePort
        ]
-- | A request-side description of an option to add to an option group: the
-- option name plus optional port, settings and security-group memberships.
data OptionConfiguration = OptionConfiguration
    { _ocDBSecurityGroupMemberships  :: List "member" Text
    , _ocOptionName                  :: Text
    , _ocOptionSettings              :: List "member" OptionSetting
    , _ocPort                        :: Maybe Int
    , _ocVpcSecurityGroupMemberships :: List "member" Text
    } deriving (Eq, Read, Show)

-- | Build an 'OptionConfiguration' from its required option name; the port
-- is unset and all membership/settings lists start empty.
--
-- Fields reachable through lenses:
--
-- * 'ocDBSecurityGroupMemberships' @::@ ['Text']
--
-- * 'ocOptionName' @::@ 'Text'
--
-- * 'ocOptionSettings' @::@ ['OptionSetting']
--
-- * 'ocPort' @::@ 'Maybe' 'Int'
--
-- * 'ocVpcSecurityGroupMemberships' @::@ ['Text']
--
optionConfiguration :: Text -- ^ 'ocOptionName'
                    -> OptionConfiguration
optionConfiguration name =
    OptionConfiguration mempty name mempty Nothing mempty

-- | A list of DBSecurityGroupMemebrship name strings used for this option.
ocDBSecurityGroupMemberships :: Lens' OptionConfiguration [Text]
ocDBSecurityGroupMemberships =
    lens _ocDBSecurityGroupMemberships
         (\rec v -> rec { _ocDBSecurityGroupMemberships = v })
        . _List

-- | The configuration of options to include in a group.
ocOptionName :: Lens' OptionConfiguration Text
ocOptionName = lens _ocOptionName (\rec v -> rec { _ocOptionName = v })

-- | The option settings to include in an option group.
ocOptionSettings :: Lens' OptionConfiguration [OptionSetting]
ocOptionSettings =
    lens _ocOptionSettings (\rec v -> rec { _ocOptionSettings = v }) . _List

-- | The optional port for the option.
ocPort :: Lens' OptionConfiguration (Maybe Int)
ocPort = lens _ocPort (\rec v -> rec { _ocPort = v })

-- | A list of VpcSecurityGroupMemebrship name strings used for this option.
ocVpcSecurityGroupMemberships :: Lens' OptionConfiguration [Text]
ocVpcSecurityGroupMemberships =
    lens _ocVpcSecurityGroupMemberships
         (\rec v -> rec { _ocVpcSecurityGroupMemberships = v })
        . _List

-- NOTE: "OptionName" is mandatory ('.@'); missing list elements fall back to
-- 'mempty'. The parse order must line up with the record field order above.
instance FromXML OptionConfiguration where
    parseXML xml = OptionConfiguration
        <$> xml .@? "DBSecurityGroupMemberships" .!@ mempty
        <*> xml .@  "OptionName"
        <*> xml .@? "OptionSettings" .!@ mempty
        <*> xml .@? "Port"
        <*> xml .@? "VpcSecurityGroupMemberships" .!@ mempty

instance ToQuery OptionConfiguration where
    toQuery OptionConfiguration{..} = mconcat
        [ "DBSecurityGroupMemberships"  =? _ocDBSecurityGroupMemberships
        , "OptionName"                  =? _ocOptionName
        , "OptionSettings"              =? _ocOptionSettings
        , "Port"                        =? _ocPort
        , "VpcSecurityGroupMemberships" =? _ocVpcSecurityGroupMemberships
        ]
-- | A response-side description of an option attached to an option group,
-- including its settings, port and the security groups granting port access.
data Option = Option
    { _oDBSecurityGroupMemberships  :: List "member" DBSecurityGroupMembership
    , _oOptionDescription           :: Maybe Text
    , _oOptionName                  :: Maybe Text
    , _oOptionSettings              :: List "member" OptionSetting
    , _oPermanent                   :: Maybe Bool
    , _oPersistent                  :: Maybe Bool
    , _oPort                        :: Maybe Int
    , _oVpcSecurityGroupMemberships :: List "member" VpcSecurityGroupMembership
    } deriving (Eq, Read, Show)
-- | 'Option' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'oDBSecurityGroupMemberships' @::@ ['DBSecurityGroupMembership']
--
-- * 'oOptionDescription' @::@ 'Maybe' 'Text'
--
-- * 'oOptionName' @::@ 'Maybe' 'Text'
--
-- * 'oOptionSettings' @::@ ['OptionSetting']
--
-- * 'oPermanent' @::@ 'Maybe' 'Bool'
--
-- * 'oPersistent' @::@ 'Maybe' 'Bool'
--
-- * 'oPort' @::@ 'Maybe' 'Int'
--
-- * 'oVpcSecurityGroupMemberships' @::@ ['VpcSecurityGroupMembership']
--
-- All optional fields start as 'Nothing'; the three list fields start empty.
option :: Option
option = Option
    { _oOptionName                  = Nothing
    , _oOptionDescription           = Nothing
    , _oPersistent                  = Nothing
    , _oPermanent                   = Nothing
    , _oPort                        = Nothing
    , _oOptionSettings              = mempty
    , _oDBSecurityGroupMemberships  = mempty
    , _oVpcSecurityGroupMemberships = mempty
    }
-- | If the option requires access to a port, then this DB security group allows
-- access to the port.
oDBSecurityGroupMemberships :: Lens' Option [DBSecurityGroupMembership]
oDBSecurityGroupMemberships =
    lens _oDBSecurityGroupMemberships
        (\s a -> s { _oDBSecurityGroupMemberships = a })
            . _List
-- | The description of the option.
oOptionDescription :: Lens' Option (Maybe Text)
oOptionDescription =
    lens _oOptionDescription (\s a -> s { _oOptionDescription = a })
-- | The name of the option.
oOptionName :: Lens' Option (Maybe Text)
oOptionName = lens _oOptionName (\s a -> s { _oOptionName = a })
-- | The option settings for this option.
oOptionSettings :: Lens' Option [OptionSetting]
oOptionSettings = lens _oOptionSettings (\s a -> s { _oOptionSettings = a }) . _List
-- | Indicate if this option is permanent.
oPermanent :: Lens' Option (Maybe Bool)
oPermanent = lens _oPermanent (\s a -> s { _oPermanent = a })
-- | Indicate if this option is persistent.
oPersistent :: Lens' Option (Maybe Bool)
oPersistent = lens _oPersistent (\s a -> s { _oPersistent = a })
-- | If required, the port configured for this option to use.
oPort :: Lens' Option (Maybe Int)
oPort = lens _oPort (\s a -> s { _oPort = a })
-- | If the option requires access to a port, then this VPC security group allows
-- access to the port.
oVpcSecurityGroupMemberships :: Lens' Option [VpcSecurityGroupMembership]
oVpcSecurityGroupMemberships =
    lens _oVpcSecurityGroupMemberships
        (\s a -> s { _oVpcSecurityGroupMemberships = a })
            . _List
-- NOTE: the applicative chain below is positionally coupled to the record
-- field order; missing list elements fall back to 'mempty' via '.!@'.
-- 'Permanent'/'Persistent' share the type 'Maybe' 'Bool', so a reordering
-- would typecheck yet misparse.
instance FromXML Option where
    parseXML x = Option
        <$> x .@? "DBSecurityGroupMemberships" .!@ mempty
        <*> x .@? "OptionDescription"
        <*> x .@? "OptionName"
        <*> x .@? "OptionSettings" .!@ mempty
        <*> x .@? "Permanent"
        <*> x .@? "Persistent"
        <*> x .@? "Port"
        <*> x .@? "VpcSecurityGroupMemberships" .!@ mempty
instance ToQuery Option where
    toQuery Option{..} = mconcat
        [ "DBSecurityGroupMemberships"  =? _oDBSecurityGroupMemberships
        , "OptionDescription"           =? _oOptionDescription
        , "OptionName"                  =? _oOptionName
        , "OptionSettings"              =? _oOptionSettings
        , "Permanent"                   =? _oPermanent
        , "Persistent"                  =? _oPersistent
        , "Port"                        =? _oPort
        , "VpcSecurityGroupMemberships" =? _oVpcSecurityGroupMemberships
        ]
-- | An IP range (CIDR) together with its authorization status.
data IPRange = IPRange
    { _iprCIDRIP :: Maybe Text
    , _iprStatus :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Build an 'IPRange' with both fields unset.
--
-- Individual fields can then be populated through these lenses:
--
-- * 'iprCIDRIP' @::@ 'Maybe' 'Text'
--
-- * 'iprStatus' @::@ 'Maybe' 'Text'
--
iprange :: IPRange
iprange = IPRange Nothing Nothing

-- | Specifies the IP range.
iprCIDRIP :: Lens' IPRange (Maybe Text)
iprCIDRIP = lens _iprCIDRIP (\rec v -> rec { _iprCIDRIP = v })

-- | Specifies the status of the IP range. Status can be "authorizing",
-- "authorized", "revoking", and "revoked".
iprStatus :: Lens' IPRange (Maybe Text)
iprStatus = lens _iprStatus (\rec v -> rec { _iprStatus = v })

-- NOTE: both fields are 'Maybe' 'Text' — the parse order below must line up
-- with the field order of the record declaration above.
instance FromXML IPRange where
    parseXML xml = IPRange
        <$> xml .@? "CIDRIP"
        <*> xml .@? "Status"

instance ToQuery IPRange where
    toQuery IPRange{..} = mconcat
        [ "CIDRIP" =? _iprCIDRIP
        , "Status" =? _iprStatus
        ]
-- | A DB instance's membership in an option group, with its sync status.
data OptionGroupMembership = OptionGroupMembership
    { _ogmOptionGroupName :: Maybe Text
    , _ogmStatus          :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Build an 'OptionGroupMembership' with both fields unset.
--
-- Individual fields can then be populated through these lenses:
--
-- * 'ogmOptionGroupName' @::@ 'Maybe' 'Text'
--
-- * 'ogmStatus' @::@ 'Maybe' 'Text'
--
optionGroupMembership :: OptionGroupMembership
optionGroupMembership = OptionGroupMembership Nothing Nothing

-- | The name of the option group that the instance belongs to.
ogmOptionGroupName :: Lens' OptionGroupMembership (Maybe Text)
ogmOptionGroupName =
    lens _ogmOptionGroupName (\rec v -> rec { _ogmOptionGroupName = v })

-- | The status of the DB instance's option group membership (e.g. in-sync,
-- pending, pending-maintenance, applying).
ogmStatus :: Lens' OptionGroupMembership (Maybe Text)
ogmStatus = lens _ogmStatus (\rec v -> rec { _ogmStatus = v })

-- NOTE: both fields are 'Maybe' 'Text' — the parse order below must line up
-- with the field order of the record declaration above.
instance FromXML OptionGroupMembership where
    parseXML xml = OptionGroupMembership
        <$> xml .@? "OptionGroupName"
        <*> xml .@? "Status"

instance ToQuery OptionGroupMembership where
    toQuery OptionGroupMembership{..} = mconcat
        [ "OptionGroupName" =? _ogmOptionGroupName
        , "Status"          =? _ogmStatus
        ]
-- | Maps a source type to the event categories that apply to it.
data EventCategoriesMap = EventCategoriesMap
    { _ecmEventCategories :: List "member" Text
    , _ecmSourceType      :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Build an 'EventCategoriesMap' with the source type unset and an empty
-- category list.
--
-- Individual fields can then be populated through these lenses:
--
-- * 'ecmEventCategories' @::@ ['Text']
--
-- * 'ecmSourceType' @::@ 'Maybe' 'Text'
--
eventCategoriesMap :: EventCategoriesMap
eventCategoriesMap = EventCategoriesMap mempty Nothing

-- | The event categories for the specified source type
ecmEventCategories :: Lens' EventCategoriesMap [Text]
ecmEventCategories =
    lens _ecmEventCategories (\rec v -> rec { _ecmEventCategories = v })
        . _List

-- | The source type that the returned categories belong to
ecmSourceType :: Lens' EventCategoriesMap (Maybe Text)
ecmSourceType = lens _ecmSourceType (\rec v -> rec { _ecmSourceType = v })

-- NOTE: a missing "EventCategories" element becomes the empty list; the
-- parse order matches the record field order above.
instance FromXML EventCategoriesMap where
    parseXML xml = EventCategoriesMap
        <$> xml .@? "EventCategories" .!@ mempty
        <*> xml .@? "SourceType"

instance ToQuery EventCategoriesMap where
    toQuery EventCategoriesMap{..} = mconcat
        [ "EventCategories" =? _ecmEventCategories
        , "SourceType"      =? _ecmSourceType
        ]
-- | The set of DB instance attribute changes that have been requested but not
-- yet applied; each field mirrors an attribute of the instance itself.
data PendingModifiedValues = PendingModifiedValues
    { _pmvAllocatedStorage        :: Maybe Int
    , _pmvBackupRetentionPeriod   :: Maybe Int
    , _pmvCACertificateIdentifier :: Maybe Text
    , _pmvDBInstanceClass         :: Maybe Text
    , _pmvDBInstanceIdentifier    :: Maybe Text
    , _pmvEngineVersion           :: Maybe Text
    , _pmvIops                    :: Maybe Int
    , _pmvMasterUserPassword      :: Maybe Text
    , _pmvMultiAZ                 :: Maybe Bool
    , _pmvPort                    :: Maybe Int
    , _pmvStorageType             :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'PendingModifiedValues' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pmvAllocatedStorage' @::@ 'Maybe' 'Int'
--
-- * 'pmvBackupRetentionPeriod' @::@ 'Maybe' 'Int'
--
-- * 'pmvCACertificateIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'pmvDBInstanceClass' @::@ 'Maybe' 'Text'
--
-- * 'pmvDBInstanceIdentifier' @::@ 'Maybe' 'Text'
--
-- * 'pmvEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'pmvIops' @::@ 'Maybe' 'Int'
--
-- * 'pmvMasterUserPassword' @::@ 'Maybe' 'Text'
--
-- * 'pmvMultiAZ' @::@ 'Maybe' 'Bool'
--
-- * 'pmvPort' @::@ 'Maybe' 'Int'
--
-- * 'pmvStorageType' @::@ 'Maybe' 'Text'
--
-- Every field starts as 'Nothing' (no pending modifications).
pendingModifiedValues :: PendingModifiedValues
pendingModifiedValues = PendingModifiedValues
    { _pmvDBInstanceClass         = Nothing
    , _pmvAllocatedStorage        = Nothing
    , _pmvMasterUserPassword      = Nothing
    , _pmvPort                    = Nothing
    , _pmvBackupRetentionPeriod   = Nothing
    , _pmvMultiAZ                 = Nothing
    , _pmvEngineVersion           = Nothing
    , _pmvIops                    = Nothing
    , _pmvDBInstanceIdentifier    = Nothing
    , _pmvStorageType             = Nothing
    , _pmvCACertificateIdentifier = Nothing
    }
-- | Contains the new 'AllocatedStorage' size for the DB instance that will be
-- applied or is in progress.
pmvAllocatedStorage :: Lens' PendingModifiedValues (Maybe Int)
pmvAllocatedStorage =
    lens _pmvAllocatedStorage (\s a -> s { _pmvAllocatedStorage = a })
-- | Specifies the pending number of days for which automated backups are
-- retained.
pmvBackupRetentionPeriod :: Lens' PendingModifiedValues (Maybe Int)
pmvBackupRetentionPeriod =
    lens _pmvBackupRetentionPeriod
        (\s a -> s { _pmvBackupRetentionPeriod = a })
-- | Specifies the identifier of the CA certificate for the DB instance.
pmvCACertificateIdentifier :: Lens' PendingModifiedValues (Maybe Text)
pmvCACertificateIdentifier =
    lens _pmvCACertificateIdentifier
        (\s a -> s { _pmvCACertificateIdentifier = a })
-- | Contains the new 'DBInstanceClass' for the DB instance that will be applied or
-- is in progress.
pmvDBInstanceClass :: Lens' PendingModifiedValues (Maybe Text)
pmvDBInstanceClass =
    lens _pmvDBInstanceClass (\s a -> s { _pmvDBInstanceClass = a })
-- | Contains the new 'DBInstanceIdentifier' for the DB instance that will be
-- applied or is in progress.
pmvDBInstanceIdentifier :: Lens' PendingModifiedValues (Maybe Text)
pmvDBInstanceIdentifier =
    lens _pmvDBInstanceIdentifier (\s a -> s { _pmvDBInstanceIdentifier = a })
-- | Indicates the database engine version.
pmvEngineVersion :: Lens' PendingModifiedValues (Maybe Text)
pmvEngineVersion = lens _pmvEngineVersion (\s a -> s { _pmvEngineVersion = a })
-- | Specifies the new Provisioned IOPS value for the DB instance that will be
-- applied or is being applied.
pmvIops :: Lens' PendingModifiedValues (Maybe Int)
pmvIops = lens _pmvIops (\s a -> s { _pmvIops = a })
-- | Contains the pending or in-progress change of the master credentials for the
-- DB instance.
pmvMasterUserPassword :: Lens' PendingModifiedValues (Maybe Text)
pmvMasterUserPassword =
    lens _pmvMasterUserPassword (\s a -> s { _pmvMasterUserPassword = a })
-- | Indicates that the Single-AZ DB instance is to change to a Multi-AZ
-- deployment.
pmvMultiAZ :: Lens' PendingModifiedValues (Maybe Bool)
pmvMultiAZ = lens _pmvMultiAZ (\s a -> s { _pmvMultiAZ = a })
-- | Specifies the pending port for the DB instance.
pmvPort :: Lens' PendingModifiedValues (Maybe Int)
pmvPort = lens _pmvPort (\s a -> s { _pmvPort = a })
-- | Specifies the storage type to be associated with the DB instance.
pmvStorageType :: Lens' PendingModifiedValues (Maybe Text)
pmvStorageType = lens _pmvStorageType (\s a -> s { _pmvStorageType = a })
-- NOTE: the applicative chain below is positionally coupled to the record
-- field order. Many fields share 'Maybe' 'Int' / 'Maybe' 'Text', so a
-- reordering here would typecheck yet silently misparse.
instance FromXML PendingModifiedValues where
    parseXML x = PendingModifiedValues
        <$> x .@? "AllocatedStorage"
        <*> x .@? "BackupRetentionPeriod"
        <*> x .@? "CACertificateIdentifier"
        <*> x .@? "DBInstanceClass"
        <*> x .@? "DBInstanceIdentifier"
        <*> x .@? "EngineVersion"
        <*> x .@? "Iops"
        <*> x .@? "MasterUserPassword"
        <*> x .@? "MultiAZ"
        <*> x .@? "Port"
        <*> x .@? "StorageType"
instance ToQuery PendingModifiedValues where
    toQuery PendingModifiedValues{..} = mconcat
        [ "AllocatedStorage"        =? _pmvAllocatedStorage
        , "BackupRetentionPeriod"   =? _pmvBackupRetentionPeriod
        , "CACertificateIdentifier" =? _pmvCACertificateIdentifier
        , "DBInstanceClass"         =? _pmvDBInstanceClass
        , "DBInstanceIdentifier"    =? _pmvDBInstanceIdentifier
        , "EngineVersion"           =? _pmvEngineVersion
        , "Iops"                    =? _pmvIops
        , "MasterUserPassword"      =? _pmvMasterUserPassword
        , "MultiAZ"                 =? _pmvMultiAZ
        , "Port"                    =? _pmvPort
        , "StorageType"             =? _pmvStorageType
        ]
-- | Membership in a VPC security group, identified by group id and status.
data VpcSecurityGroupMembership = VpcSecurityGroupMembership
    { _vsgmStatus             :: Maybe Text
    , _vsgmVpcSecurityGroupId :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Build a 'VpcSecurityGroupMembership' with both fields unset.
--
-- Individual fields can then be populated through these lenses:
--
-- * 'vsgmStatus' @::@ 'Maybe' 'Text'
--
-- * 'vsgmVpcSecurityGroupId' @::@ 'Maybe' 'Text'
--
vpcSecurityGroupMembership :: VpcSecurityGroupMembership
vpcSecurityGroupMembership = VpcSecurityGroupMembership Nothing Nothing

-- | The status of the VPC security group.
vsgmStatus :: Lens' VpcSecurityGroupMembership (Maybe Text)
vsgmStatus = lens _vsgmStatus (\rec v -> rec { _vsgmStatus = v })

-- | The name of the VPC security group.
vsgmVpcSecurityGroupId :: Lens' VpcSecurityGroupMembership (Maybe Text)
vsgmVpcSecurityGroupId =
    lens _vsgmVpcSecurityGroupId (\rec v -> rec { _vsgmVpcSecurityGroupId = v })

-- NOTE: both fields are 'Maybe' 'Text' — the parse order below must line up
-- with the field order of the record declaration above.
instance FromXML VpcSecurityGroupMembership where
    parseXML xml = VpcSecurityGroupMembership
        <$> xml .@? "Status"
        <*> xml .@? "VpcSecurityGroupId"

instance ToQuery VpcSecurityGroupMembership where
    toQuery VpcSecurityGroupMembership{..} = mconcat
        [ "Status"             =? _vsgmStatus
        , "VpcSecurityGroupId" =? _vsgmVpcSecurityGroupId
        ]
data Parameter = Parameter
{ _pAllowedValues :: Maybe Text
, _pApplyMethod :: Maybe ApplyMethod
, _pApplyType :: Maybe Text
, _pDataType :: Maybe Text
, _pDescription :: Maybe Text
, _pIsModifiable :: Maybe Bool
, _pMinimumEngineVersion :: Maybe Text
, _pParameterName :: Maybe Text
, _pParameterValue :: Maybe Text
, _pSource :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'Parameter' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pAllowedValues' @::@ 'Maybe' 'Text'
--
-- * 'pApplyMethod' @::@ 'Maybe' 'ApplyMethod'
--
-- * 'pApplyType' @::@ 'Maybe' 'Text'
--
-- * 'pDataType' @::@ 'Maybe' 'Text'
--
-- * 'pDescription' @::@ 'Maybe' 'Text'
--
-- * 'pIsModifiable' @::@ 'Maybe' 'Bool'
--
-- * 'pMinimumEngineVersion' @::@ 'Maybe' 'Text'
--
-- * 'pParameterName' @::@ 'Maybe' 'Text'
--
-- * 'pParameterValue' @::@ 'Maybe' 'Text'
--
-- * 'pSource' @::@ 'Maybe' 'Text'
--
parameter :: Parameter
parameter = Parameter
{ _pParameterName = Nothing
, _pParameterValue = Nothing
, _pDescription = Nothing
, _pSource = Nothing
, _pApplyType = Nothing
, _pDataType = Nothing
, _pAllowedValues = Nothing
, _pIsModifiable = Nothing
, _pMinimumEngineVersion = Nothing
, _pApplyMethod = Nothing
}
-- | Specifies the valid range of values for the parameter.
pAllowedValues :: Lens' Parameter (Maybe Text)
pAllowedValues = lens _pAllowedValues (\r v -> r { _pAllowedValues = v })

-- | Indicates when to apply parameter updates.
pApplyMethod :: Lens' Parameter (Maybe ApplyMethod)
pApplyMethod = lens _pApplyMethod (\r v -> r { _pApplyMethod = v })

-- | Specifies the engine specific parameters type.
pApplyType :: Lens' Parameter (Maybe Text)
pApplyType = lens _pApplyType (\r v -> r { _pApplyType = v })

-- | Specifies the valid data type for the parameter.
pDataType :: Lens' Parameter (Maybe Text)
pDataType = lens _pDataType (\r v -> r { _pDataType = v })

-- | Provides a description of the parameter.
pDescription :: Lens' Parameter (Maybe Text)
pDescription = lens _pDescription (\r v -> r { _pDescription = v })

-- | Indicates whether ('true') or not ('false') the parameter can be
-- modified.  Some parameters have security or operational implications
-- that prevent them from being changed.
pIsModifiable :: Lens' Parameter (Maybe Bool)
pIsModifiable = lens _pIsModifiable (\r v -> r { _pIsModifiable = v })

-- | The earliest engine version to which the parameter can apply.
pMinimumEngineVersion :: Lens' Parameter (Maybe Text)
pMinimumEngineVersion =
    lens _pMinimumEngineVersion (\r v -> r { _pMinimumEngineVersion = v })

-- | Specifies the name of the parameter.
pParameterName :: Lens' Parameter (Maybe Text)
pParameterName = lens _pParameterName (\r v -> r { _pParameterName = v })

-- | Specifies the value of the parameter.
pParameterValue :: Lens' Parameter (Maybe Text)
pParameterValue = lens _pParameterValue (\r v -> r { _pParameterValue = v })

-- | Indicates the source of the parameter value.
pSource :: Lens' Parameter (Maybe Text)
pSource = lens _pSource (\r v -> r { _pSource = v })
-- | Decode a 'Parameter' from its XML response element; field order
-- here must match the order of the record's constructor fields.
instance FromXML Parameter where
    parseXML x = Parameter
        <$> x .@? "AllowedValues"
        <*> x .@? "ApplyMethod"
        <*> x .@? "ApplyType"
        <*> x .@? "DataType"
        <*> x .@? "Description"
        <*> x .@? "IsModifiable"
        <*> x .@? "MinimumEngineVersion"
        <*> x .@? "ParameterName"
        <*> x .@? "ParameterValue"
        <*> x .@? "Source"

-- | Serialise a 'Parameter' to query parameters for a request.
instance ToQuery Parameter where
    toQuery Parameter{..} = mconcat
        [ "AllowedValues" =? _pAllowedValues
        , "ApplyMethod" =? _pApplyMethod
        , "ApplyType" =? _pApplyType
        , "DataType" =? _pDataType
        , "Description" =? _pDescription
        , "IsModifiable" =? _pIsModifiable
        , "MinimumEngineVersion" =? _pMinimumEngineVersion
        , "ParameterName" =? _pParameterName
        , "ParameterValue" =? _pParameterValue
        , "Source" =? _pSource
        ]
-- | One configurable setting of an option-group option.  All fields are
-- optional on the wire.
data OptionGroupOptionSetting = OptionGroupOptionSetting
    { _ogosAllowedValues      :: Maybe Text -- ^ acceptable values
    , _ogosApplyType          :: Maybe Text -- ^ engine-specific parameter type
    , _ogosDefaultValue       :: Maybe Text
    , _ogosIsModifiable       :: Maybe Bool -- ^ whether the default may be changed
    , _ogosSettingDescription :: Maybe Text
    , _ogosSettingName        :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | Build an 'OptionGroupOptionSetting' with every field unset.
--
-- Populate individual fields through the corresponding lenses:
-- 'ogosAllowedValues', 'ogosApplyType', 'ogosDefaultValue',
-- 'ogosIsModifiable', 'ogosSettingDescription' and 'ogosSettingName'.
optionGroupOptionSetting :: OptionGroupOptionSetting
optionGroupOptionSetting = OptionGroupOptionSetting
    { _ogosAllowedValues      = Nothing
    , _ogosApplyType          = Nothing
    , _ogosDefaultValue       = Nothing
    , _ogosIsModifiable       = Nothing
    , _ogosSettingDescription = Nothing
    , _ogosSettingName        = Nothing
    }
-- | Indicates the acceptable values for the option group option.
ogosAllowedValues :: Lens' OptionGroupOptionSetting (Maybe Text)
ogosAllowedValues =
    lens _ogosAllowedValues (\r v -> r { _ogosAllowedValues = v })

-- | The DB engine specific parameter type for the option group option.
ogosApplyType :: Lens' OptionGroupOptionSetting (Maybe Text)
ogosApplyType = lens _ogosApplyType (\r v -> r { _ogosApplyType = v })

-- | The default value for the option group option.
ogosDefaultValue :: Lens' OptionGroupOptionSetting (Maybe Text)
ogosDefaultValue = lens _ogosDefaultValue (\r v -> r { _ogosDefaultValue = v })

-- | Boolean value where true indicates that this option group option can
-- be changed from the default value.
ogosIsModifiable :: Lens' OptionGroupOptionSetting (Maybe Bool)
ogosIsModifiable = lens _ogosIsModifiable (\r v -> r { _ogosIsModifiable = v })

-- | The description of the option group option.
ogosSettingDescription :: Lens' OptionGroupOptionSetting (Maybe Text)
ogosSettingDescription =
    lens _ogosSettingDescription (\r v -> r { _ogosSettingDescription = v })

-- | The name of the option group option.
ogosSettingName :: Lens' OptionGroupOptionSetting (Maybe Text)
ogosSettingName = lens _ogosSettingName (\r v -> r { _ogosSettingName = v })
-- | Decode an 'OptionGroupOptionSetting' from its XML response element;
-- field order matches the record's constructor fields.
instance FromXML OptionGroupOptionSetting where
    parseXML x = OptionGroupOptionSetting
        <$> x .@? "AllowedValues"
        <*> x .@? "ApplyType"
        <*> x .@? "DefaultValue"
        <*> x .@? "IsModifiable"
        <*> x .@? "SettingDescription"
        <*> x .@? "SettingName"

-- | Serialise an 'OptionGroupOptionSetting' to query parameters.
instance ToQuery OptionGroupOptionSetting where
    toQuery OptionGroupOptionSetting{..} = mconcat
        [ "AllowedValues" =? _ogosAllowedValues
        , "ApplyType" =? _ogosApplyType
        , "DefaultValue" =? _ogosDefaultValue
        , "IsModifiable" =? _ogosIsModifiable
        , "SettingDescription" =? _ogosSettingDescription
        , "SettingName" =? _ogosSettingName
        ]
|
kim/amazonka
|
amazonka-rds/gen/Network/AWS/RDS/Types.hs
|
mpl-2.0
| 149,886
| 0
| 85
| 37,289
| 25,068
| 14,365
| 10,703
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.AttachVpnGateway
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Attaches a virtual private gateway to a VPC. For more information, see <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_VPN.html Addinga Hardware Virtual Private Gateway to Your VPC> in the /Amazon Virtual PrivateCloud User Guide/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-AttachVpnGateway.html>
module Network.AWS.EC2.AttachVpnGateway
(
-- * Request
AttachVpnGateway
-- ** Request constructor
, attachVpnGateway
-- ** Request lenses
, avgDryRun
, avgVpcId
, avgVpnGatewayId
-- * Response
, AttachVpnGatewayResponse
-- ** Response constructor
, attachVpnGatewayResponse
-- ** Response lenses
, avgrVpcAttachment
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request parameters for the AttachVpnGateway call.  The VPC and
-- gateway ids are mandatory; the dry-run flag is optional.
data AttachVpnGateway = AttachVpnGateway
    { _avgDryRun       :: Maybe Bool -- ^ permission check only, no action
    , _avgVpcId        :: Text       -- ^ target VPC id
    , _avgVpnGatewayId :: Text       -- ^ virtual private gateway id
    } deriving (Eq, Ord, Read, Show)
-- | Build an 'AttachVpnGateway' request from the two mandatory ids.
-- The dry-run flag defaults to 'Nothing' and can be set through
-- 'avgDryRun'.
attachVpnGateway :: Text -- ^ 'avgVpnGatewayId'
                 -> Text -- ^ 'avgVpcId'
                 -> AttachVpnGateway
attachVpnGateway gatewayId vpcId = AttachVpnGateway
    { _avgDryRun       = Nothing
    , _avgVpcId        = vpcId
    , _avgVpnGatewayId = gatewayId
    }
-- | Checks whether you have the required permissions for the action,
-- without actually making the request, and provides an error response.
-- If you have the required permissions, the error response is
-- 'DryRunOperation'.  Otherwise, it is 'UnauthorizedOperation'.
avgDryRun :: Lens' AttachVpnGateway (Maybe Bool)
avgDryRun = lens _avgDryRun (\r v -> r { _avgDryRun = v })

-- | The ID of the VPC.
avgVpcId :: Lens' AttachVpnGateway Text
avgVpcId = lens _avgVpcId (\r v -> r { _avgVpcId = v })

-- | The ID of the virtual private gateway.
avgVpnGatewayId :: Lens' AttachVpnGateway Text
avgVpnGatewayId = lens _avgVpnGatewayId (\r v -> r { _avgVpnGatewayId = v })
-- | Response payload: the resulting VPC attachment, when present.
newtype AttachVpnGatewayResponse = AttachVpnGatewayResponse
    { _avgrVpcAttachment :: Maybe VpcAttachment
    } deriving (Eq, Read, Show)

-- | 'AttachVpnGatewayResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'avgrVpcAttachment' @::@ 'Maybe' 'VpcAttachment'
--
attachVpnGatewayResponse :: AttachVpnGatewayResponse
attachVpnGatewayResponse = AttachVpnGatewayResponse
    { _avgrVpcAttachment = Nothing
    }
-- | Information about the attachment.
avgrVpcAttachment :: Lens' AttachVpnGatewayResponse (Maybe VpcAttachment)
avgrVpcAttachment = lens _avgrVpcAttachment (\r v -> r { _avgrVpcAttachment = v })
-- | All EC2 query actions post to the service root.
instance ToPath AttachVpnGateway where
    toPath = const "/"

-- | Serialise the request fields as query parameters.
instance ToQuery AttachVpnGateway where
    toQuery AttachVpnGateway{..} = mconcat
        [ "DryRun" =? _avgDryRun
        , "VpcId" =? _avgVpcId
        , "VpnGatewayId" =? _avgVpnGatewayId
        ]

instance ToHeaders AttachVpnGateway

-- | Wire the request to the EC2 service: a POST named
-- \"AttachVpnGateway\" whose XML response is parsed below.
instance AWSRequest AttachVpnGateway where
    type Sv AttachVpnGateway = EC2
    type Rs AttachVpnGateway = AttachVpnGatewayResponse

    request  = post "AttachVpnGateway"
    response = xmlResponse

-- | Decode the optional @attachment@ element from the response body.
instance FromXML AttachVpnGatewayResponse where
    parseXML x = AttachVpnGatewayResponse
        <$> x .@? "attachment"
|
romanb/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/AttachVpnGateway.hs
|
mpl-2.0
| 4,504
| 0
| 9
| 979
| 551
| 335
| 216
| 67
| 1
|
{-# LANGUAGE TypeOperators, TypeFamilies, FlexibleContexts
, TypeSynonymInstances, MultiParamTypeClasses, Rank2Types
, FlexibleInstances
#-}
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
----------------------------------------------------------------------
-- |
-- Module : Shady.Image
-- Copyright : (c) Conal Elliott 2009
-- License : GPLv3
--
-- Maintainer : conal@conal.net
-- Stability : experimental
--
-- Images (infinite & continuous)
----------------------------------------------------------------------
-- This variation uses Complex
module Shady.Image
(
Point, pointToR2, r2ToPoint
, FilterG, Filter, samplerIm, scale2, uscale2, translate2, rotate2
, bilerp, bilerpC
, ImageG, Image
-- * General regions
, PRegion, Region
, universeR, emptyR, eqF, neqF, intersectR, unionR, xorR, diffR, complementR
, udisk, disk, annulus, checker
, crop
-- * Space-varying transformations
, transformG, translate2Im, scale2Im, uscale2Im, rotate2Im
, swirl -- , uswirl
, utile, tile
) where
import Control.Applicative (Applicative(..),liftA2)
import Shady.Complex
import Data.VectorSpace
import Data.Boolean
import Shady.Misc
import Shady.Language.Exp
import Shady.Color
import Shady.ITransform
-- | A 2D point, represented as a complex number of scalar expressions.
type Point = ComplexE R

-- | Repack a 'Point' as a two-component vector expression.
pointToR2 :: Point -> R2E
pointToR2 (x :+ y) = vec2 x y

-- | Inverse of 'pointToR2': split a vector into real and imaginary parts.
r2ToPoint :: R2E -> Point
r2ToPoint xy = getX xy :+ getY xy

-- | Generalized image -- continuous & infinite
type ImageG s a = Complex s -> a

-- | Continuous, infinite image
type Image a = ImageG FloatE a
-- == Point -> a

-- | Generalized filter, polymorphic over domain
type FilterG p a = Unop (p -> a)

-- | Image filter
type Filter a = FilterG Point a
-- Unop (Image a)

-- | Wrap up a sampler as an image: sample the texture at the point's
-- vector coordinates and convert the 4-vector result to a 'Color'.
samplerIm :: Sampler2 :=> Image Color
samplerIm s = r4ToColor . texture s . pointToR2
-- -- | 2D invertible transform
-- type ITransform2 = ITransform Point
-- Invertible-transform building blocks.  Each pairs a forward operation
-- with its inverse via 'andInverse': translation with 'negateV',
-- componentwise scaling with 'recip', rotation with 'negate'.
translate2X :: AdditiveGroup a => a -> ITransform a
scale2X :: Fractional s => Complex s -> ITransform (Complex s)
uscale2X :: Fractional s => s -> ITransform (Complex s)
rotate2X :: (AdditiveGroup s, Eq s, Floating s) => s -> ITransform (Complex s)

translate2X = andInverse (^+^) negateV
scale2X = andInverse (onRI2 (*)) (onRI recip)
rotate2X = andInverse rotate2C negate
-- Uniform scaling is componentwise scaling by the same factor twice.
uscale2X = scale2X . \ a -> a :+ a

-- | Rotate a complex value by an angle: multiplication by @'cis' theta@.
rotate2C :: (AdditiveGroup s, Eq s, Floating s) => s -> Unop (Complex s)
rotate2C theta = (cis theta *)
-- experiment
-- Apply the invertible 2D transforms to any 'ITrans'-transformable
-- value via '(*:)'.
translate2, scale2 :: (AdditiveGroup s, Eq s, Floating s, ITrans (Complex s) a) => Complex s -> Unop a
uscale2,rotate2 :: (AdditiveGroup s, Eq s, Floating s, ITrans (Complex s) a) => s -> Unop a
translate2 d = (*:) (translate2X d)
scale2 sc = (*:) (scale2X sc)
rotate2 theta = (*:) (rotate2X theta)
uscale2 sc = (*:) (uscale2X sc)
-- translate2 :: ITransform Point
-- (*:) :: ITransform w -> Unop a
-- (*:) . translate2 :: ITransform Point
-- :: ITransform w -> Unop a
-- | Bilinear interpolation of four corner values (lower-left,
-- lower-right, upper-left, upper-right) at fractional offsets (dx,dy):
-- interpolate each edge horizontally, then blend the two vertically.
bilerp :: VectorSpace w =>
          w -> w -> w -> w -> (Scalar w, Scalar w) -> w
bilerp ll lr ul ur (dx,dy) = lerp bottom top dy
 where
   bottom = lerp ll lr dx
   top    = lerp ul ur dx

-- | 'bilerp' packaged as an image over complex coordinates.
bilerpC :: (VectorSpace w, Scalar w ~ s) =>
           w -> w -> w -> w -> ImageG s w
bilerpC ll lr ul ur (dx :+ dy) = bilerp ll lr ul ur (dx,dy)
{--------------------------------------------------------------------
Generalized regions
--------------------------------------------------------------------}
-- TODO: Move most of these definitions elsewhere, since they're not
-- specific to 2D.
-- | Region over general space
type PRegion p = p -> BoolE

-- | 2D spatial region
type Region = Image BoolE

-- Constant regions: everywhere-true and everywhere-false.
universeR, emptyR :: Applicative f => f BoolE
universeR = pure true
emptyR = pure false

-- Pointwise vector (in)equality, lifted over the region functor.
eqF, neqF :: (IsNat n, IsScalar a, Eq a, Applicative f) =>
             f (VecE n a) -> f (VecE n a) -> f BoolE
eqF = liftA2 (==^)
neqF = liftA2 (/=^)

-- Boolean set algebra on regions, lifted pointwise.
intersectR, unionR, xorR, diffR :: Applicative f => Binop (f BoolE)
complementR :: Applicative f => Unop (f BoolE)
intersectR = liftA2 (&&*)
unionR = liftA2 (||*)
complementR = fmap notE
-- Symmetric difference is pointwise inequality.
xorR = neqF
-- Set difference: in @r@ but not in @r'@.
diffR r r' = r `intersectR` complementR r'
-- | Generalized unit disk/ball: points whose squared magnitude is at
-- most one.
udisk :: (InnerSpace p, Scalar p ~ FloatE) => PRegion p
udisk = (<=* 1) . magnitudeSq

-- | Generalized disk/ball of the given radius, obtained by rescaling
-- the query point into the unit disk.
disk :: (InnerSpace p, Scalar p ~ FloatE) => FloatE -> PRegion p
disk r = udisk . (^/ r)

-- | Generalized annulus: inside the outer disk but outside the inner one.
annulus :: (InnerSpace p, Scalar p ~ FloatE) => FloatE -> FloatE -> PRegion p
annulus outer inner = diffR (disk outer) (disk inner)
-- | Checker-board: compare whether the fractional parts of the two
-- coordinates fall on the same side of 0.5.
checker :: Region
checker (x :+ y) = getX c ==* getY c
  where c = frac (x <+> y) >* 0.5
-- checker (x :+ y) = big x ==* big y
-- where
-- big = (>* 0.5) . frac
{--------------------------------------------------------------------
Some generalized transforms
--------------------------------------------------------------------}
-- | Most general domain-varying transformation: look a coefficient up
-- at each point and use it to warp the sampling location.
transformG' :: (c -> Unop p) -> (p -> c) -> Unop (p -> a)
transformG' warp coef im p = im (warp (coef p) p)

-- | Domain-varying transformation built from an invertible transform;
-- sampling runs through the transform's backward direction.
transformG :: (c -> ITransform p) -> (p -> c) -> Unop (p -> a)
transformG mk = transformG' (\c -> itBackward (mk c))

-- | Space-varying 'translate2'
translate2Im :: AdditiveGroup p => Unop p -> Unop (p -> a)
translate2Im offsets = transformG translate2X offsets

-- | Space-varying 'scale2'
scale2Im :: Fractional s => Unop (Complex s) -> Unop (ImageG s a)
scale2Im factors = transformG scale2X factors

-- | Space-varying 'uscale2'
uscale2Im :: Fractional s => ImageG s s -> Unop (ImageG s a)
uscale2Im factors = transformG uscale2X factors

-- | Space-varying 'rotate2'
rotate2Im :: (AdditiveGroup s, Eq s, Floating s) => ImageG s s -> Unop (ImageG s a)
rotate2Im angles = transformG rotate2X angles
{--------------------------------------------------------------------
Other transformations
--------------------------------------------------------------------}
-- -- | Unit swirl
-- uswirl :: Filter a
-- uswirl = rotate2Im magnitude
-- -- | Swirl transformation
-- swirl :: FloatE -> Filter a
-- swirl s = hyperUscale2 s uswirl
-- *Almost* equivalent, but differs for negative s.
-- | Swirl transformation: rotate each point by an angle proportional
-- to its distance from the origin (@2*pi*s@ per unit of distance).
swirl :: (AdditiveGroup s, Eq s, Floating s) => s -> Unop (ImageG s a)
swirl s = rotate2Im (\p -> 2*pi*s * magnitude p)

-- Tile by reducing every coordinate to its fractional part, i.e. a
-- repetition of [0,1).  This un-shifted form generalizes to nD.
utile' :: Frac p => Unop (p -> a)
utile' im = im . frac

-- | Unit, rectangular tiling.  Shifts 'utile'' by (-0.5,-0.5) so the
-- repeated tile is centered on the origin.
utile :: (AdditiveGroup s, Eq s, Frac p, ITrans (Complex s) p, ITrans (Complex s) a, Floating s) =>
         Unop (p -> a)
utile = translate2 (negate (0.5 :+ 0.5)) utile'

-- | Rectangular tiling with the given tile size: scale the unit tiling.
tile :: (AdditiveGroup s, Eq s, Floating s, Frac s, ITrans (Complex s) a) =>
        Complex s -> Unop (ImageG s a)
tile = flip scale2 utile
-- tile = flip scale2 utile
{--------------------------------------------------------------------
Orphans
--------------------------------------------------------------------}
-- Standard do-nothing transformation: colors are unaffected by spatial
-- transforms, so '(*:)' ignores the transform entirely.
instance ITrans Point Color where (*:) = const id
|
sseefried/shady-graphics
|
src/Shady/Image.hs
|
agpl-3.0
| 7,818
| 0
| 12
| 1,605
| 1,998
| 1,099
| 899
| 107
| 1
|
-- CIS 194, Spring 2015
--
-- Test cases for HW 01
module HW01Tests where
import HW01
import Testing
-- Exercise 1 -----------------------------------------
-- Check one (input, expected) pair against 'lastDigit'.
testLastDigit :: (Integer, Integer) -> Bool
testLastDigit (input, expected) = lastDigit input == expected

-- Check one (input, expected) pair against 'dropLastDigit'.
testDropLastDigit :: (Integer, Integer) -> Bool
testDropLastDigit (input, expected) = dropLastDigit input == expected

ex1Tests :: [Test]
ex1Tests =
  [ Test "lastDigit test" testLastDigit
         [(123, 3), (1234, 4), (5, 5), (10, 0), (0, 0)]
  , Test "dropLastDigit test" testDropLastDigit
         [(123, 12), (1234, 123), (5, 0), (10, 1), (0,0)]
  ]
-- Exercise 2 -----------------------------------------
-- Check one (input, expected) pair against 'toRevDigits'.
testToRevDigits :: (Integer, [Integer]) -> Bool
testToRevDigits (input, expected) = toRevDigits input == expected

ex2Tests :: [Test]
ex2Tests =
  [ Test "toRevDigits test" testToRevDigits
      [ (1234, [4,3,2,1])
      , (0, [])
      , (-17, [])
      ]
  ]
-- Exercise 3 -----------------------------------------
-- Check one (input, expected) pair against 'doubleEveryOther'.
testDoubleEveryOther :: ([Integer], [Integer]) -> Bool
testDoubleEveryOther (input, expected) = doubleEveryOther input == expected

ex3Tests :: [Test]
ex3Tests =
  [ Test "doubleEveryOther test" testDoubleEveryOther
      [ ([4, 9, 5, 5], [4, 18, 5, 10])
      , ([0, 0], [0, 0])
      ]
  ]
-- Exercise 4 -----------------------------------------
-- Check one (input, expected) pair against 'sumDigits'.
testSumDigits :: ([Integer], Integer) -> Bool
testSumDigits (input, expected) = sumDigits input == expected

ex4Tests :: [Test]
ex4Tests =
  [ Test "test sumDigits" testSumDigits
      [ ([10, 5, 18, 4], 19)
      , ([100, 5, 18, 4], 19)
      , ([12], 3)
      , ([], 0)
      ]
  ]
-- Exercise 5 -----------------------------------------
-- Check one (card number, expected validity) pair against 'luhn'.
testLuhn :: (Integer, Bool) -> Bool
testLuhn (cardNumber, expectedValid) = luhn cardNumber == expectedValid

ex5Tests :: [Test]
ex5Tests =
  [ Test "test luhn" testLuhn
      [ (5594589764218858, True)
      , (1234567898765432, False)
      ]
  ]
-- Exercise 6 -----------------------------------------
-- Exercise 6 has no automated cases.
ex6Tests :: [Test]
ex6Tests = []

-- All Tests ------------------------------------------

-- | Every exercise's test group, in order.
allTests :: [Test]
allTests =
  ex1Tests ++ ex2Tests ++ ex3Tests ++ ex4Tests ++ ex5Tests ++ ex6Tests
|
nicokosi/intro-to-haskell-cis-194
|
HW01/HW01Tests.hs
|
unlicense
| 2,236
| 0
| 9
| 512
| 703
| 431
| 272
| 55
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
module Consensus
( AppendEntry (..)
, AppendEntryResponse (..)
, ConsensusMessage
, handleConsensusMessage
, HandleConsensusMessage
)
where
import Blockchain (Block, BlockData)
import Logging
import Data.Aeson (FromJSON, ToJSON, decodeStrict, encode)
import Data.ByteString as BS
import Data.ByteString.Lazy (toStrict)
import Data.Monoid ((<>))
import GHC.Generics
import Network.Socket as N (HostName, PortNumber)
import System.Log.Logger (infoM)
-- | Request to append a block, carrying the block itself.
data AppendEntry =
  AppendEntry { appendEntry :: Block
              }
  deriving (Eq, Generic, Show)

-- | Reply to an 'AppendEntry': a success flag plus the block it refers
-- to ('Nothing' when the request could not even be decoded).
data AppendEntryResponse =
  AppendEntryResponse { appendEntryResponse :: Bool
                      , block :: Maybe Block
                      }
  deriving (Eq, Generic, Show)
------------------------------------------------------------------------------
-- | Raw, undecoded bytes of a consensus message.
type ConsensusMessage = ByteString

-- | Shape of a message handler: the host and port identify this node
-- (used for logging), the first callback broadcasts bytes to the
-- consensus nodes, the second validates a block ('Nothing' meaning
-- valid, as used in 'handleConsensusMessage'), and the last argument
-- is the raw incoming message.
type HandleConsensusMessage = HostName
                            -> PortNumber
                            -> (BlockData -> IO ())
                            -> (Block -> IO (Maybe String))
                            -> ConsensusMessage
                            -> IO ()
-- | Dispatch an incoming consensus message on its raw byte shape:
-- messages starting with @{"appendEntry":@ are append requests, those
-- containing @"appendEntryResponse":@ are replies, anything else is
-- logged as unknown.  NOTE(review): the byte-level prefix/infix match
-- assumes a fixed JSON field order from the encoder -- confirm before
-- changing serialisation.
handleConsensusMessage :: HandleConsensusMessage
handleConsensusMessage host port sendToConsensusNodes isValid msg =
  if | BS.isPrefixOf "{\"appendEntry\":" msg -> do
         infoC host port "APPENDENTRY"
         case decodeStrict msg of
           -- Undecodable request: broadcast a failure with no block.
           Nothing -> sendToConsensusNodes (toStrict (encode (AppendEntryResponse False Nothing)))
           Just (AppendEntry blk) -> do
             v <- isValid blk
             case v of
               -- 'Nothing' from the validator means the block is valid.
               Nothing -> sendToConsensusNodes (toStrict (encode (AppendEntryResponse True (Just blk))))
               _ -> sendToConsensusNodes (toStrict (encode (AppendEntryResponse False (Just blk))))
     | BS.isInfixOf "\"appendEntryResponse\":" msg -> do
         infoC host port "APPENDENTRYRESPONSE"
         -- Responses are only logged here; no further action is taken.
         case decodeStrict msg of
           Just aer@(AppendEntryResponse _ _) -> infoC host port (show aer)
           Nothing -> infoC host port "AER NOT OK"
     | otherwise -> infoC host port ("handleMessage: unknown message: " ++ show msg)
------------------------------------------------------------------------------
-- Generic-derived JSON codecs for the two message types.
instance ToJSON AppendEntry
instance FromJSON AppendEntry
instance ToJSON AppendEntryResponse
instance FromJSON AppendEntryResponse
------------------------------------------------------------------------------
-- | Log an info-level consensus message, tagged with this node's host
-- and port.
infoC :: HostName -> PortNumber -> String -> IO ()
infoC host port msg =
  infoM consensus (mconcat ["C ", host, ":", show port, " ", msg])
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/playpen/blockchain/blockchain-framework-DELETE/src/Consensus.hs
|
unlicense
| 2,843
| 0
| 25
| 849
| 652
| 341
| 311
| 56
| 6
|
module SubprimeFib.A282814 (a282814) where
import Helpers.Table (tableByAntidiagonals)
import Helpers.SubprimeFib (loopLength)
-- Read the 0-based (row, column) position from the antidiagonal
-- enumeration and compute the loop length at the 1-based position.
a282814 n = loopLength (row + 1) (col + 1)
  where (row, col) = tableByAntidiagonals (n - 1)
|
peterokagey/haskellOEIS
|
src/SubprimeFib/A282814.hs
|
apache-2.0
| 219
| 0
| 9
| 33
| 82
| 46
| 36
| 5
| 1
|
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | Library for building integration tests for hrepl.
--
-- This test is built to run somewhat hermetically, running with a temporary
-- output_base and against a given source client (passed via the
-- "HREPL_TEST_CLIENT" environment variable. The reason for such
-- requirements is a gigantic set of bzl and Haskell toolchain
-- dependencies that are not factored into publicly visible
-- targets. Moreover, the dependencies are likely unstable as they are
-- implementation artifacts and not designed interfaces.
--
-- You may also share the same output directory across runs by setting
-- HREPL_TEST_OUTPUT. This setting is useful in particular for caching
-- build outputs.
module ReplTestLib
( TestScript(..)
, hreplTest
) where
import Prelude hiding (readFile)
import Bazel(BazelOpts(..), bazelClean, bazelShutdown, defBazelOpts)
import qualified Bazel.Runfiles as Runfiles
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.Encoding as T
import System.Environment (getEnv, lookupEnv)
import System.FilePath ((</>))
import System.IO (hPrint, stderr)
import System.IO.Strict (readFile)
import System.IO.Temp (withSystemTempDirectory)
import qualified System.Process.Typed as Process
import Test.HUnit (assertEqual)
import Test.HUnit.Lang (Assertion)
-- | The two directories a test run needs: the source client under test
-- and a (possibly temporary) output directory.
data TestDirs = TestDirs
    { clientDir :: FilePath  -- ^ from HREPL_TEST_CLIENT
    , outputDir :: FilePath  -- ^ from HREPL_TEST_OUTPUT or a temp dir
    }
-- | Resolve the test directories from the environment and run the given
-- action with them.  HREPL_TEST_CLIENT is mandatory; HREPL_TEST_OUTPUT
-- is optional and falls back to a fresh temporary directory.
withTestDirs :: (TestDirs -> IO a) -> IO a
withTestDirs k = do
  client <- getEnv "HREPL_TEST_CLIENT"
  mOutput <- lookupEnv "HREPL_TEST_OUTPUT"
  let go out = k TestDirs { clientDir = client, outputDir = out }
  maybe (withSystemTempDirectory "hrepl_test_output" go) go mOutput
-- | Run the given script against a fresh set of test directories and
-- assert that its recorded output matches @expected@.
hreplTest :: TestScript -> String -> Assertion
hreplTest script expected =
  withTestDirs (runHrepl script) >>= assertEqual "Unexpected result" expected
-- | Parameters for 'runHrepl': the hrepl command-line arguments and the
-- stdin script to feed it (given the result file's path).
data TestScript = TestScript
    { tsUserArgs :: [String]
    , tsStdin :: FilePath -> T.Text -- The parameter is the result file name
    }
-- | Convenient options for running bazel in isolation.  Extends
-- 'defBazelOpts': pre-args isolate config and redirect the output base;
-- post-args quiet the build and avoid writing symlinks.
testBazelOpts :: TestDirs -> BazelOpts
testBazelOpts testDirs =
    defBazelOpts
      { bazelPre = bazelPre defBazelOpts ++
          [ -- Isolates from any config settings.
            "--bazelrc=/dev/null"
            -- Redirect outputs to a temporary location
          , "--output_base=" ++ outputDir testDirs </> "output-base"
          ]
      , bazelPost = bazelPost defBazelOpts ++
          [ "--noshow_progress"
          -- TODO(judahjacobson): This was used to work around
          -- b/111500525; see if we can remove it and have the test
          -- still be robust.
          , "--features=-layering_check"
          , "--verbose_failures"
            -- Don't output symlinks, since the SrcFS dir is read-only.
          , "--symlink_prefix=/"
          ]
      , bazelShowCommands = True
      -- Run bazel from inside the client under test.
      , bazelCwd = Just $ clientDir testDirs
      }
-- | Run hrepl with the script's arguments and stdin, and return the
-- contents of the result file the script wrote to.
runHrepl :: TestScript -> TestDirs -> IO String
runHrepl TestScript{..} testDirs = do
  -- Share the same disk cache across runs in the same HREPL_OUTPUT_BASE.
  writeFile (outputDir testDirs </> ".bazelrc")
      $ "build --disk_cache=" ++ outputDir testDirs </> "cache"
  rfiles <- Runfiles.create
  path <- getEnv "PATH"
  -- Locate the hrepl binary among the test's runfiles.
  let hrepl = Runfiles.rlocation rfiles "hrepl/hrepl/hrepl"
  let bazelOpts = testBazelOpts testDirs
      args = [ "--bazel"
             , bazelBin bazelOpts
             , "--bazel-pre-args"
             , unwords $ bazelPre bazelOpts
             , "--bazel-args"
             , unwords $ bazelPost bazelOpts
             , "--show-commands"
             ]
                ++ tsUserArgs
      -- Uses a distinct explicit result file instead of stdout or stderr.
      -- Both of these are polluted by hrepl, bazel, and ghci.
      output = outputDir testDirs </> "result"
      -- Always quit the REPL after the script so the process exits.
      input = T.encodeUtf8 $ tsStdin output <> "\n:quit\n"
      cp = Process.setEnv [("HOME", outputDir testDirs), ("PATH", path)]
          -- Run within a subdirectory, to test that hrepl isn't
          -- relying on being run from the project root.
          $ Process.setWorkingDir (clientDir testDirs </> "hrepl")
          $ Process.setStdin (Process.byteStringInput input)
          $ Process.proc hrepl args
  -- Clean up any previous builds (such as when sharing HREPL_OUTPUT_BASE)
  bazelClean bazelOpts
  -- If the output file already exists, overwrite any previous values.
  -- If it doesn't exist, prevent confusing "file not found" errors in case the
  -- test fails and doesn't append anything to it.
  writeFile output ""
  hPrint stderr cp
  Process.runProcess_ cp
  -- Shut down the async bazel process to prevent test flakiness
  -- and zombie bazel processes.
  bazelShutdown bazelOpts
  readFile output
|
google/hrepl
|
hrepl/tests/ReplTestLib.hs
|
apache-2.0
| 5,754
| 0
| 16
| 1,431
| 822
| 463
| 359
| 78
| 2
|
{-# LANGUAGE CPP #-}
{-# OPTIONS_HADDOCK not-home #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2015 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Data.Struct.Internal.LinkCut where
import Control.Exception
import Control.Monad
import Control.Monad.Primitive
import Control.Monad.ST
import Data.Struct.Internal
#ifdef HLINT
{-# ANN module "HLint: ignore Reduce duplication" #-}
{-# ANN module "HLint: ignore Redundant do" #-}
#endif
-- | Amortized Link-Cut trees via splay trees based on Tarjan's little book.
--
-- These support O(log n) operations for a lot of stuff.
--
-- The parameter `a` is an arbitrary user-supplied monoid that will be summarized
-- along the path to the root of the tree.
--
-- In this example the choice of 'Monoid' is 'String', so we can get a textual description of the path to the root.
--
-- >>> x <- new "x"
-- >>> y <- new "y"
-- >>> link x y -- now x is a child of y
-- >>> x == y
-- False
-- >>> connected x y
-- True
-- >>> z <- new "z"
-- >>> link z x -- now z is a child of y
-- >>> (y ==) <$> root z
-- True
-- >>> cost z
-- "yxz"
-- >>> w <- new "w"
-- >>> u <- new "u"
-- >>> v <- new "v"
-- >>> link u w
-- >>> link v z
-- >>> link w z
-- >>> cost u
-- "yxzwu"
-- >>> (y ==) <$> root v
-- True
-- >>> connected x v
-- True
-- >>> cut z
--
-- @
-- y
-- x z y
-- z ==> w v x
-- w v u
-- u
-- @
--
-- >>> connected x v
-- False
-- >>> cost u
-- "zwu"
-- >>> (z ==) <$> root v
-- True
-- | A node of the link-cut tree: a six-slot object holding four links,
-- the node's own monoid value, and a cached path summary.
newtype LinkCut a s = LinkCut (Object s)

instance Struct (LinkCut a) where
  struct _ = Dict

-- | Pointer equality on the underlying structure.
instance Eq (LinkCut a s) where
  (==) = eqStruct

-- Slots 0-3: the preferred-path pointer, and the parent/left/right
-- links of the auxiliary splay tree.  The numbering must agree with
-- the @alloc 6@ in 'new'.
path, parent, left, right :: Slot (LinkCut a) (LinkCut a)
path = slot 0
parent = slot 1
left = slot 2
right = slot 3

-- Fields 4-5: the node's value and its auxiliary-subtree summary.
value, summary :: Field (LinkCut a) a
value = field 4
summary = field 5
-- | O(1).  Allocate a fresh link-cut tree node carrying the given
-- monoidal value.  All four link slots start out 'Nil', and the path
-- summary initially equals the node's own value.
new :: (PrimMonad m, Monoid a) => a -> m (LinkCut a (PrimState m))
new a = st $ do
  node <- alloc 6
  mapM_ (\sl -> set sl node Nil) [path, parent, left, right]
  setField value node a
  setField summary node a
  return node
{-# INLINE new #-}
-- | O(log n). @'cut' v@ severs the link between @v@ and everything
-- above it, leaving @v@ a root node.
--
-- Repeated calls on the same value without intermediate accesses are O(1).
cut :: (PrimMonad m, Monoid a) => LinkCut a (PrimState m) -> m ()
cut this = st $ do
  access this
  lc <- get left this
  unless (isNil lc) $ do
    set parent lc Nil
    set left this Nil
  -- With the left subtree detached, the path summary collapses to the
  -- node's own value.
  val <- getField value this
  setField summary this val
{-# INLINE cut #-}
-- | O(log n). @'link' v w@ inserts @v@ which must be the root of a tree in as a child of @w@. @v@ and @w@ must not be 'connected'.
link :: (PrimMonad m, Monoid a) => LinkCut a (PrimState m) -> LinkCut a (PrimState m) -> m ()
link v w = st $ do
  --   w               w<~v
  --  a    ,  v  =>   a
  --
  --
  access v
  access w
  -- Recording @w@ as @v@'s path-parent completes the attachment.
  set path v w
{-# INLINE link #-}
-- | O(log n). @'connected' v w@ determines if @v@ and @w@ inhabit the
-- same tree, by comparing their roots.
connected :: (PrimMonad m, Monoid a) => LinkCut a (PrimState m) -> LinkCut a (PrimState m) -> m Bool
connected v w = st $ do
  rv <- root v
  rw <- root w
  return (rv == rw)
{-# INLINE connected #-}
-- | O(log n). @'cost' v@ computes the root-to-leaf path cost of @v@
-- under whatever 'Monoid' was built into the tree.
--
-- Repeated calls on the same value without intermediate accesses are O(1).
cost :: (PrimMonad m, Monoid a) => LinkCut a (PrimState m) -> m a
cost v = st (access v >> getField summary v)
{-# INLINE cost #-}
-- | O(log n). Find the root of a tree.
--
-- Repeated calls on the same value without intermediate accesses are O(1).
root :: (PrimMonad m, Monoid a) => LinkCut a (PrimState m) -> m (LinkCut a (PrimState m))
root this = st $ do
  access this
  r <- chaseLeft this
  splay r -- r is already in the root aux tree
  return r
 where
  -- Walk left links until there is no further left child.
  chaseLeft v = do
    l <- get left v
    if isNil l then return v else chaseLeft l
{-# INLINE root #-}
-- | O(log n). Move upward one level.
--
-- Returns 'Nil' when no parent is available.
--
-- Note: Repeated calls on the same value without intermediate accesses are O(1).
up :: (PrimMonad m, Monoid a) => LinkCut a (PrimState m) -> m (LinkCut a (PrimState m))
up this = st $ do
  access this
  l <- get left this
  if isNil l
    then return Nil
    else do
      p <- chaseRight l
      splay p -- p is already in the root aux tree
      return p
 where
  -- Walk right links until there is no further right child.
  chaseRight v = do
    r <- get right v
    if isNil r then return v else chaseRight r
{-# INLINE up #-}
-- | O(1).  The cached summary of a node's auxiliary subtree; 'Nil'
-- summarizes to 'mempty'.
summarize :: Monoid a => LinkCut a s -> ST s a
summarize n = if isNil n then return mempty else getField summary n
{-# INLINE summarize #-}
-- | O(log n).  Expose @this@: detach its preferred right child, walk
-- the path-parent chain splicing @this@'s path into each ancestor's
-- auxiliary tree, and finish with @this@ splayed to the top.
-- Throws 'NullPointerException' on 'Nil'.
access :: Monoid a => LinkCut a s -> ST s ()
access this = do
  when (isNil this) $ throw NullPointerException
  splay this
  -- the right hand child is no longer preferred
  r <- get right this
  unless (isNil r) $ do
    set right this Nil
    set parent r Nil
    set path r this
  -- resummarize
  l <- get left this
  sl <- summarize l
  v <- getField value this
  setField summary this (sl `mappend` v)
  go this
  splay this
  where
    go v = do
      w <- get path v
      unless (isNil w) $ do
        splay w
        --     w                 v
        --   a   b   c d ==>   a   v,  b.path = w
        --                        c d
        b <- get right w
        unless (isNil b) $ do -- b is no longer on the preferred path
          set path b w
          set parent b Nil
        -- Splice v in as w's right child and refresh w's summary.
        a <- get left w
        sa <- summarize a
        vw <- getField value w
        sv <- getField summary v
        set parent v w
        set right w v
        setField summary w (sa `mappend` vw `mappend` sv)
        go w
-- | O(log n) amortized. Splay within an auxiliary tree: rotate @x@ to the
-- root of its splay tree using the classic zig / zig-zig / zig-zag cases,
-- transferring the path-parent pointer to @x@ and recomputing the cached
-- 'summary' fields of every node whose subtree changes. The old root's
-- summary already covers the whole auxiliary tree, so it is copied to @x@
-- unchanged in each case.
splay :: Monoid a => LinkCut a s -> ST s ()
splay x = do
  p <- get parent x
  unless (isNil p) $ do
    g <- get parent p
    pl <- get left p
    if isNil g then do -- zig step
      -- p was the aux-tree root: move its path pointer and summary to x.
      set parent p x
      set parent x Nil
      pp <- get path p
      set path x pp
      set path p Nil
      sp <- getField summary p
      setField summary x sp
      if pl == x then do
        --    p        x
        --   x d ==>  b p
        --  b c        c d
        c <- get right x
        d <- get right p
        unless (isNil c) $ set parent c p
        set right x p
        set left p c
        sc <- summarize c
        sd <- summarize d
        vp <- getField value p
        setField summary p (sc `mappend` vp `mappend` sd)
      else do
        --    p        x
        --   a x ==>  p c
        --    b c    a b
        b <- get left x
        unless (isNil b) $ set parent b p
        let a = pl
        set left x p
        set right p b
        sa <- summarize a
        sb <- summarize b
        vp <- getField value p
        setField summary p (sa `mappend` vp `mappend` sb)
    else do -- zig-zig or zig-zag
      -- Grandparent exists: x takes over g's parent, path pointer and
      -- summary, then two rotations are applied at once.
      gg <- get parent g
      gl <- get left g
      sg <- getField summary g
      setField summary x sg
      set parent x gg
      gp <- get path g
      set path x gp
      set path g Nil
      if gl == p then do
        if pl == x then do -- zig-zig
          --      g        x
          --     p d      a p
          --    x c  ==>   b g
          --   a b          c d
          b <- get right x
          c <- get right p
          d <- get right g
          set parent p x
          set parent g p
          unless (isNil b) $ set parent b p
          unless (isNil c) $ set parent c g
          set right x p
          set right p g
          set left p b
          set left g c
          sb <- summarize b
          vp <- getField value p
          sc <- summarize c
          vg <- getField value g
          sd <- summarize d
          let sg' = sc `mappend` vg `mappend` sd
          setField summary g sg'
          setField summary p (sb `mappend` vp `mappend` sg')
        else do -- zig-zag
          --     g          x
          --    p d ==>   p   g
          --   a x       a b c d
          --    b c
          let a = pl
          b <- get left x
          c <- get right x
          d <- get right g
          set parent p x
          set parent g x
          unless (isNil b) $ set parent b p
          unless (isNil c) $ set parent c g
          set left x p
          set right x g
          set right p b
          set left g c
          sa <- summarize a
          vp <- getField value p
          sb <- summarize b
          setField summary p (sa `mappend` vp `mappend` sb)
          sc <- summarize c
          vg <- getField value g
          sd <- summarize d
          setField summary g (sc `mappend` vg `mappend` sd)
      else if pl == x then do -- zig-zag
        --     g           x
        --    a p        g   p
        --     x d ==>  a b c d
        --    b c
        let a = gl
        b <- get left x
        c <- get right x
        d <- get right p
        set parent g x
        set parent p x
        unless (isNil b) $ set parent b g
        unless (isNil c) $ set parent c p
        set left x g
        set right x p
        set right g b
        set left p c
        sa <- summarize a
        vg <- getField value g
        sb <- summarize b
        setField summary g (sa `mappend` vg `mappend` sb)
        sc <- summarize c
        vp <- getField value p
        sd <- summarize d
        setField summary p (sc `mappend` vp `mappend` sd)
      else do -- zig-zig
        --   g            x
        --  a p          p d
        --   b x  ==>   g c
        --    c d      a b
        let a = gl
        let b = pl
        c <- get left x
        unless (isNil b) $ set parent b g
        unless (isNil c) $ set parent c p
        set parent p x
        set parent g p
        set left x p
        set left p g
        set right g b
        set right p c
        sa <- summarize a
        vg <- getField value g
        sb <- summarize b
        vp <- getField value p
        sc <- summarize c
        let sg' = sa `mappend` vg `mappend` sb
        setField summary g sg'
        setField summary p (sg' `mappend` vp `mappend` sc)
      unless (isNil gg) $ do
        ggl <- get left gg
        -- NB: this replacement leaves the summary intact
        if ggl == g then set left gg x
        else set right gg x
      -- Keep splaying until x reaches the auxiliary-tree root.
      splay x
|
bitemyapp/structs
|
src/Data/Struct/Internal/LinkCut.hs
|
bsd-2-clause
| 10,680
| 12
| 15
| 3,913
| 3,151
| 1,526
| 1,625
| 248
| 7
|
-- | Cabal auto-generated paths module for the @Hermes@ package: install
-- locations and data-file lookup. Machine-generated — do not edit by hand.
-- The hard-coded directories are baked in at configure time; each can be
-- overridden at runtime via the corresponding @Hermes_*dir@ environment
-- variable.
module Paths_Hermes (
    version,
    getBinDir, getLibDir, getDataDir, getLibexecDir,
    getDataFileName
  ) where
import Data.Version (Version(..))
import System.Environment (getEnv)
-- | The package version (0.0.0, no tags).
version :: Version
version = Version {versionBranch = [0,0,0], versionTags = []}
bindir, libdir, datadir, libexecdir :: FilePath
-- Configure-time install locations baked in by Cabal.
bindir = "/home/kostas/.cabal/bin"
libdir = "/home/kostas/.cabal/lib/Hermes-0.0.0/ghc-7.0.4"
datadir = "/home/kostas/.cabal/share/Hermes-0.0.0"
libexecdir = "/home/kostas/.cabal/libexec"
getBinDir, getLibDir, getDataDir, getLibexecDir :: IO FilePath
-- Each getter consults the environment first and falls back to the
-- baked-in path if the variable is unset.
-- NOTE(review): these use the pre-GHC-7.6 'Prelude.catch'; on modern GHC
-- this module would need 'System.IO.Error.catchIOError' instead.
getBinDir = catch (getEnv "Hermes_bindir") (\_ -> return bindir)
getLibDir = catch (getEnv "Hermes_libdir") (\_ -> return libdir)
getDataDir = catch (getEnv "Hermes_datadir") (\_ -> return datadir)
getLibexecDir = catch (getEnv "Hermes_libexecdir") (\_ -> return libexecdir)
-- | Resolve a data file name relative to the package's data directory.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
  dir <- getDataDir
  return (dir ++ "/" ++ name)
|
kostas1/Hermes
|
dist/build/autogen/Paths_Hermes.hs
|
bsd-2-clause
| 977
| 0
| 10
| 144
| 280
| 161
| 119
| 22
| 1
|
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module Life.Engine.QTree where
import Data.QuadTree
import Data.Boolean (xor)
import Life.Types
type Board = LifeBoard Config (QuadTree Bool)
-- | The eight Moore neighbours of @(x,y)@ on a @w@ by @h@ board.
-- With wrapping enabled the coordinates are taken modulo the board size
-- (torus topology); otherwise neighbours falling off the board are dropped.
neighbs :: Config -> Pos -> [Pos]
neighbs ((w,h),warp) (x,y)
  | warp      = [ (nx `mod` w, ny `mod` h) | (nx,ny) <- candidates ]
  | otherwise = [ (nx,ny) | (nx,ny) <- candidates
                          , nx >= 0, nx < w, ny >= 0, ny < h ]
  where
    -- Same enumeration order as the original hand-written list:
    -- row above, sides of the current row, row below.
    candidates = [ (x+dx, y+dy) | dy <- [-1,0,1], dx <- [-1,0,1]
                                , (dx,dy) /= (0,0) ]
-- | Whether the cell at @p@ is live (reads the backing quad tree).
isAlive :: Board -> Pos -> Bool
isAlive b p = getLocation p $ board b
-- | Whether the cell at @p@ is dead.
isEmpty :: Board -> Pos -> Bool
isEmpty b = not . (isAlive b)
-- | Number of live cells among the neighbours of a position.
liveneighbs :: Board -> Pos -> Int
liveneighbs b = length . filter (isAlive b) . (neighbs (config b))
-- | Every position of a @w@ by @h@ board: x varies in the outer loop,
-- y in the inner one, matching the original comprehension's order.
indices :: Size -> [Pos]
indices (w,h) = concatMap column [0..w-1]
  where column x = map (\y -> (x,y)) [0..h-1]
-- | Board containing exactly the live cells that survive this generation:
-- live cells with 2 or 3 live neighbours (Conway's rules). Every position
-- of the board is written into a fresh quad tree.
survivors :: Board -> Board
survivors b = LifeBoard cb $ foldr (\p qt -> setLocation p qt (isAlive b p && elem (liveneighbs b p) [2,3])) (makeTree sz False) $ indices sz
  where
    cb = config b
    sz = fst cb
-- | Board containing exactly the cells born this generation:
-- dead cells with exactly 3 live neighbours.
births :: Board -> Board
births b = LifeBoard cb $ foldr (\p qt -> setLocation p qt (isEmpty b p && liveneighbs b p == 3))
							(makeTree sz False)
							$ indices sz
  where
    cb = config b
    sz = fst cb
-- | Game-of-Life operations over the quad-tree board representation.
instance Life Board where
  -- A board of the given size with every cell dead.
  empty c = LifeBoard c $ makeTree (fst c) False
  -- Positions of all currently live cells.
  alive b = [ p | p <- indices (treeDimensions bb), getLocation p bb ]
    where bb = board b
  -- Toggle (invert) the cell at p.
  inv p b = LifeBoard (config b) $ setLocation p bb $ not $ getLocation p bb
    where bb = board b
  -- Next generation: union of survivors and births.
  -- NOTE(review): 'survivors b' and 'births b' are re-evaluated per
  -- position unless sharing kicks in — potentially expensive.
  next b = LifeBoard cb $ foldr (\p qt -> setLocation p qt (getLocation p (board (survivors b)) || getLocation p (board (births b)))) (makeTree sz False) $ indices sz
    where
      cb = config b
      sz = fst cb
|
ku-fpg/better-life
|
Life/Engine/QTree.hs
|
bsd-2-clause
| 1,704
| 10
| 18
| 373
| 959
| 507
| 452
| 38
| 2
|
-- |
-- Module : Crypto.Hash.Algorithms
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
-- Definitions of known hash algorithms
--
module Crypto.Hash.Algorithms
( HashAlgorithm
-- * hash algorithms
, MD2(..)
, MD4(..)
, MD5(..)
, SHA1(..)
, SHA224(..)
, SHA256(..)
, SHA384(..)
, SHA512(..)
, SHA512t_224(..)
, SHA512t_256(..)
, RIPEMD160(..)
, Tiger(..)
, Kekkak_224(..)
, Kekkak_256(..)
, Kekkak_384(..)
, Kekkak_512(..)
, SHA3_224(..)
, SHA3_256(..)
, SHA3_384(..)
, SHA3_512(..)
, Skein256_224(..)
, Skein256_256(..)
, Skein512_224(..)
, Skein512_256(..)
, Skein512_384(..)
, Skein512_512(..)
, Whirlpool(..)
) where
import Crypto.Hash.Types (HashAlgorithm)
import Crypto.Hash.MD2
import Crypto.Hash.MD4
import Crypto.Hash.MD5
import Crypto.Hash.SHA1
import Crypto.Hash.SHA224
import Crypto.Hash.SHA256
import Crypto.Hash.SHA384
import Crypto.Hash.SHA512
import Crypto.Hash.SHA512t
import Crypto.Hash.SHA3
import Crypto.Hash.Kekkak
import Crypto.Hash.RIPEMD160
import Crypto.Hash.Tiger
import Crypto.Hash.Skein256
import Crypto.Hash.Skein512
import Crypto.Hash.Whirlpool
|
nomeata/cryptonite
|
Crypto/Hash/Algorithms.hs
|
bsd-3-clause
| 1,461
| 0
| 5
| 434
| 318
| 224
| 94
| 46
| 0
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
module Snap.Chat.ChatRoom.Tests (tests) where
------------------------------------------------------------------------------
import Control.Concurrent
import qualified Data.ByteString.Char8 as S
import System.PosixCompat.Time
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit hiding (Test)
------------------------------------------------------------------------------
import Snap.Chat.ChatRoom
import Snap.Chat.Internal.Util
import Snap.Chat.Test.Common
import Snap.Chat.Internal.Types
------------------------------------------------------------------------------
-- | All chat-room test cases exported by this module.
tests :: [Test]
tests = [ testConnectAndLookup
        , testUserTimeout
        , testConnectTwice
        , testAuthenticate
        ]
------------------------------------------------------------------------------
-- | Joins two users, verifies lookup, and checks the generated join\/leave
-- messages: count, author, timestamp (within 2s of now) and message kind.
testConnectAndLookup :: Test
testConnectAndLookup = testCase "chatroom/connectAndLookup" $
                       withChatRoom 10 proc
  where
    ------------------------------------------------------------------------
    userName = "cufp2011"
    userName2 = "secondUser"
    ------------------------------------------------------------------------
    isJoin :: MessageContents -> Bool
    isJoin Join = True
    isJoin _ = False
    ------------------------------------------------------------------------
    -- The seq forces the leave reason before declaring the match.
    isLeave :: MessageContents -> Bool
    isLeave (Leave x) = x `seq` True
    isLeave _ = False
    ------------------------------------------------------------------------
    proc chatRoom = do
        now <- epochTime
        user <- joinUser userName chatRoom
        -- A joined user must be findable, under the same name.
        lookupUser userName chatRoom >>=
          maybe (assertBool "user not found" False)
                (\u2 -> do
                   assertEqual "usernames don't match"
                               userName
                               (getUserName u2))
        -- only message on the channel should be the join message
        msgs <- getMessages 1 user chatRoom
        assertEqual "only one message on channel" 1 $ length msgs
        let msg = head msgs
        assertEqual "message user doesn't match"
                    userName
                    (getMessageUserName msg)
        let timeDelta = getMessageTime msg - now
        assertBool "message time mismatch" (abs timeDelta <= 2)
        assertBool "message is a join" $ isJoin $ getMessageContents msg
        user2 <- joinUser userName2 chatRoom
        disconnectUser userName "goodbye" chatRoom
        -- After disconnecting, the first user must no longer resolve.
        lookupUser userName chatRoom >>=
          maybe (return ())
                (\_ -> assertBool "user should be gone" False)
        -- The second user sees its own join plus the first user's leave.
        msgs2 <- getMessages 1 user2 chatRoom
        assertEqual "two messages on channel" 2 $ length msgs2
        let [joinMsg, leaveMsg] = msgs2
        assertEqual "message user doesn't match"
                    userName2
                    (getMessageUserName joinMsg)
        assertBool "message is a join" $ isJoin $ getMessageContents joinMsg
        assertEqual "message user doesn't match"
                    userName
                    (getMessageUserName leaveMsg)
        assertBool "message is a leave" $ isLeave $ getMessageContents leaveMsg
------------------------------------------------------------------------------
-- | Joining twice under the same user name must raise an exception.
testConnectTwice :: Test
testConnectTwice = testCase "chatroom/connectTwice" $
                   withChatRoom 10 proc
  where
    ------------------------------------------------------------------------
    userName = "cufp2011"
    ------------------------------------------------------------------------
    proc chatRoom = do
        _ <- joinUser userName chatRoom
        expectExceptionH $ joinUser userName chatRoom
------------------------------------------------------------------------------
-- | With a 1-second room timeout, a user who stays idle for 3 seconds
-- must have been evicted.
testUserTimeout :: Test
testUserTimeout = testCase "chatroom/userTimeout" $
                  withChatRoom 1 proc
  where
    ------------------------------------------------------------------------
    userName = "cufp2011"
    ------------------------------------------------------------------------
    proc chatRoom = do
        _ <- joinUser userName chatRoom
        threadDelay $ seconds 3
        lookupUser userName chatRoom >>=
          maybe (return ())
                (\_ -> assertBool "user didn't timeout" False)
------------------------------------------------------------------------------
-- | Token authentication: a corrupted token or a wrong user name must be
-- rejected, while the original (name, token) pair re-authenticates and
-- keeps the same token.
testAuthenticate :: Test
testAuthenticate = testCase "chatroom/authenticate" $
                   withChatRoom 10 proc
  where
    ------------------------------------------------------------------------
    userName = "cufp2011"
    userName2 = "junk"
    ------------------------------------------------------------------------
    proc chatRoom = do
        user <- joinUser userName chatRoom
        let oldToken = getUserToken user
        let (UserToken oldTokenBS) = oldToken
        -- Corrupt the token by dropping its first byte.
        let newToken = UserToken $ S.drop 1 oldTokenBS
        expectExceptionH $ authenticateUser userName newToken chatRoom
        expectExceptionH $ authenticateUser userName2 oldToken chatRoom
        user' <- authenticateUser userName oldToken chatRoom
        -- expect the token to not have changed.
        assertBool "token didn't change" $ getUserToken user' == oldToken
|
snapframework/cufp2011
|
test/suite/Snap/Chat/ChatRoom/Tests.hs
|
bsd-3-clause
| 5,482
| 0
| 17
| 1,433
| 899
| 449
| 450
| 94
| 3
|
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.FloatSnap
-- Copyright : (c) 2009 Anders Engstrom <ankaan@gmail.com>
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Anders Engstrom <ankaan@gmail.com>
-- Stability : unstable
-- Portability : unportable
--
-- Move and resize floating windows using other windows and the edge of the
-- screen as guidelines.
-----------------------------------------------------------------------------
module XMonad.Actions.FloatSnap (
-- * Usage
-- $usage
Direction2D(..),
snapMove,
snapGrow,
snapShrink,
snapMagicMove,
snapMagicResize,
snapMagicMouseResize,
afterDrag,
ifClick,
ifClick') where
import XMonad
import Control.Applicative((<$>))
import Data.List (sort)
import Data.Maybe (listToMaybe,fromJust,isNothing)
import qualified XMonad.StackSet as W
import qualified Data.Set as S
import XMonad.Hooks.ManageDocks (calcGapForAll)
import XMonad.Util.Types (Direction2D(..))
import XMonad.Actions.AfterDrag
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Actions.FloatSnap
--
-- Then add appropriate key bindings, for example:
--
-- > , ((modm, xK_Left), withFocused $ snapMove L Nothing)
-- > , ((modm, xK_Right), withFocused $ snapMove R Nothing)
-- > , ((modm, xK_Up), withFocused $ snapMove U Nothing)
-- > , ((modm, xK_Down), withFocused $ snapMove D Nothing)
-- > , ((modm .|. shiftMask, xK_Left), withFocused $ snapShrink R Nothing)
-- > , ((modm .|. shiftMask, xK_Right), withFocused $ snapGrow R Nothing)
-- > , ((modm .|. shiftMask, xK_Up), withFocused $ snapShrink D Nothing)
-- > , ((modm .|. shiftMask, xK_Down), withFocused $ snapGrow D Nothing)
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
--
-- And possibly add appropriate mouse bindings, for example:
--
-- > , ((modm, button1), (\w -> focus w >> mouseMoveWindow w >> ifClick (snapMagicMove (Just 50) (Just 50) w)))
-- > , ((modm .|. shiftMask, button1), (\w -> focus w >> mouseMoveWindow w >> ifClick (snapMagicResize [L,R,U,D] (Just 50) (Just 50) w)))
-- > , ((modm, button3), (\w -> focus w >> mouseResizeWindow w >> ifClick (snapMagicResize [R,D] (Just 50) (Just 50) w)))
--
-- For detailed instructions on editing your mouse bindings, see
-- "XMonad.Doc.Extending#Editing_mouse_bindings".
--
-- Using these mouse bindings, it will not snap while moving, but allow you to click the window once after it has been moved or resized to snap it into place.
-- Note that the order in which the commands are applied in the mouse bindings are important. Snapping can also be used together with other window resizing
-- functions, such as those from "XMonad.Actions.FlexibleResize"
--
-- An alternative set of mouse bindings that will always snap after the drag is:
--
-- > , ((modm, button1), (\w -> focus w >> mouseMoveWindow w >> afterDrag (snapMagicMove (Just 50) (Just 50) w)))
-- > , ((modm .|. shiftMask, button1), (\w -> focus w >> mouseMoveWindow w >> afterDrag (snapMagicResize [L,R,U,D] (Just 50) (Just 50) w)))
-- > , ((modm, button3), (\w -> focus w >> mouseResizeWindow w >> afterDrag (snapMagicResize [R,D] (Just 50) (Just 50) w)))
--
-- Interesting values for the distance to look for window in the orthogonal axis are Nothing (to snap against every window), Just 0 (to only snap
-- against windows that we should collide with geometrically while moving) and Just 1 (to also snap against windows we brush against).
--
-- For 'snapMagicMove', 'snapMagicResize' and 'snapMagicMouseResize', try instead setting it to the same as the maximum snapping distance.
--
-- When a value is specified it can be geometrically conceived as adding a border with the specified width around the window and then checking which
-- windows it should collide with.
-- | Resize the window by each edge independently to snap against the closest part of other windows or the edge of the screen. Use the location of the
-- mouse over the window to decide which edges to snap. In corners, the two adjoining edges will be snapped, along the middle of an edge only that edge
-- will be snapped. In the center of the window all edges will snap. Intended to be used together with "XMonad.Actions.FlexibleResize" or
-- "XMonad.Actions.FlexibleManipulate".
snapMagicMouseResize
    :: Rational  -- ^ How big the middle snap area of each axis should be.
    -> Maybe Int -- ^ The distance in the orthogonal axis to look for windows to snap against. Use Nothing to snap against every window.
    -> Maybe Int -- ^ The maximum distance to snap. Use Nothing to not impose any boundary.
    -> Window    -- ^ The window to move and resize.
    -> X ()
snapMagicMouseResize middle collidedist snapdist w = whenX (isClient w) $ withDisplay $ \d -> do
    wa <- io $ getWindowAttributes d w
    (_, _, _, px, py, _, _, _) <- io $ queryPointer d w
    -- Pointer position normalized to [0,1] within the window.
    let x = (fromIntegral px - wx wa)/(ww wa)
        y = (fromIntegral py - wy wa)/(wh wa)
        -- Pick edges by which side of the (0.5 +/- middle/2) band the
        -- pointer falls on, per axis.
        ml = if x <= (0.5 - middle/2) then [L] else []
        mr = if x > (0.5 + middle/2) then [R] else []
        mu = if y <= (0.5 - middle/2) then [U] else []
        md = if y > (0.5 + middle/2) then [D] else []
        mdir = ml++mr++mu++md
        -- Pointer in the center band of both axes: snap all four edges.
        dir = if mdir == []
              then [L,R,U,D]
              else mdir
    snapMagicResize dir collidedist snapdist w
    where
        wx = fromIntegral.wa_x
        wy = fromIntegral.wa_y
        ww = fromIntegral.wa_width
        wh = fromIntegral.wa_height
-- | Resize the window by each edge independently to snap against the closest part of other windows or the edge of the screen.
snapMagicResize
    :: [Direction2D] -- ^ The edges to snap.
    -> Maybe Int -- ^ The distance in the orthogonal axis to look for windows to snap against. Use Nothing to snap against every window.
    -> Maybe Int -- ^ The maximum distance to snap. Use Nothing to not impose any boundary.
    -> Window -- ^ The window to move and resize.
    -> X ()
snapMagicResize dir collidedist snapdist w = whenX (isClient w) $ withDisplay $ \d -> do
    io $ raiseWindow d w
    wa <- io $ getWindowAttributes d w
    (xbegin,xend) <- handleAxis True d wa
    (ybegin,yend) <- handleAxis False d wa
    -- Only edges listed in dir actually move; the others keep the
    -- window's current geometry.
    let xbegin' = if L `elem` dir then xbegin else (wx wa)
        xend' = if R `elem` dir then xend else (wx wa + ww wa)
        ybegin' = if U `elem` dir then ybegin else (wy wa)
        yend' = if D `elem` dir then yend else (wy wa + wh wa)
    io $ moveWindow d w (fromIntegral $ xbegin') (fromIntegral $ ybegin')
    io $ resizeWindow d w (fromIntegral $ xend' - xbegin') (fromIntegral $ yend' - ybegin')
    float w
    where
        wx = fromIntegral.wa_x
        wy = fromIntegral.wa_y
        ww = fromIntegral.wa_width
        wh = fromIntegral.wa_height
        -- For one axis, compute the snapped begin (left/top) and end
        -- (right/bottom) coordinates of the window.
        handleAxis horiz d wa = do
            ((mbl,mbr,bs),(mfl,mfr,fs)) <- getSnap horiz collidedist d w
            -- If already flush against a snap point (bs/fs), stay put;
            -- otherwise take the nearer of the two candidate points.
            let begin = if bs
                        then wpos wa
                        else case (mbl,mbr) of
                            (Just bl,Just br) -> if wpos wa - bl < br - wpos wa then bl else br
                            (Just bl,Nothing) -> bl
                            (Nothing,Just br) -> br
                            (Nothing,Nothing) -> wpos wa
                -- Avoid collapsing the window by snapping the far edge to
                -- the same point the near edge chose.
                end = if fs
                      then wpos wa + wdim wa
                      else case (if mfl==(Just begin) then Nothing else mfl,mfr) of
                          (Just fl,Just fr) -> if wpos wa + wdim wa - fl < fr - wpos wa - wdim wa then fl else fr
                          (Just fl,Nothing) -> fl
                          (Nothing,Just fr) -> fr
                          (Nothing,Nothing) -> wpos wa + wdim wa
                -- Respect the maximum snap distance, when given.
                begin' = if isNothing snapdist || abs (begin - wpos wa) <= fromJust snapdist then begin else (wpos wa)
                end' = if isNothing snapdist || abs (end - wpos wa - wdim wa) <= fromJust snapdist then end else (wpos wa + wdim wa)
            return (begin',end')
            where
                (wpos, wdim, _, _) = constructors horiz
-- | Move a window by both axes in any direction to snap against the closest part of other windows or the edge of the screen.
snapMagicMove
    :: Maybe Int -- ^ The distance in the orthogonal axis to look for windows to snap against. Use Nothing to snap against every window.
    -> Maybe Int -- ^ The maximum distance to snap. Use Nothing to not impose any boundary.
    -> Window -- ^ The window to move.
    -> X ()
snapMagicMove collidedist snapdist w = whenX (isClient w) $ withDisplay $ \d -> do
    io $ raiseWindow d w
    wa <- io $ getWindowAttributes d w
    nx <- handleAxis True d wa
    ny <- handleAxis False d wa
    io $ moveWindow d w (fromIntegral nx) (fromIntegral ny)
    float w
    where
        -- New position along one axis: nearest snap point for either the
        -- leading or the trailing edge, whichever is closer.
        handleAxis horiz d wa = do
            ((mbl,mbr,bs),(mfl,mfr,fs)) <- getSnap horiz collidedist d w
            return $ if bs || fs
                     -- Already flush against something: don't move.
                     then wpos wa
                     else let b = case (mbl,mbr) of
                                      (Just bl,Just br) -> if wpos wa - bl < br - wpos wa then bl else br
                                      (Just bl,Nothing) -> bl
                                      (Nothing,Just br) -> br
                                      (Nothing,Nothing) -> wpos wa
                              f = case (mfl,mfr) of
                                      (Just fl,Just fr) -> if wpos wa + wdim wa - fl < fr - wpos wa - wdim wa then fl else fr
                                      (Just fl,Nothing) -> fl
                                      (Nothing,Just fr) -> fr
                                      (Nothing,Nothing) -> wpos wa
                              newpos = if abs (b - wpos wa) <= abs (f - wpos wa - wdim wa) then b else (f - wdim wa)
                          in if isNothing snapdist || abs (newpos - wpos wa) <= fromJust snapdist then newpos else (wpos wa)
            where
                (wpos, wdim, _, _) = constructors horiz
-- | Move a window in the specified direction until it snaps against another window or the edge of the screen.
snapMove
    :: Direction2D -- ^ What direction to move the window in.
    -> Maybe Int -- ^ The distance in the orthogonal axis to look for windows to snap against. Use Nothing to snap against every window.
    -> Window -- ^ The window to move.
    -> X ()
-- L/U move toward smaller coordinates (reversed), R/D toward larger.
snapMove L = doSnapMove True True
snapMove R = doSnapMove True False
snapMove U = doSnapMove False True
snapMove D = doSnapMove False False
-- | Shared worker for 'snapMove': @horiz@ selects the axis, @rev@ selects
-- movement toward smaller coordinates. Moves the window to the next snap
-- point in that direction and re-floats it.
doSnapMove :: Bool -> Bool -> Maybe Int -> Window -> X ()
doSnapMove horiz rev collidedist w = whenX (isClient w) $ withDisplay $ \d -> do
    io $ raiseWindow d w
    wa <- io $ getWindowAttributes d w
    ((bl,br,_),(fl,fr,_)) <- getSnap horiz collidedist d w
    -- Candidate snap points for the leading (mb) and trailing (mf) edge
    -- in the chosen direction.
    let (mb,mf) = if rev then (bl,fl)
                         else (br,fr)
        newpos = fromIntegral $ case (mb,mf) of
            (Just b,Nothing) -> b
            (Nothing,Just f) -> f - wdim wa
            -- Both available: pick the one encountered first while
            -- travelling in the requested direction.
            (Just b,Just f) -> if rev /= (b < f - wdim wa)
                               then b
                               else f - wdim wa
            _ -> wpos wa
    if horiz then io $ moveWindow d w newpos (fromIntegral $ wa_y wa)
             else io $ moveWindow d w (fromIntegral $ wa_x wa) newpos
    float w
    where
        (wpos, wdim, _, _) = constructors horiz
-- | Grow the specified edge of a window until it snaps against another window or the edge of the screen.
snapGrow
    :: Direction2D -- ^ What edge of the window to grow.
    -> Maybe Int -- ^ The distance in the orthogonal axis to look for windows to snap against. Use Nothing to snap against every window.
    -> Window -- ^ The window to grow.
    -> X ()
snapGrow = snapResize True
-- | Shrink the specified edge of a window until it snaps against another window or the edge of the screen.
snapShrink
    :: Direction2D -- ^ What edge of the window to shrink.
    -> Maybe Int -- ^ The distance in the orthogonal axis to look for windows to snap against. Use Nothing to snap against every window.
    -> Window -- ^ The window to shrink.
    -> X ()
snapShrink = snapResize False
-- | Shared worker for 'snapGrow'\/'snapShrink': move the given edge of the
-- window to the next snap point outward (@grow@) or inward (shrink),
-- keeping the opposite edge fixed. Degenerate (non-positive) geometries
-- are discarded.
snapResize :: Bool -> Direction2D -> Maybe Int -> Window -> X ()
snapResize grow dir collidedist w = whenX (isClient w) $ withDisplay $ \d -> do
    io $ raiseWindow d w
    wa <- io $ getWindowAttributes d w
    -- For each edge, pick the outward or inward snap candidate and derive
    -- the resulting (x, y, width, height).
    mr <- case dir of
              L -> do ((mg,ms,_),(_,_,_)) <- getSnap True collidedist d w
                      return $ case (if grow then mg else ms) of
                                   Just v -> Just (v, wy wa, ww wa + wx wa - v, wh wa)
                                   _ -> Nothing
              R -> do ((_,_,_),(ms,mg,_)) <- getSnap True collidedist d w
                      return $ case (if grow then mg else ms) of
                                   Just v -> Just (wx wa, wy wa, v - wx wa, wh wa)
                                   _ -> Nothing
              U -> do ((mg,ms,_),(_,_,_)) <- getSnap False collidedist d w
                      return $ case (if grow then mg else ms) of
                                   Just v -> Just (wx wa, v, ww wa, wh wa + wy wa - v)
                                   _ -> Nothing
              D -> do ((_,_,_),(ms,mg,_)) <- getSnap False collidedist d w
                      return $ case (if grow then mg else ms) of
                                   Just v -> Just (wx wa, wy wa, ww wa, v - wy wa)
                                   _ -> Nothing
    case mr of
        Nothing -> return ()
        Just (nx,ny,nw,nh) -> if nw>0 && nh>0 then do io $ moveWindow d w (fromIntegral nx) (fromIntegral ny)
                                                      io $ resizeWindow d w (fromIntegral nw) (fromIntegral nh)
                                              else return ()
    float w
    where
        wx = fromIntegral.wa_x
        wy = fromIntegral.wa_y
        ww = fromIntegral.wa_width
        wh = fromIntegral.wa_height
-- | Collect snap candidates along one axis for a window: for its leading
-- ("back") and trailing ("front") edge, return the nearest snap point
-- below, the nearest above, and whether the edge already sits exactly on
-- one. Candidates come from the screen bounds, the strut-adjusted gap
-- rectangle, and the edges of other windows (border widths included).
getSnap :: Bool -> Maybe Int -> Display -> Window -> X ((Maybe Int,Maybe Int,Bool),(Maybe Int,Maybe Int,Bool))
getSnap horiz collidedist d w = do
    wa <- io $ getWindowAttributes d w
    screen <- W.current <$> gets windowset
    let sr = screenRect $ W.screenDetail screen
        wl = W.integrate' . W.stack $ W.workspace screen
    -- Screen rectangle with all dock gaps applied.
    gr <- fmap ($sr) $ calcGapForAll $ S.fromList [minBound .. maxBound]
    -- Only windows within collidedist in the orthogonal axis count.
    wla <- filter (collides wa) `fmap` (io $ mapM (getWindowAttributes d) $ filter (/=w) wl)
    return ( neighbours (back wa sr gr wla) (wpos wa)
           , neighbours (front wa sr gr wla) (wpos wa + wdim wa)
           )
    where
        wborder = fromIntegral.wa_border_width
        (wpos, wdim, rpos, rdim) = constructors horiz
        (refwpos, refwdim, _, _) = constructors $ not horiz
        -- Sorted snap positions for the leading edge, clipped to the screen.
        back wa sr gr wla = dropWhile (< rpos sr) $
                            takeWhile (< rpos sr + rdim sr) $
                            sort $ (rpos sr):(rpos gr):(rpos gr + rdim gr):
                                   foldr (\a as -> (wpos a):(wpos a + wdim a + wborder a + wborder wa):as) [] wla
        -- Sorted snap positions for the trailing edge (shifted by borders).
        front wa sr gr wla = dropWhile (<= rpos sr) $
                             takeWhile (<= rpos sr + rdim sr) $
                             sort $ (rpos gr - 2*wborder wa):(rpos gr + rdim gr - 2*wborder wa):(rpos sr + rdim sr - 2*wborder wa):
                                    foldr (\a as -> (wpos a - wborder a - wborder wa):(wpos a + wdim a):as) [] wla
        -- (nearest below v, nearest above v, v is exactly a snap point).
        neighbours l v = ( listToMaybe $ reverse $ takeWhile (< v) l
                         , listToMaybe $ dropWhile (<= v) l
                         , v `elem` l
                         )
        collides wa oa = case collidedist of
                             Nothing -> True
                             Just dist -> ( refwpos oa - wborder oa < refwpos wa + refwdim wa + wborder wa + dist
                                            && refwpos wa - wborder wa - dist < refwpos oa + refwdim oa + wborder oa )
-- | Axis-selecting accessors: for the horizontal (True) or vertical
-- (False) axis, return (window position, window dimension, rectangle
-- position, rectangle dimension) projections.
constructors :: Bool -> (WindowAttributes -> Int, WindowAttributes -> Int, Rectangle -> Int, Rectangle -> Int)
constructors True = ( fromIntegral.wa_x
                    , fromIntegral.wa_width
                    , fromIntegral.rect_x
                    , fromIntegral.rect_width
                    )
constructors False = ( fromIntegral.wa_y
                     , fromIntegral.wa_height
                     , fromIntegral.rect_y
                     , fromIntegral.rect_height
                     )
|
CaptainPatate/xmonad-contrib
|
XMonad/Actions/FloatSnap.hs
|
bsd-3-clause
| 17,084
| 0
| 24
| 5,825
| 4,226
| 2,221
| 2,005
| 222
| 18
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Web.RTBBidder.Types.Request.Segment (Segment(..)) where
import qualified Data.Aeson as AESON
import Data.Aeson ((.=), (.:), (.:?), (.!=))
import qualified Data.Text as TX
-- | A bid-request segment: a key\/value pair of content or audience data.
-- Appears to mirror the OpenRTB @Segment@ object; all fields are optional.
data Segment = Segment
  { segId :: Maybe TX.Text        -- ^ ID of the data segment.
  , segName :: Maybe TX.Text      -- ^ Name of the data segment.
  , segValue :: Maybe TX.Text     -- ^ String value of the segment.
  , segExt :: Maybe AESON.Value   -- ^ Exchange-specific extensions.
  } deriving (Show, Eq)
-- | Decode a segment from a JSON object; every key is optional ('.:?'),
-- so any missing key simply yields 'Nothing'.
instance AESON.FromJSON Segment where
  parseJSON = AESON.withObject "segment" $ \o -> do
    segId <- o .:? "id"
    segName <- o .:? "name"
    segValue <- o .:? "value"
    segExt <- o .:? "ext"
    return Segment{..}
-- | Encode a segment as a JSON object.
-- NOTE(review): 'Nothing' fields are serialized as JSON nulls rather than
-- omitted from the object.
instance AESON.ToJSON Segment where
  toJSON Segment{..} = AESON.object
    [ "id" .= segId
    , "name" .= segName
    , "value" .= segValue
    , "ext" .= segExt
    ]
|
hiratara/hs-rtb-bidder
|
src/Web/RTBBidder/Types/Request/Segment.hs
|
bsd-3-clause
| 807
| 0
| 11
| 174
| 268
| 152
| 116
| 25
| 0
|
-- Keyed scheduler, exists to ensure that no more than one task is
-- active per 'key' at a time. In this context, a key is likely a
-- filepath or directory path. Wraps another scheduler.
module KeyedSched
( newKeyedSched
) where
import Control.Monad (join)
import Control.Exception (finally, assert)
import Data.IORef
import qualified Data.Map as M
import qualified Data.Sequence as S
-- | A keyed scheduler: the shared per-key pending-work map plus the
-- wrapped base scheduler.
data KSched k = KS !(IORef (WorkMap k)) !Sched
-- | The underlying scheduler: submits a task for execution.
type Sched = Work -> IO ()
-- | Keys currently busy, each with its queue of pending work. A key that
-- is present with an empty queue has a task running and nothing queued.
type WorkMap k = M.Map k (S.Seq Work)
-- | A unit of work.
type Work = IO ()
-- | Create a keyed scheduler on top of the given base scheduler. The
-- returned function enqueues work under a key, ensuring at most one task
-- per key is active in the wrapped scheduler at any time.
newKeyedSched :: (Ord k) => (Work -> IO ()) -> IO (k -> Work -> IO ())
newKeyedSched sched = do
    workMap <- newIORef M.empty
    return (addKeyedWork (KS workMap sched))
-- | Schedule one unit of work under a key. If the key is idle, mark it
-- busy (insert with an empty queue) and submit the work immediately;
-- otherwise append it to the key's queue. The map update and the decision
-- are made atomically; the resulting action runs outside the update.
addKeyedWork :: (Ord k) => KSched k -> k -> Work -> IO ()
addKeyedWork ks@(KS rf _) k w = join $ atomicModifyIORef rf addw where
    addw m0 = addw' m0 (M.lookup k m0)
    addw' m0 Nothing = (M.insert k S.empty m0, initKeyedWork ks k w)
    addw' m0 (Just ws) = (M.insert k (ws S.|> w) m0, return ())
-- | Called when a key's active task finishes: atomically dispatch the
-- next queued task for that key, or remove the key from the map if its
-- queue is empty.
returnKey :: (Ord k) => KSched k -> k -> IO ()
returnKey ks@(KS rf _) k = join $ atomicModifyIORef rf rel where
    rel m0 = rel' m0 (M.lookup k m0)
    rel' m0 Nothing = assert False $ (m0,return ()) -- illegal state: released a key that was never held
    rel' m0 (Just ops) = case S.viewl ops of
        S.EmptyL -> (M.delete k m0, return ()) -- key released
        (w S.:< ws) -> (M.insert k ws m0, initKeyedWork ks k w)
-- | Submit one unit of keyed work to the base scheduler, guaranteeing via
-- 'finally' that the key is returned (and any queued successor dispatched)
-- even if the work throws.
initKeyedWork :: (Ord k) => KSched k -> k -> Work -> IO ()
initKeyedWork ks@(KS _ sched) k w = sched (w `finally` returnKey ks k)
|
dmbarbour/awelon
|
hsrc_util/KeyedSched.hs
|
bsd-3-clause
| 1,546
| 0
| 12
| 364
| 673
| 349
| 324
| 34
| 3
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.PhoneNumber.Corpus
( corpus
, negativeCorpus
) where
import Prelude
import Data.String
import Duckling.PhoneNumber.Types
import Duckling.Testing.Types
-- | Positive corpus: inputs that must parse to the given phone numbers.
corpus :: Corpus
corpus = (testContext, testOptions, allExamples)
-- | Negative corpus: inputs that must NOT be recognized as phone numbers
-- (too few digits, or more digits than any real phone number).
negativeCorpus :: NegativeCorpus
negativeCorpus = (testContext, testOptions, examples)
  where
    examples =
      [ "12345"
      , "1234567890123456777777"
      , "12345678901234567"
      ]
-- | Positive examples: each group pairs a canonical 'PhoneNumberValue'
-- with surface forms (separators, country prefixes, extensions) that must
-- all normalize to it.
allExamples :: [Example]
allExamples = concat
  [ examples (PhoneNumberValue "6507018887")
             [ "650-701-8887"
             ]
  , examples (PhoneNumberValue "(+1) 6507018887")
             [ "(+1)650-701-8887"
             , "(+1)   650 - 701  8887"
             , "(+1) 650-701-8887"
             , "+1 6507018887"
             ]
  , examples (PhoneNumberValue "(+33) 146647998")
             [ "+33 1 46647998"
             ]
  , examples (PhoneNumberValue "0620702220")
             [ "06 2070 2220"
             ]
  , examples (PhoneNumberValue "6507018887 ext 897")
             [ "(650)-701-8887 ext 897"
             ]
  , examples (PhoneNumberValue "(+1) 2025550121")
             [ "+1-202-555-0121"
             , "+1 202.555.0121"
             ]
  , examples (PhoneNumberValue "4866827")
             [ "4.8.6.6.8.2.7"
             ]
  , examples (PhoneNumberValue "06354640807")
             [ "06354640807"
             ]
  , examples (PhoneNumberValue "18998078030")
             [ "18998078030"
             ]
  , examples (PhoneNumberValue "61992852776")
             [ "61 - 9 9285-2776"
             ]
  , examples (PhoneNumberValue "19997424919")
             [ "(19) 997424919"
             ]
  , examples (PhoneNumberValue "(+55) 19992842606")
             [ "+55 19992842606"
             ]
  ]
|
facebookincubator/duckling
|
Duckling/PhoneNumber/Corpus.hs
|
bsd-3-clause
| 2,009
| 0
| 9
| 641
| 327
| 188
| 139
| 46
| 1
|
module Utils where
import Data.Word
import Data.List (elemIndex)
-- | Translate characters, like the Unix @tr@ utility: each character of
-- the input found in @as@ is replaced by the character at the same
-- position in @bs@; all other characters pass through unchanged.
--
-- Pairing @as@ with @bs@ via 'zip' makes the function total: a character
-- whose position in @as@ has no counterpart in @bs@ is left unchanged,
-- instead of crashing on an out-of-range '(!!)' as the previous
-- 'elemIndex'-based version did.
tr :: [Char] -> [Char] -> String -> String
tr as bs = map translate
  where
    table = zip as bs
    translate c = maybe c id (lookup c table)
-- | Parse a string of binary digits into its numeric value, e.g.
-- @readBinary "1010" == 10@. The string is first 'read' as a decimal
-- 'Word32' whose digits are then reinterpreted in base 2.
-- NOTE(review): 'read' is partial — malformed input raises an error.
readBinary :: String -> Word32
readBinary = bin2dec . read
-- | Reinterpret the decimal digits of the argument as binary digits,
-- e.g. @bin2dec 101 == 5@. Digits other than 0\/1 are rejected by
-- 'convertBase'.
bin2dec :: Word32 -> Word32
bin2dec = convertBase 2 10
{-dec2bin :: Word32 -> Word32-}
{-dec2bin = convertBase 10 2-}
-- | Reinterpret a number written with digits of @fromBase@ (encoded in a
-- decimal-looking integer) as a value in @toBase@'s digit encoding, going
-- through base 10 as the intermediate representation. A digit outside the
-- valid range for both bases raises an error.
convertBase :: Integral a => a -> a -> a -> a
convertBase fromBase toBase = convertDec 10 toBase . convertDec fromBase 10
  where
    convertDec fb tb n = walk n 1
      where
        maxDigit = min fb tb - 1
        -- Peel digits off the least-significant end, scaling the place
        -- value by fb as we go.
        walk x fac
          | x == 0 = 0
          | digit >= 0 && digit <= maxDigit =
              digit * fac + walk (x `div` tb) (fac * fb)
          | otherwise = error "convertBase - invalid character"
          where digit = x `mod` tb
|
talw/crisp-compiler
|
src/Utils.hs
|
bsd-3-clause
| 845
| 0
| 13
| 278
| 300
| 160
| 140
| 19
| 3
|
module Control.Concurrent.Timer
( Timer
, TimerIO
, oneShotTimer
, oneShotStart
, oneShotRestart
, repeatedTimer
, repeatedStart
, repeatedRestart
, newTimer
, stopTimer
) where
------------------------------------------------------------------------------
import Control.Applicative
import Control.Concurrent (ThreadId, forkIO, killThread)
import Control.Concurrent.MVar (newMVar, tryTakeMVar, putMVar, modifyMVar_)
import Control.Concurrent.Suspend (Delay, suspend)
import Control.Monad
------------------------------------------------------------------------------
import Control.Concurrent.Timer.Types (Timer(..), TimerImmutable(..))
------------------------------------------------------------------------------
-- | Attempts to start a timer.
-- The started timer will have the given delay and action associated and will be a one-shot timer.
--
-- If the timer was already initialized, the previous timer will be stopped (its thread killed)
-- and the timer will be started anew.
--
-- Returns True if the start was successful,
-- otherwise (e.g. another thread is attempting to manipulate the timer) returns False.
oneShotStart :: TimerIO
             -> IO () -- ^ The action the timer will start with.
             -> Delay -- ^ The delay the timer will start with.
             -> IO Bool
oneShotStart (Timer mvmtim) a d = do
    mtim <- tryTakeMVar mvmtim
    case mtim of
      -- Another thread currently holds the timer state: refuse rather than block.
      Nothing -> return False
      Just prev -> do
        -- Kill the previously running timer thread, if there was one;
        -- then start the fresh one-shot timer and publish it.
        case prev of
          Just (TimerImmutable _ _ tid) -> killThread tid
          Nothing                       -> return ()
        oneShotTimerImmutable a d >>= putMVar mvmtim . Just
        return True
{-# INLINEABLE oneShotStart #-}
-- | Attempts to start a timer.
-- The started timer will have the given delay and action associated and will be a repeated timer.
--
-- If the timer was already initialized, the previous timer will be stopped (its thread killed)
-- and the timer will be started anew.
--
-- Returns True if the start was successful,
-- otherwise (e.g. another thread is attempting to manipulate the timer) returns False.
repeatedStart :: TimerIO
              -> IO () -- ^ The action the timer will start with.
              -> Delay -- ^ The delay the timer will start with.
              -> IO Bool
repeatedStart (Timer mvmtim) a d = do
    mtim <- tryTakeMVar mvmtim
    case mtim of
      -- Another thread currently holds the timer state: refuse rather than block.
      Nothing -> return False
      Just prev -> do
        -- Kill the previously running timer thread, if there was one;
        -- then start the fresh repeated timer and publish it.
        case prev of
          Just (TimerImmutable _ _ tid) -> killThread tid
          Nothing                       -> return ()
        repeatedTimerImmutable a d >>= putMVar mvmtim . Just
        return True
{-# INLINEABLE repeatedStart #-}
-- | Attempts to restart an already initialized timer.
-- The restarted timer keeps the same delay and action and will be a one-shot timer.
--
-- Returns True if the restart was successful,
-- otherwise (e.g. another thread is manipulating the timer, or the timer
-- was never initialized) returns False.
oneShotRestart :: TimerIO
               -> IO Bool
oneShotRestart (Timer mvmtim) = do
    mtim <- tryTakeMVar mvmtim
    case mtim of
      Just (Just (TimerImmutable a d tid)) -> do
        -- Stop the old thread, then republish a fresh one-shot timer
        -- with the same action and delay.
        killThread tid
        newTim <- oneShotTimerImmutable a d
        putMVar mvmtim (Just newTim)
        return True
      _ -> return False
{-# INLINEABLE oneShotRestart #-}
-- | Attempts to restart an already initialized timer.
-- The restarted timer will have the same delay and action associated and
-- will be a repeated timer (note: not one-shot, despite the similarity to
-- 'oneShotRestart').
--
-- Returns True if the restart was successful,
-- otherwise (e.g. another thread is attempting to manipulate the timer or
-- the timer was not initialized) returns False.
repeatedRestart :: TimerIO
                -> IO Bool
repeatedRestart (Timer mvmtim) = do
    mtim <- tryTakeMVar mvmtim
    case mtim of
      Just (Just (TimerImmutable a d tid)) -> do
        -- Kill the old timer thread and republish a fresh repeated timer.
        killThread tid
        repeatedTimerImmutable a d >>= putMVar mvmtim . Just
        return True
      -- Uninitialized timer or MVar held by another thread: do nothing.
      _ -> return False
{-# INLINEABLE repeatedRestart #-}
-- | Executes the given action once after the given delay has elapsed,
-- no sooner, maybe later.
oneShotTimer :: IO () -- ^ The action to be executed.
             -> Delay -- ^ The (minimal) time until the execution in microseconds.
             -> IO TimerIO
oneShotTimer a d = do
    tim <- oneShotTimerImmutable a d
    Timer <$> newMVar (Just tim)
{-# INLINE oneShotTimer #-}
-- | Executes the given action repeatedly with at least the given delay
-- between executions.
repeatedTimer :: IO () -- ^ The action to be executed.
              -> Delay -- ^ The (minimal) delay between executions.
              -> IO TimerIO
repeatedTimer a d = do
    tim <- repeatedTimerImmutable a d
    Timer <$> newMVar (Just tim)
{-# INLINE repeatedTimer #-}
-- | This function is blocking. It waits until it can take the timer state
-- (until there is a value in the MVar), then kills the timer's thread.
--
-- Afterwards the Timer is no longer initialized (the MVar contains Nothing).
stopTimer :: TimerIO
          -> IO ()
stopTimer (Timer mvmtim) = modifyMVar_ mvmtim $ \mtim ->
    case mtim of
      Nothing -> return Nothing
      Just (TimerImmutable _ _ tid) -> killThread tid >> return Nothing
{-# INLINE stopTimer #-}
-- | Creates a new timer. This does not start the timer.
newTimer :: IO TimerIO
newTimer = fmap Timer (newMVar Nothing)
{-# INLINE newTimer #-}
------------------------------------------------------------------------------
-- | Utility
type TimerIO = Timer IO
type TimerImmutableIO = TimerImmutable IO
-- | Forks a new thread that runs the supplied action (at least) after the
-- given delay, and packs the action, delay and thread id into an immutable
-- TimerImmutable value.
oneShotTimerImmutable :: IO () -- ^ The action to be executed.
                      -> Delay -- ^ The (minimal) time until the execution in microseconds.
                      -> IO TimerImmutableIO
oneShotTimerImmutable a d = do
    tid <- oneShotAction a d
    return (TimerImmutable a d tid)
{-# INLINE oneShotTimerImmutable #-}
-- | Forks a new thread that repeats the supplied action with (at least)
-- the given delay between executions, and packs the action, delay and
-- thread id into an immutable TimerImmutable value.
repeatedTimerImmutable :: IO () -- ^ The action to be executed.
                       -> Delay -- ^ The (minimal) delay between executions.
                       -> IO TimerImmutableIO
repeatedTimerImmutable a d = do
    tid <- repeatedAction a d
    return (TimerImmutable a d tid)
{-# INLINE repeatedTimerImmutable #-}
-- | Forks a new thread that runs the supplied action once, (at least)
-- after the given delay.
oneShotAction :: IO ()
              -> Delay
              -> IO ThreadId
oneShotAction action delay = fork $ do
    suspend delay
    action
{-# INLINE oneShotAction #-}
-- | Forks a new thread that repeats the supplied action forever, with
-- (at least) the given delay before each execution.
repeatedAction :: IO ()
               -> Delay
               -> IO ThreadId
repeatedAction action delay = fork loop
  where loop = forever (suspend delay >> action)
{-# INLINE repeatedAction #-}
fork :: IO () -> IO ThreadId
fork = forkIO
{-# INLINE fork #-}
|
uwap/timers
|
src/Control/Concurrent/Timer.hs
|
bsd-3-clause
| 7,354
| 0
| 14
| 1,845
| 1,151
| 594
| 557
| 111
| 3
|
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module JFlow.ReachAC
(
RACFact
, racFlow
, mayReach
, mayReaches
, mayReachingVars
, isAcyclic
)
where
import Jinja.Program
import Jat.Utils.Pretty as PP
import JFlow.Data
import Prelude
import qualified Data.Set as S
import Data.Maybe (fromMaybe)
--import Debug.Trace
-- | A single may-reach edge: @x :~>: y@ records that the object bound to
-- @x@ may (transitively) reach the object bound to @y@.
data Reaches = Var:~>:Var deriving (Eq,Ord)
-- | The set of all currently known may-reach edges.
type Reaching = S.Set Reaches
instance Pretty Reaches where
  pretty (x:~>:y) = pretty x <> text "~>" <> pretty y
instance Show Reaches where
  show = show . pretty
-- | All variables that @x@ may reach, according to the edge set.
reaches :: Var -> Reaching -> [Var]
reaches x rs = [ z | (y:~>:z) <- S.toList rs, y == x ]
-- | All variables that may reach @x@, according to the edge set.
reachable :: Var -> Reaching -> [Var]
reachable x rs = [ z | (z:~>:y) <- S.toList rs, y == x ]
-- | Variables whose object may lie on a heap cycle.
type Cyclic = S.Set Var
-- | May the object bound to this variable be cyclic?
maybeCyclic :: Var -> Cyclic -> Bool
maybeCyclic = S.member
--isAcyclic' :: Var -> Cyclic -> Bool
--isAcyclic' = S.notMember
-- | The combined analysis fact: the may-reach edge set together with the
-- set of possibly-cyclic variables.
data RACFact = RACFact Reaching Cyclic deriving (Eq,Ord)
-- | May @x@ reach @y@ in this fact?
mayReach :: RACFact -> Var -> Var -> Bool
mayReach (RACFact rs _) x y = y `elem` reaches x rs
-- | All variables that @x@ may reach in this fact.
mayReaches :: RACFact -> Var -> [Var]
mayReaches (RACFact rs _) x = reaches x rs
-- | All may-reach edges of this fact as plain pairs.
mayReachingVars :: RACFact -> [(Var,Var)]
mayReachingVars (RACFact rs _) = map (\(x:~>:y) -> (x,y)) $ S.toList rs
-- | Is @x@ known to be acyclic (i.e. not in the possibly-cyclic set)?
isAcyclic :: RACFact -> Var -> Bool
isAcyclic (RACFact _ cs) x = x `S.notMember` cs
instance MayReachQ RACFact where mayReachQ = mayReach
instance MayReachesQ RACFact where mayReachesQ = mayReaches
instance MayReachingVarsQ RACFact where mayReachingVarsQ = mayReachingVars
instance IsAcyclicQ RACFact where isAcyclicQ = isAcyclic
instance Pretty RACFact where
  pretty (RACFact rs cs) =
    string "RACFact"
    PP.<$> list (map pretty (S.elems rs))
    PP.<$> list (map (\v -> char '&' <> pretty v) (S.elems cs))
instance Show RACFact where
  show = show . pretty
-- | Drop self-edges @v ~> v@ for variables that are not possibly cyclic.
reduce :: RACFact -> RACFact
reduce (RACFact rs cs) = RACFact kept cs
  where kept = S.filter (\(v1:~>:v2) -> v1 /= v2 || maybeCyclic v1 cs) rs
-- | Apply a variable renaming to every edge and every cyclic variable.
rename :: (Var -> Var) -> RACFact -> RACFact
rename f (RACFact rs cs) = RACFact (S.map edge rs) (S.map f cs)
  where edge (x:~>:y) = f x :~>: f y
-- | Pointwise union of two facts.
union :: RACFact -> RACFact -> RACFact
union (RACFact rs1 cs1) (RACFact rs2 cs2) = RACFact (S.union rs1 rs2) (S.union cs1 cs2)
-- | Remove a variable: drop every edge mentioning it and its cyclic mark.
delete :: Var -> RACFact -> RACFact
delete x (RACFact rs cs) = RACFact keptEdges (S.delete x cs)
  where keptEdges = S.filter (\(y:~>:z) -> x /= y && x /= z) rs
-- | Filter edges and cyclic variables with separate predicates.
filter' :: (Reaches -> Bool) -> (Var -> Bool) -> RACFact -> RACFact
filter' keepEdge keepVar (RACFact rs cs) = RACFact (S.filter keepEdge rs) (S.filter keepVar cs)
-- | Keep only type-compatible edges and cyclic candidates, then 'reduce'.
normalize :: (Var -> Var -> Bool) -> (Var -> Bool) -> RACFact -> RACFact
normalize shTypes cyType = reduce . filter' (\(v1:~>:v2) -> shTypes v1 v2) cyType
-- | The reachability+acyclicity analysis packaged as a 'Flow'.
racFlow :: (HasIndexQ w, HasTypeQ w, MayShareQ w, MaySharesWithQ w, MayAliasQ w, Show w) => Flow RACFact w
racFlow = Flow racLattice racTransfer
-- | Join semi-lattice of facts: bottom is the empty fact, join is union.
racLattice :: SemiLattice RACFact
racLattice = SemiLattice racName racBot racJoin
  where
    racName = "Reachability+Acyclicity"
    racBot = RACFact S.empty S.empty
    racJoin _ = union
-- | Transfer function of the analysis: per-instruction update of the
-- reachability/acyclicity fact, plus setup/projection/extension for calls.
-- NOTE(review): relies heavily on queries from the companion fact @w@
-- (sharing, aliasing, types); semantics of those queries are defined
-- elsewhere — confirmed only by their names here.
racTransfer :: (HasIndexQ w, HasTypeQ w, MayShareQ w, MaySharesWithQ w, MayAliasQ w, Show w) => Transfer RACFact w
racTransfer = Transfer racTransferf racSetup racProject racExtend
  where
    -- Restrict the fact to edges/cycles that are possible given the
    -- (post-state) types of the variables.
    normalize' p w = normalize shTypes cyType
      where
        shTypes x y = areReachingTypes p (hasTypeQ w x) (hasTypeQ w y)
        cyType x = not $ isAcyclicType p (hasTypeQ w x)
    {-singleField p dn cn fn = and $ single (subClassesOf p dn)-}
      {-where single cns = [ False | (fn',cn',ty') <- concatMap (hasFields p) cns, (fn' /= fn || cn' /= cn) && not (isAcyclicType' p ty')]-}
    {-singleField p-}
    -- Substitution sending x to y and leaving everything else alone.
    x `to` y = \z -> if z == x then y else z
    -- Model "x := y": forget x, then duplicate y's facts under x's name.
    assign x y rac = rac' `union` rename (y `to` x) rac'
      where rac' = x `delete` rac
    racTransferf p _ ins (w',w) rac =
      let (i,j) = hasIndexQ w in racTransferf' p ins (w',w) rac i j
    -- (i,j) are the current call-frame index and stack height.
    racTransferf' p ins (w',w) rac@(RACFact rs cs) i j = case ins of
      Load n ->
        if isPrimitiveType $ hasTypeQ w' (LocVar i n)
          then rac
          else (StkVar i j `assign` LocVar i n) rac
      Store n -> let (x,y) = (LocVar i n, StkVar i (j+1)) in y `delete` (x `assign` y) rac
      Push _ -> rac
      Pop -> StkVar i j `delete` rac
      IAdd -> rac
      ISub -> rac
      ICmpGeq -> rac
      Goto _ -> rac
      IfFalse _ -> rac
      BNot -> rac
      BOr -> rac
      BAnd -> rac
      CmpEq -> StkVar i (j+1) `delete` (StkVar i j `delete` rac)
      CmpNeq -> StkVar i (j+1) `delete` (StkVar i j `delete` rac)
      New _ -> rac
      CheckCast _ -> normalize' p w rac -- error "JFlow.ReachAC.CheckCast: not implemented" -- normalize p q rac
      GetField fn cn ->
        if isPrimitiveType $ snd (field p cn fn)
          then x `delete` rac
          else normalize' p w $ RACFact (rs `S.union` rs') cs
        where
          x = StkVar i j
          -- Anything that shares with x may now reach x (via the field).
          rs' = S.fromList [ y :~>: x | y <- maySharesWithQ w x]
      -- Storing through a null reference yields the bottom fact.
      PutField _ _ | hasTypeQ w' (StkVar i (j+1)) == NullType -> RACFact S.empty S.empty
      PutField fn cn -> delete val . delete ref $ RACFact rs1 cs1 `union` RACFact rs' cs'
        where
          (val,ref) = (StkVar i (j+2), StkVar i (j+1))
          RefType tref = hasTypeQ w' ref
          cs1 = if singleCyclicField p tref (cn,fn) then ref `S.delete` cs else cs
          {-cs1 = cs-}
          rs1 = rs
          -- New edges: everything reaching ref may now reach everything
          -- reachable from val.
          rs' = S.fromList [ w1 :~>: w2 | w1 <- lhs1, w2 <- rhs1 ]
            where
              lhs1 = aliasWith ref ++ ref `reachable` rs
              rhs1 = aliasWith val ++ val `reaches` rs
          -- New cycles can only arise when val can already reach ref
          -- (or aliases it, or is itself possibly cyclic).
          cs'
            | isAcyclic'' p cn fn = cs1
            | (val `S.notMember`) cs1 && not (val `mayShare` ref) = cs1
            | otherwise = S.fromList [ x | lhs, x <- rhs ]
            where
              lhs = ref `elem` (val `reaches` rs1) || val `alias` ref || maybeCyclic val cs1
              rhs = aliasWith val ++ val `reachable` rs
          isAcyclic'' p1 cn1 fn1 = isAcyclicType p (snd $ field p1 cn1 fn1)
          --hasType = hasTypeQ w
          mayAlias = mayAliasQ w
          mayShare = mayShareQ w
          alias x y = x `talias` y && x `mayShare` y && x `mayAlias` y
          talias x y = areRelatedTypes p (hasTypeQ w' x) (hasTypeQ w' y)
          aliasWith x = filter (\y -> x `talias` y && x `mayAlias` y) $ maySharesWithQ w x
      Return -> undefined
      Invoke _ _ -> undefined
    -- Initial fact for a method: nothing reaches, nothing cyclic.
    racSetup _ _ _ = RACFact S.empty S.empty
    -- Map caller stack slots (the arguments) onto callee local variables.
    racProject _ _ _ nparams w = rename toV
      where
        (i,j) = hasIndexQ w
        toV z = z `fromMaybe` lookup z (zip [StkVar i k | k <- [j,j-1..]] [LocVar (i+1) k | k <- [nparams,nparams-1..0]])
    -- Fold the callee's result fact back into the caller frame.
    racExtend _ _ nparams w _ ac =
      filter' k1 k2 $ (rl `assign` rs) ac
      where
        (i,j) = hasIndexQ w
        (rs,rl) = (StkVar (i+1) 0, StkVar i (j -nparams))
        k (StkVar i2 _) = i2 <= i
        k (LocVar i2 _) = i2 <= i
        k1 (x:~>:y) = k x && k y
        k2 x = k x && k x
|
ComputationWithBoundedResources/jat
|
src/JFlow/ReachAC.hs
|
bsd-3-clause
| 7,216
| 0
| 19
| 2,155
| 2,932
| 1,542
| 1,390
| 144
| 26
|
{-# LANGUAGE Arrows #-}
module Playlistach.Model.WithId where
import Data.Profunctor.Product (p2)
import Control.Arrow
import Opaleye
-- | A row value paired with its integer primary key.
type WithId r = (Int, r)
-- | Column-level type of the primary key.
type IdColumn = Column PGInt4
-- | A column-level row paired with its id column.
type WithIdColumn r = (IdColumn, r)
-- | Extend table properties with a required \"id\" column in front of the
-- remaining columns.
withId :: TableProperties a b -> TableProperties (Column id_t, a) (Column id_t, b)
withId r = p2 (required "id", r)
-- | Given a query producing rows tagged with their id column, build a
-- query selecting the row whose id equals the supplied key.
findById :: QueryArr () (WithIdColumn r) -> QueryArr IdColumn r
findById query = proc key -> do
    (key', entry) <- query -< ()
    restrict -< key' .== key
    returnA -< entry
|
aemxdp/playlistach
|
backend/Playlistach/Model/WithId.hs
|
bsd-3-clause
| 524
| 1
| 10
| 100
| 202
| 110
| 92
| 15
| 1
|
module Machine.Internal.Step where
import Control.Lens
import qualified Data.HashMap.Lazy as H
import Machine.Internal.Data
import Machine.Internal.GC.MarkScan
import Machine.Internal.Heap (Addr)
import Machine.Internal.Instantiate
import qualified Machine.Internal.Heap as U
-- TODO use prism?
-- | Is this node fully evaluated data (a number or a constructor)?
isDataNode :: Node -> Bool
isDataNode node = case node of
  NNum _    -> True
  NData _ _ -> True
  _         -> False
-- | Addresses of the arguments of the applications on the spine stack.
-- NOTE(review): partial — the head pattern assumes a non-empty stack, and
-- every address above the head must point at an NAp node; anything else
-- crashes at the irrefutable pattern.
getargs :: Heap -> Stack -> [Addr]
getargs heap (sc:stack) = map getArg stack where
  getArg addr = arg where (NAp _ arg) = U.lookup addr heap
-- | Dispatch one primitive operation to its implementation.
primStep :: State -> Primitive -> State
primStep state Neg = primNeg state
primStep state Add = primArith state (+)
primStep state Sub = primArith state (-)
primStep state Mul = primArith state (*)
primStep state Div = primArith state div
primStep state If = primIf state
primStep state (PrimConstr tag arity) = primConstr state tag arity
primStep state Greater = primComp state (>)
primStep state GreaterEq = primComp state (>=)
primStep state Less = primComp state (<)
primStep state LessEq = primComp state (<=)
primStep state Eq = primComp state (==)
primStep state NotEq = primComp state (/=)
primStep state CasePair = primCasePair state
primStep state Abort = error "Abort!"
primStep state CaseList = primCaseList state
primStep state Print = primPrint state
primStep state Stop = primStop state
-- | Arithmetic primitive: apply a binary Int operation to two NNum nodes.
primArith :: State -> (Int -> Int -> Int) -> State
primArith state f = primDyadic state (\(NNum x) (NNum y) -> (NNum (f x y)))
-- | Comparison primitive: result is encoded as NData tag 2 (true) / 1 (false).
primComp :: State -> (Int -> Int -> Bool) -> State
primComp state f = primDyadic state $ \(NNum x) (NNum y) ->
  -- TODO remove all this magical NData 1/2 stuff!
  NData (if f x y then 2 else 1) []
-- | Generic two-argument primitive. If either argument is not yet in data
-- form, push the current spine root onto the dump and evaluate that
-- argument first; otherwise overwrite the redex root with the result.
primDyadic :: State -> (Node -> Node -> Node) -> State
primDyadic state f
  | length stack' > 3 = error "More than two arguments to dyadic prim"
  | length stack' < 3 = error "Less than two arguments to dyadic prim"
  | not (isDataNode m) = state & stack .~ [arg1Addr]
                               & dump %~ ([rootNode]:)
  | not (isDataNode n) = state & stack .~ [arg2Addr]
                               & dump %~ ([rootNode]:)
  | otherwise = state & stack %~ drop 2
                      & heap %~ U.update rootNode (m `f` n)
  where stack' = state^.stack
        heap' = state^.heap
        -- The root of the redex (the outer application node).
        rootNode = stack' !! 2
        args = getargs heap' stack'
        arg1Addr = head args
        arg2Addr = head $ tail args
        m = U.lookup arg1Addr heap'
        n = U.lookup arg2Addr heap'
-- | Unary negation primitive; same evaluation discipline as 'primDyadic'.
primNeg :: State -> State
primNeg state
  | length stack' > 2 = error "More than one argument to neg"
  | length stack' < 2 = error "No arguments to neg"
  | isDataNode arg = state & stack %~ tail
                           & heap %~ U.update (stack' !! 1) (NNum (-n))
  | otherwise = state & stack .~ [argAddr]
                      & dump %~ ([stack' !! 1]:)
  where stack' = state^.stack
        heap' = state^.heap
        argAddr = head $ getargs heap' stack'
        arg = U.lookup argAddr heap'
        NNum n = arg
-- | Chase NInd indirection nodes until a non-indirection node is found.
followIndirection :: Node -> Heap -> Node
followIndirection node heap = case node of
  NInd addr -> followIndirection (U.lookup addr heap) heap
  other     -> other
-- Transition rules (spine stack : dump : heap : globals):
--   a0:a1:...:an:s d h[a0:NSupercomb[x1, ..., xn] body] f
--   ==> ar:s d h'[an:NInd ar] f
--   a0:s d h[ a0:NPrim _ If
--             a1:NData _ ] f
--   ==>
-- | The @if@ primitive: force the condition to data form, then overwrite
-- the redex root with an indirection to the chosen branch.
primIf :: State -> State
primIf state
  | length stack' < 4 = error "Less than three arguments to if"
  | length stack' > 4 = error "More than three arguments to if"
  | isDataNode bool = state & stack .~ newStack
                            & heap .~ newHeap
  | otherwise = state & stack .~ [boolAddr]
                      & dump %~ ([stack' !! 3]:)
  where stack' = state^.stack
        heap' = state^.heap
        args = getargs heap' stack'
        boolAddr = head args
        -- TODO - do we need followIndirection?
        bool = followIndirection (U.lookup boolAddr heap') heap'
        -- isDataNode case: tag 1 encodes false (else branch), 2 true.
        NData t _ = bool
        branchAddr = args !! (if t == 1 then 2 else 1)
        newHeap = U.update (stack' !! 3) (NInd branchAddr) heap'
        newStack = drop 3 stack'
-- | @casePair p f@: force the pair to data form, then rebuild the spine
-- as @f a b@ and redirect the redex root at it.
primCasePair :: State -> State
primCasePair state
  | length stack' < 3 = error "Less than two arguments to casePair"
  | length stack' > 3 = error "More than two arguments to casePair"
  | isDataNode pair = state & stack .~ newStack
                            & heap .~ newHeap
  | otherwise = state & stack .~ [pairAddr]
                      & dump %~ ([stack' !! 2]:)
  where stack' = state^.stack
        heap' = state^.heap
        args = getargs heap' stack'
        pairAddr = head args
        -- TODO - do we need followIndirection?
        pair = followIndirection (U.lookup pairAddr heap') heap'
        -- isDataNode case
        -- this is disgusting
        (NData _ [aAddr, bAddr]) = pair
        fAddr = head $ tail args
        (heap'', fAppA) = U.alloc (NAp fAddr aAddr) heap'
        (heap''', newRoot) = U.alloc (NAp fAppA bAddr) heap''
        newHeap = U.update (last stack') (NInd newRoot) heap'''
        newStack = [fAddr, fAppA, newRoot]
-- | @caseList xs cn cc@: dispatch on the list constructor tag.
-- NOTE(review): the inner case on @tag@ only handles 1 and 2; any other
-- tag crashes with a pattern-match failure.
primCaseList :: State -> State
primCaseList state
  | length stack' < 4 = error "Less than three arguments to caseList"
  | length stack' > 4 = error "More than three arguments to caseList"
  | isDataNode lst = state & stack .~ newStack
                           & heap .~ newHeap
  | otherwise = state & stack .~ [lstAddr]
                      & dump %~ ([stack' !! 3]:)
  where stack' = state^.stack
        heap' = state^.heap
        args = getargs heap' stack'
        lstAddr = head args
        -- TODO - do we need followIndirection?
        lst = followIndirection (U.lookup lstAddr heap') heap'
        cnAddr = args !! 1
        ccAddr = args !! 2
        -- isDataNode case
        -- caseList Pack{1, 0} cn cc = cn
        -- caseList (Pack{2, 2} x xs) cn cc = cc x xs
        NData tag lstParts = lst
        (newHeap, newStack) = case tag of
          -- cn
          1 -> let heap'' = U.update (last stack') (NInd cnAddr) heap'
               in (heap'', drop 3 stack')
          -- cc x xs
          2 -> let [xAddr, xsAddr] = lstParts
                   (heap'', app1) = U.alloc (NAp ccAddr xAddr) heap'
                   (heap''', app2) = U.alloc (NAp app1 xsAddr) heap''
               in (heap''', [ccAddr, app1, app2])
-- | Halt the machine by clearing the spine stack.
primStop :: State -> State
primStop st = st & stack .~ []
-- | @print x k@: if the first argument is already a number, append it to
-- the output stream and continue with the second argument; otherwise
-- evaluate the first argument first (pushing the redex root on the dump).
primPrint :: State -> State
primPrint state
  | b1IsNum = state & output <>~ [n]
                    & stack .~ [b2]
  | otherwise = state & stack .~ [b1]
                      & dump .~ [[stack' !! 2]]
  where heap' = state^.heap
        stack' = state^.stack
        [b1, b2] = getargs heap' stack'
        b1' = U.lookup b1 heap'
        b1IsNum = case b1' of
          (NNum _) -> True
          _ -> False
        NNum n = b1'
-- Transition rule:
--   a0:a1:...:an:[] d h [ a0:NPrim (PrimConstr t n) f
--                         a1:NAp a b1
--                         ...
--                         an:NAp a(n-1) bn ]
--   ==> an:[] d h [ an:NData t [b1,...,bn] ] f
-- | Saturated constructor application: overwrite the redex root with an
-- NData node holding the component addresses.
primConstr :: State -> Int -> Int -> State
primConstr state tag arity
  | length stack' > (arity + 1) = error "Too many arguments to constructor"
  | length stack' < (arity + 1) = error "Not enough arguments to constructor"
  | otherwise = state & stack .~ [updAddr]
                      & heap .~ newHeap
  where stack' = state^.stack
        heap' = state^.heap
        -- Get the addr of a component from the address where it's applied
        -- TODO - this is ill-conceived
        componentAddrs = getargs heap' stack'
        componentAddrs' = take arity componentAddrs
        updAddr = stack' ^?! ix arity
        -- updAddr = last stack'
        newHeap = U.update updAddr (NData tag componentAddrs') heap'
        -- (newHeap, dataAddr) = U.alloc (NData tag componentAddrs) (state^.heap)
        -- (newHeap, dataAddr) = instantiate constr (state^.heap)
-- | A data node on top of a singleton stack means the current evaluation
-- is finished: pop the saved spine stack from the dump and resume it.
-- Anywhere else, a data node in function position is a type error.
unDump :: State -> State
unDump state
  | state^.stack^.to length == 1 && not (null $ state^.dump)
  = state & stack .~ head (state^.dump)
          & dump %~ tail
unDump _ = error "Data applied as a function!"
-- | One machine step: inspect the node on top of the spine stack and
-- dispatch on its kind.
step :: State -> State
step state = dispatch $ U.lookup (head (state^.stack)) (state^.heap) where
  dispatch (NNum _) = unDump state
  dispatch (NAp a1 a2) = apStep state a1 a2
  dispatch (NSupercomb sc args body) = scStep state sc args body
  -- a :s d h[a:NInd a1] f
  -- ==> a1:s d h f
  dispatch (NInd a1) = state & stack._head .~ a1 -- TODO update stats?
  dispatch (NPrim _ prim) = primStep state prim
  dispatch (NData _ _) = unDump state
-- | Unwind an application node: short-circuit an indirection in the
-- argument position, otherwise push the function onto the spine.
apStep :: State -> Addr -> Addr -> State
apStep state a1 a2 = case state^.heap^.to (U.lookup a2) of
  NInd a2' -> state & heap %~ U.update (head (state^.stack)) (NAp a1 a2')
  _ -> state & stack %~ (a1:)
-- | Step a supercombinator. Described by the transition rule:
--
--      a0:a1:...:an:s  d  h[a0:NSupercomb[x1, ..., xn] body]  f
--  ==>          ar:s  d  h'[an:NInd ar]                       f
--  where (h', ar) = instantiate body h f [x1 -> a1, ..., xn -> an]
--
-- In other words, overwrite node an (the root of the redex) with an
-- indirection to ar (the root of the result). If the supercombinator is
-- a CAF then n=0 and the node to be modified is the supercombinator node
-- itself.
scStep :: State -> Name -> [Name] -> CoreExpr -> State
scStep state _ argNames body = state & stack .~ newStack
                                     & heap .~ newHeap
  where
    stack' = state^.stack
    heap' = state^.heap
    newStack = drop (length argNames) stack'
    argBindings = zip argNames $ getargs heap' stack'
    env = H.union (H.fromList argBindings) (state^.globals)
    bodyAddr = head newStack
    newHeap = instantiateAndUpdate body bodyAddr heap' env
|
joelburget/graphreduction
|
src/Machine/Internal/Step.hs
|
bsd-3-clause
| 10,258
| 0
| 16
| 3,342
| 3,047
| 1,568
| 1,479
| -1
| -1
|
-----------------------------------------------------------------------------
--
-- Pretty-printing assembly language
--
-- (c) The University of Glasgow 1993-2005
--
-----------------------------------------------------------------------------
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning supression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module PPC.Ppr (
pprNatCmmDecl,
pprBasicBlock,
pprSectionHeader,
pprData,
pprInstr,
pprSize,
pprImm,
pprDataItem,
)
where
import PPC.Regs
import PPC.Instr
import PPC.Cond
import PprBase
import Instruction
import Size
import Reg
import RegClass
import TargetReg
import OldCmm
import CLabel
import Unique ( pprUnique, Uniquable(..) )
import Platform
import Pretty
import FastString
import qualified Outputable
import Outputable ( PlatformOutputable, panic )
import Data.Word
import Data.Bits
-- -----------------------------------------------------------------------------
-- Printing this stuff out
-- | Pretty-print one top-level native-code declaration: either a data
-- section or a procedure (with or without an info table).
pprNatCmmDecl :: Platform -> NatCmmDecl CmmStatics Instr -> Doc
pprNatCmmDecl platform (CmmData section dats) =
  pprSectionHeader platform section $$ pprDatas platform dats
-- special case for split markers:
pprNatCmmDecl platform (CmmProc Nothing lbl (ListGraph []))
  = pprLabel platform lbl
-- special case for code without an info table:
pprNatCmmDecl platform (CmmProc Nothing lbl (ListGraph blocks)) =
  pprSectionHeader platform Text $$
  pprLabel platform lbl $$ -- blocks guaranteed not null, so label needed
  vcat (map (pprBasicBlock platform) blocks)
pprNatCmmDecl platform (CmmProc (Just (Statics info_lbl info)) _entry_lbl (ListGraph blocks)) =
  pprSectionHeader platform Text $$
  (
    (if platformHasSubsectionsViaSymbols platform
     then pprCLabel_asm platform (mkDeadStripPreventer info_lbl) <> char ':'
     else empty) $$
    vcat (map (pprData platform) info) $$
    pprLabel platform info_lbl
  ) $$
  vcat (map (pprBasicBlock platform) blocks) $$
  -- above: Even the first block gets a label, because with branch-chain
  -- elimination, it might be the target of a goto.
  (if platformHasSubsectionsViaSymbols platform
   then
     -- If we are using the .subsections_via_symbols directive
     -- (available on recent versions of Darwin),
     -- we have to make sure that there is some kind of reference
     -- from the entry code to a label on the _top_ of of the info table,
     -- so that the linker will not think it is unreferenced and dead-strip
     -- it. That's why the label is called a DeadStripPreventer (_dsp).
     text "\t.long "
       <+> pprCLabel_asm platform info_lbl
       <+> char '-'
       <+> pprCLabel_asm platform (mkDeadStripPreventer info_lbl)
   else empty)
-- | Print a basic block: its temp label followed by its instructions.
pprBasicBlock :: Platform -> NatBasicBlock Instr -> Doc
pprBasicBlock platform (BasicBlock blockid instrs) =
  pprLabel platform (mkAsmTempLabel (getUnique blockid)) $$
  vcat (map (pprInstr platform) instrs)
-- | Print a labelled group of static data items.
pprDatas :: Platform -> CmmStatics -> Doc
pprDatas platform (Statics lbl dats) = vcat (pprLabel platform lbl : map (pprData platform) dats)
-- | Print a single static data item.
pprData :: Platform -> CmmStatic -> Doc
pprData _ (CmmString str) = pprASCII str
pprData platform (CmmUninitialised bytes) = ptext (sLit keyword) <> int bytes
    -- Darwin's assembler spells the reserve-space directive differently.
    where keyword = case platformOS platform of
                      OSDarwin -> ".space "
                      _ -> ".skip "
pprData platform (CmmStaticLit lit) = pprDataItem platform lit
-- | Emit a .globl directive for externally visible labels.
pprGloblDecl :: Platform -> CLabel -> Doc
pprGloblDecl platform lbl
  | not (externallyVisibleCLabel lbl) = empty
  | otherwise = ptext (sLit ".globl ") <> pprCLabel_asm platform lbl
-- | Emit a .type directive (ELF/Linux only) for visible labels.
pprTypeAndSizeDecl :: Platform -> CLabel -> Doc
pprTypeAndSizeDecl platform lbl
  | platformOS platform == OSLinux && externallyVisibleCLabel lbl
    = ptext (sLit ".type ") <>
      pprCLabel_asm platform lbl <> ptext (sLit ", @object")
pprTypeAndSizeDecl _ _
  = empty
-- | Print a label with its visibility/type directives and trailing colon.
pprLabel :: Platform -> CLabel -> Doc
pprLabel platform lbl = pprGloblDecl platform lbl
                     $$ pprTypeAndSizeDecl platform lbl
                     $$ (pprCLabel_asm platform lbl <> char ':')
-- | Emit a string as individual .byte directives, NUL-terminated.
pprASCII :: [Word8] -> Doc
pprASCII str
  = vcat (map do1 str) $$ do1 0
    where
       do1 :: Word8 -> Doc
       do1 w = ptext (sLit "\t.byte\t") <> int (fromIntegral w)
-- -----------------------------------------------------------------------------
-- pprInstr: print an 'Instr'
instance PlatformOutputable Instr where
    pprPlatform platform instr = Outputable.docToSDoc $ pprInstr platform instr
-- | Print a register: real registers get their platform name, virtual
-- registers get a "%v…_" prefix plus their unique.
pprReg :: Platform -> Reg -> Doc
pprReg platform r
  = case r of
      RegReal (RealRegSingle i) -> ppr_reg_no i
      RegReal (RealRegPair{}) -> panic "PPC.pprReg: no reg pairs on this arch"
      RegVirtual (VirtualRegI u) -> text "%vI_" <> asmSDoc (pprUnique u)
      RegVirtual (VirtualRegHi u) -> text "%vHi_" <> asmSDoc (pprUnique u)
      RegVirtual (VirtualRegF u) -> text "%vF_" <> asmSDoc (pprUnique u)
      RegVirtual (VirtualRegD u) -> text "%vD_" <> asmSDoc (pprUnique u)
      RegVirtual (VirtualRegSSE u) -> text "%vSSE_" <> asmSDoc (pprUnique u)
  where
    ppr_reg_no :: Int -> Doc
    -- Darwin spells out register names (r0…r31, f0…f31); elsewhere the
    -- bare number suffices (0-31 GPRs, 32-63 FPRs printed as 0-31).
    ppr_reg_no i =
        case platformOS platform of
        OSDarwin ->
            ptext
                (case i of {
                 0 -> sLit "r0"; 1 -> sLit "r1";
                 2 -> sLit "r2"; 3 -> sLit "r3";
                 4 -> sLit "r4"; 5 -> sLit "r5";
                 6 -> sLit "r6"; 7 -> sLit "r7";
                 8 -> sLit "r8"; 9 -> sLit "r9";
                 10 -> sLit "r10"; 11 -> sLit "r11";
                 12 -> sLit "r12"; 13 -> sLit "r13";
                 14 -> sLit "r14"; 15 -> sLit "r15";
                 16 -> sLit "r16"; 17 -> sLit "r17";
                 18 -> sLit "r18"; 19 -> sLit "r19";
                 20 -> sLit "r20"; 21 -> sLit "r21";
                 22 -> sLit "r22"; 23 -> sLit "r23";
                 24 -> sLit "r24"; 25 -> sLit "r25";
                 26 -> sLit "r26"; 27 -> sLit "r27";
                 28 -> sLit "r28"; 29 -> sLit "r29";
                 30 -> sLit "r30"; 31 -> sLit "r31";
                 32 -> sLit "f0"; 33 -> sLit "f1";
                 34 -> sLit "f2"; 35 -> sLit "f3";
                 36 -> sLit "f4"; 37 -> sLit "f5";
                 38 -> sLit "f6"; 39 -> sLit "f7";
                 40 -> sLit "f8"; 41 -> sLit "f9";
                 42 -> sLit "f10"; 43 -> sLit "f11";
                 44 -> sLit "f12"; 45 -> sLit "f13";
                 46 -> sLit "f14"; 47 -> sLit "f15";
                 48 -> sLit "f16"; 49 -> sLit "f17";
                 50 -> sLit "f18"; 51 -> sLit "f19";
                 52 -> sLit "f20"; 53 -> sLit "f21";
                 54 -> sLit "f22"; 55 -> sLit "f23";
                 56 -> sLit "f24"; 57 -> sLit "f25";
                 58 -> sLit "f26"; 59 -> sLit "f27";
                 60 -> sLit "f28"; 61 -> sLit "f29";
                 62 -> sLit "f30"; 63 -> sLit "f31";
                 _ -> sLit "very naughty powerpc register"
                 })
        _
         | i <= 31 -> int i -- GPRs
         | i <= 63 -> int (i-32) -- FPRs
         | otherwise -> ptext (sLit "very naughty powerpc register")
-- | Operand-size suffix used in load/store mnemonics.
pprSize :: Size -> Doc
pprSize x
 = ptext (case x of
            II8 -> sLit "b"
            II16 -> sLit "h"
            II32 -> sLit "w"
            FF32 -> sLit "fs"
            FF64 -> sLit "fd"
            _ -> panic "PPC.Ppr.pprSize: no match")
-- | Branch-condition mnemonic suffix. Note the unsigned conditions map to
-- the same suffixes as the signed ones here.
pprCond :: Cond -> Doc
pprCond c
 = ptext (case c of {
            ALWAYS -> sLit "";
            EQQ -> sLit "eq"; NE -> sLit "ne";
            LTT -> sLit "lt"; GE -> sLit "ge";
            GTT -> sLit "gt"; LE -> sLit "le";
            LU -> sLit "lt"; GEU -> sLit "ge";
            GU -> sLit "gt"; LEU -> sLit "le"; })
-- | Print an immediate operand. The LO/HI/HA forms use Darwin's
-- lo16/hi16/ha16 syntax or the ELF \@l/\@h/\@ha relocation suffixes.
pprImm :: Platform -> Imm -> Doc
pprImm _ (ImmInt i) = int i
pprImm _ (ImmInteger i) = integer i
pprImm platform (ImmCLbl l) = pprCLabel_asm platform l
pprImm platform (ImmIndex l i) = pprCLabel_asm platform l <> char '+' <> int i
pprImm _ (ImmLit s) = s
pprImm _ (ImmFloat _) = ptext (sLit "naughty float immediate")
pprImm _ (ImmDouble _) = ptext (sLit "naughty double immediate")
pprImm platform (ImmConstantSum a b) = pprImm platform a <> char '+' <> pprImm platform b
pprImm platform (ImmConstantDiff a b) = pprImm platform a <> char '-'
                   <> lparen <> pprImm platform b <> rparen
pprImm platform (LO i)
  = if platformOS platform == OSDarwin
    then hcat [ text "lo16(", pprImm platform i, rparen ]
    else pprImm platform i <> text "@l"
pprImm platform (HI i)
  = if platformOS platform == OSDarwin
    then hcat [ text "hi16(", pprImm platform i, rparen ]
    else pprImm platform i <> text "@h"
pprImm platform (HA i)
  = if platformOS platform == OSDarwin
    then hcat [ text "ha16(", pprImm platform i, rparen ]
    else pprImm platform i <> text "@ha"
-- | Print an addressing mode: either reg,reg or displacement(reg).
pprAddr :: Platform -> AddrMode -> Doc
pprAddr platform (AddrRegReg r1 r2)
  = pprReg platform r1 <+> ptext (sLit ", ") <+> pprReg platform r2
pprAddr platform (AddrRegImm r1 (ImmInt i)) = hcat [ int i, char '(', pprReg platform r1, char ')' ]
pprAddr platform (AddrRegImm r1 (ImmInteger i)) = hcat [ integer i, char '(', pprReg platform r1, char ')' ]
pprAddr platform (AddrRegImm r1 imm) = hcat [ pprImm platform imm, char '(', pprReg platform r1, char ')' ]
-- | Emit the assembler directive that opens a section, with the Darwin or
-- ELF spelling as appropriate.
pprSectionHeader :: Platform -> Section -> Doc
pprSectionHeader platform seg
 = case seg of
        Text -> ptext (sLit ".text\n.align 2")
        Data -> ptext (sLit ".data\n.align 2")
        ReadOnlyData
         | osDarwin -> ptext (sLit ".const\n.align 2")
         | otherwise -> ptext (sLit ".section .rodata\n\t.align 2")
        RelocatableReadOnlyData
         | osDarwin -> ptext (sLit ".const_data\n.align 2")
         | otherwise -> ptext (sLit ".data\n\t.align 2")
        UninitialisedData
         | osDarwin -> ptext (sLit ".const_data\n.align 2")
         | otherwise -> ptext (sLit ".section .bss\n\t.align 2")
        ReadOnlyData16
         | osDarwin -> ptext (sLit ".const\n.align 4")
         | otherwise -> ptext (sLit ".section .rodata\n\t.align 4")
        OtherSection _ ->
            panic "PprMach.pprSectionHeader: unknown section"
    where osDarwin = platformOS platform == OSDarwin
-- | Emit a static literal as the appropriately sized data directive(s).
pprDataItem :: Platform -> CmmLit -> Doc
pprDataItem platform lit
  = vcat (ppr_item (cmmTypeSize $ cmmLitType lit) lit)
    where
        imm = litToImm lit
        ppr_item II8 _ = [ptext (sLit "\t.byte\t") <> pprImm platform imm]
        ppr_item II32 _ = [ptext (sLit "\t.long\t") <> pprImm platform imm]
        ppr_item FF32 (CmmFloat r _)
           = let bs = floatToBytes (fromRational r)
             in map (\b -> ptext (sLit "\t.byte\t") <> pprImm platform (ImmInt b)) bs
        ppr_item FF64 (CmmFloat r _)
           = let bs = doubleToBytes (fromRational r)
             in map (\b -> ptext (sLit "\t.byte\t") <> pprImm platform (ImmInt b)) bs
        ppr_item II16 _ = [ptext (sLit "\t.short\t") <> pprImm platform imm]
        -- 64-bit value split into two .long words, high word first —
        -- presumably the big-endian layout PPC expects; confirm.
        ppr_item II64 (CmmInt x _) =
                [ptext (sLit "\t.long\t")
                    <> int (fromIntegral
                        (fromIntegral (x `shiftR` 32) :: Word32)),
                 ptext (sLit "\t.long\t")
                    <> int (fromIntegral (fromIntegral x :: Word32))]
        ppr_item _ _
                = panic "PPC.Ppr.pprDataItem: no match"
-- | Pretty-print one PPC 'Instr' as assembly text.  Pseudo-instructions
-- (COMMENT, DELTA) produce nothing or a comment; NEWBLOCK and LDATA
-- must have been eliminated before this point and are 'panic's here.
pprInstr :: Platform -> Instr -> Doc

pprInstr _ (COMMENT _) = empty -- nuke 'em
{-
pprInstr platform (COMMENT s) =
     if platformOS platform == OSLinux
     then ptext (sLit "# ") <> ftext s
     else ptext (sLit "; ") <> ftext s
-}
pprInstr platform (DELTA d)
   = pprInstr platform (COMMENT (mkFastString ("\tdelta = " ++ show d)))

pprInstr _ (NEWBLOCK _)
   = panic "PprMach.pprInstr: NEWBLOCK"

pprInstr _ (LDATA _ _)
   = panic "PprMach.pprInstr: LDATA"

{-
pprInstr _ (SPILL reg slot)
   = hcat [
        ptext (sLit "\tSPILL"),
        char '\t',
        pprReg platform reg,
        comma,
        ptext (sLit "SLOT") <> parens (int slot)]

pprInstr _ (RELOAD slot reg)
   = hcat [
        ptext (sLit "\tRELOAD"),
        char '\t',
        ptext (sLit "SLOT") <> parens (int slot),
        comma,
        pprReg platform reg]
-}

-- Loads: mnemonic is "l" plus a size suffix; register+register
-- addressing adds an 'x' (indexed form).
pprInstr platform (LD sz reg addr) = hcat [
        char '\t',
        ptext (sLit "l"),
        ptext (case sz of
            II8  -> sLit "bz"
            II16 -> sLit "hz"
            II32 -> sLit "wz"
            FF32 -> sLit "fs"
            FF64 -> sLit "fd"
            _    -> panic "PPC.Ppr.pprInstr: no match"
            ),
        case addr of AddrRegImm _ _ -> empty
                     AddrRegReg _ _ -> char 'x',
        char '\t',
        pprReg platform reg,
        ptext (sLit ", "),
        pprAddr platform addr
    ]

-- Algebraic (sign-extending) load variant; float sizes fall back to
-- the plain float-load suffixes since floats have no algebraic form.
pprInstr platform (LA sz reg addr) = hcat [
        char '\t',
        ptext (sLit "l"),
        ptext (case sz of
            II8  -> sLit "ba"
            II16 -> sLit "ha"
            II32 -> sLit "wa"
            FF32 -> sLit "fs"
            FF64 -> sLit "fd"
            _    -> panic "PPC.Ppr.pprInstr: no match"
            ),
        case addr of AddrRegImm _ _ -> empty
                     AddrRegReg _ _ -> char 'x',
        char '\t',
        pprReg platform reg,
        ptext (sLit ", "),
        pprAddr platform addr
    ]

pprInstr platform (ST sz reg addr) = hcat [
        char '\t',
        ptext (sLit "st"),
        pprSize sz,
        case addr of AddrRegImm _ _ -> empty
                     AddrRegReg _ _ -> char 'x',
        char '\t',
        pprReg platform reg,
        ptext (sLit ", "),
        pprAddr platform addr
    ]

-- Store with update (writes back the effective address).
pprInstr platform (STU sz reg addr) = hcat [
        char '\t',
        ptext (sLit "st"),
        pprSize sz,
        ptext (sLit "u\t"),
        case addr of AddrRegImm _ _ -> empty
                     AddrRegReg _ _ -> char 'x',
        pprReg platform reg,
        ptext (sLit ", "),
        pprAddr platform addr
    ]

pprInstr platform (LIS reg imm) = hcat [
        char '\t',
        ptext (sLit "lis"),
        char '\t',
        pprReg platform reg,
        ptext (sLit ", "),
        pprImm platform imm
    ]

pprInstr platform (LI reg imm) = hcat [
        char '\t',
        ptext (sLit "li"),
        char '\t',
        pprReg platform reg,
        ptext (sLit ", "),
        pprImm platform imm
    ]

-- Register move; a move to itself emits nothing, and the mnemonic
-- depends on the register class (integer "mr" vs floating "fmr").
pprInstr platform (MR reg1 reg2)
    | reg1 == reg2 = empty
    | otherwise = hcat [
        char '\t',
        case targetClassOfReg platform reg1 of
            RcInteger -> ptext (sLit "mr")
            _         -> ptext (sLit "fmr"),
        char '\t',
        pprReg platform reg1,
        ptext (sLit ", "),
        pprReg platform reg2
    ]

-- Signed compare; immediate operands select the "cmpi" form.
pprInstr platform (CMP sz reg ri) = hcat [
        char '\t',
        op,
        char '\t',
        pprReg platform reg,
        ptext (sLit ", "),
        pprRI platform ri
    ]
    where
        op = hcat [
                ptext (sLit "cmp"),
                pprSize sz,
                case ri of
                    RIReg _ -> empty
                    RIImm _ -> char 'i'
            ]

-- Unsigned (logical) compare.
pprInstr platform (CMPL sz reg ri) = hcat [
        char '\t',
        op,
        char '\t',
        pprReg platform reg,
        ptext (sLit ", "),
        pprRI platform ri
    ]
    where
        op = hcat [
                ptext (sLit "cmpl"),
                pprSize sz,
                case ri of
                    RIReg _ -> empty
                    RIImm _ -> char 'i'
            ]

pprInstr platform (BCC cond blockid) = hcat [
        char '\t',
        ptext (sLit "b"),
        pprCond cond,
        char '\t',
        pprCLabel_asm platform lbl
    ]
    where lbl = mkAsmTempLabel (getUnique blockid)

-- Far conditional branch: branch on the negated condition over an
-- unconditional branch, to escape the conditional-branch displacement
-- range.
pprInstr platform (BCCFAR cond blockid) = vcat [
        hcat [
            ptext (sLit "\tb"),
            pprCond (condNegate cond),
            ptext (sLit "\t$+8")
        ],
        hcat [
            ptext (sLit "\tb\t"),
            pprCLabel_asm platform lbl
        ]
    ]
    where lbl = mkAsmTempLabel (getUnique blockid)

pprInstr platform (JMP lbl) = hcat [ -- an alias for b that takes a CLabel
        char '\t',
        ptext (sLit "b"),
        char '\t',
        pprCLabel_asm platform lbl
    ]

pprInstr platform (MTCTR reg) = hcat [
        char '\t',
        ptext (sLit "mtctr"),
        char '\t',
        pprReg platform reg
    ]

pprInstr _ (BCTR _ _) = hcat [
        char '\t',
        ptext (sLit "bctr")
    ]

pprInstr platform (BL lbl _) = hcat [
        ptext (sLit "\tbl\t"),
        pprCLabel_asm platform lbl
    ]

pprInstr _ (BCTRL _) = hcat [
        char '\t',
        ptext (sLit "bctrl")
    ]

-- Integer arithmetic / logic; most go through 'pprLogic', which picks
-- the immediate ("...i") form when the last operand is an immediate.
pprInstr platform (ADD reg1 reg2 ri) = pprLogic platform (sLit "add") reg1 reg2 ri

pprInstr platform (ADDIS reg1 reg2 imm) = hcat [
        char '\t',
        ptext (sLit "addis"),
        char '\t',
        pprReg platform reg1,
        ptext (sLit ", "),
        pprReg platform reg2,
        ptext (sLit ", "),
        pprImm platform imm
    ]

pprInstr platform (ADDC reg1 reg2 reg3) = pprLogic platform (sLit "addc") reg1 reg2 (RIReg reg3)
pprInstr platform (ADDE reg1 reg2 reg3) = pprLogic platform (sLit "adde") reg1 reg2 (RIReg reg3)
pprInstr platform (SUBF reg1 reg2 reg3) = pprLogic platform (sLit "subf") reg1 reg2 (RIReg reg3)
pprInstr platform (MULLW reg1 reg2 ri@(RIReg _)) = pprLogic platform (sLit "mullw") reg1 reg2 ri
pprInstr platform (MULLW reg1 reg2 ri@(RIImm _)) = pprLogic platform (sLit "mull") reg1 reg2 ri
pprInstr platform (DIVW reg1 reg2 reg3) = pprLogic platform (sLit "divw") reg1 reg2 (RIReg reg3)
pprInstr platform (DIVWU reg1 reg2 reg3) = pprLogic platform (sLit "divwu") reg1 reg2 (RIReg reg3)

-- Overflow-detecting multiply: mullwo sets XER[OV]; the following
-- mfxer/rlwinm sequence extracts that bit into reg1.
pprInstr platform (MULLW_MayOflo reg1 reg2 reg3) = vcat [
        hcat [ ptext (sLit "\tmullwo\t"), pprReg platform reg1, ptext (sLit ", "),
               pprReg platform reg2, ptext (sLit ", "),
               pprReg platform reg3 ],
        hcat [ ptext (sLit "\tmfxer\t"), pprReg platform reg1 ],
        hcat [ ptext (sLit "\trlwinm\t"), pprReg platform reg1, ptext (sLit ", "),
               pprReg platform reg1, ptext (sLit ", "),
               ptext (sLit "2, 31, 31") ]
    ]

-- for some reason, "andi" doesn't exist.
-- we'll use "andi." instead.
pprInstr platform (AND reg1 reg2 (RIImm imm)) = hcat [
        char '\t',
        ptext (sLit "andi."),
        char '\t',
        pprReg platform reg1,
        ptext (sLit ", "),
        pprReg platform reg2,
        ptext (sLit ", "),
        pprImm platform imm
    ]
pprInstr platform (AND reg1 reg2 ri) = pprLogic platform (sLit "and") reg1 reg2 ri

pprInstr platform (OR reg1 reg2 ri) = pprLogic platform (sLit "or") reg1 reg2 ri
pprInstr platform (XOR reg1 reg2 ri) = pprLogic platform (sLit "xor") reg1 reg2 ri

pprInstr platform (XORIS reg1 reg2 imm) = hcat [
        char '\t',
        ptext (sLit "xoris"),
        char '\t',
        pprReg platform reg1,
        ptext (sLit ", "),
        pprReg platform reg2,
        ptext (sLit ", "),
        pprImm platform imm
    ]

pprInstr platform (EXTS sz reg1 reg2) = hcat [
        char '\t',
        ptext (sLit "exts"),
        pprSize sz,
        char '\t',
        pprReg platform reg1,
        ptext (sLit ", "),
        pprReg platform reg2
    ]

pprInstr platform (NEG reg1 reg2) = pprUnary platform (sLit "neg") reg1 reg2
pprInstr platform (NOT reg1 reg2) = pprUnary platform (sLit "not") reg1 reg2

-- Shifts clamp out-of-range immediate shift amounts via 'limitShiftRI'.
pprInstr platform (SLW reg1 reg2 ri) = pprLogic platform (sLit "slw") reg1 reg2 (limitShiftRI ri)
pprInstr platform (SRW reg1 reg2 ri) = pprLogic platform (sLit "srw") reg1 reg2 (limitShiftRI ri)
pprInstr platform (SRAW reg1 reg2 ri) = pprLogic platform (sLit "sraw") reg1 reg2 (limitShiftRI ri)

-- Rotate-left-word-immediate-then-AND-with-mask (sh, mask-begin, mask-end).
pprInstr platform (RLWINM reg1 reg2 sh mb me) = hcat [
        ptext (sLit "\trlwinm\t"),
        pprReg platform reg1,
        ptext (sLit ", "),
        pprReg platform reg2,
        ptext (sLit ", "),
        int sh,
        ptext (sLit ", "),
        int mb,
        ptext (sLit ", "),
        int me
    ]

-- Floating point; 'pprBinaryF' appends the single-precision 's' suffix.
pprInstr platform (FADD sz reg1 reg2 reg3) = pprBinaryF platform (sLit "fadd") sz reg1 reg2 reg3
pprInstr platform (FSUB sz reg1 reg2 reg3) = pprBinaryF platform (sLit "fsub") sz reg1 reg2 reg3
pprInstr platform (FMUL sz reg1 reg2 reg3) = pprBinaryF platform (sLit "fmul") sz reg1 reg2 reg3
pprInstr platform (FDIV sz reg1 reg2 reg3) = pprBinaryF platform (sLit "fdiv") sz reg1 reg2 reg3
pprInstr platform (FNEG reg1 reg2) = pprUnary platform (sLit "fneg") reg1 reg2

pprInstr platform (FCMP reg1 reg2) = hcat [
        char '\t',
        ptext (sLit "fcmpu\tcr0, "),
            -- Note: we're using fcmpu, not fcmpo
            -- The difference is with fcmpo, compare with NaN is an invalid operation.
            -- We don't handle invalid fp ops, so we don't care
        pprReg platform reg1,
        ptext (sLit ", "),
        pprReg platform reg2
    ]

pprInstr platform (FCTIWZ reg1 reg2) = pprUnary platform (sLit "fctiwz") reg1 reg2
pprInstr platform (FRSP reg1 reg2) = pprUnary platform (sLit "frsp") reg1 reg2

-- Condition-register NOR over CR bit indices (not general registers).
pprInstr _ (CRNOR dst src1 src2) = hcat [
        ptext (sLit "\tcrnor\t"),
        int dst,
        ptext (sLit ", "),
        int src1,
        ptext (sLit ", "),
        int src2
    ]

pprInstr platform (MFCR reg) = hcat [
        char '\t',
        ptext (sLit "mfcr"),
        char '\t',
        pprReg platform reg
    ]

pprInstr platform (MFLR reg) = hcat [
        char '\t',
        ptext (sLit "mflr"),
        char '\t',
        pprReg platform reg
    ]

-- Materialise the current PC: branch-and-link to the next instruction,
-- then read the link register.
pprInstr platform (FETCHPC reg) = vcat [
        ptext (sLit "\tbcl\t20,31,1f"),
        hcat [ ptext (sLit "1:\tmflr\t"), pprReg platform reg ]
    ]

pprInstr _ LWSYNC = ptext (sLit "\tlwsync")

-- pprInstr _ _ = panic "pprInstr (ppc)"
-- | Shared layout for three-operand integer mnemonics:
-- @\\t\<op\>[i]\\t\<dst\>, \<src\>, \<ri\>@.  The \'i\' suffix is
-- appended exactly when the last operand is an immediate.
pprLogic :: Platform -> LitString -> Reg -> Reg -> RI -> Doc
pprLogic platform op dst src ri
    =  char '\t' <> ptext op <> immSuffix <> char '\t'
    <> pprReg platform dst <> ptext (sLit ", ")
    <> pprReg platform src <> ptext (sLit ", ")
    <> pprRI platform ri
  where
    -- immediate forms of the instruction take an 'i' suffix
    immSuffix = case ri of
                    RIImm _ -> char 'i'
                    RIReg _ -> empty
-- | Shared layout for two-operand mnemonics: @\\t\<op\>\\t\<dst\>, \<src\>@.
pprUnary :: Platform -> LitString -> Reg -> Reg -> Doc
pprUnary platform op dst src
    =  char '\t' <> ptext op <> char '\t'
    <> pprReg platform dst
    <> ptext (sLit ", ")
    <> pprReg platform src
-- | Shared layout for three-operand floating-point mnemonics; the size
-- suffix (via 'pprFSize') marks single-precision variants.
pprBinaryF :: Platform -> LitString -> Size -> Reg -> Reg -> Reg -> Doc
pprBinaryF platform op sz dst srcA srcB
    =  char '\t' <> ptext op <> pprFSize sz <> char '\t'
    <> pprReg platform dst  <> ptext (sLit ", ")
    <> pprReg platform srcA <> ptext (sLit ", ")
    <> pprReg platform srcB
-- | Render a register-or-immediate operand.
pprRI :: Platform -> RI -> Doc
pprRI platform ri = case ri of
    RIReg reg -> pprReg platform reg
    RIImm imm -> pprImm platform imm
-- | Mnemonic suffix for a floating-point operand size: double
-- precision is the unmarked form, single precision appends an \'s\'.
pprFSize :: Size -> Doc
pprFSize sz = case sz of
    FF64 -> empty
    FF32 -> char 's'
    _    -> panic "PPC.Ppr.pprFSize: no match"
-- limit immediate argument for shift instruction to range 0..32
-- (yes, the maximum is really 32, not 31)
limitShiftRI :: RI -> RI
limitShiftRI ri = case ri of
    RIImm (ImmInt i) | i > 32 || i < 0 -> RIImm (ImmInt 32)
    _                                  -> ri
|
mcmaniac/ghc
|
compiler/nativeGen/PPC/Ppr.hs
|
bsd-3-clause
| 22,262
| 371
| 18
| 6,201
| 7,931
| 3,913
| 4,018
| 522
| 72
|
module Embot.Slack where
import ClassyPrelude
import Control.Lens (Getter, view, to)
import Control.Lens.TH (makeLenses, makePrisms)
import Data.Aeson ((.:), (.:?), (.=), (.!=), Value(Object, String), Object, FromJSON(parseJSON), ToJSON(toJSON), object, withText, withObject, withScientific, withText)
import Data.Aeson.Types (Parser)
import qualified Data.HashMap.Strict as HM
import Data.Proxy (Proxy(Proxy))
import Data.Scientific (toBoundedInteger)
import TextShow (FromStringShow(FromStringShow), TextShow(showb), showt)
import TextShow.TH (deriveTextShow)
import Embot.TextShowOrphans ()
-- | Slack message timestamp, kept as the raw text Slack sends so it can
-- be echoed back to the API verbatim.
newtype TS = TS { unTS :: Text } deriving (Eq, Ord)
instance FromJSON TS where
    parseJSON = withText "timestamp" $ pure . TS
deriveTextShow ''TS

-- | Unix time as a 32-bit word; parsing fails (rather than wrapping)
-- when the JSON number does not fit in a 'Word32'.
newtype Time = Time { unTime :: Word32 } deriving (Eq, Ord)
instance FromJSON Time where
    parseJSON = withScientific "time" $ \ s ->
        case toBoundedInteger s of
            Just w32 -> pure (Time w32)
            Nothing  -> fail . unpack $ "out of bound unix time " <> showt (FromStringShow s)
deriveTextShow ''Time

-- | Opaque Slack identifier carrying a phantom tag @a@ (e.g. @ID User@,
-- @ID Channel@) so identifiers of different kinds cannot be mixed up.
newtype ID a = ID { unID :: Text } deriving (Eq, Ord)
instance FromJSON (ID a) where
    parseJSON = withText "id" $ pure . ID
instance ToJSON (ID a) where
    toJSON = String . unID
deriveTextShow ''ID
-- | Render an entity as @name \<ID\>@, given lenses selecting its name
-- and its identifier.
idedName :: Getter s Text -> Getter s (ID k) -> (s -> Text)
idedName nameG idG subject =
    view nameG subject <> " <" <> view (idG . to unID) subject <> ">"
-- | Envelope for Slack web-API replies: an error message, or the
-- successfully parsed payload.
data Response a = ResponseNotOk !Text | ResponseOk a

-- | Request body for the @rtm.start@ call; only the API token is sent.
data RtmStartRequest = RtmStartRequest { rtmStartToken :: Text }

-- | Reply from @rtm.start@: the websocket URL to connect to plus a
-- snapshot of the team's state at connect time.
data RtmStartRp = RtmStartRp
    { _rtmStartUrl      :: Text
    , _rtmStartSelf     :: Self
    , _rtmStartTeam     :: Team
    , _rtmStartUsers    :: [User]
    , _rtmStartChannels :: [Channel]
    , _rtmStartGroups   :: [Group]
    , _rtmStartIMs      :: [IM]
    , _rtmStartBots     :: [Bot] }

-- | Canned 'RtmStartRp' fixture for tests.
testRtmStartRp :: RtmStartRp
testRtmStartRp = RtmStartRp
    { _rtmStartUrl      = "url"
    , _rtmStartSelf     = Self (ID "UEMBOT") "Embot" mempty (Time 0) PresenceActive
    , _rtmStartTeam     = Team (ID "TTEAM") "Team" Nothing "domain" Nothing False mempty
    , _rtmStartUsers    = []
    , _rtmStartChannels = []
    , _rtmStartGroups   = []
    , _rtmStartIMs      = []
    , _rtmStartBots     = [] }
-- | The connecting bot's own identity, as reported by @rtm.start@.
data Self = Self
    { _selfID             :: ID User
    , _selfName           :: Text
    , _selfPrefs          :: Object
    , _selfCreated        :: Time
    , _selfManualPresence :: Presence }

-- | A user's presence state.
data Presence = PresenceActive | PresenceAway

-- | Team-wide metadata and settings.
data Team = Team
    { _teamID                :: ID Team
    , _teamName              :: Text
    , _teamEmailDomain       :: Maybe Text
    , _teamDomain            :: Text
    , _teamMsgEditWindowMins :: Maybe Int
    , _teamOverStorageLimit  :: Bool
    , _teamPrefs             :: Object }

-- | A team member, including admin/restriction flags and (optionally)
-- current presence.
data User = User
    { _userID                :: ID User
    , _userName              :: Text
    , _userRealName          :: Text
    , _userDeleted           :: Bool
    , _userColor             :: Text
    , _userProfile           :: Profile
    , _userIsAdmin           :: Bool
    , _userIsOwner           :: Bool
    , _userIsPrimaryOwner    :: Bool
    , _userIsRestricted      :: Bool
    , _userIsUltraRestricted :: Bool
    , _userHas2fa            :: Bool
    , _userTwoFactorType     :: Maybe Text
    , _userHasFiles          :: Bool
    , _userPresence          :: Maybe Presence }

-- | A user's profile; every field is optional on the wire.
data Profile = Profile
    { _profileFirstName :: Maybe Text
    , _profileLastName  :: Maybe Text
    , _profileRealName  :: Maybe Text
    , _profileEmail     :: Maybe Text
    , _profileSkype     :: Maybe Text
    , _profilePhone     :: Maybe Text }

-- | A public channel, with membership and read-state metadata.
data Channel = Channel
    { _channelID          :: ID Channel
    , _channelName        :: Text
    , _channelCreated     :: Time
    , _channelCreator     :: ID User
    , _channelIsArchived  :: Bool
    , _channelIsGeneral   :: Bool
    , _channelMembers     :: [ID User]
    , _channelTopic       :: Maybe (SlackTracked Text)
    , _channelPurpose     :: Maybe (SlackTracked Text)
    , _channelIsMember    :: Bool
    , _channelLastRead    :: Maybe TS
    , _channelLatest      :: Maybe Message
    , _channelUnreadCount :: Maybe Int }

-- | A private group; mirrors 'Channel' minus the general/member flags.
data Group = Group
    { _groupID          :: ID Group
    , _groupName        :: Text
    , _groupCreated     :: Time
    , _groupCreator     :: ID User
    , _groupIsArchived  :: Bool
    , _groupMembers     :: [ID User]
    , _groupTopic       :: Maybe (SlackTracked Text)
    , _groupPurpose     :: Maybe (SlackTracked Text)
    , _groupIsOpen      :: Bool
    , _groupLastRead    :: Maybe TS
    , _groupLatest      :: Maybe Message
    , _groupUnreadCount :: Maybe Int }

-- | A direct-message conversation with a single user.
data IM = IM
    { _imID            :: ID IM
    , _imUser          :: ID User
    , _imCreated       :: Time
    , _imIsUserDeleted :: Bool
    , _imIsOpen        :: Bool
    , _imLastRead      :: Maybe TS
    , _imLatest        :: Maybe Message
    , _imUnreadCount   :: Maybe Int }

-- | A bot integration; icons are keyed by size name.
data Bot = Bot
    { _botID    :: ID Bot
    , _botName  :: Text
    , _botIcons :: HM.HashMap Text Text }
-- | Uninhabited tag type: an @ID Chat@ may denote a channel, group, or
-- IM (see the 'SlackTyped' instances).
data Chat

-- | A chat message, including edit/delete metadata, attachments,
-- reactions, and pinning information.
data Message = Message
    { _messageChat        :: Maybe (ID Chat)
    , _messageUser        :: ID User
    , _messageSubtype     :: Maybe MessageSubtype
    , _messageText        :: Text
    , _messageTS          :: TS
    , _messageEdited      :: Maybe MessageEdited
    , _messageDeletedTS   :: Maybe TS
    , _messageEventTS     :: Maybe TS
    , _messageHidden      :: Bool
    , _messageAttachments :: [Attachment]
    , _messageInviter     :: Maybe (ID User)
    , _messageIsStarred   :: Maybe Bool
    , _messagePinnedTo    :: [ID Channel]
    , _messageReactions   :: [MessageReaction] }

-- | Build a minimal 'Message' fixture for tests: only the chat, sender,
-- and text are populated; everything else is empty/default.
testMessage :: ID Chat -> ID User -> Text -> Message
testMessage chat from text = Message
    { _messageChat        = Just chat
    , _messageUser        = from
    , _messageSubtype     = Nothing
    , _messageText        = text
    , _messageTS          = TS "0"
    , _messageEdited      = Nothing
    , _messageDeletedTS   = Nothing
    , _messageEventTS     = Nothing
    , _messageHidden      = False
    , _messageAttachments = []
    , _messageInviter     = Nothing
    , _messageIsStarred   = Nothing
    , _messagePinnedTo    = []
    , _messageReactions   = [] }
-- | The @subtype@ of a message event (bot messages, joins/leaves,
-- topic\/purpose\/name changes, archiving, file events, ...).
data MessageSubtype
    = BotMS | MeMS | ChangedMS | DeletedMS
    | ChannelJoinMS | ChannelLeaveMS | ChannelTopicMS | ChannelPurposeMS | ChannelNameMS | ChannelArchiveMS | ChannelUnarchiveMS
    | GroupJoinMS | GroupLeaveMS | GroupTopicMS | GroupPurposeMS | GroupNameMS | GroupArchiveMS | GroupUnarchiveMS
    | FileShareMS | FileCommentMS | FileMentionMS

-- | Who edited a message, and when.
data MessageEdited = MessageEdited
    { _messageEditedUser :: ID User
    , _messageEditedTS   :: TS }

-- | An emoji reaction on a message, with the reacting users.
data MessageReaction = MessageReaction
    { _messageReactionName  :: Text
    , _messageReactionCount :: Int
    , _messageReactionUsers :: [ID User] }

-- | A message attachment; @fallback@ is the only required field.
data Attachment = Attachment
    { _attachmentFallback   :: Text
    , _attachmentColor      :: Maybe Text
    , _attachmentPretext    :: Maybe Text
    , _attachmentAuthorName :: Maybe Text
    , _attachmentAuthorLink :: Maybe Text
    , _attachmentAuthorIcon :: Maybe Text
    , _attachmentTitle      :: Maybe Text
    , _attachmentTitleLink  :: Maybe Text
    , _attachmentText       :: Maybe Text
    , _attachmentFields     :: [AttachmentField] }

-- | A short key/value entry rendered inside an attachment.
data AttachmentField = AttachmentField
    { _fieldTitle :: Text
    , _fieldValue :: Text
    , _fieldShort :: Bool }

-- | A value (e.g. topic or purpose) together with who set it and when.
data SlackTracked a = SlackTracked
    { _trackedValue   :: a
    , _trackedCreator :: ID User
    , _trackedLastSet :: Time }
-- | An uploaded file and all of its URLs, sharing state, and preview
-- data.  '_fileThumb' maps thumbnail keys to URLs (populated from the
-- @thumb_*@ JSON fields in the 'FromJSON' instance).
data File = File
    { _fileID                 :: ID File
    , _fileCreated            :: Time
    , _fileTimestamp          :: Time
    , _fileName               :: Text
    , _fileTitle              :: Text
    , _fileMimeType           :: Text
    , _fileFileType           :: Text
    , _filePrettyType         :: Text
    , _fileUser               :: ID User
    , _fileMode               :: FileMode
    , _fileEditable           :: Bool
    , _fileIsExternal         :: Bool
    , _fileExternalType       :: Text
    , _fileSize               :: Word64
    , _fileURL                :: Text
    , _fileURLDownload        :: Text
    , _fileURLPrivate         :: Text
    , _fileURLPrivateDownload :: Text
    , _fileThumb              :: HM.HashMap Text Text
    , _filePermalink          :: Text
    , _fileEditLink           :: Text
    , _filePreview            :: Text
    , _filePreviewHighlight   :: Text
    , _fileLines              :: Int
    , _fileLinesMore          :: Int
    , _fileIsPublic           :: Bool
    , _filePublicURLShared    :: Bool
    , _fileChannels           :: [ID Channel]
    , _fileGroups             :: [ID Group]
    , _fileIMs                :: [ID IM]
    , _fileInitialComment     :: Maybe Message
    , _fileNumStars           :: Int
    , _fileIsStarred          :: Bool }

-- | How a file is stored/presented by Slack.
data FileMode
    = FileHosted
    | FileExternal
    | FileSnippet
    | FilePost

-- | A comment attached to a file.
data FileComment = FileComment
    { _fileCommentID        :: ID FileComment
    , _fileCommentTimestamp :: Time
    , _fileCommentUser      :: ID User
    , _fileCommentComment   :: Text }
-- | Every event the RTM websocket can deliver, plus the acknowledgement
-- replies ('RtmReplyOk' / 'RtmReplyNotOk') Slack sends for messages the
-- client itself transmitted (matched by sequence number).
data RtmEvent
    = RtmHello
    | RtmReplyOk Word64 (Maybe TS) (Maybe Text)
    | RtmReplyNotOk Word64 Int32 Text
    | RtmMessage Message
    | RtmChannelMarked (ChatMarked Channel)
    | RtmChannelCreated Channel
    | RtmChannelJoined Channel
    | RtmChannelLeft (ID Channel)
    | RtmChannelDeleted (ID Channel)
    | RtmChannelRenamed (ChatRenamed Channel)
    | RtmChannelArchive (ChatUser Channel)
    | RtmChannelUnarchive (ChatUser Channel)
    | RtmChannelHistoryChanged (ChatHistoryChanged Channel)
    | RtmIMCreated IMCreated
    | RtmIMOpen (ChatUser IM)
    | RtmIMClose (ChatUser IM)
    | RtmIMMarked (ChatMarked IM)
    | RtmIMHistoryChanged (ChatHistoryChanged IM)
    | RtmGroupJoined Group
    | RtmGroupLeft (ID Group)
    | RtmGroupOpen (ChatUser Group)
    | RtmGroupClose (ChatUser Group)
    | RtmGroupArchive (ID Group)
    | RtmGroupUnarchive (ID Group)
    | RtmGroupRename (ChatRenamed Group)
    | RtmGroupMarked (ChatMarked Group)
    | RtmGroupHistoryChanged (ChatHistoryChanged Group)
    | RtmFileCreated File
    | RtmFileShared File
    | RtmFileUnshared File
    | RtmFilePublic File
    | RtmFilePrivate (ID File)
    | RtmFileChange File
    | RtmFileDeleted FileDeleted
    | RtmFileCommentAdded FileCommentUpdated
    | RtmFileCommentEdited FileCommentUpdated
    | RtmFileCommentDeleted FileCommentDeleted
    | RtmPresenceChange PresenceChange
    | RtmManualPresenceChange Presence
    | RtmPrefChange PrefChange
    | RtmUserChange User
    | RtmUserTyping UserTyping
    | RtmTeamJoin User
    | RtmStarAdded Star
    | RtmStarRemoved Star
    | RtmEmojiChanged TS
    | RtmCommandsChanged TS
    | RtmTeamPrefChange PrefChange
    | RtmTeamRename Text
    | RtmTeamDomainChange TeamDomainChange
    | RtmEmailDomainChanged EmailDomainChanged
    | RtmBotAdded Bot
    | RtmBotChanged Bot
    | RtmAccountsChanged
-- Payload records for the RTM events above; the phantom parameter @a@
-- records which kind of conversation (Channel/Group/IM) the event is for.

-- | A conversation's read cursor moved.
data ChatMarked a = ChatMarked
    { _chatMarkedChannel :: ID a
    , _chatMarkedTS      :: TS }

-- | A user acted on a conversation (open/close/archive/unarchive).
data ChatUser a = ChatUser
    { _chatUserUser      :: ID User
    , _chatUserChannelID :: ID a }

-- | A conversation was renamed.
data ChatRenamed a = ChatRenamed
    { _chatRenamedChannelID :: ID a
    , _chatRenamedName      :: Text }

-- | Bulk history edit notification.
data ChatHistoryChanged a = ChatHistoryChanged
    { _chatHistoryChangedLatest  :: Text
    , _chatHistoryChangedTS      :: TS
    , _chatHistoryChangedEventTS :: TS }

-- | A new IM conversation was opened.
data IMCreated = IMCreated
    { _imCreatedUser    :: Text
    , _imCreatedChannel :: IM }

-- | A file was deleted (only its ID survives).
data FileDeleted = FileDeleted
    { _fileDeletedFileID  :: Text
    , _fileDeletedEventTS :: Text }

-- | A file comment was added or edited.
data FileCommentUpdated = FileCommentUpdated
    { _fileCommentUpdatedFile    :: File
    , _fileCommentUpdatedComment :: FileComment }

-- | A file comment was removed.
data FileCommentDeleted = FileCommentDeleted
    { _fileCommentDeletedFile    :: File
    , _fileCommentDeletedComment :: ID FileComment }

-- | A user's presence changed.
data PresenceChange = PresenceChange
    { _presenceChangeUser     :: ID User
    , _presenceChangePresence :: Presence }

-- | A (user or team) preference changed; the value is raw JSON.
data PrefChange = PrefChange
    { _prefChangeName  :: Text
    , _prefChangeValue :: Value }

-- | Typing indicator in some conversation.
data UserTyping = UserTyping
    { _userTypingUser    :: ID User
    , _userTypingChannel :: ID Chat }

-- | A star was added to or removed from an item.
data Star = Star
    { _starUser    :: Text
    , _starItem    :: StarItem
    , _starEventTS :: TS }

-- | The different kinds of things that can be starred.
data StarItem
    = StarItemMessage Message
    | StarItemFile File
    | StarItemFileComment File FileComment
    | StarItemChannel (ID Channel)
    | StarItemIM (ID IM)
    | StarItemGroup (ID Group)

-- | The team's Slack domain/URL changed.
data TeamDomainChange = TeamDomainChange
    { _teamDomainChangeUrl    :: Text
    , _teamDomainChangeDomain :: Text }

-- | The team's email domain changed.
data EmailDomainChanged = EmailDomainChanged
    { _emailDomainChangedEmailDomain :: Text
    , _emailDomainChangedEventTS     :: TS }

-- | An outbound RTM message; the sequence number correlates the
-- eventual 'RtmReplyOk' / 'RtmReplyNotOk' acknowledgement.
data RtmSendMessage = RtmSendMessage
    { _sendMessageSeqnum :: Word64
    , _sendMessageChat   :: ID Chat
    , _sendMessageText   :: Text }
-- | Types whose Slack IDs are recognisable by their textual prefix
-- (e.g. channels start with @C@, IMs with @D@), letting us test whether
-- an arbitrary 'ID' denotes that kind of entity.
class SlackTyped a where
    isTypedID :: Proxy a -> ID b -> Bool
instance SlackTyped Channel where
    isTypedID _ = isPrefixOf "C" . unID
instance SlackTyped File where
    -- "F" prefix, but not the "Fc" prefix reserved for file comments
    isTypedID _ (ID t) = "F" `isPrefixOf` t && not ("Fc" `isPrefixOf` t)
instance SlackTyped FileComment where
    isTypedID _ (ID t) = "Fc" `isPrefixOf` t
instance SlackTyped Group where
    isTypedID _ = isPrefixOf "G" . unID
instance SlackTyped Chat where
    -- a Chat is any of the three conversation kinds
    isTypedID _ i
        =  isTypedID (Proxy :: Proxy Channel) i
        || isTypedID (Proxy :: Proxy IM) i
        || isTypedID (Proxy :: Proxy Group) i
instance SlackTyped IM where
    isTypedID _ = isPrefixOf "D" . unID
instance SlackTyped User where
    isTypedID _ = isPrefixOf "U" . unID
-- | Reinterpret an ID under a different tag when its textual prefix
-- matches the target kind; 'Nothing' otherwise.
asTypedID :: forall a b. SlackTyped b => ID a -> Maybe (ID b)
asTypedID i
    | isTypedID (Proxy :: Proxy b) i = Just (ID (unID i))
    | otherwise                      = Nothing
-- Monomorphic specialisations of 'asTypedID' for the three
-- conversation kinds a Chat ID may denote.
asChannelID :: ID Chat -> Maybe (ID Channel)
asChannelID = asTypedID
asGroupID :: ID Chat -> Maybe (ID Group)
asGroupID = asTypedID
asIMID :: ID Chat -> Maybe (ID IM)
asIMID = asTypedID
-- Standalone-derived structural equality for all wire types (used
-- primarily by tests).
deriving instance Eq RtmStartRequest
deriving instance Eq RtmStartRp
deriving instance Eq Self
deriving instance Eq Team
deriving instance Eq User
deriving instance Eq Profile
deriving instance Eq Channel
deriving instance Eq Group
deriving instance Eq IM
deriving instance Eq Bot
deriving instance Eq MessageSubtype
deriving instance Eq MessageReaction
deriving instance Eq Message
deriving instance Eq MessageEdited
deriving instance Eq Attachment
deriving instance Eq AttachmentField
deriving instance Eq a => Eq (SlackTracked a)
deriving instance Eq FileMode
deriving instance Eq File
deriving instance Eq FileComment
deriving instance Eq RtmEvent
deriving instance Eq a => Eq (ChatMarked a)
deriving instance Eq a => Eq (ChatUser a)
deriving instance Eq a => Eq (ChatRenamed a)
deriving instance Eq a => Eq (ChatHistoryChanged a)
deriving instance Eq IMCreated
deriving instance Eq FileDeleted
deriving instance Eq FileCommentUpdated
deriving instance Eq FileCommentDeleted
deriving instance Eq Presence
deriving instance Eq PresenceChange
deriving instance Eq UserTyping
deriving instance Eq PrefChange
deriving instance Eq Star
deriving instance Eq StarItem
deriving instance Eq TeamDomainChange
deriving instance Eq EmailDomainChanged
deriving instance Eq RtmSendMessage
-- Template Haskell: generate lenses for every record (fields prefixed
-- with '_') and prisms for the big sum types.
makeLenses ''RtmStartRequest
makeLenses ''RtmStartRp
makeLenses ''Self
makeLenses ''Team
makeLenses ''User
makeLenses ''Profile
makeLenses ''Channel
makeLenses ''Group
makeLenses ''IM
makeLenses ''Bot
makeLenses ''MessageReaction
makeLenses ''Message
makeLenses ''MessageEdited
makeLenses ''Attachment
makeLenses ''AttachmentField
makeLenses ''SlackTracked
makeLenses ''File
makeLenses ''FileComment
makePrisms ''RtmEvent
makeLenses ''ChatMarked
makeLenses ''ChatUser
makeLenses ''ChatRenamed
makeLenses ''ChatHistoryChanged
makeLenses ''IMCreated
makeLenses ''FileDeleted
makeLenses ''FileCommentUpdated
makeLenses ''FileCommentDeleted
makeLenses ''PresenceChange
makeLenses ''UserTyping
makeLenses ''PrefChange
makeLenses ''Star
makePrisms ''StarItem
makeLenses ''TeamDomainChange
makeLenses ''EmailDomainChanged
makeLenses ''RtmSendMessage
-- | 'Chat' is uninhabited, so its TextShow instance just names the type.
instance TextShow Chat where
    showb _ = "Chat"

-- Template Haskell: derive TextShow for every wire type (for logging).
deriveTextShow ''RtmStartRequest
deriveTextShow ''RtmStartRp
deriveTextShow ''Self
deriveTextShow ''Presence
deriveTextShow ''Team
deriveTextShow ''User
deriveTextShow ''Profile
deriveTextShow ''Channel
deriveTextShow ''Group
deriveTextShow ''IM
deriveTextShow ''Bot
deriveTextShow ''Message
deriveTextShow ''MessageSubtype
deriveTextShow ''MessageEdited
deriveTextShow ''MessageReaction
deriveTextShow ''Attachment
deriveTextShow ''AttachmentField
deriveTextShow ''SlackTracked
deriveTextShow ''File
deriveTextShow ''FileMode
deriveTextShow ''FileComment
deriveTextShow ''RtmEvent
deriveTextShow ''ChatMarked
deriveTextShow ''ChatUser
deriveTextShow ''ChatRenamed
deriveTextShow ''ChatHistoryChanged
deriveTextShow ''IMCreated
deriveTextShow ''FileDeleted
deriveTextShow ''FileCommentUpdated
deriveTextShow ''FileCommentDeleted
deriveTextShow ''PresenceChange
deriveTextShow ''UserTyping
deriveTextShow ''PrefChange
deriveTextShow ''Star
deriveTextShow ''StarItem
deriveTextShow ''TeamDomainChange
deriveTextShow ''EmailDomainChanged
deriveTextShow ''RtmSendMessage
-- | Serialise the @rtm.start@ request: just the token.
instance ToJSON RtmStartRequest where
    toJSON (RtmStartRequest { .. }) = object
        [ ("token", toJSON rtmStartToken) ]

-- | Web-API replies carry @"ok"@; on failure the @error@ field is
-- surfaced, defaulting to "unknown error" when absent.  On success the
-- payload is parsed from the same object.
instance FromJSON a => FromJSON (Response a) where
    parseJSON = withObject "slack reply" $ \ o ->
        o .: "ok" >>= \ case
            True  -> ResponseOk <$> parseJSON (Object o)
            False -> ResponseNotOk <$> o .:? "error" .!= "unknown error"
-- Field order in these applicative parsers must match the record
-- constructor's field order exactly.
instance FromJSON RtmStartRp where
    parseJSON = withObject "rtm.start reply" $ \ o -> RtmStartRp
        <$> o .: "url"
        <*> o .: "self"
        <*> o .: "team"
        <*> o .: "users"
        <*> o .: "channels"
        <*> o .: "groups"
        <*> o .: "ims"
        <*> o .: "bots"

instance FromJSON Self where
    parseJSON = withObject "self object" $ \ o -> Self
        <$> o .: "id"
        <*> o .: "name"
        <*> o .: "prefs"
        <*> o .: "created"
        <*> o .: "manual_presence"

instance FromJSON Presence where
    parseJSON = withText "presence value" $ \ case
        "active" -> pure PresenceActive
        "away"   -> pure PresenceAway
        other    -> fail . unpack $ "unknown presence value " <> other

instance FromJSON Team where
    parseJSON = withObject "team object" $ \ o -> Team
        <$> o .: "id"
        <*> o .: "name"
        -- Slack sends "" for "no email domain"; normalise to Nothing
        <*> (o .:? "email_domain" >>= \ case
                Just "" -> pure Nothing
                Just s  -> pure $ Just s
                Nothing -> pure Nothing)
        <*> o .: "domain"
        -- -1 means "no edit window limit"; normalise to Nothing
        <*> (o .:? "msg_edit_window_mins" >>= \ case
                Just (-1) -> pure Nothing
                Just i    -> pure $ Just i
                Nothing   -> pure Nothing)
        <*> o .: "over_storage_limit"
        <*> o .: "prefs"
-- Conversation/user parsers: optional wire fields get defaults via .!=
-- so partial objects (as sent by some event payloads) still parse.
instance FromJSON User where
    parseJSON = withObject "user object" $ \ o -> User
        <$> o .: "id"
        <*> o .: "name"
        <*> o .: "real_name"
        <*> o .: "deleted"
        <*> o .: "color"
        <*> o .: "profile"
        <*> o .: "is_admin"
        <*> o .: "is_owner"
        <*> o .: "is_primary_owner"
        <*> o .: "is_restricted"
        <*> o .: "is_ultra_restricted"
        <*> o .:? "has_2fa" .!= False
        <*> o .:? "two_factor_type"
        <*> o .:? "has_files" .!= False
        <*> o .:? "presence"

instance FromJSON Profile where
    parseJSON = withObject "user profile object" $ \ o -> Profile
        <$> o .:? "first_name"
        <*> o .:? "last_name"
        <*> o .:? "real_name"
        <*> o .:? "email"
        <*> o .:? "skype"
        <*> o .:? "phone"

instance FromJSON Channel where
    parseJSON = withObject "channel object" $ \ o -> Channel
        <$> o .: "id"
        <*> o .: "name"
        <*> o .: "created"
        <*> o .: "creator"
        <*> o .: "is_archived"
        <*> o .:? "is_general" .!= False
        <*> o .:? "members" .!= []
        <*> o .:? "topic"
        <*> o .:? "purpose"
        <*> o .:? "is_member" .!= False
        <*> o .:? "last_read"
        <*> o .:? "latest"
        <*> o .:? "unread_count"

instance FromJSON Group where
    parseJSON = withObject "group object" $ \ o -> Group
        <$> o .: "id"
        <*> o .: "name"
        <*> o .: "created"
        <*> o .: "creator"
        <*> o .: "is_archived"
        <*> o .:? "members" .!= []
        <*> o .:? "topic"
        <*> o .:? "purpose"
        <*> o .:? "is_open" .!= False
        <*> o .:? "last_read"
        <*> o .:? "latest"
        <*> o .:? "unread_count"

instance FromJSON IM where
    parseJSON = withObject "im object" $ \ o -> IM
        <$> o .: "id"
        <*> o .: "user"
        <*> o .: "created"
        <*> o .:? "is_user_deleted" .!= False
        <*> o .:? "is_open" .!= False
        <*> o .:? "last_read"
        <*> o .:? "latest"
        <*> o .:? "unread_count"

instance FromJSON Bot where
    parseJSON = withObject "bot object" $ \ o -> Bot
        <$> o .: "id"
        <*> o .: "name"
        <*> o .:? "icons" .!= HM.empty
instance FromJSON a => FromJSON (SlackTracked a) where
    parseJSON = withObject "tracked value object" $ \ o -> SlackTracked
        <$> o .: "value"
        <*> o .: "creator"
        <*> o .: "last_set"

instance FromJSON Message where
    parseJSON = withObject "message object" $ \ o -> Message
        <$> o .:? "channel"
        <*> o .: "user"
        <*> o .:? "subtype"
        <*> o .: "text"
        <*> o .: "ts"
        <*> o .:? "edited"
        <*> o .:? "deleted_ts"
        <*> o .:? "event_ts"
        <*> o .:? "hidden" .!= False
        <*> o .:? "attachments" .!= []
        <*> o .:? "inviter"
        <*> o .:? "is_starred"
        <*> o .:? "pinned_to" .!= []
        <*> o .:? "reactions" .!= []

-- | Map each wire subtype string to its constructor; unknown subtypes
-- are a parse failure.
instance FromJSON MessageSubtype where
    parseJSON = withText "message subtype" $ \ case
        "bot_message"       -> pure BotMS
        "me_message"        -> pure MeMS
        "message_changed"   -> pure ChangedMS
        "message_deleted"   -> pure DeletedMS
        "channel_join"      -> pure ChannelJoinMS
        "channel_leave"     -> pure ChannelLeaveMS
        "channel_topic"     -> pure ChannelTopicMS
        "channel_purpose"   -> pure ChannelPurposeMS
        "channel_name"      -> pure ChannelNameMS
        "channel_archive"   -> pure ChannelArchiveMS
        "channel_unarchive" -> pure ChannelUnarchiveMS
        "group_join"        -> pure GroupJoinMS
        "group_leave"       -> pure GroupLeaveMS
        "group_topic"       -> pure GroupTopicMS
        "group_purpose"     -> pure GroupPurposeMS
        "group_name"        -> pure GroupNameMS
        "group_archive"     -> pure GroupArchiveMS
        "group_unarchive"   -> pure GroupUnarchiveMS
        "file_share"        -> pure FileShareMS
        "file_comment"      -> pure FileCommentMS
        "file_mention"      -> pure FileMentionMS
        other               -> fail . unpack $ "unknown message subtype " <> other

instance FromJSON MessageEdited where
    parseJSON = withObject "message edited object" $ \ o -> MessageEdited
        <$> o .: "user"
        <*> o .: "ts"

instance FromJSON MessageReaction where
    parseJSON = withObject "message reaction object" $ \ o -> MessageReaction
        <$> o .: "name"
        <*> o .: "count"
        <*> o .: "users"

instance FromJSON Attachment where
    parseJSON = withObject "attachment object" $ \ o -> Attachment
        <$> o .: "fallback"
        <*> o .:? "color"
        <*> o .:? "pretext"
        <*> o .:? "author_name"
        <*> o .:? "author_link"
        <*> o .:? "author_icon"
        <*> o .:? "title"
        <*> o .:? "title_link"
        <*> o .:? "text"
        <*> o .:? "fields" .!= []

instance FromJSON AttachmentField where
    parseJSON = withObject "attachment field object" $ \ o -> AttachmentField
        <$> o .: "title"
        <*> o .: "value"
        <*> o .: "short"
instance FromJSON File where
    parseJSON = withObject "file object" $ \ o -> File
        <$> o .: "id"
        <*> o .: "created"
        <*> o .: "timestamp"
        <*> o .: "name"
        <*> o .: "title"
        <*> o .: "mimetype"
        <*> o .: "filetype"
        <*> o .: "pretty_type"
        <*> o .: "user"
        <*> o .: "mode"
        <*> o .: "editable"
        <*> o .: "is_external"
        <*> o .: "external_type"
        <*> o .: "size"
        <*> o .: "url"
        <*> o .: "url_download"
        <*> o .: "url_private"
        <*> o .: "url_private_download"
        -- collect every "thumb_*" key into a map keyed by the suffix
        <*> parseJSON (Object . HM.fromList . concatMap (\ (k, v) -> maybeToList . map (, v) . stripPrefix "thumb_" $ k) . HM.toList $ o)
        <*> o .: "permalink"
        <*> o .: "edit_link"
        <*> o .: "preview"
        <*> o .: "preview_highlight"
        <*> o .: "lines"
        <*> o .: "lines_more"
        <*> o .: "is_public"
        <*> o .: "public_url_shared"
        <*> o .:? "channels" .!= []
        <*> o .:? "groups" .!= []
        <*> o .:? "ims" .!= []
        <*> o .:? "initial_comment"
        <*> o .:? "num_stars" .!= 0
        <*> o .:? "is_starred" .!= False

instance FromJSON FileMode where
    parseJSON = withText "file mode" $ \ case
        "hosted"   -> pure FileHosted
        "external" -> pure FileExternal
        "snippet"  -> pure FileSnippet
        "post"     -> pure FilePost
        other      -> fail . unpack $ "unknown file mode " <> other

instance FromJSON FileComment where
    parseJSON = withObject "file comment object" $ \ o -> FileComment
        <$> o .: "id"
        <*> o .: "timestamp"
        <*> o .: "user"
        <*> o .: "comment"
-- | Decode any message arriving on the RTM websocket.
--
-- Replies to requests we previously sent are recognized first by the
-- presence of a @reply_to@ field; everything else is dispatched on its
-- @type@ tag. An unknown @type@ is a parse failure.
instance FromJSON RtmEvent where
  parseJSON v =
    -- 'recur' re-parses the entire original value at whatever type the
    -- chosen constructor needs, so event payload instances can read the
    -- top-level object directly.
    let recur :: FromJSON a => Parser a
        recur = parseJSON v
    in flip (withObject "event object") v $ \ o ->
        o .:? "reply_to" >>= \ case
          Just seqnum ->
            -- A reply to one of our requests: "ok" selects success/failure.
            o .: "ok" >>= \ case
              True -> RtmReplyOk seqnum <$> o .:? "ts" <*> o .:? "text"
              False -> o .: "error" >>= (withObject "RTM error" $ \ o2 -> RtmReplyNotOk seqnum <$> o2 .: "code" <*> o2 .: "msg")
          Nothing ->
            -- A server-initiated event: dispatch on the "type" tag.
            o .: "type" >>= pure . asText >>= \ case
              "hello"                   -> pure RtmHello
              "message"                 -> RtmMessage <$> recur
              "channel_marked"          -> RtmChannelMarked <$> recur
              "channel_created"         -> RtmChannelCreated <$> o .: "channel"
              "channel_joined"          -> RtmChannelJoined <$> o .: "channel"
              "channel_left"            -> RtmChannelLeft <$> o .: "channel"
              "channel_deleted"         -> RtmChannelDeleted <$> o .: "channel"
              "channel_rename"          -> RtmChannelRenamed <$> o .: "channel"
              "channel_archive"         -> RtmChannelArchive <$> recur
              "channel_unarchive"       -> RtmChannelUnarchive <$> recur
              "channel_history_changed" -> RtmChannelHistoryChanged <$> recur
              "im_created"              -> RtmIMCreated <$> recur
              "im_open"                 -> RtmIMOpen <$> recur
              "im_close"                -> RtmIMClose <$> recur
              "im_marked"               -> RtmIMMarked <$> recur
              "im_history_changed"      -> RtmIMHistoryChanged <$> recur
              "group_joined"            -> RtmGroupJoined <$> o .: "channel"
              "group_left"              -> RtmGroupLeft <$> o .: "channel"
              "group_open"              -> RtmGroupOpen <$> recur
              "group_close"             -> RtmGroupClose <$> recur
              "group_archive"           -> RtmGroupArchive <$> o .: "channel"
              "group_unarchive"         -> RtmGroupUnarchive <$> o .: "channel"
              "group_rename"            -> RtmGroupRename <$> o .: "channel"
              "group_marked"            -> RtmGroupMarked <$> recur
              "group_history_changed"   -> RtmGroupHistoryChanged <$> recur
              "file_created"            -> RtmFileCreated <$> o .: "file"
              "file_shared"             -> RtmFileShared <$> o .: "file"
              "file_unshared"           -> RtmFileUnshared <$> o .: "file"
              "file_public"             -> RtmFilePublic <$> o .: "file"
              "file_private"            -> RtmFilePrivate <$> o .: "file"
              "file_change"             -> RtmFileChange <$> o .: "file"
              "file_deleted"            -> RtmFileDeleted <$> recur
              "file_comment_added"      -> RtmFileCommentAdded <$> recur
              "file_comment_edited"     -> RtmFileCommentEdited <$> recur
              "file_comment_deleted"    -> RtmFileCommentDeleted <$> recur
              "presence_change"         -> RtmPresenceChange <$> recur
              "manual_presence_change"  -> RtmManualPresenceChange <$> o .: "presence"
              "user_typing"             -> RtmUserTyping <$> recur
              "pref_change"             -> RtmPrefChange <$> recur
              "user_change"             -> RtmUserChange <$> o .: "user"
              "team_join"               -> RtmTeamJoin <$> o .: "user"
              "star_added"              -> RtmStarAdded <$> recur
              "star_removed"            -> RtmStarRemoved <$> recur
              "emoji_changed"           -> RtmEmojiChanged <$> o .: "event_ts"
              "commands_changed"        -> RtmCommandsChanged <$> o .: "event_ts"
              "team_pref_change"        -> RtmTeamPrefChange <$> recur
              "team_rename"             -> RtmTeamRename <$> o .: "name"
              "team_domain_change"      -> RtmTeamDomainChange <$> recur
              "email_domain_changed"    -> RtmEmailDomainChanged <$> recur
              "bot_added"               -> RtmBotAdded <$> o .: "bot"
              "bot_changed"             -> RtmBotChanged <$> o .: "bot"
              "accounts_changed"        -> pure RtmAccountsChanged
              other -> fail . unpack $ "unknown RTM event type " <> other
-- | A channel / IM / group read-marker was moved: which chat, and up to
-- which timestamp.
instance FromJSON (ChatMarked a) where
  parseJSON = withObject "channel / im / group marked event" $ \ o -> ChatMarked
    <$> o .: "channel"
    <*> o .: "ts"

-- | A (chat, user) pair as carried by several event types.
instance FromJSON (ChatUser a) where
  parseJSON = withObject "channel and user from event" $ \ o -> ChatUser
    <$> o .: "channel"
    <*> o .: "user"

-- | A chat was renamed: its id and the new name.
instance FromJSON (ChatRenamed a) where
  parseJSON = withObject "channel and new name from event" $ \ o -> ChatRenamed
    <$> o .: "id"
    <*> o .: "name"

-- | The history of a chat changed.
instance FromJSON (ChatHistoryChanged a) where
  parseJSON = withObject "channel history changed event" $ \ o -> ChatHistoryChanged
    <$> o .: "latest"
    <*> o .: "ts"
    <*> o .: "event_ts"
-- | A direct message channel was created.
instance FromJSON IMCreated where
  parseJSON = withObject "im created event" $ \ o -> IMCreated
    <$> o .: "user"
    <*> o .: "channel"

-- | A file was deleted; only its id (not the full file) is delivered.
instance FromJSON FileDeleted where
  parseJSON = withObject "file deleted event" $ \ o -> FileDeleted
    <$> o .: "file_id"
    <*> o .: "event_ts"

-- | A comment on a file was added or edited.
instance FromJSON FileCommentUpdated where
  parseJSON = withObject "file comment event" $ \ o -> FileCommentUpdated
    <$> o .: "file"
    <*> o .: "comment"

-- | A comment on a file was deleted.
instance FromJSON FileCommentDeleted where
  parseJSON = withObject "file comment deleted event" $ \ o -> FileCommentDeleted
    <$> o .: "file"
    <*> o .: "comment"

-- | A user's presence changed.
instance FromJSON PresenceChange where
  parseJSON = withObject "presence change event" $ \ o -> PresenceChange
    <$> o .: "user"
    <*> o .: "presence"

-- | A user started typing in a channel.
instance FromJSON UserTyping where
  parseJSON = withObject "user typing event" $ \ o -> UserTyping
    <$> o .: "user"
    <*> o .: "channel"

-- | A preference changed: its name and its new value.
instance FromJSON PrefChange where
  parseJSON = withObject "pref change event" $ \ o -> PrefChange
    <$> o .: "name"
    <*> o .: "value"

-- | Something was starred: by whom, what, and when.
instance FromJSON Star where
  parseJSON = withObject "star event" $ \ o -> Star
    <$> o .: "user"
    <*> o .: "item"
    <*> o .: "event_ts"
-- | Decode a reference to a starrable item, dispatching on its "type"
-- tag; an unknown tag is a parse failure.
instance FromJSON StarItem where
  parseJSON = withObject "starred item reference" $ \ o -> do
    ty <- asText <$> o .: "type"
    case ty of
      "message"      -> StarItemMessage <$> o .: "message"
      "file"         -> StarItemFile <$> o .: "file"
      "file_comment" -> StarItemFileComment <$> o .: "file" <*> o .: "comment"
      "channel"      -> StarItemChannel <$> o .: "channel"
      "im"           -> StarItemIM <$> o .: "im"
      "group"        -> StarItemGroup <$> o .: "group"
      other          -> fail . unpack $ "unknown starrable item type " <> other
-- | The team's URL / domain changed.
instance FromJSON TeamDomainChange where
  parseJSON = withObject "team domain change event" $ \ o -> TeamDomainChange
    <$> o .: "url"
    <*> o .: "domain"

-- | The team's allowed email domain changed.
instance FromJSON EmailDomainChanged where
  parseJSON = withObject "email domain changed event" $ \ o -> EmailDomainChanged
    <$> o .: "email_domain"
    <*> o .: "event_ts"
-- | Encode an outgoing RTM message: fixed type tag plus the sequence
-- number, target chat, and message text.
instance ToJSON RtmSendMessage where
  toJSON (RtmSendMessage seqnum chat message) =
    object
      [ "type"    .= ("message" :: Text)
      , "id"      .= seqnum
      , "channel" .= chat
      , "text"    .= message
      ]
|
Dridus/embot
|
src/Embot/Slack.hs
|
bsd-3-clause
| 31,959
| 0
| 77
| 8,619
| 8,474
| 4,438
| 4,036
| -1
| -1
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE TypeApplications #-}
module Database.PostgreSQL.PQTypes.Format (
PQFormat(..)
, pqFormatP
, pqFormat0P
, pqVariablesP
, (:*:)(..)
) where
import Data.Functor.Identity
import Data.Int
import Data.Proxy
import Data.Time
import Data.Word
import Data.UUID.Types
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
----------------------------------------
-- | Mapping from Haskell types to their libpqtypes format strings.
-- Methods in this class are supposed to be used with the
-- @TypeApplications@ extension.
class PQFormat t where
  -- | Map a type to its libpqtypes format.
  pqFormat :: BS.ByteString
  -- | Map type to its null-terminated libpqtypes format, so
  -- it can safely be used by 'unsafeUseAsCString'. Also, for
  -- a specific type it becomes a top level CAF, therefore it
  -- will be computed by GHC at most once.
  pqFormat0 :: BS.ByteString
  pqFormat0 = pqFormat @t `BS.snoc` '\0'
  -- | Map type to number of type formats it contains. Defaults to 1,
  -- which is right for every scalar type.
  pqVariables :: Int
  pqVariables = 1
-- Proxy-parametrised variants of the class methods, for call sites that
-- cannot use type applications directly.

-- | 'pqFormat' driven by a 'Proxy' argument.
pqFormatP :: forall t . PQFormat t => Proxy t -> BS.ByteString
pqFormatP = const $ pqFormat @t

-- | 'pqFormat0' driven by a 'Proxy' argument.
pqFormat0P :: forall t . PQFormat t => Proxy t -> BS.ByteString
pqFormat0P = const $ pqFormat0 @t

-- | 'pqVariables' driven by a 'Proxy' argument.
pqVariablesP :: forall t . PQFormat t => Proxy t -> Int
pqVariablesP = const $ pqVariables @t
-- CARTESIAN PRODUCT

-- | Cartesian product of rows: pairs two row types so they can be
-- passed / fetched together.
data a :*: b = a :*: b
  deriving (Eq, Ord, Show)
-- | A product's format is the concatenation of its components' formats,
-- and its variable count is the sum of theirs.
instance (PQFormat t1, PQFormat t2) => PQFormat (t1 :*: t2) where
  pqFormat = BS.concat [pqFormat @t1, pqFormat @t2]
  pqVariables = pqVariables @t1 + pqVariables @t2
-- NULLables

-- | 'Maybe' shares the format and variable count of the underlying
-- type.
instance PQFormat t => PQFormat (Maybe t) where
  pqFormat = pqFormat @t
  pqVariables = pqVariables @t
-- NUMERICS

-- | 'Int16' maps to @int2@ (smallint).
instance PQFormat Int16 where
  pqFormat = BS.pack "%int2"

-- | 'Int32' maps to @int4@ (integer).
instance PQFormat Int32 where
  pqFormat = BS.pack "%int4"

-- | 'Int64' maps to @int8@ (bigint).
instance PQFormat Int64 where
  pqFormat = BS.pack "%int8"

-- | Machine 'Int' is declared as @int8@.
-- NOTE(review): this assumes a 64-bit 'Int'; confirm if 32-bit
-- platforms need to be supported.
instance PQFormat Int where
  pqFormat = BS.pack "%int8"

-- | 'Float' maps to @float4@ (real).
instance PQFormat Float where
  pqFormat = BS.pack "%float4"

-- | 'Double' maps to @float8@ (double precision).
instance PQFormat Double where
  pqFormat = BS.pack "%float8"
-- CHAR

instance PQFormat Char where
  pqFormat = BS.pack "%char"

instance PQFormat Word8 where
  pqFormat = BS.pack "%char"

-- VARIABLE-LENGTH CHARACTER TYPES

-- All textual types share the binary text format.
instance PQFormat String where
  pqFormat = BS.pack "%btext"

instance PQFormat T.Text where
  pqFormat = BS.pack "%btext"

instance PQFormat TL.Text where
  pqFormat = BS.pack "%btext"

-- UUID

instance PQFormat UUID where
  pqFormat = BS.pack "%uuid"

-- BYTEA

instance PQFormat BS.ByteString where
  pqFormat = BS.pack "%bytea"

instance PQFormat BSL.ByteString where
  pqFormat = BS.pack "%bytea"
-- DATE

instance PQFormat Day where
  pqFormat = BS.pack "%date"

-- TIME

instance PQFormat TimeOfDay where
  pqFormat = BS.pack "%time"

-- TIMESTAMP

instance PQFormat LocalTime where
  pqFormat = BS.pack "%timestamp"

-- TIMESTAMPTZ

instance PQFormat UTCTime where
  pqFormat = BS.pack "%timestamptz"

instance PQFormat ZonedTime where
  pqFormat = BS.pack "%timestamptz"

-- BOOL

instance PQFormat Bool where
  pqFormat = BS.pack "%bool"

-- TUPLES

-- | The unit type carries no columns: empty format, zero variables.
instance PQFormat () where
  pqFormat = BS.empty
  pqVariables = 0

-- | 'Identity' is transparent: the wrapped type's format, one variable.
instance (
    PQFormat t
  ) => PQFormat (Identity t) where
  pqFormat = pqFormat @t
  pqVariables = 1
-- Tuple instances, for 2 up to 36 components. All follow one mechanical
-- pattern: the format of a tuple is the concatenation of its
-- components' formats, and the variable count equals the number of
-- components. This boilerplate is intentionally uniform — extend it by
-- repeating the pattern, not by special-casing.
instance (
    PQFormat t1, PQFormat t2
  ) => PQFormat (t1, t2) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2
    ]
  pqVariables = 2
instance (
    PQFormat t1, PQFormat t2, PQFormat t3
  ) => PQFormat (t1, t2, t3) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3
    ]
  pqVariables = 3
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4
  ) => PQFormat (t1, t2, t3, t4) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    ]
  pqVariables = 4
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5
  ) => PQFormat (t1, t2, t3, t4, t5) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5
    ]
  pqVariables = 5
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  ) => PQFormat (t1, t2, t3, t4, t5, t6) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6
    ]
  pqVariables = 6
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7
    ]
  pqVariables = 7
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    ]
  pqVariables = 8
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9
    ]
  pqVariables = 9
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10
    ]
  pqVariables = 10
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11
    ]
  pqVariables = 11
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    ]
  pqVariables = 12
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13
    ]
  pqVariables = 13
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14
    ]
  pqVariables = 14
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15
    ]
  pqVariables = 15
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    ]
  pqVariables = 16
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17
    ]
  pqVariables = 17
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18
    ]
  pqVariables = 18
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19
    ]
  pqVariables = 19
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    ]
  pqVariables = 20
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21
    ]
  pqVariables = 21
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22
    ]
  pqVariables = 22
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23
    ]
  pqVariables = 23
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    ]
  pqVariables = 24
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25
    ]
  pqVariables = 25
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26
    ]
  pqVariables = 26
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27
    ]
  pqVariables = 27
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    ]
  pqVariables = 28
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    , pqFormat @t29
    ]
  pqVariables = 29
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    , pqFormat @t29, pqFormat @t30
    ]
  pqVariables = 30
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
  , PQFormat t31
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    , pqFormat @t29, pqFormat @t30, pqFormat @t31
    ]
  pqVariables = 31
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
  , PQFormat t31, PQFormat t32
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    , pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
    ]
  pqVariables = 32
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
  , PQFormat t31, PQFormat t32, PQFormat t33
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    , pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
    , pqFormat @t33
    ]
  pqVariables = 33
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
  , PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    , pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
    , pqFormat @t33, pqFormat @t34
    ]
  pqVariables = 34
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
  , PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    , pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
    , pqFormat @t33, pqFormat @t34, pqFormat @t35
    ]
  pqVariables = 35
instance (
    PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
  , PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
  , PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
  , PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
  , PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
  , PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
  ) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36) where
  pqFormat = BS.concat [
      pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
    , pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
    , pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
    , pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
    , pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
    , pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
    , pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
    , pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
    , pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
    ]
  pqVariables = 36
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37
]
pqVariables = 37
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38
]
pqVariables = 38
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39
]
pqVariables = 39
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
]
pqVariables = 40
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41
]
pqVariables = 41
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42
]
pqVariables = 42
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
, PQFormat t43
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42, t43) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42, pqFormat @t43
]
pqVariables = 43
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
, PQFormat t43, PQFormat t44
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42, t43, t44) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42, pqFormat @t43, pqFormat @t44
]
pqVariables = 44
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
, PQFormat t43, PQFormat t44, PQFormat t45
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42, t43, t44, t45) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42, pqFormat @t43, pqFormat @t44
, pqFormat @t45
]
pqVariables = 45
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
, PQFormat t43, PQFormat t44, PQFormat t45, PQFormat t46
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42, t43, t44, t45, t46) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42, pqFormat @t43, pqFormat @t44
, pqFormat @t45, pqFormat @t46
]
pqVariables = 46
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
, PQFormat t43, PQFormat t44, PQFormat t45, PQFormat t46, PQFormat t47
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42, t43, t44, t45, t46, t47) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42, pqFormat @t43, pqFormat @t44
, pqFormat @t45, pqFormat @t46, pqFormat @t47
]
pqVariables = 47
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
, PQFormat t43, PQFormat t44, PQFormat t45, PQFormat t46, PQFormat t47, PQFormat t48
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42, t43, t44, t45, t46, t47, t48) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42, pqFormat @t43, pqFormat @t44
, pqFormat @t45, pqFormat @t46, pqFormat @t47, pqFormat @t48
]
pqVariables = 48
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
, PQFormat t43, PQFormat t44, PQFormat t45, PQFormat t46, PQFormat t47, PQFormat t48
, PQFormat t49
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42, t43, t44, t45, t46, t47, t48, t49) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42, pqFormat @t43, pqFormat @t44
, pqFormat @t45, pqFormat @t46, pqFormat @t47, pqFormat @t48
, pqFormat @t49
]
pqVariables = 49
instance (
PQFormat t1, PQFormat t2, PQFormat t3, PQFormat t4, PQFormat t5, PQFormat t6
, PQFormat t7, PQFormat t8, PQFormat t9, PQFormat t10, PQFormat t11, PQFormat t12
, PQFormat t13, PQFormat t14, PQFormat t15, PQFormat t16, PQFormat t17, PQFormat t18
, PQFormat t19, PQFormat t20, PQFormat t21, PQFormat t22, PQFormat t23, PQFormat t24
, PQFormat t25, PQFormat t26, PQFormat t27, PQFormat t28, PQFormat t29, PQFormat t30
, PQFormat t31, PQFormat t32, PQFormat t33, PQFormat t34, PQFormat t35, PQFormat t36
, PQFormat t37, PQFormat t38, PQFormat t39, PQFormat t40, PQFormat t41, PQFormat t42
, PQFormat t43, PQFormat t44, PQFormat t45, PQFormat t46, PQFormat t47, PQFormat t48
, PQFormat t49, PQFormat t50
) => PQFormat (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24, t25, t26, t27, t28, t29, t30, t31, t32, t33, t34, t35, t36, t37, t38, t39, t40, t41, t42, t43, t44, t45, t46, t47, t48, t49, t50) where
pqFormat = BS.concat [
pqFormat @t1, pqFormat @t2, pqFormat @t3, pqFormat @t4
, pqFormat @t5, pqFormat @t6, pqFormat @t7, pqFormat @t8
, pqFormat @t9, pqFormat @t10, pqFormat @t11, pqFormat @t12
, pqFormat @t13, pqFormat @t14, pqFormat @t15, pqFormat @t16
, pqFormat @t17, pqFormat @t18, pqFormat @t19, pqFormat @t20
, pqFormat @t21, pqFormat @t22, pqFormat @t23, pqFormat @t24
, pqFormat @t25, pqFormat @t26, pqFormat @t27, pqFormat @t28
, pqFormat @t29, pqFormat @t30, pqFormat @t31, pqFormat @t32
, pqFormat @t33, pqFormat @t34, pqFormat @t35, pqFormat @t36
, pqFormat @t37, pqFormat @t38, pqFormat @t39, pqFormat @t40
, pqFormat @t41, pqFormat @t42, pqFormat @t43, pqFormat @t44
, pqFormat @t45, pqFormat @t46, pqFormat @t47, pqFormat @t48
, pqFormat @t49, pqFormat @t50
]
pqVariables = 50
|
scrive/hpqtypes
|
src/Database/PostgreSQL/PQTypes/Format.hs
|
bsd-3-clause
| 53,042
| 0
| 9
| 11,728
| 23,796
| 12,623
| 11,173
| -1
| -1
|
{-# LANGUAGE PackageImports #-}
-- | Shim module: re-exports "base" 'Data.Typeable.Internal' unchanged, so
-- that this custom-prelude package exposes the same module surface as
-- @base@ itself.
module Data.Typeable.Internal (module M) where
import "base" Data.Typeable.Internal as M
|
silkapp/base-noprelude
|
src/Data/Typeable/Internal.hs
|
bsd-3-clause
| 126
| 0
| 4
| 18
| 23
| 17
| 6
| 3
| 0
|
module Control.Distributed.Process.Internal.Messaging
( sendPayload
, sendBinary
, sendMessage
, disconnect
, closeImplicitReconnections
, impliesDeathOf
, sendCtrlMsg
) where
import Data.Accessor ((^.), (^=))
import Data.Binary (Binary, encode)
import qualified Data.Map as Map (partitionWithKey, elems)
import qualified Data.ByteString.Lazy as BSL (toChunks)
import qualified Data.ByteString as BSS (ByteString)
import Control.Distributed.Process.Serializable ()
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan (writeChan)
import Control.Exception (mask_)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Reader (ask)
import qualified Network.Transport as NT
( Connection
, send
, defaultConnectHints
, connect
, Reliability(ReliableOrdered)
, close
)
import Control.Distributed.Process.Internal.Types
( LocalNode(localEndPoint, localCtrlChan)
, withValidLocalState
, modifyValidLocalState
, modifyValidLocalState_
, Identifier
, localConnections
, localConnectionBetween
, nodeAddress
, nodeOf
, messageToPayload
, createMessage
, NCMsg(..)
, ProcessSignal(Died)
, DiedReason(DiedDisconnect)
, ImplicitReconnect(WithImplicitReconnect)
, NodeId(..)
, ProcessId(..)
, LocalNode(..)
, LocalProcess(..)
, Process(..)
, SendPortId(sendPortProcessId)
, Identifier(NodeIdentifier, ProcessIdentifier, SendPortIdentifier)
)
import Control.Distributed.Process.Serializable (Serializable)
import Data.Foldable (forM_)
--------------------------------------------------------------------------------
-- Message sending --
--------------------------------------------------------------------------------
-- | Send a raw payload over a (possibly cached) connection between two
-- identifiers.  When no connection can be obtained, or the transport-level
-- send fails, a 'Died' control message with reason 'DiedDisconnect' is
-- posted to the local node controller instead.
sendPayload :: LocalNode          -- ^ local node doing the sending
            -> Identifier         -- ^ sender
            -> Identifier         -- ^ recipient
            -> ImplicitReconnect  -- ^ reconnect policy for the connection
            -> [BSS.ByteString]   -- ^ payload chunks
            -> IO ()
sendPayload node from to implicitReconnect payload = do
    mConn <- connBetween node from to implicitReconnect
    ok <- maybe (return False) trySend mConn
    unless ok $
      writeChan (localCtrlChan node) NCMsg
        { ctrlMsgSender = to
        , ctrlMsgSignal = Died (NodeIdentifier $ nodeOf to) DiedDisconnect
        }
  where
    -- Attempt the transport send and collapse the result to a success flag.
    trySend conn = do
      result <- NT.send conn payload
      case result of
        Left _err -> return False
        Right ()  -> return True
-- | Encode a 'Binary' value to lazy-bytestring chunks and send it as a
-- payload (see 'sendPayload').
sendBinary :: Binary a
           => LocalNode
           -> Identifier
           -> Identifier
           -> ImplicitReconnect
           -> a
           -> IO ()
sendBinary node from to implicitReconnect x =
  sendPayload node from to implicitReconnect (BSL.toChunks (encode x))
-- | Wrap a 'Serializable' value in a message and send its payload
-- (see 'sendPayload').
sendMessage :: Serializable a
            => LocalNode
            -> Identifier
            -> Identifier
            -> ImplicitReconnect
            -> a
            -> IO ()
sendMessage node from to implicitReconnect x =
  sendPayload node from to implicitReconnect
              (messageToPayload (createMessage x))
-- | Establish a fresh reliable-ordered connection from the local endpoint
-- to the node hosting @to@, announce the destination identifier as the
-- first payload, and cache the connection in the node state.  Returns
-- 'Nothing' when either the connect or the initial send fails.
setupConnBetween :: LocalNode
                 -> Identifier
                 -> Identifier
                 -> ImplicitReconnect
                 -> IO (Maybe NT.Connection)
setupConnBetween node from to implicitReconnect = do
  connResult <- NT.connect (localEndPoint node)
                           (nodeAddress . nodeOf $ to)
                           NT.ReliableOrdered
                           NT.defaultConnectHints
  case connResult of
    Left _ -> return Nothing
    Right conn -> do
      -- The first message on a new connection carries the encoded
      -- destination identifier.
      sendResult <- NT.send conn (BSL.toChunks (encode to))
      case sendResult of
        Left _ -> return Nothing
        Right () -> do
          -- Record the connection so 'connBetween' can reuse it later.
          modifyValidLocalState_ node $
            return . (localConnectionBetween from to ^= Just (conn, implicitReconnect))
          return (Just conn)
-- | Look up the cached connection between two identifiers; when none
-- exists yet, create (and cache) one via 'setupConnBetween'.
connBetween :: LocalNode
            -> Identifier
            -> Identifier
            -> ImplicitReconnect
            -> IO (Maybe NT.Connection)
connBetween node from to implicitReconnect = do
  cached <- withValidLocalState node $
    return . (^. localConnectionBetween from to)
  maybe (setupConnBetween node from to implicitReconnect)
        (return . Just . fst)
        cached
-- | Drop the cached connection between two identifiers (if any) and close
-- it.  The state update is protected from async exceptions by 'mask_',
-- and the actual 'NT.close' is forked onto a fresh thread so 'disconnect'
-- never blocks on the transport.
disconnect :: LocalNode -> Identifier -> Identifier -> IO ()
disconnect node from to = mask_ $ do
  -- Remove the connection from the node state, returning the close action
  -- to run afterwards (a no-op when there was no connection).
  mio <- modifyValidLocalState node $ \vst ->
    case vst ^. localConnectionBetween from to of
      Nothing ->
        return (vst, return ())
      Just (conn, _) -> do
        return ( localConnectionBetween from to ^= Nothing $ vst
               , NT.close conn
               )
  -- NOTE(review): 'forM_' implies 'modifyValidLocalState' yields a Foldable
  -- (presumably Maybe) of the computed action -- confirm at its definition.
  forM_ mio forkIO
-- | Close every cached connection that uses 'WithImplicitReconnect' and
-- whose remote endpoint is implied dead by @to@ (see 'impliesDeathOf').
-- Affected connections are removed from the node state under 'mask_' and
-- closed asynchronously on a separate thread.
closeImplicitReconnections :: LocalNode -> Identifier -> IO ()
closeImplicitReconnections node to = mask_ $ do
  mconns <- modifyValidLocalState node $ \vst -> do
    -- A connection is affected when it has the implicit-reconnect policy
    -- and its destination identifier dies together with 'to'.
    let shouldClose (_, to') (_, WithImplicitReconnect) = to `impliesDeathOf` to'
        shouldClose _ _ = False
    let (affected, unaffected) =
          Map.partitionWithKey shouldClose (vst ^. localConnections)
    return ( localConnections ^= unaffected $ vst
           , map fst $ Map.elems affected
           )
  forM_ mconns $ forkIO . mapM_ NT.close
-- | @a `impliesDeathOf` b@ is true if the death of @a@ (for instance, a
-- node) implies the death of @b@ (for instance, a process on that node).
-- A node's death implies the death of its processes and their send ports;
-- a process's death implies the death of its send ports.
impliesDeathOf :: Identifier
               -> Identifier
               -> Bool
impliesDeathOf a b = case (a, b) of
  (NodeIdentifier nid, NodeIdentifier nid') ->
    nid' == nid
  (NodeIdentifier nid, ProcessIdentifier pid) ->
    processNodeId pid == nid
  (NodeIdentifier nid, SendPortIdentifier cid) ->
    processNodeId (sendPortProcessId cid) == nid
  (ProcessIdentifier pid, ProcessIdentifier pid') ->
    pid' == pid
  (ProcessIdentifier pid, SendPortIdentifier cid) ->
    sendPortProcessId cid == pid
  (SendPortIdentifier cid, SendPortIdentifier cid') ->
    cid' == cid
  _ ->
    False
-- Send a control message. Evaluates the message to HNF before sending it.
--
-- The message shouldn't produce more errors when further evaluated. If
-- evaluation threw errors the node controller or the receiver would crash when
-- inspecting it.
sendCtrlMsg :: Maybe NodeId -- ^ Nothing for the local node
            -> ProcessSignal -- ^ Message to send
            -> Process ()
sendCtrlMsg mNid signal = do
  proc <- ask
  -- The sender of a control message is always the current process.
  let msg = NCMsg { ctrlMsgSender = ProcessIdentifier (processId proc)
                  , ctrlMsgSignal = signal
                  }
  case mNid of
    Nothing -> do
      -- Local destination: push straight onto the node controller's
      -- channel; ($!) forces the message to WHNF before it is enqueued.
      liftIO $ writeChan (localCtrlChan (processNode proc)) $! msg
    Just nid ->
      -- Remote destination: serialize and ship over the transport with
      -- implicit reconnect enabled.
      liftIO $ sendBinary (processNode proc)
                          (NodeIdentifier (processNodeId $ processId proc))
                          (NodeIdentifier nid)
                          WithImplicitReconnect
                          msg
|
haskell-distributed/distributed-process
|
src/Control/Distributed/Process/Internal/Messaging.hs
|
bsd-3-clause
| 6,903
| 0
| 21
| 1,880
| 1,698
| 896
| 802
| 177
| 3
|
module TestHelpers where
import Data.Decimal
import Data.List
import SplitBill
-- | A 'BillProcessingState' with every field zeroed; the starting
-- accumulator for folding bill items in 'runActions'.
defaultState :: BillProcessingState
defaultState = BillProcessingState 0 0 0 0 0
-- | Fold a list of (amount, bought-for-whom, category) answers into a
-- 'BillProcessingState', starting from 'defaultState', treating @payee@
-- as the payer of every item.
runActions :: WhoPaidQuestion
           -> [ (Decimal, BoughtForWhomQuestion, CategoryQuestion) ]
           -> BillProcessingState
runActions payee = foldl' step defaultState
  where
    -- Apply one bill item to the running state.
    step state (amount, forWhom, category) =
      processItem' state payee amount forWhom category
|
Minoru/split-bill-hs
|
test/TestHelpers.hs
|
bsd-3-clause
| 422
| 0
| 10
| 84
| 118
| 65
| 53
| 12
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.BitPrecise.MergeSort
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Test suite for Data.SBV.Examples.BitPrecise.MergeSort
-----------------------------------------------------------------------------
module TestSuite.BitPrecise.MergeSort where
import Data.SBV
import Data.SBV.Internals
import Data.SBV.Examples.BitPrecise.MergeSort
import SBVTest
-- Test suite
-- | Golden test: compile the 'mergeSort' example to C and compare the
-- generated code against the recorded @merge.gold@ file.
testSuite :: SBVTestSuite
testSuite = mkTestSuite $ \goldCheck -> test [
   "mergeSort" ~: mergeC `goldCheck` "merge.gold"
 ]
 -- 'mergeC' generates C for a 5-element merge sort, with fixed driver
 -- values baked in for the generated test driver.
 where mergeC = compileToC' "merge" "" $ do
         cgSetDriverValues [10, 6, 4, 82, 71]
         xs <- cgInputArr 5 "xs"
         cgOutputArr "ys" (mergeSort xs)
|
Copilot-Language/sbv-for-copilot
|
SBVUnitTest/TestSuite/BitPrecise/MergeSort.hs
|
bsd-3-clause
| 879
| 0
| 12
| 171
| 138
| 82
| 56
| 12
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.S3.GetBucketTagging
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the tag set associated with the bucket.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonS3/latest/API/GetBucketTagging.html AWS API Reference> for GetBucketTagging.
module Network.AWS.S3.GetBucketTagging
(
-- * Creating a Request
getBucketTagging
, GetBucketTagging
-- * Request Lenses
, gbtBucket
-- * Destructuring the Response
, getBucketTaggingResponse
, GetBucketTaggingResponse
-- * Response Lenses
, gbtrsResponseStatus
, gbtrsTagSet
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.S3.Types
import Network.AWS.S3.Types.Product
-- | /See:/ 'getBucketTagging' smart constructor.
newtype GetBucketTagging = GetBucketTagging'
    { _gbtBucket :: BucketName -- ^ bucket whose tag set is requested
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Smart constructor for 'GetBucketTagging'; only the bucket name is
-- required.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gbtBucket'
getBucketTagging
    :: BucketName -- ^ 'gbtBucket'
    -> GetBucketTagging
getBucketTagging bucketName = GetBucketTagging' {_gbtBucket = bucketName}
-- | Lens focusing on the bucket name carried by the request.
gbtBucket :: Lens' GetBucketTagging BucketName
gbtBucket = lens _gbtBucket (\ s a -> s{_gbtBucket = a});
-- Issues an HTTP GET against S3 and parses the XML body: the response
-- status plus the (possibly empty) list of <Tag> elements under <TagSet>.
instance AWSRequest GetBucketTagging where
        type Rs GetBucketTagging = GetBucketTaggingResponse
        request = get s3
        response
          = receiveXML
              (\ s h x ->
                 GetBucketTaggingResponse' <$>
                   (pure (fromEnum s)) <*>
                     (x .@? "TagSet" .!@ mempty >>= parseXMLList "Tag"))
-- No extra headers are sent for this request.
instance ToHeaders GetBucketTagging where
        toHeaders = const mempty
-- The request path is "/<bucket>".
instance ToPath GetBucketTagging where
        toPath GetBucketTagging'{..}
          = mconcat ["/", toBS _gbtBucket]
-- The "tagging" sub-resource selector is passed as a query parameter.
instance ToQuery GetBucketTagging where
        toQuery = const (mconcat ["tagging"])
-- | /See:/ 'getBucketTaggingResponse' smart constructor.
data GetBucketTaggingResponse = GetBucketTaggingResponse'
    { _gbtrsResponseStatus :: !Int   -- ^ HTTP status of the response
    , _gbtrsTagSet         :: ![Tag] -- ^ tags attached to the bucket
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Smart constructor for 'GetBucketTaggingResponse'. The tag set starts
-- empty; both fields can be adjusted afterwards with the lenses below.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gbtrsResponseStatus'
--
-- * 'gbtrsTagSet'
getBucketTaggingResponse
    :: Int -- ^ 'gbtrsResponseStatus'
    -> GetBucketTaggingResponse
getBucketTaggingResponse status =
    GetBucketTaggingResponse'
        { _gbtrsResponseStatus = status
        , _gbtrsTagSet         = mempty
        }
-- | The response status code.
gbtrsResponseStatus :: Lens' GetBucketTaggingResponse Int
gbtrsResponseStatus = lens _gbtrsResponseStatus (\ s a -> s{_gbtrsResponseStatus = a});
-- | The tags attached to the bucket (coerced through '_Coerce').
gbtrsTagSet :: Lens' GetBucketTaggingResponse [Tag]
gbtrsTagSet = lens _gbtrsTagSet (\ s a -> s{_gbtrsTagSet = a}) . _Coerce;
|
fmapfmapfmap/amazonka
|
amazonka-s3/gen/Network/AWS/S3/GetBucketTagging.hs
|
mpl-2.0
| 3,732
| 0
| 14
| 781
| 538
| 322
| 216
| 68
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset for the ZAP Replacer add-on (Sinhala / si-LK locale).
     Declares the standard TOC, Index, Search, and Favorites views. -->
<helpset version="2.0" xml:lang="si-LK">
  <title>Replacer | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
    JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/replacer/src/main/javahelp/org/zaproxy/zap/extension/replacer/resources/help_si_LK/helpset_si_LK.hs
|
apache-2.0
| 970
| 80
| 66
| 159
| 413
| 209
| 204
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ParallelListComp #-}
{-# LANGUAGE PatternGuards #-}
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module CodeWorld.Compile.Base (generateBaseBundle, baseVersion) where
import Data.Char
import Data.Monoid
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import System.Process
import Text.Regex.TDFA
import Text.Regex.TDFA.Text
-- | Write a "LinkBase" module binding every whitelisted Hoogle entry to a
-- fresh name (d0, d1, ...) plus a tiny Main module, so the base libraries
-- can be pre-linked once and reused.
generateBaseBundle ::
       [FilePath] -> [Text] -> String -> FilePath -> FilePath -> IO ()
generateBaseBundle hoogleDBs blacklist mode mainFile baseFile = do
    (imports, exprs) <- readHoogleDBs hoogleDBs blacklist
    -- Parallel list comprehension: zips indices with expressions.
    let defs =
            [ "d" <> T.pack (show i) <> " = " <> e
            | i <- [0 :: Int ..]
            | e <- exprs
            ]
        src = T.unlines ("module LinkBase where" : imports ++ defs)
        -- Entry point depends on the dialect being compiled.
        mainDef =
            case mode of
                "codeworld" -> "program = drawingOf(blank)"
                _ -> "main = return ()"
    T.writeFile baseFile src
    T.writeFile mainFile $
        T.unlines ["module Main where", "import LinkBase", mainDef]
-- | Read every Hoogle database in order and concatenate the resulting
-- (import lines, expressions) pairs.
readHoogleDBs :: [FilePath] -> [Text] -> IO ([Text], [Text])
readHoogleDBs files blacklist =
    fmap mconcat (mapM (\f -> readHoogleDB f blacklist) files)
-- | Read one Hoogle database file and parse its lines, filtering entries
-- found in the blacklist.
readHoogleDB :: FilePath -> [Text] -> IO ([Text], [Text])
readHoogleDB file blacklist =
    parseHoogleDB blacklist Nothing . T.lines <$> T.readFile file
-- | Walk the Hoogle DB lines, tracking the current module (second argument).
-- Returns qualified-import lines and fully-qualified expressions for every
-- identifier/operator not on the blacklist. Clause order matters: the module
-- clause is tried first so a "module" line switches context.
parseHoogleDB :: [Text] -> Maybe Text -> [Text] -> ([Text], [Text])
parseHoogleDB blacklist _ (t:ts)
    | Just mod <- submatch t "^module ([A-Za-z0-9._']+)"
    , not (mod `elem` blacklist) =
        let (i, e) = parseHoogleDB blacklist (Just mod) ts
        in ("import qualified " <> mod : i, e)
parseHoogleDB blacklist (Just mod) (t:ts)
    -- Plain identifier with a type signature.
    | Just ident <- submatch t "^([A-Za-z0-9_']+) :: .*"
    , not (ident `elem` blacklist) =
        let (i, e) = parseHoogleDB blacklist (Just mod) ts
        in (i, mod <> "." <> ident : e)
    -- Operator in parentheses; re-wrapped in parens when qualified.
    | Just sym <- submatch t "^\\(([!#$%&*+./<=>?@\\\\^|-~]+)\\) :: .*"
    , not (sym `elem` blacklist) =
        let (i, e) = parseHoogleDB blacklist (Just mod) ts
        in (i, "(" <> mod <> "." <> sym <> ")" : e)
-- Any other line is skipped without changing the module context.
parseHoogleDB blacklist mmod (_:ts) = parseHoogleDB blacklist mmod ts
parseHoogleDB _ _ [] = ([], [])
-- | Run a regex with exactly one capture group against @t@ and return the
-- captured text, or 'Nothing' when the pattern does not match.
submatch :: Text -> Text -> Maybe Text
submatch t pat =
    case getAllTextSubmatches (t =~ pat) of
        [_, m] -> Just m
        _      -> Nothing
-- | Fingerprint the installed package set by hashing the verbose
-- @ghcjs-pkg list@ output; used to detect when the base bundle is stale.
baseVersion :: IO Text
baseVersion = do
    -- Partial match on 'Just outh' is safe: std_out = CreatePipe guarantees
    -- a handle is returned.
    (_, Just outh, _, pid) <-
        createProcess
            (shell "ghcjs-pkg list -v 2>&1 | sha256sum")
            { std_in = NoStream
            , std_out = CreatePipe
            , std_err = NoStream
            , close_fds = True
            }
    -- Keep only the hex digest (everything before the first space of the
    -- sha256sum output). Strict hGetContents drains the pipe before waiting.
    hash <- T.decodeUtf8 <$> B.takeWhile (/= fromIntegral (ord ' ')) <$> B.hGetContents outh
    -- NOTE(review): the exit code is discarded — presumably intentional,
    -- since a failure would still produce a (different) checksum. Confirm.
    waitForProcess pid
    return hash
|
alphalambda/codeworld
|
codeworld-compiler/src/CodeWorld/Compile/Base.hs
|
apache-2.0
| 3,536
| 1
| 16
| 856
| 1,013
| 536
| 477
| 70
| 2
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Init
( initProject
, InitOpts (..)
) where
import Control.Exception (assert)
import Control.Exception.Enclosed (catchAny)
import Control.Monad
import Control.Monad.Catch (MonadMask, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as L
import qualified Data.Foldable as F
import Data.Function (on)
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IntMap
import Data.List ( intercalate, intersect
, maximumBy)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NonEmpty
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Distribution.PackageDescription as C
import qualified Distribution.Text as C
import qualified Distribution.Version as C
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO
import qualified Paths_stack as Meta
import Stack.BuildPlan
import Stack.Config (getSnapshots,
makeConcreteResolver)
import Stack.Constants
import Stack.Solver
import Stack.Types.BuildPlan
import Stack.Types.FlagName
import Stack.Types.PackageName
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Build
import Stack.Types.Internal (HasLogLevel, HasReExec,
HasTerminal)
import qualified System.FilePath as FP
-- | Generate stack.yaml
initProject
    :: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
       , MonadReader env m, HasConfig env , HasGHCVariant env
       , HasHttpManager env , HasLogLevel env , HasReExec env
       , HasTerminal env)
    => WhichSolverCmd
    -> Path Abs Dir
    -> InitOpts
    -> Maybe AbstractResolver
    -> m ()
initProject whichCmd currDir initOpts mresolver = do
    let dest = currDir </> stackDotYaml
    reldest <- toFilePath `liftM` makeRelativeToCurrentDir dest
    exists <- doesFileExist dest
    -- Refuse to clobber an existing stack.yaml unless --force was given.
    when (not (forceOverwrite initOpts) && exists) $ do
        error ("Stack configuration file " <> reldest <>
               " exists, use 'stack solver' to fix the existing config file or \
               \'--force' to overwrite it.")
    dirs <- mapM (resolveDir' . T.unpack) (searchDirs initOpts)
    let noPkgMsg = "In order to init, you should have an existing .cabal \
                    \file. Please try \"stack new\" instead."
        find  = findCabalFiles (includeSubDirs initOpts)
        dirs' = if null dirs then [currDir] else dirs
    $logInfo "Looking for .cabal or package.yaml files to use to init the project."
    cabalfps <- liftM concat $ mapM find dirs'
    (bundle, dupPkgs) <- cabalPackagesCheck cabalfps noPkgMsg Nothing
    (r, flags, extraDeps, rbundle) <- getDefaultResolver whichCmd dest initOpts
                                                         mresolver bundle
    -- 'ignored' = source packages the chosen resolver could not accommodate;
    -- they are emitted as commented-out entries rather than dropped silently.
    let ignored = Map.difference bundle rbundle
        dupPkgMsg
            | (dupPkgs /= []) =
                "Warning (added by new or init): Some packages were found to \
                \have names conflicting with others and have been commented \
                \out in the packages section.\n"
            | otherwise = ""
        missingPkgMsg
            | (Map.size ignored > 0) =
                "Warning (added by new or init): Some packages were found to \
                \be incompatible with the resolver and have been left commented \
                \out in the packages section.\n"
            | otherwise = ""
        extraDepMsg
            | (Map.size extraDeps > 0) =
                "Warning (added by new or init): Specified resolver could not \
                \satisfy all dependencies. Some external packages have been \
                \added as dependencies.\n"
            | otherwise = ""
        -- Concatenate the non-empty warnings into the user-message field.
        makeUserMsg msgs =
            let msg = concat msgs
            in if msg /= "" then
                  msg <> "You can suppress this message by removing it from \
                         \stack.yaml\n"
                 else ""
        userMsg = makeUserMsg [dupPkgMsg, missingPkgMsg, extraDepMsg]
        gpds = Map.elems $ fmap snd rbundle
        p = Project
            { projectUserMsg = if userMsg == "" then Nothing else Just userMsg
            , projectPackages = pkgs
            , projectExtraDeps = extraDeps
            , projectFlags = PackageFlags (removeSrcPkgDefaultFlags gpds flags)
            , projectResolver = r
            , projectCompiler = Nothing
            , projectExtraPackageDBs = []
            }
        -- Render package paths relative to the project root where possible.
        makeRelDir dir =
            case stripDir currDir dir of
                Nothing
                    | currDir == dir -> "."
                    | otherwise -> assert False $ toFilePath dir
                Just rel -> toFilePath rel
        makeRel = fmap toFilePath . makeRelativeToCurrentDir
        pkgs = map toPkg $ Map.elems (fmap (parent . fst) rbundle)
        toPkg dir = PackageEntry
            { peExtraDep = False
            , peLocation = PLFilePath $ makeRelDir dir
            , peSubdirs = []
            }
        indent t = T.unlines $ fmap (" " <>) (T.lines t)
    $logInfo $ "Initialising configuration using resolver: " <> resolverName r
    $logInfo $ "Total number of user packages considered: "
               <> (T.pack $ show $ (Map.size bundle + length dupPkgs))
    when (dupPkgs /= []) $ do
        $logWarn $ "Warning! Ignoring "
                   <> (T.pack $ show $ length dupPkgs)
                   <> " duplicate packages:"
        rels <- mapM makeRel dupPkgs
        $logWarn $ indent $ showItems rels
    when (Map.size ignored > 0) $ do
        $logWarn $ "Warning! Ignoring "
                   <> (T.pack $ show $ Map.size ignored)
                   <> " packages due to dependency conflicts:"
        rels <- mapM makeRel (Map.elems (fmap fst ignored))
        $logWarn $ indent $ showItems $ rels
    when (Map.size extraDeps > 0) $ do
        $logWarn $ "Warning! " <> (T.pack $ show $ Map.size extraDeps)
                   <> " external dependencies were added."
    $logInfo $
        (if exists then "Overwriting existing configuration file: "
         else "Writing configuration to file: ")
        <> T.pack reldest
    -- Ignored and duplicate package paths are passed along so
    -- renderStackYaml can emit them as commented-out entries.
    liftIO $ L.writeFile (toFilePath dest)
           $ B.toLazyByteString
           $ renderStackYaml p
                 (Map.elems $ fmap (makeRelDir . parent . fst) ignored)
                 (map (makeRelDir . parent) dupPkgs)
    $logInfo "All done."
-- | Render a stack.yaml file with comments, see:
-- https://github.com/commercialhaskell/stack/issues/226
--
-- Known sections get an explanatory comment block before them; ignored and
-- duplicate packages are appended to the packages section as commented-out
-- entries.
renderStackYaml :: Project -> [FilePath] -> [FilePath] -> B.Builder
renderStackYaml p ignoredPackages dupPackages =
    case Yaml.toJSON p of
        Yaml.Object o -> renderObject o
        _ -> assert False $ B.byteString $ Yaml.encode p
  where
    renderObject o =
        B.byteString headerHelp
        <> B.byteString "\n\n"
        <> F.foldMap (goComment o) comments
        <> goOthers (o `HM.difference` HM.fromList comments)
        <> B.byteString footerHelp
    goComment o (name, comment) =
        case HM.lookup name o of
            Nothing -> assert (name == "user-message") mempty
            Just v ->
                B.byteString comment <>
                B.byteString "\n" <>
                B.byteString (Yaml.encode $ Yaml.object [(name, v)]) <>
                -- BUG FIX: the conditional must be parenthesized. Without the
                -- parentheses, Haskell parses the trailing newline into the
                -- else-branch ('else "" <> B.byteString "\n"'), so the
                -- packages section was emitted WITHOUT its separating
                -- newline whenever it had commented-out entries appended.
                (if name == "packages" then commentedPackages else "") <>
                B.byteString "\n"
    -- Turn plain text lines into "# "-prefixed YAML comment lines.
    commentLine l | null l = "#"
                  | otherwise = "# " ++ l
    commentHelp = BC.pack . intercalate "\n" . map commentLine
    commentedPackages =
        let ignoredComment = commentHelp
                [ "The following packages have been ignored due to incompatibility with the"
                , "resolver compiler, dependency conflicts with other packages"
                , "or unsatisfied dependencies."
                ]
            dupComment = commentHelp
                [ "The following packages have been ignored due to package name conflict "
                , "with other packages."
                ]
        in commentPackages ignoredComment ignoredPackages
           <> commentPackages dupComment dupPackages
    -- Emit a comment block followed by "#- pkg" lines; empty when no pkgs.
    commentPackages comment pkgs
        | pkgs /= [] =
            B.byteString comment
            <> B.byteString "\n"
            <> (B.byteString $ BC.pack $ concat
                $ (map (\x -> "#- " ++ x ++ "\n") pkgs) ++ ["\n"])
        | otherwise = ""
    goOthers o
        | HM.null o = mempty
        | otherwise = assert False $ B.byteString $ Yaml.encode o
    -- Per Section Help
    comments =
        [ ("user-message"     , userMsgHelp)
        , ("resolver"         , resolverHelp)
        , ("packages"         , packageHelp)
        , ("extra-deps"       , "# Dependency packages to be pulled from upstream that are not in the resolver\n# (e.g., acme-missiles-0.3)")
        , ("flags"            , "# Override default flag values for local packages and extra-deps")
        , ("extra-package-dbs", "# Extra package databases containing global packages")
        ]
    -- Help strings
    headerHelp = commentHelp
        [ "This file was automatically generated by 'stack init'"
        , ""
        , "Some commonly used options have been documented as comments in this file."
        , "For advanced use and comprehensive documentation of the format, please see:"
        , "http://docs.haskellstack.org/en/stable/yaml_configuration/"
        ]
    resolverHelp = commentHelp
        [ "Resolver to choose a 'specific' stackage snapshot or a compiler version."
        , "A snapshot resolver dictates the compiler version and the set of packages"
        , "to be used for project dependencies. For example:"
        , ""
        , "resolver: lts-3.5"
        , "resolver: nightly-2015-09-21"
        , "resolver: ghc-7.10.2"
        , "resolver: ghcjs-0.1.0_ghc-7.10.2"
        , "resolver:"
        , " name: custom-snapshot"
        , " location: \"./custom-snapshot.yaml\""
        ]
    userMsgHelp = commentHelp
        [ "A warning or info to be displayed to the user on config load." ]
    packageHelp = commentHelp
        [ "User packages to be built."
        , "Various formats can be used as shown in the example below."
        , ""
        , "packages:"
        , "- some-directory"
        , "- https://example.com/foo/bar/baz-0.0.2.tar.gz"
        , "- location:"
        , "   git: https://github.com/commercialhaskell/stack.git"
        , "   commit: e7b331f14bcffb8367cd58fbfc8b40ec7642100a"
        , "- location: https://github.com/commercialhaskell/stack/commit/e7b331f14bcffb8367cd58fbfc8b40ec7642100a"
        , "  extra-dep: true"
        , "  subdirs:"
        , "  - auto-update"
        , "  - wai"
        , ""
        , "A package marked 'extra-dep: true' will only be built if demanded by a"
        , "non-dependency (i.e. a user package), and its test suites and benchmarks"
        , "will not be run. This is useful for tweaking upstream packages."
        ]
    footerHelp =
        let major = toCabalVersion
                    $ toMajorVersion $ fromCabalVersion Meta.version
        in commentHelp
            [ "Control whether we use the GHC we find on the path"
            , "system-ghc: true"
            , ""
            , "Require a specific version of stack, using version ranges"
            , "require-stack-version: -any # Default"
            , "require-stack-version: \""
              ++ C.display (C.orLaterVersion major) ++ "\""
            , ""
            , "Override the architecture used by stack, especially useful on Windows"
            , "arch: i386"
            , "arch: x86_64"
            , ""
            , "Extra directories used by stack for building"
            , "extra-include-dirs: [/path/to/dir]"
            , "extra-lib-dirs: [/path/to/dir]"
            , ""
            , "Allow a newer minor version of GHC than the snapshot specifies"
            , "compiler-check: newer-minor"
            ]
-- | 'getSnapshots' wrapped so that any download failure is reported with
-- actionable guidance before aborting via 'error'.
getSnapshots' :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
              => m Snapshots
getSnapshots' =
    getSnapshots `catchAny` \e -> do
        $logError $
            "Unable to download snapshot list, and therefore could " <>
            "not generate a stack.yaml file automatically"
        $logError $
            "This sometimes happens due to missing Certificate Authorities " <>
            "on your system. For more information, see:"
        $logError ""
        $logError "    https://github.com/commercialhaskell/stack/issues/234"
        $logError ""
        $logError "You can try again, or create your stack.yaml file by hand. See:"
        $logError ""
        $logError "    http://docs.haskellstack.org/en/stable/yaml_configuration/"
        $logError ""
        $logError $ "Exception was: " <> T.pack (show e)
        -- Abort; the real diagnostics were already logged above.
        error ""
-- | Get the default resolver value
getDefaultResolver
    :: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
       , MonadReader env m, HasConfig env , HasGHCVariant env
       , HasHttpManager env , HasLogLevel env , HasReExec env
       , HasTerminal env)
    => WhichSolverCmd
    -> Path Abs File   -- ^ stack.yaml
    -> InitOpts
    -> Maybe AbstractResolver
    -> Map PackageName (Path Abs File, C.GenericPackageDescription)
       -- ^ Src package name: cabal dir, cabal package description
    -> m ( Resolver
         , Map PackageName (Map FlagName Bool)
         , Map PackageName Version
         , Map PackageName (Path Abs File, C.GenericPackageDescription))
       -- ^ ( Resolver
       --   , Flags for src packages and extra deps
       --   , Extra dependencies
       --   , Src packages actually considered)
getDefaultResolver whichCmd stackYaml initOpts mresolver bundle =
    -- Use the explicitly requested resolver when given; otherwise pick the
    -- best-matching snapshot, then verify/refine it against the bundle.
    maybe selectSnapResolver makeConcreteResolver mresolver
    >>= getWorkingResolverPlan whichCmd stackYaml initOpts bundle
    where
        -- TODO support selecting best across regular and custom snapshots
        selectSnapResolver = do
            let gpds = Map.elems (fmap snd bundle)
            snaps <- fmap getRecommendedSnapshots getSnapshots'
            (s, r) <- selectBestSnapshot gpds snaps
            case r of
                BuildPlanCheckFail {} | not (omitPackages initOpts)
                        -> throwM (NoMatchingSnapshot whichCmd snaps)
                _ -> return $ ResolverSnapshot s
-- Iteratively checks the resolver against the bundle, dropping failing
-- packages between rounds until a working plan is found (or everything
-- has been dropped).
getWorkingResolverPlan
    :: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
       , MonadReader env m, HasConfig env , HasGHCVariant env
       , HasHttpManager env , HasLogLevel env , HasReExec env
       , HasTerminal env)
    => WhichSolverCmd
    -> Path Abs File   -- ^ stack.yaml
    -> InitOpts
    -> Map PackageName (Path Abs File, C.GenericPackageDescription)
       -- ^ Src package name: cabal dir, cabal package description
    -> Resolver
    -> m ( Resolver
         , Map PackageName (Map FlagName Bool)
         , Map PackageName Version
         , Map PackageName (Path Abs File, C.GenericPackageDescription))
       -- ^ ( Resolver
       --   , Flags for src packages and extra deps
       --   , Extra dependencies
       --   , Src packages actually considered)
getWorkingResolverPlan whichCmd stackYaml initOpts bundle resolver = do
    $logInfo $ "Selected resolver: " <> resolverName resolver
    go bundle
    where
        go info = do
            eres <- checkBundleResolver whichCmd stackYaml initOpts info resolver
            -- if some packages failed try again using the rest
            case eres of
                Right (f, edeps)-> return (resolver, f, edeps, info)
                Left ignored
                    | Map.null available -> do
                        $logWarn "*** Could not find a working plan for any of \
                                 \the user packages.\nProceeding to create a \
                                 \config anyway."
                        return (resolver, Map.empty, Map.empty, Map.empty)
                    | otherwise -> do
                        when ((Map.size available) == (Map.size info)) $
                            error "Bug: No packages to ignore"
                        if length ignored > 1 then do
                          $logWarn "*** Ignoring packages:"
                          $logWarn $ indent $ showItems ignored
                        else
                          $logWarn $ "*** Ignoring package: "
                                 <> (T.pack $ packageNameString (head ignored))
                        go available
                    where
                      indent t = T.unlines $ fmap (" " <>) (T.lines t)
                      isAvailable k _ = not (k `elem` ignored)
                      available = Map.filterWithKey isAvailable info
-- Check one candidate resolver against the package bundle. Returns
-- Right (flags, extra deps) on success, or Left (packages to drop) when
-- --omit-packages allows continuing; otherwise throws.
checkBundleResolver
    :: ( MonadBaseControl IO m, MonadIO m, MonadLogger m, MonadMask m
       , MonadReader env m, HasConfig env , HasGHCVariant env
       , HasHttpManager env , HasLogLevel env , HasReExec env
       , HasTerminal env)
    => WhichSolverCmd
    -> Path Abs File   -- ^ stack.yaml
    -> InitOpts
    -> Map PackageName (Path Abs File, C.GenericPackageDescription)
       -- ^ Src package name: cabal dir, cabal package description
    -> Resolver
    -> m (Either [PackageName] ( Map PackageName (Map FlagName Bool)
                               , Map PackageName Version))
checkBundleResolver whichCmd stackYaml initOpts bundle resolver = do
    result <- checkResolverSpec gpds Nothing resolver
    case result of
        BuildPlanCheckOk f -> return $ Right (f, Map.empty)
        -- Partial plan: run the solver if allowed, otherwise drop or fail.
        BuildPlanCheckPartial f e
            | needSolver resolver initOpts -> do
                warnPartial result
                solve f
            | omitPackages initOpts -> do
                warnPartial result
                $logWarn "*** Omitting packages with unsatisfied dependencies"
                return $ Left $ failedUserPkgs e
            | otherwise -> throwM $ ResolverPartial whichCmd resolver (show result)
        BuildPlanCheckFail _ e _
            | omitPackages initOpts -> do
                $logWarn $ "*** Resolver compiler mismatch: "
                           <> resolverName resolver
                $logWarn $ indent $ T.pack $ show result
                return $ Left $ failedUserPkgs e
            | otherwise -> throwM $ ResolverMismatch whichCmd resolver (show result)
    where
      indent t = T.unlines $ fmap (" " <>) (T.lines t)
      warnPartial res = do
          $logWarn $ "*** Resolver " <> resolverName resolver
                      <> " will need external packages: "
          $logWarn $ indent $ T.pack $ show res
      failedUserPkgs e = Map.keys $ Map.unions (Map.elems (fmap deNeededBy e))
      gpds = Map.elems (fmap snd bundle)
      -- Ask the external solver for a consistent plan; on failure, try to
      -- identify a single removable source package.
      solve flags = do
          let cabalDirs = map parent (Map.elems (fmap fst bundle))
              srcConstraints = mergeConstraints (gpdPackages gpds) flags
          eresult <- solveResolverSpec stackYaml cabalDirs
                                       (resolver, srcConstraints, Map.empty)
          case eresult of
              Right (src, ext) ->
                  return $ Right (fmap snd (Map.union src ext), fmap fst ext)
              Left packages
                  | omitPackages initOpts, srcpkgs /= []-> do
                      pkg <- findOneIndependent srcpkgs flags
                      return $ Left [pkg]
                  | otherwise -> throwM (SolverGiveUp giveUpMsg)
                  where srcpkgs = intersect (Map.keys bundle) packages
      -- among a list of packages find one on which none among the rest of the
      -- packages depend. This package is a good candidate to be removed from
      -- the list of packages when there is conflict in dependencies among this
      -- set of packages.
      findOneIndependent packages flags = do
          platform <- asks (configPlatform . getConfig)
          (compiler, _) <- getResolverConstraints stackYaml resolver
          let getGpd pkg = snd (fromJust (Map.lookup pkg bundle))
              getFlags pkg = fromJust (Map.lookup pkg flags)
              deps pkg = gpdPackageDeps (getGpd pkg) compiler platform
                                        (getFlags pkg)
              allDeps = concat $ map (Map.keys . deps) packages
              isIndependent pkg = not $ pkg `elem` allDeps
              -- prefer to reject packages in deeper directories
              path pkg = fst (fromJust (Map.lookup pkg bundle))
              pathlen = length . FP.splitPath . toFilePath . path
              maxPathlen = maximumBy (compare `on` pathlen)
          return $ maxPathlen (filter isIndependent packages)
      giveUpMsg = concat
          [ " - Use '--omit-packages to exclude conflicting package(s).\n"
          , " - Tweak the generated "
          , toFilePath stackDotYaml <> " and then run 'stack solver':\n"
          , " - Add any missing remote packages.\n"
          , " - Add extra dependencies to guide solver.\n"
          , " - Update external packages with 'stack update' and try again.\n"
          ]
      -- Solver is needed when explicitly requested or when the resolver is
      -- a bare compiler (no snapshot package set).
      needSolver _ (InitOpts {useSolver = True}) = True
      needSolver (ResolverCompiler _)  _ = True
      needSolver _ _ = False
-- | Order candidate snapshots for plan selection: latest LTS first, then
-- the latest nightly, then the remaining LTS releases newest-first. When
-- no LTS exists at all, the nightly is the only candidate.
getRecommendedSnapshots :: Snapshots -> (NonEmpty SnapName)
getRecommendedSnapshots snapshots =
    maybe (nightly :| [])
          (\(latest :| older) -> latest :| (nightly : older))
          (NonEmpty.nonEmpty ltss)
  where
    ltss = map (uncurry LTS) (IntMap.toDescList $ snapshotsLts snapshots)
    nightly = Nightly (snapshotsNightly snapshots)
-- | Options driving 'initProject' behaviour.
data InitOpts = InitOpts
    { searchDirs     :: ![T.Text]
      -- ^ List of sub directories to search for .cabal files
    , useSolver      :: Bool
      -- ^ Use solver to determine required external dependencies
    , omitPackages   :: Bool
      -- ^ Exclude conflicting or incompatible user packages
    , forceOverwrite :: Bool
      -- ^ Overwrite existing stack.yaml
    , includeSubDirs :: Bool
      -- ^ If True, include all .cabal files found in any sub directories
    }
|
AndrewRademacher/stack
|
src/Stack/Init.hs
|
bsd-3-clause
| 23,369
| 0
| 21
| 7,979
| 4,712
| 2,413
| 2,299
| 433
| 6
|
-- |
-- Module : Debian.Package.Data.Packages
-- Copyright : 2014-2015 Kei Hibino
-- License : BSD3
--
-- Maintainer : ex8k.hibino@gmail.com
-- Stability : experimental
-- Portability : portable
--
-- This module provides data types of debian packages meta information.
module Debian.Package.Data.Packages
( DebianVersion, versionFromHackageVersion, readDebianVersion, origVersion', isNative'
, Source, mkSource, sourceName, version, origVersion, isNative
, origArchiveName, nativeArchiveName, sourceDirName, deriveHackageVersion
, parseChangeLog
, PackageType (..), takeChangesType, isSourcePackage, isBinaryPackage
, Control (..), parseControlEntry, parseControl
, HaskellPackage, hackage, package
, haskellPackageDefault, haskellPackageFromPackage
) where
import Control.Applicative ((<$>), pure, (<*>), (*>), (<*), empty, (<|>), many, some, optional)
import Control.Monad.Trans.State (StateT, runStateT, get, put)
import Data.Maybe (listToMaybe, maybeToList, mapMaybe)
import Data.Char (isSpace, isDigit)
import Data.Version (Version (Version, versionBranch), showVersion)
import Data.List.Split (splitOn)
import System.FilePath ((<.>), takeFileName, splitExtension)
import Debian.Package.Data.Hackage
(HackageVersion, mkHackageVersion', hackageVersionNumbers,
Hackage, mkHackageDefault, NameRule (Simple), debianNamesFromSourceName)
-- | Minimal parser: a 'String' state threaded through 'Maybe' for failure.
type Parser = StateT String Maybe

-- | Consume one character when it satisfies the predicate; fail otherwise
-- (including at end of input).
satisfy :: (Char -> Bool) -> Parser Char
satisfy p = do
  s <- get
  case s of
    c:rest | p c -> put rest *> pure c
    _            -> empty
-- | Peek at the remaining input without consuming it (currently unused).
_look :: Parser String
_look = get

-- | Succeed only when no input remains.
eof :: Parser ()
eof = get >>= \s -> if null s then pure () else empty
-- | Run a parser, yielding the result and unconsumed input on success.
runParser :: Parser a -> String -> Maybe (a, String)
runParser = runStateT

-- | Accept any single character.
anyChar :: Parser Char
anyChar = satisfy (\_ -> True)

-- | Accept exactly the given character.
char :: Char -> Parser Char
char c = satisfy (c ==)

-- | Accept any character except the given one.
notChar :: Char -> Parser Char
notChar c = satisfy (c /=)

-- | Accept a single space character.
space :: Parser Char
space = char ' '

-- | Accept a decimal digit.
digit :: Parser Char
digit = satisfy isDigit

-- | Parse one or more digits as an 'Int'.
int :: Parser Int
int = fmap read (some digit)

-- | Match the given string character by character.
string :: String -> Parser String
string = traverse char
-- | Version type for Debian
-- 'DebianNative' carries an optional NMU (non-maintainer upload) number;
-- 'DebianNonNative' carries the debian revision string after the dash.
data DebianVersion
  = DebianNative Version (Maybe Int)
  | DebianNonNative Version String
-- Build a native version from plain version-number components.
debianNativeVersion :: [Int] -> Maybe Int -> DebianVersion
debianNativeVersion v = DebianNative (Version v [])
-- Build a non-native version from components plus a revision string.
debianNonNativeVersion :: [Int] -> String -> DebianVersion
debianNonNativeVersion v = DebianNonNative (Version v [])
-- | Make a Debian version from a hackage version. A supplied revision
-- string produces a non-native version; its absence a native one.
versionFromHackageVersion :: HackageVersion -> Maybe String -> DebianVersion
versionFromHackageVersion hv mrev =
    maybe (debianNativeVersion numbers Nothing)
          (debianNonNativeVersion numbers)
          mrev
  where numbers = hackageVersionNumbers hv
-- | Version without debian revision
origVersion' :: DebianVersion -> Version
origVersion' (DebianNative    v _) = v
origVersion' (DebianNonNative v _) = v

-- | Is debian-native or not
isNative' :: DebianVersion -> Bool
isNative' (DebianNative    _ _) = True
isNative' (DebianNonNative _ _) = False
-- Parse dot-separated version numbers (e.g. "1.23.3") into a 'Version'.
parseVersion' :: Parser Version
parseVersion' =
  Version
  <$> ((:) <$> int <*> many (char '.' *> int))
  <*> pure []
-- Parse a full Debian version: numbers, then either "-<revision>"
-- (non-native) or an optional "+nmu<n>" suffix (native). Alternative order
-- matters: the dash branch is tried first.
parseDebianVersion :: Parser DebianVersion
parseDebianVersion = do
  v <- parseVersion'
  (DebianNonNative v <$> (char '-' *> some (satisfy (not . isSpace)))
   <|>
   DebianNative v <$> optional (string "+nmu" *> int))
-- Manual smoke test; evaluate in GHCi.
_testParseDebianVersion :: [Maybe (DebianVersion, String)]
_testParseDebianVersion =
  [ runParser parseDebianVersion s | s <- [ "1.23.3-4", "1.23", "12.3+nmu2" ] ]
-- Renders in standard Debian form: "1.2.3-rev" or "1.2.3[+nmuN]".
instance Show DebianVersion where
  show = d  where
    d (DebianNative v nr)   = showVersion v ++ maybe "" (("+nmu" ++) . show) nr
    d (DebianNonNative v r) = showVersion v ++ '-': r
-- Read is the inverse of Show, implemented via 'parseDebianVersion'.
instance Read DebianVersion where
  readsPrec _ = maybeToList . runParser parseDebianVersion
-- | Parse a whole string with 'reads'; succeed only when the entire input
-- is consumed, otherwise return 'Nothing'.
readMaybe' :: Read a => String -> Maybe a
readMaybe' s = listToMaybe [x | (x, "") <- reads s]
-- | Try to read debian package version
-- Returns 'Nothing' when the string is not fully consumed by the parser.
readDebianVersion :: String -> Maybe DebianVersion
readDebianVersion = readMaybe'
-- | Debian source package type, name with version
data Source = Source String DebianVersion  deriving Show
-- | Make 'Source'
mkSource :: String -> DebianVersion -> Source
mkSource = Source
-- | Source package name of 'Source'
sourceName :: Source -> String
sourceName (Source n _) = n
-- | Debian version of 'Source'
version :: Source -> DebianVersion
version (Source _ v) = v
-- | Version without debian revision
origVersion :: Source -> Version
origVersion = origVersion' . version
-- | Is debian-native or not
isNative :: Source -> Bool
isNative = isNative' . version
-- | Original source archive basename
-- e.g. "foo_1.2.3.orig.tar.gz" (upstream version only, no revision).
origArchiveName :: Source -> FilePath
origArchiveName pkg = sourceName pkg ++ '_' : showVersion (origVersion pkg) <.> "orig" <.> "tar" <.> "gz"
-- | Debian native archive basename
-- e.g. "foo_1.2.3+nmu1.tar.gz" (full Debian version).
nativeArchiveName :: Source -> String
nativeArchiveName pkg = sourceName pkg ++ '_' : show (version pkg) <.> "tar" <.> "gz"
-- | Source directory basename
-- e.g. "foo-1.2.3".
sourceDirName :: Source -> FilePath
sourceDirName pkg = sourceName pkg ++ '-' : showVersion (origVersion pkg)
-- | Try to make 'HackageVersion' from 'Source'
-- NOTE(review): the trailing 'where' below has an empty binding group —
-- legal Haskell, but it looks like a leftover; confirm it can be dropped.
deriveHackageVersion :: Source -> HackageVersion
deriveHackageVersion = mkHackageVersion' . versionBranch . origVersion  where
-- Split one "Key: value" line into a (key, value) pair; fails when no
-- colon is present. Leading spaces after the colon are skipped.
parseColonLine :: String -> Maybe (String, String)
parseColonLine =
  (fmap fst .) . runParser $
  (,) <$> some (notChar ':') <*> (char ':' *> many space *> many anyChar <* eof)
-- | Try to generate 'Source' from debian changelog string
parseChangeLog :: String       -- ^ dpkg-parsechangelog result string
               -> Maybe Source -- ^ Source structure
parseChangeLog log' = do
  deb  <- mayDebSrc
  dver <- mayDebVer
  return $ mkSource deb dver
  where
    -- All "Key: value" pairs from the changelog output.
    pairs = mapMaybe parseColonLine . lines $ log'
    lookup' = (`lookup` pairs)
    mayDebSrc = lookup' "Source"
    mayDebVer = do
      dverS <- lookup' "Version"
      readDebianVersion dverS
-- | Debian package types
-- 'PackageArch' carries the architecture string when known.
data PackageType
  = PackageArch (Maybe String)
  | PackageAll
  | PackageSource
  deriving (Eq, Show)
-- | Take 'PackageType' from debian .changes file path
-- Expects a basename of the form "name_version_arch.changes"; the third
-- underscore-separated field selects the package type.
takeChangesType :: FilePath -> Maybe PackageType
takeChangesType path =
    case splitExtension (takeFileName path) of
      (base, ".changes") ->
        case splitOn "_" base of
          [_, _, "all"]    -> Just PackageAll
          [_, _, "source"] -> Just PackageSource
          [_, _, arch]     -> Just (PackageArch (Just arch))
          _                -> Nothing
      _ -> Nothing
-- | Test package type is source package.
isSourcePackage :: PackageType -> Bool
isSourcePackage PackageSource   = True
isSourcePackage PackageAll      = False
isSourcePackage (PackageArch _) = False

-- | Test package type is binary package.
isBinaryPackage :: PackageType -> Bool
isBinaryPackage = not . isSourcePackage
-- | Type for debian control meta-data.
data Control =
  Control
  { controlSource :: String    -- ^ source package name
  , controlArch   :: [String]  -- ^ architecture-specific binary packages
  , controlAll    :: [String]  -- ^ architecture-independent binary packages
  } deriving (Eq, Show)
-- | Parse an package entry in control file.
-- The input is one paragraph (list of lines). Tries the binary-package shape
-- first (Architecture + Package); layout makes the outer '<|>' fall back to
-- the source-package shape (Source) when either field is missing.
parseControlEntry :: [String] -> Maybe (PackageType, String)
parseControlEntry b =
    do a <- lookup' "Architecture"
       p <- lookup' "Package"
       Just $ if a == "all"
              then (PackageAll, p)
              else (PackageArch $ Just a, p)
  <|>
    do s <- lookup' "Source"
       Just (PackageSource, s)
  where ps = mapMaybe parseColonLine b
        lookup' = (`lookup` ps)
-- | Split tagged entries into (sources, arch-specific, arch-independent),
-- preserving the input order within each group.
packagesPartition :: [(PackageType, a)] -> ([a], [a], [a])
packagesPartition = foldr step ([], [], [])
  where
    -- Lazy (~) patterns keep the fold productive, matching the original
    -- recursion's laziness.
    step (PackageSource, a) ~(p, q, r) = (a : p, q, r)
    step (PackageArch _, a) ~(p, q, r) = (p, a : q, r)
    step (PackageAll   , a) ~(p, q, r) = (p, q, a : r)
-- | Parse debian control file into package list.
-- Paragraphs are separated by blank lines; the first Source paragraph
-- provides the source name. Fails ('Nothing') when no Source entry exists.
parseControl :: String -> Maybe Control
parseControl in' = do
  let (src, arch, all') =
        packagesPartition . mapMaybe parseControlEntry
        . filter (not . null) . splitOn [""] . lines $ in'
  s <- listToMaybe src
  Just $ Control s arch all'
-- | Debian source package type for Haskell
data HaskellPackage = HaskellPackage Hackage Source deriving Show
-- | 'Hackage' meta-info of 'HaskellPackage'
hackage :: HaskellPackage -> Hackage
hackage (HaskellPackage h _) = h
-- | Debian source package meta-info of 'HaskellPackage'
package :: HaskellPackage -> Source
package (HaskellPackage _ p) = p
-- | Generate 'HaskellPackage' type from debian package name and version
--   using 'NameRule'
haskellPackageDefault :: NameRule
                      -> String         -- ^ Hackage name string
                      -> HackageVersion -- ^ Version of hackage
                      -> Maybe String   -- ^ Debian revision String
                      -> HaskellPackage -- ^ Result structure
haskellPackageDefault rule hname hver mayDevRev =
  HaskellPackage
  (mkHackageDefault rule hname hver)
  (mkSource sn (versionFromHackageVersion hver mayDevRev))
  where
    -- Debian source name derived from the hackage name via the rule.
    (sn, _) = debianNamesFromSourceName rule hname
-- | Generate 'HaskellPackage' with hackage name and debian package meta-info
haskellPackageFromPackage :: String         -- ^ Hackage name string
                          -> Source         -- ^ Debian package meta info
                          -> HaskellPackage -- ^ Result
haskellPackageFromPackage hname pkg =
    HaskellPackage (mkHackageDefault Simple hname (deriveHackageVersion pkg)) pkg
|
khibino/haskell-debian-build.rebuild
|
src/Debian/Package/Data/Packages.hs
|
bsd-3-clause
| 9,572
| 0
| 17
| 2,108
| 2,644
| 1,437
| 1,207
| 207
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.