code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE NoImplicitPrelude, GeneralizedNewtypeDeriving, TemplateHaskell, PolymorphicComponents, ConstraintKinds, RecordWildCards #-}
module Lamdu.Sugar.Convert.Monad
( TagParamInfo(..)
, TagFieldParam(..), _TagFieldParam, _CollidingFieldParam
, OuterScopeInfo(..), osiPos, osiVarsUnderPos
, ScopeInfo(..), siTagParamInfos, siNullParams, siLetItems, siOuter
, Context(..)
, scInferContext, scReinferCheckRoot, scGlobalsInScope
, scCodeAnchors, scScopeInfo, scNominalsMap
, ConvertM(..), run
, readContext, liftTransaction, local
, codeAnchor
, getP
, convertSubexpression
, typeProtectTransaction, typeProtectedSetToVal, wrapOnTypeError
) where
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Reader (ReaderT, runReaderT)
import qualified Control.Monad.Trans.Reader as Reader
import Data.Map (Map)
import Data.Set (Set)
import qualified Data.Store.Property as Property
import Data.Store.Transaction (Transaction)
import qualified Data.Store.Transaction as Transaction
import Lamdu.Calc.Type.Nominal (Nominal(..))
import qualified Lamdu.Calc.Type as T
import qualified Lamdu.Calc.Val as V
import Lamdu.Calc.Val.Annotated (Val)
import qualified Lamdu.Data.Anchors as Anchors
import qualified Lamdu.Data.Ops as DataOps
import qualified Lamdu.Expr.IRef as ExprIRef
import qualified Lamdu.Infer as Infer
import qualified Lamdu.Sugar.Convert.Input as Input
import Lamdu.Sugar.Internal
import qualified Lamdu.Sugar.Types as Sugar
import Prelude.Compat
-- | Info about a function parameter that is addressed via a record
-- field tag: the variable it was sugared from, and the sugar entity
-- to jump to when navigating to the parameter.
data TagParamInfo = TagParamInfo
    { tpiFromParameters :: V.Var -- TODO: Rename "From" to something else
    , tpiJumpTo :: Sugar.EntityId
    }

-- | A record-field parameter is either sugared normally, or left
-- unsugared because its tag collides with another name in scope.
data TagFieldParam
    = -- Sugared field param:
      TagFieldParam TagParamInfo
    | -- Colliding (and thus non-sugared) field param
      CollidingFieldParam TagParamInfo

-- | Information about the position surrounding the expression being
-- converted (used e.g. by "extract to let").
data OuterScopeInfo m = OuterScopeInfo
    { _osiPos :: Maybe (ExprIRef.ValIProperty m)
    , -- The vars that disappear from scope when moving up to pos
      _osiVarsUnderPos :: [V.Var]
    }
Lens.makeLenses ''OuterScopeInfo

-- | Scope-dependent state threaded through sugar conversion.
data ScopeInfo m = ScopeInfo
    { _siTagParamInfos :: Map T.Tag TagFieldParam -- tag uuids
    , _siNullParams :: Set V.Var
    , -- Each let item potentially has an inline action
      _siLetItems :: Map V.Var (Sugar.BinderVarInline m)
      -- TODO: siTagParamInfos needs a reverse-lookup map too
    , -- Where "extract to let" goes:
      _siOuter :: OuterScopeInfo m
    }
Lens.makeLenses ''ScopeInfo
-- | Shorthand for the store transaction monad.
type T = Transaction

-- | The sugar-conversion monad: a reader of 'Context' layered over a
-- 'Transaction'.
newtype ConvertM m a = ConvertM (ReaderT (Context m) (T m) a)
    deriving (Functor, Applicative, Monad)

-- | Read-only environment carried through conversion.
data Context m = Context
    { _scInferContext :: Infer.Context
    , -- The globals we artificially inject into the scope in order to
      -- infer their type supporting mutual recursions
      _scGlobalsInScope :: Set (ExprIRef.DefI m)
    , _scCodeAnchors :: Anchors.CodeProps m
    , _scScopeInfo :: ScopeInfo m
    , -- Check whether the definition is valid after an edit,
      -- so that can hole-wrap bad edits.
      _scReinferCheckRoot :: T m Bool
    , -- The nominal types appearing in the converted expr and its subexpression
      _scNominalsMap :: Map T.NominalId Nominal
    , -- Rank-2 callback used to convert subexpressions; stored in the
      -- context (presumably to avoid a module dependency cycle — verify).
      scConvertSubexpression ::
        forall a. Monoid a => Val (Input.Payload m a) -> ConvertM m (ExpressionU m a)
    }
Lens.makeLenses ''Context
Lens.makePrisms ''TagFieldParam
-- | Build a wrapper that runs a transaction speculatively: the given
-- action runs inside 'Transaction.fork', then the re-inference check
-- is consulted. Only when the check passes are the forked changes
-- merged back (yielding @Just result@); otherwise they are discarded
-- and @Nothing@ is returned.
typeProtectTransaction :: Monad m => ConvertM m (T m a -> T m (Maybe a))
typeProtectTransaction =
    do
        checkOk <- (^. scReinferCheckRoot) <$> readContext
        let protect act =
                do
                    (resume, changes) <-
                        Transaction.fork $
                        do
                            result <- act
                            isOk <- checkOk
                            -- Decide, still inside the fork, how to
                            -- resume outside it: merge the changes on
                            -- success, or drop them on type error.
                            return $
                                if isOk
                                then (>> return (Just result)) . Transaction.merge
                                else const $ return Nothing
                    resume changes
        return protect
-- | Like a plain replace, but type-protected: if replacing @dest@ with
-- @valI@ breaks the definition (per the re-inference check), the new
-- value is set wrapped instead of directly.
typeProtectedSetToVal ::
    Monad m =>
    ConvertM m
    (ExprIRef.ValIProperty m -> ExprIRef.ValI m -> T m (ExprIRef.ValI m))
typeProtectedSetToVal =
    do
        typeProtect <- typeProtectTransaction
        let setToVal dest valI =
                do
                    mResult <- typeProtect $ DataOps.replace dest valI
                    case mResult of
                        Just result -> return result
                        -- Type error: keep the new value, but wrapped.
                        Nothing -> DataOps.setToWrapper valI dest
        return setToVal
-- | Re-set a property to its own current value through the
-- type-protected setter, thereby wrapping it iff it currently fails
-- the re-inference check.
wrapOnTypeError ::
    Monad m =>
    ConvertM m (ExprIRef.ValIProperty m -> T m (ExprIRef.ValI m))
wrapOnTypeError =
    do
        protectedSetToVal <- typeProtectedSetToVal
        let wrap prop = protectedSetToVal prop (Property.value prop)
        return wrap
-- | Run a conversion action with the given environment.
run :: Monad m => Context m -> ConvertM m a -> T m a
run ctx (ConvertM action) = runReaderT action ctx

-- | Fetch the read-only conversion environment.
readContext :: Monad m => ConvertM m (Context m)
readContext = ConvertM Reader.ask

-- | Run an action under a locally modified environment.
local :: Monad m => (Context m -> Context m) -> ConvertM m a -> ConvertM m a
local f (ConvertM act) = ConvertM $ Reader.local f act

-- | Lift a raw transaction into the conversion monad.
liftTransaction :: Monad m => T m a -> ConvertM m a
liftTransaction = ConvertM . lift

-- | Project a code anchor out of the environment.
codeAnchor :: Monad m => (Anchors.CodeProps m -> a) -> ConvertM m a
codeAnchor f = f . (^. scCodeAnchors) <$> readContext

-- | Read a transaction property's current value.
getP :: Monad m => Transaction.MkProperty m a -> ConvertM m a
getP = liftTransaction . Transaction.getP
-- | Convert a subexpression via the callback stored in the context.
convertSubexpression :: (Monad m, Monoid a) => Val (Input.Payload m a) -> ConvertM m (ExpressionU m a)
convertSubexpression exprI =
    readContext >>= \ctx -> scConvertSubexpression ctx exprI
| da-x/lamdu | Lamdu/Sugar/Convert/Monad.hs | gpl-3.0 | 5,886 | 0 | 22 | 1,539 | 1,449 | 795 | 654 | 123 | 2 |
module MonadTrans where
import Control.Monad.Trans
import IdentityT
import MaybeT
import ReaderT
import EitherT
import StateT
-- 1. IdentityT
instance MonadTrans IdentityT where
  -- Lifting into IdentityT is just wrapping; no effect structure added.
  lift = IdentityT
-- 2. MaybeT
instance MonadTrans MaybeT where
  -- Inject by tagging the underlying action's result with 'Just'.
  lift ma = MaybeT (fmap Just ma)
-- 3. ReaderT
instance MonadTrans (ReaderT r) where
  -- The lifted action simply ignores the environment.
  lift ma = ReaderT (\_ -> ma)
-- Exercises: Lift More
-- Keep in mind what these are doing, follow the types, lift till you drop.
-- 1. You thought you were done with EitherT.
instance MonadTrans (EitherT e) where
  -- Inject by tagging the underlying action's result with 'Right'.
  lift ma = EitherT (fmap Right ma)
-- 2. Or StateT. This one’ll be more obnoxious. It’s fine if you’ve seen
-- this before.
instance MonadTrans (StateT s) where
  -- Run the base action and pair its result with the untouched state.
  lift ma = StateT $ \s -> fmap (\a -> (a, s)) ma
| nirvinm/Solving-Exercises-in-Haskell-Programming-From-First-Principles | MonadTransformers/src/MonadTrans.hs | gpl-3.0 | 783 | 0 | 11 | 169 | 171 | 96 | 75 | 19 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.MembershipsLevels.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of all pricing levels offered by a creator to the fans.
--
-- /See:/ <https://developers.google.com/youtube/ YouTube Data API v3 Reference> for @youtube.membershipsLevels.list@.
module Network.Google.Resource.YouTube.MembershipsLevels.List
(
-- * REST Resource
MembershipsLevelsListResource
-- * Creating a Request
, membershipsLevelsList
, MembershipsLevelsList
-- * Request Lenses
, mllXgafv
, mllPart
, mllUploadProtocol
, mllAccessToken
, mllUploadType
, mllCallback
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.membershipsLevels.list@ method which the
-- 'MembershipsLevelsList' request conforms to.
-- (Servant-style route: path segments, then the query parameters in the
-- order the generated client passes them.)
type MembershipsLevelsListResource =
     "youtube" :>
       "v3" :>
         "membershipsLevels" :>
           QueryParams "part" Text :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :>
                         Get '[JSON] MembershipsLevelListResponse

-- | Retrieves a list of all pricing levels offered by a creator to the fans.
--
-- /See:/ 'membershipsLevelsList' smart constructor.
data MembershipsLevelsList =
  MembershipsLevelsList'
    { _mllXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _mllPart :: ![Text] -- ^ Resource parts to include in the response.
    , _mllUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _mllAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _mllUploadType :: !(Maybe Text) -- ^ Legacy upload protocol.
    , _mllCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'MembershipsLevelsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mllXgafv'
--
-- * 'mllPart'
--
-- * 'mllUploadProtocol'
--
-- * 'mllAccessToken'
--
-- * 'mllUploadType'
--
-- * 'mllCallback'
membershipsLevelsList
    :: [Text] -- ^ 'mllPart'
    -> MembershipsLevelsList
membershipsLevelsList pMllPart_ =
  MembershipsLevelsList'
    { _mllXgafv = Nothing
      -- '_Coerce #' converts the given list into the field's internal
      -- representation (a newtype coercion via the review operator).
    , _mllPart = _Coerce # pMllPart_
    , _mllUploadProtocol = Nothing
    , _mllAccessToken = Nothing
    , _mllUploadType = Nothing
    , _mllCallback = Nothing
    }
-- Van Laarhoven lenses over the request's fields.

-- | V1 error format.
mllXgafv :: Lens' MembershipsLevelsList (Maybe Xgafv)
mllXgafv = lens _mllXgafv (\ s a -> s{_mllXgafv = a})

-- | The *part* parameter specifies the membershipsLevel resource parts that
-- the API response will include. Supported values are id and snippet.
mllPart :: Lens' MembershipsLevelsList [Text]
mllPart
  = lens _mllPart (\ s a -> s{_mllPart = a}) . _Coerce

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
mllUploadProtocol :: Lens' MembershipsLevelsList (Maybe Text)
mllUploadProtocol
  = lens _mllUploadProtocol
      (\ s a -> s{_mllUploadProtocol = a})

-- | OAuth access token.
mllAccessToken :: Lens' MembershipsLevelsList (Maybe Text)
mllAccessToken
  = lens _mllAccessToken
      (\ s a -> s{_mllAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
mllUploadType :: Lens' MembershipsLevelsList (Maybe Text)
mllUploadType
  = lens _mllUploadType
      (\ s a -> s{_mllUploadType = a})

-- | JSONP
mllCallback :: Lens' MembershipsLevelsList (Maybe Text)
mllCallback
  = lens _mllCallback (\ s a -> s{_mllCallback = a})
-- Wires the request record to its REST resource: response type,
-- required OAuth scopes, and the servant-style client plumbing.
instance GoogleRequest MembershipsLevelsList where
        type Rs MembershipsLevelsList =
             MembershipsLevelListResponse
        type Scopes MembershipsLevelsList =
             '["https://www.googleapis.com/auth/youtube.channel-memberships.creator"]
        requestClient MembershipsLevelsList'{..}
          = go _mllPart _mllXgafv _mllUploadProtocol
              _mllAccessToken
              _mllUploadType
              _mllCallback
              (Just AltJSON)
              youTubeService
          where go
                  = buildClient
                      (Proxy :: Proxy MembershipsLevelsListResource)
                      mempty
| brendanhay/gogol | gogol-youtube/gen/Network/Google/Resource/YouTube/MembershipsLevels/List.hs | mpl-2.0 | 4,903 | 0 | 17 | 1,148 | 720 | 420 | 300 | 105 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Gmail.Users.Threads.Trash
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Moves the specified thread to the trash.
--
-- /See:/ <https://developers.google.com/gmail/api/ Gmail API Reference> for @gmail.users.threads.trash@.
module Network.Google.Resource.Gmail.Users.Threads.Trash
(
-- * REST Resource
UsersThreadsTrashResource
-- * Creating a Request
, usersThreadsTrash
, UsersThreadsTrash
-- * Request Lenses
, uttUserId
, uttId
) where
import Network.Google.Gmail.Types
import Network.Google.Prelude
-- | A resource alias for @gmail.users.threads.trash@ method which the
-- 'UsersThreadsTrash' request conforms to.
type UsersThreadsTrashResource =
     "gmail" :>
       "v1" :>
         "users" :>
           Capture "userId" Text :>
             "threads" :>
               Capture "id" Text :>
                 "trash" :>
                   QueryParam "alt" AltJSON :> Post '[JSON] Thread

-- | Moves the specified thread to the trash.
--
-- /See:/ 'usersThreadsTrash' smart constructor.
data UsersThreadsTrash = UsersThreadsTrash'
    { _uttUserId :: !Text -- ^ Account owner (\"me\" = authenticated user).
    , _uttId :: !Text -- ^ ID of the thread to trash.
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UsersThreadsTrash' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uttUserId'
--
-- * 'uttId'
usersThreadsTrash
    :: Text -- ^ 'uttId'
    -> UsersThreadsTrash
usersThreadsTrash pUttId_ =
    UsersThreadsTrash'
    -- Defaults the user to "me", i.e. the authenticated account.
    { _uttUserId = "me"
    , _uttId = pUttId_
    }

-- | The user\'s email address. The special value me can be used to indicate
-- the authenticated user.
uttUserId :: Lens' UsersThreadsTrash Text
uttUserId
  = lens _uttUserId (\ s a -> s{_uttUserId = a})

-- | The ID of the thread to Trash.
uttId :: Lens' UsersThreadsTrash Text
uttId = lens _uttId (\ s a -> s{_uttId = a})
-- Wires the request record to its REST resource: response type,
-- required OAuth scopes, and the servant-style client plumbing.
instance GoogleRequest UsersThreadsTrash where
        type Rs UsersThreadsTrash = Thread
        type Scopes UsersThreadsTrash =
             '["https://mail.google.com/",
               "https://www.googleapis.com/auth/gmail.modify"]
        requestClient UsersThreadsTrash'{..}
          = go _uttUserId _uttId (Just AltJSON) gmailService
          where go
                  = buildClient
                      (Proxy :: Proxy UsersThreadsTrashResource)
                      mempty
| rueshyna/gogol | gogol-gmail/gen/Network/Google/Resource/Gmail/Users/Threads/Trash.hs | mpl-2.0 | 3,096 | 0 | 15 | 757 | 383 | 231 | 152 | 61 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Model.Stats.TypesTest where
import qualified Data.Map as Map
import qualified Data.Array.Unboxed as ArrayU
import Test.Tasty
import Model.Stats.Types
import Model.Id.Types
-- | Hand-built 'SiteStats' fixture for tests.
siteStats1 :: SiteStats
siteStats1 =
  SiteStats {
        -- One counter per permission level, spanning the full enum range.
        statsAuthorizedSite = ArrayU.listArray (minBound, maxBound) [0,1,2,3,4,5]
      , statsVolumes = 2
      , statsVolumesShared = 1
      , statsAssets = 3
      , statsAssetDuration = 0
      , statsAssetBytes = 100
      , statsRecords =
          Map.fromList
              [ (Id 1, 1)
              , (Id 2, 2)
              , (Id 3, 3)
              , (Id 4, 4)
              , (Id 5, 5)
              , (Id 6, 6)
              , (Id 7, 7)
              ] -- should be list of category to count of records
  }
-- | Test tree for this module; currently empty (fixture only).
test_all :: [TestTree]
test_all =
    [
    ]
| databrary/databrary | test/Model/Stats/TypesTest.hs | agpl-3.0 | 881 | 0 | 10 | 314 | 232 | 145 | 87 | 28 | 1 |
{-# LANGUAGE RecordWildCards, DeriveDataTypeable #-}
module Main where
import Control.Applicative
import Control.Monad
import Graphics.OpenGLES
import qualified Data.ByteString.Char8 as B
import qualified Graphics.UI.GLFW as GLFW
import Control.Concurrent
-- ghc examples/billboard.hs -lEGL -lGLESv2 -threaded && examples/billboard
-- Entry point: opens a GLFW window, hands GL context management to the
-- worker thread, builds the billboard object, then draws it every frame
-- while printing a frame counter, until the window is closed.
main = do
    GLFW.init
    Just win <- GLFW.createWindow 600 480 "The Billboard" Nothing Nothing
    -- forkGL: acquire context / release context / present-frame actions.
    forkGL
        (GLFW.makeContextCurrent (Just win) >> return False)
        (GLFW.makeContextCurrent Nothing)
        (GLFW.swapBuffers win)
    -- Echo the GL log to stdout with a "# " prefix.
    forkIO $ mapM_ (putStrLn.("# "++)) =<< glLogContents
    future <- withGL $ mkBillboard >>= mkSomeObj
    let loop c = do
            withGL $ runAction $ draw <$> future
            endFrameGL
            putStrLn . show $ c
            GLFW.pollEvents
            closing <- GLFW.windowShouldClose win
            when (not closing) $ loop (c+1)
    loop 0
-- | Shader program handle together with its resolved uniform and
-- attribute locations.
data Billboard = Billboard
    { billboard :: Program Billboard
    , mvpMatrix :: Uniform Billboard Mat3
    , pos :: Attrib Billboard Vec2
    , uv :: Attrib Billboard Vec2
    } deriving Typeable

-- | Compile and link the billboard shaders, logging each build step,
-- then resolve the program's uniform/attribute handles.
mkBillboard :: GL Billboard
mkBillboard = do
    Finished p <- glCompile NoFeedback
        [ vertexShader "bb.vs" vsSrc
        , fragmentShader "bb.fs" fsSrc ]
        $ \prog step msg bin ->
            putStrLn $ "> step " ++ show step ++ ", " ++ msg
    Billboard p <$> uniform "mvpMatrix"
        <*> attrib "pos" <*> attrib "uv"
-- GLSL ES 3.00 sources; not referenced by 'mkBillboard' (experiments).
vsSrc3 = B.pack $
    "#version 300 es\n\
    \ in mat4 ttt;in mat4 sss;\
    \ void main(){gl_Position = vec4(1) * ttt * sss;}"
fsSrc3 = B.pack $
    "#version 300 es\n\
    \precision mediump float;\
    \ out vec4 var;\
    \ void main(){var = vec4(1);}"
-- GLSL ES 1.00 vertex shader actually used by the billboard.
vsSrc = B.pack $
    "#version 100\n" ++
    "uniform mat3 mvpMatrix;\n" ++
    "attribute vec2 pos;\n" ++
    "attribute vec2 uv;\n" ++
    "uniform struct qqq { vec2 w[2]; };uniform struct aww { vec4 wew; vec3 www[3]; ivec2 oo[10]; qqq s[10];} ogg[20];uniform vec4 uuuuu[63];" ++
    "varying vec4 vColor;\n" ++
    "void main() {\n" ++
    "  gl_Position = vec4(mvpMatrix*vec3(pos, -1.0), 1.0);\n" ++
    "  vColor = vec4(uv, 0.5, 1.0)/*+uuuuu[10]+ogg[19].wew*/;\n" ++
    "}\n"
-- GLSL ES 1.00 fragment shader: pass the interpolated colour through.
fsSrc = B.pack $
    "#version 100\n" ++
    "precision mediump float;\n" ++
    "varying vec4 vColor;\n" ++
    "void main() {\n" ++
    "  gl_FragColor = vColor;\n" ++
    "}\n"
-- | Geometry for one quad: the program plus its vertex array and buffers.
data SomeObj = SomeObj
    { prog :: Billboard
    , vao :: VertexArray Billboard
    , posBuf :: Buffer Vec2
    , uvBuf :: Buffer (V2 Word8)
    }

-- | Upload the quad's vertex data and bind it into a vertex array.
mkSomeObj :: Billboard -> GL SomeObj
mkSomeObj prog@Billboard{..} = do
    posBuf <- glLoad app2gl (posData,4::Int)
    uvBuf <- glLoad app2gl uvData
    vao <- glVA [ pos &= posBuf, uv &= uvBuf]
    return SomeObj {..}

-- Quad corners as a triangle strip, and their texture coordinates.
posData = [V2 (-1) (-1), V2 1 (-1), V2 (-1) 1, V2 1 1]
uvData = [V2 0 0, V2 0 1, V2 1 0, V2 1 1]

-- | Draw the quad with back-face culling and an identity MVP matrix,
-- printing the draw result.
draw :: SomeObj -> GL ()
draw SomeObj{..} = do
    let Billboard{..} = prog
    updateSomeObj posBuf uvBuf
    r <- glDraw triangleStrip billboard
        [ begin culling, cullFace hideBack]
        [ mvpMatrix $= eye3]
        vao $ takeFrom 0 4
    putStrLn . show $ r

-- Placeholder for per-frame buffer updates; currently a no-op.
updateSomeObj _ _ = return ()
| capsjac/opengles | examples/billboard.hs | lgpl-3.0 | 2,898 | 164 | 15 | 553 | 886 | 481 | 405 | 85 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Haskoin.Crypto.Mnemonic.Tests (tests) where
import Test.QuickCheck (Arbitrary, Property, arbitrary, choose, (==>))
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Data.Bits ((.&.), shiftR)
import Data.Binary (Binary)
import Data.Word (Word32, Word64)
import qualified Data.ByteString as BS
( ByteString
, empty
, append
, concat
, length
, last
)
import qualified Data.ByteString.Char8 as C (words)
import Network.Haskoin.Test
import Network.Haskoin.Crypto
import Network.Haskoin.Util
import Network.Haskoin.Internals (fromMnemonic, getBits)
-- | Full test tree for the BIP-39 mnemonic implementation.
tests :: [Test]
tests =
    [ testGroup "Encode mnemonic"
        [ testProperty "128-bit entropy -> 12 words" toMnemonic128
        -- Label fixed: 160 bits of entropy yield 15 words
        -- ((160 + 160/32) / 11 = 15), which is what toMnemonic160 checks.
        , testProperty "160-bit entropy -> 15 words" toMnemonic160
        , testProperty "256-bit entropy -> 24 words" toMnemonic256
        , testProperty "512-bit entropy -> 48 words" toMnemonic512
        , testProperty "n-bit entropy -> m words" toMnemonicVar
        ]
    , testGroup "Encode/Decode Mnemonic"
        [ testProperty "128-bit entropy" fromToMnemonic128
        , testProperty "160-bit entropy" fromToMnemonic160
        , testProperty "256-bit entropy" fromToMnemonic256
        , testProperty "512-bit entropy" fromToMnemonic512
        , testProperty "n-bit entropy" fromToMnemonicVar
        ]
    , testGroup "Mnemonic to seed"
        [ testProperty "128-bit entropy" mnemonicToSeed128
        , testProperty "160-bit entropy" mnemonicToSeed160
        , testProperty "256-bit entropy" mnemonicToSeed256
        , testProperty "512-bit entropy" mnemonicToSeed512
        , testProperty "n-bit entropy" mnemonicToSeedVar
        ]
    , testGroup "Get bits from ByteString"
        [ testProperty "Byte count" getBitsByteCount
        , testProperty "End bits" getBitsEndBits
        ]
    ]
-- | Serialize a list of Binary values into one ByteString.
-- NOTE(review): the fold appends each encoded word *after* the
-- accumulator, so the output is in reverse list order. The properties
-- below only depend on length and self-consistent round-trips, so this
-- is benign here — confirm before reusing elsewhere.
binWordsToBS :: Binary a => [a] -> BS.ByteString
binWordsToBS = foldr f BS.empty
  where
    f b a = a `BS.append` encode' b
{- Encode mnemonic -}

-- Each property checks the BIP-39 word count for a fixed entropy size:
-- word count = (entropy bits + entropy bits / 32) / 11.

toMnemonic128 :: (Word64, Word64) -> Bool
toMnemonic128 (a, b) = l == 12
  where
    bs = encode' a `BS.append` encode' b
    l = length . C.words . fromRight $ toMnemonic bs

toMnemonic160 :: (Word32, Word64, Word64) -> Bool
toMnemonic160 (a, b, c) = l == 15
  where
    bs = BS.concat [encode' a, encode' b, encode' c]
    l = length . C.words . fromRight $ toMnemonic bs

toMnemonic256 :: (Word64, Word64, Word64, Word64) -> Bool
toMnemonic256 (a, b, c, d) = l == 24
  where
    bs = BS.concat [encode' a, encode' b, encode' c, encode' d]
    l = length . C.words . fromRight $ toMnemonic bs

toMnemonic512 ::
    ((Word64, Word64, Word64, Word64), (Word64, Word64, Word64, Word64)) -> Bool
toMnemonic512 ((a, b, c, d), (e, f, g, h)) = l == 48
  where
    bs = BS.concat
        [ encode' a, encode' b, encode' c, encode' d
        , encode' e, encode' f, encode' g, encode' h
        ]
    l = length . C.words . fromRight $ toMnemonic bs

-- General case: up to 8 words (32 bytes) of arbitrary entropy.
toMnemonicVar :: [Word32] -> Property
toMnemonicVar ls = not (length ls > 8) ==> l == wc
  where
    bs = binWordsToBS ls
    bl = BS.length bs
    cb = bl `div` 4 -- checksum bits: one per 32 entropy bits
    wc = (cb + bl * 8) `div` 11 -- expected word count
    l = length . C.words . fromRight $ toMnemonic bs
{- Encode/Decode -}

-- Round-trip: decoding the produced mnemonic must reproduce the entropy.

fromToMnemonic128 :: (Word64, Word64) -> Bool
fromToMnemonic128 (a, b) = bs == bs'
  where
    bs = encode' a `BS.append` encode' b
    bs' = fromRight (fromMnemonic =<< toMnemonic bs)

fromToMnemonic160 :: (Word32, Word64, Word64) -> Bool
fromToMnemonic160 (a, b, c) = bs == bs'
  where
    bs = BS.concat [encode' a, encode' b, encode' c]
    bs' = fromRight (fromMnemonic =<< toMnemonic bs)

fromToMnemonic256 :: (Word64, Word64, Word64, Word64) -> Bool
fromToMnemonic256 (a, b, c, d) = bs == bs'
  where
    bs = BS.concat [encode' a, encode' b, encode' c, encode' d]
    bs' = fromRight (fromMnemonic =<< toMnemonic bs)

fromToMnemonic512 ::
    ((Word64, Word64, Word64, Word64), (Word64, Word64, Word64, Word64)) -> Bool
fromToMnemonic512 ((a, b, c, d), (e, f, g, h)) = bs == bs'
  where
    bs = BS.concat
        [ encode' a, encode' b, encode' c, encode' d
        , encode' e, encode' f, encode' g, encode' h
        ]
    bs' = fromRight (fromMnemonic =<< toMnemonic bs)

fromToMnemonicVar :: [Word32] -> Property
fromToMnemonicVar ls = not (length ls > 8) ==> bs == bs'
  where
    bs = binWordsToBS ls
    bs' = fromRight (fromMnemonic =<< toMnemonic bs)
{- Mnemonic to seed -}

-- Seed derivation (with an empty passphrase) must always yield 64 bytes,
-- whatever the entropy size.

mnemonicToSeed128 :: (Word64, Word64) -> Bool
mnemonicToSeed128 (a, b) = l == 64
  where
    bs = encode' a `BS.append` encode' b
    seed = fromRight (mnemonicToSeed "" =<< toMnemonic bs)
    l = BS.length seed

mnemonicToSeed160 :: (Word32, Word64, Word64) -> Bool
mnemonicToSeed160 (a, b, c) = l == 64
  where
    bs = BS.concat [encode' a, encode' b, encode' c]
    seed = fromRight (mnemonicToSeed "" =<< toMnemonic bs)
    l = BS.length seed

mnemonicToSeed256 :: (Word64, Word64, Word64, Word64) -> Bool
mnemonicToSeed256 (a, b, c, d) = l == 64
  where
    bs = BS.concat [encode' a, encode' b, encode' c, encode' d]
    seed = fromRight (mnemonicToSeed "" =<< toMnemonic bs)
    l = BS.length seed

mnemonicToSeed512 ::
    ((Word64, Word64, Word64, Word64), (Word64, Word64, Word64, Word64)) -> Bool
mnemonicToSeed512 ((a, b, c, d), (e, f, g, h)) = l == 64
  where
    bs = BS.concat
        [ encode' a, encode' b, encode' c, encode' d
        , encode' e, encode' f, encode' g, encode' h
        ]
    seed = fromRight (mnemonicToSeed "" =<< toMnemonic bs)
    l = BS.length seed

mnemonicToSeedVar :: [Word32] -> Property
mnemonicToSeedVar ls = not (length ls > 16) ==> l == 64
  where
    bs = binWordsToBS ls
    seed = fromRight (mnemonicToSeed "" =<< toMnemonic bs)
    l = BS.length seed
{- Get bits from ByteString -}

-- | A ByteString together with a bit count i in [0, 8 * length].
data ByteCountGen = ByteCountGen BS.ByteString Int deriving Show

instance Arbitrary ByteCountGen where
    arbitrary = do
        ArbitraryByteString bs <- arbitrary
        i <- choose (0, BS.length bs * 8)
        return $ ByteCountGen bs i

-- | Taking i bits must produce ceil(i / 8) bytes.
getBitsByteCount :: ByteCountGen -> Bool
getBitsByteCount (ByteCountGen bs i) = BS.length bits == l
  where
    (q, r) = i `quotRem` 8
    bits = getBits i bs
    l = if r == 0 then q else q + 1
-- | After taking @i@ bits, all bits past the @i@-th position in the
-- final byte must be zero.
getBitsEndBits :: ByteCountGen -> Bool
getBitsEndBits (ByteCountGen bs i) = mask
  where
    r = i `mod` 8
    bits = getBits i bs
    -- 'if r == 0 then True else e' simplified to a short-circuiting
    -- disjunction; identical semantics (BS.last is still only reached
    -- when r /= 0).
    mask = r == 0 || BS.last bits .&. (0xff `shiftR` r) == 0x00
| tphyahoo/haskoin | haskoin-core/tests/Network/Haskoin/Crypto/Mnemonic/Tests.hs | unlicense | 6,475 | 0 | 13 | 1,517 | 2,251 | 1,233 | 1,018 | 139 | 2 |
-- This module is a compatibility layer that emulates the behaviour of Python's format strings.
-- It is needed because several Powerline segments take a format argument that uses this notation.
-- Interpolation sections have the grammar {[name][:fmt]}, and may also contain arbitrary text.
--
-- See: https://docs.python.org/3.3/library/string.html#format-specification-mini-language
module Format (pyFormat, parseFmt, convertFmt) where
import Data.Char (isDigit)
import Data.Maybe (fromMaybe)
import Text.ParserCombinators.ReadP
import Text.Printf (printf, PrintfType)
import Util
type PyFormatStr = String -- format used by Python's str.format()
type HsFormatStr = String -- format used by Text.Printf

-- | One piece of a parsed Python format string: either the parsed
-- contents of an interpolation section ({...}) or a run of literal text.
-- Grammar: [[fill]align][sign][#][0][width][,][.precision][type]
data FormatSegment = FormatStr {
    fillAlign :: Maybe FillAlign,
    numericSign :: Maybe NumericSign,
    formatWidth :: Maybe Int,
    formatPrecision :: Maybe Int,
    formatChar :: Maybe Char }
    | TextStr String
    deriving (Eq, Show)

-- | A fill character paired with an alignment directive.
data FillAlign = FillAlign Char Alignment
    deriving (Eq, Show)

data Alignment = LeftAlign    -- <
               | RightAlign   -- >
               | SignAlign    -- =
               | CentredAlign -- ^
    deriving (Eq, Show)

data NumericSign = Always       -- +
                 | OnlyNegative -- -
                 | SpaceSign    -- ' '
    deriving (Eq, Show)
-- | Entry point: converts the format String and calls printf with it.
-- NOTE(review): on an unparsable format, 'parseFmt' yields [] (see its
-- note), so printf receives an empty format string rather than failing.
pyFormat :: PrintfType r => PyFormatStr -> r
pyFormat = printf . concatMap convertFmt . parseFmt
-- | Parse a Python format string into its segments.
-- NOTE(review): the fallback branch uses 'fail' at list type, which is
-- the list-monad 'fail' — it returns [] and the error message is never
-- shown. An unrecognized format therefore parses to no segments.
parseFmt :: PyFormatStr -> [FormatSegment]
parseFmt fmt = res where
    res = case runParser parser fmt of
        Just x -> x
        _ -> fail $ '\'' : fmt ++ "' does not have a recognized format."
    -- A run of literal text: everything up to the next '{'.
    textParser = TextStr <$> munch1 (/= '{')
    parser :: ReadP [FormatSegment] = many1 $ fmtParser <++ textParser
    -- "{" [field_name] ["!" conversion] [":" format_spec] "}"
    fmtParser = between (char '{') (char '}') $ nameParser *> convParser *> fmtSpec
    -- Field names and !r/!s/!a conversions are recognized but discarded.
    nameParser = skipMany . satisfy $ not . flip elem ":!}"
    convParser = optional $ char '!' *> (char 'r' +++ char 's' +++ char 'a')
    fmtSpec = option defFmtSpec $ char ':' *> fmtSpecParser
    defFmtSpec = FormatStr Nothing Nothing Nothing Nothing Nothing
    -- Format Spec Grammar: [[fill]align][sign][#][0][width][,][.precision][type]
    fillChar = satisfy (not . flip elem "<>=^") +++ return ' '
    fillAlignParser = maybeParse (FillAlign <$> fillChar <*> alignParser)
    alignParser = mapParser [
        ('<', LeftAlign),
        ('>', RightAlign),
        ('=', SignAlign),
        ('^', CentredAlign)
        ]
    signParser = maybeParse $ mapParser [
        ('+', Always),
        ('-', OnlyNegative),
        (' ', SpaceSign)
        ]
    -- Note that we are not implementing these for now
    ignoredFlags1 = optional (char '#') <* optional (char '0')
    ignoredFlags2 = optional (char ',')
    widthParser = maybeParse parseInt
    precisionParser = maybeParse $ char '.' >> parseInt
    typeParser = maybeParse . satisfy $ flip elem [
        -- These are extensions that are present in Python but not printf
        -- 'b', -- binary
        -- 'n', -- like %i, but with 1000s separators
        '%', -- percentage (ignored)
        'c', -- character
        'd', -- decimal
        'e', 'E', -- floats
        'f', 'F',
        'g', 'G',
        'o', -- octal
        's', -- string
        'x', 'X' -- hexadecimal
        ]
    fmtSpecParser = FormatStr <$> fillAlignParser
                              <*> signParser
                              <*  ignoredFlags1
                              <*> widthParser
                              <*  ignoredFlags2
                              <*> precisionParser
                              <*> typeParser
-- | Run a ReadP parser, requiring it to consume the whole input.
-- Yields Nothing on failure, and raises an error if the grammar admits
-- more than one complete parse (ambiguity).
runParser :: Show a => ReadP a -> String -> Maybe a
runParser parser s =
    case [x | (x, rest) <- readP_to_S parser s, null rest] of
        []  -> Nothing
        [x] -> Just x
        xs  -> error $ "Ambiguity found in grammar: " ++ show xs
-- | Consume one character and translate it through the lookup table;
-- the parse fails when the character has no entry.
mapParser :: [(Char, b)] -> ReadP b
mapParser dict = do
    key <- get
    maybe pfail return (lookup key dict)
-- | Parse one or more decimal digits as an unsigned Int.
parseInt :: ReadP Int
parseInt = fmap read (munch1 isDigit)
-- | Make a parser optional: offers 'Just' its result when it succeeds
-- alongside a 'Nothing' alternative (same semantics as 'option Nothing').
maybeParse :: ReadP a -> ReadP (Maybe a)
maybeParse p = fmap Just p +++ return Nothing
-- | Translate one parsed segment into printf syntax.
convertFmt :: FormatSegment -> HsFormatStr
convertFmt (TextStr s) = replace "%" "%%" s -- escape literal percent signs
convertFmt FormatStr{..} = res where
    -- Assemble "%[flags][width][.precision][type]", dropping absent parts.
    res = concatMap (fromMaybe "") [
        Just "%",
        convAlign <$> fillAlign,
        convSign <$> numericSign,
        show <$> formatWidth',
        ('.':) . show <$> formatPrecision,
        Just formatChar'
        ]
    -- width includes percentage symbol
    formatWidth' = f <$> formatWidth
        where f = if formatChar == Just '%'
                  then pred
                  else id
    formatChar' = case formatChar of
        Just '%' -> "v%%" -- Don't process percentage signs, as printf doesn't support them
        Just c -> [c]
        Nothing -> "v" -- %v accepts any type
-- | Translate a fill/alignment spec into printf flags. Only space- and
-- zero-fill with left/right alignment are supported; anything else
-- (custom fill characters, '=' or '^' alignment) raises an error.
convAlign :: FillAlign -> String
convAlign (FillAlign ' ' LeftAlign) = "-"
convAlign (FillAlign ' ' RightAlign) = ""
convAlign (FillAlign '0' a) = '0' : convAlign (FillAlign ' ' a)
convAlign fa = error $ "Unsupported alignment format with: " ++ show fa
-- | Translate a Python numeric-sign flag into its printf counterpart.
convSign :: NumericSign -> String
convSign sign = case sign of
    Always       -> "+"
    SpaceSign    -> " "
    OnlyNegative -> ""
| rdnetto/powerline-hs | src/Format.hs | apache-2.0 | 6,131 | 0 | 13 | 2,111 | 1,331 | 720 | 611 | -1 | -1 |
module PowerDivisibility.A072905 (a072905) where
import HelperSequences.A000188 (a000188)
import HelperSequences.A007913 (a007913)
-- | OEIS A072905: computed as the squarefree part of n times the square
-- of (one plus the square root of the largest square dividing n) —
-- presumably the least k > n with k*n a perfect square; verify against
-- the A000188/A007913 helper definitions.
a072905 :: Integer -> Integer
a072905 n = a007913 n * (a000188 n + 1)^2
| peterokagey/haskellOEIS | src/PowerDivisibility/A072905.hs | apache-2.0 | 204 | 0 | 9 | 26 | 69 | 38 | 31 | 5 | 1 |
-- Project Euler #1: print the sum of all multiples of 3 or 5 below 1000.
main = print (solveIt 1000)
-- | Sum of all naturals below n that are multiples of 3 or 5.
-- Uses 'sum' rather than 'foldl1 (+)': 'foldl1' crashes on an empty
-- list, so the original failed for n <= 3 (no qualifying numbers);
-- 'sum' correctly yields 0 there.
solveIt :: Int -> Int
solveIt n = sum (filter matches (takeWhile (< n) (enumFrom 1)))
-- | True when n is divisible by 3 or by 5.
matches :: Int -> Bool
matches n = any (\d -> n `mod` d == 0) [3, 5]
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Util.EmbedAsUrl
( embedAsUrl,
)
where
import qualified Data.ByteString as B
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Unsafe as B
import qualified Data.Text.Encoding as T
import Language.Haskell.TH.Syntax
import System.IO.Unsafe
-- | Template Haskell splice: embed a file at compile time as a @data:@
-- URI 'Data.Text.Text' value with the given content type.
--
-- The file is read and base64-encoded during compilation; the generated
-- expression unpacks a primitive string literal at run time.
-- 'unsafePackAddressLen'/'unsafePerformIO' are used on a static literal
-- whose length is baked in alongside it, so no run-time I/O or mutation
-- is actually involved.
embedAsUrl :: String -> FilePath -> Q Exp
embedAsUrl contentType f = do
  -- Recompile whenever the embedded file changes.
  qAddDependentFile f
  payload <- runIO $ B64.encode <$> B.readFile f
  let uri = "data:" <> BC.pack contentType <> ";base64," <> payload
  [e|
    T.decodeUtf8 $ unsafePerformIO $
      B.unsafePackAddressLen
        $(return $ LitE $ IntegerL $ fromIntegral $ B.length uri)
        $(return $ LitE $ StringPrimL $ B.unpack uri)
    |]
| google/codeworld | codeworld-api/src/Util/EmbedAsUrl.hs | apache-2.0 | 1,415 | 0 | 14 | 263 | 159 | 96 | 63 | 21 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : Network.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:14
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Network (
module Qtc.Classes.Network
, module Qtc.Classes.Network_h
, module Qtc.ClassTypes.Network
, module Qtc.Network.QAbstractSocket
, module Qtc.Network.QAbstractSocket_h
, module Qtc.Enums.Network.QAbstractSocket
, module Qtc.Network.QFtp
, module Qtc.Network.QFtp_h
, module Qtc.Enums.Network.QFtp
, module Qtc.Network.QHostAddress
, module Qtc.Enums.Network.QHostAddress
, module Qtc.Network.QHostInfo
, module Qtc.Enums.Network.QHostInfo
, module Qtc.Network.QHttp
, module Qtc.Network.QHttp_h
, module Qtc.Enums.Network.QHttp
, module Qtc.Network.QHttpHeader
, module Qtc.Network.QHttpHeader_h
, module Qtc.Network.QHttpRequestHeader
, module Qtc.Network.QHttpRequestHeader_h
, module Qtc.Network.QHttpResponseHeader
, module Qtc.Network.QHttpResponseHeader_h
, module Qtc.Network.QNetworkAddressEntry
, module Qtc.Network.QNetworkInterface
, module Qtc.Enums.Network.QNetworkInterface
, module Qtc.Network.QNetworkProxy
, module Qtc.Enums.Network.QNetworkProxy
, module Qtc.Network.QTcpServer
, module Qtc.Network.QTcpServer_h
, module Qtc.Network.QTcpSocket
, module Qtc.Network.QTcpSocket_h
, module Qtc.Network.QUdpSocket
, module Qtc.Network.QUdpSocket_h
, module Qtc.Enums.Network.QUdpSocket
, module Qtc.Network.QUrlInfo
, module Qtc.Network.QUrlInfo_h
, module Qtc.Enums.Network.QUrlInfo
)
where
import Qtc.ClassTypes.Network
import Qtc.Classes.Network
import Qtc.Classes.Network_h
import Qtc.Network.QAbstractSocket
import Qtc.Network.QAbstractSocket_h
import Qtc.Enums.Network.QAbstractSocket
import Qtc.Network.QFtp
import Qtc.Network.QFtp_h
import Qtc.Enums.Network.QFtp
import Qtc.Network.QHostAddress
import Qtc.Enums.Network.QHostAddress
import Qtc.Network.QHostInfo
import Qtc.Enums.Network.QHostInfo
import Qtc.Network.QHttp
import Qtc.Network.QHttp_h
import Qtc.Enums.Network.QHttp
import Qtc.Network.QHttpHeader
import Qtc.Network.QHttpHeader_h
import Qtc.Network.QHttpRequestHeader
import Qtc.Network.QHttpRequestHeader_h
import Qtc.Network.QHttpResponseHeader
import Qtc.Network.QHttpResponseHeader_h
import Qtc.Network.QNetworkAddressEntry
import Qtc.Network.QNetworkInterface
import Qtc.Enums.Network.QNetworkInterface
import Qtc.Network.QNetworkProxy
import Qtc.Enums.Network.QNetworkProxy
import Qtc.Network.QTcpServer
import Qtc.Network.QTcpServer_h
import Qtc.Network.QTcpSocket
import Qtc.Network.QTcpSocket_h
import Qtc.Network.QUdpSocket
import Qtc.Network.QUdpSocket_h
import Qtc.Enums.Network.QUdpSocket
import Qtc.Network.QUrlInfo
import Qtc.Network.QUrlInfo_h
import Qtc.Enums.Network.QUrlInfo
| uduki/hsQt | Qtc/Network.hs | bsd-2-clause | 3,039 | 0 | 5 | 343 | 511 | 360 | 151 | 76 | 0 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Blog
( getEntryR
, getEditEntryR
, getBlogPageR
, getBlogR
, getNewEntryR
, postNewEntryR
, getDeleteEntryR
) where
import Helpers.Messages
import Helpers.Posts
import Import
import Yesod.Markdown
-- | Display a single blog entry looked up by its URL slug.
-- Responds with a 404 when no entry with that slug exists.
getEntryR :: Text -> Handler RepHtml
getEntryR slug = do
    entry <- runDB $ getBySlug404 slug
    defaultLayout $ do
        setTitleI $ entryTitle entry
        -- The template refers to the local binding 'entry'.
        $(widgetFile "blog-entry")
--resultsForPage :: (PersistEntity Entry, PersistQuery b m, PersistEntityBackend Entry ~ b) => Int -> b m [Entity Entry]
-- | Fetch one page of blog entries, newest first.
-- Pages are 1-based: page n skips the first (n - 1) * 30 rows.
resultsForPage pageNumber = do
    let resultsPerPage = 30
    selectList [] [ Desc EntryPosted
                  , LimitTo resultsPerPage
                  , OffsetBy $ (pageNumber - 1) * resultsPerPage
                  ]
-- | Render one page of the blog index (see 'resultsForPage' for paging).
--
-- Besides the entries themselves, the template is handed the current
-- time ('now') and whether the logged-in user is an admin — presumably
-- to decide whether to show admin-only controls (defined in the
-- "blog-entries" template, not visible here).
getBlogPageR :: Int -> Handler RepHtml
getBlogPageR page = do
    entries <- runDB $ resultsForPage page
    now <- liftIO getCurrentTime
    muser <- maybeAuth
    let isCurrentUserAdmin :: Bool
        isCurrentUserAdmin =
            case muser of
                -- 'if b then True else False' simplified to 'b'.
                Just (Entity _ user) -> userAdmin user
                _ -> False
    defaultLayout $ do
        setTitleI MsgBlog
        $(widgetFile "blog-entries")
-- | The blog's front page: simply the first page of entries.
getBlogR :: Handler RepHtml
getBlogR = getBlogPageR 1
-- | Render the post form.  'Nothing' yields a blank form for a new
-- entry; @'Just' entry@ pre-populates the fields from an existing one
-- (used by 'getEditEntryR').
entryHandler :: Maybe Entry -> Handler RepHtml
entryHandler entry = do
    (formWidget, formEnctype) <- generateFormPost $ newEntryForm entry
    defaultLayout $ do
        setTitle "New post"
        -- The template refers to 'formWidget' and 'formEnctype'.
        $(widgetFile "admin/new-post")
-- | GET handler for creating a new post: a blank entry form.
getNewEntryR :: Handler RepHtml
getNewEntryR = entryHandler Nothing
-- | POST handler for the new/edit post form.
--
-- On a successful form submission the entry is upserted via 'insertBy':
-- 'Right' means a fresh row was inserted; 'Left' means a row with the
-- same unique key already existed, in which case every field is updated
-- in place.  Either way the user is redirected to the entry page with a
-- success message.  On form failure the form is re-rendered.
postNewEntryR :: Handler RepHtml
postNewEntryR = do
    ((result, formWidget), formEnctype) <- runFormPost $ newEntryForm Nothing
    case result of
        FormSuccess entry -> do
            (msg, redirection) <- runDB $ do
                res <- insertBy entry
                case res of
                    -- New entry: inserted fresh.
                    Right _ ->
                        return ( successMessage "Successfully added a new entry."
                               , EntryR $ entrySlug entry)
                    -- Unique-key collision: overwrite the existing row.
                    Left (Entity k _) -> do
                        update k
                            [ EntrySlug =. entrySlug entry
                            , EntryTitle =. entryTitle entry
                            , EntryPosted =. entryPosted entry
                            , EntryDescr =. entryDescr entry
                            , EntryPost =. entryPost entry
                            ]
                        return ( successMessage "Successfully updated the entry."
                               , EntryR $ entrySlug entry)
            setMessage msg
            redirect redirection
{-
        case iResult of
            Nothing -> defaultLayout $ do
                setTitle "New post"
                $(widgetFile "admin/new-post")
            Just _ -> do
                setMessage $ successMessage "Successfully added a new entry."
                redirect $ EntryR $ entrySlug entry
-}
        -- FormFailure / FormMissing: show the form again.
        _ -> defaultLayout $ do
            setTitle "New post"
            $(widgetFile "admin/new-post")
-- | Applicative form for an 'Entry', optionally pre-filled from an
-- existing one.  The posted timestamp is always taken as the current
-- time at submission, never from the original entry.
newEntryForm :: Maybe Entry -> Form Entry
newEntryForm entry = renderBootstrap $ Entry
    <$> areq textField (setLabel MsgBlogSlug) (fmap entrySlug entry)
    <*> areq textField (setLabel MsgBlogTitle) (fmap entryTitle entry)
    <*> aformM (liftIO getCurrentTime)
    <*> aopt markdownField (setLabel MsgBlogDescription) (fmap entryDescr entry)
    <*> areq markdownField (setLabel MsgBlogPost) (fmap entryPost entry)
-- | Show the edit form for the entry with the given slug.
-- Responds with a 404 when the slug is unknown.
getEditEntryR :: Text -> Handler RepHtml
getEditEntryR slug = do
    Entity _ existing <- runDB $ getBy404 $ UniquePost slug
    entryHandler (Just existing)
-- | Delete the entry with the given slug, then redirect to the admin
-- page with a status message.  A missing entry is not an error for the
-- redirect — only the flashed message differs.
getDeleteEntryR :: Text -> Handler RepHtml
getDeleteEntryR slug = do
    (msgType, msg) <- runDB $ do
        mentity <- getBy $ UniquePost slug
        case mentity of
            Just (Entity key _) -> do
                delete key
                return (Success, "post deleted!")
            Nothing -> return (Error, "post not found")
    case msgType of
        Success -> setMessage $ successMessage msg
        -- NOTE(review): the "post not found" text bound above is
        -- discarded here and a generic "failure" is shown instead —
        -- confirm whether that is intentional.
        _ -> setMessage "failure"
    redirect AdminR
| ModernSteward/blog | Handler/Blog.hs | bsd-2-clause | 4,349 | 0 | 24 | 1,571 | 984 | 471 | 513 | 96 | 3 |
{-# LANGUAGE LambdaCase, NamedFieldPuns, RecordWildCards, TupleSections #-}
module Mote.Case where
import Bag (bagToList)
import BasicTypes (Boxity (..))
import Control.Applicative ((<$), (<$>), (<*>), (<|>))
import Control.Arrow (second)
import Control.Monad.Error (throwError)
import Control.Monad.State
import qualified Data.List as List
import Data.Maybe (fromMaybe, mapMaybe)
import qualified Data.Set as S
import qualified Data.Char as Char
import qualified Data.IntervalMap.FingerTree as I
import DataCon (DataCon, dataConFieldLabels,
dataConInstArgTys, dataConIsInfix,
dataConName, isTupleDataCon,
isUnboxedTupleCon)
import DynFlags (ExtensionFlag (Opt_PolyKinds))
import FamInst (tcGetFamInstEnvs)
import qualified FamInstEnv
import FastString
import qualified GHC
import GhcMonad
import HsBinds (HsBindLR (..), HsLocalBindsLR (..),
HsValBindsLR (..))
import HsDecls (ClsInstDecl (..), HsDecl (..),
InstDecl (..))
import HsExpr (GRHS (..), GRHSs (..), HsExpr (..),
HsTupArg (..), LHsExpr, LMatch,
Match (..), StmtLR (..))
import HsPat
import HsSyn (HsModule (..))
import Name (Name)
import OccName (occNameString, occNameFS, occName)
import RdrName (RdrName (..), mkVarUnqual, nameRdrName)
import SrcLoc (GenLocated (..), Located, SrcLoc(..), SrcSpan,
getLoc, isSubspanOf, noLoc, realSrcSpanStart)
import TcRnDriver (runTcInteractive)
import TcRnMonad (setXOptM)
import TyCon
import Type
import TypeRep
import Mote.Types
import Mote.Util
type SatDataConRep = (Name, [Type])
type TypedDataCon = (DataCon, [Type])
type DataType = [TypedDataCon]
-- | Normalise a type in the current GHC session, resolving type-family
-- applications via 'FamInstEnv.normaliseType' (with PolyKinds enabled).
-- Falls back to the input type unchanged when the interactive
-- typechecker yields no result.
normaliseType :: (GhcMonad m) => Type -> m Type
normaliseType t = do
  hsc_env <- getSession
  fmap (fromMaybe t . snd) . liftIO . runTcInteractive hsc_env $
    setXOptM Opt_PolyKinds $ do
      fam_envs <- tcGetFamInstEnvs
      return (snd (FamInstEnv.normaliseType fam_envs Nominal t))
-- TODO: It's probably unnecessary to normalise here
-- | Break a type apart into its data constructors, each paired with its
-- argument types instantiated at this type's arguments.  'Nothing' when
-- the (normalised, forall-stripped) type is not a 'TyCon' application
-- or its 'TyCon' exposes no data constructors.
unpeel :: GhcMonad m => Type -> m (Maybe DataType)
unpeel t =
  fmap (splitTyConApp_maybe . dropForAlls) (normaliseType t) >>| \case
    Nothing -> Nothing
    Just (tc, args) ->
      fmap (map (\dc -> (dc, dataConInstArgTys dc args)))
        (tyConDataCons_maybe tc)
type Scoped = State (S.Set FastString)
-- algorithm for expanding variable.
-- > be in a hole.
-- > walk down parse tree looking for srcpos of that hole
-- > keep stack of case exprs you've stepped into.
-- > after finding the hole, walk back up and find the closest
-- one that has a variable named as requested.
-- > replace case exprs with expanded one
-- | Build a source pattern that matches the given constructor applied
-- to fresh variables, whose names are drawn from 'varPat' and avoid the
-- supplied in-scope name set.  Tuples, infix constructors and record
-- constructors (rendered with field punning) each get their own shape.
conPattern :: S.Set FastString -> (DataCon, [Type]) -> Pat RdrName
conPattern scope (dc, argTys)
  | isTupleDataCon dc =
    let b = if isUnboxedTupleCon dc then Unboxed else Boxed
        pats = evalState (mapM varPat argTys) scope
    in
    TuplePat pats b argTys
  | otherwise = ConPatIn (noLoc (nameRdrName (dataConName dc))) deets
  -- ConPatIn (noLoc . nameRdrName $ dataConName dc) deets
  where
  deets :: HsConPatDetails RdrName
  deets
    | dataConIsInfix dc = case argTys of
      [x, y] -> evalState (InfixCon <$> varPat x <*> varPat y) scope
      _ -> error "Unexpected number of arguments to an infix constructor."
    -- TODO: Records
    | otherwise =
      case dataConFieldLabels dc of
        [] -> PrefixCon (evalState (mapM varPat argTys) scope)
        fs -> RecCon $
          HsRecFields
          { rec_flds = map (\l -> HsRecField (noLoc (Exact l)) (noLoc $ WildPat undefined) pun) fs
          , rec_dotdot = Nothing
          }
          where pun = True
-- | A fresh variable pattern whose name is derived from the type via
-- 'typePrefix', made unique by 'freshWithPrefix'.
varPat :: Type -> Scoped (LPat RdrName)
varPat t = fmap (noLoc . VarPat . mkVarUnqual) (freshWithPrefix (typePrefix t))
-- | Produce a name not already in the scope set by appending primes to
-- the prefix until it is unique, recording the chosen name as used.
freshWithPrefix :: FastString -> Scoped FastString
freshWithPrefix pre = do
  used <- get
  if pre `S.member` used
    then freshWithPrefix (appendFS pre (fsLit "'"))
    else do
      modify (S.insert pre)
      return pre
-- Should be a normalized type as argument, though not with
-- synonyms expanded
-- | Derive a human-friendly variable-name prefix from a type, e.g. a
-- list type gets a plural, a function type "a_to_b", a tycon its
-- lower-cased initials.
typePrefix :: Type -> FastString
typePrefix = fsLit . typePrefix' where
  typePrefix' = \case
    AppTy {} -> "x"
    -- TODO: This will probably break on infix tycons
    -- TODO: Special case maybe
    -- TODO: Special case either
    -- TODO: Type variables
    TyConApp tc args ->
      if isListTyCon tc
      -- NOTE(review): 'head args' assumes the list tycon is saturated
      -- (one argument) — confirm normalisation guarantees this.
      then typePrefix' (head args) ++ "s"
      else if isTupleTyCon tc
      then List.intercalate "_and_" (map typePrefix' args)
      else initials $ occNameString (occName (tyConName tc))
    FunTy s t -> concat [typePrefix' s, "_to_", typePrefix' t]
    ForAllTy _x t -> typePrefix' t
    LitTy t -> case t of
      StrTyLit fs -> unpackFS fs
      NumTyLit n -> '_' : show n
    TyVarTy _v -> "x"
    where
    -- Lower-cased first letters of the CamelCase words in a name.
    initials :: String -> [Char]
    initials (c : cs) = Char.toLower c : initials (dropWhile Char.isLower cs)
    initials [] = []
-- | Is this the built-in list type constructor?  Decided by comparing
-- the printed occurrence name against "[]".
isListTyCon :: TyCon -> Bool
isListTyCon tc = occNameString (occName (tyConName tc)) == "[]"
-- TODO: This version not working for some reason
-- isListTyCon tc = tc `hasKey` consDataConKey
-- noLoc
-- | Attach a bottoming 'SrcSpan'; forcing the location crashes.
dummyLocated :: a -> Located a
dummyLocated = L (error "dummyLocated")
-- | Unimplemented stub — evaluating it crashes.  (The signature places
-- no constraint on @m@, so any call site is likely ambiguous as well.)
newName :: m Name
newName = undefined
{-
patterns :: Type -> m [Pat id]
patterns t =
t' <- normaliseType t
unpeel t' >>= \case
Nothing -> [SigPatOut (dummyLocated $ VarPat )]
-}
-- cases :: HsType -> Ty
-- TODO: Refine with single constructor things
-- | Provenance of a 'Match' found while walking the module: which kind
-- of construct the match belongs to.
data MatchInfo id
  = Equation (Located id) -- name of the function in which this match is an equation
  | SingleLambda SrcSpan -- the srcspan of the whole lambda
  | CaseBranch
-- | All variable names bound by a match's patterns, each paired with a
-- "re-plugging" function that rebuilds the match with that variable's
-- pattern replaced by an arbitrary new pattern.  This is what lets a
-- chosen variable be expanded into constructor cases.
namesBound
  :: GenLocated t (Match id body) -> [(id, Pat id -> Match id body)]
namesBound (L _ (Match pats t rhs)) = listyPat (\pats' -> Match pats' t rhs) pats where
  goPat = \case
    WildPat _ -> []
    VarPat x -> [(x, id)]
    LazyPat p -> wrapWith LazyPat (goLPat p)
    AsPat x p -> wrapWith (AsPat x) (goLPat p)
    ParPat p -> wrapWith ParPat (goLPat p)
    BangPat p -> wrapWith BangPat (goLPat p)
    TuplePat ps b ts -> listyPat (\ps' -> TuplePat ps' b ts) ps
    ListPat ps t e -> listyPat (\ps' -> ListPat ps' t e) ps
    PArrPat ps t -> listyPat (\ps' -> PArrPat ps' t) ps
    ConPatIn c deets -> case deets of
      InfixCon a1 a2 -> listyPat (\[a1', a2'] -> ConPatIn c (InfixCon a1' a2')) [a1, a2]
      PrefixCon args -> listyPat (ConPatIn c . PrefixCon) args
      RecCon (HsRecFields {rec_flds, rec_dotdot}) -> case rec_dotdot of
        Just _ -> [] -- TODO: This should really expand out the dotdot
        Nothing ->
          concatMap (\(pre,fld,post) ->
            wrapWith (wrap pre fld post) . goLPat $ hsRecFieldArg fld)
            (listViews rec_flds)
          where
          wrap pre fld post lp =
            ConPatIn c $
            RecCon (HsRecFields (pre ++ fld { hsRecFieldArg = lp } : post) Nothing)
    ConPatOut {} -> error "TODO: ConPatOut"
    ViewPat {} -> error "TODO: ViewPat"
    SplicePat {} -> error "TODO: SplicePat"
    QuasiQuotePat {} -> error "TODO: QuasiQuotePat"
    LitPat {} -> []
    NPat {} -> []
    NPlusKPat {} -> error "TODO: NPlusKPat"
    SigPatIn lp bs -> wrapWith (\lp' -> SigPatIn lp' bs) (goLPat lp)
    SigPatOut lp t -> wrapWith (\lp' -> SigPatOut lp' t) (goLPat lp)
    CoPat co p t -> wrapWith (\p' -> CoPat co p' t) (goPat p)
  -- Post-compose every rebuilder with a wrapping constructor.
  wrapWith k = map (second (k.))
  listyPat :: ([LPat id] -> a) -> [LPat id] -> [(id, Pat id -> a)]
  listyPat k ps = concatMap (\(l, p, r) -> wrapWith (\p' -> k (l ++ p' : r)) (goLPat p)) (listViews ps)
  goLPat (L l p) = map (second (L l .)) (goPat p)
  -- All "one element in focus" views of a list; the prefix comes back
  -- reversed, which 'listyPat' compensates for via (++).
  listViews = go [] where
    go _ [] = []
    go pre (x:xs) = (pre, x, xs) : go (x:pre) xs
-- | One pattern per data constructor of the type ('conPattern'), or a
-- single fresh variable pattern when the type cannot be unpeeled.
patternsForType :: S.Set FastString -> Type -> M [LPat RdrName]
patternsForType scope ty =
  lift (unpeel ty) >>| \case
    Just dt -> map (noLoc . conPattern scope) dt
    Nothing -> [evalState (varPat ty) scope]
-- | The set of identifier strings in scope at a source location,
-- queried from the interval map recorded in the current 'FileData'.
scopeAt :: Ref MoteState -> SrcLoc -> M (S.Set FastString)
scopeAt stRef loc = do
  FileData {scopeMap} <- getFileDataErr stRef
  return $ S.fromList . map (occNameFS . occName . snd) . I.search loc $ scopeMap
-- | A case alternative per constructor pattern of the type, each with a
-- hole ('EWildPat') for its right-hand side, using names fresh with
-- respect to the scope at the given location.
matchesForTypeAt :: Ref MoteState -> Type -> SrcLoc -> M [Match RdrName (LHsExpr RdrName)]
matchesForTypeAt stRef ty loc = do
  scope <- scopeAt stRef loc
  fmap (map (\p -> Match [p] Nothing holyGRHSs)) (patternsForType scope ty)
  where
  -- A guard-free RHS whose body is a wildcard/hole expression.
  holyGRHSs :: GRHSs RdrName (LHsExpr RdrName)
  holyGRHSs = GRHSs [noLoc $ GRHS [] (noLoc EWildPat)] EmptyLocalBinds
-- TODO: We have an actual Var at our disposal now when we call this so the
-- string argument can be replaced with a Var argument
-- | For the variable named @var@ bound by some match enclosing @loc@,
-- compute the replacement matches obtained by expanding that variable
-- into one case per constructor of its type.  Returns 'Nothing' when
-- the type has no visible constructors; throws 'NoVariable' when no
-- enclosing match binds the name.
expansions
  :: Ref MoteState
  -> String
  -> Type
  -> SrcSpan
  -> HsModule RdrName
  -> M (Maybe
        ((LMatch RdrName (LHsExpr RdrName),
          MatchInfo RdrName),
         [Match RdrName (LHsExpr RdrName)]))
expansions stRef var ty loc mod =
  -- Innermost-first thanks to the accumulator order in
  -- 'containingMatchGroups'; take the first match binding the name.
  case findMap (\mgi@(lm,_) -> fmap (mgi,) . aListLookup varName . namesBound $ lm) mgs of
    Just (mgi, patPosn) -> do
      dcsMay <- lift $ unpeel ty
      scope <- scopeAt stRef (RealSrcLoc $ realSrcSpanStart (toRealSrcSpan loc))
      case dcsMay of
        Nothing -> return Nothing
        Just dcs -> do
          let matches = map (patPosn . conPattern scope) dcs
          logS stRef . show $ map (\(dc, args) -> (dataConIsInfix dc, map typePrefix args)) dcs -- lift (showSDocM . vcat . map (pprMatch (CaseAlt :: HsMatchContext RdrName)) $ matches)
          return $ Just (mgi, matches)
    Nothing -> throwError $ NoVariable var
  where
  varName = mkVarUnqual $ fsLit var
  mgs = containingMatchGroups loc mod
  findMap f = foldr (\x r -> f x <|> r) Nothing
  aListLookup k = fmap snd . List.find ((== k) . fst)
-- matchToExpand loc var
-- | Walk the module's AST down toward the span @loc@, collecting every
-- 'Match' (function equation, lambda, case branch) whose source span
-- contains it, paired with its 'MatchInfo'.  The accumulator is built
-- outside-in, so the head of the result is the innermost match.
containingMatchGroups :: SrcSpan -> HsModule id ->
  [(GenLocated SrcSpan (Match id (GenLocated SrcSpan (HsExpr id))), MatchInfo id)]
containingMatchGroups loc = goDecls [] . GHC.hsmodDecls where
  goDecls acc = goDecl acc . nextSubexpr loc
  goDecl acc = \case
    ValD bd -> goBind acc bd
    InstD (ClsInstD (ClsInstDecl {cid_binds})) ->
      goBind acc . nextSubexpr loc $ bagToList cid_binds
    _ -> acc
  goBind acc = \case
    FunBind {..} -> goMatchGroup (Equation fun_id) acc $ fun_matches
    PatBind {..} -> goGRHSs acc pat_rhs
    _ -> acc
  goMatchGroup mi acc =
    maybe acc (goLMatch mi acc) . List.find ((loc `isSubspanOf`) . getLoc) . GHC.mg_alts
  goLMatch mi acc lm@(L _ (Match _pats _ty grhss)) = goGRHSs ((lm, mi):acc) grhss
  goGRHSs acc (GRHSs { grhssGRHSs, grhssLocalBinds }) =
    case nextSubexpr' loc grhssGRHSs of
      Just g -> goGRHS acc g
      Nothing -> goLocalBinds acc grhssLocalBinds
  goLocalBinds acc = \case
    HsValBinds vb -> goValBinds acc vb
    HsIPBinds {} -> acc
    EmptyLocalBinds -> acc
  goValBinds acc vbs = goBind acc . nextSubexpr loc $ case vbs of
    ValBindsIn bs _sigs -> bagToList bs
    ValBindsOut rbs _sigs -> concatMap (bagToList . snd) rbs
  -- TODO: Guards should be returned too
  goGRHS acc (GRHS _gs b) = goLExpr acc b
  goLExpr acc (L l e) = case e of
    HsLamCase _ mg -> goMatchGroup CaseBranch acc mg
    HsLam mg -> goMatchGroup (SingleLambda l) acc mg
    HsApp a b -> goLExpr acc $ nextSubLExpr [a, b]
    OpApp a b _ c -> goLExpr acc $ nextSubLExpr [a, b, c]
    NegApp e' _ -> goLExpr acc e'
    HsPar e' -> goLExpr acc e'
    SectionL a b -> goLExpr acc $ nextSubLExpr [a, b]
    SectionR a b -> goLExpr acc $ nextSubLExpr [a, b]
    ExplicitTuple ts _ -> goLExpr acc . nextSubLExpr $ mapMaybe (\case {Present e -> Just e; _ -> Nothing}) ts
    HsCase _scrut mg -> goMatchGroup CaseBranch acc mg
    HsIf _ a b c -> goLExpr acc . nextSubLExpr $ [a, b, c]
    HsMultiIf _ grhss -> goGRHS acc . nextSubexpr loc $ grhss
    HsDo _ stmts _ -> goStmt acc . nextSubexpr loc $ stmts
    ExplicitList _ _ es -> goLExpr acc . nextSubLExpr $ es
    ExplicitPArr _ es -> goLExpr acc . nextSubLExpr $ es
    -- TODO: let expr
    _ -> acc
  -- Partial: assumes some subexpression contains 'loc'.
  nextSubLExpr = fromMaybe (error "Where?") . List.find ((loc `isSubspanOf`) . getLoc)
  goStmt acc = \case
    LastStmt e _synE -> goLExpr acc e -- TODO: figure out what the deal is with syntaxexpr
    BindStmt _lhs rhs _se _se' -> goLExpr acc rhs
    BodyStmt e _se _se' _ -> goLExpr acc e
    -- TODO
    -- LetStmt bs -> goLocalBinds acc bs
    _ -> acc
| imeckler/mote | Mote/Case.hs | bsd-3-clause | 13,461 | 137 | 24 | 4,095 | 4,063 | 2,175 | 1,888 | 253 | 28 |
-- | Skein 256 as a PRNG.
module Crypto.Threefish.Random (
SkeinGen, Block256, Random (..), RandomGen (..),
newSkeinGen, mkSkeinGen, mkSkeinGenEx, randomBytes, reseedSkeinGen,
toBlock, fromBlock
) where
import Crypto.Threefish.Skein
import Crypto.Threefish.Threefish256
import System.Random
import System.Entropy
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Data.ByteString.Unsafe
import System.IO.Unsafe
import Foreign.Storable (sizeOf, peek)
import Foreign.Ptr (castPtr)
import Data.Serialize
import Crypto.Random
import Data.Tagged
-- | A zero-length key, used for the unkeyed 'hash256' calls below.
emptyKey :: Key256
emptyKey = Block256 BS.empty
-- | Default amount of random bytes to buffer.
-- Larger pools amortise hashing cost over more requests; see
-- 'mkSkeinGenEx'.
defaultSkeinGenPoolSize :: Int
defaultSkeinGenPoolSize = 256
-- | Skein-based PRNG as defined in the Skein 1.3 paper.
data SkeinGen = SkeinGen {
    sgState :: Block256,     -- ^ 256-bit internal generator state
    sgPool :: BS.ByteString, -- ^ generated bytes not yet handed out
    sgPoolSize :: Int        -- ^ target pool size when refilling
  }
instance RandomGen SkeinGen where
  -- Draw one machine word's worth of bytes and reinterpret them as Int;
  -- unsafePerformIO is confined to peeking the freshly produced buffer.
  next g =
    case randomBytes (sizeOf (0::Int)) g of
      (bs, g') -> (unsafePerformIO $ unsafeUseAsCString bs $ peek . castPtr, g')
  -- Split by drawing 64 bytes and seeding two new generators from the
  -- two halves; the pool size is preserved across the split.
  split g =
    case BS.splitAt 32 (fst $ randomBytes 64 g) of
      (a, b) -> (mkSkeinGenEx (sgPoolSize g) (Block256 a),
                 mkSkeinGenEx (sgPoolSize g) (Block256 b))
-- | Create a new Skein PRNG, seeded with 32 bytes drawn from the
-- system's entropy pool and using the default pool size.
newSkeinGen :: IO SkeinGen
newSkeinGen = do
  seed <- getEntropy 32
  return (mkSkeinGenEx defaultSkeinGenPoolSize (Block256 seed))
-- | Create a Skein PRNG from a seed.
-- Any serialisable value works: its 'encode'd bytes become the seed
-- block, and the default pool size is used.
mkSkeinGen :: Serialize a => a -> SkeinGen
mkSkeinGen = mkSkeinGenEx defaultSkeinGenPoolSize . Block256 . encode
-- | Create a Skein PRNG with a custom pool size. Larger pool sizes give faster
-- random data, but obviously take up more memory. Pool size is preserved
-- across splits.
mkSkeinGenEx :: Int -> Block256 -> SkeinGen
mkSkeinGenEx poolsize (Block256 seed) = SkeinGen {
    -- Initial state: Skein over 32 zero bytes prepended to the seed.
    sgState = skein $ BSL.fromStrict (BS.replicate 32 0 `BS.append` seed),
    sgPool = BS.empty,
    sgPoolSize = poolsize
  }
-- | Reseed a Skein PRNG.
-- The new state hashes the old state together with the fresh seed, so
-- prior entropy is retained; the buffered pool is discarded.
reseedSkeinGen :: Block256 -> SkeinGen -> SkeinGen
reseedSkeinGen (Block256 seed) (SkeinGen (Block256 state) _ poolsize) =
  SkeinGen {
    sgState = skein $ BSL.fromStrict (state `BS.append` seed),
    sgPool = BS.empty,
    sgPoolSize = poolsize
  }
-- | Generate n random bytes using the given generator.
-- Serves from the buffered pool when possible; otherwise hashes the
-- current state to produce a new state (first 32 bytes of the digest)
-- plus enough output to satisfy the request and refill the pool.
randomBytes :: Int -> SkeinGen -> (BS.ByteString, SkeinGen)
randomBytes nbytes (SkeinGen (Block256 state) pool poolsize)
  | BS.length pool >= nbytes =
    case BS.splitAt nbytes pool of
      (output, rest) -> (output, SkeinGen (Block256 state) rest poolsize)
  | otherwise =
    (BS.append pool out, SkeinGen (Block256 state') pool' poolsize)
  where
    -- Use all of the output to avoid making unnecessary calls
    -- 32 bytes for the next state + request rounded up to a multiple of
    -- 32, or the pool size, whichever is larger.
    nbytes' = fromIntegral $ 32 + max (nbytes + (32-(nbytes`rem`32))) poolsize
    bytes = hash256 nbytes' emptyKey (BSL.fromStrict state)
    (state', buffer) = BS.splitAt 32 bytes
    (out, pool') = BS.splitAt (nbytes - BS.length pool) buffer
instance CryptoRandomGen SkeinGen where
  -- Requires at least 32 bytes of seed; surplus bytes are ignored.
  newGen seed =
    case BS.length seed of
      n | n >= 32 ->
          Right $ mkSkeinGenEx ps (Block256 $ BS.take 32 seed)
        | otherwise ->
          Left NotEnoughEntropy
      where ps = defaultSkeinGenPoolSize
  genSeedLength = Tagged 32
  -- Generation itself cannot fail.
  genBytes n g = Right $ randomBytes n g
  -- This generator never demands reseeding on its own.
  reseedInfo = const Never
  reseedPeriod = const Never
  -- Mixes 32 seed bytes into the existing state; see 'reseedSkeinGen'.
  reseed seed g =
    case BS.length seed of
      n | n >= 32 ->
          Right $ reseedSkeinGen (Block256 $ BS.take 32 seed) g
        | otherwise ->
          Left NotEnoughEntropy
| valderman/threefish | Crypto/Threefish/Random.hs | bsd-3-clause | 3,590 | 0 | 15 | 765 | 1,034 | 560 | 474 | 78 | 1 |
{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE NoMonoLocalBinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- | Planning how to build everything in a project.
--
module Distribution.Client.ProjectPlanning (
-- * elaborated install plan types
ElaboratedInstallPlan,
ElaboratedConfiguredPackage(..),
ElaboratedPlanPackage,
ElaboratedSharedConfig(..),
ElaboratedReadyPackage,
BuildStyle(..),
CabalFileText,
-- * Producing the elaborated install plan
rebuildInstallPlan,
-- * Build targets
PackageTarget(..),
ComponentTarget(..),
SubComponentTarget(..),
showComponentTarget,
-- * Selecting a plan subset
pruneInstallPlanToTargets,
pruneInstallPlanToDependencies,
-- * Utils required for building
pkgHasEphemeralBuildTargets,
elabBuildTargetWholeComponents,
-- * Setup.hs CLI flags for building
setupHsScriptOptions,
setupHsConfigureFlags,
setupHsConfigureArgs,
setupHsBuildFlags,
setupHsBuildArgs,
setupHsReplFlags,
setupHsReplArgs,
setupHsCopyFlags,
setupHsRegisterFlags,
setupHsHaddockFlags,
packageHashInputs,
-- TODO: [code cleanup] utils that should live in some shared place?
createPackageDBIfMissing
) where
import Distribution.Client.ProjectPlanning.Types
import Distribution.Client.PackageHash
import Distribution.Client.RebuildMonad
import Distribution.Client.ProjectConfig
import Distribution.Client.ProjectPlanOutput
import Distribution.Client.Types
import qualified Distribution.Client.InstallPlan as InstallPlan
import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan
import Distribution.Client.Dependency
import Distribution.Client.Dependency.Types
import qualified Distribution.Client.IndexUtils as IndexUtils
import Distribution.Client.Targets (userToPackageConstraint)
import Distribution.Client.DistDirLayout
import Distribution.Client.SetupWrapper
import Distribution.Client.JobControl
import Distribution.Client.FetchUtils
import qualified Hackage.Security.Client as Sec
import Distribution.Client.Setup hiding (packageName, cabalVersion)
import Distribution.Utils.NubList
import qualified Distribution.Solver.Types.ComponentDeps as CD
import Distribution.Solver.Types.ComponentDeps (ComponentDeps)
import Distribution.Solver.Types.ConstraintSource
import Distribution.Solver.Types.LabeledPackageConstraint
import Distribution.Solver.Types.OptionalStanza
import Distribution.Solver.Types.PkgConfigDb
import Distribution.Solver.Types.ResolverPackage
import Distribution.Solver.Types.SolverId
import Distribution.Solver.Types.SolverPackage
import Distribution.Solver.Types.InstSolverPackage
import Distribution.Solver.Types.SourcePackage
import Distribution.Package hiding
(InstalledPackageId, installedPackageId)
import Distribution.System
import qualified Distribution.InstalledPackageInfo as Installed
import qualified Distribution.PackageDescription as Cabal
import qualified Distribution.PackageDescription as PD
import qualified Distribution.PackageDescription.Configuration as PD
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.Compiler hiding (Flag)
import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate
import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate
import Distribution.Simple.Program
import Distribution.Simple.Program.Db
import Distribution.Simple.Program.Find
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Setup
(Flag, toFlag, flagToMaybe, flagToList, fromFlagOrDefault)
import qualified Distribution.Simple.Configure as Cabal
import qualified Distribution.Simple.LocalBuildInfo as Cabal
import Distribution.Simple.LocalBuildInfo (ComponentName(..))
import qualified Distribution.Simple.Register as Cabal
import qualified Distribution.Simple.InstallDirs as InstallDirs
import Distribution.Simple.Utils hiding (matchFileGlob)
import Distribution.Version
import Distribution.Verbosity
import Distribution.Text
import qualified Distribution.Compat.Graph as Graph
import Distribution.Compat.Graph(IsNode(..))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Control.Monad
import Control.Monad.State as State
import Control.Exception
import Data.Typeable
import Data.List
import Data.Maybe
import Data.Either
import Data.Monoid
import Data.Function
import System.FilePath
import System.Directory (doesDirectoryExist, getDirectoryContents)
------------------------------------------------------------------------------
-- * Elaborated install plan
------------------------------------------------------------------------------
-- "Elaborated" -- worked out with great care and nicety of detail;
-- executed with great minuteness: elaborate preparations;
-- elaborate care.
--
-- So here's the idea:
--
-- Rather than a miscellaneous collection of 'ConfigFlags', 'InstallFlags' etc
-- all passed in as separate args and which are then further selected,
-- transformed etc during the execution of the build. Instead we construct
-- an elaborated install plan that includes everything we will need, and then
-- during the execution of the plan we do as little transformation of this
-- info as possible.
--
-- So we're trying to split the work into two phases: construction of the
-- elaborated install plan (which as far as possible should be pure) and
-- then simple execution of that plan without any smarts, just doing what the
-- plan says to do.
--
-- So that means we need a representation of this fully elaborated install
-- plan. The representation consists of two parts:
--
-- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a
-- representation of source packages that includes a lot more detail about
-- that package's individual configuration
--
-- * A 'ElaboratedSharedConfig'. Some package configuration is the same for
-- every package in a plan. Rather than duplicate that info every entry in
-- the 'GenericInstallPlan' we keep that separately.
--
-- The division between the shared and per-package config is /not set in stone
-- for all time/. For example if we wanted to generalise the install plan to
-- describe a situation where we want to build some packages with GHC and some
-- with GHCJS then the platform and compiler would no longer be shared between
-- all packages but would have to be per-package (probably with some sanity
-- condition on the graph structure).
--
-- Refer to ProjectPlanning.Types for details of these important types:
-- type ElaboratedInstallPlan = ...
-- type ElaboratedPlanPackage = ...
-- data ElaboratedSharedConfig = ...
-- data ElaboratedConfiguredPackage = ...
-- data BuildStyle =
-- | Check that an 'ElaboratedConfiguredPackage' actually makes
-- sense under some 'ElaboratedSharedConfig'.
--
-- Written as a chain of 'assert's applied to a continuation value, so
-- with assertions compiled out this is just 'id'.
sanityCheckElaboratedConfiguredPackage
    :: ElaboratedSharedConfig
    -> ElaboratedConfiguredPackage
    -> a
    -> a
sanityCheckElaboratedConfiguredPackage sharedConfig
                             elab@ElaboratedConfiguredPackage{..} =
    (case elabPkgOrComp of
        ElabPackage pkg -> sanityCheckElaboratedPackage elab pkg
        ElabComponent comp -> sanityCheckElaboratedComponent elab comp)
    -- either a package is being built inplace, or the
    -- 'installedPackageId' we assigned is consistent with
    -- the 'hashedInstalledPackageId' we would compute from
    -- the elaborated configured package
  . assert (elabBuildStyle == BuildInplaceOnly ||
     unitIdComponentId elabUnitId == hashedInstalledPackageId
                                        (packageHashInputs sharedConfig elab))
    -- the stanzas explicitly disabled should not be available
  . assert (Set.null (Map.keysSet (Map.filter not elabStanzasRequested)
                `Set.intersection` elabStanzasAvailable))
    -- either a package is built inplace, or we are not attempting to
    -- build any test suites or benchmarks (we never build these
    -- for remote packages!)
  . assert (elabBuildStyle == BuildInplaceOnly ||
            Set.null elabStanzasAvailable)
-- | Component-level sanity check: test-suite and benchmark components
-- may only be built inplace, never as installed (remote) units.
sanityCheckElaboratedComponent
    :: ElaboratedConfiguredPackage
    -> ElaboratedComponent
    -> a
    -> a
sanityCheckElaboratedComponent ElaboratedConfiguredPackage{..}
                               ElaboratedComponent{..} =
    -- Should not be building bench or test if not inplace.
    assert (elabBuildStyle == BuildInplaceOnly ||
            case compComponentName of
                Nothing -> True
                Just CLibName -> True
                Just (CSubLibName _) -> True
                Just (CExeName _) -> True
                Just (CBenchName _) -> False
                Just (CTestName _) -> False)
-- | Package-level sanity check relating requested, enabled and
-- available optional stanzas.
sanityCheckElaboratedPackage
    :: ElaboratedConfiguredPackage
    -> ElaboratedPackage
    -> a
    -> a
sanityCheckElaboratedPackage ElaboratedConfiguredPackage{..}
                             ElaboratedPackage{..} =
    -- we should only have enabled stanzas that actually can be built
    -- (according to the solver)
    assert (pkgStanzasEnabled `Set.isSubsetOf` elabStanzasAvailable)
    -- the stanzas that the user explicitly requested should be
    -- enabled (by the previous test, they are also available)
  . assert (Map.keysSet (Map.filter id elabStanzasRequested)
              `Set.isSubsetOf` pkgStanzasEnabled)
------------------------------------------------------------------------------
-- * Deciding what to do: making an 'ElaboratedInstallPlan'
------------------------------------------------------------------------------
-- | Return an up-to-date elaborated install plan and associated config.
--
-- Two variants of the install plan are returned: with and without packages
-- from the store. That is, the \"improved\" plan where source packages are
-- replaced by pre-existing installed packages from the store (when their ids
-- match), and also the original elaborated plan which uses primarily source
-- packages.
-- The improved plan is what we use for building, but the original elaborated
-- plan is useful for reporting and configuration. For example the @freeze@
-- command needs the source package info to know about flag choices and
-- dependencies of executables and setup scripts.
--
rebuildInstallPlan :: Verbosity
-> FilePath -> DistDirLayout -> CabalDirLayout
-> ProjectConfig
-> IO ( ElaboratedInstallPlan -- with store packages
, ElaboratedInstallPlan -- with source packages
, ElaboratedSharedConfig
, ProjectConfig )
-- ^ @(improvedPlan, elaboratedPlan, _, _)@
rebuildInstallPlan verbosity
projectRootDir
distDirLayout@DistDirLayout {
distDirectory,
distProjectCacheFile,
distProjectCacheDirectory
}
cabalDirLayout@CabalDirLayout {
cabalStoreDirectory,
cabalStorePackageDB
}
cliConfig =
runRebuild projectRootDir $ do
progsearchpath <- liftIO $ getSystemSearchPath
let cliConfigPersistent = cliConfig { projectConfigBuildOnly = mempty }
-- The overall improved plan is cached
rerunIfChanged verbosity fileMonitorImprovedPlan
-- react to changes in command line args and the path
(cliConfigPersistent, progsearchpath) $ do
-- And so is the elaborated plan that the improved plan based on
(elaboratedPlan, elaboratedShared,
projectConfig) <-
rerunIfChanged verbosity fileMonitorElaboratedPlan
(cliConfigPersistent, progsearchpath) $ do
(projectConfig, projectConfigTransient) <- phaseReadProjectConfig
localPackages <- phaseReadLocalPackages projectConfig
compilerEtc <- phaseConfigureCompiler projectConfig
_ <- phaseConfigurePrograms projectConfig compilerEtc
(solverPlan, pkgConfigDB)
<- phaseRunSolver projectConfigTransient
compilerEtc
localPackages
(elaboratedPlan,
elaboratedShared) <- phaseElaboratePlan projectConfigTransient
compilerEtc pkgConfigDB
solverPlan
localPackages
return (elaboratedPlan, elaboratedShared, projectConfig)
-- The improved plan changes each time we install something, whereas
-- the underlying elaborated plan only changes when input config
-- changes, so it's worth caching them separately.
improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared
phaseMaintainPlanOutputs improvedPlan elaboratedPlan elaboratedShared
return (improvedPlan, elaboratedPlan, elaboratedShared, projectConfig)
where
fileMonitorCompiler = newFileMonitorInCacheDir "compiler"
fileMonitorSolverPlan = newFileMonitorInCacheDir "solver-plan"
fileMonitorSourceHashes = newFileMonitorInCacheDir "source-hashes"
fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan"
fileMonitorImprovedPlan = newFileMonitorInCacheDir "improved-plan"
newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b
newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile
    -- Read the cabal.project (or implicit config) and combine it with
    -- arguments from the command line
    --
    phaseReadProjectConfig :: Rebuild (ProjectConfig, ProjectConfig)
    phaseReadProjectConfig = do
      liftIO $ do
        info verbosity "Project settings changed, reconfiguring..."
        createDirectoryIfMissingVerbose verbosity True distDirectory
        createDirectoryIfMissingVerbose verbosity True distProjectCacheDirectory

      projectConfig <- readProjectConfig verbosity projectRootDir

      -- The project config coming from the command line includes "build only"
      -- flags that we don't cache persistently (because like all "build only"
      -- flags they do not affect the value of the outcome) but that we do
      -- sometimes use during planning (in particular the http transport)
      let projectConfigTransient  = projectConfig <> cliConfig
          projectConfigPersistent = projectConfig
                                 <> cliConfig {
                                      projectConfigBuildOnly = mempty
                                    }
      liftIO $ writeProjectConfigFile (distProjectCacheFile "config")
                                      projectConfigPersistent
      -- Return both: the persistent one is what gets cached on disk, the
      -- transient one (with build-only flags) is what planning uses.
      return (projectConfigPersistent, projectConfigTransient)
-- Look for all the cabal packages in the project
-- some of which may be local src dirs, tarballs etc
--
phaseReadLocalPackages :: ProjectConfig
-> Rebuild [UnresolvedSourcePackage]
phaseReadLocalPackages projectConfig = do
localCabalFiles <- findProjectPackages projectRootDir projectConfig
mapM (readSourcePackage verbosity) localCabalFiles
    -- Configure the compiler we're using.
    --
    -- This is moderately expensive and doesn't change that often so we cache
    -- it independently.
    --
    phaseConfigureCompiler :: ProjectConfig
                           -> Rebuild (Compiler, Platform, ProgramDb)
    phaseConfigureCompiler ProjectConfig {
                             projectConfigShared = ProjectConfigShared {
                               projectConfigHcFlavor,
                               projectConfigHcPath,
                               projectConfigHcPkg
                             },
                             projectConfigLocalPackages = PackageConfig {
                               packageConfigProgramPaths,
                               packageConfigProgramArgs,
                               packageConfigProgramPathExtra
                             }
                           } = do
        -- The system search path is part of the cache key: a changed PATH can
        -- change which compiler/tools are found.
        progsearchpath <- liftIO $ getSystemSearchPath
        rerunIfChanged verbosity fileMonitorCompiler
                       (hcFlavor, hcPath, hcPkg, progsearchpath,
                        packageConfigProgramPaths,
                        packageConfigProgramArgs,
                        packageConfigProgramPathExtra) $ do

          liftIO $ info verbosity "Compiler settings changed, reconfiguring..."
          result@(_, _, progdb') <- liftIO $
            Cabal.configCompilerEx
              hcFlavor hcPath hcPkg
              progdb verbosity

          -- Note that we added the user-supplied program locations and args
          -- for /all/ programs, not just those for the compiler prog and
          -- compiler-related utils. In principle we don't know which programs
          -- the compiler will configure (and it does vary between compilers).
          -- We do know however that the compiler will only configure the
          -- programs it cares about, and those are the ones we monitor here.
          monitorFiles (programsMonitorFiles progdb')

          return result
      where
        hcFlavor = flagToMaybe projectConfigHcFlavor
        hcPath   = flagToMaybe projectConfigHcPath
        hcPkg    = flagToMaybe projectConfigHcPkg
        -- Seed the program db with all user-specified paths/args and any
        -- extra search-path entries before handing it to configCompilerEx.
        progdb   =
            userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths))
          . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs))
          . modifyProgramSearchPath
              (++ [ ProgramSearchPathDir dir
                  | dir <- fromNubList packageConfigProgramPathExtra ])
          $ defaultProgramDb
    -- Configuring other programs.
    --
    -- Having configured the compiler, now we configure all the remaining
    -- programs. This is to check we can find them, and to monitor them for
    -- changes.
    --
    -- TODO: [required eventually] we don't actually do this yet.
    --
    -- We rely on the fact that the previous phase added the program config for
    -- all local packages, but that all the programs configured so far are the
    -- compiler program or related util programs.
    --
    phaseConfigurePrograms :: ProjectConfig
                           -> (Compiler, Platform, ProgramDb)
                           -> Rebuild ()
    phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do
        -- Users are allowed to specify program locations independently for
        -- each package (e.g. to use a particular version of a pre-processor
        -- for some packages). However they cannot do this for the compiler
        -- itself as that's just not going to work. So we check for this.
        liftIO $ checkBadPerPackageCompilerPaths
          (configuredPrograms compilerprogdb)
          (getMapMappend (projectConfigSpecificPackage projectConfig))

        --TODO: [required eventually] find/configure other programs that the
        -- user specifies.

        --TODO: [required eventually] find/configure all build-tools
        -- but note that some of them may be built as part of the plan.
    -- Run the solver to get the initial install plan.
    -- This is expensive so we cache it independently.
    --
    phaseRunSolver :: ProjectConfig
                   -> (Compiler, Platform, ProgramDb)
                   -> [UnresolvedSourcePackage]
                   -> Rebuild (SolverInstallPlan, PkgConfigDb)
    phaseRunSolver projectConfig@ProjectConfig {
                     projectConfigShared,
                     projectConfigBuildOnly
                   }
                   (compiler, platform, progdb)
                   localPackages =
        -- The cache key covers every solver input; note that the program db
        -- is reduced to its stable 'programDbSignature' first.
        rerunIfChanged verbosity fileMonitorSolverPlan
                       (solverSettings,
                        localPackages, localPackagesEnabledStanzas,
                        compiler, platform, programDbSignature progdb) $ do

          installedPkgIndex <- getInstalledPackages verbosity
                                                    compiler progdb platform
                                                    corePackageDbs
          sourcePkgDb       <- getSourcePackages verbosity withRepoCtx
          pkgConfigDB       <- getPkgConfigDb verbosity progdb

          --TODO: [code cleanup] it'd be better if the Compiler contained the
          -- ConfiguredPrograms that it needs, rather than relying on the progdb
          -- since we don't need to depend on all the programs here, just the
          -- ones relevant for the compiler.

          liftIO $ do
            solver <- chooseSolver verbosity
                                   (solverSettingSolver solverSettings)
                                   (compilerInfo compiler)

            notice verbosity "Resolving dependencies..."
            plan <- foldProgress logMsg die return $
              planPackages compiler platform solver solverSettings
                           installedPkgIndex sourcePkgDb pkgConfigDB
                           localPackages localPackagesEnabledStanzas
            return (plan, pkgConfigDB)
      where
        corePackageDbs = [GlobalPackageDB]
        withRepoCtx    = projectConfigWithSolverRepoContext verbosity
                           projectConfigShared
                           projectConfigBuildOnly
        solverSettings = resolveSolverSettings projectConfig
        logMsg message rest = debugNoWrap verbosity message >> rest

        -- Per-package map of which optional stanzas (tests/benchmarks) the
        -- user explicitly enabled or disabled in the project config.
        localPackagesEnabledStanzas =
          Map.fromList
            [ (pkgname, stanzas)
            | pkg <- localPackages
            , let pkgname = packageName pkg
                  testsEnabled = lookupLocalPackageConfig
                                   packageConfigTests
                                   projectConfig pkgname
                  benchmarksEnabled = lookupLocalPackageConfig
                                        packageConfigBenchmarks
                                        projectConfig pkgname
                  stanzas =
                    Map.fromList $
                      [ (TestStanzas, enabled)
                      | enabled <- flagToList testsEnabled ]
                   ++ [ (BenchStanzas , enabled)
                      | enabled <- flagToList benchmarksEnabled ]
            ]
    -- Elaborate the solver's install plan to get a fully detailed plan. This
    -- version of the plan has the final nix-style hashed ids.
    --
    phaseElaboratePlan :: ProjectConfig
                       -> (Compiler, Platform, ProgramDb)
                       -> PkgConfigDb
                       -> SolverInstallPlan
                       -> [SourcePackage loc]
                       -> Rebuild ( ElaboratedInstallPlan
                                  , ElaboratedSharedConfig )
    phaseElaboratePlan ProjectConfig {
                         projectConfigShared,
                         projectConfigLocalPackages,
                         projectConfigSpecificPackage,
                         projectConfigBuildOnly
                       }
                       (compiler, platform, progdb) pkgConfigDB
                       solverPlan localPackages = do

        liftIO $ debug verbosity "Elaborating the install plan..."

        -- Source hashes are cached separately, keyed on the set of package
        -- locations in the solver plan.
        sourcePackageHashes <-
          rerunIfChanged verbosity fileMonitorSourceHashes
                         (packageLocationsSignature solverPlan) $
            getPackageSourceHashes verbosity withRepoCtx solverPlan

        defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler
        let (elaboratedPlan, elaboratedShared) =
              elaborateInstallPlan
                platform compiler progdb pkgConfigDB
                distDirLayout
                cabalDirLayout
                solverPlan
                localPackages
                sourcePackageHashes
                defaultInstallDirs
                projectConfigShared
                projectConfigLocalPackages
                (getMapMappend projectConfigSpecificPackage)
        liftIO $ debugNoWrap verbosity (InstallPlan.showInstallPlan elaboratedPlan)
        return (elaboratedPlan, elaboratedShared)
      where
        withRepoCtx = projectConfigWithSolverRepoContext verbosity
                        projectConfigShared
                        projectConfigBuildOnly
-- Update the files we maintain that reflect our current build environment.
-- In particular we maintain a JSON representation of the elaborated
-- install plan.
--
-- TODO: [required eventually] maintain the ghc environment file reflecting
-- the libs available. This will need to be after plan improvement phase.
--
phaseMaintainPlanOutputs :: ElaboratedInstallPlan
-> ElaboratedInstallPlan
-> ElaboratedSharedConfig
-> Rebuild ()
phaseMaintainPlanOutputs _improvedPlan elaboratedPlan elaboratedShared = do
liftIO $ debug verbosity "Updating plan.json"
liftIO $ writePlanExternalRepresentation
distDirLayout
elaboratedPlan
elaboratedShared
    -- Improve the elaborated install plan. The elaborated plan consists
    -- mostly of source packages (with full nix-style hashed ids). Where
    -- corresponding installed packages already exist in the store, replace
    -- them in the plan.
    --
    -- Note that we do monitor the store's package db here, so we will redo
    -- this improvement phase when the db changes -- including as a result of
    -- executing a plan and installing things.
    --
    phaseImprovePlan :: ElaboratedInstallPlan
                     -> ElaboratedSharedConfig
                     -> Rebuild ElaboratedInstallPlan
    phaseImprovePlan elaboratedPlan elaboratedShared = do

        liftIO $ debug verbosity "Improving the install plan..."
        recreateDirectory verbosity True storeDirectory
        storePkgIndex <- getPackageDBContents verbosity
                                              compiler progdb platform
                                              storePackageDb
        storeExeIndex <- getExecutableDBContents storeDirectory
        let improvedPlan = improveInstallPlanWithPreExistingPackages
                             storePkgIndex
                             storeExeIndex
                             elaboratedPlan
        liftIO $ debugNoWrap verbosity (InstallPlan.showInstallPlan improvedPlan)
        return improvedPlan
      where
        -- Store locations are keyed by the compiler id so different
        -- compiler versions get independent stores.
        storeDirectory = cabalStoreDirectory (compilerId compiler)
        storePackageDb = cabalStorePackageDB (compilerId compiler)
        ElaboratedSharedConfig {
          pkgConfigPlatform      = platform,
          pkgConfigCompiler      = compiler,
          pkgConfigCompilerProgs = progdb
        } = elaboratedShared
-- | All the files to monitor for the configured programs: every monitored
-- file found along each program's search path.
programsMonitorFiles :: ProgramDb -> [MonitorFilePath]
programsMonitorFiles progdb =
    concatMap monitorsFor (configuredPrograms progdb)
  where
    monitorsFor prog = monitorFileSearchPath (programMonitorFiles prog)
                                             (programPath prog)
-- | Select the bits of a 'ProgramDb' to monitor for value changes.
-- Use 'programsMonitorFiles' for the files to monitor.
--
programDbSignature :: ProgramDb -> [ConfiguredProgram]
programDbSignature progdb =
    map stripVolatile (configuredPrograms progdb)
  where
    -- Drop the monitored-files list and any PATH override: those vary
    -- without changing the identity of the configured program.
    stripVolatile prog =
      prog { programMonitorFiles = []
           , programOverrideEnv  = filter ((/= "PATH") . fst)
                                          (programOverrideEnv prog)
           }
-- | Read the installed-package index for the given package db stack,
-- first registering the db files\/dirs with the 'Rebuild' monitor so the
-- enclosing cached phase is invalidated when they change.
getInstalledPackages :: Verbosity
                     -> Compiler -> ProgramDb -> Platform
                     -> PackageDBStack
                     -> Rebuild InstalledPackageIndex
getInstalledPackages verbosity compiler progdb platform packagedbs = do
    -- Monitor the package db files/dirs before reading them.
    monitorFiles . map monitorFileOrDirectory
      =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles
                    verbosity compiler
                    packagedbs progdb platform)
    liftIO $ IndexUtils.getInstalledPackages
               verbosity compiler
               packagedbs progdb
-- | Read the contents of a single package db (creating it first if it is
-- missing), monitoring its files\/dirs for the enclosing cached phase.
getPackageDBContents :: Verbosity
                     -> Compiler -> ProgramDb -> Platform
                     -> PackageDB
                     -> Rebuild InstalledPackageIndex
getPackageDBContents verbosity compiler progdb platform packagedb = do
    -- Monitor the db files/dirs before reading them.
    monitorFiles . map monitorFileOrDirectory
      =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles
                    verbosity compiler
                    [packagedb] progdb platform)
    liftIO $ do
      createPackageDBIfMissing verbosity compiler progdb packagedb
      Cabal.getPackageDBContents verbosity compiler
                                 packagedb progdb
-- | Return the list of all already installed executables
getExecutableDBContents
  :: FilePath -- store directory
  -> Rebuild (Set ComponentId)
getExecutableDBContents storeDirectory = do
    -- Glob-monitor the whole store directory so fresh installs invalidate
    -- the enclosing cached phase.
    monitorFiles [monitorFileGlob (FilePathGlob (FilePathRoot storeDirectory)
                                                (GlobFile [WildCard]))]
    entries <- liftIO $ getDirectoryContents storeDirectory
    return $ Set.fromList [ ComponentId entry
                          | entry <- entries
                          , entry `notElem` [".", "..", "package.db"] ]
-- | Read the source package index from all configured repositories,
-- monitoring each repo's index files for the enclosing cached phase.
getSourcePackages :: Verbosity -> (forall a. (RepoContext -> IO a) -> IO a)
                  -> Rebuild SourcePackageDb
getSourcePackages verbosity withRepoCtx = do
    (sourcePkgDb, repos) <-
      liftIO $
        withRepoCtx $ \repoctx -> do
          sourcePkgDb <- IndexUtils.getSourcePackages verbosity repoctx
          return (sourcePkgDb, repoContextRepos repoctx)

    monitorFiles . map monitorFile
                 . IndexUtils.getSourcePackagesMonitorFiles
                 $ repos
    return sourcePkgDb
-- | Create a package DB if it does not currently exist. Note that this action
-- is /not/ safe to run concurrently.
--
-- Only a 'SpecificPackageDB' can be created here; the global and user dbs
-- are managed by the compiler installation, so those cases are a no-op.
createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb
                         -> PackageDB -> IO ()
createPackageDBIfMissing verbosity compiler progdb
                         (SpecificPackageDB dbPath) = do
    -- Probe first so we never clobber an existing db.
    -- (The original wrapped this in 'liftIO', which is redundant inside a
    -- plain 'IO' action: 'liftIO' at type IO is the identity.)
    exists <- Cabal.doesPackageDBExist dbPath
    unless exists $ do
      createDirectoryIfMissingVerbose verbosity True (takeDirectory dbPath)
      Cabal.createPackageDB verbosity compiler progdb False dbPath
createPackageDBIfMissing _ _ _ _ = return ()
-- | Read the pkg-config database, monitoring the directories it lives in
-- so that new or removed @.pc@ files invalidate the cached phase.
getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb
getPkgConfigDb verbosity progdb = do
    dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb
    -- Just monitor the dirs so we'll notice new .pc files.
    -- Alternatively we could monitor all the .pc files too.
    forM_ dirs $ \dir -> do
      dirExists <- liftIO $ doesDirectoryExist dir
      -- TODO: turn this into a utility function
      monitorFiles [if dirExists
                      then monitorDirectory dir
                      else monitorNonExistentDirectory dir]
    liftIO $ readPkgConfigDb verbosity progdb
-- | Ensure a directory exists (optionally creating parents) and monitor
-- its existence for the enclosing cached phase.
recreateDirectory :: Verbosity -> Bool -> FilePath -> Rebuild ()
recreateDirectory verbosity createParents dir =
    liftIO (createDirectoryIfMissingVerbose verbosity createParents dir)
      >> monitorFiles [monitorDirectoryExistence dir]
-- | Select the config values to monitor for changes package source hashes.
packageLocationsSignature :: SolverInstallPlan
                          -> [(PackageId, PackageLocation (Maybe FilePath))]
packageLocationsSignature solverPlan =
    concatMap signatureEntry (SolverInstallPlan.toList solverPlan)
  where
    -- Only configured (to-be-built) packages contribute; pre-existing
    -- installed packages have no source location to hash.
    signatureEntry (SolverInstallPlan.Configured
                      (SolverPackage { solverPkgSource = pkg }))
        = [(packageId pkg, packageSource pkg)]
    signatureEntry _ = []
-- | Get the 'HashValue' for all the source packages where we use hashes,
-- and download any packages required to do so.
--
-- Note that we don't get hashes for local unpacked packages.
--
getPackageSourceHashes :: Verbosity
                       -> (forall a. (RepoContext -> IO a) -> IO a)
                       -> SolverInstallPlan
                       -> Rebuild (Map PackageId PackageSourceHash)
getPackageSourceHashes verbosity withRepoCtx solverPlan = do

    -- Determine if and where to get the package's source hash from.
    --
    let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))]
        allPkgLocations =
          [ (packageId pkg, packageSource pkg)
          | SolverInstallPlan.Configured (SolverPackage { solverPkgSource = pkg})
              <- SolverInstallPlan.toList solverPlan ]

        -- Tarballs that were local in the first place.
        -- We'll hash these tarball files directly.
        localTarballPkgs :: [(PackageId, FilePath)]
        localTarballPkgs =
          [ (pkgid, tarball)
          | (pkgid, LocalTarballPackage tarball) <- allPkgLocations ]

        -- Tarballs from remote URLs. We must have downloaded these already
        -- (since we extracted the .cabal file earlier)
        --TODO: [required eventually] finish remote tarball functionality
        -- allRemoteTarballPkgs =
        --   [ (pkgid, )
        --   | (pkgid, RemoteTarballPackage ) <- allPkgLocations ]

        -- Tarballs from repositories, either where the repository provides
        -- hashes as part of the repo metadata, or where we will have to
        -- download and hash the tarball.
        repoTarballPkgsWithMetadata    :: [(PackageId, Repo)]
        repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)]
        (repoTarballPkgsWithMetadata,
         repoTarballPkgsWithoutMetadata) =
          partitionEithers
            -- Secure repos carry hashes in their metadata; others do not.
            [ case repo of
                RepoSecure{} -> Left  (pkgid, repo)
                _            -> Right (pkgid, repo)
            | (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ]

    -- For tarballs from repos that do not have hashes available we now have
    -- to check if the packages were downloaded already.
    --
    (repoTarballPkgsToDownload,
     repoTarballPkgsDownloaded)
      <- fmap partitionEithers $
         liftIO $ sequence
           [ do mtarball <- checkRepoTarballFetched repo pkgid
                case mtarball of
                  Nothing      -> return (Left  (pkgid, repo))
                  Just tarball -> return (Right (pkgid, tarball))
           | (pkgid, repo) <- repoTarballPkgsWithoutMetadata ]

    (hashesFromRepoMetadata,
     repoTarballPkgsNewlyDownloaded) <-
      -- Avoid having to initialise the repository (ie 'withRepoCtx') if we
      -- don't have to. (The main cost is configuring the http client.)
      if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata
      then return (Map.empty, [])
      else liftIO $ withRepoCtx $ \repoctx -> do

        -- For tarballs from repos that do have hashes available as part of the
        -- repo metadata we now load up the index for each repo and retrieve
        -- the hashes for the packages
        --
        hashesFromRepoMetadata <-
          Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions
          fmap (Map.fromList . concat) $
          sequence
            -- Reading the repo index is expensive so we group the packages by repo
            [ repoContextWithSecureRepo repoctx repo $ \secureRepo ->
              Sec.withIndex secureRepo $ \repoIndex ->
                sequence
                  [ do hash <- Sec.trusted <$> -- strip off Trusted tag
                               Sec.indexLookupHash repoIndex pkgid
                       -- Note that hackage-security currently uses SHA256
                       -- but this API could in principle give us some other
                       -- choice in future.
                       return (pkgid, hashFromTUF hash)
                  | pkgid <- pkgids ]
            | (repo, pkgids) <-
                -- sort+group the (pkgid, repo) pairs by repo name so each
                -- repo index is opened exactly once
                map (\grp@((_,repo):_) -> (repo, map fst grp))
              . groupBy ((==)    `on` (remoteRepoName . repoRemote . snd))
              . sortBy  (compare `on` (remoteRepoName . repoRemote . snd))
              $ repoTarballPkgsWithMetadata
            ]

        -- For tarballs from repos that do not have hashes available, download
        -- the ones we previously determined we need.
        --
        repoTarballPkgsNewlyDownloaded <-
          sequence
            [ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid
                 return (pkgid, tarball)
            | (pkgid, repo) <- repoTarballPkgsToDownload ]

        return (hashesFromRepoMetadata,
                repoTarballPkgsNewlyDownloaded)

    -- Hash tarball files for packages where we have to do that. This includes
    -- tarballs that were local in the first place, plus tarballs from repos,
    -- either previously cached or freshly downloaded.
    --
    let allTarballFilePkgs :: [(PackageId, FilePath)]
        allTarballFilePkgs = localTarballPkgs
                          ++ repoTarballPkgsDownloaded
                          ++ repoTarballPkgsNewlyDownloaded
    hashesFromTarballFiles <- liftIO $
      fmap Map.fromList $
      sequence
        [ do srchash <- readFileHashValue tarball
             return (pkgid, srchash)
        | (pkgid, tarball) <- allTarballFilePkgs
        ]
    -- Re-hash (by re-running this phase) if any tarball file changes.
    monitorFiles [ monitorFile tarball
                 | (_pkgid, tarball) <- allTarballFilePkgs ]

    -- Return the combination
    return $! hashesFromRepoMetadata
           <> hashesFromTarballFiles
-- ------------------------------------------------------------
-- * Installation planning
-- ------------------------------------------------------------
-- | Compute the solver install plan: assemble all the user's settings,
-- preferences and constraints into resolver parameters and run the solver.
-- NOTE: the constraint\/preference combinators below compose right-to-left
-- onto 'stdResolverParams'; their application order is significant.
planPackages :: Compiler
             -> Platform
             -> Solver -> SolverSettings
             -> InstalledPackageIndex
             -> SourcePackageDb
             -> PkgConfigDb
             -> [UnresolvedSourcePackage]
             -> Map PackageName (Map OptionalStanza Bool)
             -> Progress String String SolverInstallPlan
planPackages comp platform solver SolverSettings{..}
             installedPkgIndex sourcePkgDb pkgConfigDB
             localPackages pkgStanzasEnable =

    resolveDependencies
      platform (compilerInfo comp)
      pkgConfigDB solver
      resolverParams

  where
    --TODO: [nice to have] disable multiple instances restriction in the solver, but then
    -- make sure we can cope with that in the output.
    resolverParams =

        setMaxBackjumps solverSettingMaxBackjumps

        --TODO: [required eventually] should only be configurable for custom installs
        -- . setIndependentGoals solverSettingIndependentGoals

      . setReorderGoals solverSettingReorderGoals

      . setCountConflicts solverSettingCountConflicts

        --TODO: [required eventually] should only be configurable for custom installs
        -- . setAvoidReinstalls solverSettingAvoidReinstalls

        --TODO: [required eventually] should only be configurable for custom installs
        -- . setShadowPkgs solverSettingShadowPkgs

      . setStrongFlags solverSettingStrongFlags

        --TODO: [required eventually] decide if we need to prefer installed for
        -- global packages, or prefer latest even for global packages. Perhaps
        -- should be configurable but with a different name than "upgrade-dependencies".
      . setPreferenceDefault PreferLatestForSelected
                             {-(if solverSettingUpgradeDeps
                                  then PreferAllLatest
                                  else PreferLatestForSelected)-}

      . removeLowerBounds solverSettingAllowOlder
      . removeUpperBounds solverSettingAllowNewer

      . addDefaultSetupDependencies (defaultSetupDeps comp platform
                                   . PD.packageDescription
                                   . packageDescription)

      . addPreferences
          -- preferences from the config file or command line
          [ PackageVersionPreference name ver
          | Dependency name ver <- solverSettingPreferences ]

      . addConstraints
          -- version constraints from the config file or command line
          [ LabeledPackageConstraint (userToPackageConstraint pc) src
          | (pc, src) <- solverSettingConstraints ]

      . addPreferences
          -- enable stanza preference where the user did not specify
          [ PackageStanzasPreference pkgname stanzas
          | pkg <- localPackages
          , let pkgname = packageName pkg
                stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable
                stanzas = [ stanza | stanza <- [minBound..maxBound]
                          , Map.lookup stanza stanzaM == Nothing ]
          , not (null stanzas)
          ]

      . addConstraints
          -- enable stanza constraints where the user asked to enable
          [ LabeledPackageConstraint
              (PackageConstraintStanzas pkgname stanzas)
              ConstraintSourceConfigFlagOrTarget
          | pkg <- localPackages
          , let pkgname = packageName pkg
                stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable
                stanzas = [ stanza | stanza <- [minBound..maxBound]
                          , Map.lookup stanza stanzaM == Just True ]
          , not (null stanzas)
          ]

      . addConstraints
          --TODO: [nice to have] should have checked at some point that the
          -- package in question actually has these flags.
          [ LabeledPackageConstraint
              (PackageConstraintFlags pkgname flags)
              ConstraintSourceConfigFlagOrTarget
          | (pkgname, flags) <- Map.toList solverSettingFlagAssignments ]

      . addConstraints
          --TODO: [nice to have] we have user-supplied flags for unspecified
          -- local packages (as well as specific per-package flags). For the
          -- former we just apply all these flags to all local targets which
          -- is silly. We should check if the flags are appropriate.
          [ LabeledPackageConstraint
              (PackageConstraintFlags pkgname flags)
              ConstraintSourceConfigFlagOrTarget
          | let flags = solverSettingFlagAssignment
          , not (null flags)
          , pkg <- localPackages
          , let pkgname = packageName pkg ]

      $ stdResolverParams

    stdResolverParams =
      -- Note: we don't use the standardInstallPolicy here, since that uses
      -- its own addDefaultSetupDependencies that is not appropriate for us.
      basicInstallPolicy
        installedPkgIndex sourcePkgDb
        (map SpecificSourcePackage localPackages)
------------------------------------------------------------------------------
-- * Install plan post-processing
------------------------------------------------------------------------------
-- This phase goes from the InstallPlan we get from the solver and has to
-- make an elaborated install plan.
--
-- We go in two steps:
--
-- 1. elaborate all the source packages that the solver has chosen.
-- 2. swap source packages for pre-existing installed packages wherever
-- possible.
--
-- We do it in this order, elaborating and then replacing, because the easiest
-- way to calculate the installed package ids used for the replacement step is
-- from the elaborated configuration for each package.
------------------------------------------------------------------------------
-- * Install plan elaboration
------------------------------------------------------------------------------
-- | Produce an elaborated install plan using the policy for local builds with
-- a nix-style shared store.
--
-- In theory should be able to make an elaborated install plan with a policy
-- matching that of the classic @cabal install --user@ or @--global@
--
elaborateInstallPlan
:: Platform -> Compiler -> ProgramDb -> PkgConfigDb
-> DistDirLayout
-> CabalDirLayout
-> SolverInstallPlan
-> [SourcePackage loc]
-> Map PackageId PackageSourceHash
-> InstallDirs.InstallDirTemplates
-> ProjectConfigShared
-> PackageConfig
-> Map PackageName PackageConfig
-> (ElaboratedInstallPlan, ElaboratedSharedConfig)
elaborateInstallPlan platform compiler compilerprogdb pkgConfigDB
DistDirLayout{..}
cabalDirLayout@CabalDirLayout{cabalStorePackageDB}
solverPlan localPackages
sourcePackageHashes
defaultInstallDirs
_sharedPackageConfig
localPackagesConfig
perPackageConfig =
(elaboratedInstallPlan, elaboratedSharedConfig)
where
    -- The compiler/platform/toolchain bundle shared by every package in
    -- the elaborated plan.
    elaboratedSharedConfig =
      ElaboratedSharedConfig {
        pkgConfigPlatform      = platform,
        pkgConfigCompiler      = compiler,
        pkgConfigCompilerProgs = compilerprogdb
      }
    -- Convert each solver plan entry: pre-existing packages pass through,
    -- configured packages are elaborated either per-component (when
    -- eligible) or as a single whole package.
    elaboratedInstallPlan =
      flip InstallPlan.fromSolverInstallPlan solverPlan $ \mapDep planpkg ->
        case planpkg of
          SolverInstallPlan.PreExisting pkg ->
            [InstallPlan.PreExisting (instSolverPkgIPI pkg)]

          SolverInstallPlan.Configured pkg ->
            -- SolverPackage
            let pd = PD.packageDescription (packageDescription (solverPkgSource pkg))
                eligible
                  -- At this point in time, only non-Custom setup scripts
                  -- are supported. Implementing per-component builds with
                  -- Custom would require us to create a new 'ElabSetup'
                  -- type, and teach all of the code paths how to handle it.
                  -- Once you've implemented that, delete this guard.
                  | fromMaybe PD.Custom (PD.buildType pd) == PD.Custom
                  = False

                  -- Only non-Custom or sufficiently recent Custom
                  -- scripts can be expanded.
                  | otherwise
                  = (fromMaybe PD.Custom (PD.buildType pd) /= PD.Custom
                      -- This is when we started distributing dependencies
                      -- per component (instead of glomming them altogether
                      -- and distributing to everything.) I didn't feel
                      -- like implementing the legacy behavior.
                      && PD.specVersion pd >= Version [1,7,1] []
                    )
                  || PD.specVersion pd >= Version [2,0,0] []
            in map InstallPlan.Configured $ if eligible
                 then elaborateSolverToComponents mapDep pkg
                 else [elaborateSolverToPackage mapDep pkg]
    -- Elaborate a solver package into one 'ElaboratedConfiguredPackage' per
    -- component, threading maps of the package's internal libraries and
    -- executables through the components in dependency order so that later
    -- components can depend on earlier ones.
    elaborateSolverToComponents
      :: (SolverId -> [ElaboratedPlanPackage])
      -> SolverPackage UnresolvedPkgLoc
      -> [ElaboratedConfiguredPackage]
    elaborateSolverToComponents mapDep spkg@(SolverPackage _ _ _ deps0 exe_deps0)
        = snd (mapAccumL buildComponent (Map.empty, Map.empty) comps_graph)
      where
        -- The shared, per-package fields; per-component fields are patched
        -- in by 'buildComponent' below.
        elab0@ElaboratedConfiguredPackage{..} = elaborateSolverToCommon mapDep spkg
        comps_graph =
          case Cabal.mkComponentsGraph
                 elabEnabledSpec
                 elabPkgDescription
                 elabInternalPackages of
            Left  _ -> error ("component cycle in " ++ display elabPkgSourceId)
            Right g -> g

        buildComponent :: (Map PackageName ConfiguredId, Map String (ConfiguredId, FilePath))
                       -> (Cabal.Component, [Cabal.ComponentName])
                       -> ((Map PackageName ConfiguredId, Map String (ConfiguredId, FilePath)),
                           ElaboratedConfiguredPackage)
        buildComponent (internal_map, exe_map) (comp, _cdeps) =
            ((internal_map', exe_map'), elab)
          where
            elab = elab0 {
                elabUnitId = SimpleUnitId cid, -- Backpack later!
                elabInstallDirs = install_dirs,
                elabRequiresRegistration = requires_reg,
                elabPkgOrComp = ElabComponent $ ElaboratedComponent {..}
              }

            -- Component id: deterministic "-inplace" suffix for inplace
            -- builds, nix-style hash for store installs.
            cid :: ComponentId
            cid = case elabBuildStyle of
                    BuildInplaceOnly ->
                      ComponentId $
                        display elabPkgSourceId ++ "-inplace" ++
                          (case Cabal.componentNameString cname of
                             Nothing -> ""
                             Just s  -> "-" ++ s)
                    BuildAndInstall ->
                      hashedInstalledPackageId
                        (packageHashInputs
                           elaboratedSharedConfig
                           elab) -- knot tied

            cname = Cabal.componentName comp
            -- Only (sub)libraries get registered in a package db.
            requires_reg = case cname of
              CLibName      -> True
              CSubLibName _ -> True
              _             -> False
            compComponentName = Just cname
            compSolverName = CD.componentNameToComponent cname
            -- External deps for this component, plus internal
            -- libs/exes resolved through the accumulated maps.
            compLibDependencies =
              concatMap (elaborateLibSolverId mapDep)
                        (CD.select (== compSolverName) deps0) ++
              internal_lib_deps
            compExeDependencies =
              (map confInstId $
                concatMap (elaborateExeSolverId mapDep)
                          (CD.select (== compSolverName) exe_deps0)) ++
              internal_exe_deps
            compExeDependencyPaths =
              concatMap (elaborateExePath mapDep)
                        (CD.select (== compSolverName) exe_deps0) ++
              internal_exe_paths
            compPkgConfigDependencies =
              [ (pn, fromMaybe (error $ "compPkgConfigDependencies: impossible! "
                                          ++ display pn ++ " from " ++ display elabPkgSourceId)
                               (pkgConfigDbPkgVersion pkgConfigDB pn))
              | Dependency pn _ <- PD.pkgconfigDepends bi ]

            bi = Cabal.componentBuildInfo comp
            confid = ConfiguredId elabPkgSourceId cid

            compSetupDependencies = concatMap (elaborateLibSolverId mapDep) (CD.setupDeps deps0)
            internal_lib_deps
              = [ confid'
                | Dependency pkgname _ <- PD.targetBuildDepends bi
                , Just confid' <- [Map.lookup pkgname internal_map] ]
            (internal_exe_deps, internal_exe_paths)
              = unzip $
                [ (confInstId confid', path)
                | Dependency (PackageName toolname) _ <- PD.buildTools bi
                , toolname `elem` map PD.exeName (PD.executables elabPkgDescription)
                , Just (confid', path) <- [Map.lookup toolname exe_map]
                ]

            -- Extend the accumulators with this component so later
            -- components in the graph can depend on it.
            internal_map' = case cname of
                CLibName
                  -> Map.insert (packageName elabPkgSourceId) confid internal_map
                CSubLibName libname
                  -> Map.insert (PackageName libname) confid internal_map
                _ -> internal_map
            exe_map' = case cname of
                CExeName exename
                  -> Map.insert exename (confid, inplace_bin_dir) exe_map
                _ -> exe_map

            -- NB: For inplace NOT InstallPaths.bindir installDirs; for an
            -- inplace build those values are utter nonsense. So we
            -- have to guess where the directory is going to be.
            -- Fortunately this is "stable" part of Cabal API.
            -- But the way we get the build directory is A HORRIBLE
            -- HACK.
            inplace_bin_dir
              | shouldBuildInplaceOnly spkg
              = distBuildDirectory
                  (elabDistDirParams elaboratedSharedConfig elab) </>
                  "build" </> case Cabal.componentNameString cname of
                                Just n  -> n
                                Nothing -> ""
              | otherwise
              = InstallDirs.bindir install_dirs

            install_dirs
              | shouldBuildInplaceOnly spkg
              -- use the ordinary default install dirs
              = (InstallDirs.absoluteInstallDirs
                   elabPkgSourceId
                   (SimpleUnitId cid)
                   (compilerInfo compiler)
                   InstallDirs.NoCopyDest
                   platform
                   defaultInstallDirs) {

                  InstallDirs.libsubdir  = "", -- absoluteInstallDirs sets these as
                  InstallDirs.datasubdir = ""  -- 'undefined' but we have to use
                }                              -- them as "Setup.hs configure" args

              | otherwise
              -- use special simplified install dirs
              = storePackageInstallDirs
                  cabalDirLayout
                  (compilerId compiler)
                  cid
elaborateLibSolverId :: (SolverId -> [ElaboratedPlanPackage])
-> SolverId -> [ConfiguredId]
elaborateLibSolverId mapDep = map configuredId . filter is_lib . mapDep
where is_lib (InstallPlan.PreExisting _) = True
is_lib (InstallPlan.Configured elab) =
case elabPkgOrComp elab of
ElabPackage _ -> True
ElabComponent comp -> compSolverName comp == CD.ComponentLib
elaborateExeSolverId :: (SolverId -> [ElaboratedPlanPackage])
-> SolverId -> [ConfiguredId]
elaborateExeSolverId mapDep = map configuredId . filter is_exe . mapDep
where is_exe (InstallPlan.PreExisting _) = False
is_exe (InstallPlan.Configured elab) =
case elabPkgOrComp elab of
ElabPackage _ -> True
ElabComponent comp ->
case compSolverName comp of
CD.ComponentExe _ -> True
_ -> False
    -- Resolve a solver id to the bin directories of the executables it
    -- provides (inplace build dirs, or install bindirs for store builds).
    elaborateExePath :: (SolverId -> [ElaboratedPlanPackage])
                     -> SolverId -> [FilePath]
    elaborateExePath mapDep = concatMap get_exe_path . mapDep
      where
        -- Pre-existing executables are assumed to be in PATH
        -- already. In fact, this should be impossible.
        -- Modest duplication with 'inplace_bin_dir'
        get_exe_path (InstallPlan.PreExisting _) = []
        get_exe_path (InstallPlan.Configured elab) =
          [if elabBuildStyle elab == BuildInplaceOnly
             then distBuildDirectory
                    (elabDistDirParams elaboratedSharedConfig elab) </>
                    "build" </>
                    case elabPkgOrComp elab of
                      ElabPackage _ -> ""
                      ElabComponent comp ->
                        case fmap Cabal.componentNameString
                                  (compComponentName comp) of
                          Just (Just n) -> n
                          _             -> ""
             else InstallDirs.bindir (elabInstallDirs elab)]
-- | Elaborate a solver package into a per-package (as opposed to
-- per-component) 'ElaboratedConfiguredPackage'.
elaborateSolverToPackage :: (SolverId -> [ElaboratedPlanPackage])
                         -> SolverPackage UnresolvedPkgLoc
                         -> ElaboratedConfiguredPackage
elaborateSolverToPackage
    mapDep
    pkg@(SolverPackage (SourcePackage pkgid _gdesc _srcloc _descOverride)
                       _flags _stanzas deps0 exe_deps0) =
    -- Knot tying: the final elab includes the
    -- pkgInstalledId, which is calculated by hashing many
    -- of the other fields of the elaboratedPackage.
    elab
  where
    -- Start from the fields shared with per-component elaboration and
    -- overwrite the package-specific ones.
    elab0@ElaboratedConfiguredPackage{..} = elaborateSolverToCommon mapDep pkg
    elab = elab0 {
        elabUnitId = SimpleUnitId pkgInstalledId,
        elabInstallDirs = install_dirs,
        elabRequiresRegistration = requires_reg,
        elabPkgOrComp = ElabPackage $ ElaboratedPackage {..}
      }

    deps = fmap (concatMap (elaborateLibSolverId mapDep)) deps0

    -- Only packages with a public library need registering.
    requires_reg = PD.hasPublicLib elabPkgDescription

    -- For inplace builds use a synthetic component id; otherwise derive
    -- it from the hash of the package's inputs.
    --
    -- BUG FIX: the original had a second, unreachable '| otherwise'
    -- guard carrying the "missing a source hash" error; the hash case
    -- was protected only by an 'assert', so the error branch could never
    -- fire (and the assert vanishes in non-debug builds). Guard the hash
    -- case on 'isJust elabPkgSourceHash' so the error is reachable.
    pkgInstalledId
      | shouldBuildInplaceOnly pkg
      = ComponentId (display pkgid ++ "-inplace")

      | isJust elabPkgSourceHash
      = hashedInstalledPackageId
          (packageHashInputs
              elaboratedSharedConfig
              elab) -- recursive use of elab

      | otherwise
      = error $ "elaborateInstallPlan: non-inplace package "
             ++ " is missing a source hash: " ++ display pkgid

    -- Remaining 'ElaboratedPackage' fields, captured by the record
    -- wildcard in 'elab' above.
    pkgLibDependencies = deps
    pkgExeDependencies = fmap (concatMap (elaborateExeSolverId mapDep)) exe_deps0
    pkgExeDependencyPaths = fmap (concatMap (elaborateExePath mapDep)) exe_deps0

    -- Resolve pkg-config dependency versions now. A lookup failure is a
    -- programmer error since the solver already checked availability.
    pkgPkgConfigDependencies =
        ordNub
      $ [ (pn, fromMaybe (error $ "pkgPkgConfigDependencies: impossible! "
                                    ++ display pn ++ " from " ++ display pkgid)
                         (pkgConfigDbPkgVersion pkgConfigDB pn))
        | Dependency pn _ <- concatMap PD.pkgconfigDepends
                                       (PD.allBuildInfo elabPkgDescription)
        ]

    -- Filled in later
    pkgStanzasEnabled = Set.empty

    install_dirs
      | shouldBuildInplaceOnly pkg
      -- use the ordinary default install dirs
      = (InstallDirs.absoluteInstallDirs
           pkgid
           (SimpleUnitId pkgInstalledId)
           (compilerInfo compiler)
           InstallDirs.NoCopyDest
           platform
           defaultInstallDirs) {

          InstallDirs.libsubdir  = "", -- absoluteInstallDirs sets these as
          InstallDirs.datasubdir = ""  -- 'undefined' but we have to use
        }                              -- them as "Setup.hs configure" args

      | otherwise
      -- use special simplified install dirs
      = storePackageInstallDirs
          cabalDirLayout
          (compilerId compiler)
          pkgInstalledId
-- | Elaborate the fields of an 'ElaboratedConfiguredPackage' that are
-- common to both per-package and per-component elaboration. The caller
-- ('elaborateSolverToPackage' or the component path) overwrites the
-- fields marked below as "filled in later".
elaborateSolverToCommon :: (SolverId -> [ElaboratedPlanPackage])
-> SolverPackage UnresolvedPkgLoc
-> ElaboratedConfiguredPackage
elaborateSolverToCommon mapDep
pkg@(SolverPackage (SourcePackage pkgid gdesc srcloc descOverride)
flags stanzas deps0 _exe_deps0) =
elaboratedPackage
where
-- All the elab* bindings below are gathered by this record wildcard.
elaboratedPackage = ElaboratedConfiguredPackage {..}
-- These get filled in later
elabUnitId = error "elaborateSolverToCommon: elabUnitId"
elabPkgOrComp = error "elaborateSolverToCommon: elabPkgOrComp"
elabInstallDirs = error "elaborateSolverToCommon: elabInstallDirs"
elabRequiresRegistration = error "elaborateSolverToCommon: elabRequiresRegistration"
elabPkgSourceId = pkgid
-- NOTE(review): partial 'let Right' pattern — presumably finalizePD
-- cannot fail with the flag assignment the solver chose; confirm.
elabPkgDescription = let Right (desc, _) =
PD.finalizePD
flags elabEnabledSpec (const True)
platform (compilerInfo compiler)
[] gdesc
in desc
elabInternalPackages = Cabal.getInternalPackages gdesc
elabFlagAssignment = flags
elabFlagDefaults = [ (Cabal.flagName flag, Cabal.flagDefault flag)
| flag <- PD.genPackageFlags gdesc ]
elabEnabledSpec = enableStanzas stanzas
elabStanzasAvailable = Set.fromList stanzas
elabStanzasRequested =
-- NB: even if a package stanza is requested, if the package
-- doesn't actually have any of that stanza we omit it from
-- the request, to ensure that we don't decide that this
-- package needs to be rebuilt. (It needs to be done here,
-- because the ElaboratedConfiguredPackage is where we test
-- whether or not there have been changes.)
Map.fromList $ [ (TestStanzas, v) | v <- maybeToList tests
, _ <- PD.testSuites elabPkgDescription ]
++ [ (BenchStanzas, v) | v <- maybeToList benchmarks
, _ <- PD.benchmarks elabPkgDescription ]
where
tests, benchmarks :: Maybe Bool
tests = perPkgOptionMaybe pkgid packageConfigTests
benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks
-- This is a placeholder which will get updated by 'pruneInstallPlanPass1'
-- and 'pruneInstallPlanPass2'. We can't populate it here
-- because whether or not tests/benchmarks should be enabled
-- is heuristically calculated based on whether or not the
-- dependencies of the test suite have already been installed,
-- but this function doesn't know what is installed (since
-- we haven't improved the plan yet), so we do it in another pass.
-- Check the comments of those functions for more details.
elabBuildTargets = []
elabReplTarget = Nothing
elabBuildHaddocks = False
elabPkgSourceLocation = srcloc
elabPkgSourceHash = Map.lookup pkgid sourcePackageHashes
elabLocalToProject = isLocalToProject pkg
elabBuildStyle = if shouldBuildInplaceOnly pkg
then BuildInplaceOnly else BuildAndInstall
elabBuildPackageDBStack = buildAndRegisterDbs
elabRegisterPackageDBStack = buildAndRegisterDbs
elabSetupScriptStyle = packageSetupScriptStyle elabPkgDescription
-- Computing the deps here is a little awful
deps = fmap (concatMap (elaborateLibSolverId mapDep)) deps0
elabSetupScriptCliVersion = packageSetupScriptSpecVersion
elabSetupScriptStyle elabPkgDescription deps
elabSetupPackageDBStack = buildAndRegisterDbs
-- Inplace builds and store builds use different package-db stacks.
buildAndRegisterDbs
| shouldBuildInplaceOnly pkg = inplacePackageDbs
| otherwise = storePackageDbs
elabPkgDescriptionOverride = descOverride
elabVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively
elabSharedLib = pkgid `Set.member` pkgsUseSharedLibrary
elabDynExe = perPkgOptionFlag pkgid False packageConfigDynExe
elabGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still
elabProfExe = perPkgOptionFlag pkgid False packageConfigProf
elabProfLib = pkgid `Set.member` pkgsUseProfilingLibrary
(elabProfExeDetail,
elabProfLibDetail) = perPkgOptionLibExeFlag pkgid ProfDetailDefault
packageConfigProfDetail
packageConfigProfLibDetail
elabCoverage = perPkgOptionFlag pkgid False packageConfigCoverage
elabOptimization = perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization
elabSplitObjs = perPkgOptionFlag pkgid False packageConfigSplitObjs
elabStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs
elabStripExes = perPkgOptionFlag pkgid False packageConfigStripExes
elabDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo
-- Combine the configured compiler prog settings with the user-supplied
-- config. For the compiler progs any user-supplied config was taken
-- into account earlier when configuring the compiler so its ok that
-- our configured settings for the compiler override the user-supplied
-- config here.
elabProgramPaths = Map.fromList
[ (programId prog, programPath prog)
| prog <- configuredPrograms compilerprogdb ]
<> perPkgOptionMapLast pkgid packageConfigProgramPaths
elabProgramArgs = Map.fromList
[ (programId prog, args)
| prog <- configuredPrograms compilerprogdb
, let args = programOverrideArgs prog
, not (null args)
]
<> perPkgOptionMapMappend pkgid packageConfigProgramArgs
elabProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra
elabConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs
elabExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs
elabExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs
elabExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs
elabProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix
elabProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix
elabHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle
elabHaddockHtml = perPkgOptionFlag pkgid False packageConfigHaddockHtml
elabHaddockHtmlLocation = perPkgOptionMaybe pkgid packageConfigHaddockHtmlLocation
elabHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables
elabHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites
elabHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks
elabHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal
elabHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss
elabHaddockHscolour = perPkgOptionFlag pkgid False packageConfigHaddockHscolour
elabHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss
elabHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents
-- Small helpers that project a per-package config option out of the
-- package config, each handling a different option container shape
-- (Flag, list, NubList, MapLast, MapMappend).
perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a
perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a
perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a]
perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f)
perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f)
perPkgOptionList pkgid f = lookupPerPkgOption pkgid f
perPkgOptionNubList pkgid f = fromNubList (lookupPerPkgOption pkgid f)
perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f)
perPkgOptionMapMappend pkgid f = getMapMappend (lookupPerPkgOption pkgid f)
-- Split an option into its exe and lib values: the exe value comes from
-- the shared flag, the lib value is the shared flag overridden by the
-- lib-specific flag.
perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib)
where
exe = fromFlagOrDefault def bothflag
lib = fromFlagOrDefault def (bothflag <> libflag)
bothflag = lookupPerPkgOption pkgid fboth
libflag = lookupPerPkgOption pkgid flib
-- | Look up a config option for a particular package, combining the
-- project-local defaults with any per-package override.
lookupPerPkgOption :: (Package pkg, Monoid m)
=> pkg -> (PackageConfig -> m) -> m
lookupPerPkgOption pkg f
-- the project config specifies values that apply to packages local to
-- the project, and can specify per-package values for any package,
-- but by default non-local packages get all default config values
| isLocalToProject pkg = local <> perpkg
| otherwise = perpkg
where
local = f localPackagesConfig
perpkg = maybe mempty f (Map.lookup (packageName pkg) perPackageConfig)
-- Package-db stack for inplace builds: the store dbs plus the project's
-- own dist package db.
inplacePackageDbs = storePackageDbs
++ [ distPackageDB (compilerId compiler) ]
-- Package-db stack for store builds: the global db plus the cabal store.
storePackageDbs = [ GlobalPackageDB
, cabalStorePackageDB (compilerId compiler) ]
-- For this local build policy, every package that lives in a local source
-- dir (as opposed to a tarball), or depends on such a package, will be
-- built inplace into a shared dist dir. Tarball packages that depend on
-- source dir packages will also get unpacked locally.
shouldBuildInplaceOnly :: SolverPackage loc -> Bool
shouldBuildInplaceOnly pkg = Set.member (packageId pkg)
pkgsToBuildInplaceOnly
-- The reverse dependency closure of the local packages in the solver
-- plan: anything that (transitively) depends on a local package.
pkgsToBuildInplaceOnly :: Set PackageId
pkgsToBuildInplaceOnly =
Set.fromList
$ map packageId
$ SolverInstallPlan.reverseDependencyClosure
solverPlan
[ PlannedId (packageId pkg)
| pkg <- localPackages ]
-- | Is this one of the packages checked out in the project itself?
isLocalToProject :: Package pkg => pkg -> Bool
isLocalToProject pkg = Set.member (packageId pkg)
pkgsLocalToProject
pkgsLocalToProject :: Set PackageId
pkgsLocalToProject = Set.fromList [ packageId pkg | pkg <- localPackages ]
-- | Packages that need a shared library build: those that ask for one
-- (shared lib or dynamic exes), closed downward over library deps.
pkgsUseSharedLibrary :: Set PackageId
pkgsUseSharedLibrary =
packagesWithLibDepsDownwardClosedProperty needsSharedLib
where
needsSharedLib pkg =
fromMaybe compilerShouldUseSharedLibByDefault
(liftM2 (||) pkgSharedLib pkgDynExe)
where
pkgid = packageId pkg
pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib
pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe
--TODO: [code cleanup] move this into the Cabal lib. It's currently open
-- coded in Distribution.Simple.Configure, but should be made a proper
-- function of the Compiler or CompilerInfo.
compilerShouldUseSharedLibByDefault =
case compilerFlavor compiler of
GHC -> GHC.isDynamic compiler
GHCJS -> GHCJS.isDynamic compiler
_ -> False
-- | Packages that need a profiling library build, closed downward over
-- library deps (a profiled exe needs profiled libs all the way down).
pkgsUseProfilingLibrary :: Set PackageId
pkgsUseProfilingLibrary =
packagesWithLibDepsDownwardClosedProperty needsProfilingLib
where
needsProfilingLib pkg =
fromFlagOrDefault False (profBothFlag <> profLibFlag)
where
pkgid = packageId pkg
profBothFlag = lookupPerPkgOption pkgid packageConfigProf
profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib
--TODO: [code cleanup] unused: the old deprecated packageConfigProfExe
-- Dependency graph over library (non-setup) deps only; see
-- 'NonSetupLibDepSolverPlanPackage'.
libDepGraph = Graph.fromList (map NonSetupLibDepSolverPlanPackage
(SolverInstallPlan.toList solverPlan))
-- All packages reachable (via lib deps) from the packages satisfying
-- the given property.
packagesWithLibDepsDownwardClosedProperty property =
Set.fromList
. map packageId
. fromMaybe []
$ Graph.closure
libDepGraph
[ Graph.nodeKey pkg
| pkg <- SolverInstallPlan.toList solverPlan
, property pkg ] -- just the packages that satisfy the property
--TODO: [nice to have] this does not check the config consistency,
-- e.g. a package explicitly turning off profiling, but something
-- depending on it that needs profiling. This really needs a separate
-- package config validation/resolution pass.
--TODO: [nice to have] config consistency checking:
-- + profiling libs & exes, exe needs lib, recursive
-- + shared libs & exes, exe needs lib, recursive
-- + vanilla libs & exes, exe needs lib, recursive
-- + ghci or shared lib needed by TH, recursive, ghc version dependent
-- | A newtype for 'SolverInstallPlan.SolverPlanPackage' for which the
-- dependency graph considers only dependencies on libraries which are
-- NOT from setup dependencies. Used to compute the set
-- of packages needed for profiling and dynamic libraries.
newtype NonSetupLibDepSolverPlanPackage
= NonSetupLibDepSolverPlanPackage
{ unNonSetupLibDepSolverPlanPackage :: SolverInstallPlan.SolverPlanPackage }
instance Package NonSetupLibDepSolverPlanPackage where
packageId = packageId . unNonSetupLibDepSolverPlanPackage
instance IsNode NonSetupLibDepSolverPlanPackage where
type Key NonSetupLibDepSolverPlanPackage = SolverId
nodeKey = nodeKey . unNonSetupLibDepSolverPlanPackage
-- Neighbours exclude setup deps; duplicates removed.
nodeNeighbors (NonSetupLibDepSolverPlanPackage spkg)
= ordNub $ CD.nonSetupDeps (resolverPackageLibDeps spkg)
---------------------------
-- Build targets
--
-- Refer to ProjectPlanning.Types for details of these important types:
-- data PackageTarget = ...
-- data ComponentTarget = ...
-- data SubComponentTarget = ...
--TODO: this needs to report some user target/config errors
-- | Translate the user-level 'PackageTarget's for one package into the
-- concrete component-level targets: the build targets, an optional repl
-- target, and whether haddocks were requested.
elaboratePackageTargets :: ElaboratedConfiguredPackage -> [PackageTarget]
-> ([ComponentTarget], Maybe ComponentTarget, Bool)
elaboratePackageTargets ElaboratedConfiguredPackage{..} targets =
let buildTargets = nubComponentTargets
. map compatSubComponentTargets
. concatMap elaborateBuildTarget
$ targets
--TODO: instead of listToMaybe we should be reporting an error here
replTargets = listToMaybe
. nubComponentTargets
. map compatSubComponentTargets
. concatMap elaborateReplTarget
$ targets
buildHaddocks = HaddockDefaultComponents `elem` targets
in (buildTargets, replTargets, buildHaddocks)
where
--TODO: need to report an error here if defaultComponents is empty
elaborateBuildTarget BuildDefaultComponents = pkgDefaultComponents
elaborateBuildTarget (BuildSpecificComponent t) = [t]
elaborateBuildTarget _ = []
--TODO: need to report an error here if defaultComponents is empty
elaborateReplTarget ReplDefaultComponent = take 1 pkgDefaultComponents
elaborateReplTarget (ReplSpecificComponent t) = [t]
elaborateReplTarget _ = []
-- All buildable components whose optional stanza (if any) is enabled.
pkgDefaultComponents =
[ ComponentTarget cname WholeComponent
| c <- Cabal.pkgComponents elabPkgDescription
, PD.buildable (Cabal.componentBuildInfo c)
, let cname = Cabal.componentName c
, enabledOptionalStanza cname
]
where
enabledOptionalStanza cname =
case componentOptionalStanza cname of
Nothing -> True
Just stanza -> Map.lookup stanza elabStanzasRequested
== Just True
-- Not all Cabal Setup.hs versions support sub-component targets, so switch
-- them over to the whole component
compatSubComponentTargets :: ComponentTarget -> ComponentTarget
compatSubComponentTargets target@(ComponentTarget cname _subtarget)
| not setupHsSupportsSubComponentTargets
= ComponentTarget cname WholeComponent
| otherwise = target
-- Actually the reality is that no current version of Cabal's Setup.hs
-- build command actually support building specific files or modules.
setupHsSupportsSubComponentTargets = False
-- TODO: when that changes, adjust this test, e.g.
-- | pkgSetupScriptCliVersion >= Version [x,y] []
-- Collapse duplicate targets within each component.
nubComponentTargets :: [ComponentTarget] -> [ComponentTarget]
nubComponentTargets =
concatMap (wholeComponentOverrides . map snd)
. groupBy ((==) `on` fst)
. sortBy (compare `on` fst)
. map (\t@(ComponentTarget cname _) -> (cname, t))
-- If we're building the whole component then that is the only target we
-- need, otherwise we can have several targets within the component.
wholeComponentOverrides :: [ComponentTarget] -> [ComponentTarget]
wholeComponentOverrides ts =
case [ t | t@(ComponentTarget _ WholeComponent) <- ts ] of
(t:_) -> [t]
[] -> ts
-- | Does the package have any \"ephemeral\" targets, i.e. a repl target
-- or a build target that covers less than a whole component?
pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool
pkgHasEphemeralBuildTargets elab =
    isJust (elabReplTarget elab) || any isPartialTarget (elabBuildTargets elab)
  where
    -- a target narrower than a whole component
    isPartialTarget (ComponentTarget _ subtarget) = subtarget /= WholeComponent
-- | The components that we'll build all of, meaning that after they're built
-- we can skip building them again (unlike with building just some modules or
-- other files within a component).
--
elabBuildTargetWholeComponents :: ElaboratedConfiguredPackage
                               -> Set ComponentName
elabBuildTargetWholeComponents elab =
    Set.fromList (mapMaybe wholeComponentName (elabBuildTargets elab))
  where
    -- keep only the targets that cover a whole component
    wholeComponentName (ComponentTarget cname WholeComponent) = Just cname
    wholeComponentName _                                      = Nothing
------------------------------------------------------------------------------
-- * Install plan pruning
------------------------------------------------------------------------------
-- | Given a set of package targets (and optionally component targets within
-- those packages), take the subset of the install plan needed to build those
-- targets. Also, update the package config to specify which optional stanzas
-- to enable, and which targets within each package to build.
--
pruneInstallPlanToTargets :: Map UnitId [PackageTarget]
                          -> ElaboratedInstallPlan -> ElaboratedInstallPlan
pruneInstallPlanToTargets perPkgTargetsMap elaboratedPlan =
    InstallPlan.new indepGoals (Graph.fromList prunedPackages)
  where
    indepGoals = InstallPlan.planIndepGoals elaboratedPlan
    -- We have to do the pruning in two passes over the flattened plan.
    prunedPackages =
        pruneInstallPlanPass2
          (pruneInstallPlanPass1 perPkgTargetsMap
             (InstallPlan.toList elaboratedPlan))
-- | This is a temporary data type, where we temporarily
-- override the graph dependencies of an 'ElaboratedPackage',
-- so we can take a closure over them. We'll throw out the
-- overriden dependencies when we're done so it's strictly temporary.
--
-- For 'ElaboratedComponent', this the cached unit IDs always
-- coincide with the real thing.
data PrunedPackage = PrunedPackage ElaboratedConfiguredPackage [UnitId]
instance Package PrunedPackage where
packageId (PrunedPackage elab _) = packageId elab
instance HasUnitId PrunedPackage where
installedUnitId = nodeKey
instance IsNode PrunedPackage where
type Key PrunedPackage = UnitId
nodeKey (PrunedPackage elab _) = nodeKey elab
-- the overridden (pruned) dependency list, not the real one
nodeNeighbors (PrunedPackage _ deps) = deps
-- | Drop the temporary dependency override, recovering the package.
fromPrunedPackage :: PrunedPackage -> ElaboratedConfiguredPackage
fromPrunedPackage (PrunedPackage elab _) = elab
-- | The first pass does three things:
--
-- * Set the build targets based on the user targets (but not rev deps yet).
-- * A first go at determining which optional stanzas (testsuites, benchmarks)
-- are needed. We have a second go in the next pass.
-- * Take the dependency closure using pruned dependencies. We prune deps that
-- are used only by unneeded optional stanzas. These pruned deps are only
-- used for the dependency closure and are not persisted in this pass.
--
-- | First pruning pass: set build targets from the user's requests, take
-- a first guess at which optional stanzas to enable, and keep only the
-- dependency closure of the packages that have any targets. See the
-- haddock above for details.
pruneInstallPlanPass1 :: Map UnitId [PackageTarget]
-> [ElaboratedPlanPackage]
-> [ElaboratedPlanPackage]
pruneInstallPlanPass1 perPkgTargetsMap pkgs =
map (mapConfiguredPackage fromPrunedPackage)
(fromMaybe [] $ Graph.closure g roots)
where
pkgs' = map (mapConfiguredPackage prune) pkgs
g = Graph.fromList pkgs'
-- Wrap each configured package with its pruned dependency list.
prune elab =
let elab' = (pruneOptionalStanzas . setElabBuildTargets) elab
in PrunedPackage elab' (pruneOptionalDependencies elab')
-- Roots of the closure: packages with any build/repl/haddock target.
roots = mapMaybe find_root pkgs'
find_root (InstallPlan.Configured (PrunedPackage elab _)) =
if not (null (elabBuildTargets elab)
&& isNothing (elabReplTarget elab)
&& not (elabBuildHaddocks elab))
then Just (installedUnitId elab)
else Nothing
find_root _ = Nothing
-- Elaborate and set the targets we'll build for this package. This is just
-- based on the targets from the user, not targets implied by reverse
-- dependencies. Those comes in the second pass once we know the rev deps.
--
setElabBuildTargets elab =
elab {
elabBuildTargets = mapMaybe targetForElab buildTargets,
elabReplTarget = replTarget >>= targetForElab,
elabBuildHaddocks = buildHaddocks
}
where
(buildTargets, replTarget, buildHaddocks)
= elaboratePackageTargets elab targets
targets = fromMaybe []
$ Map.lookup (installedUnitId elab) perPkgTargetsMap
targetForElab tgt@(ComponentTarget cname _) =
case elabPkgOrComp elab of
ElabPackage _ -> Just tgt -- always valid
ElabComponent comp
-- Only if the component name matches
| compComponentName comp == Just cname -> Just tgt
| otherwise -> Nothing
-- Decide whether or not to enable testsuites and benchmarks
--
-- The testsuite and benchmark targets are somewhat special in that we need
-- to configure the packages with them enabled, and we need to do that even
-- if we only want to build one of several testsuites.
--
-- There are two cases in which we will enable the testsuites (or
-- benchmarks): if one of the targets is a testsuite, or if all of the
-- testsuite dependencies are already cached in the store. The rationale
-- for the latter is to minimise how often we have to reconfigure due to
-- the particular targets we choose to build. Otherwise choosing to build
-- a testsuite target, and then later choosing to build an exe target
-- would involve unnecessarily reconfiguring the package with testsuites
-- disabled. Technically this introduces a little bit of stateful
-- behaviour to make this "sticky", but it should be benign.
--
pruneOptionalStanzas :: ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage
pruneOptionalStanzas elab@ElaboratedConfiguredPackage{ elabPkgOrComp = ElabPackage pkg } =
elab {
elabPkgOrComp = ElabPackage (pkg { pkgStanzasEnabled = stanzas })
}
where
stanzas :: Set OptionalStanza
stanzas = optionalStanzasRequiredByTargets elab
<> optionalStanzasRequestedByDefault elab
<> optionalStanzasWithDepsAvailable availablePkgs elab pkg
pruneOptionalStanzas elab = elab
-- Calculate package dependencies but cut out those needed only by
-- optional stanzas that we've determined we will not enable.
-- These pruned deps are not persisted in this pass since they're based on
-- the optional stanzas and we'll make further tweaks to the optional
-- stanzas in the next pass.
--
pruneOptionalDependencies :: ElaboratedConfiguredPackage -> [UnitId]
pruneOptionalDependencies elab@ElaboratedConfiguredPackage{ elabPkgOrComp = ElabComponent _ }
= InstallPlan.depends elab -- no pruning
pruneOptionalDependencies ElaboratedConfiguredPackage{ elabPkgOrComp = ElabPackage pkg }
= (CD.flatDeps . CD.filterDeps keepNeeded) (pkgOrderDependencies pkg)
where
keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas
keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas
keepNeeded _ _ = True
stanzas = pkgStanzasEnabled pkg
-- Stanzas that must be enabled because one of the chosen targets lives
-- inside them.
optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage
-> Set OptionalStanza
optionalStanzasRequiredByTargets pkg =
Set.fromList
[ stanza
| ComponentTarget cname _ <- elabBuildTargets pkg
++ maybeToList (elabReplTarget pkg)
, stanza <- maybeToList (componentOptionalStanza cname)
]
-- Stanzas the user explicitly requested (e.g. --enable-tests).
optionalStanzasRequestedByDefault :: ElaboratedConfiguredPackage
-> Set OptionalStanza
optionalStanzasRequestedByDefault =
Map.keysSet
. Map.filter (id :: Bool -> Bool)
. elabStanzasRequested
-- In this pass only pre-existing (installed) packages count as available.
availablePkgs =
Set.fromList
[ installedUnitId pkg
| InstallPlan.PreExisting pkg <- pkgs ]
-- | Given a set of already installed packages @availablePkgs@,
-- determine the set of available optional stanzas from @pkg@
-- which have all of their dependencies already installed. This is used
-- to implement "sticky" testsuites, where once we have installed
-- all of the deps needed for the test suite, we go ahead and
-- enable it always.
optionalStanzasWithDepsAvailable :: Set UnitId
-> ElaboratedConfiguredPackage
-> ElaboratedPackage
-> Set OptionalStanza
optionalStanzasWithDepsAvailable availablePkgs elab pkg =
Set.fromList
[ stanza
| stanza <- Set.toList (elabStanzasAvailable elab)
, let deps :: [UnitId]
deps = CD.select (optionalStanzaDeps stanza)
-- TODO: probably need to select other
-- dep types too eventually
(pkgOrderDependencies pkg)
, all (`Set.member` availablePkgs) deps
]
where
-- select only the dep components belonging to the given stanza
optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True
optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True
optionalStanzaDeps _ _ = False
-- The second pass does three things:
--
-- * A second go at deciding which optional stanzas to enable.
-- * Prune the dependencies based on the final choice of optional stanzas.
-- * Extend the targets within each package to build, now we know the reverse
-- dependencies, ie we know which libs are needed as deps by other packages.
--
-- Achieving sticky behaviour with enabling\/disabling optional stanzas is
-- tricky. The first approximation was handled by the first pass above, but
-- it's not quite enough. That pass will enable stanzas if all of the deps
-- of the optional stanza are already installed /in the store/. That's important
-- but it does not account for dependencies that get built inplace as part of
-- the project. We cannot take those inplace build deps into account in the
-- pruning pass however because we don't yet know which ones we're going to
-- build. Once we do know, we can have another go and enable stanzas that have
-- all their deps available. Now we can consider all packages in the pruned
-- plan to be available, including ones we already decided to build from
-- source.
--
-- Deciding which targets to build depends on knowing which packages have
-- reverse dependencies (ie are needed). This requires the result of first
-- pass, which is another reason we have to split it into two passes.
--
-- Note that just because we might enable testsuites or benchmarks (in the
-- first or second pass) doesn't mean that we build all (or even any) of them.
-- That depends on which targets we picked in the first pass.
--
-- | Second pruning pass: finalise the optional-stanza choices, prune deps
-- accordingly, and add lib/exe targets implied by reverse dependencies.
-- See the long comment above for the full rationale.
pruneInstallPlanPass2 :: [ElaboratedPlanPackage]
-> [ElaboratedPlanPackage]
pruneInstallPlanPass2 pkgs =
map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs
where
setStanzasDepsAndTargets elab =
elab {
elabBuildTargets = ordNub
$ elabBuildTargets elab
++ libTargetsRequiredForRevDeps
++ exeTargetsRequiredForRevDeps,
elabPkgOrComp =
case elabPkgOrComp elab of
ElabPackage pkg ->
-- Now every package in the pruned plan counts as available,
-- so we may enable additional "sticky" stanzas, then drop
-- the deps that only unneeded stanzas required.
let stanzas = pkgStanzasEnabled pkg
<> optionalStanzasWithDepsAvailable availablePkgs elab pkg
keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas
keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas
keepNeeded _ _ = True
in ElabPackage $ pkg {
pkgStanzasEnabled = stanzas,
pkgLibDependencies = CD.filterDeps keepNeeded (pkgLibDependencies pkg),
pkgExeDependencies = CD.filterDeps keepNeeded (pkgExeDependencies pkg),
pkgExeDependencyPaths = CD.filterDeps keepNeeded (pkgExeDependencyPaths pkg)
}
r@(ElabComponent _) -> r
}
where
-- A package's library must be built if anything depends on it.
libTargetsRequiredForRevDeps =
[ ComponentTarget Cabal.defaultLibName WholeComponent
| installedUnitId elab `Set.member` hasReverseLibDeps
]
exeTargetsRequiredForRevDeps =
-- TODO: allow requesting executable with different name
-- than package name
[ ComponentTarget (Cabal.CExeName (unPackageName (packageName (elabPkgSourceId elab))))
WholeComponent
| installedUnitId elab `Set.member` hasReverseExeDeps
]
-- In this pass everything remaining in the plan counts as available.
availablePkgs :: Set UnitId
availablePkgs = Set.fromList (map installedUnitId pkgs)
hasReverseLibDeps :: Set UnitId
hasReverseLibDeps =
Set.fromList [ SimpleUnitId (confInstId depid)
| InstallPlan.Configured pkg <- pkgs
, depid <- elabLibDependencies pkg ]
hasReverseExeDeps :: Set UnitId
hasReverseExeDeps =
Set.fromList [ SimpleUnitId depid
| InstallPlan.Configured pkg <- pkgs
, depid <- elabExeDependencies pkg ]
-- | Apply a function to the source-package payload of a plan package,
-- leaving pre-existing (installed) packages untouched.
mapConfiguredPackage :: (srcpkg -> srcpkg')
                     -> InstallPlan.GenericPlanPackage ipkg srcpkg
                     -> InstallPlan.GenericPlanPackage ipkg srcpkg'
mapConfiguredPackage f planpkg =
  case planpkg of
    InstallPlan.Configured  srcpkg  -> InstallPlan.Configured (f srcpkg)
    InstallPlan.PreExisting instpkg -> InstallPlan.PreExisting instpkg
-- | The optional stanza (tests or benchmarks), if any, that a component
-- belongs to. Libraries and executables belong to no optional stanza.
componentOptionalStanza :: Cabal.ComponentName -> Maybe OptionalStanza
componentOptionalStanza cname =
  case cname of
    Cabal.CTestName  _ -> Just TestStanzas
    Cabal.CBenchName _ -> Just BenchStanzas
    _                  -> Nothing
------------------------------------
-- Support for --only-dependencies
--
-- | Try to remove the given targets from the install plan.
--
-- This is not always possible.
--
-- | Try to remove the given target packages from the install plan,
-- keeping only their dependencies. Fails with 'CannotPruneDependencies'
-- if removing a target would leave another package with dangling deps.
pruneInstallPlanToDependencies :: Set UnitId
                               -> ElaboratedInstallPlan
                               -> Either CannotPruneDependencies
                                         ElaboratedInstallPlan
pruneInstallPlanToDependencies pkgTargets installPlan =
    -- precondition: every requested target is actually in the plan
    assert (all (isJust . InstallPlan.lookup installPlan)
                (Set.toList pkgTargets)) $
    fmap (InstallPlan.new (InstallPlan.planIndepGoals installPlan))
  . checkBrokenDeps
  . Graph.fromList
  . filter (\pkg -> installedUnitId pkg `Set.notMember` pkgTargets)
  . InstallPlan.toList
  $ installPlan
  where
    -- Our strategy is to remove the packages we don't want and then check
    -- if the remaining graph is broken or not, ie any packages with dangling
    -- dependencies. If there are then we cannot prune the given targets.
    checkBrokenDeps :: Graph.Graph ElaboratedPlanPackage
                    -> Either CannotPruneDependencies
                              (Graph.Graph ElaboratedPlanPackage)
    checkBrokenDeps graph =
      case Graph.broken graph of
        []             -> Right graph
        brokenPackages ->
          Left $ CannotPruneDependencies
                   [ (pkg, missingDeps)
                   | (pkg, missingDepIds) <- brokenPackages
                     -- mapMaybe instead of 'catMaybes . map' (same result)
                   , let missingDeps = mapMaybe lookupDep missingDepIds
                   ]
          where
            -- lookup in the original unpruned graph
            lookupDep = InstallPlan.lookup installPlan
-- | It is not always possible to prune to only the dependencies of a set of
-- targets. It may be the case that removing a package leaves something else
-- that still needed the pruned package.
--
-- This lists all the packages that would be broken, and their dependencies
-- that would be missing if we did prune.
--
-- Pairs each broken package with the pruned packages it still needed.
newtype CannotPruneDependencies =
CannotPruneDependencies [(ElaboratedPlanPackage,
[ElaboratedPlanPackage])]
#if MIN_VERSION_base(4,8,0)
deriving (Show, Typeable)
#else
deriving (Typeable)
-- Pre base-4.8 there is no displayException, so render via Show.
instance Show CannotPruneDependencies where
show = renderCannotPruneDependencies
#endif
instance Exception CannotPruneDependencies where
#if MIN_VERSION_base(4,8,0)
displayException = renderCannotPruneDependencies
#endif
-- | Render a 'CannotPruneDependencies' error as a user-facing message,
-- naming the packages that are still required by other targets.
renderCannotPruneDependencies :: CannotPruneDependencies -> String
renderCannotPruneDependencies (CannotPruneDependencies brokenPackages) =
    "Cannot select only the dependencies (as requested by the "
 ++ "'--only-dependencies' flag), "
 ++ subject
 ++ "required by a dependency of one of the other targets."
  where
    -- Phrase the subject according to how many packages are involved.
    subject = case pkgids of
      [pkgid] -> "the package " ++ display pkgid ++ " is "
      _       -> "the packages "
                 ++ intercalate ", " (map display pkgids) ++ " are "

    -- throw away the details and just list the deps that are needed
    pkgids :: [PackageId]
    pkgids = nub (map packageId (concatMap snd brokenPackages))
---------------------------
-- Setup.hs script policy
--
-- Handling for Setup.hs scripts is a bit tricky, part of it lives in the
-- solver phase, and part in the elaboration phase. We keep the helper
-- functions for both phases together here so at least you can see all of it
-- in one place.
--
-- There are four major cases for Setup.hs handling:
--
-- 1. @build-type@ Custom with a @custom-setup@ section
-- 2. @build-type@ Custom without a @custom-setup@ section
-- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@
-- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@
--
-- It's also worth noting that packages specifying @cabal-version: >= 1.23@
-- or later that have @build-type@ Custom will always have a @custom-setup@
-- section. Therefore in case 2, the specified @cabal-version@ will always be
-- less than 1.23.
--
-- In cases 1 and 2 we obviously have to build an external Setup.hs script,
-- while in case 4 we can use the internal library API. In case 3 we also have
-- to build an external Setup.hs script because the package needs a later
-- Cabal lib version than we can support internally.
--
-- data SetupScriptStyle = ... -- see ProjectPlanning.Types
-- | Work out the 'SetupScriptStyle' given the package description.
--
packageSetupScriptStyle :: PD.PackageDescription -> SetupScriptStyle
-- Note: the guard order is significant; the first matching clause wins.
packageSetupScriptStyle pkg
  | buildType == PD.Custom
  , Just setupbi <- PD.setupBuildInfo pkg -- does have a custom-setup stanza
  , not (PD.defaultSetupDepends setupbi)  -- but not one we added internally
  = SetupCustomExplicitDeps
  | buildType == PD.Custom
  , Just setupbi <- PD.setupBuildInfo pkg -- we get this case post-solver as
  , PD.defaultSetupDepends setupbi        -- the solver fills in the deps
  = SetupCustomImplicitDeps
  | buildType == PD.Custom
  , Nothing <- PD.setupBuildInfo pkg      -- we get this case pre-solver
  = SetupCustomImplicitDeps
  | PD.specVersion pkg > cabalVersion -- one cabal-install is built against
  = SetupNonCustomExternalLib
  | otherwise
  = SetupNonCustomInternalLib
  where
    -- A missing build-type is conservatively treated as Custom.
    buildType = fromMaybe PD.Custom (PD.buildType pkg)
-- | Part of our Setup.hs handling policy is implemented by getting the solver
-- to work out setup dependencies for packages. The solver already handles
-- packages that explicitly specify setup dependencies, but we can also tell
-- the solver to treat other packages as if they had setup dependencies.
-- That's what this function does, it gets called by the solver for all
-- packages that don't already have setup dependencies.
--
-- The dependencies we want to add is different for each 'SetupScriptStyle'.
--
-- Note that adding default deps means these deps are actually /added/ to the
-- packages that we get out of the solver in the 'SolverInstallPlan'. Making
-- implicit setup deps explicit is a problem in the post-solver stages because
-- we still need to distinguish the case of explicit and implicit setup deps.
-- See 'rememberImplicitSetupDeps'.
--
defaultSetupDeps :: Compiler -> Platform
                 -> PD.PackageDescription
                 -> Maybe [Dependency]
-- Returns 'Just deps' for the styles where we synthesise setup deps, and is
-- a programmer error ('error') for 'SetupCustomExplicitDeps', which should
-- never reach this function.
defaultSetupDeps compiler platform pkg =
    case packageSetupScriptStyle pkg of
      -- For packages with build type custom that do not specify explicit
      -- setup dependencies, we add a dependency on Cabal and a number
      -- of other packages.
      SetupCustomImplicitDeps ->
        Just $
        [ Dependency depPkgname anyVersion
        | depPkgname <- legacyCustomSetupPkgs compiler platform ] ++
        [ Dependency cabalPkgname cabalConstraint
        | packageName pkg /= cabalPkgname ]
        where
          -- The Cabal dep is slightly special:
          -- * We omit the dep for the Cabal lib itself, since it bootstraps.
          -- * We constrain it to be >= 1.18 < 2
          --
          cabalConstraint   = orLaterVersion cabalCompatMinVer
                                `intersectVersionRanges`
                              orLaterVersion (PD.specVersion pkg)
                                `intersectVersionRanges`
                              earlierVersion cabalCompatMaxVer
          -- The idea here is that at some point we will make significant
          -- breaking changes to the Cabal API that Setup.hs scripts use.
          -- So for old custom Setup scripts that do not specify explicit
          -- constraints, we constrain them to use a compatible Cabal version.
          -- The exact version where we'll make this API break has not yet been
          -- decided, so for the meantime we guess at 2.x.
          cabalCompatMaxVer = Version [2] []
          -- In principle we can talk to any old Cabal version, and we need to
          -- be able to do that for custom Setup scripts that require older
          -- Cabal lib versions. However in practice we currently have
          -- problems with Cabal-1.16. (1.16 does not know about build targets)
          -- If this is fixed we can relax this constraint.
          cabalCompatMinVer = Version [1,18] []
      -- For other build types (like Simple) if we still need to compile an
      -- external Setup.hs, it'll be one of the simple ones that only depends
      -- on Cabal and base.
      SetupNonCustomExternalLib ->
        Just [ Dependency cabalPkgname cabalConstraint
             , Dependency basePkgname anyVersion ]
        where
          cabalConstraint = orLaterVersion (PD.specVersion pkg)
      -- The internal setup wrapper method has no deps at all.
      SetupNonCustomInternalLib -> Just []
      SetupCustomExplicitDeps ->
        error $ "defaultSetupDeps: called for a package with explicit "
             ++ "setup deps: " ++ display (packageId pkg)
-- | Work out which version of the Cabal spec we will be using to talk to the
-- Setup.hs interface for this package.
--
-- This depends somewhat on the 'SetupScriptStyle' but most cases are a result
-- of what the solver picked for us, based on the explicit setup deps or the
-- ones added implicitly by 'defaultSetupDeps'.
--
packageSetupScriptSpecVersion :: Package pkg
                              => SetupScriptStyle
                              -> PD.PackageDescription
                              -> ComponentDeps [pkg]
                              -> Version
-- Note: equation order matters here; the final equation is the catch-all.
-- We're going to be using the internal Cabal library, so the spec version of
-- that is simply the version of the Cabal library that cabal-install has been
-- built with.
packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ =
    cabalVersion
-- If we happen to be building the Cabal lib itself then because that
-- bootstraps itself then we use the version of the lib we're building.
packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _
  | packageName pkg == cabalPkgname
  = packageVersion pkg
-- In all other cases we have a look at what version of the Cabal lib the
-- solver picked. Or if it didn't depend on Cabal at all (which is very rare)
-- then we look at the .cabal file to see what spec version it declares.
packageSetupScriptSpecVersion _ pkg deps =
    case find ((cabalPkgname ==) . packageName) (CD.setupDeps deps) of
      Just dep -> packageVersion dep
      Nothing  -> PD.specVersion pkg
-- Well-known package names used by the setup-dependency logic above.
cabalPkgname, basePkgname :: PackageName
cabalPkgname = PackageName "Cabal"
basePkgname  = PackageName "base"
-- | The packages that old-style custom Setup.hs scripts are implicitly
-- allowed to depend on, adjusted for the target OS and compiler flavour.
legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName]
legacyCustomSetupPkgs compiler (Platform _ os) =
    map PackageName (common ++ windowsOnly ++ unixOnly ++ ghcOnly)
  where
    common      = [ "array", "base", "binary", "bytestring", "containers"
                  , "deepseq", "directory", "filepath", "old-time", "pretty"
                  , "process", "time", "transformers" ]
    windowsOnly = [ "Win32" | os == Windows ]
    unixOnly    = [ "unix"  | os /= Windows ]
    ghcOnly     = [ "ghc-prim" | isGHC ] ++ [ "template-haskell" | isGHC ]

    isGHC = compilerCompatFlavor GHC compiler
-- The other aspects of our Setup.hs policy lives here where we decide on
-- the 'SetupScriptOptions'.
--
-- Our current policy for the 'SetupCustomImplicitDeps' case is that we
-- try to make the implicit deps cover everything, and we don't allow the
-- compiler to pick up other deps. This may or may not be sustainable, and
-- we might have to allow the deps to be non-exclusive, but that itself would
-- be tricky since we would have to allow the Setup access to all the packages
-- in the store and local dbs.
setupHsScriptOptions :: ElaboratedReadyPackage
                     -> ElaboratedSharedConfig
                     -> FilePath
                     -> FilePath
                     -> Bool
                     -> Lock
                     -> SetupScriptOptions
-- TODO: Fix this so custom is a separate component. Custom can ALWAYS
-- be a separate component!!!
-- Builds the 'SetupScriptOptions' from the elaborated package and shared
-- config; the record-wildcard patterns bring their fields into scope below.
setupHsScriptOptions (ReadyPackage elab@ElaboratedConfiguredPackage{..})
                     ElaboratedSharedConfig{..} srcdir builddir
                     isParallelBuild cacheLock =
    SetupScriptOptions {
      useCabalVersion          = thisVersion elabSetupScriptCliVersion,
      useCabalSpecVersion      = Just elabSetupScriptCliVersion,
      useCompiler              = Just pkgConfigCompiler,
      usePlatform              = Just pkgConfigPlatform,
      usePackageDB             = elabSetupPackageDBStack,
      usePackageIndex          = Nothing,
      useDependencies          = [ (uid, srcid)
                                 | ConfiguredId srcid uid
                                   <- elabSetupDependencies elab ],
      useDependenciesExclusive = True,
      -- version macros only make sense with explicit custom setup deps
      useVersionMacros         = elabSetupScriptStyle == SetupCustomExplicitDeps,
      useProgramDb             = pkgConfigCompilerProgs,
      useDistPref              = builddir,
      useLoggingHandle         = Nothing, -- this gets set later
      useWorkingDir            = Just srcdir,
      useExtraPathEnv          = elabExeDependencyPaths elab,
      useWin32CleanHack        = False,   --TODO: [required eventually]
      forceExternalSetupMethod = isParallelBuild,
      setupCacheLock           = Just cacheLock
    }
-- | To be used for the input for elaborateInstallPlan.
--
-- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure.
--
userInstallDirTemplates :: Compiler
                        -> IO InstallDirs.InstallDirTemplates
-- The original wrapped this single IO action in a redundant 'do' block.
userInstallDirTemplates compiler =
    InstallDirs.defaultInstallDirs
      (compilerFlavor compiler)
      True  -- user install
      False -- unused
storePackageInstallDirs :: CabalDirLayout
                        -> CompilerId
                        -> InstalledPackageId
                        -> InstallDirs.InstallDirs FilePath
-- Computes the nix-style store layout for one package.  Note that the
-- RecordWildCards construction 'InstallDirs {..}' picks up every binding
-- in the where clause below as the correspondingly-named record field.
storePackageInstallDirs CabalDirLayout{cabalStorePackageDirectory}
                        compid ipkgid =
    InstallDirs.InstallDirs {..}
  where
    -- everything hangs off the per-package store prefix
    prefix       = cabalStorePackageDirectory compid ipkgid
    bindir       = prefix </> "bin"
    libdir       = prefix </> "lib"
    libsubdir    = ""
    dynlibdir    = libdir
    libexecdir   = prefix </> "libexec"
    includedir   = libdir </> "include"
    datadir      = prefix </> "share"
    datasubdir   = ""
    docdir       = datadir </> "doc"
    mandir       = datadir </> "man"
    htmldir      = docdir </> "html"
    haddockdir   = htmldir
    sysconfdir   = prefix </> "etc"
--TODO: [code cleanup] perhaps reorder this code
-- based on the ElaboratedInstallPlan + ElaboratedSharedConfig,
-- make the various Setup.hs {configure,build,copy} flags
setupHsConfigureFlags :: ElaboratedReadyPackage
                      -> ElaboratedSharedConfig
                      -> Verbosity
                      -> FilePath
                      -> Cabal.ConfigFlags
-- Builds the full Setup.hs configure flags from the elaborated config.
-- The RecordWildCards construction 'Cabal.ConfigFlags {..}' is populated
-- from the correspondingly-named bindings in the where clause below.
setupHsConfigureFlags (ReadyPackage elab@ElaboratedConfiguredPackage{..})
                      sharedConfig@ElaboratedSharedConfig{..}
                      verbosity builddir =
    sanityCheckElaboratedConfiguredPackage sharedConfig elab
        (Cabal.ConfigFlags {..})
  where
    configArgs                = mempty -- unused, passed via args
    configDistPref            = toFlag builddir
    configCabalFilePath       = mempty
    configVerbosity           = toFlag verbosity
    -- IPID is set per-package; CID per-component
    configIPID                = case elabPkgOrComp of
                                  ElabPackage pkg -> toFlag (display (pkgInstalledId pkg))
                                  ElabComponent _ -> mempty
    configCID                 = case elabPkgOrComp of
                                  ElabPackage _ -> mempty
                                  ElabComponent _ -> toFlag (unitIdComponentId elabUnitId)
    configProgramPaths        = Map.toList elabProgramPaths
    configProgramArgs         = Map.toList elabProgramArgs
    configProgramPathExtra    = toNubList elabProgramPathExtra
    configHcFlavor            = toFlag (compilerFlavor pkgConfigCompiler)
    configHcPath              = mempty -- we use configProgramPaths instead
    configHcPkg               = mempty -- we use configProgramPaths instead
    configVanillaLib          = toFlag elabVanillaLib
    configSharedLib           = toFlag elabSharedLib
    configDynExe              = toFlag elabDynExe
    configGHCiLib             = toFlag elabGHCiLib
    configProfExe             = mempty
    configProfLib             = toFlag elabProfLib
    configProf                = toFlag elabProfExe
    -- configProfDetail is for exe+lib, but overridden by configProfLibDetail
    -- so we specify both so we can specify independently
    configProfDetail          = toFlag elabProfExeDetail
    configProfLibDetail       = toFlag elabProfLibDetail
    configCoverage            = toFlag elabCoverage
    configLibCoverage         = mempty
    configOptimization        = toFlag elabOptimization
    configSplitObjs           = toFlag elabSplitObjs
    configStripExes           = toFlag elabStripExes
    configStripLibs           = toFlag elabStripLibs
    configDebugInfo           = toFlag elabDebugInfo
    configAllowOlder          = mempty -- we use configExactConfiguration True
    configAllowNewer          = mempty -- we use configExactConfiguration True
    configConfigurationsFlags = elabFlagAssignment
    configConfigureArgs       = elabConfigureScriptArgs
    configExtraLibDirs        = elabExtraLibDirs
    configExtraFrameworkDirs  = elabExtraFrameworkDirs
    configExtraIncludeDirs    = elabExtraIncludeDirs
    configProgPrefix          = maybe mempty toFlag elabProgPrefix
    configProgSuffix          = maybe mempty toFlag elabProgSuffix
    configInstallDirs         = fmap (toFlag . InstallDirs.toPathTemplate)
                                     elabInstallDirs
    -- we only use configDependencies, unless we're talking to an old Cabal
    -- in which case we use configConstraints
    -- NB: This does NOT use InstallPlan.depends, which includes executable
    -- dependencies which should NOT be fed in here (also you don't have
    -- enough info anyway)
    configDependencies        = [ (packageName srcid, cid)
                                | ConfiguredId srcid cid <- elabLibDependencies elab ]
    configConstraints         =
        case elabPkgOrComp of
            ElabPackage _ ->
                [ thisPackageVersion srcid
                | ConfiguredId srcid _uid <- elabLibDependencies elab ]
            ElabComponent _ -> []
    -- explicitly clear, then our package db stack
    -- TODO: [required eventually] have to do this differently for older Cabal versions
    configPackageDBs          = Nothing : map Just elabBuildPackageDBStack
    -- test/bench stanzas are only tracked at the package granularity
    configTests               = case elabPkgOrComp of
                                  ElabPackage pkg -> toFlag (TestStanzas `Set.member` pkgStanzasEnabled pkg)
                                  ElabComponent _ -> mempty
    configBenchmarks          = case elabPkgOrComp of
                                  ElabPackage pkg -> toFlag (BenchStanzas `Set.member` pkgStanzasEnabled pkg)
                                  ElabComponent _ -> mempty
    configExactConfiguration  = toFlag True
    configFlagError           = mempty --TODO: [research required] appears not to be implemented
    configRelocatable         = mempty --TODO: [research required] ???
    configScratchDir          = mempty -- never use
    configUserInstall         = mempty -- don't rely on defaults
    configPrograms_           = mempty -- never use, shouldn't exist
-- | The positional arguments for Setup.hs configure: empty for whole
-- packages, a single component target for per-component configure.
setupHsConfigureArgs :: ElaboratedConfiguredPackage
                     -> [String]
setupHsConfigureArgs elab =
    case elabPkgOrComp elab of
      ElabPackage _ -> []
      ElabComponent comp ->
        [ showComponentTarget (packageId elab)
                              (ComponentTarget (cname comp) WholeComponent) ]
  where
    -- Setup components have no component name; configuring one is a bug.
    cname comp =
      fromMaybe (error "setupHsConfigureArgs: trying to configure setup")
                (compComponentName comp)
setupHsBuildFlags :: ElaboratedConfiguredPackage
                  -> ElaboratedSharedConfig
                  -> Verbosity
                  -> FilePath
                  -> Cabal.BuildFlags
-- Only verbosity and dist dir vary here; program paths/args were already
-- fixed at configure time, hence the first two arguments are ignored.
setupHsBuildFlags _ _ verbosity builddir =
    Cabal.BuildFlags {
      buildProgramPaths = mempty, --unused, set at configure time
      buildProgramArgs = mempty, --unused, set at configure time
      buildVerbosity = toFlag verbosity,
      buildDistPref = toFlag builddir,
      buildAssumeDepsUpToDate = toFlag False,
      buildNumJobs = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs),
      buildArgs = mempty -- unused, passed via args not flags
    }
-- | The positional build targets passed to Setup.hs build (package case
-- only; per-component builds pass no extra arguments).
setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String]
setupHsBuildArgs elab@(ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage _ })
  -- Fix for #3335: Setup.hs CLIs older than 1.17 do not accept build
  -- target arguments, so pass none.
  = if elabSetupScriptCliVersion elab >= Version [1,17] []
      then map (showComponentTarget (packageId elab)) (elabBuildTargets elab)
      else []
setupHsBuildArgs (ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent _ })
  = []
setupHsReplFlags :: ElaboratedConfiguredPackage
                 -> ElaboratedSharedConfig
                 -> Verbosity
                 -> FilePath
                 -> Cabal.ReplFlags
-- Only verbosity and dist dir vary; the package/shared-config arguments
-- are unused because everything else was fixed at configure time.
setupHsReplFlags _ _ verbosity builddir =
    Cabal.ReplFlags {
      replProgramPaths = mempty, --unused, set at configure time
      replProgramArgs = mempty, --unused, set at configure time
      replVerbosity = toFlag verbosity,
      replDistPref = toFlag builddir,
      replReload = mempty --only used as callback from repl
    }
-- | The positional argument for Setup.hs repl: the single repl target,
-- if one was elaborated.
setupHsReplArgs :: ElaboratedConfiguredPackage -> [String]
setupHsReplArgs elab =
    case elabReplTarget elab of
      Nothing     -> []
      Just target -> [showComponentTarget (packageId elab) target]
    --TODO: should be able to give multiple modules in one component
setupHsCopyFlags :: ElaboratedConfiguredPackage
                 -> ElaboratedSharedConfig
                 -> Verbosity
                 -> FilePath
                 -> Cabal.CopyFlags
-- Copies in place ('NoCopyDest'); only verbosity and dist dir vary.
setupHsCopyFlags _ _ verbosity builddir =
    Cabal.CopyFlags {
      --TODO: [nice to have] we currently just rely on Setup.hs copy to always do the right
      -- thing, but perhaps we ought really to copy into an image dir and do
      -- some sanity checks and move into the final location ourselves
      copyArgs = [], -- TODO: could use this to only copy what we enabled
      copyDest = toFlag InstallDirs.NoCopyDest,
      copyDistPref = toFlag builddir,
      copyAssumeDepsUpToDate = toFlag False,
      copyVerbosity = toFlag verbosity
    }
setupHsRegisterFlags :: ElaboratedConfiguredPackage
                     -> ElaboratedSharedConfig
                     -> Verbosity
                     -> FilePath
                     -> FilePath
                     -> Cabal.RegisterFlags
-- Registration always goes via a generated package conf file
-- ('regGenPkgConf'), never directly into a package db.
setupHsRegisterFlags ElaboratedConfiguredPackage{..} _
                     verbosity builddir pkgConfFile =
    Cabal.RegisterFlags {
      regPackageDB = mempty, -- misfeature
      regGenScript = mempty, -- never use
      regGenPkgConf = toFlag (Just pkgConfFile),
      -- in-place registration only for inplace-style builds
      regInPlace = case elabBuildStyle of
                     BuildInplaceOnly -> toFlag True
                     _ -> toFlag False,
      regPrintId = mempty, -- never use
      regDistPref = toFlag builddir,
      regVerbosity = toFlag verbosity,
      -- Currently not used, because this is per-package.
      regAssumeDepsUpToDate = toFlag False,
      regArgs = []
    }
setupHsHaddockFlags :: ElaboratedConfiguredPackage
                    -> ElaboratedSharedConfig
                    -> Verbosity
                    -> FilePath
                    -> Cabal.HaddockFlags
-- TODO: reconsider whether or not Executables/TestSuites/...
-- needed for component
-- Maps the per-package elaborated haddock settings onto Cabal's flags;
-- the shared-config argument is unused.
setupHsHaddockFlags (ElaboratedConfiguredPackage{..}) _ verbosity builddir =
    Cabal.HaddockFlags {
      haddockProgramPaths = mempty, --unused, set at configure time
      haddockProgramArgs = mempty, --unused, set at configure time
      haddockHoogle = toFlag elabHaddockHoogle,
      haddockHtml = toFlag elabHaddockHtml,
      haddockHtmlLocation = maybe mempty toFlag elabHaddockHtmlLocation,
      haddockForHackage = mempty, --TODO: new flag
      haddockExecutables = toFlag elabHaddockExecutables,
      haddockTestSuites = toFlag elabHaddockTestSuites,
      haddockBenchmarks = toFlag elabHaddockBenchmarks,
      haddockInternal = toFlag elabHaddockInternal,
      haddockCss = maybe mempty toFlag elabHaddockCss,
      haddockHscolour = toFlag elabHaddockHscolour,
      haddockHscolourCss = maybe mempty toFlag elabHaddockHscolourCss,
      haddockContents = maybe mempty toFlag elabHaddockContents,
      haddockDistPref = toFlag builddir,
      haddockKeepTempFiles = mempty, --TODO: from build settings
      haddockVerbosity = toFlag verbosity
    }
{-
setupHsTestFlags :: ElaboratedConfiguredPackage
-> ElaboratedSharedConfig
-> Verbosity
-> FilePath
-> Cabal.TestFlags
setupHsTestFlags _ _ verbosity builddir =
Cabal.TestFlags {
}
-}
------------------------------------------------------------------------------
-- * Sharing installed packages
------------------------------------------------------------------------------
--
-- Nix style store management for tarball packages
--
-- So here's our strategy:
--
-- We use a per-user nix-style hashed store, but /only/ for tarball packages.
-- So that includes packages from hackage repos (and other http and local
-- tarballs). For packages in local directories we do not register them into
-- the shared store by default, we just build them locally inplace.
--
-- The reason we do it like this is that it's easy to make stable hashes for
-- tarball packages, and these packages benefit most from sharing. By contrast
-- unpacked dir packages are harder to hash and they tend to change more
-- frequently so there's less benefit to sharing them.
--
-- When using the nix store approach we have to run the solver *without*
-- looking at the packages installed in the store, just at the source packages
-- (plus core\/global installed packages). Then we do a post-processing pass
-- to replace configured packages in the plan with pre-existing ones, where
-- possible. Where possible of course means where the nix-style package hash
-- equals one that's already in the store.
--
-- One extra wrinkle is that unless we know package tarball hashes upfront, we
-- will have to download the tarballs to find their hashes. So we have two
-- options: delay replacing source with pre-existing installed packages until
-- the point during the execution of the install plan where we have the
-- tarball, or try to do as much up-front as possible and then check again
-- during plan execution. The former isn't great because we would end up
-- telling users we're going to re-install loads of packages when in fact we
-- would just share them. It'd be better to give as accurate a prediction as
-- we can. The latter is better for users, but we do still have to check
-- during plan execution because it's important that we don't replace existing
-- installed packages even if they have the same package hash, because we
-- don't guarantee ABI stability.
-- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but
-- not replace installed packages with ghc-pkg.
packageHashInputs :: ElaboratedSharedConfig
                  -> ElaboratedConfiguredPackage
                  -> PackageHashInputs
-- Collects everything that feeds into the nix-style package hash.
-- Deliberately partial: calling it on a package without a source hash is a
-- programmer error (see the second equation).
packageHashInputs
    pkgshared
    elab@(ElaboratedConfiguredPackage {
      elabPkgSourceHash = Just srchash
    }) =
    PackageHashInputs {
      pkgHashPkgId = packageId elab,
      pkgHashComponent = Nothing,
      pkgHashSourceHash = srchash,
      pkgHashPkgConfigDeps = Set.fromList (elabPkgConfigDependencies elab),
      pkgHashDirectDeps =
        case elabPkgOrComp elab of
          ElabPackage (ElaboratedPackage{..}) ->
            Set.fromList $
             [ confInstId dep
             | dep <- CD.select relevantDeps pkgLibDependencies ] ++
             [ confInstId dep
             | dep <- CD.select relevantDeps pkgExeDependencies ]
          ElabComponent comp ->
            Set.fromList (map confInstId (compLibDependencies comp)
                          ++ compExeDependencies comp),
      pkgHashOtherConfig = packageHashConfigInputs pkgshared elab
    }
  where
    -- Obviously the main deps are relevant
    relevantDeps CD.ComponentLib = True
    relevantDeps (CD.ComponentSubLib _) = True
    relevantDeps (CD.ComponentExe _) = True
    -- Setup deps can affect the Setup.hs behaviour and thus what is built
    relevantDeps CD.ComponentSetup = True
    -- However testsuites and benchmarks do not get installed and should not
    -- affect the result, so we do not include them.
    relevantDeps (CD.ComponentTest _) = False
    relevantDeps (CD.ComponentBench _) = False
packageHashInputs _ pkg =
    error $ "packageHashInputs: only for packages with source hashes. "
         ++ display (packageId pkg)
packageHashConfigInputs :: ElaboratedSharedConfig
                        -> ElaboratedConfiguredPackage
                        -> PackageHashConfigInputs
-- Gathers every configuration knob that can change the build output and
-- therefore must participate in the package hash.
packageHashConfigInputs
    ElaboratedSharedConfig{..}
    ElaboratedConfiguredPackage{..} =
    PackageHashConfigInputs {
      pkgHashCompilerId = compilerId pkgConfigCompiler,
      pkgHashPlatform = pkgConfigPlatform,
      pkgHashFlagAssignment = elabFlagAssignment,
      pkgHashConfigureScriptArgs = elabConfigureScriptArgs,
      pkgHashVanillaLib = elabVanillaLib,
      pkgHashSharedLib = elabSharedLib,
      pkgHashDynExe = elabDynExe,
      pkgHashGHCiLib = elabGHCiLib,
      pkgHashProfLib = elabProfLib,
      pkgHashProfExe = elabProfExe,
      pkgHashProfLibDetail = elabProfLibDetail,
      pkgHashProfExeDetail = elabProfExeDetail,
      pkgHashCoverage = elabCoverage,
      pkgHashOptimization = elabOptimization,
      pkgHashSplitObjs = elabSplitObjs,
      pkgHashStripLibs = elabStripLibs,
      pkgHashStripExes = elabStripExes,
      pkgHashDebugInfo = elabDebugInfo,
      pkgHashProgramArgs = elabProgramArgs,
      pkgHashExtraLibDirs = elabExtraLibDirs,
      pkgHashExtraFrameworkDirs = elabExtraFrameworkDirs,
      pkgHashExtraIncludeDirs = elabExtraIncludeDirs,
      pkgHashProgPrefix = elabProgPrefix,
      pkgHashProgSuffix = elabProgSuffix
    }
-- | Given the 'InstalledPackageIndex' for a nix-style package store, and an
-- 'ElaboratedInstallPlan', replace configured source packages by pre-existing
-- installed packages whenever they exist.
--
improveInstallPlanWithPreExistingPackages :: InstalledPackageIndex
                                          -> Set ComponentId
                                          -> ElaboratedInstallPlan
                                          -> ElaboratedInstallPlan
-- Walks the plan in reverse topological order and swaps each configured
-- package for its installed counterpart, when one exists in the store.
improveInstallPlanWithPreExistingPackages installedPkgIndex installedExes installPlan =
    replaceWithPreExisting installPlan
      [ ipkg
      | InstallPlan.Configured pkg
          <- InstallPlan.reverseTopologicalOrder installPlan
      , ipkg <- maybeToList (canPackageBeImproved pkg) ]
  where
    --TODO: sanity checks:
    -- * the installed package must have the expected deps etc
    -- * the installed package must not be broken, valid dep closure
    --TODO: decide what to do if we encounter broken installed packages,
    -- since overwriting is never safe.
    canPackageBeImproved pkg =
      case PackageIndex.lookupUnitId
             installedPkgIndex (installedUnitId pkg) of
        Just x -> Just x
        Nothing | SimpleUnitId cid <- installedUnitId pkg
                , cid `Set.member` installedExes
                  -- Same hack as replaceWithPreExisting: fabricate a
                  -- minimal installed-package record for installed exes.
                -> Just (Installed.emptyInstalledPackageInfo {
                           Installed.installedUnitId = installedUnitId pkg
                         })
                | otherwise -> Nothing
    -- fold each improvement into the plan one at a time
    replaceWithPreExisting =
      foldl' (\plan ipkg -> InstallPlan.preexisting
                              (installedUnitId ipkg) ipkg plan)
| sopvop/cabal | cabal-install/Distribution/Client/ProjectPlanning.hs | bsd-3-clause | 123,142 | 948 | 22 | 36,515 | 10,032 | 7,360 | 2,672 | 1,691 | 24 |
main = (readFile "input/p013_input.txt") >>= (putStrLn . take 10 . show . sum . (map (\x -> read x :: Integer)) . lines)
| foreverbell/project-euler-solutions | src/13.hs | bsd-3-clause | 122 | 1 | 13 | 24 | 65 | 32 | 33 | 1 | 1 |
import Test.QuickCheck
import Test.QuickCheck.Function
-- Functor law
-- fmap id = id
-- fmap (f . g) = (fmap f) . (fmap g)
-- quickcheck helper function
-- | Functor identity law: @fmap id@ must behave exactly like @id@.
functorIdentity :: (Functor f, Eq (f a)) => f a -> Bool
functorIdentity x = fmap id x == id x
-- | Functor composition law for a specific pair of functions:
-- @fmap (g . f)@ must agree with @fmap g . fmap f@.
functorCompose :: (Functor f, Eq (f c))
               => (a -> b)
               -> (b -> c)
               -> f a
               -> Bool
functorCompose f g value =
    fmap (g . f) value == (fmap g . fmap f) value
-- using QuickCheck generating functions
-- | Composition law again, but taking QuickCheck-generated functions
-- (wrapped in 'Fun') so the functions themselves can be arbitrary.
functorCompose' :: (Functor f, Eq (f c))
                => f a
                -> Fun a b
                -> Fun b c
                -> Bool
functorCompose' xs (Fun _ f) (Fun _ g) =
    fmap (g . f) xs == (fmap g . fmap f) xs
-- Shorthand for a QuickCheck-generated Int -> Int function, and for the
-- composition property specialised to lists of Int.
type IntToInt = Fun Int Int
type IntFC = [Int] -> IntToInt -> IntToInt -> Bool
main :: IO ()
main = do
  -- Identity law on lists of Int.
  quickCheck $ \xs -> functorIdentity (xs :: [Int])
  -- Composition law with a fixed pair of functions.
  quickCheck $ \xs -> functorCompose (+1) (*2) (xs :: [Int])
  -- arbitrary function generated from quickcheck for testing function law
  quickCheck (functorCompose' :: IntFC)
  -- Same property once more; the original eta-expanded wrapper
  -- (\xs -> functorCompose' xs) was redundant and has been reduced.
  quickCheck (functorCompose' :: IntFC)
| chengzh2008/hpffp | src/ch16-Functor/quickCheckFunctorLaw.hs | bsd-3-clause | 1,150 | 1 | 11 | 376 | 433 | 227 | 206 | 25 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Snap.Snaplet.PostgresqlSimple.Internal where
import Control.Monad.IO.Class
import Control.Monad.Trans
import Control.Monad.Trans.Control (MonadBaseControl(..), control)
import Control.Monad.Trans.Identity
import Control.Monad.Trans.List
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Reader
import qualified Control.Monad.Trans.RWS.Lazy as LRWS
import qualified Control.Monad.Trans.RWS.Strict as SRWS
import qualified Control.Monad.Trans.State.Lazy as LS
import qualified Control.Monad.Trans.State.Strict as SS
import qualified Control.Monad.Trans.Writer.Lazy as LW
import qualified Control.Monad.Trans.Writer.Strict as SW
import Data.ByteString (ByteString)
import Data.Monoid
import Data.Pool
import qualified Database.PostgreSQL.Simple as P
------------------------------------------------------------------------------
-- | The state for the postgresql-simple snaplet. To use it in your app
-- include this in your application state and use pgsInit to initialize it.
data Postgres = PostgresPool (Pool P.Connection) -- ^ draw connections from a shared pool
              | PostgresConn P.Connection        -- ^ a single pinned connection
------------------------------------------------------------------------------
-- | Instantiate this typeclass on 'Handler b YourAppState' so this snaplet
-- can find the connection source. If you need to have multiple instances of
-- the postgres snaplet in your application, then don't provide this instance
-- and leverage the default instance by using \"@with dbLens@\" in front of calls
-- to snaplet-postgresql-simple functions.
class (MonadIO m, MonadBaseControl IO m) => HasPostgres m where
    -- | Retrieve the current connection source (pool or pinned connection).
    getPostgresState :: m Postgres
    -- | Run an action with the connection source locally overridden; used
    -- by 'withPG' to pin a single connection for a region of code.
    setLocalPostgresState :: Postgres -> m a -> m a
-- The instances below mechanically lift 'HasPostgres' through the standard
-- monad transformers: 'getPostgresState' is lifted with 'lift', and
-- 'setLocalPostgresState' unwraps the transformer, applies the override
-- underneath, and re-wraps.
instance HasPostgres m => HasPostgres (IdentityT m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (IdentityT m) = IdentityT $
        setLocalPostgresState pg m
instance HasPostgres m => HasPostgres (ListT m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (ListT m) = ListT $
        setLocalPostgresState pg m
instance HasPostgres m => HasPostgres (MaybeT m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (MaybeT m) = MaybeT $
        setLocalPostgresState pg m
-- OVERLAPPABLE so applications can give their own ReaderT-based instance.
instance {-#OVERLAPPABLE #-} HasPostgres m => HasPostgres (ReaderT r m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (ReaderT m) = ReaderT $ \e ->
        setLocalPostgresState pg (m e)
instance (Monoid w, HasPostgres m) => HasPostgres (LW.WriterT w m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (LW.WriterT m) = LW.WriterT $
        setLocalPostgresState pg m
instance (Monoid w, HasPostgres m) => HasPostgres (SW.WriterT w m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (SW.WriterT m) = SW.WriterT $
        setLocalPostgresState pg m
instance HasPostgres m => HasPostgres (LS.StateT w m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (LS.StateT m) = LS.StateT $ \s ->
        setLocalPostgresState pg (m s)
instance HasPostgres m => HasPostgres (SS.StateT w m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (SS.StateT m) = SS.StateT $ \s ->
        setLocalPostgresState pg (m s)
instance (Monoid w, HasPostgres m) => HasPostgres (LRWS.RWST r w s m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (LRWS.RWST m) = LRWS.RWST $ \e s ->
        setLocalPostgresState pg (m e s)
instance (Monoid w, HasPostgres m) => HasPostgres (SRWS.RWST r w s m) where
    getPostgresState = lift getPostgresState
    setLocalPostgresState pg (SRWS.RWST m) = SRWS.RWST $ \e s ->
        setLocalPostgresState pg (m e s)
------------------------------------------------------------------------------
-- | Data type holding all the snaplet's config information.
-- The pool sizing fields mirror the parameters of Data.Pool's createPool.
data PGSConfig = PGSConfig
    { pgsConnStr    :: ByteString
      -- ^ A libpq connection string.
    , pgsNumStripes :: Int
      -- ^ The number of distinct sub-pools to maintain. The smallest
      -- acceptable value is 1.
    , pgsIdleTime   :: Double
      -- ^ Amount of time for which an unused resource is kept open. The
      -- smallest acceptable value is 0.5 seconds.
    , pgsResources  :: Int
      -- ^ Maximum number of resources to keep open per stripe. The smallest
      -- acceptable value is 1.
    }
------------------------------------------------------------------------------
-- | Returns a config object with default values and the specified connection
-- string.
pgsDefaultConfig :: ByteString
                 -- ^ A connection string such as \"host=localhost
                 -- port=5432 dbname=mydb\"
                 -> PGSConfig
-- Defaults: one stripe, 5 second idle timeout, 20 connections per stripe.
pgsDefaultConfig connstr =
    PGSConfig { pgsConnStr    = connstr
              , pgsNumStripes = 1
              , pgsIdleTime   = 5
              , pgsResources  = 20
              }
------------------------------------------------------------------------------
-- | Function that reserves a single connection for the duration of the given
-- action. Nested calls to withPG will only reserve one connection. For example,
-- the following code calls withPG twice in a nested way yet only results in a single
-- connection being reserved:
--
-- > myHandler = withPG $ do
-- > queryTheDatabase
-- > commonDatabaseMethod
-- >
-- > commonDatabaseMethod = withPG $ do
-- > moreDatabaseActions
-- > evenMoreDatabaseActions
--
-- This is useful in a practical setting because you may often find yourself in a situation
-- where you have common code (that requires a database connection) that you wish to call from
-- other blocks of code that may require a database connection and you still want to make sure
-- that you are only using one connection through all of your nested methods.
withPG :: (HasPostgres m) => m b -> m b
withPG action = do
    pgs <- getPostgresState
    case pgs of
      -- Already pinned to one connection (we are inside another withPG):
      -- just run the action.
      PostgresConn _    -> action
      -- Otherwise check one connection out of the pool and pin it for the
      -- duration of the action, so nested calls reuse it.
      PostgresPool pool ->
        withResource pool $ \conn ->
          setLocalPostgresState (PostgresConn conn) action
------------------------------------------------------------------------------
-- | Convenience function for executing a function that needs a database
-- connection.
liftPG :: (HasPostgres m) => (P.Connection -> m a) -> m a
-- Uses monad-control's 'control' so the monadic callback can run inside
-- the IO-level 'withConnection' bracket.
liftPG action =
    getPostgresState >>= \pg ->
      control (\runInBase -> withConnection pg (runInBase . action))
-- | Convenience function for executing a function that needs a database
-- connection specialized to IO.
-- | Convenience function for executing a function that needs a database
-- connection, specialized to an IO callback.
liftPG' :: (HasPostgres m) => (P.Connection -> IO b) -> m b
liftPG' f = getPostgresState >>= \st -> withConnection st f
------------------------------------------------------------------------------
-- | Convenience function for executing a function that needs a database
-- connection.
-- | Run an IO callback against a connection, checking one out of the pool
-- if necessary, or reusing the pinned connection.
withConnection :: MonadIO m => Postgres -> (P.Connection -> IO b) -> m b
withConnection pg f = liftIO $ case pg of
    PostgresPool p -> withResource p f
    PostgresConn c -> f c
| sopvop/snaplet-postgresql-simple | src/Snap/Snaplet/PostgresqlSimple/Internal.hs | bsd-3-clause | 7,117 | 59 | 23 | 1,389 | 1,474 | 836 | 638 | 94 | 2 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Ordinal.PL.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.PL.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
-- Corpus-driven tests for Polish ordinals.
tests = testGroup "PL Tests" corpusTests
  where
    corpusTests = [ makeCorpusTest [Seal Ordinal] corpus ]
| facebookincubator/duckling | tests/Duckling/Ordinal/PL/Tests.hs | bsd-3-clause | 504 | 0 | 9 | 78 | 79 | 50 | 29 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Network
import System.IO
import Control.Monad.IO.Class
import Control.Monad
import Data.Conduit
import Data.Conduit.List
import Data.Conduit.Binary
import Text.XML.Stream.Parse
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import Client
main :: IO ()
-- Connect to a local XMPP server, upgrade the stream via STARTTLS, and
-- dump the parsed XML events from the TLS stream to stdout.
main = do
	-- Plain TCP connection; port 54492 is the test server's port
	-- (presumably — confirm against the server config).
	h <- connectTo "localhost" (PortNumber 54492)
	-- Open the initial (cleartext) XML stream
	BS.hPut h $ beginDoc +++ stream
	hPutStr h starttls
	-- Skip 12 '>'-terminated chunks of the server's handshake reply,
	-- echoing them; the count is hard-coded — TODO confirm it matches
	-- this server's stream-features response.
	replicateM_ 12 . toTagEnd $ hGetChar h
	-- Wrap the handle in TLS (certificate/key args left undefined here)
	tls <- openTlsServer [(undefined, undefined)] h
	-- Restart the XML stream over TLS, as XMPP requires after STARTTLS
	tPut tls $ beginDoc +++ stream
	-- Parse incoming TLS bytes as XML events and print each one
	ioSource (tGetContent tls)
		=$= parseBytes def
		=$= runIO print
		$$ sinkNull
--	tGetContent tls >>= print
--	sourceHandle h {- =$= parseBytes def -} =$= runIO BSC.putStrLn $$ sinkNull
-- | Turn an IO action into an endless conduit source: run the action,
-- yield its result, repeat. Never terminates on its own.
ioSource :: MonadIO m => IO a -> Source m a
ioSource io = forever $ liftIO io >>= yield
-- | Read characters from the supplied action, echoing each to stdout,
-- until (and including) the first '>'.
toTagEnd :: IO Char -> IO ()
toTagEnd getC = do
	c <- getC
	putChar c
	unless (c == '>') $ toTagEnd getC
(+++) = BS.append
beginDoc, stream :: BS.ByteString
-- XML declaration sent before each stream header.
beginDoc = "<?xml version=\"1.0\"?>"
-- Opening <stream:stream> header for the XMPP connection to localhost.
stream = "<stream:stream to=\"localhost\" xml:lang=\"en\" version=\"1.0\" " +++
	"xmlns=\"jabber:client\" " +++
	"xmlns:stream=\"http://etherx.jabber.org/streams\">"
-- STARTTLS request element (XMPP TLS negotiation namespace).
starttls :: String
starttls = "<starttls xmlns=\"urn:ietf:params:xml:ns:xmpp-tls\"/>"
-- | Pass each upstream value through unchanged, running the given IO
-- action on it as a side effect (e.g. logging).
--
-- Fix: the original recursed unconditionally after 'await', so once the
-- upstream was exhausted ('await' returning Nothing) it looped forever.
-- Now the conduit terminates when upstream does.
runIO :: (Monad m, MonadIO m) => (a -> IO ()) -> Conduit a m a
runIO io = do
	mx <- await
	case mx of
		Nothing -> return ()
		Just x -> do
			liftIO $ io x
			yield x
			runIO io
| YoshikuniJujo/forest | subprojects/xmpp-tls-analysis/client.hs | bsd-3-clause | 1,471 | 4 | 13 | 256 | 478 | 242 | 236 | 47 | 2 |
{-# LANGUAGE CPP #-}
module TcSimplify(
simplifyInfer,
pickQuantifiablePreds, growThetaTyVars,
simplifyAmbiguityCheck,
simplifyDefault,
simplifyTop, simplifyInteractive,
solveWantedsTcM,
                  -- For Rules we need these two
solveWanteds, runTcS
) where
#include "HsVersions.h"
import TcRnTypes
import TcRnMonad
import TcErrors
import TcMType as TcM
import TcType
import TcSMonad as TcS
import TcInteract
import Kind ( isKind, defaultKind_maybe )
import Inst
import Unify ( tcMatchTy )
import Type ( classifyPredType, isIPClass, PredTree(..)
, getClassPredTys_maybe, EqRel(..) )
import TyCon ( isTypeFamilyTyCon )
import Class ( Class )
import Id ( idType )
import Var
import Unique
import VarSet
import TcEvidence
import Name
import Bag
import ListSetOps
import Util
import PrelInfo
import PrelNames
import Control.Monad ( unless )
import DynFlags ( ExtensionFlag( Opt_AllowAmbiguousTypes, Opt_FlexibleContexts ) )
import Class ( classKey )
import Maybes ( isNothing )
import Outputable
import FastString
import TrieMap () -- DV: for now
import Data.List( partition )
{-
*********************************************************************************
* *
* External interface *
* *
*********************************************************************************
-}
simplifyTop :: WantedConstraints -> TcM (Bag EvBind)
-- Simplify top-level constraints
-- Usually these will be implications,
-- but when there is nothing to quantify we don't wrap
-- in a degenerate implication, so we do that here instead
simplifyTop wanteds
  = do { traceTc "simplifyTop {" $ text "wanted = " <+> ppr wanteds
         -- simpl_top does the solving plus the top-level defaulting loop
       ; (final_wc, binds1) <- runTcS (simpl_top wanteds)
       ; traceTc "End simplifyTop }" empty
       ; traceTc "reportUnsolved {" empty
         -- Report residual errors; may produce extra bindings when type
         -- errors are deferred to runtime
       ; binds2 <- reportUnsolved final_wc
       ; traceTc "reportUnsolved }" empty
       ; return (binds1 `unionBags` binds2) }
simpl_top :: WantedConstraints -> TcS WantedConstraints
-- See Note [Top-level Defaulting Plan]
-- Solve, then iterate two defaulting phases: type-variable (kind)
-- defaulting first, then class defaulting, re-solving after each round.
simpl_top wanteds
  = do { wc_first_go <- nestTcS (solveWantedsAndDrop wanteds)
            -- This is where the main work happens
       ; try_tyvar_defaulting wc_first_go }
  where
    try_tyvar_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_tyvar_defaulting wc
      | isEmptyWC wc
      = return wc
      | otherwise
      = do { free_tvs <- TcS.zonkTyVarsAndFV (tyVarsOfWC wc)
           ; let meta_tvs = varSetElems (filterVarSet isMetaTyVar free_tvs)
                   -- zonkTyVarsAndFV: the wc_first_go is not yet zonked
                   -- filter isMetaTyVar: we might have runtime-skolems in GHCi,
                   -- and we definitely don't want to try to assign to those!

           ; meta_tvs' <- mapM defaultTyVar meta_tvs   -- Has unification side effects
           ; if meta_tvs' == meta_tvs   -- No defaulting took place;
                                        -- (defaulting returns fresh vars)
             then try_class_defaulting wc
             else do { wc_residual <- nestTcS (solveWantedsAndDrop wc)
                            -- See Note [Must simplify after defaulting]
                     ; try_class_defaulting wc_residual } }

    try_class_defaulting :: WantedConstraints -> TcS WantedConstraints
    try_class_defaulting wc
      | isEmptyWC wc
      = return wc
      | otherwise  -- See Note [When to do type-class defaulting]
      = do { something_happened <- applyDefaultingRules wc
                                   -- See Note [Top-level Defaulting Plan]
           ; if something_happened
             then do { wc_residual <- nestTcS (solveWantedsAndDrop wc)
                     ; try_class_defaulting wc_residual }
             else return wc }
{-
Note [When to do type-class defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In GHC 7.6 and 7.8.2, we did type-class defaulting only if insolubleWC
was false, on the grounds that defaulting can't help solve insoluble
constraints. But if we *don't* do defaulting we may report a whole
lot of errors that would be solved by defaulting; these errors are
quite spurious because fixing the single insoluble error means that
defaulting happens again, which makes all the other errors go away.
This is jolly confusing: Trac #9033.
So it seems better to always do type-class defaulting.
However, always doing defaulting does mean that we'll do it in
situations like this (Trac #5934):
run :: (forall s. GenST s) -> Int
run = fromInteger 0
We don't unify the return type of fromInteger with the given function
type, because the latter involves foralls. So we're left with
(Num alpha, alpha ~ (forall s. GenST s) -> Int)
Now we do defaulting, get alpha := Integer, and report that we can't
match Integer with (forall s. GenST s) -> Int. That's not totally
stupid, but perhaps a little strange.
Another potential alternative would be to suppress *all* non-insoluble
errors if there are *any* insoluble errors, anywhere, but that seems
too drastic.
Note [Must simplify after defaulting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We may have a deeply buried constraint
(t:*) ~ (a:Open)
which we couldn't solve because of the kind incompatibility, and 'a' is free.
Then when we default 'a' we can solve the constraint. And we want to do
that before starting in on type classes. We MUST do it before reporting
errors, because it isn't an error! Trac #7967 was due to this.
Note [Top-level Defaulting Plan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We have considered two design choices for where/when to apply defaulting.
(i) Do it in SimplCheck mode only /whenever/ you try to solve some
simple constraints, maybe deep inside the context of implications.
This used to be the case in GHC 7.4.1.
 (ii) Do it in a tight loop at simplifyTop, once all other constraint solving
      has finished. This is the current story.
Option (i) had many disadvantages:
a) First it was deep inside the actual solver,
b) Second it was dependent on the context (Infer a type signature,
or Check a type signature, or Interactive) since we did not want
to always start defaulting when inferring (though there is an exception to
this see Note [Default while Inferring])
c) It plainly did not work. Consider typecheck/should_compile/DfltProb2.hs:
f :: Int -> Bool
f x = const True (\y -> let w :: a -> a
w a = const a (y+1)
in w y)
We will get an implication constraint (for beta the type of y):
[untch=beta] forall a. 0 => Num beta
which we really cannot default /while solving/ the implication, since beta is
untouchable.
Instead our new defaulting story is to pull defaulting out of the solver loop and
go with option (ii), implemented at SimplifyTop. Namely:
- First have a go at solving the residual constraint of the whole program
- Try to approximate it with a simple constraint
- Figure out derived defaulting equations for that simple constraint
- Go round the loop again if you did manage to get some equations
Now, that has to do with class defaulting. However there exists type variable /kind/
defaulting. Again this is done at the top-level and the plan is:
- At the top-level, once you had a go at solving the constraint, do
figure out /all/ the touchable unification variables of the wanted constraints.
- Apply defaulting to their kinds
More details in Note [DefaultTyVar].
-}
------------------
simplifyAmbiguityCheck :: Type -> WantedConstraints -> TcM ()
-- Solve the constraints arising from the ambiguity check of a type
-- signature; reports errors rather than returning bindings.
simplifyAmbiguityCheck ty wanteds
  = do { traceTc "simplifyAmbiguityCheck {" (text "type = " <+> ppr ty $$ text "wanted = " <+> ppr wanteds)
       ; (final_wc, _binds) <- runTcS (simpl_top wanteds)
       ; traceTc "End simplifyAmbiguityCheck }" empty

       -- Normally report all errors; but with -XAllowAmbiguousTypes
       -- report only insoluble ones, since they represent genuinely
       -- inaccessible code
       ; allow_ambiguous <- xoptM Opt_AllowAmbiguousTypes
       ; traceTc "reportUnsolved(ambig) {" empty
       ; unless (allow_ambiguous && not (insolubleWC final_wc))
                (discardResult (reportUnsolved final_wc))
       ; traceTc "reportUnsolved(ambig) }" empty

       ; return () }
------------------
simplifyInteractive :: WantedConstraints -> TcM (Bag EvBind)
-- Constraints from a GHCi interaction: just simplifyTop plus a trace.
simplifyInteractive wanteds
  = do { traceTc "simplifyInteractive" empty
       ; simplifyTop wanteds }
------------------
simplifyDefault :: ThetaType    -- Wanted; has no type variables in it
                -> TcM ()       -- Succeeds iff the constraint is soluble
-- Check that one candidate from a `default` declaration satisfies all
-- the in-scope class constraints; errors are reported, not returned.
simplifyDefault theta
  = do { traceTc "simplifyDefault" empty
         -- Fix: trace label previously said "simplifyInteractive",
         -- an apparent copy-paste from the function above
       ; wanted <- newWanteds DefaultOrigin theta
       ; unsolved <- solveWantedsTcM wanted
       ; traceTc "reportUnsolved {" empty
       -- See Note [Deferring coercion errors to runtime]
       ; reportAllUnsolved unsolved
       ; traceTc "reportUnsolved }" empty
       ; return () }
{-
*********************************************************************************
* *
* Inference
* *
***********************************************************************************
Note [Inferring the type of a let-bound variable]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = rhs
To infer f's type we do the following:
* Gather the constraints for the RHS with ambient level *one more than*
the current one. This is done by the call
pushLevelAndCaptureConstraints (tcMonoBinds...)
in TcBinds.tcPolyInfer
* Call simplifyInfer to simplify the constraints and decide what to
quantify over. We pass in the level used for the RHS constraints,
here called rhs_tclvl.
This ensures that the implication constraint we generate, if any,
has a strictly-increased level compared to the ambient level outside
the let binding.
-}
-- | Simplify the constraints gathered from a let-binding RHS and decide
-- what to quantify over.
-- See Note [Inferring the type of a let-bound variable].
simplifyInfer :: TcLevel               -- Used when generating the constraints
              -> Bool                  -- Apply monomorphism restriction
              -> [(Name, TcTauType)]   -- Variables to be generalised,
                                       -- and their tau-types
              -> WantedConstraints
              -> TcM ([TcTyVar],    -- Quantify over these type variables
                      [EvVar],      -- ... and these constraints (fully zonked)
                      Bool,         -- The monomorphism restriction did something
                                    --   so the results type is not as general as
                                    --   it could be
                      TcEvBinds)    -- ... binding these evidence variables
simplifyInfer rhs_tclvl apply_mr name_taus wanteds
  | isEmptyWC wanteds
  = do { gbl_tvs <- tcGetGlobalTyVars
       ; qtkvs <- quantifyTyVars gbl_tvs (tyVarsOfTypes (map snd name_taus))
       ; traceTc "simplifyInfer: empty WC" (ppr name_taus $$ ppr qtkvs)
       ; return (qtkvs, [], False, emptyTcEvBinds) }

  | otherwise
  = do { traceTc "simplifyInfer {"  $ vcat
             [ ptext (sLit "binds =") <+> ppr name_taus
             , ptext (sLit "rhs_tclvl =") <+> ppr rhs_tclvl
             , ptext (sLit "apply_mr =") <+> ppr apply_mr
             , ptext (sLit "(unzonked) wanted =") <+> ppr wanteds
             ]

       -- Historical note: Before step 2 we used to have a
       -- HORRIBLE HACK described in Note [Avoid unecessary
       -- constraint simplification] but, as described in Trac
       -- #4361, we have taken it out now.  That's why we start
       -- with step 2!

       -- Step 2) First try full-blown solving

       -- NB: we must gather up all the bindings from doing
       -- this solving; hence (runTcSWithEvBinds ev_binds_var).
       -- And note that since there are nested implications,
       -- calling solveWanteds will side-effect their evidence
       -- bindings, so we can't just revert to the input
       -- constraint.

       ; ev_binds_var <- TcM.newTcEvBinds
       ; wanted_transformed_incl_derivs <- setTcLevel rhs_tclvl $
                                           runTcSWithEvBinds ev_binds_var (solveWanteds wanteds)
       ; wanted_transformed_incl_derivs <- TcM.zonkWC wanted_transformed_incl_derivs

       -- Step 4) Candidates for quantification are an approximation of wanted_transformed
       -- NB: Already the fixpoint of any unifications that may have happened
       -- NB: We do not do any defaulting when inferring a type, this can lead
       -- to less polymorphic types, see Note [Default while Inferring]

       ; tc_lcl_env <- TcRnMonad.getLclEnv
       ; null_ev_binds_var <- TcM.newTcEvBinds
       ; let wanted_transformed = dropDerivedWC wanted_transformed_incl_derivs
       ; quant_pred_candidates   -- Fully zonked
           <- if insolubleWC wanted_transformed_incl_derivs
              then return []   -- See Note [Quantification with errors]
                               -- NB: must include derived errors in this test,
                               --     hence "incl_derivs"

              else do { let quant_cand = approximateWC wanted_transformed
                            meta_tvs   = filter isMetaTyVar (varSetElems (tyVarsOfCts quant_cand))
                      ; gbl_tvs <- tcGetGlobalTyVars
                            -- Minimise quant_cand.  We are not interested in any evidence
                            -- produced, because we are going to simplify wanted_transformed
                            -- again later. All we want here is the predicates over which to
                            -- quantify.
                            --
                            -- If any meta-tyvar unifications take place (unlikely), we'll
                            -- pick that up later.

                      ; WC { wc_simple = simples }
                           <- setTcLevel rhs_tclvl                $
                              runTcSWithEvBinds null_ev_binds_var $
                              do { mapM_ (promoteAndDefaultTyVar rhs_tclvl gbl_tvs) meta_tvs
                                     -- See Note [Promote _and_ default when inferring]
                                 ; solveSimpleWanteds quant_cand }

                      ; return [ ctEvPred ev | ct <- bagToList simples
                                             , let ev = ctEvidence ct
                                             , isWanted ev ] }

       -- NB: quant_pred_candidates is already fully zonked

       -- Decide what type variables and constraints to quantify
       ; zonked_taus <- mapM (TcM.zonkTcType . snd) name_taus
       ; let zonked_tau_tvs = tyVarsOfTypes zonked_taus
       ; (qtvs, bound_theta, mr_bites)
             <- decideQuantification apply_mr quant_pred_candidates zonked_tau_tvs

       -- Emit an implication constraint for the
       -- remaining constraints from the RHS
       ; bound_ev_vars <- mapM TcM.newEvVar bound_theta
       ; let skol_info = InferSkol [ (name, mkSigmaTy [] bound_theta ty)
                                   | (name, ty) <- name_taus ]
                        -- Don't add the quantified variables here, because
                        -- they are also bound in ic_skols and we want them
                        -- to be tidied uniformly

             implic = Implic { ic_tclvl    = rhs_tclvl
                             , ic_skols    = qtvs
                             , ic_no_eqs   = False
                             , ic_given    = bound_ev_vars
                             , ic_wanted   = wanted_transformed
                             , ic_status   = IC_Unsolved
                             , ic_binds    = ev_binds_var
                             , ic_info     = skol_info
                             , ic_env      = tc_lcl_env }
       ; emitImplication implic

       -- Promote any type variables that are free in the inferred type
       -- of the function:
       --    f :: forall qtvs. bound_theta => zonked_tau
       -- These variables now become free in the envt, and hence will show
       -- up whenever 'f' is called.  They may currently be at rhs_tclvl, but
       -- they had better be unifiable at the outer_tclvl!
       -- Example:   envt mentions alpha[1]
       --            tau_ty = beta[2] -> beta[2]
       --            constraints = alpha ~ [beta]
       -- we don't quantify over beta (since it is fixed by envt)
       -- so we must promote it!  The inferred type is just
       --   f :: beta -> beta
       ; outer_tclvl    <- TcRnMonad.getTcLevel
       ; zonked_tau_tvs <- TcM.zonkTyVarsAndFV zonked_tau_tvs
              -- decideQuantification turned some meta tyvars into
              -- quantified skolems, so we have to zonk again

       ; let phi_tvs     = tyVarsOfTypes bound_theta `unionVarSet` zonked_tau_tvs
             promote_tvs = varSetElems (closeOverKinds phi_tvs `delVarSetList` qtvs)
       ; runTcSWithEvBinds null_ev_binds_var $  -- runTcS just to get the types right :-(
         mapM_ (promoteTyVar outer_tclvl) promote_tvs

       -- All done!
       ; traceTc "} simplifyInfer/produced residual implication for quantification" $
         vcat [ ptext (sLit "quant_pred_candidates =") <+> ppr quant_pred_candidates
              , ptext (sLit "zonked_taus") <+> ppr zonked_taus
              , ptext (sLit "zonked_tau_tvs=") <+> ppr zonked_tau_tvs
              , ptext (sLit "promote_tvs=") <+> ppr promote_tvs
              , ptext (sLit "bound_theta =") <+> vcat [ ppr v <+> dcolon <+> ppr (idType v)
                                                      | v <- bound_ev_vars]
              , ptext (sLit "mr_bites =") <+> ppr mr_bites
              , ptext (sLit "qtvs =") <+> ppr qtvs
              , ptext (sLit "implic =") <+> ppr implic ]

       ; return ( qtvs, bound_ev_vars, mr_bites, TcEvBinds ev_binds_var) }
{-
************************************************************************
* *
Quantification
* *
************************************************************************
Note [Deciding quantification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the monomorphism restriction does not apply, then we quantify as follows:
* Take the global tyvars, and "grow" them using the equality constraints
E.g. if x:alpha is in the environment, and alpha ~ [beta] (which can
happen because alpha is untouchable here) then do not quantify over
beta, because alpha fixes beta, and beta is effectively free in
the environment too
These are the mono_tvs
* Take the free vars of the tau-type (zonked_tau_tvs) and "grow" them
using all the constraints. These are tau_tvs_plus
* Use quantifyTyVars to quantify over (tau_tvs_plus - mono_tvs), being
careful to close over kinds, and to skolemise the quantified tyvars.
   (This actually unifies each quantified meta-tyvar with a fresh skolem.)
Result is qtvs.
* Filter the constraints using pickQuantifyablePreds and the qtvs.
We have to zonk the constraints first, so they "see" the freshly
created skolems.
If the MR does apply, mono_tvs includes all the constrained tyvars,
and the quantified constraints are empty.
-}
decideQuantification
    :: Bool                       -- Apply monomorphism restriction
    -> [PredType] -> TcTyVarSet   -- Constraints and type variables from RHS
    -> TcM ( [TcTyVar]       -- Quantify over these tyvars (skolems)
           , [PredType]      -- and this context (fully zonked)
           , Bool )          -- Did the MR bite?
-- See Note [Deciding quantification]
decideQuantification apply_mr constraints zonked_tau_tvs
  | apply_mr     -- Apply the Monomorphism restriction
                 -- No constraints are quantified; all constrained tyvars
                 -- count as monomorphic
  = do { gbl_tvs <- tcGetGlobalTyVars
       ; let constrained_tvs = tyVarsOfTypes constraints
             mono_tvs = gbl_tvs `unionVarSet` constrained_tvs
             mr_bites = constrained_tvs `intersectsVarSet` zonked_tau_tvs
       ; qtvs <- quantifyTyVars mono_tvs zonked_tau_tvs
       ; traceTc "decideQuantification 1" (vcat [ppr constraints, ppr gbl_tvs, ppr mono_tvs, ppr qtvs])
       ; return (qtvs, [], mr_bites) }

  | otherwise
  = do { gbl_tvs <- tcGetGlobalTyVars
       ; let mono_tvs     = growThetaTyVars (filter isEqPred constraints) gbl_tvs
             tau_tvs_plus = growThetaTyVars constraints zonked_tau_tvs
       ; qtvs <- quantifyTyVars mono_tvs tau_tvs_plus
       ; constraints <- zonkTcThetaType constraints
              -- quantifyTyVars turned some meta tyvars into
              -- quantified skolems, so we have to zonk again

       ; theta <- pickQuantifiablePreds (mkVarSet qtvs) constraints
       ; let min_theta = mkMinimalBySCs theta   -- See Note [Minimize by Superclasses]

       ; traceTc "decideQuantification 2" (vcat [ppr constraints, ppr gbl_tvs, ppr mono_tvs
                                                , ppr tau_tvs_plus, ppr qtvs, ppr min_theta])
       ; return (qtvs, min_theta, False) }
------------------
pickQuantifiablePreds :: TyVarSet         -- Quantifying over these
                      -> TcThetaType      -- Proposed constraints to quantify
                      -> TcM TcThetaType  -- A subset that we can actually quantify
-- This function decides whether a particular constraint should be
-- quantified over, given the type variables that are being quantified
pickQuantifiablePreds qtvs theta
  = do { flex_ctxt <- xoptM Opt_FlexibleContexts
       ; return (filter (pick_me flex_ctxt) theta) }
  where
    pick_me flex_ctxt pred
      = case classifyPredType pred of
          ClassPred cls tys
            | isIPClass cls -> True -- See note [Inheriting implicit parameters]
            | otherwise     -> pick_cls_pred flex_ctxt tys

          EqPred ReprEq ty1 ty2 -> pick_cls_pred flex_ctxt [ty1, ty2]
                -- Representational equality is like a class constraint

          EqPred NomEq ty1 ty2  -> quant_fun ty1 || quant_fun ty2
          IrredPred ty          -> tyVarsOfType ty `intersectsVarSet` qtvs
          TuplePred {}          -> False

    pick_cls_pred flex_ctxt tys
      = tyVarsOfTypes tys `intersectsVarSet` qtvs
        && (checkValidClsArgs flex_ctxt tys)
           -- Only quantify over predicates that checkValidType
           -- will pass!  See Trac #10351.

    -- See Note [Quantifying over equality constraints]
    quant_fun ty
      = case tcSplitTyConApp_maybe ty of
          Just (tc, tys) | isTypeFamilyTyCon tc
                         -> tyVarsOfTypes tys `intersectsVarSet` qtvs
          _ -> False
------------------
-- | Extend a set of type variables with everything reachable from it
-- through the given constraints.
-- See Note [Growing the tau-tvs using constraints]
growThetaTyVars :: ThetaType -> TyVarSet -> TyVarSet
growThetaTyVars theta tvs
  | null theta = tvs
  | otherwise  = transCloVarSet grow_step start_set
  where
    -- Implicit-parameter predicates always contribute their variables
    -- to the seed set
    -- See note [Inheriting implicit parameters]
    (ip_preds, other_preds) = partition isIPPred theta
    start_set = tvs `unionVarSet` tyVarsOfTypes ip_preds

    -- One step of the transitive closure: collect the variables of every
    -- predicate that touches the set reached so far
    grow_step :: VarSet -> VarSet
    grow_step reached = foldr add_pred emptyVarSet other_preds
      where
        add_pred pred acc
          | p_tvs `intersectsVarSet` reached = acc `unionVarSet` p_tvs
          | otherwise                        = acc
          where
            p_tvs = tyVarsOfType pred
{-
Note [Quantifying over equality constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Should we quantify over an equality constraint (s ~ t)? In general, we don't.
Doing so may simply postpone a type error from the function definition site to
its call site. (At worst, imagine (Int ~ Bool)).
However, consider this
forall a. (F [a] ~ Int) => blah
Should we quantify over the (F [a] ~ Int). Perhaps yes, because at the call
site we will know 'a', and perhaps we have instance F [Bool] = Int.
So we *do* quantify over a type-family equality where the arguments mention
the quantified variables.
Note [Growing the tau-tvs using constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(growThetaTyVars insts tvs) is the result of extending the set
of tyvars tvs using all conceivable links from pred
E.g. tvs = {a}, preds = {H [a] b, K (b,Int) c, Eq e}
Then growThetaTyVars preds tvs = {a,b,c}
Notice that
growThetaTyVars is conservative if v might be fixed by vs
=> v `elem` grow(vs,C)
Note [Inheriting implicit parameters]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
f x = (x::Int) + ?y
where f is *not* a top-level binding.
From the RHS of f we'll get the constraint (?y::Int).
There are two types we might infer for f:
f :: Int -> Int
(so we get ?y from the context of f's definition), or
f :: (?y::Int) => Int -> Int
At first you might think the first was better, because then
?y behaves like a free variable of the definition, rather than
having to be passed at each call site. But of course, the WHOLE
IDEA is that ?y should be passed at each call site (that's what
dynamic binding means) so we'd better infer the second.
BOTTOM LINE: when *inferring types* you must quantify over implicit
parameters, *even if* they don't mention the bound type variables.
Reason: because implicit parameters, uniquely, have local instance
declarations. See the pickQuantifiablePreds.
Note [Quantification with errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we find that the RHS of the definition has some absolutely-insoluble
constraints, we abandon all attempts to find a context to quantify
over, and instead make the function fully-polymorphic in whatever
type we have found. For two reasons
a) Minimise downstream errors
b) Avoid spurious errors from this function
But NB that we must include *derived* errors in the check. Example:
(a::*) ~ Int#
We get an insoluble derived error *~#, and we don't want to discard
it before doing the isInsolubleWC test! (Trac #8262)
Note [Default while Inferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Our current plan is that defaulting only happens at simplifyTop and
not simplifyInfer. This may lead to some insoluble deferred constraints
Example:
instance D g => C g Int b
constraint inferred = (forall b. 0 => C gamma alpha b) /\ Num alpha
type inferred = gamma -> gamma
Now, if we try to default (alpha := Int) we will be able to refine the implication to
(forall b. 0 => C gamma Int b)
which can then be simplified further to
(forall b. 0 => D gamma)
Finally we /can/ approximate this implication with (D gamma) and infer the quantified
type: forall g. D g => g -> g
Instead what will currently happen is that we will get a quantified type
(forall g. g -> g) and an implication:
forall g. 0 => (forall b. 0 => C g alpha b) /\ Num alpha
which, even if the simplifyTop defaults (alpha := Int) we will still be left with an
unsolvable implication:
forall g. 0 => (forall b. 0 => D g)
The concrete example would be:
h :: C g a s => g -> a -> ST s a
f (x::gamma) = (\_ -> x) (runST (h x (undefined::alpha)) + 1)
But it is quite tedious to do defaulting and resolve the implication constraints and
we have not observed code breaking because of the lack of defaulting in inference so
we don't do it for now.
Note [Minimize by Superclasses]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we quantify over a constraint, in simplifyInfer we need to
quantify over a constraint that is minimal in some sense: For
instance, if the final wanted constraint is (Eq alpha, Ord alpha),
we'd like to quantify over Ord alpha, because we can just get Eq alpha
from superclass selection from Ord alpha. This minimization is what
mkMinimalBySCs does. Then, simplifyInfer uses the minimal constraint
to check the original wanted.
Note [Avoid unecessary constraint simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-------- NB NB NB (Jun 12) -------------
This note no longer applies; see the notes with Trac #4361.
(But I'm leaving it in here so we remember the issue.)
----------------------------------------
When inferring the type of a let-binding, with simplifyInfer,
try to avoid unnecessarily simplifying class constraints.
Doing so aids sharing, but it also helps with delicate
situations like
instance C t => C [t] where ..
f :: C [t] => ....
f x = let g y = ...(constraint C [t])...
in ...
When inferring a type for 'g', we don't want to apply the
instance decl, because then we can't satisfy (C t). So we
just notice that g isn't quantified over 't' and partition
the constraints before simplifying.
This only half-works, but then let-generalisation only half-works.
*********************************************************************************
* *
* Main Simplifier *
* *
***********************************************************************************
Note [Deferring coercion errors to runtime]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
While developing, sometimes it is desirable to allow compilation to succeed even
if there are type errors in the code. Consider the following case:
module Main where
a :: Int
a = 'a'
main = print "b"
Even though `a` is ill-typed, it is not used in the end, so if all that we're
interested in is `main` it is handy to be able to ignore the problems in `a`.
Since we treat type equalities as evidence, this is relatively simple. Whenever
we run into a type mismatch in TcUnify, we normally just emit an error. But it
is always safe to defer the mismatch to the main constraint solver. If we do
that, `a` will get transformed into
co :: Int ~ Char
co = ...
a :: Int
a = 'a' `cast` co
The constraint solver would realize that `co` is an insoluble constraint, and
emit an error with `reportUnsolved`. But we can also replace the right-hand side
of `co` with `error "Deferred type error: Int ~ Char"`. This allows the program
to compile, and it will run fine unless we evaluate `a`. This is what
`deferErrorsToRuntime` does.
It does this by keeping track of which errors correspond to which coercion
in TcErrors (with ErrEnv). TcErrors.reportTidyWanteds does not print the errors
and does not fail if -fdefer-type-errors is on, so that we can continue
compilation. The errors are turned into warnings in `reportUnsolved`.
-}
solveWantedsTcM :: [CtEvidence] -> TcM WantedConstraints
-- Top-level entry point: simplify the given wanted evidence in a fresh
-- TcS run.  The evidence bindings produced while solving are thrown
-- away, all Derived constraints are stripped from the result (via
-- solveWantedsAndDrop), and the residual constraints returned are
-- /not/ guaranteed to be zonked.
solveWantedsTcM evs
  = do { (residual, _ev_binds) <- runTcS $ solveWantedsAndDrop $ mkSimpleWC evs
       ; return residual }
solveWantedsAndDrop :: WantedConstraints -> TcS WantedConstraints
-- Run 'solveWanteds' and then strip every Derived constraint from the
-- residual.  Because it hands back the leftover WantedConstraints it
-- should always be invoked inside runTcS (or something similar).
-- The result is not zonked.
solveWantedsAndDrop = fmap dropDerivedWC . solveWanteds
solveWanteds :: WantedConstraints -> TcS WantedConstraints
-- Solve the simple constraints, then the nested implications, and keep
-- iterating (via simpl_loop) only while floated equalities give a real
-- chance of progress -- so that the inert set doesn't mindlessly propagate.
-- NB: wc_simples may be wanted /or/ derived now
solveWanteds wc@(WC { wc_simple = simples, wc_insol = insols, wc_impl = implics })
  = do { traceTcS "solveWanteds {" (ppr wc)

         -- Try the simple bit, including insolubles. Solving insolubles a
         -- second time round is a bit of a waste; but the code is simple
         -- and the program is wrong anyway, and we don't run the danger
         -- of adding Derived insolubles twice; see
         -- TcSMonad Note [Do not add duplicate derived insolubles]
       ; wc1 <- solveSimpleWanteds simples
       ; let WC { wc_simple = simples1, wc_insol = insols1, wc_impl = implics1 } = wc1

         -- Solve the implications from the input together with any fresh
         -- ones that arose while solving the simple constraints
       ; (floated_eqs, implics2) <- solveNestedImplications (implics `unionBags` implics1)

         -- Iterate with the floated equalities; see Note [Cutting off simpl_loop]
       ; final_wc <- simpl_loop 0 floated_eqs
                                (WC { wc_simple = simples1, wc_impl = implics2
                                    , wc_insol = insols `unionBags` insols1 })

       ; bb <- getTcEvBindsMap
       ; traceTcS "solveWanteds }" $
             vcat [ text "final wc =" <+> ppr final_wc
                  , text "current evbinds =" <+> ppr (evBindMapBinds bb) ]
       ; return final_wc }
simpl_loop :: Int -> Cts
           -> WantedConstraints
           -> TcS WantedConstraints
-- Iterate solving of the simples and implications while the floated
-- equalities may enable further progress.
-- See Note [Cutting off simpl_loop] for the termination argument.
simpl_loop n floated_eqs
           wc@(WC { wc_simple = simples, wc_insol = insols, wc_impl = implics })
  | n > 10  -- Fixed iteration bound; see Note [Cutting off simpl_loop]
  = do { traceTcS "solveWanteds: loop!" (ppr wc); return wc }

  | no_floated_eqs
  = return wc   -- Done!

  | otherwise
  = do { traceTcS "simpl_loop, iteration" (int n)

         -- solveSimples may make progress if either float_eqs hold
       ; (unifs_happened1, wc1) <- if no_floated_eqs
                                   then return (False, emptyWC)
                                   else reportUnifications $
                                        solveSimpleWanteds (floated_eqs `unionBags` simples)
                                        -- Put floated_eqs first so they get solved first
                                        -- NB: the floated_eqs may include /derived/ equalities
                                        -- arising from fundeps inside an implication
       ; let WC { wc_simple = simples1, wc_insol = insols1, wc_impl = implics1 } = wc1

         -- solveImplications may make progress only if unifs2 holds
       ; (floated_eqs2, implics2) <- if not unifs_happened1 && isEmptyBag implics1
                                     then return (emptyBag, implics)
                                     else solveNestedImplications (implics `unionBags` implics1)

       ; simpl_loop (n+1) floated_eqs2
                    (WC { wc_simple = simples1, wc_impl = implics2
                        , wc_insol = insols `unionBags` insols1 }) }
  where
    no_floated_eqs = isEmptyBag floated_eqs
solveNestedImplications :: Bag Implication
                        -> TcS (Cts, Bag Implication)
-- Solve each implication in turn, returning all the equalities floated
-- out of them together with the implications that remain unsolved.
-- Precondition: the TcS inerts may contain unsolved simples which have
-- to be converted to givens before we go inside a nested implication.
solveNestedImplications implics
  | isEmptyBag implics
  = return (emptyBag, emptyBag)
  | otherwise
  = do { traceTcS "solveNestedImplications starting {" empty
       ; (floated_eqs_s, unsolved_implics) <- mapAndUnzipBagM solveImplication implics
       ; let floated_eqs = concatBag floated_eqs_s

       -- ... and we are back in the original TcS inerts
       -- Notice that the original includes the _insoluble_simples so it was safe to ignore
       -- them in the beginning of this function.
       ; traceTcS "solveNestedImplications end }" $
             vcat [ text "all floated_eqs =" <+> ppr floated_eqs
                  , text "unsolved_implics =" <+> ppr unsolved_implics ]

       ; return (floated_eqs, catBagMaybes unsolved_implics) }
solveImplication :: Implication              -- Wanted
                 -> TcS (Cts,                -- All wanted or derived floated equalities: var = type
                         Maybe Implication)  -- Simplified implication (empty or singleton)
-- Solve one implication: solve its givens and wanteds in a nested TcS
-- scope, float out whatever equalities can escape, and finalise the
-- implication's status via setImplicationStatus.
-- Precondition: The TcS monad contains an empty worklist and given-only inerts
-- which after trying to solve this implication we must restore to their original value
solveImplication imp@(Implic { ic_tclvl = tclvl
                             , ic_binds = ev_binds
                             , ic_skols = skols
                             , ic_given = givens
                             , ic_wanted = wanteds
                             , ic_info = info
                             , ic_status = status
                             , ic_env = env })
  | IC_Solved {} <- status
  = return (emptyCts, Just imp)  -- Do nothing

  | otherwise  -- Even for IC_Insoluble it is worth doing more work
               -- The insoluble stuff might be in one sub-implication
               -- and other unsolved goals in another; and we want to
               -- solve the latter as much as possible
  = do { inerts <- getTcSInerts
       ; traceTcS "solveImplication {" (ppr imp $$ text "Inerts" <+> ppr inerts)

         -- Solve the nested constraints
       ; (no_given_eqs, given_insols, residual_wanted)
            <- nestImplicTcS ev_binds tclvl $
               do { given_insols <- solveSimpleGivens (mkGivenLoc tclvl info env) givens
                  ; no_eqs <- getNoGivenEqs tclvl skols

                  ; residual_wanted <- solveWanteds wanteds
                        -- solveWanteds, *not* solveWantedsAndDrop, because
                        -- we want to retain derived equalities so we can float
                        -- them out in floatEqualities

                  ; return (no_eqs, given_insols, residual_wanted) }

       ; (floated_eqs, residual_wanted)
             <- floatEqualities skols no_given_eqs residual_wanted

       ; let final_wanted = residual_wanted `addInsols` given_insols

       ; res_implic <- setImplicationStatus (imp { ic_no_eqs = no_given_eqs
                                                 , ic_wanted = final_wanted })

       ; evbinds <- getTcEvBindsMap
       ; traceTcS "solveImplication end }" $ vcat
             [ text "no_given_eqs =" <+> ppr no_given_eqs
             , text "floated_eqs =" <+> ppr floated_eqs
             , text "res_implic =" <+> ppr res_implic
             , text "implication evbinds = " <+> ppr (evBindMapBinds evbinds) ]

       ; return (floated_eqs, res_implic) }
----------------------
setImplicationStatus :: Implication -> TcS (Maybe Implication)
-- Finalise the implication returned from solveImplication:
--  * Set the ic_status field
--  * Trim the ic_wanted field to remove Derived constraints
-- Return Nothing if we can discard the implication altogether
-- See Note [Tracking redundant constraints] for how the needed/dead
-- given bookkeeping works.
setImplicationStatus implic@(Implic { ic_binds = EvBindsVar ev_binds_var _
                                    , ic_info = info
                                    , ic_wanted = wc
                                    , ic_given = givens })
  | some_insoluble
  = return $ Just $
    implic { ic_status = IC_Insoluble
           , ic_wanted = wc { wc_simple = pruned_simples
                            , wc_insol = pruned_insols } }

  | some_unsolved
  = return $ Just $
    implic { ic_status = IC_Unsolved
           , ic_wanted = wc { wc_simple = pruned_simples
                            , wc_insol = pruned_insols } }

  | otherwise  -- Everything is solved; look at the implications
               -- See Note [Tracking redundant constraints]
  = do { ev_binds <- TcS.readTcRef ev_binds_var
       ; let all_needs = neededEvVars ev_binds implic_needs

             dead_givens | warnRedundantGivens info
                         = filterOut (`elemVarSet` all_needs) givens
                         | otherwise = []  -- None to report

             final_needs = all_needs `delVarSetList` givens

             discard_entire_implication  -- Can we discard the entire implication?
               = null dead_givens            -- No warning from this implication
                 && isEmptyBag pruned_implics -- No live children
                 && isEmptyVarSet final_needs -- No needed vars to pass up to parent

             final_status = IC_Solved { ics_need = final_needs
                                      , ics_dead = dead_givens }
             final_implic = implic { ic_status = final_status
                                   , ic_wanted = wc { wc_simple = pruned_simples
                                                    , wc_insol = pruned_insols
                                                    , wc_impl = pruned_implics } }
               -- We can only prune the child implications (pruned_implics)
               -- in the IC_Solved status case, because only then we can
               -- accumulate their needed evidence variables into the
               -- IC_Solved final_status field of the parent implication.

       ; return $ if discard_entire_implication
                  then Nothing
                  else Just final_implic }
  where
    WC { wc_simple = simples, wc_impl = implics, wc_insol = insols } = wc

    some_insoluble = insolubleWC wc
    some_unsolved = not (isEmptyBag simples && isEmptyBag insols)
                    || isNothing mb_implic_needs

    pruned_simples = dropDerivedSimples simples
    pruned_insols = dropDerivedInsols insols
    pruned_implics = filterBag need_to_keep_implic implics

    mb_implic_needs :: Maybe VarSet
    -- Just vs => all implics are IC_Solved, with 'vs' needed
    -- Nothing => at least one implic is not IC_Solved
    mb_implic_needs = foldrBag add_implic (Just emptyVarSet) implics
    Just implic_needs = mb_implic_needs

    add_implic implic acc
      | Just vs_acc <- acc
      , IC_Solved { ics_need = vs } <- ic_status implic
      = Just (vs `unionVarSet` vs_acc)
      | otherwise = Nothing

    need_to_keep_implic ic
      | IC_Solved { ics_dead = [] } <- ic_status ic
            -- Fully solved, and no redundant givens to report
      , isEmptyBag (wc_impl (ic_wanted ic))
            -- And no children that might have things to report
      = False
      | otherwise
      = True
warnRedundantGivens :: SkolemInfo -> Bool
-- Should redundant givens arising under this skolem be reported?
-- Only for instance declarations and (most) type signatures; the Bool
-- carried by FunSigCtxt lets the ambiguity check suppress the warning
-- so the same signature is not reported twice.
warnRedundantGivens skol_info
  = case skol_info of
      SigSkol (FunSigCtxt _ warn_redundant) _ -> warn_redundant
      SigSkol ExprSigCtxt _                   -> True
      SigSkol _ _                             -> False
      InstSkol                                -> True
      _                                       -> False
neededEvVars :: EvBindMap -> VarSet -> VarSet
-- Find all the evidence variables that are "needed",
-- and then delete all those bound by the evidence bindings
-- A variable is "needed" if
--  a) it is free in the RHS of a Wanted EvBind (add_wanted)
--  b) it is free in the RHS of an EvBind whose LHS is needed (transClo)
--  c) it is in the ic_need_evs of a nested implication (initial_seeds)
--     (after removing the givens)
neededEvVars ev_binds initial_seeds
  = needed `minusVarSet` bndrs
  where
    seeds = foldEvBindMap add_wanted initial_seeds ev_binds
    needed = transCloVarSet also_needs seeds
    bndrs = foldEvBindMap add_bndr emptyVarSet ev_binds

    add_wanted :: EvBind -> VarSet -> VarSet
    -- Seed with the free vars of the RHSs of the Wanted bindings
    add_wanted (EvBind { eb_is_given = is_given, eb_rhs = rhs }) needs
      | is_given = needs -- Add the rhs vars of the Wanted bindings only
      | otherwise = evVarsOfTerm rhs `unionVarSet` needs

    also_needs :: VarSet -> VarSet
    -- One step of the transitive closure: if a needed variable is
    -- bound by a Given binding, the free vars of its RHS are needed too
    also_needs needs
      = foldVarSet add emptyVarSet needs
      where
        add v needs
          | Just ev_bind <- lookupEvBind ev_binds v
          , EvBind { eb_is_given = is_given, eb_rhs = rhs } <- ev_bind
          , is_given
          = evVarsOfTerm rhs `unionVarSet` needs
          | otherwise
          = needs

    add_bndr :: EvBind -> VarSet -> VarSet
    -- Collect the variables bound (on the LHS) by the evidence bindings
    add_bndr (EvBind { eb_lhs = v }) vs = extendVarSet vs v
{-
Note [Tracking redundant constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With Opt_WarnRedundantConstraints, GHC can report which
constraints of a type signature (or instance declaration) are
redundant, and can be omitted. Here is an overview of how it
works:
----- What is a redundant constraint?
* The things that can be redundant are precisely the Given
constraints of an implication.
* A constraint can be redundant in two different ways:
a) It is implied by other givens. E.g.
f :: (Eq a, Ord a) => blah -- Eq a unnecessary
g :: (Eq a, a~b, Eq b) => blah -- Either Eq a or Eq b unnecessary
b) It is not needed by the Wanted constraints covered by the
implication E.g.
f :: Eq a => a -> Bool
f x = True -- Equality not used
* To find (a), when we have two Given constraints,
we must be careful to drop the one that is a naked variable (if poss).
So if we have
f :: (Eq a, Ord a) => blah
then we may find [G] sc_sel (d1::Ord a) :: Eq a
[G] d2 :: Eq a
We want to discard d2 in favour of the superclass selection from
the Ord dictionary. This is done by TcInteract.solveOneFromTheOther
See Note [Replacement vs keeping].
* To find (b) we need to know which evidence bindings are 'wanted';
hence the eb_is_given field on an EvBind.
----- How tracking works
* When the constraint solver finishes solving all the wanteds in
an implication, it sets its status to IC_Solved
- The ics_dead field of IC_Solved records the subset of the ic_given
of this implication that are redundant (not needed).
- The ics_need field of IC_Solved then records all the
in-scope (given) evidence variables, bound by the context, that
were needed to solve this implication, including all its nested
implications. (We remove the ic_given of this implication from
the set, of course.)
* We compute which evidence variables are needed by an implication
in setImplicationStatus. A variable is needed if
a) it is free in the RHS of a Wanted EvBind
b) it is free in the RHS of an EvBind whose LHS is needed
c) it is in the ics_need of a nested implication
* We need to be careful not to discard an implication
prematurely, even one that is fully solved, because we might
thereby forget which variables it needs, and hence wrongly
report a constraint as redundant. But we can discard it once
its free vars have been incorporated into its parent; or if it
simply has no free vars. This careful discarding is also
handled in setImplicationStatus
----- Reporting redundant constraints
* TcErrors does the actual warning, in warnRedundantConstraints.
* We don't report redundant givens for *every* implication; only
for those which reply True to TcSimplify.warnRedundantGivens:
- For example, in a class declaration, the default method *can*
use the class constraint, but it certainly doesn't *have* to,
and we don't want to report an error there.
- More subtly, in a function definition
f :: (Ord a, Ord a, Ix a) => a -> a
f x = rhs
we do an ambiguity check on the type (which would find that one
of the Ord a constraints was redundant), and then we check that
the definition has that type (which might find that both are
redundant). We don't want to report the same error twice, so
we disable it for the ambiguity check. Hence the flag in
TcType.FunSigCtxt.
This decision is taken in setImplicationStatus, rather than TcErrors
so that we can discard implication constraints that we don't need.
So ics_dead consists only of the *reportable* redundant givens.
----- Shortcomings
Consider (see Trac #9939)
f2 :: (Eq a, Ord a) => a -> a -> Bool
-- Ord a redundant, but Eq a is reported
f2 x y = (x == y)
We report (Eq a) as redundant, whereas actually (Ord a) is. But it's
really not easy to detect that!
Note [Cutting off simpl_loop]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is very important not to iterate in simpl_loop unless there is a chance
of progress. Trac #8474 is a classic example:
* There's a deeply-nested chain of implication constraints.
?x:alpha => ?y1:beta1 => ... ?yn:betan => [W] ?x:Int
* From the innermost one we get a [D] alpha ~ Int,
but alpha is untouchable until we get out to the outermost one
* We float [D] alpha~Int out (it is in floated_eqs), but since alpha
is untouchable, the solveInteract in simpl_loop makes no progress
* So there is no point in attempting to re-solve
?yn:betan => [W] ?x:Int
because we'll just get the same [D] again
* If we *do* re-solve, we'll get an infinite loop. It is cut off by
the fixed bound of 10, but solving the next takes 10*10*...*10 (ie
exponentially many) iterations!
Conclusion: we should iterate simpl_loop iff we will get more 'givens'
in the inert set when solving the nested implications. That is the
result of prepareInertsForImplications is larger. How can we tell
this?
Consider floated_eqs (all wanted or derived):
(a) [W/D] CTyEqCan (a ~ ty). This can give rise to a new given only by causing
a unification. So we count those unifications.
(b) [W] CFunEqCan (F tys ~ xi). Even though these are wanted, they
are pushed in as givens by prepareInertsForImplications. See Note
[Preparing inert set for implications] in TcSMonad. But because
of that very fact, we won't generate another copy if we iterate
simpl_loop. So we iterate if there any of these
-}
promoteTyVar :: TcLevel -> TcTyVar -> TcS TcTyVar
-- When we float a constraint out of an implication, any unification
-- variable it mentions that lives at a deeper level must be re-created
-- at the outer level, restoring invariant (MetaTvInv) in
-- Note [TcLevel and untouchable type variables] in TcType.
-- See Note [Promoting unification variables]
promoteTyVar tclvl tv
  | not (isFloatedTouchableMetaTyVar tclvl tv)
  = return tv   -- Already shallow enough (or not promotable): nothing to do
  | otherwise
  = do { fresh_tv <- TcS.cloneMetaTyVar tv
       ; let promoted_tv = setMetaTyVarTcLevel fresh_tv tclvl
       ; unifyTyVar tv (mkTyVarTy promoted_tv)
       ; return promoted_tv }
promoteAndDefaultTyVar :: TcLevel -> TcTyVarSet -> TcTyVar -> TcS TcTyVar
-- First default the variable's kind (unless it is mentioned in the
-- global environment), then promote it to the given level.
-- See Note [Promote _and_ default when inferring]
promoteAndDefaultTyVar tclvl gbl_tvs tv
  = maybe_default >>= promoteTyVar tclvl
  where
    maybe_default
      | tv `elemVarSet` gbl_tvs = return tv      -- In the envt: leave it alone
      | otherwise               = defaultTyVar tv
defaultTyVar :: TcTyVar -> TcS TcTyVar
-- If the variable's kind has a default (e.g. OpenKind defaults to *),
-- rewrite the variable itself to a fresh one with the defaulted kind.
-- Precondition: MetaTyVars only
-- See Note [DefaultTyVar]
defaultTyVar the_tv
  | Just default_k <- defaultKind_maybe (tyVarKind the_tv)
  = do { tv' <- TcS.cloneMetaTyVar the_tv
       ; let new_tv = setTyVarKind tv' default_k
       ; traceTcS "defaultTyVar" (ppr the_tv <+> ppr new_tv)
       ; unifyTyVar the_tv (mkTyVarTy new_tv)
       ; return new_tv }
             -- Why not directly derived_pred = mkTcEqPred k default_k?
             -- See Note [DefaultTyVar]
             -- We keep the same TcLevel on tv'

  | otherwise = return the_tv   -- The common case
approximateWC :: WantedConstraints -> Cts
-- Extract the simple constraints we might plausibly quantify over,
-- floating them out of implications that bind no equalities.
-- Postcondition: Wanted or Derived Cts
-- See Note [ApproximateWC]
approximateWC wc
  = float_wc emptyVarSet wc
  where
    float_wc :: TcTyVarSet -> WantedConstraints -> Cts
    float_wc trapping_tvs (WC { wc_simple = simples, wc_impl = implics })
      = filterBag is_floatable simples `unionBags`
        do_bag (float_implic new_trapping_tvs) implics
      where
        is_floatable ct = tyVarsOfCt ct `disjointVarSet` new_trapping_tvs
        new_trapping_tvs = transCloVarSet grow trapping_tvs

        grow :: VarSet -> VarSet  -- Maps current trapped tyvars to newly-trapped ones
        grow so_far = foldrBag (grow_one so_far) emptyVarSet simples
        grow_one so_far ct tvs
          | ct_tvs `intersectsVarSet` so_far = tvs `unionVarSet` ct_tvs
          | otherwise = tvs
          where
            ct_tvs = tyVarsOfCt ct

    float_implic :: TcTyVarSet -> Implication -> Cts
    float_implic trapping_tvs imp
      | ic_no_eqs imp   -- No equalities, so float
      = float_wc new_trapping_tvs (ic_wanted imp)
      | otherwise       -- Don't float out of equalities
      = emptyCts        -- See Note [ApproximateWC]
      where
        new_trapping_tvs = trapping_tvs `extendVarSetList` ic_skols imp

    do_bag :: (a -> Bag c) -> Bag a -> Bag c
    do_bag f = foldrBag (unionBags.f) emptyBag
{-
Note [ApproximateWC]
~~~~~~~~~~~~~~~~~~~~
approximateWC takes a constraint, typically arising from the RHS of a
let-binding whose type we are *inferring*, and extracts from it some
*simple* constraints that we might plausibly abstract over. Of course
the top-level simple constraints are plausible, but we also float constraints
out from inside, if they are not captured by skolems.
The same function is used when doing type-class defaulting (see the call
to applyDefaultingRules) to extract constraints that might be defaulted.
There are two caveats:
1. We do *not* float anything out if the implication binds equality
constraints, because that defeats the OutsideIn story. Consider
data T a where
TInt :: T Int
MkT :: T a
f TInt = 3::Int
We get the implication (a ~ Int => res ~ Int), where so far we've decided
f :: T a -> res
We don't want to float (res~Int) out because then we'll infer
f :: T a -> Int
which is only one of the possible types. (GHC 7.6 accidentally *did*
float out of such implications, which meant it would happily infer
non-principal types.)
2. We do not float out an inner constraint that shares a type variable
(transitively) with one that is trapped by a skolem. Eg
forall a. F a ~ beta, Integral beta
We don't want to float out (Integral beta). Doing so would be bad
when defaulting, because then we'll default beta:=Integer, and that
makes the error message much worse; we'd get
Can't solve F a ~ Integer
rather than
Can't solve Integral (F a)
Moreover, floating out these "contaminated" constraints doesn't help
when generalising either. If we generalise over (Integral b), we still
can't solve the retained implication (forall a. F a ~ b). Indeed,
arguably that too would be a harder error to understand.
Note [DefaultTyVar]
~~~~~~~~~~~~~~~~~~~
defaultTyVar is used on any un-instantiated meta type variables to
default the kind of OpenKind and ArgKind etc to *. This is important
to ensure that instance declarations match. For example consider
instance Show (a->b)
foo x = show (\_ -> True)
Then we'll get a constraint (Show (p ->q)) where p has kind ArgKind,
and that won't match the typeKind (*) in the instance decl. See tests
tc217 and tc175.
We look only at touchable type variables. No further constraints
are going to affect these type variables, so it's time to do it by
hand. However we aren't ready to default them fully to () or
whatever, because the type-class defaulting rules have yet to run.
An important point is that if the type variable tv has kind k and the
default is default_k we do not simply generate [D] (k ~ default_k) because:
(1) k may be ArgKind and default_k may be * so we will fail
(2) We need to rewrite all occurrences of the tv to be a type
variable with the right kind and we choose to do this by rewriting
the type variable /itself/ by a new variable which does have the
right kind.
Note [Promote _and_ default when inferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are inferring a type, we simplify the constraint, and then use
approximateWC to produce a list of candidate constraints. Then we MUST
a) Promote any meta-tyvars that have been floated out by
approximateWC, to restore invariant (MetaTvInv) described in
Note [TcLevel and untouchable type variables] in TcType.
b) Default the kind of any meta-tyyvars that are not mentioned in
in the environment.
To see (b), suppose the constraint is (C ((a :: OpenKind) -> Int)), and we
have an instance (C ((x:*) -> Int)). The instance doesn't match -- but it
should! If we don't solve the constraint, we'll stupidly quantify over
(C (a->Int)) and, worse, in doing so zonkQuantifiedTyVar will quantify over
(b:*) instead of (a:OpenKind), which can lead to disaster; see Trac #7332.
Trac #7641 is a simpler example.
Note [Promoting unification variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we float an equality out of an implication we must "promote" free
unification variables of the equality, in order to maintain Invariant
(MetaTvInv) from Note [TcLevel and untouchable type variables] in TcType, for the
leftover implication.
This is absolutely necessary. Consider the following example. We start
with two implications and a class with a functional dependency.
class C x y | x -> y
instance C [a] [a]
(I1) [untch=beta]forall b. 0 => F Int ~ [beta]
(I2) [untch=beta]forall c. 0 => F Int ~ [[alpha]] /\ C beta [c]
We float (F Int ~ [beta]) out of I1, and we float (F Int ~ [[alpha]]) out of I2.
They may react to yield that (beta := [alpha]) which can then be pushed inwards
the leftover of I2 to get (C [alpha] [a]) which, using the FunDep, will mean that
(alpha := a). In the end we will have the skolem 'b' escaping in the untouchable
beta! Concrete example is in indexed_types/should_fail/ExtraTcsUntch.hs:
class C x y | x -> y where
op :: x -> y -> ()
instance C [a] [a]
type family F a :: *
h :: F Int -> ()
h = undefined
data TEx where
TEx :: a -> TEx
f (x::beta) =
let g1 :: forall b. b -> ()
g1 _ = h [x]
g2 z = case z of TEx y -> (h [[undefined]], op x [y])
in (g1 '3', g2 undefined)
Note [Solving Family Equations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After we are done with simplification we may be left with constraints of the form:
[Wanted] F xis ~ beta
If 'beta' is a touchable unification variable not already bound in the TyBinds
then we'd like to create a binding for it, effectively "defaulting" it to be 'F xis'.
When is it ok to do so?
1) 'beta' must not already be defaulted to something. Example:
[Wanted] F Int ~ beta <~ Will default [beta := F Int]
[Wanted] F Char ~ beta <~ Already defaulted, can't default again. We
have to report this as unsolved.
2) However, we must still do an occurs check when defaulting (F xis ~ beta), to
set [beta := F xis] only if beta is not among the free variables of xis.
3) Notice that 'beta' can't be bound in ty binds already because we rewrite RHS
of type family equations. See Inert Set invariants in TcInteract.
This solving is now happening during zonking, see Note [Unflattening while zonking]
in TcMType.
*********************************************************************************
* *
* Floating equalities *
* *
*********************************************************************************
Note [Float Equalities out of Implications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For ordinary pattern matches (including existentials) we float
equalities out of implications, for instance:
data T where
MkT :: Eq a => a -> T
f x y = case x of MkT _ -> (y::Int)
We get the implication constraint (x::T) (y::alpha):
forall a. [untouchable=alpha] Eq a => alpha ~ Int
We want to float out the equality into a scope where alpha is no
longer untouchable, to solve the implication!
But we cannot float equalities out of implications whose givens may
yield or contain equalities:
data T a where
T1 :: T Int
T2 :: T Bool
T3 :: T a
h :: T a -> a -> Int
f x y = case x of
T1 -> y::Int
T2 -> y::Bool
T3 -> h x y
We generate constraint, for (x::T alpha) and (y :: beta):
[untouchables = beta] (alpha ~ Int => beta ~ Int) -- From 1st branch
[untouchables = beta] (alpha ~ Bool => beta ~ Bool) -- From 2nd branch
(alpha ~ beta) -- From 3rd branch
If we float the equality (beta ~ Int) outside of the first implication and
the equality (beta ~ Bool) out of the second we get an insoluble constraint.
But if we just leave them inside the implications we unify alpha := beta and
solve everything.
Principle:
We do not want to float equalities out which may
need the given *evidence* to become soluble.
Consequence: classes with functional dependencies don't matter (since there is
no evidence for a fundep equality), but equality superclasses do matter (since
they carry evidence).
-}
floatEqualities :: [TcTyVar] -> Bool
                -> WantedConstraints
                -> TcS (Cts, WantedConstraints)
-- Main idea: see Note [Float Equalities out of Implications]
--
-- Precondition: the wc_simple of the incoming WantedConstraints are
--               fully zonked, so that we can see their free variables
--
-- Postcondition: The returned floated constraints (Cts) are only
--                Wanted or Derived and come from the input wanted
--                ev vars or deriveds
--
-- Also performs some unifications (via promoteTyVar), adding to
-- monadically-carried ty_binds. These will be used when processing
-- floated_eqs later
--
-- Subtleties: Note [Float equalities from under a skolem binding]
--             Note [Skolem escape]
floatEqualities skols no_given_eqs wanteds@(WC { wc_simple = simples })
  | not no_given_eqs  -- There are some given equalities, so don't float
  = return (emptyBag, wanteds)   -- Note [Float Equalities out of Implications]
  | otherwise
  = do { outer_tclvl <- TcS.getTcLevel
       ; mapM_ (promoteTyVar outer_tclvl) (varSetElems (tyVarsOfCts float_eqs))
             -- See Note [Promoting unification variables]
       ; traceTcS "floatEqualities" (vcat [ text "Skols =" <+> ppr skols
                                          , text "Simples =" <+> ppr simples
                                          , text "Floated eqs =" <+> ppr float_eqs ])
       ; return (float_eqs, wanteds { wc_simple = remaining_simples }) }
  where
    skol_set = mkVarSet skols
    -- Float only constraints whose free vars avoid the skolems;
    -- see Note [Float equalities from under a skolem binding]
    (float_eqs, remaining_simples) = partitionBag (usefulToFloat is_useful) simples
    is_useful pred = tyVarsOfType pred `disjointVarSet` skol_set
{- Note [Float equalities from under a skolem binding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Which of the simple equalities can we float out? Obviously, only
ones that don't mention the skolem-bound variables. But that is
over-eager. Consider
[2] forall a. F a beta[1] ~ gamma[2], G beta[1] gamma[2] ~ Int
The second constraint doesn't mention 'a'. But if we float it
we'll promote gamma[2] to gamma'[1]. Now suppose that we learn that
beta := Bool, and F a Bool = a, and G Bool _ = Int. Then we'll
we left with the constraint
[2] forall a. a ~ gamma'[1]
which is insoluble because gamma became untouchable.
Solution: float only constraints that stand a jolly good chance of
being soluble simply by being floated, namely ones of form
a ~ ty
where 'a' is a currently-untouchable unification variable, but may
become touchable by being floated (perhaps by more than one level).
We had a very complicated rule previously, but this is nice and
simple. (To see the notes, look at this Note in a version of
TcSimplify prior to Oct 2014).
Note [Skolem escape]
~~~~~~~~~~~~~~~~~~~~
You might worry about skolem escape with all this floating.
For example, consider
[2] forall a. (a ~ F beta[2] delta,
Maybe beta[2] ~ gamma[1])
The (Maybe beta ~ gamma) doesn't mention 'a', so we float it, and
solve with gamma := beta. But what if later delta:=Int, and
F b Int = b.
Then we'd get a ~ beta[2], and solve to get beta:=a, and now the
skolem has escaped!
But it's ok: when we float (Maybe beta[2] ~ gamma[1]), we promote beta[2]
to beta[1], and that means the (a ~ beta[1]) will be stuck, as it should be.
*********************************************************************************
* *
* Defaulting and disambiguation *
* *
*********************************************************************************
-}
applyDefaultingRules :: WantedConstraints -> TcS Bool
-- Apply the type-class defaulting rules to the unsolved constraints.
-- True <=> I did some defaulting, by unifying a meta-tyvar
-- Input WantedConstraints are not necessarily zonked
applyDefaultingRules wanteds
  | isEmptyWC wanteds
  = return False
  | otherwise
  = do { info@(default_tys, _) <- getDefaultInfo
       ; wanteds <- TcS.zonkWC wanteds
             -- Zonk so that findDefaultableGroups sees through filled-in tyvars

       ; let groups = findDefaultableGroups info wanteds

       ; traceTcS "applyDefaultingRules {" $
                  vcat [ text "wanteds =" <+> ppr wanteds
                       , text "groups =" <+> ppr groups
                       , text "info =" <+> ppr info ]

       ; something_happeneds <- mapM (disambigGroup default_tys) groups

       ; traceTcS "applyDefaultingRules }" (ppr something_happeneds)

       ; return (or something_happeneds) }
findDefaultableGroups
    :: ( [Type]
       , (Bool,Bool) )     -- (Overloaded strings, extended default rules)
    -> WantedConstraints   -- Unsolved (wanted or derived)
    -> [(TyVar, [Ct])]
-- Group the unary class constraints (C tv) by their type variable,
-- keeping only the groups that the defaulting rules may legitimately
-- default
findDefaultableGroups (default_tys, (ovl_strings, extended_defaults)) wanteds
  | null default_tys
  = []
  | otherwise
  = [ (tv, map fstOf3 group)
    | group@((_,_,tv):_) <- unary_groups
    , defaultable_tyvar tv
    , defaultable_classes (map sndOf3 group) ]
  where
    simples = approximateWC wanteds
    (unaries, non_unaries) = partitionWith find_unary (bagToList simples)
    unary_groups = equivClasses cmp_tv unaries

    unary_groups :: [[(Ct, Class, TcTyVar)]]  -- (C tv) constraints
    unaries :: [(Ct, Class, TcTyVar)]         -- (C tv) constraints
    non_unaries :: [Ct]                       -- and *other* constraints

    -- Finds unary type-class constraints
    -- But take account of polykinded classes like Typeable,
    -- which may look like (Typeable * (a:*)) (Trac #8931)
    find_unary cc
        | Just (cls,tys) <- getClassPredTys_maybe (ctPred cc)
        , Just (kinds, ty) <- snocView tys  -- Ignore kind arguments
        , all isKind kinds                  -- for this purpose
        , Just tv <- tcGetTyVar_maybe ty
        , isMetaTyVar tv  -- We might have runtime-skolems in GHCi, and
                          -- we definitely don't want to try to assign to those!
        = Left (cc, cls, tv)
    find_unary cc = Right cc  -- Non unary or non dictionary

    bad_tvs :: TcTyVarSet  -- TyVars mentioned by non-unaries
    bad_tvs = mapUnionVarSet tyVarsOfCt non_unaries

    cmp_tv (_,_,tv1) (_,_,tv2) = tv1 `compare` tv2

    defaultable_tyvar tv
        = let b1 = isTyConableTyVar tv  -- Note [Avoiding spurious errors]
              b2 = not (tv `elemVarSet` bad_tvs)
          in b1 && b2

    defaultable_classes clss
        | extended_defaults = any isInteractiveClass clss
        | otherwise = all is_std_class clss && (any is_num_class clss)

    -- In interactive mode, or with -XExtendedDefaultRules,
    -- we default Show a to Show () to avoid gratuitous errors on "show []"
    isInteractiveClass cls
        = is_num_class cls || (classKey cls `elem` [showClassKey, eqClassKey, ordClassKey])

    is_num_class cls = isNumericClass cls || (ovl_strings && (cls `hasKey` isStringClassKey))
    -- is_num_class adds IsString to the standard numeric classes,
    -- when -foverloaded-strings is enabled

    is_std_class cls = isStandardClass cls || (ovl_strings && (cls `hasKey` isStringClassKey))
    -- Similarly is_std_class
------------------------------
disambigGroup :: [Type]           -- The default types
              -> (TcTyVar, [Ct])  -- All classes of the form (C a)
                                  --  sharing same type variable
              -> TcS Bool         -- True <=> something happened, reflected in ty_binds
-- Try each candidate default type in turn: speculatively solve the
-- group's constraints inside a throw-away nested implication, and
-- commit (unifyTyVar) to the first type for which they all solve.
disambigGroup [] _
  = return False
disambigGroup (default_ty:default_tys) group@(the_tv, wanteds)
  = do { traceTcS "disambigGroup {" (vcat [ ppr default_ty, ppr the_tv, ppr wanteds ])
       ; fake_ev_binds_var <- TcS.newTcEvBinds
             -- Fake bindings/level so the speculative solve leaves no trace
       ; tclvl <- TcS.getTcLevel
       ; success <- nestImplicTcS fake_ev_binds_var (pushTcLevel tclvl)
                    try_group

       ; if success then
             -- Success: record the type variable binding, and return
             do { unifyTyVar the_tv default_ty
                ; wrapWarnTcS $ warnDefaulting wanteds default_ty
                ; traceTcS "disambigGroup succeeded }" (ppr default_ty)
                ; return True }
         else
             -- Failure: try with the next type
             do { traceTcS "disambigGroup failed, will try other default types }"
                           (ppr default_ty)
                ; disambigGroup default_tys group } }
  where
    try_group
      | Just subst <- mb_subst
      = do { wanted_evs <- mapM (newWantedEvVarNC loc . substTy subst . ctPred)
                                wanteds
           ; residual_wanted <- solveSimpleWanteds $ listToBag $
                                map mkNonCanonical wanted_evs
           ; return (isEmptyWC residual_wanted) }
      | otherwise
      = return False

    tmpl_tvs = extendVarSet (tyVarsOfType (tyVarKind the_tv)) the_tv
    mb_subst = tcMatchTy tmpl_tvs (mkTyVarTy the_tv) default_ty
         -- Make sure the kinds match too; hence this call to tcMatchTy
         -- E.g. suppose the only constraint was (Typeable k (a::k))

    loc = CtLoc { ctl_origin = GivenOrigin UnkSkol
                , ctl_env = panic "disambigGroup:env"
                , ctl_depth = initialSubGoalDepth }
{-
Note [Avoiding spurious errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When doing the unification for defaulting, we check for skolem
type variables, and simply don't default them. For example:
f = (*) -- Monomorphic
g :: Num a => a -> a
g x = f x x
Here, we get a complaint when checking the type signature for g,
that g isn't polymorphic enough; but then we get another one when
dealing with the (Num a) context arising from f's definition;
we try to unify a with Int (to default it), but find that it's
already been unified with the rigid variable from g's type sig
-}
| christiaanb/ghc | compiler/typecheck/TcSimplify.hs | bsd-3-clause | 70,406 | 0 | 19 | 20,063 | 7,110 | 3,759 | 3,351 | 569 | 6 |
module Data.Text.Lazy.AsLt
( module Data.Text.Lazy.AsLt
) where
-- generated by https://github.com/rvion/ride/tree/master/jetpack-gen
import qualified Data.Text.Lazy as I
-- NOTE: generated wrapper module (see header comment).  Each @lt_*@ binding
-- below is a direct alias for the same-named function in 'Data.Text.Lazy';
-- the comment above each binding records its type.  Do not edit by hand.
-- lt_empty :: Text
lt_empty = I.empty
-- lt_foldlChunks :: forall a. (a -> Text -> a) -> a -> Text -> a
lt_foldlChunks = I.foldlChunks
-- lt_foldrChunks :: forall a. (Text -> a -> a) -> a -> Text -> a
lt_foldrChunks = I.foldrChunks
-- lt_all :: (Char -> Bool) -> Text -> Bool
lt_all = I.all
-- lt_any :: (Char -> Bool) -> Text -> Bool
lt_any = I.any
-- lt_append :: Text -> Text -> Text
lt_append = I.append
-- lt_break :: (Char -> Bool) -> Text -> (Text, Text)
lt_break = I.break
-- lt_breakOn :: Text -> Text -> (Text, Text)
lt_breakOn = I.breakOn
-- lt_breakOnAll :: Text -> Text -> [(Text, Text)]
lt_breakOnAll = I.breakOnAll
-- lt_breakOnEnd :: Text -> Text -> (Text, Text)
lt_breakOnEnd = I.breakOnEnd
-- lt_center :: Int64 -> Char -> Text -> Text
lt_center = I.center
-- lt_chunksOf :: Int64 -> Text -> [Text]
lt_chunksOf = I.chunksOf
-- lt_commonPrefixes :: Text -> Text -> Maybe (Text, Text, Text)
lt_commonPrefixes = I.commonPrefixes
-- lt_compareLength :: Text -> Int64 -> Ordering
lt_compareLength = I.compareLength
-- lt_concat :: [Text] -> Text
lt_concat = I.concat
-- lt_concatMap :: (Char -> Text) -> Text -> Text
lt_concatMap = I.concatMap
-- lt_cons :: Char -> Text -> Text
lt_cons = I.cons
-- lt_count :: Text -> Text -> Int64
lt_count = I.count
-- lt_cycle :: Text -> Text
lt_cycle = I.cycle
-- lt_drop :: Int64 -> Text -> Text
lt_drop = I.drop
-- lt_dropAround :: (Char -> Bool) -> Text -> Text
lt_dropAround = I.dropAround
-- lt_dropEnd :: Int64 -> Text -> Text
lt_dropEnd = I.dropEnd
-- lt_dropWhile :: (Char -> Bool) -> Text -> Text
lt_dropWhile = I.dropWhile
-- lt_dropWhileEnd :: (Char -> Bool) -> Text -> Text
lt_dropWhileEnd = I.dropWhileEnd
-- lt_filter :: (Char -> Bool) -> Text -> Text
lt_filter = I.filter
-- lt_find :: (Char -> Bool) -> Text -> Maybe Char
lt_find = I.find
-- lt_foldl :: forall a. (a -> Char -> a) -> a -> Text -> a
lt_foldl = I.foldl
-- lt_foldl' :: forall a. (a -> Char -> a) -> a -> Text -> a
lt_foldl' = I.foldl'
-- lt_foldl1 :: (Char -> Char -> Char) -> Text -> Char
lt_foldl1 = I.foldl1
-- lt_foldl1' :: (Char -> Char -> Char) -> Text -> Char
lt_foldl1' = I.foldl1'
-- lt_foldr :: forall a. (Char -> a -> a) -> a -> Text -> a
lt_foldr = I.foldr
-- lt_foldr1 :: (Char -> Char -> Char) -> Text -> Char
lt_foldr1 = I.foldr1
-- lt_fromChunks :: [Text] -> Text
lt_fromChunks = I.fromChunks
-- lt_fromStrict :: Text -> Text
lt_fromStrict = I.fromStrict
-- lt_group :: Text -> [Text]
lt_group = I.group
-- lt_groupBy :: (Char -> Char -> Bool) -> Text -> [Text]
lt_groupBy = I.groupBy
-- lt_head :: Text -> Char
lt_head = I.head
-- lt_index :: Text -> Int64 -> Char
lt_index = I.index
-- lt_init :: Text -> Text
lt_init = I.init
-- lt_inits :: Text -> [Text]
lt_inits = I.inits
-- lt_intercalate :: Text -> [Text] -> Text
lt_intercalate = I.intercalate
-- lt_intersperse :: Char -> Text -> Text
lt_intersperse = I.intersperse
-- lt_isInfixOf :: Text -> Text -> Bool
lt_isInfixOf = I.isInfixOf
-- lt_isPrefixOf :: Text -> Text -> Bool
lt_isPrefixOf = I.isPrefixOf
-- lt_isSuffixOf :: Text -> Text -> Bool
lt_isSuffixOf = I.isSuffixOf
-- lt_iterate :: (Char -> Char) -> Char -> Text
lt_iterate = I.iterate
-- lt_justifyLeft :: Int64 -> Char -> Text -> Text
lt_justifyLeft = I.justifyLeft
-- lt_justifyRight :: Int64 -> Char -> Text -> Text
lt_justifyRight = I.justifyRight
-- lt_last :: Text -> Char
lt_last = I.last
-- lt_length :: Text -> Int64
lt_length = I.length
-- lt_lines :: Text -> [Text]
lt_lines = I.lines
-- lt_map :: (Char -> Char) -> Text -> Text
lt_map = I.map
-- lt_mapAccumL :: forall a. (a -> Char -> (a, Char)) -> a -> Text -> (a, Text)
lt_mapAccumL = I.mapAccumL
-- lt_mapAccumR :: forall a. (a -> Char -> (a, Char)) -> a -> Text -> (a, Text)
lt_mapAccumR = I.mapAccumR
-- lt_maximum :: Text -> Char
lt_maximum = I.maximum
-- lt_minimum :: Text -> Char
lt_minimum = I.minimum
-- lt_null :: Text -> Bool
lt_null = I.null
-- lt_pack :: String -> Text
lt_pack = I.pack
-- lt_partition :: (Char -> Bool) -> Text -> (Text, Text)
lt_partition = I.partition
-- lt_repeat :: Char -> Text
lt_repeat = I.repeat
-- lt_replace :: Text -> Text -> Text -> Text
lt_replace = I.replace
-- lt_replicate :: Int64 -> Text -> Text
lt_replicate = I.replicate
-- lt_reverse :: Text -> Text
lt_reverse = I.reverse
-- lt_scanl :: (Char -> Char -> Char) -> Char -> Text -> Text
lt_scanl = I.scanl
-- lt_scanl1 :: (Char -> Char -> Char) -> Text -> Text
lt_scanl1 = I.scanl1
-- lt_scanr :: (Char -> Char -> Char) -> Char -> Text -> Text
lt_scanr = I.scanr
-- lt_scanr1 :: (Char -> Char -> Char) -> Text -> Text
lt_scanr1 = I.scanr1
-- lt_singleton :: Char -> Text
lt_singleton = I.singleton
-- lt_snoc :: Text -> Char -> Text
lt_snoc = I.snoc
-- lt_span :: (Char -> Bool) -> Text -> (Text, Text)
lt_span = I.span
-- lt_split :: (Char -> Bool) -> Text -> [Text]
lt_split = I.split
-- lt_splitAt :: Int64 -> Text -> (Text, Text)
lt_splitAt = I.splitAt
-- lt_splitOn :: Text -> Text -> [Text]
lt_splitOn = I.splitOn
-- lt_strip :: Text -> Text
lt_strip = I.strip
-- lt_stripEnd :: Text -> Text
lt_stripEnd = I.stripEnd
-- lt_stripPrefix :: Text -> Text -> Maybe Text
lt_stripPrefix = I.stripPrefix
-- lt_stripStart :: Text -> Text
lt_stripStart = I.stripStart
-- lt_stripSuffix :: Text -> Text -> Maybe Text
lt_stripSuffix = I.stripSuffix
-- lt_tail :: Text -> Text
lt_tail = I.tail
-- lt_tails :: Text -> [Text]
lt_tails = I.tails
-- lt_take :: Int64 -> Text -> Text
lt_take = I.take
-- lt_takeEnd :: Int64 -> Text -> Text
lt_takeEnd = I.takeEnd
-- lt_takeWhile :: (Char -> Bool) -> Text -> Text
lt_takeWhile = I.takeWhile
-- lt_toCaseFold :: Text -> Text
lt_toCaseFold = I.toCaseFold
-- lt_toChunks :: Text -> [Text]
lt_toChunks = I.toChunks
-- lt_toLower :: Text -> Text
lt_toLower = I.toLower
-- lt_toStrict :: Text -> Text
lt_toStrict = I.toStrict
-- lt_toTitle :: Text -> Text
lt_toTitle = I.toTitle
-- lt_toUpper :: Text -> Text
lt_toUpper = I.toUpper
-- lt_transpose :: [Text] -> [Text]
lt_transpose = I.transpose
-- lt_uncons :: Text -> Maybe (Char, Text)
lt_uncons = I.uncons
-- lt_unfoldr :: forall a. (a -> Maybe (Char, a)) -> a -> Text
lt_unfoldr = I.unfoldr
-- lt_unfoldrN :: forall a. Int64 -> (a -> Maybe (Char, a)) -> a -> Text
lt_unfoldrN = I.unfoldrN
-- lt_unlines :: [Text] -> Text
lt_unlines = I.unlines
-- lt_unpack :: Text -> String
lt_unpack = I.unpack
-- lt_unwords :: [Text] -> Text
lt_unwords = I.unwords
-- lt_words :: Text -> [Text]
lt_words = I.words
-- lt_zip :: Text -> Text -> [(Char, Char)]
lt_zip = I.zip
-- lt_zipWith :: (Char -> Char -> Char) -> Text -> Text -> Text
lt_zipWith = I.zipWith
-- Convenience alias for the lazy Text type itself.
type LtText = I.Text
| rvion/ride | jetpack/src/Data/Text/Lazy/AsLt.hs | bsd-3-clause | 6,863 | 0 | 5 | 1,312 | 828 | 521 | 307 | 103 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Lib (
generate,
DeltaParams,
defaultDeltaParams,
setDeltaParamsAuth,
setDeltaParamsOwner,
setDeltaParamsRepo,
setDeltaParamsSince,
setDeltaParamsUntil,
setDeltaParamsLabel,
) where
import Data.Function ((&))
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.String (fromString)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Time.Format (defaultTimeLocale, formatTime)
import Data.Vector (Vector)
import qualified Data.Vector as V
import qualified GitHub as GH
-- | Parameters required to generate a Delta.
data DeltaParams =
DeltaParams
{ deltaParamsAuth :: Maybe GH.Auth -- ^ Optional GitHub authentication.
, deltaParamsOwner :: GH.Name GH.Owner -- ^ Repository owner.
, deltaParamsRepo :: GH.Name GH.Repo -- ^ Repository name.
, deltaParamsSince :: GH.Name GH.GitCommit -- ^ Commit SHA marking the start of the range.
, deltaParamsUntil :: Maybe (GH.Name GH.GitCommit) -- ^ Optional end commit; 'generate' uses \"now\" when absent.
, deltaParamsLabel :: Maybe Text -- ^ Optional version label for the changelog entry.
}
-- | Default params pointing at the gh-delta repo itself.
defaultDeltaParams :: DeltaParams
defaultDeltaParams = DeltaParams
    { deltaParamsAuth  = Nothing
    , deltaParamsOwner = "filib"
    , deltaParamsRepo  = "gh-delta"
    , deltaParamsSince = "f44caa05adf066ae441cbdbebe54010d94172e9a"
    , deltaParamsUntil = Nothing
    , deltaParamsLabel = Nothing
    }
-- | Setter for personal access token.
setDeltaParamsAuth :: Maybe String -> DeltaParams -> DeltaParams
setDeltaParamsAuth token p = p { deltaParamsAuth = fmap (GH.OAuth . fromString) token }

-- | Setter for owner.
setDeltaParamsOwner :: String -> DeltaParams -> DeltaParams
setDeltaParamsOwner owner p = p { deltaParamsOwner = fromString owner }

-- | Setter for repo.
setDeltaParamsRepo :: String -> DeltaParams -> DeltaParams
setDeltaParamsRepo repo p = p { deltaParamsRepo = fromString repo }

-- | Setter for the starting commit SHA.
setDeltaParamsSince :: String -> DeltaParams -> DeltaParams
setDeltaParamsSince sha p = p { deltaParamsSince = fromString sha }

-- | Setter for the (optional) ending commit SHA.
setDeltaParamsUntil :: Maybe String -> DeltaParams -> DeltaParams
setDeltaParamsUntil sha p = p { deltaParamsUntil = fmap fromString sha }

-- | Setter for the (optional) version label.
setDeltaParamsLabel :: Maybe String -> DeltaParams -> DeltaParams
setDeltaParamsLabel label p = p { deltaParamsLabel = fmap fromString label }
-- | Single event in a changelog: the PR author's login, the PR title and
-- its HTML link (see 'toEvent' for how these are extracted).
data Event = Event { eventAuthor :: Text, eventTitle :: Text, eventLink :: Text }
-- | Single changelog entry.
data Delta =
Delta
{ deltaDateSince :: Text -- ^ Window start, formatted YYYY-MM-DD (see 'toDelta').
, deltaDateUntil :: Text -- ^ Window end, formatted YYYY-MM-DD.
, deltaEvents :: [Event] -- ^ Pull requests merged inside the window.
, deltaLabel :: Maybe Text -- ^ Optional version label.
}
-- | Generate changelog or produce a meaningful error.
--
-- The upper bound of the window is the given SHA's commit date when
-- 'deltaParamsUntil' is set, otherwise the current time.  Both cases then
-- share one fetch/render pipeline (previously that code was duplicated
-- in the two branches).
generate :: DeltaParams -> IO (Either String Text)
generate params@DeltaParams { .. } = do
    dateSinceResponse <- commitDate params deltaParamsSince
    case dateSinceResponse of
      Left err -> failure $ renderError err
      Right dateSince -> do
        -- Resolve the upper bound of the window; getCurrentTime cannot fail,
        -- so it is injected as Right to match commitDate's result type.
        dateUntilResponse <- case deltaParamsUntil of
          Just sha -> commitDate params sha
          Nothing  -> fmap Right getCurrentTime
        case dateUntilResponse of
          Left err -> failure $ renderError err
          Right dateUntil -> do
            pullRequests <- fetchPullRequests dateSince dateUntil
            success $ renderTemplate dateSince dateUntil pullRequests
  where
    -- Closed PRs merged within the (since, until) window.
    fetchPullRequests :: UTCTime -> UTCTime -> IO (Vector GH.SimplePullRequest)
    fetchPullRequests = closedPullRequestsSince params

    renderError :: (Show a) => a -> String
    renderError = show

    renderTemplate :: UTCTime -> UTCTime -> Vector GH.SimplePullRequest -> Text
    renderTemplate x y z = template (toDelta x y z deltaParamsLabel)

    failure :: (Monad m) => a -> m (Either a b)
    failure = return . Left

    success :: (Monad m) => b -> m (Either a b)
    success = return . Right
-- | Get the author date of the given commit, or the API error.
commitDate :: DeltaParams -> GH.Name GH.GitCommit -> IO (Either GH.Error UTCTime)
commitDate DeltaParams { .. } sha = do
    response <- GH.executeRequestMaybe deltaParamsAuth $
        GH.gitCommitR deltaParamsOwner deltaParamsRepo sha
    -- Map over the success side only; errors pass through untouched.
    return $ fmap (GH.gitUserDate . GH.gitCommitAuthor) response
-- | Get pull requests closed since a given date.
-- Fetches a single page of up to 100 closed PRs and keeps those merged
-- strictly inside the (dateSince, dateUntil) window.
-- NOTE(review): a failed request aborts via 'error' instead of returning
-- an Either like 'commitDate' — consider unifying the error handling.
closedPullRequestsSince :: DeltaParams -> UTCTime -> UTCTime -> IO (Vector GH.SimplePullRequest)
closedPullRequestsSince DeltaParams { .. } dateSince dateUntil = do
response <- GH.executeRequestMaybe deltaParamsAuth $
GH.pullRequestsForR deltaParamsOwner deltaParamsRepo opts (Just 100)
case response of
Left err -> error $ show err
Right prs -> return $ V.filter hasSinceBeenMerged prs
where
opts :: GH.PullRequestOptions
opts = GH.defaultPullRequestOptions
& GH.setPullRequestOptionsState GH.PullRequestStateClosed
-- Merged (not merely closed) and strictly inside the window.
hasSinceBeenMerged :: GH.SimplePullRequest -> Bool
hasSinceBeenMerged pr =
case GH.simplePullRequestMergedAt pr of
Just mergedAt -> mergedAt > dateSince && mergedAt < dateUntil
_ -> False
-- | Render internal representation as markdown:
-- a "##" header line, a blank line, then one "*" bullet per event.
template :: Delta -> Text
template Delta { .. } = titleTemplate <>
newLine <>
newLine <>
T.intercalate newLine (eventTemplate <$> deltaEvents)
where
-- One bullet: "* <title> - @<author> <link>".
eventTemplate :: Event -> Text
eventTemplate Event { .. } = T.intercalate space
["*", eventTitle, "-", "@" <> eventAuthor, eventLink]
newLine :: Text
newLine = "\n"
space :: Text
space = " "
titleTemplate :: Text
titleTemplate = T.intercalate space ["##", labelTemplate, deltaDateSince, "to", deltaDateUntil]
-- Bracketed label; "Unreleased" when no label was supplied.
labelTemplate :: Text
labelTemplate = "[" <> fromMaybe "Unreleased" deltaLabel <> "]"
-- | Convert collection of pull requests to internal representation.
-- Partially applied: the final @Maybe Text@ argument becomes 'deltaLabel'.
toDelta :: UTCTime -> UTCTime -> Vector GH.SimplePullRequest -> Maybe Text -> Delta
toDelta dateSince dateUntil prs = Delta (formatDate dateSince) (formatDate dateUntil) events
where
-- Render a timestamp as YYYY-MM-DD.
formatDate :: UTCTime -> Text
formatDate x = T.pack $ formatTime defaultTimeLocale "%Y-%m-%d" x
events :: [Event]
events = V.toList $ fmap toEvent prs
-- | Convert pull request to internal representation:
-- author login, title and HTML link, in that order.
toEvent :: GH.SimplePullRequest -> Event
toEvent pr =
    Event
        (GH.untagName $ GH.simpleUserLogin $ GH.simplePullRequestUser pr)
        (GH.simplePullRequestTitle pr)
        (GH.getUrl $ GH.pullRequestLinksHtml $ GH.simplePullRequestLinks pr)
| iconnect/gh-delta | src/Lib.hs | bsd-3-clause | 7,090 | 0 | 21 | 1,837 | 1,674 | 881 | 793 | 135 | 4 |
module Data.Vector.Strategies
( parVector
-- * Re-exported for convenience
, NFData, using
) where
import Control.DeepSeq (NFData(..))
import Control.Parallel.Strategies
import Control.Monad
import qualified Data.Vector.Generic as V
import qualified Data.Vector as VB
-- |Evaluate the elements of a boxed vector in parallel.
--
-- The vector is divided into chunks of length less than or equal to the
-- provided chunk size (first argument); each chunk of elements is sparked
-- off for evaluation to normal form.
--
-- Use this along with the "parallel" package's 'using' function:
--
-- @
-- vec \``using`\` (`parVector` chunkSize)
-- @
--
-- 'parVector' can not provide any benefits (read: no parallelism) for unboxed vectors!
parVector :: V.Vector v a => NFData a => Int -> Strategy (v a)
parVector chunkSize vec =
    fmap V.fromList (parListChunk chunkSize rdeepseq (V.toList vec))
| TomMD/vector-strategies | Data/Vector/Strategies.hs | bsd-3-clause | 873 | 4 | 10 | 148 | 142 | 87 | 55 | -1 | -1 |
module Main where
import Control.Concurrent
import Control.Monad
import Control.Monad.Trans
import qualified Data.ByteString.Char8 as S8
import Data.IterIO.Iter
import Data.IterIO.ListLike
import Data.IterIO.Inum
import Data.IterIO.Extra
-- Consume numbers from the input; each must equal the expected value or we
-- abort via 'error'.  Feed (x - 1) back into the supplied output Iter and
-- recurse with a decremented expectation until the counter reaches zero.
minusOne :: Int -> Iter [Int] IO () -> Iter [Int] IO ()
minusOne expect iout = do
x <- headLI
when (x /= expect) $ error $ "expected " ++ show expect ++ ", got " ++ show x
iout' <- enumPure [x - 1] iout
if x <= 0 then runI iout' else minusOne (x - 1) iout'
-- Pass-through inner enumerator that prints each chunk before forwarding it.
inumPrintList :: Inum [Int] [Int] IO a
inumPrintList = mkInum $ do
x <- dataI
liftIO $ S8.putStrLn $ S8.pack (show x)
return x
-- Single loopback pipe: the countdown's output is fed back into its own
-- input via 'iterLoop', counting 10 down to 0 in one thread.
ping :: IO ()
ping = do
(iterA, enumA) <- iterLoop
(enumPure [10] `cat` enumA |. inumNop) |. inumNop
|$ inumNop .| inumPrintList .| minusOne 10 iterA
-- Two loopback pipes cross-connected between two forked threads; the QSemN
-- is used to wait until both threads have signalled completion.
pong :: IO ()
pong = do
sem <- newQSemN 0
(iterA, enumB) <- iterLoop
(iterB, enumA) <- iterLoop
_ <- forkIO $ do
(enumPure [10] `cat` enumA |. inumNop) |. inumNop
|$ inumNop .| inumPrintList .| minusOne 10 iterA
signalQSemN sem 1
_ <- forkIO $ do
enumB |. inumNop |$ inumNop .| iterB >> signalQSemN sem 1
signalQSemN sem 1
waitQSemN sem 2
S8.putStrLn $ S8.pack "Done"
-- Run the single-threaded exercise first, then the two-threaded one.
main :: IO ()
main = do
    ping
    pong
| scslab/iterIO | tests/pingpong.hs | bsd-3-clause | 1,320 | 0 | 19 | 357 | 529 | 268 | 261 | 41 | 2 |
module Main where
import Distribution.Simple
import Distribution.Simple.UUAGC
-- Cabal entry point: the standard Simple build augmented with the UUAGC
-- preprocessor hooks (compiles .ag attribute grammars during the build).
main = defaultMainWithHooks uuagcUserHook
| norm2782/uuagc | doc/ag-tutorial/gen/Setup.hs | bsd-3-clause | 123 | 0 | 5 | 14 | 23 | 14 | 9 | 4 | 1 |
module Haskmon.Types.Move(
module Haskmon.Types.Move,
I.Move, I.MetaMove
) where
import Haskmon.Types.Internals(MetaData, MetaMove, Move, MetaMoveLearnType)
import qualified Haskmon.Types.Internals as I
-- Accessors re-exported from the internal module so that users of this
-- module never need to import 'Haskmon.Types.Internals' directly.
mMoveName :: MetaMove -> String
mMoveName = I.mMoveName
mMoveLearnType :: MetaMove -> MetaMoveLearnType
mMoveLearnType = I.mMoveLearnType
-- Resolve a move reference into a full 'Move' (performs IO).
getMove :: MetaMove -> IO Move
getMove = I.getMove
moveName :: Move -> String
moveName = I.moveName
movePower :: Move -> Word
movePower = I.movePower
movePp :: Move -> Word
movePp = I.movePp
moveAccuracy :: Move -> Word
moveAccuracy = I.moveAccuracy
moveMetadata :: Move -> MetaData
moveMetadata = I.moveMetadata
| bitemyapp/Haskmon | src/Haskmon/Types/Move.hs | mit | 673 | 0 | 6 | 102 | 186 | 110 | 76 | 21 | 1 |
{-# LANGUAGE RecordWildCards #-}
-- | The module implements /directed acyclic word graphs/ (DAWGs) internaly
-- represented as /minimal acyclic deterministic finite-state automata/.
--
-- In comparison to "Data.DAWG.Dynamic" module the automaton implemented here:
--
-- * Keeps all nodes in one array and therefore uses less memory,
--
-- * When 'weigh'ed, it can be used to perform static hashing with
-- 'index' and 'byIndex' functions,
--
-- * Doesn't provide insert/delete family of operations.
module Data.DAWG.Static
(
-- * DAWG type
DAWG
-- * ID
, ID
, rootID
, byID
-- * Query
, lookup
, edges
, submap
, numStates
, numEdges
-- * Weight
, Weight
, weigh
, size
, index
, byIndex
-- * Construction
, empty
, fromList
, fromListWith
, fromLang
-- * Conversion
, assocs
, keys
, elems
, freeze
-- , thaw
) where
import Prelude hiding (lookup)
import Control.Applicative ((<$), (<$>), (<*>), (<|>))
import Control.Arrow (first)
import Data.Binary (Binary, put, get)
import Data.Vector.Binary ()
import Data.Vector.Unboxed (Unbox)
import qualified Data.IntMap as M
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
import Data.DAWG.Types
import qualified Data.DAWG.Util as Util
import qualified Data.DAWG.Trans as T
import qualified Data.DAWG.Static.Node as N
import qualified Data.DAWG.Graph as G
import qualified Data.DAWG.Dynamic as D
import qualified Data.DAWG.Dynamic.Internal as D
-- | @DAWG a b c@ constitutes an automaton with alphabet symbols of type /a/,
-- transition labels of type /b/ and node values of type /Maybe c/.
-- All nodes are stored in a 'V.Vector' with positions of nodes corresponding
-- to their 'ID's.
--
data DAWG a b c = DAWG
{ nodes :: V.Vector (N.Node b c)
-- | The actual DAWG root has the 0 ID. Thanks to the 'rootID'
-- attribute, we can represent a submap of a DAWG.
, rootID :: ID
} deriving (Show, Eq, Ord)
-- Serialise the node vector together with the root pointer, in that order.
instance (Binary b, Binary c, Unbox b) => Binary (DAWG a b c) where
put DAWG{..} = put nodes >> put rootID
get = DAWG <$> get <*> get
-- | Retrieve the sub-DAWG rooted at a given ID, or `Nothing` when the ID
-- is out of bounds.  Together with 'rootID' this lets you store plain IDs
-- in a data structure instead of whole DAWGs.
byID :: ID -> DAWG a b c -> Maybe (DAWG a b c)
byID i d
    | i >= 0 && i < V.length (nodes d) = Just (d { rootID = i })
    | otherwise                        = Nothing
-- | Empty DAWG: a root branch whose epsilon child is a valueless leaf.
empty :: Unbox b => DAWG a b c
empty = flip DAWG 0 $ V.fromList
[ N.Branch 1 T.empty U.empty
, N.Leaf Nothing ]
-- | A list of outgoing edges.
edges :: Enum a => DAWG a b c -> [(a, DAWG a b c)]
edges d =
[ (toEnum sym, d{ rootID = i })
| (sym, i) <- N.edges n ]
where
n = nodeBy (rootID d) d
-- | Return the sub-DAWG containing all keys beginning with a prefix.
-- The in-memory representation of the resultant DAWG is the same as of
-- the original one, only the pointer to the DAWG root will be different.
submap :: (Enum a, Unbox b) => [a] -> DAWG a b c -> DAWG a b c
submap xs d = case follow (map fromEnum xs) (rootID d) d of
Just i -> d { rootID = i }
Nothing -> empty
{-# SPECIALIZE submap :: Unbox b => String -> DAWG Char b c -> DAWG Char b c #-}
-- | Number of states in the automaton.
-- TODO: The function ignores the `rootID` value, it won't work properly
-- after using the `submap` function.
numStates :: DAWG a b c -> Int
numStates = V.length . nodes
-- | Number of edges in the automaton.
-- TODO: The function ignores the `rootID` value, it won't work properly
-- after using the `submap` function.
numEdges :: DAWG a b c -> Int
numEdges = sum . map (length . N.edges) . V.toList . nodes
-- | Node with the given identifier.
nodeBy :: ID -> DAWG a b c -> N.Node b c
nodeBy i d = nodes d V.! i
-- | Value in leaf node with a given ID.
-- (The value lives in the epsilon child of the given node.)
leafValue :: N.Node b c -> DAWG a b c -> Maybe c
leafValue n = N.value . nodeBy (N.eps n)
-- | Follow the path from the given identifier.
-- Yields Nothing as soon as some symbol has no transition.
follow :: Unbox b => [Sym] -> ID -> DAWG a b c -> Maybe ID
follow (x:xs) i d = do
j <- N.onSym x (nodeBy i d)
follow xs j d
follow [] i _ = Just i
-- | Find value associated with the key.
lookup :: (Enum a, Unbox b) => [a] -> DAWG a b c -> Maybe c
lookup xs d = lookup'I (map fromEnum xs) (rootID d) d
{-# SPECIALIZE lookup :: Unbox b => String -> DAWG Char b c -> Maybe c #-}
-- Worker for 'lookup': follow the whole key, then read the leaf value.
lookup'I :: Unbox b => [Sym] -> ID -> DAWG a b c -> Maybe c
lookup'I xs i d = do
j <- follow xs i d
leafValue (nodeBy j d) d
-- -- | Find all (key, value) pairs such that key is prefixed
-- -- with the given string.
-- withPrefix :: (Enum a, Unbox b) => [a] -> DAWG a b c -> [([a], c)]
-- withPrefix xs d = maybe [] id $ do
-- i <- follow (map fromEnum xs) 0 d
-- let prepare = (xs ++) . map toEnum
-- return $ map (first prepare) (subPairs i d)
-- {-# SPECIALIZE withPrefix
-- :: Unbox b => String -> DAWG Char b c
-- -> [(String, c)] #-}
-- | Return all (key, value) pairs in ascending key order in the
-- sub-DAWG determined by the given node ID.
subPairs :: Unbox b => ID -> DAWG a b c -> [([Sym], c)]
subPairs i d =
here ++ concatMap there (N.edges n)
where
n = nodeBy i d
-- A value at this very node corresponds to the empty key suffix.
here = case leafValue n d of
Just x -> [([], x)]
Nothing -> []
-- Descend through an edge, prepending its symbol to every sub-key.
there (x, j) = map (first (x:)) (subPairs j d)
-- | Return all (key, value) pairs in the DAWG in ascending key order.
assocs :: (Enum a, Unbox b) => DAWG a b c -> [([a], c)]
assocs d = map (first (map toEnum)) (subPairs (rootID d) d)
{-# SPECIALIZE assocs :: Unbox b => DAWG Char b c -> [(String, c)] #-}
-- | Return all keys of the DAWG in ascending order.
keys :: (Enum a, Unbox b) => DAWG a b c -> [[a]]
keys = map fst . assocs
{-# SPECIALIZE keys :: Unbox b => DAWG Char b c -> [String] #-}
-- | Return all elements of the DAWG in the ascending order of their keys.
elems :: Unbox b => DAWG a b c -> [c]
elems d = map snd $ subPairs (rootID d) d
-- | Construct 'DAWG' from the list of (word, value) pairs.
-- First a 'D.DAWG' is created and then it is frozen using
-- the 'freeze' function.
fromList :: (Enum a, Ord b) => [([a], b)] -> DAWG a () b
fromList = freeze . D.fromList
{-# SPECIALIZE fromList :: Ord b => [(String, b)] -> DAWG Char () b #-}
-- | Construct DAWG from the list of (word, value) pairs
-- with a combining function. The combining function is
-- applied strictly. First a 'D.DAWG' is created and then
-- it is frozen using the 'freeze' function.
fromListWith :: (Enum a, Ord b) => (b -> b -> b) -> [([a], b)] -> DAWG a () b
fromListWith f = freeze . D.fromListWith f
{-# SPECIALIZE fromListWith
:: Ord b => (b -> b -> b)
-> [(String, b)] -> DAWG Char () b #-}
-- | Make DAWG from the list of words. Annotate each word with
-- the @()@ value. First a 'D.DAWG' is created and then it is frozen
-- using the 'freeze' function.
fromLang :: Enum a => [[a]] -> DAWG a () ()
fromLang = freeze . D.fromLang
{-# SPECIALIZE fromLang :: [String] -> DAWG Char () () #-}
-- | Weight of a node corresponds to the number of final states
-- reachable from the node. Weight of an edge is a sum of weights
-- of preceding nodes outgoing from the same parent node.
type Weight = Int
-- | Compute node weights and store corresponding values in transition labels.
-- Be aware, that the entire DAWG will be weighted, even when (because of the use of
-- the `submap` function) only a part of the DAWG is currently selected.
weigh :: DAWG a b c -> DAWG a Weight c
weigh d = flip DAWG (rootID d) $ V.fromList
[ branch n ws
| i <- [0 .. numStates d - 1]
, let n = nodeBy i d
, let ws = accum (N.children n) ]
where
-- Branch with new weights.
branch N.Branch{..} ws = N.Branch eps transMap ws
branch N.Leaf{..} _ = N.Leaf value
-- In nodeWeight node weights are memoized (one vector cell per state).
nodeWeight = ((V.!) . V.fromList) (map detWeight [0 .. numStates d - 1])
-- Determine weight of the node: 1 for a valued leaf, 0 for an empty one,
-- otherwise the sum over the epsilon child and all plain children.
detWeight i = case nodeBy i d of
N.Leaf w -> maybe 0 (const 1) w
n -> sum . map nodeWeight $ allChildren n
-- Weights for subsequent edges: exclusive prefix sums of child weights.
accum = U.fromList . init . scanl (+) 0 . map nodeWeight
-- Plain children and epsilon child.
allChildren n = N.eps n : N.children n
-- | Construct immutable version of the automaton.
freeze :: D.DAWG a b -> DAWG a () b
freeze d = flip DAWG 0 . V.fromList $
map (N.fromDyn newID . oldBy)
(M.elems (inverse old2new))
where
-- Map from old to new identifiers. The root identifier is mapped to 0.
old2new = M.fromList $ (D.root d, 0) : zip (nodeIDs d) [1..]
newID = (M.!) old2new
-- List of node IDs without the root ID.
nodeIDs = filter (/= D.root d) . map fst . M.assocs . G.nodeMap . D.graph
-- Non-frozen node by given identifier.
oldBy i = G.nodeBy i (D.graph d)
-- | Inverse of the map: swap every (key, value) pair.
inverse :: M.IntMap Int -> M.IntMap Int
inverse m = M.fromList [ (v, k) | (k, v) <- M.toList m ]
-- -- | Yield mutable version of the automaton.
-- thaw :: (Unbox b, Ord a) => DAWG a b c -> D.DAWG a b
-- thaw d =
-- D.fromNodes nodes 0
-- where
-- -- List of resulting nodes.
-- nodes = branchNodes ++ leafNodes
-- -- Branching nodes.
-- branchNodes =
-- [
-- -- Number of states used to shift new value IDs.
-- n = numStates d
-- -- New identifiers for value nodes.
-- valIDs = foldl' updID GM.empty (values d)
-- -- Values in the automaton.
-- values = map value . V.toList . nodes
-- -- Update ID map.
-- updID m v = case GM.lookup v m of
-- Just i -> m
-- Nothing ->
-- let j = GM.size m + n
-- in j `seq` GM.insert v j
-- | A number of distinct (key, value) pairs in the weighted DAWG.
size :: DAWG a Weight c -> Int
size d = size'I (rootID d) d
-- Worker: size of the sub-automaton rooted at @i@.  The weight stored on
-- the last outgoing edge already accounts for everything reachable through
-- the earlier edges (see 'weigh'), so only the last child is recursed into.
size'I :: ID -> DAWG a Weight c -> Int
size'I i d = add $ do
x <- case N.edges n of
[] -> Nothing
xs -> Just (fst $ last xs)
(j, v) <- N.onSym' x n
return $ v + size'I j d
where
n = nodeBy i d
-- 1 when the current node itself holds a value.
u = maybe 0 (const 1) (leafValue n d)
add m = u + maybe 0 id m
-----------------------------------------
-- Index
-----------------------------------------
-- | Position in a set of all dictionary entries with respect
-- to the lexicographic order.
index :: Enum a => [a] -> DAWG a Weight c -> Maybe Int
index xs d = index'I (map fromEnum xs) (rootID d) d
{-# SPECIALIZE index :: String -> DAWG Char Weight c -> Maybe Int #-}
-- Worker: @u@ accounts for a value at the current node, @v@ is the weight
-- stored on the taken edge (sum over preceding siblings, see 'weigh').
index'I :: [Sym] -> ID -> DAWG a Weight c -> Maybe Int
index'I [] i d = 0 <$ leafValue (nodeBy i d) d
index'I (x:xs) i d = do
let n = nodeBy i d
u = maybe 0 (const 1) (leafValue n d)
(j, v) <- N.onSym' x n
w <- index'I xs j d
return (u + v + w)
-- | Find dictionary entry given its index with respect to the
-- lexicographic order.
byIndex :: Enum a => Int -> DAWG a Weight c -> Maybe [a]
byIndex ix d = map toEnum <$> byIndex'I ix (rootID d) d
{-# SPECIALIZE byIndex :: Int -> DAWG Char Weight c -> Maybe String #-}
-- Worker, the inverse of index'I: either the entry ends at this node
-- (ix == 0 and the node holds a value), or we pick the last edge whose
-- accumulated weight does not exceed the remaining index and descend.
byIndex'I :: Int -> ID -> DAWG a Weight c -> Maybe [Sym]
byIndex'I ix i d
| ix < 0 = Nothing
| otherwise = here <|> there
where
n = nodeBy i d
u = maybe 0 (const 1) (leafValue n d)
here
| ix == 0 = [] <$ leafValue (nodeBy i d) d
| otherwise = Nothing
there = do
(k, w) <- Util.findLastLE cmp (N.labelVect n)
(x, j) <- T.byIndex k (N.transMap n)
xs <- byIndex'I (ix - u - w) j d
return (x:xs)
cmp w = compare w (ix - u)
| kawu/dawg | src/Data/DAWG/Static.hs | bsd-2-clause | 11,576 | 0 | 15 | 2,928 | 3,163 | 1,693 | 1,470 | 181 | 3 |
module Main where
import Text.RegexPR
import System.Directory
import Data.List
import Data.Tree
import System.Environment
-- Read the directory named on the command line, compute the import
-- dependencies of each source file, and print the merged dependency trees.
main :: IO ()
main = do
[ dir ] <- getArgs
files <- fmap filterSource $ getDirectoryContents dir
dependList <- mapM ( depend dir files ) files
mapM_ ( putStr . showTree [ ] . nubTree ) $
let xs = mergeTree $ map makeTree dependList in xs
-- Render a tree with ASCII art.  The [Bool] path records, per ancestor
-- level, whether further siblings follow (True -> draw a "|" column).
-- Local 'init'/'last' shadow the Prelude versions with total variants.
showTree :: [ Bool ] -> Tree String -> String
showTree n ( Node x ns ) =
makePre ( reverse n ) ++ x ++ "\n" ++
( concatMap ( showTree ( True : n ) ) ( init ns ) ++
maybe "" ( showTree ( False : n ) ) ( last ns ) )
where
init [ ] = [ ]
init [ x ] = [ ]
init ( x : xs ) = x : init xs
last [ ] = Nothing
last [ x ] = Just x
last ( _ : xs ) = last xs
makePre [ ] = ""
makePre [ _ ] = " + "
makePre ( True : rest ) = " | " ++ makePre rest
makePre ( False : rest ) = " " ++ makePre rest
-- Remove duplicate subtrees among each node's children, recursively.
nubTree :: Eq a => Tree a -> Tree a
nubTree (Node lbl children) = Node lbl (nub (map nubTree children))
-- Turn a (root, children) pair into a depth-two tree.
makeTree :: Eq a => ( a, [ a ] ) -> Tree a
makeTree (root, kids) = Node root [ Node k [] | k <- kids ]
-- Repeatedly graft trees into one another until nothing changes: keep only
-- the trees that absorbed something and recurse; when no tree changed,
-- the current forest is final.
mergeTree :: Eq a => [ Tree a ] -> [ Tree a ]
mergeTree ts = case map fst $ filter snd $ map ( `addTree_` ts ) ts of
[ ] -> ts
new -> mergeTree new
-- Like 'addTree', but never matches a tree against one with its own root
-- label (prevents trivial self-substitution).
addTree_ :: Eq a => Tree a -> [ Tree a ] -> ( Tree a, Bool )
addTree_ t@( Node x _ ) ts = addTree t $ filter ( ( /= x ) . rootLabel ) ts
-- Substitute the first pool tree with a matching root label for this node;
-- otherwise recurse into the children.  The Bool reports whether any
-- substitution happened anywhere in the tree.
addTree :: Eq a => Tree a -> [ Tree a ] -> ( Tree a, Bool )
addTree ( Node x ns ) ts = case filter ( ( == x ) . rootLabel ) ts of
[ ] -> ( Node x $ map fst rets, any snd rets )
t : _ -> ( t, True )
where
rets = map ( `addTree` ts ) ns
-- Extract the local modules imported by @fp@ (reads "<fp>.hs" or "<fp>.y");
-- capture group 1 of 'mkReg' yields the imported module name.
depend :: FilePath -> [ String ] -> String -> IO ( String, [ String ] )
depend dir fps fp = do
cnt <- readAnyFile [ dir ++ "/" ++ fp ++ ".hs", dir ++ "/" ++ fp ++ ".y" ]
return ( fp, map ( !! 1 ) $ ggetbrsRegexPR ( mkReg fps ) cnt )
-- Keep Haskell (.hs) and Happy (.y) sources, skip hidden entries, and
-- strip the extension from each remaining name.
filterSource :: [ FilePath ] -> [ FilePath ]
filterSource fps =
    [ stripSuffix f
    | f <- fps
    , not ( "." `isPrefixOf` f )
    , ".hs" `isSuffixOf` f || ".y" `isSuffixOf` f ]
-- Regex matching an (optionally qualified) import of one of the given
-- modules; capture group 1 holds the module name.
mkReg :: [ FilePath ] -> String
mkReg fps = "^import\\s+(?:qualified\\s+)?(" ++ intercalate "|" fps ++ ")($|\\s|\\()"
-- Drop everything from the first dot onwards (e.g. "Foo.hs" -> "Foo").
stripSuffix :: String -> String
stripSuffix name = takeWhile ( /= '.' ) name
-- Apply 'init' to the list n times.
initN :: Int -> [ a ] -> [ a ]
initN n xs = iterate init xs !! n
-- Pointwise disjunction of two predicates.
(|||) :: ( a -> Bool ) -> ( a -> Bool ) -> a -> Bool
f ||| g = \x -> f x || g x
-- Read the first file in the candidate list that actually exists.
-- Previously the function died with a non-exhaustive-pattern error when no
-- candidate existed; now it raises a descriptive IOError instead.
readAnyFile :: [ FilePath ] -> IO String
readAnyFile [ ] = ioError $ userError "readAnyFile: none of the candidate files exist"
readAnyFile ( f : fs ) = do
    ex <- doesFileExist f
    if ex then readFile f else readAnyFile fs
| YoshikuniJujo/zot_haskell | tools/putModTree.hs | bsd-3-clause | 2,464 | 20 | 13 | 657 | 1,238 | 630 | 608 | 63 | 8 |
-----------------------------------------------------------------------------
-- |
-- Module : Language.Haskell.Extension
-- Copyright : Isaac Jones 2003-2004
--
-- Maintainer : Isaac Jones <ijones@syntaxpolice.org>
-- Stability : alpha
-- Portability : portable
--
-- Haskell language extensions
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Language.Haskell.Extension (
        Extension(..),
  ) where

-- ------------------------------------------------------------
-- * Extension
-- ------------------------------------------------------------

-- NB: if you add a constructor to 'Extension', be sure also to
-- add it to Distribution.Compiler.extensionsTo_X_Flag
-- (where X is each compiler)

-- |This represents language extensions beyond Haskell 98 that are
-- supported by some implementations, usually in some special mode.
data Extension
  = OverlappingInstances
  | UndecidableInstances
  | IncoherentInstances
  | RecursiveDo
  | ParallelListComp
  | MultiParamTypeClasses
  | NoMonomorphismRestriction
  | FunctionalDependencies
  | Rank2Types
  | RankNTypes
  | PolymorphicComponents
  | ExistentialQuantification
  | ScopedTypeVariables
  | ImplicitParams
  | FlexibleContexts
  | FlexibleInstances
  | EmptyDataDecls
  | CPP
  | BangPatterns
  | TypeSynonymInstances
  | TemplateHaskell
  | ForeignFunctionInterface
  | InlinePhase
  | ContextStack
  | Arrows
  | Generics
  | NoImplicitPrelude
  | NamedFieldPuns
  | PatternGuards
  | GeneralizedNewtypeDeriving
  -- NOTE(review): the following three are presumably Hugs-specific
  -- (extensible records/TRex, restricted type synonyms, here documents)
  -- -- confirm against Distribution.Compiler.extensionsTo_X_Flag.
  | ExtensibleRecords
  | RestrictedTypeSynonyms
  | HereDocuments
  -- Derived 'Read'/'Show' give a simple textual encoding of the
  -- constructor names; 'Eq' allows membership tests.
  deriving (Show, Read, Eq)
| alekar/hugs | packages/Cabal/Language/Haskell/Extension.hs | bsd-3-clause | 3,050 | 2 | 6 | 536 | 156 | 108 | 48 | 37 | 0 |
-- | A collection of utility functions to mediate between 'Maybe', 'Either',
-- other 'Monad's, and exceptions.
module Control.MaybeEitherMonad
where
import Control.Exception
import Data.Maybe (fromMaybe)
-- | Extract the value from a 'Just', or invoke the monad's 'fail'
-- (with the message @\"Nothing\"@) when given 'Nothing'.
maybeFail :: MonadFail m => Maybe a -> m a
maybeFail (Just x) = return x
maybeFail Nothing  = fail "Nothing"
-- | Extract the 'Right' value; a 'Left' becomes a call to 'fail' with
-- the contained 'String' as the error message.
eitherFailS :: MonadFail m => Either String a -> m a
eitherFailS (Left msg) = fail msg
eitherFailS (Right x)  = return x
-- | Extract the 'Right' value; a 'Left' becomes a call to 'fail' with
-- the 'show'n error value as the message.
eitherFail :: (Show s, MonadFail m) => Either s a -> m a
eitherFail (Left e)  = fail (show e)
eitherFail (Right x) = return x
-- | Thrown when 'maybeFail' runs into a 'Nothing'
-- (also used by 'maybeThrow').
data NothingException = NothingException
    deriving (Show)

-- Empty instance body: the default 'Exception' methods (via 'Show')
-- are sufficient for this carrier-less exception.
instance Exception NothingException where
-- | Thrown when 'eitherThrowS' runs into a 'Left'; carries the
-- 'Left' error message as its payload.
data LeftException = LeftException String
    deriving (Show)

-- Empty instance body: the default 'Exception' methods (via 'Show')
-- are sufficient.
instance Exception LeftException where
-- | Get 'Just' the value, or throw a 'NothingException'
-- (imprecise exception via 'throw').
maybeThrow :: Maybe a -> a
maybeThrow (Just x) = x
maybeThrow Nothing  = throw NothingException
-- | Get the 'Right' value, or wrap the 'Left' message in a
-- 'LeftException' and throw it.
eitherThrowS :: Either String a -> a
eitherThrowS (Left msg) = throw (LeftException msg)
eitherThrowS (Right x)  = x
-- | Get the 'Right' value, or throw the 'Left' value itself.
-- NB: unlike 'eitherThrowS' this does NOT wrap in 'LeftException';
-- the 'Left' payload must already be an 'Exception'.
eitherThrow :: Exception err => Either err a -> a
eitherThrow (Left e)  = throw e
eitherThrow (Right x) = x
-- | Get the 'Right' value, or map the 'Left' value to an exception
-- with @f@ and throw the result.
eitherThrowWith :: Exception err => (x -> err) -> Either x a -> a
eitherThrowWith f (Left x)  = throw (f x)
eitherThrowWith _ (Right a) = a
-- | @optionally f v@ runs the action @f@ on the value inside @v@,
-- or does nothing when @v@ is 'Nothing'.
optionally :: Monad m => (a -> m ()) -> Maybe a -> m ()
optionally _ Nothing  = return ()
optionally f (Just x) = f x
| tdammers/templar | src/Control/MaybeEitherMonad.hs | bsd-3-clause | 1,714 | 0 | 10 | 310 | 396 | 206 | 190 | 25 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CreateVpnConnectionRoute
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a static route associated with a VPN connection between an existing
-- virtual private gateway and a VPN customer gateway. The static route allows
-- traffic to be routed from the virtual private gateway to the VPN customer
-- gateway.
--
-- For more information about VPN connections, see <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_VPN.html Adding a Hardware VirtualPrivate Gateway to Your VPC> in the /Amazon Virtual Private Cloud User Guide/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CreateVpnConnectionRoute.html>
module Network.AWS.EC2.CreateVpnConnectionRoute
(
-- * Request
CreateVpnConnectionRoute
-- ** Request constructor
, createVpnConnectionRoute
-- ** Request lenses
, cvcrDestinationCidrBlock
, cvcrVpnConnectionId
-- * Response
, CreateVpnConnectionRouteResponse
-- ** Response constructor
, createVpnConnectionRouteResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- Request record: both fields are required by the EC2 query API
-- (see the 'ToQuery' instance below).
data CreateVpnConnectionRoute = CreateVpnConnectionRoute
    { _cvcrDestinationCidrBlock :: Text -- ^ CIDR block of the customer network's local subnet
    , _cvcrVpnConnectionId      :: Text -- ^ ID of the VPN connection to attach the route to
    } deriving (Eq, Ord, Read, Show)
-- | Smart constructor for 'CreateVpnConnectionRoute'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cvcrDestinationCidrBlock' @::@ 'Text'
--
-- * 'cvcrVpnConnectionId' @::@ 'Text'
--
createVpnConnectionRoute :: Text -- ^ 'cvcrVpnConnectionId'
                         -> Text -- ^ 'cvcrDestinationCidrBlock'
                         -> CreateVpnConnectionRoute
createVpnConnectionRoute connId cidr = CreateVpnConnectionRoute
    { _cvcrDestinationCidrBlock = cidr
    , _cvcrVpnConnectionId      = connId
    }
-- | The CIDR block associated with the local subnet of the customer network.
cvcrDestinationCidrBlock :: Lens' CreateVpnConnectionRoute Text
cvcrDestinationCidrBlock = lens getBlock setBlock
  where
    getBlock         = _cvcrDestinationCidrBlock
    setBlock s block = s { _cvcrDestinationCidrBlock = block }
-- | The ID of the VPN connection.
cvcrVpnConnectionId :: Lens' CreateVpnConnectionRoute Text
cvcrVpnConnectionId = lens getId setId
  where
    getId        = _cvcrVpnConnectionId
    setId s vcid = s { _cvcrVpnConnectionId = vcid }
-- Empty marker type: this operation returns no payload on success
-- (see 'nullResponse' in the 'AWSRequest' instance below).
data CreateVpnConnectionRouteResponse = CreateVpnConnectionRouteResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'CreateVpnConnectionRouteResponse' constructor.
-- The response carries no fields, so this is a bare constant.
createVpnConnectionRouteResponse :: CreateVpnConnectionRouteResponse
createVpnConnectionRouteResponse = CreateVpnConnectionRouteResponse
-- All EC2 query actions are posted to the service root.
instance ToPath CreateVpnConnectionRoute where
    toPath _ = "/"
-- Serialise both fields as query-string parameters.
instance ToQuery CreateVpnConnectionRoute where
    toQuery rq = mconcat
        [ "DestinationCidrBlock" =? _cvcrDestinationCidrBlock rq
        , "VpnConnectionId"      =? _cvcrVpnConnectionId      rq
        ]
-- Empty instance: uses the default 'ToHeaders' implementation
-- (no request-specific headers).
instance ToHeaders CreateVpnConnectionRoute
instance AWSRequest CreateVpnConnectionRoute where
    type Sv CreateVpnConnectionRoute = EC2
    type Rs CreateVpnConnectionRoute = CreateVpnConnectionRouteResponse

    -- Sent as a POST of the "CreateVpnConnectionRoute" action; the reply
    -- carries no payload, so a constant null response is returned.
    request  = post "CreateVpnConnectionRoute"
    response = nullResponse CreateVpnConnectionRouteResponse
| romanb/amazonka | amazonka-ec2/gen/Network/AWS/EC2/CreateVpnConnectionRoute.hs | mpl-2.0 | 4,196 | 0 | 9 | 825 | 398 | 245 | 153 | 55 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Utilities for desugaring
This module exports some utility functions of no great interest.
-}
{-# LANGUAGE CPP #-}
-- | Utility functions for constructing Core syntax, principally for desugaring
module Language.Haskell.Liquid.Desugar710.DsUtils (
EquationInfo(..),
firstPat, shiftEqns,
MatchResult(..), CanItFail(..), CaseAlt(..),
cantFailMatchResult, alwaysFailMatchResult,
extractMatchResult, combineMatchResults,
adjustMatchResult, adjustMatchResultDs,
mkCoLetMatchResult, mkViewMatchResult, mkGuardedMatchResult,
matchCanFail, mkEvalMatchResult,
mkCoPrimCaseMatchResult, mkCoAlgCaseMatchResult, mkCoSynCaseMatchResult,
wrapBind, wrapBinds,
mkErrorAppDs, mkCoreAppDs, mkCoreAppsDs, mkCastDs,
seqVar,
-- LHs tuples
mkLHsVarPatTup, mkLHsPatTup, mkVanillaTuplePat,
mkBigLHsVarTup, mkBigLHsTup, mkBigLHsVarPatTup, mkBigLHsPatTup,
mkSelectorBinds,
selectSimpleMatchVarL, selectMatchVars, selectMatchVar,
mkOptTickBox, mkBinaryTickBox
) where
-- #include "HsVersions.h"
import {-# SOURCE #-} Language.Haskell.Liquid.Desugar710.Match ( matchSimply )
import HsSyn
import TcHsSyn
import Coercion( Coercion, isReflCo )
import TcType( tcSplitTyConApp )
import CoreSyn
import DsMonad
import {-# SOURCE #-} Language.Haskell.Liquid.Desugar710.DsExpr ( dsLExpr )
import CoreUtils
import MkCore
import MkId
import Id
import Literal
import TyCon
import ConLike
import DataCon
import PatSyn
import Type
import TysPrim
import TysWiredIn
import BasicTypes
import UniqSet
import UniqSupply
import Module
import PrelNames
import Outputable
import SrcLoc
import Util
import DynFlags
import FastString
import TcEvidence
import Control.Monad ( zipWithM )
{-
************************************************************************
* *
\subsection{ Selecting match variables}
* *
************************************************************************
We're about to match against some patterns. We want to make some
@Ids@ to use as match variables. If a pattern has an @Id@ readily at
hand, which should indeed be bound to the pattern as a whole, then use it;
otherwise, make one up.
-}
-- | Choose a match variable for a located pattern;
-- strips the location and defers to 'selectMatchVar'.
selectSimpleMatchVarL :: LPat Id -> DsM Id
selectSimpleMatchVarL = selectMatchVar . unLoc
-- (selectMatchVars ps tys) chooses variables of type tys
-- to use for matching ps against. If the pattern is a variable,
-- we try to use that, to save inventing lots of fresh variables.
--
-- OLD, but interesting note:
-- But even if it is a variable, its type might not match. Consider
-- data T a where
-- T1 :: Int -> T Int
-- T2 :: a -> T a
--
-- f :: T a -> a -> Int
-- f (T1 i) (x::Int) = x
-- f (T2 i) (y::a) = 0
-- Then we must not choose (x::Int) as the matching variable!
-- And nowadays we won't, because the (x::Int) will be wrapped in a CoPat
-- | Choose one match variable per pattern (see 'selectMatchVar').
selectMatchVars :: [Pat Id] -> DsM [Id]
selectMatchVars = mapM selectMatchVar
-- | Choose a variable to match this pattern against: reuse the
-- pattern's own binder where there is one, otherwise invent a fresh
-- system local of the pattern's type.
selectMatchVar :: Pat Id -> DsM Id
selectMatchVar pat = case pat of
    BangPat p   -> selectMatchVar (unLoc p)
    LazyPat p   -> selectMatchVar (unLoc p)
    ParPat p    -> selectMatchVar (unLoc p)
    VarPat var  -> return (localiseId var)  -- Note [Localise pattern binders]
    AsPat var _ -> return (unLoc var)
    other       -> newSysLocalDs (hsPatType other)
                   -- no handy binder: make one up
{-
Note [Localise pattern binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider module M where
[Just a] = e
After renaming it looks like
module M where
[Just M.a] = e
We don't generalise, since it's a pattern binding, monomorphic, etc,
so after desugaring we may get something like
M.a = case e of (v:_) ->
case v of Just M.a -> M.a
Notice the "M.a" in the pattern; after all, it was in the original
pattern. However, after optimisation those pattern binders can become
let-binders, and then end up floated to top level. They have a
different *unique* by then (the simplifier is good about maintaining
proper scoping), but it's BAD to have two top-level bindings with the
External Name M.a, because that turns into two linker symbols for M.a.
It's quite rare for this to actually *happen* -- the only case I know
of is tc003 compiled with the 'hpc' way -- but that only makes it
all the more annoying.
To avoid this, we craftily call 'localiseId' in the desugarer, which
simply turns the External Name for the Id into an Internal one, but
doesn't change the unique. So the desugarer produces this:
M.a{r8} = case e of (v:_) ->
case v of Just a{r8} -> M.a{r8}
The unique is still 'r8', but the binding site in the pattern
is now an Internal Name. Now the simplifier's usual mechanisms
will propagate that Name to all the occurrence sites, as well as
un-shadowing it, so we'll get
M.a{r8} = case e of (v:_) ->
case v of Just a{s77} -> a{s77}
In fact, even CoreSubst.simplOptExpr will do this, and simpleOptExpr
runs on the output of the desugarer, so all is well by the end of
the desugaring pass.
************************************************************************
* *
* type synonym EquationInfo and access functions for its pieces *
* *
************************************************************************
\subsection[EquationInfo-synonym]{@EquationInfo@: a useful synonym}
The ``equation info'' used by @match@ is relatively complicated and
worthy of a type synonym and a few handy functions.
-}
-- | First pattern of an equation.  Equations always carry at least one
-- pattern (the original code asserted @notNull (eqn_pats eqn)@).
firstPat :: EquationInfo -> Pat Id
firstPat = head . eqn_pats
-- | Drop the first pattern in each equation.
shiftEqns :: [EquationInfo] -> [EquationInfo]
shiftEqns = map drop_first
  where
    drop_first eqn = eqn { eqn_pats = tail (eqn_pats eqn) }
-- Functions on MatchResults
-- | Can this match result fall through to the failure continuation?
matchCanFail :: MatchResult -> Bool
matchCanFail (MatchResult can_fail _) =
    case can_fail of
      CanFail  -> True
      CantFail -> False
-- | A match that unconditionally falls through to its failure expression.
alwaysFailMatchResult :: MatchResult
alwaysFailMatchResult = MatchResult CanFail return
-- | A match that always succeeds with the given expression,
-- ignoring the failure continuation.
cantFailMatchResult :: CoreExpr -> MatchResult
cantFailMatchResult expr = MatchResult CantFail (const (return expr))
-- | Turn a 'MatchResult' into a 'CoreExpr', supplying the failure
-- expression.  For fallible matches the failure is shared via a
-- let-bound failure thunk (see 'mkFailurePair').
extractMatchResult :: MatchResult -> CoreExpr -> DsM CoreExpr
extractMatchResult (MatchResult CantFail match_fn) _ =
    match_fn (error "It can't fail!")
extractMatchResult (MatchResult CanFail match_fn) fail_expr = do
    (fail_bind, if_it_fails) <- mkFailurePair fail_expr
    mkCoreLet fail_bind <$> match_fn if_it_fails
-- | Sequence two match results: try the first; when it fails, fall
-- through to the second.  The combined result can fail only if the
-- second one can.
combineMatchResults :: MatchResult -> MatchResult -> MatchResult
combineMatchResults match_result1@(MatchResult CantFail _) _
  = match_result1   -- the first match never falls through
combineMatchResults (MatchResult CanFail body_fn1)
                    (MatchResult can_it_fail2 body_fn2)
  = MatchResult can_it_fail2 $ \fail -> do
      body2 <- body_fn2 fail
      -- Share the second match's code via a failure thunk, since the
      -- first match may mention it several times.
      (fail_bind, duplicatable_expr) <- mkFailurePair body2
      body1 <- body_fn1 duplicatable_expr
      return (Let fail_bind body1)
-- | Post-process the generated expression with a pure wrapper.
adjustMatchResult :: DsWrapper -> MatchResult -> MatchResult
adjustMatchResult encl_fn (MatchResult can_it_fail body_fn)
  = MatchResult can_it_fail (fmap encl_fn . body_fn)
-- | Post-process the generated expression with a monadic wrapper.
adjustMatchResultDs :: (CoreExpr -> DsM CoreExpr) -> MatchResult -> MatchResult
adjustMatchResultDs encl_fn (MatchResult can_it_fail body_fn)
  = MatchResult can_it_fail (\fail -> body_fn fail >>= encl_fn)
-- | Wrap an expression in one 'wrapBind' per (new, old) variable pair.
wrapBinds :: [(Var,Var)] -> CoreExpr -> CoreExpr
wrapBinds prs e = foldr (\(new, old) body -> wrapBind new old body) e prs
-- | Bind @new@ to @old@ unless they are the same variable.  Works for
-- term variables, type variables and coercion variables alike
-- (hence 'varToCoreExpr').
wrapBind :: Var -> Var -> CoreExpr -> CoreExpr
wrapBind new old body =
    if new == old
      then body
      else Let (NonRec new (varToCoreExpr old)) body
-- | Force a variable to WHNF before evaluating the body:
-- @case var of var { DEFAULT -> body }@.
seqVar :: Var -> CoreExpr -> CoreExpr
seqVar var body = Case scrut var res_ty [(DEFAULT, [], body)]
  where
    scrut  = Var var
    res_ty = exprType body
-- | Wrap the generated expression in a let-binding.
mkCoLetMatchResult :: CoreBind -> MatchResult -> MatchResult
mkCoLetMatchResult bind match_result
  = adjustMatchResult (mkCoreLet bind) match_result
-- | @mkViewMatchResult var' viewExpr var mr@ makes the expression
-- @let var' = viewExpr var in mr@.
mkViewMatchResult :: Id -> CoreExpr -> Id -> MatchResult -> MatchResult
mkViewMatchResult var' viewExpr var
  = adjustMatchResult (mkCoreLet view_bind)
  where
    view_bind = NonRec var' (mkCoreAppDs viewExpr (Var var))
-- | Force the scrutinee variable before running the match.
mkEvalMatchResult :: Id -> Type -> MatchResult -> MatchResult
mkEvalMatchResult var ty = adjustMatchResult force_var
  where
    force_var e = Case (Var var) var ty [(DEFAULT, [], e)]
-- | Guard a match with a boolean predicate; when the predicate is
-- false we fall through to the failure expression, so the result
-- can always fail.
mkGuardedMatchResult :: CoreExpr -> MatchResult -> MatchResult
mkGuardedMatchResult pred_expr (MatchResult _ body_fn)
  = MatchResult CanFail $ \fail -> do
      body <- body_fn fail
      return (mkIfThenElse pred_expr body fail)
-- | Build a case over unlifted literals.  Always fallible: a DEFAULT
-- alternative carrying the failure expression is added in front.
mkCoPrimCaseMatchResult :: Id                       -- Scrutinee
                        -> Type                     -- Type of the case
                        -> [(Literal, MatchResult)] -- Alternatives
                        -> MatchResult              -- Literals are all unlifted
mkCoPrimCaseMatchResult var ty match_alts
  = MatchResult CanFail mk_case
  where
    -- Core requires case alternatives in ascending order
    sorted_alts = sortWith fst match_alts

    mk_case fail = do
        alts <- mapM (mk_alt fail) sorted_alts
        return (Case (Var var) var ty ((DEFAULT, [], fail) : alts))

    mk_alt fail (lit, MatchResult _ body_fn) = do
        -- original code asserted: not (litIsLifted lit)
        body <- body_fn fail
        return (LitAlt lit, [], body)
-- | One alternative of a constructor (or pattern-synonym) case, as
-- assembled by the match compiler.
data CaseAlt a = MkCaseAlt{ alt_pat :: a,               -- what is matched (DataCon or PatSyn)
                            alt_bndrs :: [CoreBndr],    -- binders; *include* tyvars and dicts
                            alt_wrapper :: HsWrapper,   -- wrapper applied when matching
                                                        -- (used in mkPatSynCase)
                            alt_result :: MatchResult } -- the right-hand side
-- | Build a match over an algebraic scrutinee.  Newtypes become a
-- 'let' (there is nothing to scrutinise); fake parallel-array
-- constructors get a special length-based case; everything else is a
-- genuine data-constructor case (see 'mkDataConCase').
mkCoAlgCaseMatchResult
  :: DynFlags
  -> Id                 -- Scrutinee
  -> Type               -- Type of exp
  -> [CaseAlt DataCon]  -- Alternatives (bndrs *include* tyvars, dicts)
  -> MatchResult
mkCoAlgCaseMatchResult dflags var ty match_alts
  | isNewtype  -- Newtype case; use a let
  = -- ASSERT( null (tail match_alts) && null (tail arg_ids1) )
    mkCoLetMatchResult (NonRec arg_id1 newtype_rhs) match_result1

  | isPArrFakeAlts match_alts
  = MatchResult CanFail $ mkPArrCase dflags var ty (sort_alts match_alts)
  | otherwise
  = mkDataConCase var ty match_alts
  where
    isNewtype = isNewTyCon (dataConTyCon (alt_pat alt1))

        -- [Interesting: because of GADTs, we can't rely on the type of
        -- the scrutinised Id to be sufficiently refined to have a TyCon in it]

    alt1@MkCaseAlt{ alt_bndrs = arg_ids1, alt_result = match_result1 }
      = {- ASSERT( notNull match_alts ) -} head match_alts
    -- Stuff for newtype
    arg_id1       = {- ASSERT( notNull arg_ids1 ) -} head arg_ids1
    var_ty        = idType var
    (tc, ty_args) = tcSplitTyConApp var_ty      -- Don't look through newtypes
                                                -- (not that splitTyConApp does, these days)
    newtype_rhs = unwrapNewTypeBody tc ty_args (Var var)

    --- Stuff for parallel arrays
    --
    -- Concerning `isPArrFakeAlts':
    --
    -- * it is *not* sufficient to just check the type of the type
    --   constructor, as we have to be careful not to confuse the real
    --   representation of parallel arrays with the fake constructors;
    --   moreover, a list of alternatives must not mix fake and real
    --   constructors (this is checked earlier on)
    --
    -- FIXME: We actually go through the whole list and make sure that
    --        either all or none of the constructors are fake parallel
    --        array constructors. This is to spot equations that mix fake
    --        constructors with the real representation defined in
    --        `PrelPArr'. It would be nicer to spot this situation
    --        earlier and raise a proper error message, but it can really
    --        only happen in `PrelPArr' anyway.
    --
    isPArrFakeAlts :: [CaseAlt DataCon] -> Bool
    isPArrFakeAlts [alt] = isPArrFakeCon (alt_pat alt)
    isPArrFakeAlts (alt:alts) =
      case (isPArrFakeCon (alt_pat alt), isPArrFakeAlts alts) of
        (True , True ) -> True
        (False, False) -> False
        _              -> panic "DsUtils: you may not mix `[:...:]' with `PArr' patterns"
    isPArrFakeAlts [] = panic "DsUtils: unexpectedly found an empty list of PArr fake alternatives"
-- | Build a match over a pattern synonym; always fallible since the
-- synonym's matcher may reject the scrutinee.
mkCoSynCaseMatchResult :: Id -> Type -> CaseAlt PatSyn -> MatchResult
mkCoSynCaseMatchResult var ty alt
  = MatchResult CanFail (mkPatSynCase var ty alt)
-- | Order alternatives by constructor tag, as Core requires.
sort_alts :: [CaseAlt DataCon] -> [CaseAlt DataCon]
sort_alts = sortWith con_tag
  where
    con_tag alt = dataConTag (alt_pat alt)
-- | Desugar one pattern-synonym alternative into a call of the
-- synonym's matcher function, applied to the scrutinee, the success
-- continuation (the alternative's RHS lambda-abstracted over its
-- binders) and the failure expression.
mkPatSynCase :: Id -> Type -> CaseAlt PatSyn -> CoreExpr -> DsM CoreExpr
mkPatSynCase var ty alt fail = do
    -- NB: the do-bound 'matcher' deliberately shadows the where-bound
    -- one: first the matcher Id is instantiated at 'ty' and desugared.
    matcher <- dsLExpr $ mkLHsWrap wrapper $ nlHsTyApp matcher [ty]
    let MatchResult _ mkCont = match_result
    cont <- mkCoreLams bndrs <$> mkCont fail
    return $ mkCoreAppsDs matcher [Var var, ensure_unstrict cont, Lam voidArgId fail]
  where
    MkCaseAlt{ alt_pat = psyn,
               alt_bndrs = bndrs,
               alt_wrapper = wrapper,
               alt_result = match_result} = alt
    (matcher, needs_void_lam) = patSynMatcher psyn

    -- See Note [Matchers and builders for pattern synonyms] in PatSyns
    -- on these extra Void# arguments
    ensure_unstrict cont | needs_void_lam = Lam voidArgId cont
                         | otherwise      = cont
-- | Build a genuine data-constructor case over the scrutinee.  The
-- result can fail only when some constructor of the type is not
-- covered, or when a covered alternative can itself fail.
mkDataConCase :: Id -> Type -> [CaseAlt DataCon] -> MatchResult
mkDataConCase _   _  []            = panic "mkDataConCase: no alternatives"
mkDataConCase var ty alts@(alt1:_) = MatchResult fail_flag mk_case
  where
    con1          = alt_pat alt1
    tycon         = dataConTyCon con1
    data_cons     = tyConDataCons tycon
    match_results = map alt_result alts

    sorted_alts :: [CaseAlt DataCon]
    sorted_alts = sort_alts alts

    var_ty       = idType var
    (_, ty_args) = tcSplitTyConApp var_ty -- Don't look through newtypes
                                          -- (not that splitTyConApp does, these days)

    mk_case :: CoreExpr -> DsM CoreExpr
    mk_case fail = do
        alts <- mapM (mk_alt fail) sorted_alts
        return $ mkWildCase (Var var) (idType var) ty (mk_default fail ++ alts)

    mk_alt :: CoreExpr -> CaseAlt DataCon -> DsM CoreAlt
    mk_alt fail MkCaseAlt{ alt_pat = con,
                           alt_bndrs = args,
                           alt_result = MatchResult _ body_fn }
      = do { body <- body_fn fail
           ; case dataConBoxer con of {
                Nothing -> return (DataAlt con, args, body) ;
                Just (DCB boxer) ->
           -- Unpack via the constructor's boxer: fresh representation
           -- binders plus bindings reconstructing the source binders.
        do { us <- newUniqueSupply
           ; let (rep_ids, binds) = initUs_ us (boxer ty_args args)
           ; return (DataAlt con, rep_ids, mkLets binds body) } } }

    -- DEFAULT alternative only when some constructor is uncovered
    mk_default :: CoreExpr -> [CoreAlt]
    mk_default fail | exhaustive_case = []
                    | otherwise       = [(DEFAULT, [], fail)]

    fail_flag :: CanItFail
    fail_flag | exhaustive_case
              = foldr orFail CantFail [can_it_fail | MatchResult can_it_fail _ <- match_results]
              | otherwise
              = CanFail

    mentioned_constructors = mkUniqSet $ map alt_pat alts
    un_mentioned_constructors
        = mkUniqSet data_cons `minusUniqSet` mentioned_constructors
    exhaustive_case = isEmptyUniqSet un_mentioned_constructors
--- Stuff for parallel arrays
--
-- * the following is to desugar cases over fake constructors for
-- parallel arrays, which are introduced by `tidy1' in the `PArrPat'
-- case
--
-- | Desugar a case over fake parallel-array constructors: scrutinise
-- the array's length, then bind each constructor argument to an
-- indexed array element.
mkPArrCase :: DynFlags -> Id -> Type -> [CaseAlt DataCon] -> CoreExpr -> DsM CoreExpr
mkPArrCase dflags var ty sorted_alts fail = do
    lengthP <- dsDPHBuiltin lengthPVar
    alt <- unboxAlt
    return (mkWildCase (len lengthP) intTy ty [alt])
  where
    elemTy = case splitTyConApp (idType var) of
        (_, [elemTy]) -> elemTy
        _             -> panic panicMsg
    panicMsg = "DsUtils.mkCoAlgCaseMatchResult: not a parallel array?"
    len lengthP = mkApps (Var lengthP) [Type elemTy, Var var]
    --
    -- unbox the Int length to an Int# and match on that
    unboxAlt = do
        l <- newSysLocalDs intPrimTy
        indexP <- dsDPHBuiltin indexPVar
        alts <- mapM (mkAlt indexP) sorted_alts
        return (DataAlt intDataCon, [l], mkWildCase (Var l) intPrimTy ty (dft : alts))
      where
        dft = (DEFAULT, [], fail)
    --
    -- each alternative matches one array length (corresponding to one
    -- fake array constructor), so the match is on a literal; each
    -- alternative's body is extended by a local binding for each
    -- constructor argument, which are bound to array elements starting
    -- with the first
    --
    mkAlt indexP alt@MkCaseAlt{alt_result = MatchResult _ bodyFun} = do
        body <- bodyFun fail
        return (LitAlt lit, [], mkCoreLets binds body)
      where
        lit   = MachInt $ toInteger (dataConSourceArity (alt_pat alt))
        binds = [NonRec arg (indexExpr i) | (i, arg) <- zip [1..] (alt_bndrs alt)]
        --
        indexExpr i = mkApps (Var indexP) [Type elemTy, Var var, mkIntExpr dflags i]
{-
************************************************************************
* *
\subsection{Desugarer's versions of some Core functions}
* *
************************************************************************
-}
-- | Apply an error function to a type and a location-prefixed message.
mkErrorAppDs :: Id   -- The error function
             -> Type -- Type to which it should be applied
             -> SDoc -- The error message string to pass
             -> DsM CoreExpr
mkErrorAppDs err_id ty msg = do
    src_loc <- getSrcSpanDs
    dflags  <- getDynFlags
    -- Prefix the message with the source location, separated by '|'.
    let full_msg = showSDoc dflags (hcat [ppr src_loc, text "|", msg])
        -- mkMachString produces a String# literal
        core_msg = Lit (mkMachString full_msg)
    return (mkApps (Var err_id) [Type ty, core_msg])
{-
'mkCoreAppDs' and 'mkCoreAppsDs' hand the special-case desugaring of 'seq'.
Note [Desugaring seq (1)] cf Trac #1031
~~~~~~~~~~~~~~~~~~~~~~~~~
f x y = x `seq` (y `seq` (# x,y #))
The [CoreSyn let/app invariant] means that, other things being equal, because
the argument to the outer 'seq' has an unlifted type, we'll use call-by-value thus:
f x y = case (y `seq` (# x,y #)) of v -> x `seq` v
But that is bad for two reasons:
(a) we now evaluate y before x, and
(b) we can't bind v to an unboxed pair
Seq is very, very special! So we recognise it right here, and desugar to
case x of _ -> case y of _ -> (# x,y #)
Note [Desugaring seq (2)] cf Trac #2273
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
let chp = case b of { True -> fst x; False -> 0 }
in chp `seq` ...chp...
Here the seq is designed to plug the space leak of retaining (snd x)
for too long.
If we rely on the ordinary inlining of seq, we'll get
let chp = case b of { True -> fst x; False -> 0 }
case chp of _ { I# -> ...chp... }
But since chp is cheap, and the case is an alluring context, we'll
inline chp into the case scrutinee. Now there is only one use of chp,
so we'll inline a second copy. Alas, we've now ruined the purpose of
the seq, by re-introducing the space leak:
case (case b of {True -> fst x; False -> 0}) of
I# _ -> ...case b of {True -> fst x; False -> 0}...
We can try to avoid doing this by ensuring that the binder-swap in the
case happens, so we get this at an early stage:
case chp of chp2 { I# -> ...chp2... }
But this is fragile. The real culprit is the source program. Perhaps we
should have said explicitly
let !chp2 = chp in ...chp2...
But that's painful. So the code here does a little hack to make seq
more robust: a saturated application of 'seq' is turned *directly* into
the case expression, thus:
x `seq` e2 ==> case x of x -> e2 -- Note shadowing!
e1 `seq` e2 ==> case x of _ -> e2
So we desugar our example to:
let chp = case b of { True -> fst x; False -> 0 }
case chp of chp { I# -> ...chp... }
And now all is well.
The reason it's a hack is because if you define mySeq=seq, the hack
won't work on mySeq.
Note [Desugaring seq (3)] cf Trac #2409
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The isLocalId ensures that we don't turn
True `seq` e
into
case True of True { ... }
which stupidly tries to bind the datacon 'True'.
-}
-- | Desugarer version of 'mkCoreApp': recognises a saturated
-- application of 'seq' and compiles it directly to a Case
-- (see Notes [Desugaring seq (1)..(3)] above).
mkCoreAppDs  :: CoreExpr -> CoreExpr -> CoreExpr
mkCoreAppDs (Var f `App` Type ty1 `App` Type ty2 `App` arg1) arg2
  | f `hasKey` seqIdKey            -- Note [Desugaring seq (1), (2)]
  = Case arg1 case_bndr ty2 [(DEFAULT,[],arg2)]
  where
    case_bndr = case arg1 of
                   Var v1 | isLocalId v1 -> v1 -- Note [Desugaring seq (2) and (3)]
                   _                     -> mkWildValBinder ty1

mkCoreAppDs fun arg = mkCoreApp fun arg -- The rest is done in MkCore
-- | Apply a function to several arguments, going through
-- 'mkCoreAppDs' for each application (so 'seq' is handled).
mkCoreAppsDs :: CoreExpr -> [CoreExpr] -> CoreExpr
mkCoreAppsDs = foldl mkCoreAppDs
-- | A desugarer-specific version of CoreUtils.mkCast.  In the
-- immediate output of the desugarer we can have apparently-mismatched
-- coercions, e.g.
--       let a = b
--       in (x :: a) |> (co :: b ~ Int)
-- Lint knows about type-bindings for let and does not complain, so
-- here we skip the assertion checks of CoreUtils.mkCast and do less
-- peephole optimisation: only reflexive coercions are dropped.
mkCastDs :: CoreExpr -> Coercion -> CoreExpr
mkCastDs e co = if isReflCo co then e else Cast e co
{-
************************************************************************
* *
\subsection[mkSelectorBind]{Make a selector bind}
* *
************************************************************************
This is used in various places to do with lazy patterns.
For each binder $b$ in the pattern, we create a binding:
\begin{verbatim}
b = case v of pat' -> b'
\end{verbatim}
where @pat'@ is @pat@ with each binder @b@ cloned into @b'@.
ToDo: making these bindings should really depend on whether there's
much work to be done per binding. If the pattern is complex, it
should be de-mangled once, into a tuple (and then selected from).
Otherwise the demangling can be in-line in the bindings (as here).
Boring! Boring! One error message per binder. The above ToDo is
even more helpful. Something very similar happens for pattern-bound
expressions.
Note [mkSelectorBinds]
~~~~~~~~~~~~~~~~~~~~~~
Given p = e, where p binds x,y
we are going to make EITHER
EITHER (A) v = e (where v is fresh)
x = case v of p -> x
y = case v of p -> y
OR (B) t = case e of p -> (x,y)
x = case t of (x,_) -> x
y = case t of (_,y) -> y
We do (A) when
* Matching the pattern is cheap so we don't mind
doing it twice.
* Or if the pattern binds only one variable (so we'll only
match once)
* AND the pattern can't fail (else we tiresomely get two inexhaustive
pattern warning messages)
Otherwise we do (B). Really (A) is just an optimisation for very common
cases like
Just x = e
(p,q) = e
-}
-- | Make one binding per binder of a (lazy) pattern binding
-- @pat = rhs@, choosing between strategies (A) and (B) of
-- Note [mkSelectorBinds] above.
mkSelectorBinds :: [[Tickish Id]] -- ticks to add, possibly
                -> LPat Id        -- The pattern
                -> CoreExpr       -- Expression to which the pattern is bound
                -> DsM [(Id,CoreExpr)]

-- Trivial case: a bare variable pattern needs no matching at all.
mkSelectorBinds ticks (L _ (VarPat v)) val_expr
  = return [(v, case ticks of
                  [t] -> mkOptTickBox t val_expr
                  _   -> val_expr)]

mkSelectorBinds ticks pat val_expr
  | null binders
  = return []

  | isSingleton binders || is_simple_lpat pat
    -- Strategy (A): match the pattern once per binder.
    -- See Note [mkSelectorBinds]
  = do { val_var <- newSysLocalDs (hsLPatType pat)
        -- Make up 'v' in Note [mkSelectorBinds]
        -- NB: give it the type of *pattern* p, not the type of the *rhs* e.
        -- This does not matter after desugaring, but there's a subtle
        --     issue with implicit parameters. Consider
        --         (x,y) = ?i
        -- Then, ?i is given type {?i :: Int}, a PredType, which is opaque
        -- to the desugarer.  (Why opaque?  Because newtypes have to be.  Why
        -- does it get that type?  So that when we abstract over it we get the
        -- right top-level type  (?i::Int) => ...)
        --
        -- So to get the type of 'v', use the pattern not the rhs.  Often more
        -- efficient too.

        -- For the error message we make one error-app, to avoid duplication.
        -- But we need it at different types, so we make it polymorphic:
        --     err_var = /\a. iRREFUT_PAT_ERR a "blah blah blah"
       ; err_app <- mkErrorAppDs iRREFUT_PAT_ERROR_ID alphaTy (ppr pat)
       ; err_var <- newSysLocalDs (mkForAllTy alphaTyVar alphaTy)
       ; binds <- zipWithM (mk_bind val_var err_var) ticks' binders
       ; return ( (val_var, val_expr) :
                  (err_var, Lam alphaTyVar err_app) :
                  binds ) }

  | otherwise
    -- Strategy (B): match once into a tuple, then select from it.
  = do { error_expr <- mkErrorAppDs iRREFUT_PAT_ERROR_ID tuple_ty (ppr pat)
       ; tuple_expr <- matchSimply val_expr PatBindRhs pat local_tuple error_expr
       ; tuple_var <- newSysLocalDs tuple_ty
       ; let mk_tup_bind tick binder
              = (binder, mkOptTickBox tick $
                         mkTupleSelector local_binders binder
                                         tuple_var (Var tuple_var))
       ; return ( (tuple_var, tuple_expr) : zipWith mk_tup_bind ticks' binders ) }
  where
    binders       = collectPatBinders pat
    ticks'        = ticks ++ repeat []   -- pad so zips never run dry

    local_binders = map localiseId binders      -- See Note [Localise pattern binders]
    local_tuple   = mkBigCoreVarTup binders
    tuple_ty      = exprType local_tuple

    mk_bind scrut_var err_var tick bndr_var = do
    -- (mk_bind sv err_var) generates
    --          bv = case sv of { pat -> bv; other -> err_var @ type-of-bv }
    -- Remember, pat binds bv
        rhs_expr <- matchSimply (Var scrut_var) PatBindRhs pat
                                (Var bndr_var) error_expr
        return (bndr_var, mkOptTickBox tick rhs_expr)
      where
        error_expr = Var err_var `App` Type (idType bndr_var)

    -- "simple" patterns are cheap enough to match once per binder
    is_simple_lpat p = is_simple_pat (unLoc p)

    is_simple_pat (TuplePat ps Boxed _)        = all is_triv_lpat ps
    is_simple_pat pat@(ConPatOut{})            = case unLoc (pat_con pat) of
        RealDataCon con -> isProductTyCon (dataConTyCon con)
                           && all is_triv_lpat (hsConPatArgs (pat_args pat))
        PatSynCon _     -> False
    is_simple_pat (VarPat _)                   = True
    is_simple_pat (ParPat p)                   = is_simple_lpat p
    is_simple_pat _                            = False

    is_triv_lpat p = is_triv_pat (unLoc p)

    is_triv_pat (VarPat _)  = True
    is_triv_pat (WildPat _) = True
    is_triv_pat (ParPat p)  = is_triv_lpat p
    is_triv_pat _           = False
{-
Creating big tuples and their types for full Haskell expressions.
They work over *Ids*, and create tuples replete with their types,
which is why they are not in HsUtils.
-}
-- | Make a (boxed) tuple pattern; a singleton list is returned
-- unchanged, and the result reuses the first pattern's location.
mkLHsPatTup :: [LPat Id] -> LPat Id
mkLHsPatTup []            = noLoc (mkVanillaTuplePat [] Boxed)
mkLHsPatTup [lpat]        = lpat
mkLHsPatTup lpats@(p : _) = L (getLoc p) (mkVanillaTuplePat lpats Boxed)
-- | Tuple pattern over variables.
mkLHsVarPatTup :: [Id] -> LPat Id
mkLHsVarPatTup = mkLHsPatTup . map nlVarPat
-- | A vanilla tuple pattern simply gets its type from its sub-patterns.
mkVanillaTuplePat :: [OutPat Id] -> Boxity -> Pat Id
mkVanillaTuplePat pats box = TuplePat pats box pat_tys
  where
    pat_tys = map hsLPatType pats
-- The Big equivalents for the source tuple expressions
-- | Big (chunkified) tuple expression over variables.
mkBigLHsVarTup :: [Id] -> LHsExpr Id
mkBigLHsVarTup = mkBigLHsTup . map nlHsVar
-- | Big (chunkified) tuple expression; nests tuples when the arity
-- limit is exceeded.
mkBigLHsTup :: [LHsExpr Id] -> LHsExpr Id
mkBigLHsTup es = mkChunkified mkLHsTupleExpr es
-- The Big equivalents for the source tuple patterns
-- | Big (chunkified) tuple pattern over variables.
mkBigLHsVarPatTup :: [Id] -> LPat Id
mkBigLHsVarPatTup = mkBigLHsPatTup . map nlVarPat
-- | Big (chunkified) tuple pattern; nests when the arity limit is
-- exceeded.
mkBigLHsPatTup :: [LPat Id] -> LPat Id
mkBigLHsPatTup ps = mkChunkified mkLHsPatTup ps
{-
************************************************************************
* *
\subsection[mkFailurePair]{Code for pattern-matching and other failures}
* *
************************************************************************
Generally, we handle pattern matching failure like this: let-bind a
fail-variable, and use that variable if the thing fails:
\begin{verbatim}
let fail.33 = error "Help"
in
case x of
p1 -> ...
p2 -> fail.33
p3 -> fail.33
p4 -> ...
\end{verbatim}
Then
\begin{itemize}
\item
If the case can't fail, then there'll be no mention of @fail.33@, and the
simplifier will later discard it.
\item
If it can fail in only one way, then the simplifier will inline it.
\item
Only if it is used more than once will the let-binding remain.
\end{itemize}
There's a problem when the result of the case expression is of
unboxed type. Then the type of @fail.33@ is unboxed too, and
there is every chance that someone will change the let into a case:
\begin{verbatim}
case error "Help" of
fail.33 -> case ....
\end{verbatim}
which is of course utterly wrong. Rather than drop the condition that
only boxed types can be let-bound, we just turn the fail into a function
for the primitive case:
\begin{verbatim}
let fail.33 :: Void -> Int#
fail.33 = \_ -> error "Help"
in
case x of
p1 -> ...
p2 -> fail.33 void
p3 -> fail.33 void
p4 -> ...
\end{verbatim}
Now @fail.33@ is a function, so it can be let-bound.
-}
-- | Make a failure-point pair for pattern-match failure: a binding
-- @fail = \ _ -> expr@ together with the application @fail void@ that
-- each failing alternative jumps to.  The dummy 'Void#' argument keeps
-- the failure point from looking like a shared thunk; see
-- Note [Failure thunks and CPR].
mkFailurePair :: CoreExpr       -- Result type of the whole case expression
              -> DsM (CoreBind, -- Binds the newly-created fail variable
                                -- to \ _ -> expression
                      CoreExpr) -- Fail variable applied to realWorld#
-- See Note [Failure thunks and CPR]
mkFailurePair expr
  = do { fail_fun_var <- newFailLocalDs (voidPrimTy `mkFunTy` ty)
       ; fail_fun_arg <- newSysLocalDs voidPrimTy
         -- One-shot: a failure join point is entered at most once,
         -- which helps the simplifier (see the Note).
       ; let real_arg = setOneShotLambda fail_fun_arg
       ; return (NonRec fail_fun_var (Lam real_arg expr),
                 App (Var fail_fun_var) (Var voidPrimId)) }
  where
    ty = exprType expr
{-
Note [Failure thunks and CPR]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we make a failure point we ensure that it
does not look like a thunk. Example:
let fail = \rw -> error "urk"
in case x of
[] -> fail realWorld#
(y:ys) -> case ys of
[] -> fail realWorld#
(z:zs) -> (y,z)
Reason: we know that a failure point is always a "join point" and is
entered at most once. Adding a dummy 'realWorld' token argument makes
it clear that sharing is not an issue. And that in turn makes it more
CPR-friendly. This matters a lot: if you don't get it right, you lose
the tail call property. For example, see Trac #3403.
-}
-- | Wrap an expression in the given ticks.  'foldr' puts the first tick
-- in the list outermost; an empty list leaves the expression unchanged.
mkOptTickBox :: [Tickish Id] -> CoreExpr -> CoreExpr
mkOptTickBox = flip (foldr Tick)
-- | Build a case over a Bool-typed expression that records, via HPC
-- ticks, which branch was taken: @ixT@ is the tick index for the True
-- branch, @ixF@ for the False branch.  The scrutinised value itself is
-- rebuilt in each alternative, so the result denotes the same Bool.
mkBinaryTickBox :: Int -> Int -> CoreExpr -> DsM CoreExpr
mkBinaryTickBox ixT ixF e = do
    uq <- newUnique
    this_mod <- getModule
    -- Case binder for the scrutinee; its value is never referenced in
    -- the alternatives.
    let bndr1 = mkSysLocal (fsLit "t1") uq boolTy
    let
        -- Each alternative re-emits the matched constructor wrapped in
        -- the corresponding HPC tick.
        falseBox = Tick (HpcTick this_mod ixF) (Var falseDataConId)
        trueBox  = Tick (HpcTick this_mod ixT) (Var trueDataConId)
    --
    return $ Case e bndr1 boolTy
                   [ (DataAlt falseDataCon, [], falseBox)
                   , (DataAlt trueDataCon, [], trueBox)
                   ]
| abakst/liquidhaskell | src/Language/Haskell/Liquid/Desugar710/DsUtils.hs | bsd-3-clause | 32,488 | 0 | 19 | 8,993 | 5,114 | 2,672 | 2,442 | 357 | 10 |
{- |
Module : $Header$
Description : lattice classes
Copyright : (c) Christian Maeder, DFKI GmbH 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
-}
module Common.Lattice where
-- | A join-semilattice with distinguished top and bottom elements.
-- NOTE(review): no laws are stated here; presumably 'cjoin' is
-- associative, commutative and idempotent with 'bot' as its identity --
-- confirm against users of this class.
class (Ord l, Show l) => Lattice l where
  cjoin :: l -> l -> l  -- ^ join (least upper bound) of two elements
  ctop :: l             -- ^ greatest element
  bot :: l              -- ^ least element
-- | The trivial one-point lattice: join, top and bottom all coincide.
instance Lattice () where
  cjoin _ _ = ()
  ctop = ()
  bot = ()
-- | Booleans with False below True: join is disjunction.
instance Lattice Bool where
  cjoin = (||)
  ctop = True
  bot = False
-- | Product lattice: join, top and bottom are computed component-wise.
instance (Lattice a, Lattice b) => Lattice (a, b) where
  cjoin (a, b) (c, d) = (cjoin a c, cjoin b d)
  ctop = (ctop, ctop)
  bot = (bot, bot)
| mariefarrell/Hets | Common/Lattice.hs | gpl-2.0 | 656 | 0 | 8 | 168 | 204 | 115 | 89 | 17 | 0 |
yes x = case x of {False -> a ; _ -> b} | mpickering/hlint-refactor | tests/examples/Default48.hs | bsd-3-clause | 39 | 0 | 7 | 12 | 28 | 15 | 13 | 1 | 2 |
--
-- Copyright (c) 2014 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE PatternGuards #-}
-- created 11.02.2014
module Migrations.M_21 (migration) where
import UpgradeEngine
import Data.List (foldl')
import ShellTools
import Control.Monad
import Directory
-- | Database schema migration from version 21 to version 22; the actual
-- work is performed by 'act'.
migration = Migration {
    sourceVersion = 21
  , targetVersion = 22
  , actions = act
  }
-- | Run all upgrade actions for this migration, in order: first the
-- syncvm VM cleanup, then the ssh directory move.
act :: IO ()
act = updateSyncvm >> updateSsh
-- | For every VM whose "/type" is "syncvm", remove the obsolete
-- "/config/kernel" entry from its JSON tree; all other VMs are
-- returned unchanged.
updateSyncvm = xformVmJSON xform where
  xform tree = case jsGet "/type" tree of
    Just s | jsUnboxString s == "syncvm" -> modify tree
    _ -> tree
    where
      -- Drop the custom-kernel setting.
      modify = jsRm "/config/kernel"
-- | Move the ssh configuration directory under /config/etc/ and restore
-- its SELinux context.  A no-op when /config/ssh does not exist (i.e.
-- the system has already been migrated).
updateSsh = do
  e <- doesDirectoryExist "/config/ssh"
  when e $ mapM_ safeSpawnShell ["mv /config/ssh /config/etc/", "restorecon -R /config/etc/ssh"]
| jean-edouard/manager | upgrade-db/Migrations/M_21.hs | gpl-2.0 | 1,498 | 0 | 14 | 318 | 202 | 115 | 87 | 21 | 2 |
{-# LANGUAGE UnliftedNewtypes #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE PolyKinds #-}
module T17360 where
import GHC.Exts
-- | An identity-style newtype whose field may live in any runtime
-- representation @r@ (permitted by the UnliftedNewtypes extension).
newtype Id (a :: TYPE r) = Id a
-- Regression test for GHC #17360: this must typecheck even though @r@
-- is representation-polymorphic, since the value is only passed through
-- unchanged.
foo :: forall r (a :: TYPE r). Id a -> Id a
foo x = x
| sdiehl/ghc | testsuite/tests/typecheck/should_fail/T17360.hs | bsd-3-clause | 218 | 0 | 8 | 46 | 68 | 40 | 28 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ViewPatterns #-}
-- | Build-specific types.
module Stack.Types.Build
(StackBuildException(..)
,FlagSource(..)
,UnusedFlags(..)
,InstallLocation(..)
,ModTime
,modTime
,Installed(..)
,PackageInstallInfo(..)
,Task(..)
,taskLocation
,LocalPackage(..)
,BaseConfigOpts(..)
,Plan(..)
,TestOpts(..)
,BenchmarkOpts(..)
,BuildOpts(..)
,BuildSubset(..)
,defaultBuildOpts
,TaskType(..)
,TaskConfigOpts(..)
,ConfigCache(..)
,ConstructPlanException(..)
,configureOpts
,BadDependency(..)
,wantedLocalPackages
,FileCacheInfo (..)
,ConfigureOpts (..)
,PrecompiledCache (..))
where
import Control.DeepSeq
import Control.Exception
import Data.Binary (getWord8, putWord8, gput, gget)
import Data.Binary.VersionTagged
import qualified Data.ByteString as S
import Data.Char (isSpace)
import Data.Data
import Data.Hashable
import Data.List (dropWhileEnd, nub, intercalate)
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import Data.Maybe
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Data.Time.Calendar
import Data.Time.Clock
import Distribution.System (Arch)
import Distribution.Text (display)
import GHC.Generics
import Path (Path, Abs, File, Dir, mkRelDir, toFilePath, parseRelDir, (</>))
import Prelude
import Stack.Types.FlagName
import Stack.Types.GhcPkgId
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Version
import System.Exit (ExitCode)
import System.FilePath (dropTrailingPathSeparator, pathSeparator)
----------------------------------------------
-- Exceptions
data StackBuildException
= Couldn'tFindPkgId PackageName
| CompilerVersionMismatch
(Maybe (CompilerVersion, Arch))
(CompilerVersion, Arch)
VersionCheck
(Maybe (Path Abs File))
Text -- recommended resolution
-- ^ Path to the stack.yaml file
| Couldn'tParseTargets [Text]
| UnknownTargets
(Set PackageName) -- no known version
(Map PackageName Version) -- not in snapshot, here's the most recent version in the index
(Path Abs File) -- stack.yaml
| TestSuiteFailure PackageIdentifier (Map Text (Maybe ExitCode)) (Maybe (Path Abs File)) S.ByteString
| ConstructPlanExceptions
[ConstructPlanException]
(Path Abs File) -- stack.yaml
| CabalExitedUnsuccessfully
ExitCode
PackageIdentifier
(Path Abs File) -- cabal Executable
[String] -- cabal arguments
(Maybe (Path Abs File)) -- logfiles location
S.ByteString -- log contents
| ExecutionFailure [SomeException]
| LocalPackageDoesn'tMatchTarget
PackageName
Version -- local version
Version -- version specified on command line
| NoSetupHsFound (Path Abs Dir)
| InvalidFlagSpecification (Set UnusedFlags)
| TargetParseException [Text]
| DuplicateLocalPackageNames [(PackageName, [Path Abs Dir])]
deriving Typeable
data FlagSource = FSCommandLine | FSStackYaml
deriving (Show, Eq, Ord)
data UnusedFlags = UFNoPackage FlagSource PackageName
| UFFlagsNotDefined FlagSource Package (Set FlagName)
| UFSnapshot PackageName
deriving (Show, Eq, Ord)
instance Show StackBuildException where
show (Couldn'tFindPkgId name) =
("After installing " <> packageNameString name <>
", the package id couldn't be found " <> "(via ghc-pkg describe " <>
packageNameString name <> "). This shouldn't happen, " <>
"please report as a bug")
show (CompilerVersionMismatch mactual (expected, earch) check mstack resolution) = concat
[ case mactual of
Nothing -> "No compiler found, expected "
Just (actual, arch) -> concat
[ "Compiler version mismatched, found "
, T.unpack (compilerVersionName actual)
, " ("
, display arch
, ")"
, ", but expected "
]
, case check of
MatchMinor -> "minor version match with "
MatchExact -> "exact version "
NewerMinor -> "minor version match or newer with "
, T.unpack (compilerVersionName expected)
, " ("
, display earch
, ") (based on "
, case mstack of
Nothing -> "command line arguments"
Just stack -> "resolver setting in " ++ toFilePath stack
, "). "
, T.unpack resolution
]
show (Couldn'tParseTargets targets) = unlines
$ "The following targets could not be parsed as package names or directories:"
: map T.unpack targets
show (UnknownTargets noKnown notInSnapshot stackYaml) =
unlines $ noKnown' ++ notInSnapshot'
where
noKnown'
| Set.null noKnown = []
| otherwise = return $
"The following target packages were not found: " ++
intercalate ", " (map packageNameString $ Set.toList noKnown)
notInSnapshot'
| Map.null notInSnapshot = []
| otherwise =
"The following packages are not in your snapshot, but exist"
: "in your package index. Recommended action: add them to your"
: ("extra-deps in " ++ toFilePath stackYaml)
: "(Note: these are the most recent versions,"
: "but there's no guarantee that they'll build together)."
: ""
: map
(\(name, version) -> "- " ++ packageIdentifierString
(PackageIdentifier name version))
(Map.toList notInSnapshot)
show (TestSuiteFailure ident codes mlogFile bs) = unlines $ concat
[ ["Test suite failure for package " ++ packageIdentifierString ident]
, flip map (Map.toList codes) $ \(name, mcode) -> concat
[ " "
, T.unpack name
, ": "
, case mcode of
Nothing -> " executable not found"
Just ec -> " exited with: " ++ show ec
]
, return $ case mlogFile of
Nothing -> "Logs printed to console"
-- TODO Should we load up the full error output and print it here?
Just logFile -> "Full log available at " ++ toFilePath logFile
, if S.null bs
then []
else ["", "", doubleIndent $ T.unpack $ decodeUtf8With lenientDecode bs]
]
where
indent = dropWhileEnd isSpace . unlines . fmap (\line -> " " ++ line) . lines
doubleIndent = indent . indent
show (ConstructPlanExceptions exceptions stackYaml) =
"While constructing the BuildPlan the following exceptions were encountered:" ++
appendExceptions exceptions' ++
if Map.null extras then "" else (unlines
$ ("\n\nRecommended action: try adding the following to your extra-deps in "
++ toFilePath stackYaml)
: map (\(name, version) -> concat
[ "- "
, packageNameString name
, "-"
, versionString version
]) (Map.toList extras)
++ ["", "You may also want to try the 'stack solver' command"]
)
where
exceptions' = removeDuplicates exceptions
appendExceptions = foldr (\e -> (++) ("\n\n--" ++ show e)) ""
removeDuplicates = nub
extras = Map.unions $ map getExtras exceptions'
getExtras (DependencyCycleDetected _) = Map.empty
getExtras (UnknownPackage _) = Map.empty
getExtras (DependencyPlanFailures _ m) =
Map.unions $ map go $ Map.toList m
where
go (name, (_range, Just version, NotInBuildPlan)) =
Map.singleton name version
go _ = Map.empty
    -- Suppressing duplicate output
show (CabalExitedUnsuccessfully exitCode taskProvides' execName fullArgs logFiles bs) =
let fullCmd = (dropQuotes (toFilePath execName) ++ " " ++ (unwords fullArgs))
logLocations = maybe "" (\fp -> "\n Logs have been written to: " ++ toFilePath fp) logFiles
in "\n-- While building package " ++ dropQuotes (show taskProvides') ++ " using:\n" ++
" " ++ fullCmd ++ "\n" ++
" Process exited with code: " ++ show exitCode ++
logLocations ++
(if S.null bs
then ""
else "\n\n" ++ doubleIndent (T.unpack $ decodeUtf8With lenientDecode bs))
where
-- appendLines = foldr (\pName-> (++) ("\n" ++ show pName)) ""
indent = dropWhileEnd isSpace . unlines . fmap (\line -> " " ++ line) . lines
dropQuotes = filter ('\"' /=)
doubleIndent = indent . indent
show (ExecutionFailure es) = intercalate "\n\n" $ map show es
show (LocalPackageDoesn'tMatchTarget name localV requestedV) = concat
[ "Version for local package "
, packageNameString name
, " is "
, versionString localV
, ", but you asked for "
, versionString requestedV
, " on the command line"
]
show (NoSetupHsFound dir) =
"No Setup.hs or Setup.lhs file found in " ++ toFilePath dir
show (InvalidFlagSpecification unused) = unlines
$ "Invalid flag specification:"
: map go (Set.toList unused)
where
showFlagSrc :: FlagSource -> String
showFlagSrc FSCommandLine = " (specified on command line)"
showFlagSrc FSStackYaml = " (specified in stack.yaml)"
go :: UnusedFlags -> String
go (UFNoPackage src name) = concat
[ "- Package '"
, packageNameString name
, "' not found"
, showFlagSrc src
]
go (UFFlagsNotDefined src pkg flags) = concat
[ "- Package '"
, name
, "' does not define the following flags"
, showFlagSrc src
, ":\n"
, intercalate "\n"
(map (\flag -> " " ++ flagNameString flag)
(Set.toList flags))
, "\n- Flags defined by package '" ++ name ++ "':\n"
, intercalate "\n"
(map (\flag -> " " ++ name ++ ":" ++ flagNameString flag)
(Set.toList pkgFlags))
]
where name = packageNameString (packageName pkg)
pkgFlags = packageDefinedFlags pkg
go (UFSnapshot name) = concat
[ "- Attempted to set flag on snapshot package "
, packageNameString name
, ", please add to extra-deps"
]
show (TargetParseException [err]) = "Error parsing targets: " ++ T.unpack err
show (TargetParseException errs) = unlines
$ "The following errors occurred while parsing the build targets:"
: map (("- " ++) . T.unpack) errs
show (DuplicateLocalPackageNames pairs) = concat
$ "The same package name is used in multiple local packages\n"
: map go pairs
where
go (name, dirs) = unlines
$ ""
: (packageNameString name ++ " used in:")
: map goDir dirs
goDir dir = "- " ++ toFilePath dir
instance Exception StackBuildException
data ConstructPlanException
= DependencyCycleDetected [PackageName]
| DependencyPlanFailures PackageIdentifier (Map PackageName (VersionRange, LatestVersion, BadDependency))
| UnknownPackage PackageName -- TODO perhaps this constructor will be removed, and BadDependency will handle it all
-- ^ Recommend adding to extra-deps, give a helpful version number?
deriving (Typeable, Eq)
-- | For display purposes only, Nothing if package not found
type LatestVersion = Maybe Version
-- | Reason why a dependency was not used
data BadDependency
= NotInBuildPlan
| Couldn'tResolveItsDependencies
| DependencyMismatch Version
deriving (Typeable, Eq)
instance Show ConstructPlanException where
show e =
let details = case e of
(DependencyCycleDetected pNames) ->
"While checking call stack,\n" ++
" dependency cycle detected in packages:" ++ indent (appendLines pNames)
(DependencyPlanFailures pIdent (Map.toList -> pDeps)) ->
"Failure when adding dependencies:" ++ doubleIndent (appendDeps pDeps) ++ "\n" ++
" needed for package: " ++ packageIdentifierString pIdent
(UnknownPackage pName) ->
"While attempting to add dependency,\n" ++
" Could not find package " ++ show pName ++ " in known packages"
in indent details
where
appendLines = foldr (\pName-> (++) ("\n" ++ show pName)) ""
indent = dropWhileEnd isSpace . unlines . fmap (\line -> " " ++ line) . lines
doubleIndent = indent . indent
appendDeps = foldr (\dep-> (++) ("\n" ++ showDep dep)) ""
showDep (name, (range, mlatest, badDep)) = concat
[ show name
, ": needed ("
, display range
, ")"
, ", "
, let latestStr =
case mlatest of
Nothing -> ""
Just latest -> " (latest is " ++ versionString latest ++ ")"
in case badDep of
NotInBuildPlan -> "not present in build plan" ++ latestStr
Couldn'tResolveItsDependencies -> "couldn't resolve its dependencies"
DependencyMismatch version ->
case mlatest of
Just latest
| latest == version ->
versionString version ++
" found (latest version available)"
_ -> versionString version ++ " found" ++ latestStr
]
{- TODO Perhaps change the showDep function to look more like this:
dropQuotes = filter ((/=) '\"')
(VersionOutsideRange pName pIdentifier versionRange) ->
"Exception: Stack.Build.VersionOutsideRange\n" ++
" While adding dependency for package " ++ show pName ++ ",\n" ++
" " ++ dropQuotes (show pIdentifier) ++ " was found to be outside its allowed version range.\n" ++
" Allowed version range is " ++ display versionRange ++ ",\n" ++
" should you correct the version range for " ++ dropQuotes (show pIdentifier) ++ ", found in [extra-deps] in the project's stack.yaml?"
-}
----------------------------------------------
-- | Which subset of packages to build
data BuildSubset
= BSAll
| BSOnlySnapshot
-- ^ Only install packages in the snapshot database, skipping
-- packages intended for the local database.
| BSOnlyDependencies
deriving Show
-- | Configuration for building.
data BuildOpts =
BuildOpts {boptsTargets :: ![Text]
,boptsLibProfile :: !Bool
,boptsExeProfile :: !Bool
,boptsHaddock :: !Bool
-- ^ Build haddocks?
,boptsHaddockDeps :: !(Maybe Bool)
-- ^ Build haddocks for dependencies?
,boptsDryrun :: !Bool
,boptsGhcOptions :: ![Text]
,boptsFlags :: !(Map (Maybe PackageName) (Map FlagName Bool))
,boptsInstallExes :: !Bool
-- ^ Install executables to user path after building?
,boptsPreFetch :: !Bool
-- ^ Fetch all packages immediately
,boptsBuildSubset :: !BuildSubset
,boptsFileWatch :: !Bool
-- ^ Watch files for changes and automatically rebuild
,boptsKeepGoing :: !(Maybe Bool)
-- ^ Keep building/running after failure
,boptsForceDirty :: !Bool
-- ^ Force treating all local packages as having dirty files
,boptsTests :: !Bool
-- ^ Turn on tests for local targets
,boptsTestOpts :: !TestOpts
-- ^ Additional test arguments
,boptsBenchmarks :: !Bool
-- ^ Turn on benchmarks for local targets
,boptsBenchmarkOpts :: !BenchmarkOpts
-- ^ Additional test arguments
,boptsExec :: ![(String, [String])]
-- ^ Commands (with arguments) to run after a successful build
,boptsOnlyConfigure :: !Bool
-- ^ Only perform the configure step when building
}
deriving (Show)
defaultBuildOpts :: BuildOpts
defaultBuildOpts = BuildOpts
{ boptsTargets = []
, boptsLibProfile = False
, boptsExeProfile = False
, boptsHaddock = False
, boptsHaddockDeps = Nothing
, boptsDryrun = False
, boptsGhcOptions = []
, boptsFlags = Map.empty
, boptsInstallExes = False
, boptsPreFetch = False
, boptsBuildSubset = BSAll
, boptsFileWatch = False
, boptsKeepGoing = Nothing
, boptsForceDirty = False
, boptsTests = False
, boptsTestOpts = defaultTestOpts
, boptsBenchmarks = False
, boptsBenchmarkOpts = defaultBenchmarkOpts
, boptsExec = []
, boptsOnlyConfigure = False
}
-- | Options for the 'FinalAction' 'DoTests'
data TestOpts =
  TestOpts {toRerunTests :: !Bool -- ^ Whether successful tests will be run again
           ,toAdditionalArgs :: ![String] -- ^ Arguments passed to the test program
           ,toCoverage :: !Bool -- ^ Generate a code coverage report
           ,toDisableRun :: !Bool -- ^ Disable running of tests
           } deriving (Eq,Show)
defaultTestOpts :: TestOpts
defaultTestOpts = TestOpts
{ toRerunTests = True
, toAdditionalArgs = []
, toCoverage = False
, toDisableRun = False
}
-- | Options for the 'FinalAction' 'DoBenchmarks'
data BenchmarkOpts =
BenchmarkOpts {beoAdditionalArgs :: !(Maybe String) -- ^ Arguments passed to the benchmark program
,beoDisableRun :: !Bool -- ^ Disable running of benchmarks
} deriving (Eq,Show)
defaultBenchmarkOpts :: BenchmarkOpts
defaultBenchmarkOpts = BenchmarkOpts
{ beoAdditionalArgs = Nothing
, beoDisableRun = False
}
-- | Package dependency oracle.
newtype PkgDepsOracle =
PkgDeps PackageName
deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
-- | Stored on disk to know whether the flags have changed or any
-- files have changed.
data ConfigCache = ConfigCache
{ configCacheOpts :: !ConfigureOpts
-- ^ All options used for this package.
, configCacheDeps :: !(Set GhcPkgId)
-- ^ The GhcPkgIds of all of the dependencies. Since Cabal doesn't take
-- the complete GhcPkgId (only a PackageIdentifier) in the configure
-- options, just using the previous value is insufficient to know if
-- dependencies have changed.
, configCacheComponents :: !(Set S.ByteString)
-- ^ The components to be built. It's a bit of a hack to include this in
-- here, as it's not a configure option (just a build option), but this
-- is a convenient way to force compilation when the components change.
, configCacheHaddock :: !Bool
-- ^ Are haddocks to be built?
}
deriving (Generic,Eq,Show)
instance Binary ConfigCache where
    put x = do
        -- Magic header bytes (1,3,4,8): a crude format tag so that
        -- stale or foreign cache files fail to decode instead of being
        -- misinterpreted.
        putWord8 1
        putWord8 3
        putWord8 4
        putWord8 8
        -- Generic serialisation of the record body.
        gput $ from x
    get = do
        -- Match the magic header; a mismatch fails the decoder here
        -- (monadic pattern-match failure) rather than producing garbage
        -- from an incompatible serialisation.
        1 <- getWord8
        3 <- getWord8
        4 <- getWord8
        8 <- getWord8
        fmap to gget
instance NFData ConfigCache where
rnf = genericRnf
-- | A task to perform when building
data Task = Task
{ taskProvides :: !PackageIdentifier -- ^ the package/version to be built
, taskType :: !TaskType -- ^ the task type, telling us how to build this
, taskConfigOpts :: !TaskConfigOpts
, taskPresent :: !(Map PackageIdentifier GhcPkgId) -- ^ GhcPkgIds of already-installed dependencies
}
deriving Show
-- | Given the IDs of any missing packages, produce the configure options
data TaskConfigOpts = TaskConfigOpts
{ tcoMissing :: !(Set PackageIdentifier)
-- ^ Dependencies for which we don't yet have an GhcPkgId
, tcoOpts :: !(Map PackageIdentifier GhcPkgId -> ConfigureOpts)
-- ^ Produce the list of options given the missing @GhcPkgId@s
}
instance Show TaskConfigOpts where
show (TaskConfigOpts missing f) = concat
[ "Missing: "
, show missing
, ". Without those: "
, show $ f Map.empty
]
-- | The type of a task, either building local code or something from the
-- package index (upstream)
data TaskType = TTLocal LocalPackage
| TTUpstream Package InstallLocation
deriving Show
-- | Install location of a task's product, determined by its type:
-- local packages always target the local database, upstream packages
-- carry their own location.
taskLocation :: Task -> InstallLocation
taskLocation task = locOf (taskType task)
  where
    locOf (TTLocal _)        = Local
    locOf (TTUpstream _ loc) = loc
-- | A complete plan of what needs to be built and how to do it
data Plan = Plan
{ planTasks :: !(Map PackageName Task)
, planFinals :: !(Map PackageName (Task, LocalPackageTB))
-- ^ Final actions to be taken (test, benchmark, etc)
, planUnregisterLocal :: !(Map GhcPkgId (PackageIdentifier, Text))
-- ^ Text is reason we're unregistering, for display only
, planInstallExes :: !(Map Text InstallLocation)
-- ^ Executables that should be installed after successful building
}
deriving Show
-- | Basic information used to calculate what the configure options are
data BaseConfigOpts = BaseConfigOpts
{ bcoSnapDB :: !(Path Abs Dir)
, bcoLocalDB :: !(Path Abs Dir)
, bcoSnapInstallRoot :: !(Path Abs Dir)
, bcoLocalInstallRoot :: !(Path Abs Dir)
, bcoBuildOpts :: !BuildOpts
}
-- | Render a @BaseConfigOpts@ to an actual list of options
configureOpts :: EnvConfig
-> BaseConfigOpts
-> Map PackageIdentifier GhcPkgId -- ^ dependencies
-> Bool -- ^ wanted?
-> InstallLocation
-> Package
-> ConfigureOpts
configureOpts econfig bco deps wanted loc package = ConfigureOpts
{ coDirs = configureOptsDirs bco loc package
, coNoDirs = configureOptsNoDir econfig bco deps wanted package
}
configureOptsDirs :: BaseConfigOpts
-> InstallLocation
-> Package
-> [String]
configureOptsDirs bco loc package = concat
[ ["--user", "--package-db=clear", "--package-db=global"]
, map (("--package-db=" ++) . toFilePath) $ case loc of
Snap -> [bcoSnapDB bco]
Local -> [bcoSnapDB bco, bcoLocalDB bco]
, [ "--libdir=" ++ toFilePathNoTrailingSlash (installRoot </> $(mkRelDir "lib"))
, "--bindir=" ++ toFilePathNoTrailingSlash (installRoot </> bindirSuffix)
, "--datadir=" ++ toFilePathNoTrailingSlash (installRoot </> $(mkRelDir "share"))
, "--libexecdir=" ++ toFilePathNoTrailingSlash (installRoot </> $(mkRelDir "libexec"))
, "--sysconfdir=" ++ toFilePathNoTrailingSlash (installRoot </> $(mkRelDir "etc"))
, "--docdir=" ++ toFilePathNoTrailingSlash docDir
, "--htmldir=" ++ toFilePathNoTrailingSlash docDir
, "--haddockdir=" ++ toFilePathNoTrailingSlash docDir]
]
where
toFilePathNoTrailingSlash = dropTrailingPathSeparator . toFilePath
installRoot =
case loc of
Snap -> bcoSnapInstallRoot bco
Local -> bcoLocalInstallRoot bco
docDir =
case pkgVerDir of
Nothing -> installRoot </> docDirSuffix
Just dir -> installRoot </> docDirSuffix </> dir
pkgVerDir =
parseRelDir (packageIdentifierString (PackageIdentifier (packageName package)
(packageVersion package)) ++
[pathSeparator])
-- | Same as 'configureOpts', but does not include directory path options
configureOptsNoDir :: EnvConfig
-> BaseConfigOpts
-> Map PackageIdentifier GhcPkgId -- ^ dependencies
-> Bool -- ^ wanted?
-> Package
-> [String]
configureOptsNoDir econfig bco deps wanted package = concat
[ depOptions
, ["--enable-library-profiling" | boptsLibProfile bopts || boptsExeProfile bopts]
, ["--enable-executable-profiling" | boptsExeProfile bopts]
, map (\(name,enabled) ->
"-f" <>
(if enabled
then ""
else "-") <>
flagNameString name)
(Map.toList (packageFlags package))
, concatMap (\x -> ["--ghc-options", T.unpack x]) allGhcOptions
, map (("--extra-include-dirs=" ++) . T.unpack) (Set.toList (configExtraIncludeDirs config))
, map (("--extra-lib-dirs=" ++) . T.unpack) (Set.toList (configExtraLibDirs config))
, if whichCompiler (envConfigCompilerVersion econfig) == Ghcjs
then ["--ghcjs"]
else []
]
where
config = getConfig econfig
bopts = bcoBuildOpts bco
depOptions = map (uncurry toDepOption) $ Map.toList deps
where
toDepOption =
if envConfigCabalVersion econfig >= $(mkVersion "1.22")
then toDepOption1_22
else toDepOption1_18
toDepOption1_22 ident gid = concat
[ "--dependency="
, packageNameString $ packageIdentifierName ident
, "="
, ghcPkgIdString gid
]
toDepOption1_18 ident _gid = concat
[ "--constraint="
, packageNameString name
, "=="
, versionString version
]
where
PackageIdentifier name version = ident
ghcOptionsMap = configGhcOptions $ getConfig econfig
allGhcOptions = concat
[ fromMaybe [] $ Map.lookup Nothing ghcOptionsMap
, fromMaybe [] $ Map.lookup (Just $ packageName package) ghcOptionsMap
, if wanted
then boptsGhcOptions bopts
else []
]
-- | Names of all local packages that are marked as wanted.
wantedLocalPackages :: [LocalPackage] -> Set PackageName
wantedLocalPackages locals =
    Set.fromList [packageName (lpPackage lp) | lp <- locals, lpWanted lp]
-- | One-way conversion to serialized time: the day as a modified Julian
-- day number paired with the time-of-day as a rational.
modTime :: UTCTime -> ModTime
modTime t = ModTime (day, dayTime)
  where
    day     = toModifiedJulianDay (utctDay t)
    dayTime = toRational (utctDayTime t)
data Installed = Library PackageIdentifier GhcPkgId | Executable PackageIdentifier
deriving (Show, Eq, Ord)
-- | Configure options to be sent to Setup.hs configure
data ConfigureOpts = ConfigureOpts
{ coDirs :: ![String]
-- ^ Options related to various paths. We separate these out since they do
-- not have an impact on the contents of the compiled binary for checking
-- if we can use an existing precompiled cache.
, coNoDirs :: ![String]
}
deriving (Show, Eq, Generic)
instance Binary ConfigureOpts
instance NFData ConfigureOpts where
rnf = genericRnf
-- | Information on a compiled package: the library conf file (if relevant),
-- and all of the executable paths.
data PrecompiledCache = PrecompiledCache
-- Use FilePath instead of Path Abs File for Binary instances
{ pcLibrary :: !(Maybe FilePath)
-- ^ .conf file inside the package database
, pcExes :: ![FilePath]
-- ^ Full paths to executables
}
deriving (Show, Eq, Generic)
instance Binary PrecompiledCache
instance NFData PrecompiledCache where
rnf = genericRnf
| akhileshs/stack | src/Stack/Types/Build.hs | bsd-3-clause | 28,694 | 0 | 21 | 9,047 | 5,494 | 3,007 | 2,487 | 661 | 5 |
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Syntax.Paren
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- Parser for Haskell that only cares about parenthesis and layout.
module Yi.Syntax.Paren where
import Prelude hiding (elem)
import Control.Applicative (Alternative ((<|>), many))
import Data.Foldable (elem, toList)
import Data.Maybe (listToMaybe)
import Data.Monoid (Endo (Endo, appEndo), (<>))
import Yi.IncrementalParse (P, Parser, eof, lookNext, recoverWith, symbol)
import Yi.Lexer.Alex hiding (tokenToStyle)
import Yi.Lexer.Haskell
import Yi.Style (StyleName, errorStyle, hintStyle)
import Yi.Syntax (Point, Scanner, Span)
import Yi.Syntax.Layout (State, layoutHandler)
import Yi.Syntax.Tree
indentScanner :: Scanner (AlexState lexState) TT
-> Scanner (Yi.Syntax.Layout.State Token lexState) TT
indentScanner = layoutHandler startsLayout [(Special '(', Special ')'),
(Special '[', Special ']'),
(Special '{', Special '}')] ignoredToken
(Special '<', Special '>', Special '.') isBrace
-- HACK: We insert the Special '<', '>', '.', that don't occur in normal haskell
-- parsing.
-- | Recognise the token that the layout pass treats as an opening
-- brace.
isBrace :: TT -> Bool
isBrace (Tok t _ _) = t == Special '{'
-- | Tokens that the layout algorithm skips entirely: comments and CPP
-- directives do not participate in layout.
ignoredToken :: TT -> Bool
ignoredToken (Tok t _ _) = isComment t || t == CppDirective
-- | "Noise" tokens are those the parser treats as plain atoms: every
-- non-special token, plus the special separators ';', ',' and '`'.
-- Other special tokens (parentheses, layout markers) get structural
-- treatment instead.
isNoise :: Token -> Bool
isNoise (Special c) = c `elem` (";,`" :: String)
isNoise _ = True
type Expr t = [Tree t]
data Tree t
= Paren t (Expr t) t -- A parenthesized expression (maybe with [ ] ...)
| Block ([Tree t]) -- A list of things separated by layout (as in do; etc.)
| Atom t
| Error t
| Expr [Tree t]
deriving (Show, Foldable, Functor)
instance IsTree Tree where
emptyNode = Expr []
uniplate (Paren l g r) = (g,\g' -> Paren l g' r)
uniplate (Expr g) = (g,Expr)
uniplate (Block s) = (s,Block)
uniplate t = ([],const t)
-- | Search the given list, and return the 1st tree after the given
-- point on the given line. This is the tree that will be moved if
-- something is inserted at the point. Precondition: point is in the
-- given line.
-- TODO: this should be optimized by just giving the point of the end
-- of the line
getIndentingSubtree :: Tree TT -> Point -> Int -> Maybe (Tree TT)
getIndentingSubtree root offset line =
listToMaybe [t | (t,posn) <- takeWhile ((<= line) . posnLine . snd) allSubTreesPosn,
-- it's very important that we do a linear search
-- here (takeWhile), so that the tree is evaluated
-- lazily and therefore parsing it can be lazy.
posnOfs posn > offset, posnLine posn == line]
-- Only non-empty Block subtrees are candidates; the 'filter' guarantees
-- each candidate is non-empty, so the (tok:_) pattern below is safe.
where allSubTreesPosn = [(t',posn) | t'@(Block _) <-filter (not . null . toList) (getAllSubTrees root),
let (tok:_) = toList t',
let posn = tokPosn tok]
-- | Given a tree, return (first offset, number of lines).
getSubtreeSpan :: Tree TT -> (Point, Int)
getSubtreeSpan tree = (posnOfs first, lastLine - firstLine)
-- Both element lookups are expected to be Just for the trees this is
-- called on; 'assertJust' fails loudly rather than silently on a bug.
where bounds@[first, _last] = fmap (tokPosn . assertJust) [getFirstElement tree, getLastElement tree]
[firstLine, lastLine] = fmap posnLine bounds
assertJust (Just x) = x
assertJust _ = error "assertJust: Just expected"
-- dropWhile' f = foldMap (\x -> if f x then mempty else Endo (x :))
--
-- isBefore l (Atom t) = isBefore' l t
-- isBefore l (Error t) = isBefore l t
-- isBefore l (Paren l g r) = isBefore l r
-- isBefore l (Block s) = False
--
-- isBefore' l (Tok {tokPosn = Posn {posnLn = l'}}) =
-- | Top-level parser: the whole input becomes one 'Expr' node.
parse :: P TT (Tree TT)
parse = Expr <$> parse' tokT tokFromT
-- | Parse a token stream into a forest, given a projection from the
-- transformed token type back to plain 'Token's (and, unused here, the
-- reverse embedding).
parse' :: (TT -> Token) -> (Token -> TT) -> P TT [Tree TT]
parse' toTok _ = pExpr <* eof
where
-- parse a special symbol
sym c = symbol (isSpecial [c] . toTok)
pleaseSym c = recoverWith errTok <|> sym c -- accept c, or insert an error token and continue
pExpr :: P TT (Expr TT)
pExpr = many pTree
pBlocks = (Expr <$> pExpr) `sepBy1` sym '.' -- the '.' is generated by the layout, see HACK above
-- note that we can have empty statements, hence we use sepBy1.
pTree :: P TT (Tree TT)
pTree = (Paren <$> sym '(' <*> pExpr <*> pleaseSym ')')
<|> (Paren <$> sym '[' <*> pExpr <*> pleaseSym ']')
<|> (Paren <$> sym '{' <*> pExpr <*> pleaseSym '}')
<|> (Block <$> (sym '<' *> pBlocks <* sym '>')) -- see HACK above
<|> (Atom <$> symbol (isNoise . toTok))
<|> (Error <$> recoverWith (symbol (isSpecial "})]" . toTok)))
-- note that, by construction, '<' and '>' will always be matched, so
-- we don't try to recover errors with them.
-- | Compute syntax-highlighting strokes for a tree. @point@ is the
-- cursor position (used to hint matching parentheses); the begin/end
-- bounds are currently ignored.
getStrokes :: Point -> Point -> Point -> Tree TT -> [Stroke]
getStrokes point _begin _end t0 = -- trace (show t0)
result
-- Strokes are accumulated as an 'Endo' difference list so the result
-- list can be produced lazily, front to back.
where getStrokes' (Atom t) = one (ts t)
getStrokes' (Error t) = one (modStroke errorStyle (ts t)) -- paint in red
getStrokes' (Block s) = getStrokesL s
getStrokes' (Expr g) = getStrokesL g
getStrokes' (Paren l g r)
| isErrorTok $ tokT r = one (modStroke errorStyle (ts l)) <> getStrokesL g
-- left paren wasn't matched: paint it in red.
-- note that testing this on the "Paren" node actually forces the parsing of the
-- right paren, undermining online behaviour.
| posnOfs (tokPosn l) == point || posnOfs (tokPosn r) == point - 1
= one (modStroke hintStyle (ts l)) <> getStrokesL g <> one (modStroke hintStyle (ts r))
| otherwise = one (ts l) <> getStrokesL g <> one (ts r)
getStrokesL = foldMap getStrokes'
ts = tokenToStroke
result = appEndo (getStrokes' t0) []
one x = Endo (x :)
-- | Convert a token to a stroke by styling its span.
tokenToStroke :: TT -> Stroke
tokenToStroke = fmap tokenToStyle . tokToSpan
-- | Overlay an extra style on top of an existing stroke.
modStroke :: StyleName -> Stroke -> Stroke
modStroke f = fmap (f `mappend`)
-- | Textual annotation for a token, if it has one.
tokenToAnnot :: TT -> Maybe (Span String)
tokenToAnnot = sequenceA . tokToSpan . fmap tokenToText
-- | Create a special error token. (e.g. fill in where there is no correct token to parse)
-- Note that the position of the token has to be correct for correct computation of
-- node spans.
errTok :: Parser (Tok t) (Tok Token)
errTok = mkTok <$> curPos
where curPos = tB <$> lookNext
-- At end of input there is no next token; use maxBound as the position.
tB Nothing = maxBound
tB (Just x) = tokBegin x
-- The '!' special marks the token as synthetic; size 0 so it occupies no text.
mkTok p = Tok (Special '!') 0 (startPosn {posnOfs = p})
| siddhanathan/yi | yi-mode-haskell/src/Yi/Syntax/Paren.hs | gpl-2.0 | 6,832 | 0 | 16 | 1,969 | 1,845 | 989 | 856 | 100 | 5 |
{- arch-tag: Tests main file
Copyright (C) 2004-2005 John Goerzen <jgoerzen@complete.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
module Tests(tests) where
import Test.HUnit
import qualified Testbasics
import qualified TestSbasics
import qualified SpecificDBTests
import qualified TestMisc
-- | A trivial sanity check that the harness itself runs.
test1 :: Test
test1 = TestCase ("x" @=? "x")

-- | All test groups, labelled so failures are easy to locate.
tests :: Test
tests = TestList [TestLabel "test1" test1,
                  TestLabel "String basics" TestSbasics.tests,
                  TestLabel "SqlValue basics" Testbasics.tests,
                  TestLabel "SpecificDB" SpecificDBTests.tests,
                  TestLabel "Misc tests" TestMisc.tests]
| abuiles/turbinado-blog | tmp/dependencies/hdbc-postgresql-1.1.4.1/testsrc/Tests.hs | bsd-3-clause | 1,255 | 0 | 8 | 250 | 102 | 58 | 44 | 12 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.MessageFeedback
-- Copyright : (c) Quentin Moser <moserq@gmail.com>
-- License : BSD3
--
-- Maintainer : orphaned
-- Stability : unstable
-- Portability : unportable
--
-- Alternative to 'XMonad.Operations.sendMessage' that provides knowledge
-- of whether the message was handled, and utility functions based on
-- this facility.
-----------------------------------------------------------------------------
module XMonad.Actions.MessageFeedback (
-- * Usage
-- $usage
send
, tryMessage
, tryMessage_
, tryInOrder
, tryInOrder_
, sm
, sendSM
, sendSM_
) where
import XMonad.Core ( X (), Message, SomeMessage(..), LayoutClass(..), windowset, catchX )
import XMonad.StackSet ( current, workspace, layout, tag )
import XMonad.Operations ( updateLayout )
import Control.Monad.State ( gets )
import Data.Maybe ( isJust )
import Control.Applicative ((<$>))
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Actions.MessageFeedback
--
-- You can then use this module's functions wherever an action is expected.
--
-- Note that most functions in this module have a return type of @X Bool@
-- whereas configuration options will expect a @X ()@ action.
-- For example, the key binding
--
-- > -- Shrink the master area of a tiled layout, or move the focused window
-- > -- to the left in a WindowArranger-based layout
-- > ((modKey, xK_Left), tryMessage Shrink (MoveLeft 50))
--
-- is mis-typed. For this reason, this module provides alternatives (ending with
-- an underscore, e.g. tryMessage_) that discard their result and return an @X ()@.
-- For example, to correct the previous example:
--
-- > ((modKey, xK_Left), tryMessage_ Shrink (MoveLeft 50))
--
-- | Behaves like 'XMonad.Operations.sendMessage', but returns True of the
-- message was handled by the layout, False otherwise.
send :: Message a => a -> X Bool
send = sendSM . sm -- existentially wrap the message, then dispatch it
-- | Sends the first message, and if it was not handled, sends the second.
-- Returns True if either message was handled, False otherwise.
-- | Send @m1@; if the layout did not handle it, send @m2@ instead.
-- Returns True iff either message was handled.
tryMessage :: (Message a, Message b) => a -> b -> X Bool
tryMessage m1 m2 = do
    handled <- send m1
    if handled
        then return True
        else send m2
-- | Like 'tryMessage', but discards the result.
tryMessage_ :: (Message a, Message b) => a -> b -> X ()
tryMessage_ m1 m2 = do
    _ <- tryMessage m1 m2
    return ()
-- | Tries sending every message of the list in order until one of them
-- is handled. Returns True if one of the messages was handled, False otherwise.
-- | Send each message in turn until one is handled. Returns True iff
-- some message in the list was handled, False otherwise.
tryInOrder :: [SomeMessage] -> X Bool
tryInOrder = go
  where
    go []       = return False
    go (m : ms) = do
        handled <- sendSM m
        if handled then return True else go ms
-- | Like 'tryInOrder', but discards the result.
tryInOrder_ :: [SomeMessage] -> X ()
tryInOrder_ ms = do
    _ <- tryInOrder ms
    return ()
-- | Convenience shorthand for 'XMonad.Core.SomeMessage'.
sm :: Message a => a -> SomeMessage
sm = SomeMessage -- existentially wrap any 'Message'
-- | Send an already-wrapped message to the current workspace's layout,
-- commit the (possibly) updated layout, and report whether the message
-- was handled ('handleMessage' returned a new layout).
sendSM :: SomeMessage -> X Bool
sendSM m = do w <- workspace . current <$> gets windowset
ml' <- handleMessage (layout w) m `catchX` return Nothing -- exceptions count as "not handled"
updateLayout (tag w) ml'
return $ isJust ml'
-- | Like 'sendSM', but discards the result.
sendSM_ :: SomeMessage -> X ()
sendSM_ m = do
    _ <- sendSM m
    return ()
module Main where
main :: IO ()
main = putStrLn "This is foo from also-has-exe-foo" -- identifies which executable ran in the integration test
| rubik/stack | test/integration/tests/1198-multiple-exes-with-same-name/files/also-has-exe-foo/app/Main.hs | bsd-3-clause | 85 | 0 | 6 | 16 | 22 | 12 | 10 | 3 | 1 |
-- showing/reading floats
--
module Main(main) where
import Numeric
-- | Exercise 'showEFloat', 'showFFloat' and 'showGFloat' at fixed (7),
-- zero and unrestricted precision over both Double and Float samples,
-- print the renderings, then check they can all be read back.
main = do
let dbls = map (shEFloat (Just 7)) doubles
++ map (shEFloat (Just 0)) doubles
++ map (shEFloat Nothing) doubles
++ map (shFFloat (Just 7)) doubles
++ map (shFFloat (Just 0)) doubles
++ map (shFFloat Nothing) doubles
++ map (shGFloat (Just 7)) doubles
++ map (shGFloat (Just 0)) doubles
++ map (shGFloat Nothing) doubles
flts = map (shEFloat (Just 7)) floats
++ map (shEFloat (Just 0)) floats
++ map (shEFloat Nothing) floats
++ map (shFFloat (Just 7)) floats
++ map (shFFloat (Just 0)) floats
++ map (shFFloat Nothing) floats
++ map (shGFloat (Just 7)) floats
++ map (shGFloat (Just 0)) floats
++ map (shGFloat Nothing) floats
putStrLn (unlines dbls)
putStrLn (unlines flts)
-- Round-trip: every rendered string must 'read' back as a Double
-- (the Float renderings are deliberately re-read at Double type).
print (map read dbls :: [Double])
print (map read flts :: [Double])
-- | Render a float in scientific notation ('showEFloat'), fixed-point
-- notation ('showFFloat'), or the adaptive format ('showGFloat').
-- @p@ is the number of decimal places ('Nothing' = full precision).
shEFloat, shFFloat, shGFloat :: RealFloat a => Maybe Int -> a -> String
shEFloat p f = showEFloat p f ""
shFFloat p f = showFFloat p f ""
shGFloat p f = showGFloat p f ""
-- Sample values: zero, integral values, sub-unit fractions, and a
-- high-precision constant (also with a tiny exponent, which is far
-- below Float's range).
doubles :: [ Double ]
doubles = [ 0.0
, 420
, 42
, 4.2
, 0.42
, 0.042
, 1.82173691287639817263897126389712638972163
, 1.82173691287639817263897126389712638972163e-300
]
-- The same sample values at Float precision.
floats :: [ Float ]
floats = [ 0.0
, 420
, 42
, 4.2
, 0.42
, 0.042
, 1.82173691287639817263897126389712638972163
, 1.82173691287639817263897126389712638972163e-300
]
| wxwxwwxxx/ghc | libraries/base/tests/Numeric/num008.hs | bsd-3-clause | 1,672 | 0 | 22 | 624 | 556 | 280 | 276 | 46 | 1 |
module Main where
main :: IO ()
main = print "Hello, World!" -- note: 'print' emits the surrounding quotes ('show' of a String)
| fredmorcos/attic | projects/pet/archive/pet_haskell_old_parsers/Main.hs | isc | 62 | 0 | 6 | 13 | 22 | 12 | 10 | 3 | 1 |
{-# LANGUAGE DeriveFunctor
, DeriveFoldable
, DeriveTraversable
, TypeSynonymInstances
, FlexibleInstances
, FlexibleContexts
, UndecidableInstances #-}
module Abstract where
import Prelude hiding (foldr)
import Control.Monad.Reader
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.IntMap as M
import Data.Foldable
import Data.Traversable
import qualified Concrete as C
import Util
-- * Abstract names and identifiers
-- | Unique identifiers are plain Ints.
type UID = Int
-- | Names in the abstract syntax are unique.
data Name = Name
{ uid :: UID -- ^ Positive numbers come from user,
-- negative from system (e.g. quoting).
, suggestion :: C.Name -- ^ Name suggestion for printing.
}
-- Equality and ordering of names go through the 'uid' only; the
-- concrete 'suggestion' is for display and is ignored by comparisons.
instance Eq Name where
x == y = uid x == uid y
instance Ord Name where
compare x y = compare (uid x) (uid y)
instance Show Name where
show x = suggestion x ++ ":" ++ show (uid x)
-- | Local and global names.
-- All constructors share the 'name' field, so it projects uniformly.
data Ident
= Var { name :: Name } -- ^ Locally bound identifier.
| Con { name :: Name } -- ^ Declared constant.
| Def { name :: Name } -- ^ Defined identifier.
| Let { name :: Name } -- ^ Shared expression identifier.
deriving (Eq,Ord)
-- | Show the constructor tag followed by the underlying name.
instance Show Ident where
  show ident = tag ident ++ " " ++ show (name ident)
    where
      tag Var{} = "Var"
      tag Con{} = "Con"
      tag Def{} = "Def"
      tag Let{} = "Let"
-- | Constants and definitions are global; variables and let-bound
-- identifiers are local.
isGlobalIdent :: Ident -> Bool
isGlobalIdent ident = case ident of
  Var{} -> False
  Let{} -> False
  _     -> True
-- * Generating local names
-- | The canonical system-generated name: negative 'uid', no concrete
-- name suggestion.
systemGeneratedName :: Name
systemGeneratedName = Name { uid = -1, suggestion = C.noName }

-- | Shorter alias for 'systemGeneratedName'.
noName :: Name
noName = systemGeneratedName
-- | Counter for generating fresh system names; counts downward from -1
-- so system uids never collide with (positive) user uids.
type SysNameCounter = Int
initSysNameCounter :: SysNameCounter
initSysNameCounter = -1
-- | Get the next local name, as variant of an existing name.
nextSysName :: SysNameCounter -> Name -> (Name, SysNameCounter)
nextSysName i n = (n { uid = i }, i - 1)
-- | Get the next local name with no concrete representation.
nextSysName' :: SysNameCounter -> (Name, SysNameCounter)
nextSysName' i = nextSysName i noName
-- * Abstract syntax for declarations and expressions
-- | A program is a flat list of declarations.
newtype Declarations = Declarations { declarations :: [Declaration] }
data Declaration
= TypeSig Name Type -- ^ @c : A.@
| Defn Name (Maybe Type) Expr -- ^ @d : A = e.@ or @d = e.@
-- | GLet Name Expr -- ^ @[x = e].@ global shared expr. NYI.
deriving (Show)
-- Types and terms share a single expression language.
type Type = Expr
type Expr = Expression Ident
type TypeExpr id = Expression id
-- | Expressions, parameterized over the identifier type.
data Expression id
= Ident id -- ^ @x@ or @c@ or @d@
| Typ -- ^ @type@
| Pi (Maybe Name) (TypeExpr id) (TypeExpr id) -- ^ @A -> B@ or @{x:A} B@
| Lam Name (Maybe (TypeExpr id)) (Expression id) -- ^ @[x:A] E@ or @[x]E@
| App (Expression id) (Expression id) -- ^ @E1 E2@
| LLet Name (Expression id) (Expression id) -- ^ @[x = E1] E2@
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
-- * Spine view
-- | Expose application nodes so generic spine-view traversals work.
instance IsApp Expr where
isApp (App f e) = Just (f, e)
isApp _ = Nothing
-- * alpha equality
-- | Wrapper whose 'Eq'/'Ord' instances compare expressions up to
-- alpha-equivalence (bound names are identified via a renaming map).
newtype Alpha a = Alpha a
deriving (Functor)
instance Show a => Show (Alpha a) where
show (Alpha a) = show a
instance Ord (Alpha Expr) where
compare (Alpha e) (Alpha e') = aCompare e e'
instance Ord (Alpha a) => Eq (Alpha a) where
a == a' = compare a a' == EQ
-- | Renaming of bound uids, accumulated while descending under binders.
type IMap = M.IntMap UID -- map directed from left to right
type Cmp = Reader IMap
-- Each method has a default in terms of the other; instances must
-- override at least one of 'aCompare' or 'acmp'.
class OrdAlpha a where
aCompare :: a -> a -> Ordering
aCompare e1 e2 = runReader (acmp e1 e2) M.empty
acmp :: a -> a -> Cmp Ordering
acmp e1 e2 = return $ aCompare e1 e2
instance OrdAlpha Name where
acmp (Name x _) (Name y _)
| x == y = return EQ
| otherwise = do
m <- ask
-- If x is a bound uid recorded in the renaming, compare its
-- right-hand image with y; otherwise compare the raw uids.
case M.lookup x m of
Nothing -> return $ compare x y
Just y' -> return $ compare y' y
-- | Just look at UID
instance OrdAlpha Ident where
acmp x y = acmp (name x) (name y)
-- Structural comparison. Constructors are ordered
-- Ident < App < Pi-unnamed < Pi-named < Lam < LLet < Typ; at each
-- binder the pair of bound uids is pushed into the renaming map so
-- corresponding occurrences compare equal.
instance OrdAlpha Expr where
acmp e e' = case (e, e') of
(Ident x, Ident x') -> acmp x x'
(Ident _, _) -> return LT
(_, Ident _) -> return GT
(App f e, App f' e') -> lexM [ acmp f f', acmp e e' ]
(App _ _, _) -> return LT
(_, App _ _) -> return GT
(Pi Nothing a b, Pi Nothing a' b') -> acmp (a,b) (a',b')
(Pi Nothing _ _, _) -> return LT
(_, Pi Nothing _ _) -> return GT
(Pi (Just x) a b, Pi (Just x') a' b') -> lexM
[ acmp a a'
, local (M.insert (uid x) (uid x')) $ acmp b b' ]
(Pi (Just _) _ _, _) -> return LT
(_, Pi (Just _) _ _) -> return GT
(Lam x a e, Lam x' a' e') -> lexM
[ acmp a a'
, local (M.insert (uid x) (uid x')) $ acmp e e' ]
(Lam _ _ _, _) -> return LT
(_, Lam _ _ _) -> return GT
(LLet x a e, LLet x' a' e') -> lexM
[ acmp a a'
, local (M.insert (uid x) (uid x')) $ acmp e e' ]
(LLet _ _ _, _) -> return LT
(_, LLet _ _ _) -> return GT
(Typ, Typ) -> return EQ
-- Redundant:
-- (Typ, _) -> return LT
-- (_, Typ) -> return GT
-- | Lexicographic comparison.
instance (OrdAlpha a, OrdAlpha b) => OrdAlpha (a,b) where
acmp (a1,b1) (a2,b2) = lexM [acmp a1 a2, acmp b1 b2]
-- | Use only for lists of equal length!
instance (OrdAlpha a) => OrdAlpha [a] where
acmp as bs = lexM $ zipWith acmp as bs -- zipWith silently truncates, hence the warning above
instance (OrdAlpha a) => OrdAlpha (Maybe a) where
acmp Nothing Nothing = return EQ
acmp Nothing (Just _) = return LT -- Nothing sorts before Just, as in derived Ord
acmp (Just _) Nothing = return GT
acmp (Just a) (Just b) = acmp a b
-- | Lazy lexicographic combination: run each comparison in turn and
-- stop at the first non-EQ result; later actions are never executed.
lexM :: Monad m => [m Ordering] -> m Ordering
lexM = go
  where
    go []       = return EQ
    go (c : cs) = c >>= \ordering ->
      case ordering of
        EQ -> go cs
        decided -> return decided
-- * Queries
-- | All global identifiers (constants, definitions) occurring in an
-- expression, via the derived 'Foldable' instance.
globalIds :: Expr -> Set Ident
globalIds = foldr (\ n ns -> if isGlobalIdent n then Set.insert n ns else ns)
Set.empty
-- | The concrete (suggested) names of all global identifiers.
globalCNames :: Expr -> Set C.Name
globalCNames =
foldr (\ n ns -> if isGlobalIdent n then Set.insert (suggestion $ name n) ns else ns)
Set.empty
| andreasabel/helf | src/Abstract.hs | mit | 6,091 | 0 | 16 | 1,725 | 2,260 | 1,181 | 1,079 | 147 | 3 |
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# LANGUAGE RecordWildCards #-}
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import Binary (toDecimal)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs -- stop at the first failure
-- | Run 'toDecimal' over the table-driven 'cases'.
specs :: Spec
specs = describe "toDecimal" $ for_ cases test
where
-- RecordWildCards brings description/binary/expected into scope.
test Case{..} = it description assertion
where
assertion = toDecimal binary `shouldBe` fromIntegral expected
-- Some descriptions mentioning "errors" where changed to avoid confusion.
-- | One table-driven test case.
data Case = Case { description :: String -- ^ test name shown by hspec
, binary :: String -- ^ input binary numeral
, expected :: Integer -- ^ expected decimal value
}
-- | The exercise's canonical table; any invalid numeral maps to 0.
cases :: [Case]
cases = [ Case { description = "binary 0 is decimal 0"
, binary = "0"
, expected = 0
}
, Case { description = "binary 1 is decimal 1"
, binary = "1"
, expected = 1
}
, Case { description = "binary 10 is decimal 2"
, binary = "10"
, expected = 2
}
, Case { description = "binary 11 is decimal 3"
, binary = "11"
, expected = 3
}
, Case { description = "binary 100 is decimal 4"
, binary = "100"
, expected = 4
}
, Case { description = "binary 1001 is decimal 9"
, binary = "1001"
, expected = 9
}
, Case { description = "binary 11010 is decimal 26"
, binary = "11010"
, expected = 26
}
, Case { description = "binary 10001101000 is decimal 1128"
, binary = "10001101000"
, expected = 1128
}
, Case { description = "binary ignores leading zeros"
, binary = "000011111"
, expected = 31
}
, Case { description = "numbers other than one and zero return zero"
, binary = "2"
, expected = 0
}
, Case { description = "numbers other than one and zero return zero"
, binary = "012"
, expected = 0
}
, Case { description = "containing letters returns zero"
, binary = "10nope"
, expected = 0
}
, Case { description = "containing letters returns zero"
, binary = "nope10"
, expected = 0
}
, Case { description = "containing letters returns zero"
, binary = "10nope10"
, expected = 0
}
, Case { description = "containing letters returns zero"
, binary = "001 nope"
, expected = 0
}
]
| exercism/xhaskell | exercises/practice/binary/test/Tests.hs | mit | 3,073 | 0 | 8 | 1,411 | 534 | 333 | 201 | 61 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.Event
(js_stopPropagation, stopPropagation, js_preventDefault,
preventDefault, js_initEvent, initEvent,
js_stopImmediatePropagation, stopImmediatePropagation,
pattern NONE, pattern CAPTURING_PHASE, pattern AT_TARGET,
pattern BUBBLING_PHASE, pattern MOUSEDOWN, pattern MOUSEUP,
pattern MOUSEOVER, pattern MOUSEOUT, pattern MOUSEMOVE,
pattern MOUSEDRAG, pattern CLICK, pattern DBLCLICK,
pattern KEYDOWN, pattern KEYUP, pattern KEYPRESS, pattern DRAGDROP,
pattern FOCUS, pattern BLUR, pattern SELECT, pattern CHANGE,
js_getType, getType, js_getTarget, getTarget, js_getCurrentTarget,
getCurrentTarget, js_getEventPhase, getEventPhase, js_getBubbles,
getBubbles, js_getCancelable, getCancelable, js_getTimeStamp,
getTimeStamp, js_getDefaultPrevented, getDefaultPrevented,
js_getSrcElement, getSrcElement, js_setReturnValue, setReturnValue,
js_getReturnValue, getReturnValue, js_setCancelBubble,
setCancelBubble, js_getCancelBubble, getCancelBubble,
js_getClipboardData, getClipboardData, Event, castToEvent,
gTypeEvent, IsEvent, toEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.JSFFI.Generated.Enums
-- Pattern used throughout this module: a raw JavaScript FFI binding
-- (js_*) paired with a typed wrapper that upcasts via 'toEvent' and
-- lifts the call into any 'MonadIO'.
foreign import javascript unsafe "$1[\"stopPropagation\"]()"
js_stopPropagation :: Event -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.stopPropagation Mozilla Event.stopPropagation documentation>
stopPropagation :: (MonadIO m, IsEvent self) => self -> m ()
stopPropagation self = liftIO (js_stopPropagation (toEvent self))
foreign import javascript unsafe "$1[\"preventDefault\"]()"
js_preventDefault :: Event -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.preventDefault Mozilla Event.preventDefault documentation>
preventDefault :: (MonadIO m, IsEvent self) => self -> m ()
preventDefault self = liftIO (js_preventDefault (toEvent self))
foreign import javascript unsafe "$1[\"initEvent\"]($2, $3, $4)"
js_initEvent :: Event -> JSString -> Bool -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.initEvent Mozilla Event.initEvent documentation>
initEvent ::
(MonadIO m, IsEvent self, ToJSString eventTypeArg) =>
self -> eventTypeArg -> Bool -> Bool -> m ()
initEvent self eventTypeArg canBubbleArg cancelableArg
= liftIO
(js_initEvent (toEvent self) (toJSString eventTypeArg) canBubbleArg
cancelableArg)
foreign import javascript unsafe
"$1[\"stopImmediatePropagation\"]()" js_stopImmediatePropagation ::
Event -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.stopImmediatePropagation Mozilla Event.stopImmediatePropagation documentation>
stopImmediatePropagation ::
(MonadIO m, IsEvent self) => self -> m ()
stopImmediatePropagation self
= liftIO (js_stopImmediatePropagation (toEvent self))
-- Event.eventPhase values.
pattern NONE = 0
pattern CAPTURING_PHASE = 1
pattern AT_TARGET = 2
pattern BUBBLING_PHASE = 3
-- Legacy event-type bitmask values (powers of two).
pattern MOUSEDOWN = 1
pattern MOUSEUP = 2
pattern MOUSEOVER = 4
pattern MOUSEOUT = 8
pattern MOUSEMOVE = 16
pattern MOUSEDRAG = 32
pattern CLICK = 64
pattern DBLCLICK = 128
pattern KEYDOWN = 256
pattern KEYUP = 512
pattern KEYPRESS = 1024
pattern DRAGDROP = 2048
pattern FOCUS = 4096
pattern BLUR = 8192
pattern SELECT = 16384
pattern CHANGE = 32768
-- Read-only attribute accessors. Nullable DOM attributes come back as
-- 'Maybe' via 'nullableToMaybe'.
foreign import javascript unsafe "$1[\"type\"]" js_getType ::
Event -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.type Mozilla Event.type documentation>
getType ::
(MonadIO m, IsEvent self, FromJSString result) => self -> m result
getType self
= liftIO (fromJSString <$> (js_getType (toEvent self)))
foreign import javascript unsafe "$1[\"target\"]" js_getTarget ::
Event -> IO (Nullable EventTarget)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.target Mozilla Event.target documentation>
getTarget ::
(MonadIO m, IsEvent self) => self -> m (Maybe EventTarget)
getTarget self
= liftIO (nullableToMaybe <$> (js_getTarget (toEvent self)))
foreign import javascript unsafe "$1[\"currentTarget\"]"
js_getCurrentTarget :: Event -> IO (Nullable EventTarget)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.currentTarget Mozilla Event.currentTarget documentation>
getCurrentTarget ::
(MonadIO m, IsEvent self) => self -> m (Maybe EventTarget)
getCurrentTarget self
= liftIO (nullableToMaybe <$> (js_getCurrentTarget (toEvent self)))
foreign import javascript unsafe "$1[\"eventPhase\"]"
js_getEventPhase :: Event -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.eventPhase Mozilla Event.eventPhase documentation>
getEventPhase :: (MonadIO m, IsEvent self) => self -> m Word
getEventPhase self = liftIO (js_getEventPhase (toEvent self))
-- Boolean attributes are marshalled through the "(expr ? 1 : 0)" idiom.
foreign import javascript unsafe "($1[\"bubbles\"] ? 1 : 0)"
js_getBubbles :: Event -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.bubbles Mozilla Event.bubbles documentation>
getBubbles :: (MonadIO m, IsEvent self) => self -> m Bool
getBubbles self = liftIO (js_getBubbles (toEvent self))
foreign import javascript unsafe "($1[\"cancelable\"] ? 1 : 0)"
js_getCancelable :: Event -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.cancelable Mozilla Event.cancelable documentation>
getCancelable :: (MonadIO m, IsEvent self) => self -> m Bool
getCancelable self = liftIO (js_getCancelable (toEvent self))
foreign import javascript unsafe "$1[\"timeStamp\"]"
js_getTimeStamp :: Event -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.timeStamp Mozilla Event.timeStamp documentation>
getTimeStamp :: (MonadIO m, IsEvent self) => self -> m Word
getTimeStamp self = liftIO (js_getTimeStamp (toEvent self))
foreign import javascript unsafe
"($1[\"defaultPrevented\"] ? 1 : 0)" js_getDefaultPrevented ::
Event -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.defaultPrevented Mozilla Event.defaultPrevented documentation>
getDefaultPrevented :: (MonadIO m, IsEvent self) => self -> m Bool
getDefaultPrevented self
= liftIO (js_getDefaultPrevented (toEvent self))
foreign import javascript unsafe "$1[\"srcElement\"]"
js_getSrcElement :: Event -> IO (Nullable EventTarget)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.srcElement Mozilla Event.srcElement documentation>
getSrcElement ::
(MonadIO m, IsEvent self) => self -> m (Maybe EventTarget)
getSrcElement self
= liftIO (nullableToMaybe <$> (js_getSrcElement (toEvent self)))
-- Legacy writable properties (returnValue, cancelBubble) plus the
-- clipboardData accessor.
foreign import javascript unsafe "$1[\"returnValue\"] = $2;"
js_setReturnValue :: Event -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.returnValue Mozilla Event.returnValue documentation>
setReturnValue :: (MonadIO m, IsEvent self) => self -> Bool -> m ()
setReturnValue self val
= liftIO (js_setReturnValue (toEvent self) val)
foreign import javascript unsafe "($1[\"returnValue\"] ? 1 : 0)"
js_getReturnValue :: Event -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.returnValue Mozilla Event.returnValue documentation>
getReturnValue :: (MonadIO m, IsEvent self) => self -> m Bool
getReturnValue self = liftIO (js_getReturnValue (toEvent self))
foreign import javascript unsafe "$1[\"cancelBubble\"] = $2;"
js_setCancelBubble :: Event -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.cancelBubble Mozilla Event.cancelBubble documentation>
setCancelBubble ::
(MonadIO m, IsEvent self) => self -> Bool -> m ()
setCancelBubble self val
= liftIO (js_setCancelBubble (toEvent self) val)
foreign import javascript unsafe "($1[\"cancelBubble\"] ? 1 : 0)"
js_getCancelBubble :: Event -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.cancelBubble Mozilla Event.cancelBubble documentation>
getCancelBubble :: (MonadIO m, IsEvent self) => self -> m Bool
getCancelBubble self = liftIO (js_getCancelBubble (toEvent self))
foreign import javascript unsafe "$1[\"clipboardData\"]"
js_getClipboardData :: Event -> IO (Nullable DataTransfer)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Event.clipboardData Mozilla Event.clipboardData documentation>
getClipboardData ::
(MonadIO m, IsEvent self) => self -> m (Maybe DataTransfer)
getClipboardData self
= liftIO (nullableToMaybe <$> (js_getClipboardData (toEvent self)))
{-# LANGUAGE OverloadedStrings, CPP #-}
module Main (main) where
import Control.Exception(try, IOException)
import Control.Monad (when)
import System.Console.GetOpt
import Utils.Executable
import Utils.Text
import Madl.Network
import qualified Parser.MadlAST as AST (Network)
import Parser.MadlParser (madlParser)
import Parser.IncludeStatement
import Parser.MadlTypeChecker
import Parser.ASTTranslator
-- | Error function
-- | Module tag used in fatal error messages.
fileName :: Text
fileName = "parser-main"
-- | Abort with a numbered internal error (the number locates the call site).
fatal :: Int -> Text -> a
fatal i s = error ("Fatal "++show i++" in " ++utxt fileName ++":\n "++utxt s)
-- | Source-location tag: (module name, number).
src :: Int -> (Text, Int)
src i = (fileName, i)
-- Reference 'fatal' and 'src' so warnings stay quiet when they are unused.
_okWhenNotUsed :: a
_okWhenNotUsed = undefined fatal src
-- End of error function
-------------------------
-- CommandLine Options
-------------------------
-- | Parsed command-line flags.
data CommandLineOptions = CommandLineOptions {
argNetwork :: FilePath, -- ^ path of the MaDL input file (mandatory)
typeCheck :: Bool, -- ^ run the typechecker after parsing
checkValid :: Bool, -- ^ validate the resulting Madl network
showParseResult :: Bool, -- ^ print the AST
showNetworkSpecification :: Bool, -- ^ print the network specification
showResultingNetwork :: Bool -- ^ print the network datastructure
}
-- | Defaults: typechecking on, all other switches off, no input file.
defaultOptions :: CommandLineOptions
defaultOptions = CommandLineOptions {
argNetwork = "",
typeCheck = True,
checkValid = False,
showParseResult = False,
showNetworkSpecification = False,
showResultingNetwork = False
}
-- | GetOpt flag descriptors; each handler updates the options record.
exeOptions :: [OptDescr (CommandLineOptions -> CommandLineOptions)]
exeOptions =
[ Option "f" ["network"]
(ReqArg (\f opts -> opts {argNetwork = f}) "MaDLFILE")
"MaDL file name (mandatory).\n"
, Option "t" ["typecheck"]
(NoArg (\opts -> opts {typeCheck = True}))
"Typecheck the parsed file (default)."
, Option "" ["no-typecheck"]
(NoArg (\opts -> opts {typeCheck = False}))
"Don't typecheck the parsed file."
, Option "c" ["check"]
(NoArg (\opts -> opts {checkValid = True}))
"Check whether the parsed network is a valid Madl Network.\n"
, Option "p" ["show-parse"]
(NoArg (\opts -> opts {showParseResult = True}))
"Print the parse tree (AST)."
, Option "s" ["show-network-specification"]
(NoArg (\opts -> opts {showNetworkSpecification = True}))
"Print the network specification."
, Option "n" ["show-network"]
(NoArg (\opts -> opts {showResultingNetwork = True}))
"Print the network datastructure."
, Option "v" ["verbose"]
(NoArg (\opts -> opts {showParseResult = True, showNetworkSpecification = True, showResultingNetwork = True}))
"Print the parse tree, the network specification, and the network datastructure.\n"
]
-------------------------
-- Main entry point
-------------------------
-- | Parse the command line, read and parse the MaDL file, strip include
-- statements, optionally typecheck, then hand the AST to 'processAST'.
-- Every failure is reported via 'ioError' with a descriptive message.
main :: IO ()
main = do
opts <- parseArgs exeOptions defaultOptions
when (null $ argNetwork opts) $ ioError . userError $ "Provide an input file using -f or --network."
-- NOTE(review): lazy 'readFile' may defer read errors past this 'try';
-- the ones surfacing at open time are caught here.
contentsOrError <- try . readFile $ argNetwork opts
case contentsOrError of
Left err -> ioError . userError $ "Couldn't read file " ++ argNetwork opts ++ ": " ++ show (err :: IOException)
Right contents -> case madlParser (argNetwork opts) contents of
Left err -> ioError . userError $ "Error parsing file " ++ argNetwork opts ++ ": " ++ show err
Right ast -> do
putStrLn "File was succesfully parsed."
astOrError <- removeIncludeStatements (argNetwork opts) ast
case astOrError of
Left err -> ioError $ userError err
Right ast' -> if not (typeCheck opts) then processAST opts ast'
else case typecheckNetwork ast' of
Left err -> ioError . userError $ "Error typechecking file " ++ argNetwork opts ++ ":\n" ++ showError err
Right ast'' -> do
putStrLn "File was succesfully typechecked."
processAST opts ast''
-- | Translate the (possibly typechecked) AST to a network
-- specification, build the network datastructure, and print whatever
-- the command-line flags requested.
processAST :: CommandLineOptions -> AST.Network -> IO()
processAST opts ast = do
let networkspec = networkSpecification $ translateNetwork ast
when (showParseResult opts) $ putStrLn $ "Parse tree:\n" ++ show ast
when (showNetworkSpecification opts) $ putStrLn $ "Network specification:\n" ++ utxt (printNetworkSpecification networkspec)
let net = mkNetwork networkspec
when (checkValid opts) $ do
case validMaDLNetwork net of
Just err -> putStrLn $ "The specified network is not valid:\n" ++ err
Nothing -> putStrLn "The specified network is valid."
when (showResultingNetwork opts) $ putStrLn $ "Network datastructure:\n" ++ show net
| julienschmaltz/madl | app/Parser/parser-main.hs | mit | 4,575 | 0 | 26 | 1,133 | 1,169 | 616 | 553 | 93 | 6 |
module Feature.QueryLimitedSpec where
import Test.Hspec hiding (pendingWith)
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.HTTP.Types
import Network.Wai.Test (SResponse(simpleHeaders, simpleStatus))
import Text.Heredoc
import SpecHelper
import Network.Wai (Application)
import Protolude hiding (get)
-- | Server-side row limiting: results are capped at the configured
-- maximum (Content-Range "0-1/*"), client Range headers may narrow
-- further, the cap applies at every embedding level for child rows,
-- but parent embeds are not truncated.
spec :: SpecWith Application
spec =
describe "Requesting many items with server limits enabled" $ do
it "restricts results" $
get "/items"
`shouldRespondWith` [json| [{"id":1},{"id":2}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-1/*"]
}
it "respects additional client limiting" $ do
r <- request methodGet "/items"
(rangeHdrs $ ByteRangeFromTo 0 0) ""
liftIO $ do
simpleHeaders r `shouldSatisfy`
matchHeader "Content-Range" "0-0/*"
simpleStatus r `shouldBe` ok200
it "limit works on all levels" $
get "/users?select=id,tasks{id}&order=id.asc&tasks.order=id.asc"
`shouldRespondWith` [str|[{"id":1,"tasks":[{"id":1},{"id":2}]},{"id":2,"tasks":[{"id":5},{"id":6}]}]|]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-1/*"]
}
it "limit is not applied to parent embeds" $
get "/tasks?select=id,project{id}&id=gt.5"
`shouldRespondWith` [str|[{"id":6,"project":{"id":3}},{"id":7,"project":{"id":4}}]|]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-1/*"]
}
| Skyfold/postgrest | test/Feature/QueryLimitedSpec.hs | mit | 1,514 | 0 | 15 | 341 | 306 | 175 | 131 | -1 | -1 |
module Control.Concurrent.Extensions where
import Control.Concurrent as Concurrent
import Control.Concurrent.MVar as MVar
import Control.DeepSeq as DeepSeq
import Control.Exception.Base
import Data.Either as Either
import Data.Either.Extensions as EitherExt
import Prelude.Extensions as PreludeExt
-- | Run an IO action on a freshly forked thread and hand back the thread id
-- together with an 'MVar' that is filled exactly once with either the
-- action's result or the exception that terminated it.
forkFuture :: IO a -> IO (ThreadId, MVar (Either SomeException a))
forkFuture action = do
    resultVar <- newEmptyMVar
    threadId <- forkFinally action (putMVar resultVar)
    return (threadId, resultVar)
-- | Like 'forkFuture', but forces a successful result to normal form on the
-- forked thread before publishing it, so consumers never pay the evaluation
-- cost (or trip over lazy exceptions) at 'takeMVar' time.
--
-- NOTE(review): 'EitherExt.right' is partial, but 'strict_result' is only
-- demanded when the outcome really is a 'Right' (via the 'ifElse' guard and
-- laziness) -- presumably intentional; confirm 'ifElse' is non-strict in the
-- branch it does not select.
forkStrictFuture :: NFData a => IO a -> IO (ThreadId, MVar (Either SomeException a))
forkStrictFuture = \io -> do
    future <- newEmptyMVar
    let {complete = \exception_or_result -> do
        -- Force the Right payload to NF; exceptions are passed through as-is.
        let strict_result = (DeepSeq.force (EitherExt.right exception_or_result))
        let strict_either = (ifElse (Either.isRight exception_or_result) (Right strict_result) exception_or_result)
        (putMVar future strict_either)}
    thread_id <- (forkFinally io complete)
    (return (thread_id, future))
-- | Run the action synchronously on the calling thread, but present its
-- outcome through the same @(ThreadId, MVar)@ interface as 'forkFuture'.
--
-- Fixed: exceptions thrown by the action are now captured with 'try' and
-- stored as 'Left', matching the declared @Either SomeException a@ type and
-- the behaviour of 'forkFuture'. Previously any exception simply propagated
-- and the 'Left' case was unreachable. Also renamed the binding that
-- shadowed 'Prelude.id'.
nonForkedFuture :: IO a -> IO (ThreadId, MVar (Either SomeException a))
nonForkedFuture = \io -> do
    thread_id <- myThreadId
    result <- try io
    future <- newMVar result
    (return (thread_id, future))
| stevedonnelly/haskell | code/Control/Concurrent/Extensions.hs | mit | 1,282 | 2 | 22 | 226 | 421 | 222 | 199 | 29 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.Geolocation
(getCurrentPosition, watchPosition, watchPosition_, clearWatch,
Geolocation(..), gTypeGeolocation)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/Geolocation.getCurrentPosition Mozilla Geolocation.getCurrentPosition documentation>
getCurrentPosition ::
                   (MonadDOM m) =>
                     Geolocation ->
                       PositionCallback ->
                         Maybe PositionErrorCallback -> Maybe PositionOptions -> m ()
getCurrentPosition self successCallback errorCallback options
  -- The result is delivered via the callbacks, so the JS return is dropped.
  = liftDOM
      (void
         (self ^. jsf "getCurrentPosition"
            [toJSVal successCallback, toJSVal errorCallback, toJSVal options]))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/Geolocation.watchPosition Mozilla Geolocation.watchPosition documentation>
watchPosition ::
              (MonadDOM m) =>
                Geolocation ->
                  PositionCallback ->
                    Maybe PositionErrorCallback -> Maybe PositionOptions -> m Int
watchPosition self successCallback errorCallback options
  -- Returns the numeric watch id (for 'clearWatch'); JS numbers arrive as
  -- doubles, hence 'valToNumber' followed by 'round'.
  = liftDOM
      (round <$>
         ((self ^. jsf "watchPosition"
             [toJSVal successCallback, toJSVal errorCallback, toJSVal options])
            >>= valToNumber))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/Geolocation.watchPosition Mozilla Geolocation.watchPosition documentation>
watchPosition_ ::
               (MonadDOM m) =>
                 Geolocation ->
                   PositionCallback ->
                     Maybe PositionErrorCallback -> Maybe PositionOptions -> m ()
watchPosition_ self successCallback errorCallback options
  -- Variant of 'watchPosition' that discards the watch id.
  = liftDOM
      (void
         (self ^. jsf "watchPosition"
            [toJSVal successCallback, toJSVal errorCallback, toJSVal options]))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/Geolocation.clearWatch Mozilla Geolocation.clearWatch documentation>
clearWatch :: (MonadDOM m) => Geolocation -> Int -> m ()
clearWatch self watchId
  = liftDOM (void (self ^. jsf "clearWatch" [toJSVal watchId]))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/Geolocation.hs | mit | 3,019 | 0 | 14 | 620 | 660 | 382 | 278 | 53 | 1 |
module Language.Haskell.Names (
isName,
name,
qName,
declName) where
import Language.Haskell.Syntax
-- | True iff the rendered name equals the given string.
isName :: String -> HsName -> Bool
isName wanted = (wanted ==) . name
-- | The raw string underlying either kind of 'HsName'.
name :: HsName -> String
name hsName = case hsName of
    HsSymbol s -> s
    HsIdent s -> s
-- | The unqualified string of a 'HsQName'; 'Nothing' for special
-- constructors, which are not plain identifiers.
qName :: HsQName -> Maybe String
qName qn = case qn of
    Qual _ hsName -> Just (name hsName)
    UnQual hsName -> Just (name hsName)
    _ -> Nothing
-- | Best-effort name of a declaration.
--
-- Handles single-name type signatures, type synonyms, simple variable
-- pattern bindings and function bindings (name taken from the first clause).
-- Every other form -- including signatures declaring several names at
-- once -- silently yields the empty string.
declName :: HsDecl -> String
declName (HsTypeSig _ [b] _) = name b
declName (HsTypeDecl _ b _ _) = name b
declName (HsPatBind _ (HsPVar n) _ _) = name n
-- NOTE(review): relies on 'head'; presumably HsFunBind always carries at
-- least one clause -- confirm against the AST's invariants.
declName (HsFunBind cases) | (HsMatch _ n _ _ _) <- head cases = name n
declName _ = []
| flbulgarelli/hs-inspector | src/Language/Haskell/Names.hs | mit | 667 | 0 | 10 | 168 | 301 | 152 | 149 | 21 | 1 |
{-# LANGUAGE BangPatterns #-}
module Tools.Mill.Query
( Query
, parseQuery
, passTest
, Test (..)
) where
import Data.Either
import Tools.Mill.Table
import Text.Parsec.Prim
import Text.Parsec.Error
import Text.Parsec.Char
import Text.Parsec.Combinator
import Text.Parsec.ByteString (GenParser)
import Data.Map
import Control.Applicative ((<$>), (<*>), (*>), (<*))
import Data.ByteString(ByteString,hGetLine,hGetContents,pack,unpack)
import Data.ByteString.Lex.Double
import qualified Data.ByteString.Char8 as C
type QueryAtom = (Colname, Test)
type Query = Map Colname Test
data Test = EqualString ByteString
| NotEqualString ByteString
| GreaterEqual Double
| GreaterThan Double
| LowerEqual Double
| LowerThan Double
deriving (Show,Eq)
-- | Apply a compiled 'Test' to one raw cell value.
--
-- String tests compare the bytes directly; numeric tests parse the cell
-- with 'read'' first (which calls 'error' on non-numeric input).  The SCC
-- annotations exist for cost-centre profiling of each comparison kind.
passTest :: Test -> ByteString -> Bool
passTest (EqualString !s1) s2 = {-# SCC "passTest.=" #-} s1 == s2
passTest (NotEqualString !s1)s2 = {-# SCC "passTest.!=" #-} s1 /= s2
passTest (GreaterEqual !s1) s2 = {-# SCC "passTest.>=" #-} (read' s2) >= s1
passTest (GreaterThan !s1) s2 = {-# SCC "passTest.>" #-} (read' s2) > s1
passTest (LowerEqual !s1) s2 = {-# SCC "passTest.<=" #-} (read' s2) <= s1
passTest (LowerThan !s1) s2 = {-# SCC "passTest.<" #-} (read' s2) < s1
-- | Parse a 'ByteString' as a 'Double'.
--
-- Calls 'error' when the input is not a readable double; the message now
-- includes the offending input so the failing cell can be identified.
read' :: ByteString -> Double
read' s = case unsafeReadDouble s of
    Nothing -> error ("cannot read as double: " ++ C.unpack s)
    Just (d,_) -> d
-- | Build the query map from parsed atoms.  With duplicate column names,
-- 'fromList' retains the last test given for that column.
mergeAtoms :: [(Colname, Test)] -> Query
mergeAtoms atoms = fromList atoms
-- | Parse a whitespace-separated list of @column OP value@ atoms into a
-- 'Query' (e.g. @\"age>=21 name!=bob\"@).
parseQuery :: ByteString -> Either ParseError Query
parseQuery = runParser queryParser () ""
-- | Zero or more atoms, merged into one map.
queryParser :: GenParser Char st Query
queryParser = mergeAtoms <$> many queryAtomParser

-- | One @column OP value@ atom.
queryAtomParser :: GenParser Char st QueryAtom
queryAtomParser = (,) <$> colname <*> test

-- Column names run until an operator or space character.
colname = C.pack <$> many1 (noneOf "!=>< ")

-- Operators are tried longest-first ('try' backtracks), so ">=" is matched
-- before ">" and "<=" before "<".
test = try equal
   <|> try notequal
   <|> try gte
   <|> try gt
   <|> try lte
   <|> try lt
   <|> fail "no such test"

equal = EqualString . C.pack <$> ((op "=") *> opVal)
notequal = NotEqualString . C.pack <$> ((op "!=") *> opVal)
gte = GreaterEqual . read <$> ((op ">=") *> number)
gt = GreaterThan . read <$> ((op ">") *> number)
lte = LowerEqual . read <$> ((op "<=") *> number)
lt = LowerThan . read <$> ((op "<") *> number)

-- An operator may be surrounded by optional whitespace.
op v = between (optional spaces) (optional spaces) (string v)

opVal = many1 (noneOf " ")

-- Floats are tried before ints so "1.5" is not cut short at "1".
number = try stringFloat
     <|> try stringInt
     <?> "number"

stringInt = negativeInt <|> positiveInt
positiveInt = many1 digit
negativeInt = ('-':) <$> (char '-' *> positiveInt)

stringFloat = negativeFloat <|> positiveFloat
-- Accepts "1.5", "1e5" and "1e-5" shapes: mantissa, separator, digits.
positiveFloat = do
    int <- many1 digit
    sep <- string "." <|> try (string "e-") <|> string "e" <?> "float form"
    frac <- many1 digit
    return $ concat [int, sep, frac]
negativeFloat = ('-':) <$> (char '-' *> positiveFloat)
| lucasdicioccio/mill | Tools/Mill/Query.hs | mit | 2,924 | 0 | 13 | 678 | 981 | 529 | 452 | 75 | 2 |
{-# LANGUAGE CPP, RankNTypes #-}
-- | Functions for interacting with bytes.
module Data.Conduit.Binary
( -- * Files and @Handle@s
-- | Note that most of these functions live in the @MonadResource@ monad
-- to ensure resource finalization even in the presence of exceptions. In
-- order to run such code, you will need to use @runResourceT@.
-- ** Sources
sourceFile
, sourceHandle
, sourceIOHandle
, sourceFileRange
, sourceHandleRange
-- ** Sinks
, sinkFile
, sinkHandle
, sinkIOHandle
-- ** Conduits
, conduitFile
-- * Utilities
-- ** Sources
, sourceLbs
-- ** Sinks
, head
, dropWhile
, take
, drop
, sinkCacheLength
, sinkLbs
-- ** Conduits
, isolate
, takeWhile
, Data.Conduit.Binary.lines
) where
import Prelude hiding (head, take, drop, takeWhile, dropWhile)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import Data.Conduit
import Data.Conduit.List (sourceList, consume)
import Control.Exception (assert, finally)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO, MonadIO)
import Control.Monad.Trans.Resource (allocate, release)
import qualified System.IO as IO
import Data.Word (Word8, Word64)
import Control.Applicative ((<$>))
import System.Directory (getTemporaryDirectory, removeFile)
import Data.ByteString.Lazy.Internal (defaultChunkSize)
#if CABAL_OS_WINDOWS
import qualified System.Win32File as F
#elif NO_HANDLES
import qualified System.PosixFile as F
#endif
-- | Stream the contents of a file as binary data.
--
-- Since 0.3.0
sourceFile :: MonadResource m
           => FilePath
           -> Producer m S.ByteString
sourceFile fp =
#if CABAL_OS_WINDOWS || NO_HANDLES
    -- Platform-specific file API: pull chunks until 'F.read' reports EOF
    -- ('Nothing'); 'bracketP' guarantees the close even on exceptions.
    bracketP
        (F.openRead fp)
        F.close
        loop
  where
    loop h = liftIO (F.read h) >>= maybe (return ()) (\bs -> yield bs >> loop h)
#else
    sourceIOHandle (IO.openBinaryFile fp IO.ReadMode)
#endif
-- | Stream the contents of a 'IO.Handle' as binary data. Note that this
-- function will /not/ automatically close the @Handle@ when processing
-- completes, since it did not acquire the @Handle@ in the first place.
--
-- Since 0.3.0
-- Pull chunks of up to 'defaultChunkSize' bytes from the handle and yield
-- them downstream; an empty read signals EOF and ends the stream.  The
-- handle is deliberately left open (see the haddock above).
sourceHandle :: MonadIO m
             => IO.Handle
             -> Producer m S.ByteString
sourceHandle h = go
  where
    go = do
        chunk <- liftIO (S.hGetSome h defaultChunkSize)
        unless (S.null chunk) $ do
            yield chunk
            go
-- | An alternative to 'sourceHandle'.
-- Instead of taking a pre-opened 'IO.Handle', it takes an action that opens
-- a 'IO.Handle' (in read mode), so that it can open it only when needed
-- and closed it as soon as possible.
--
-- Since 0.3.0
sourceIOHandle :: MonadResource m
               => IO IO.Handle
               -> Producer m S.ByteString
-- 'bracketP' ties the handle's lifetime to the stream's.
sourceIOHandle alloc = bracketP alloc IO.hClose sourceHandle

-- | Stream all incoming data to the given 'IO.Handle'. Note that this function
-- will /not/ automatically close the @Handle@ when processing completes.
--
-- Since 0.3.0
sinkHandle :: MonadIO m
           => IO.Handle
           -> Consumer S.ByteString m ()
sinkHandle h = awaitForever $ liftIO . S.hPut h

-- | An alternative to 'sinkHandle'.
-- Instead of taking a pre-opened 'IO.Handle', it takes an action that opens
-- a 'IO.Handle' (in write mode), so that it can open it only when needed
-- and close it as soon as possible.
--
-- Since 0.3.0
sinkIOHandle :: MonadResource m
             => IO IO.Handle
             -> Consumer S.ByteString m ()
-- Mirror image of 'sourceIOHandle' for the write side.
sinkIOHandle alloc = bracketP alloc IO.hClose sinkHandle
-- | Stream the contents of a file as binary data, starting from a certain
-- offset and only consuming up to a certain number of bytes.
--
-- Since 0.3.0
sourceFileRange :: MonadResource m
                => FilePath
                -> Maybe Integer -- ^ Offset
                -> Maybe Integer -- ^ Maximum count
                -> Producer m S.ByteString
sourceFileRange fp offset count = bracketP
    (IO.openBinaryFile fp IO.ReadMode)
    IO.hClose
    (\h -> sourceHandleRange h offset count)

-- | Stream the contents of a handle as binary data, starting from a certain
-- offset and only consuming up to a certain number of bytes.
--
-- Since 1.0.8
sourceHandleRange :: MonadIO m
                  => IO.Handle
                  -> Maybe Integer -- ^ Offset
                  -> Maybe Integer -- ^ Maximum count
                  -> Producer m S.ByteString
sourceHandleRange handle offset count = do
    case offset of
        Nothing -> return ()
        Just off -> liftIO $ IO.hSeek handle IO.AbsoluteSeek off
    case count of
        Nothing -> pullUnlimited
        Just c -> pullLimited (fromInteger c)
  where
    -- NOTE(review): these loops read 4096-byte chunks, unlike 'sourceHandle'
    -- which uses 'defaultChunkSize' -- presumably intentional; confirm.
    pullUnlimited = do
        bs <- liftIO $ S.hGetSome handle 4096
        if S.null bs
            then return ()
            else do
                yield bs
                pullUnlimited
    -- c tracks the number of bytes still allowed; the assert documents that
    -- 'hGetSome' never returns more than requested.
    pullLimited c = do
        bs <- liftIO $ S.hGetSome handle (min c 4096)
        let c' = c - S.length bs
        assert (c' >= 0) $
            if S.null bs
                then return ()
                else do
                    yield bs
                    pullLimited c'

-- | Stream all incoming data to the given file.
--
-- Since 0.3.0
sinkFile :: MonadResource m
         => FilePath
         -> Consumer S.ByteString m ()
#if NO_HANDLES
sinkFile fp =
    bracketP
        (F.openWrite fp)
        F.close
        loop
  where
    loop h = awaitForever $ liftIO . F.write h
#else
sinkFile fp = sinkIOHandle (IO.openBinaryFile fp IO.WriteMode)
#endif
-- | Stream the contents of the input to a file, and also send it along the
-- pipeline. Similar in concept to the Unix command @tee@.
--
-- Since 0.3.0
-- Write every chunk to the file and pass it through unchanged (tee);
-- 'bracketP' closes the handle when the stream finishes or fails.
conduitFile :: MonadResource m
            => FilePath
            -> Conduit S.ByteString m S.ByteString
conduitFile fp =
    bracketP (IO.openBinaryFile fp IO.WriteMode) IO.hClose tee
  where
    tee h = awaitForever $ \chunk -> do
        liftIO $ S.hPut h chunk
        yield chunk
-- | Ensure that only up to the given number of bytes are consume by the inner
-- sink. Note that this does /not/ ensure that all of those bytes are in fact
-- consumed.
--
-- Since 0.3.0
isolate :: Monad m
        => Int
        -> Conduit S.ByteString m S.ByteString
isolate =
    loop
  where
    loop 0 = return ()
    loop count = do
        mbs <- await
        case mbs of
            Nothing -> return ()
            Just bs -> do
                let (a, b) = S.splitAt count bs
                case count - S.length a of
                    0 -> do
                        -- The chunk reached (or overshot) the limit: yield
                        -- the first @count@ bytes, push the rest back.
                        unless (S.null b) $ leftover b
                        yield a
                    count' -> assert (S.null b) $ yield a >> loop count'

-- | Return the next byte from the stream, if available.
--
-- Since 0.3.0
head :: Monad m => Consumer S.ByteString m (Maybe Word8)
head = do
    mbs <- await
    case mbs of
        Nothing -> return Nothing
        Just bs ->
            case S.uncons bs of
                Nothing -> head  -- skip empty chunks
                Just (w, bs') -> leftover bs' >> return (Just w)

-- | Return all bytes while the predicate returns @True@.
--
-- Since 0.3.0
takeWhile :: Monad m => (Word8 -> Bool) -> Conduit S.ByteString m S.ByteString
takeWhile p =
    loop
  where
    loop = await >>= maybe (return ()) go
    go bs
        | S.null x = next
        | otherwise = yield x >> next
      where
        -- A non-empty y means the predicate failed inside this chunk: stop
        -- looping and push the remainder back upstream.
        next = if S.null y then loop else leftover y
        (x, y) = S.span p bs

-- | Ignore all bytes while the predicate returns @True@.
--
-- Since 0.3.0
dropWhile :: Monad m => (Word8 -> Bool) -> Consumer S.ByteString m ()
dropWhile p =
    loop
  where
    loop = do
        mbs <- await
        case S.dropWhile p <$> mbs of
            Nothing -> return ()
            Just bs
                | S.null bs -> loop
                | otherwise -> leftover bs

-- | Take the given number of bytes, if available.
--
-- Since 0.3.0
take :: Monad m => Int -> Consumer S.ByteString m L.ByteString
take 0 = return L.empty
take n0 = go n0 id
  where
    -- front is a difference list accumulating the chunks consumed so far.
    go n front =
        await >>= maybe (return $ L.fromChunks $ front []) go'
      where
        go' bs =
            case S.length bs `compare` n of
                LT -> go (n - S.length bs) (front . (bs:))
                EQ -> return $ L.fromChunks $ front [bs]
                GT ->
                    let (x, y) = S.splitAt n bs
                    in assert (not $ S.null y) $ leftover y >> return (L.fromChunks $ front [x])

-- | Drop up to the given number of bytes.
--
-- Since 0.5.0
drop :: Monad m => Int -> Consumer S.ByteString m ()
drop 0 = return ()
drop n0 = go n0
  where
    go n =
        await >>= maybe (return ()) go'
      where
        go' bs =
            case S.length bs `compare` n of
                LT -> go (n - S.length bs)
                EQ -> return ()
                GT ->
                    let y = S.drop n bs
                    in assert (not $ S.null y) $ leftover y >> return ()
-- | Split the input bytes into lines. In other words, split on the LF byte
-- (10), and strip it from the output.
--
-- Since 0.3.0
lines :: Monad m => Conduit S.ByteString m S.ByteString
lines =
    loop id
  where
    -- The accumulator is an append function (difference-list style)
    -- holding the partial line carried over between chunks.
    loop front = await >>= maybe (finish front) (go front)
    finish front =
        let final = front S.empty
        in unless (S.null final) (yield final)
    go sofar more =
        case S.uncons second of
            Just (_, second') -> yield (sofar first) >> go id second'
            Nothing ->
                let rest = sofar more
                in loop $ S.append rest
      where
        -- Split at the first LF (byte 10); the LF itself is dropped by
        -- the 'uncons' above.
        (first, second) = S.breakByte 10 more

-- | Stream the chunks from a lazy bytestring.
--
-- Since 0.5.0
sourceLbs :: Monad m => L.ByteString -> Producer m S.ByteString
sourceLbs = sourceList . L.toChunks

-- | Stream the input data into a temp file and count the number of bytes
-- present. When complete, return a new @Source@ reading from the temp file
-- together with the length of the input in bytes.
--
-- All resources will be cleaned up automatically.
--
-- Since 1.0.5
sinkCacheLength :: (MonadResource m1, MonadResource m2)
                => Sink S.ByteString m1 (Word64, Source m2 S.ByteString)
sinkCacheLength = do
    tmpdir <- liftIO getTemporaryDirectory
    (releaseKey, (fp, h)) <- allocate
        (IO.openBinaryTempFile tmpdir "conduit.cache")
        (\(fp, h) -> IO.hClose h `finally` removeFile fp)
    len <- sinkHandleLen h
    liftIO $ IO.hClose h
    -- The returned source removes the temp file after it is fully consumed.
    return (len, sourceFile fp >> release releaseKey)
  where
    sinkHandleLen :: MonadResource m => IO.Handle -> Sink S.ByteString m Word64
    sinkHandleLen h =
        loop 0
      where
        loop x =
            await >>= maybe (return x) go
          where
            go bs = do
                liftIO $ S.hPut h bs
                loop $ x + fromIntegral (S.length bs)

-- | Consume a stream of input into a lazy bytestring. Note that no lazy I\/O
-- is performed, but rather all content is read into memory strictly.
--
-- Since 1.0.5
sinkLbs :: Monad m => Sink S.ByteString m L.ByteString
sinkLbs = fmap L.fromChunks consume
| moonKimura/conduit-1.0.8 | Data/Conduit/Binary.hs | mit | 11,171 | 0 | 23 | 3,433 | 2,768 | 1,420 | 1,348 | 217 | 5 |
{-
Fibonacci sequence
-}
module Fib where
import Data.List
import Data.Bits
-- | The infinite Fibonacci sequence: 0, 1, 1, 2, 3, 5, ...
fibs :: [Integer]
fibs = go 0 1
  where
    go a b = a : go b (a + b)
-- | A spine-strict 'zipWith': each combined element is reduced to WHNF
-- (via 'seq') before it is consed onto the result, so no thunks build up.
zipWith' f (x:xs) (y:ys) = z `seq` z : zipWith' f xs ys
  where
    z = f x y
zipWith' _ _ _ = []
-- | Fibonacci sequence built with the strict 'zipWith'': each element is
-- forced as the list is constructed, avoiding the thunk chains the lazy
-- 'fibs' can accumulate.
--
-- Fixed to recurse on itself ('fibs'') rather than on the lazy 'fibs'.
-- The produced values are identical either way, but referring to 'fibs'
-- meant the additions were performed on the lazy list, defeating the
-- purpose of the strict variant.
fibs' :: [Integer]
fibs' = 0 : 1 : zipWith' (+) fibs' (tail fibs')
-- | O(log n) Fibonacci via the fast-doubling identities, consuming the bits
-- of @n@ from most- to least-significant.
--
-- After reading prefix k, the pair (f, g) holds (F(k+1), F(k)); a 1-bit
-- advances to (F(2k+2), F(2k+1)), a 0-bit to (F(2k+1), F(2k)).  The
-- 'dropWhile not' skips leading zero bits, so @fib 0@ folds over an empty
-- list and returns the seed's snd, 0.
--
-- NOTE(review): behaviour for negative @n@ is undefined here; the bit walk
-- assumes a non-negative argument -- confirm callers respect that.
fib :: Int -> Integer
fib n = snd . foldl' fib' (1, 0) . dropWhile not $
    [testBit n k | k <- let s = finiteBitSize n in [s-1,s-2..0]]
  where
    fib' (f, g) p
        | p = (f*(f+2*g), ss)
        | otherwise = (ss, g*(2*f-g))
      where ss = f*f+g*g
-- | Fast-doubling Fibonacci like 'fib', but obtains the bits of @n@ by
-- repeated division ('divMod' 2) instead of 'testBit', so it does not
-- depend on the word size.
fib1 :: Int -> Integer
fib1 n = snd . foldl fib' (1, 0) . map (toEnum . fromIntegral) $ unfoldl divs n
  where
    -- Peels bits least-significant first, appending so the fold sees the
    -- most-significant bit first.
    unfoldl f x = case f x of
        Nothing -> []
        Just (u, v) -> unfoldl f v ++ [u]
    divs 0 = Nothing
    -- flip swaps (quotient, remainder) to (remainder, quotient).
    divs k = Just (uncurry (flip (,)) (k `divMod` 2))
    fib' (f, g) p
        | p = (f*(f+2*g), f^2 + g^2)
        | otherwise = (f^2+g^2, g*(2*f-g))
| kittttttan/pe | haskell/Fib.hs | mit | 1,164 | 0 | 13 | 504 | 590 | 311 | 279 | 27 | 3 |
{--
Problem 40
An irrational decimal fraction is created by concatenating the positive integers:
0.123456789101112131415161718192021...
It can be seen that the 12th digit of the fractional part is 1.
If dn represents the nth digit of the fractional part, find the value of the following expression.
d1 × d10 × d100 × d1000 × d10000 × d100000 × d1000000
-}
import Data.Char
-- | Product d1 * d10 * ... * d1000000 of the fractional digits of
-- Champernowne's constant 0.123456789101112...
euler40 = product [nthDigit (10 ^ k) | k <- [0 .. 6]]
    where nthDigit i = digitToInt (champernowne !! (i - 1))
          champernowne = concatMap show [1 ..]
| RossMeikleham/Project-Euler-Haskell | 40.hs | mit | 530 | 1 | 11 | 110 | 87 | 46 | 41 | 4 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Cipher where
import Control.Monad (return)
import Data.Bool (otherwise)
import Data.Char (Char)
import Data.Int (Int)
import Data.List (map)
import Data.Ord ((>), (<))
import Data.String (String)
import Prelude (($), (-), pred, succ)
import System.IO (IO, getLine, putStrLn)
import Text.Read (read)
-- | Apply the stepping function @n@ times to a character, wrapping around
-- the lowercase alphabet: stepping past 'z' re-enters at 'a' and stepping
-- before 'a' re-enters at 'z'.  (Pattern-matching on 0 instead of testing
-- with (==) keeps the NoImplicitPrelude import list minimal.)
shift :: (Char -> Char) -> Int -> Char -> Char
shift _ 0 ch
    | ch > 'z' = 'a'
    | ch < 'a' = 'z'
    | otherwise = ch
shift step n ch
    | ch > 'z' = shift step n 'a'
    | ch < 'a' = shift step n 'z'
    | otherwise = shift step (n - 1) (step ch)
-- | Shift one character forward @n@ places through the wrapped alphabet.
shiftRight :: Int -> Char -> Char
shiftRight = shift succ

-- | Shift one character backward @n@ places through the wrapped alphabet.
shiftLeft :: Int -> Char -> Char
shiftLeft = shift pred

-- | Caesar-encrypt a lowercase string with right shift @n@.
caesar :: Int -> String -> String
caesar n = map (shiftRight n)

-- | Invert 'caesar' under the same key.
unCaesar :: Int -> String -> String
unCaesar n = map (shiftLeft n)
-- | Read a word and a shift amount from stdin and print the encrypted word.
--
-- Fixed the @number <- return $ read n@ anti-pattern: binding 'return' of a
-- pure value is just a 'let'.  NOTE(review): 'read' remains partial and will
-- crash on non-numeric input.
main :: IO ()
main = do word <- getLine
          n <- getLine
          let number = read n
          putStrLn $ caesar number word
| Numberartificial/workflow | haskell-first-principles/haskell-programming-from-first-principles-master/src/Cipher.hs | mit | 1,003 | 0 | 9 | 279 | 432 | 229 | 203 | 32 | 1 |
import XMonad
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Util.Run(spawnPipe)
import XMonad.Util.EZConfig(additionalKeys)
import System.IO
-- | Extra window rules: float the file-manager's copy/move progress dialog
-- so it is not tiled.
myManageHook = composeAll
    [ title =? "File Operation Progress" --> doFloat
    ]
-- | Wire up xmobar, dock handling, colours, keybindings and the status-bar
-- pretty-printer, then hand the config to 'xmonad'.
main = do
    xmproc <- spawnPipe "/usr/bin/xmobar"
    xmonad $ defaultConfig
        { manageHook = manageDocks <+> myManageHook
                       <+> manageHook defaultConfig
        , layoutHook = avoidStruts $ layoutHook defaultConfig
        , handleEventHook = mconcat
            [ docksEventHook
            , handleEventHook defaultConfig ]
        , normalBorderColor = myNormalBorderColor
        , focusedBorderColor = myFocusedBorderColor
        , terminal = myTerminal
        , workspaces = myWorkspaces
        -- Feed window/workspace state to xmobar through the pipe.
        , logHook = dynamicLogWithPP xmobarPP
            { ppOutput = hPutStrLn xmproc
            , ppTitle = xmobarColor "#e8d174" "". shorten 100
            , ppCurrent = xmobarColor "#9ed670" "#232323"
            , ppSep = xmobarColor "#ff9900" "" " ="
            , ppHiddenNoWindows = \w -> xmobarColor "#4d7358" "" ("" ++ w ++ "")
            , ppHidden = \w -> xmobarColor "#d64d4d" "" ("" ++ w ++ "")
            -- NOTE(review): partial match -- any other layout name would
            -- crash the logHook at runtime.
            , ppLayout = \x -> case x of
                "Tall" -> "t"
                "Mirror Tall" -> "m"
                "Full" -> "f"
            }
        -- Super (windows key) as the modifier.
        , modMask = mod4Mask
        } `additionalKeys`
        [ ((mod4Mask, xK_f), spawn "firefox")
        , ((mod4Mask, xK_p), spawn "dmenu_run -fn terminus-9:normal*")
        , ((mod4Mask, xK_d), spawn "thunar")
        , ((mod4Mask, xK_g), spawn "gimp")
        , ((mod4Mask, xK_b), spawn "mocp -P")
        , ((mod4Mask .|. shiftMask, xK_b), spawn "mocp -U")
        , ((mod4Mask, xK_v), spawn "mocp -v +25")
        , ((mod4Mask .|. shiftMask, xK_v), spawn "mocp -v -25")
        , ((mod4Mask, xK_x), spawn "mocp -S; mocp -p")
        , ((mod4Mask .|. shiftMask, xK_x), spawn "mocp -x")
        , ((shiftMask, xK_Print), spawn "sleep 0.2; scrot -u poop.png")
        , ((0, xK_Print), spawn "scrot poop.png")
        ]

-- Colour, terminal and workspace settings referenced by 'main'.
myNormalBorderColor = "#232323"
myFocusedBorderColor = "#FFFFFF"
myTerminal = "xfce4-terminal"
myWorkspaces = ["1", "2", "3", "4", "5", "6", "7", "8", "min"]
module Main where
import Data.Ix
import Data.SuffixArray
import Data.CountingSort
import qualified Data.Vector as V
-- | Read stdin, build its suffix array, print the first suffix index.
main = interact doSuffixArr
--dumb s = s
-- Converts the whole input to a vector, builds the suffix array and shows
-- the head of the index vector; output is newline-terminated.
doSuffixArr s = let SuffixArray a _ = suffixArray $ V.fromList s
                in (show (V.head a)) ++ "\n"
| VictorDenisov/suffixarray | Tests/Benchmark.hs | gpl-2.0 | 269 | 0 | 12 | 58 | 88 | 47 | 41 | 8 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Buildsome.ClangCommands
( make
) where
import Prelude.Compat hiding (FilePath)
import Buildsome.BuildMaps (TargetRep)
import Buildsome.Stats (Stats)
import Data.Aeson ((.=))
import Data.Aeson.Encode.Pretty (encodePretty)
import Data.Functor.Identity (Identity(..))
import Data.Maybe (fromMaybe)
import Lib.FilePath (FilePath, (</>))
import Lib.Makefile (TargetType(..), Target, targetInterpolatedCmds)
import qualified Buildsome.BuildMaps as BuildMaps
import qualified Buildsome.Stats as Stats
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy.Char8 as BS8L
import qualified Data.Map as Map
import qualified Lib.Revisit as Revisit
type M = Revisit.M TargetRep Identity
-- | Emit the clang compilation-database entries for one target followed by
-- those of its direct dependencies (taken from the build 'Stats').
--
-- 'Revisit.avoid' memoises on the target's rep, so each target is emitted
-- at most once even in a diamond-shaped dependency graph.
buildCommands :: FilePath -> Stats -> Target -> M [Aeson.Value]
buildCommands cwd stats target =
    fmap (fromMaybe []) $
    Revisit.avoid (BuildMaps.computeTargetRep target) $ do
        deps <- depBuildCommands
        return $ myBuildCommands ++ deps
  where
    -- A target maps to a compile command only when it has exactly one input
    -- file and a non-empty interpolated command line.
    myBuildCommands =
        case targetInputs target of
            [file]
                | not (BS8.null (targetInterpolatedCmds target)) ->
                    [ Aeson.object
                      [ "directory" .= BS8.unpack cwd
                      , "command" .= BS8.unpack (targetInterpolatedCmds target)
                      , "file" .= BS8.unpack file
                      ]
                    ]
            _ -> []
    depBuildCommands =
        case Map.lookup (BuildMaps.computeTargetRep target) (Stats.ofTarget stats) of
            Nothing ->
                error "BUG: Stats does not contain targets that appear as root/dependencies"
            Just targetStats -> buildCommandsTargets cwd stats $ Stats.tsDirectDeps targetStats
-- | Entries for a list of targets, flattened into a single list.
buildCommandsTargets :: FilePath -> Stats -> [Target] -> M [Aeson.Value]
buildCommandsTargets cwd stats targets =
    concat <$> traverse (buildCommands cwd stats) targets
-- | Write a clang compilation database (compile_commands.json style) for
-- the given root targets to @filePath@.  The accumulated list is reversed
-- -- presumably so dependencies precede their dependents; confirm.
make :: FilePath -> Stats -> [Target] -> FilePath -> IO ()
make cwd stats rootTargets filePath = do
    putStrLn $ "Writing clang commands to: " ++ show (cwd </> filePath)
    BS8L.writeFile (BS8.unpack filePath) $
        encodePretty $ reverse $
        runIdentity $ Revisit.run (buildCommandsTargets cwd stats rootTargets)
| sinelaw/buildsome | src/Buildsome/ClangCommands.hs | gpl-2.0 | 2,210 | 0 | 17 | 435 | 622 | 340 | 282 | 49 | 3 |
module Tests.ADTUntypedDebruijn where
import QHaskell.MyPrelude
import QHaskell.Expression.ADTUntypedDebruijn
import QHaskell.Variable.Plain
import qualified QHaskell.Expression.ADTValue as V
import QHaskell.Conversion
import QHaskell.Expression.Conversions.Evaluation.ADTUntypedDebruijn ()
-- | Untyped de Bruijn term for @\x -> p0 x x@, where p0 is the primitive
-- in environment slot zero (bound to (+) in 'test', i.e. doubling).
dbl :: Exp
dbl = Abs (Fun (Prm Zro [Var Zro,Var Zro]))

-- | Function composition @\f g x -> f (g x)@ in de Bruijn form.
compose :: Exp
compose = Abs (Fun
          (Abs (Fun
          (Abs (Fun (App (Var (Suc (Suc Zro)))
                    (App (Var (Suc Zro))
                    (Var Zro))))))))

-- | @(dbl . dbl) 1@, expected to evaluate to 4.
four :: Exp
four = App (App (App compose dbl) dbl) (ConI 1)

-- | Evaluate 'four' with (+) bound as primitive 0 and check the result is 4.
test :: Bool
test = (case runNamM (cnv (four , ([V.lft ((+) :: Word32 -> Word32 -> Word32)],[] :: [V.Exp]))) of
          Rgt (V.colft -> Rgt (4 :: Word32)) -> True
          _ -> False)
| shayan-najd/QHaskell | Tests/ADTUntypedDebruijn.hs | gpl-3.0 | 852 | 0 | 23 | 259 | 325 | 179 | 146 | -1 | -1 |
module Main where
import Assembler.Assembler
import Assembler.Instruction
import Assembler.Parser
import Assembler.SymbolTable
import System.Environment
import System.IO
-- | Assemble the file named by the first CLI argument into the second;
-- assembly errors go to stderr instead of the output file.
main = do
    [inPath, outPath] <- getArgs  -- NOTE(review): crashes on wrong arg count
    contents <- readFile inPath
    {-
    let Right voss = parseLines $ lines contents
        Right table = generateSymbolTable voss
        Right ss = replaceSymbols table voss
    putStrLn $ contents
    putStrLn $ unlines $ map show ss
    -}
    either (hPutStr stderr) (writeFile outPath) $ assemble contents
| rifung/nand2tetris | projects/compiler/app/Assembler.hs | gpl-3.0 | 531 | 0 | 10 | 115 | 90 | 48 | 42 | 11 | 1 |
module View.Panel
( drawPanel
) where
import Control.Lens hiding (index)
import View.ViewState
import View.Convert
import GameLogic
import Middleware.FreeGame.Facade
--TODO: Pause checkbox
--TODO: Collapsible panel
-- | Render the side panel: position readout, FPS counter, per-player rows,
-- the minimap, the pause indicator and the game-speed gauge.
drawPanel :: ViewState ()
drawPanel = do
    state <- get
    let (_width, height) = state ^. windowSize
        fnt = state ^. font
        g = state ^. game
        -- Owner of the cell currently selected by the active player; used
        -- to mark that player's row with a flag.
        activePlayerPosition = g ^. playerOfGame activePlayerIndex . selectedPos
        ownerPlayerInd = g ^. cellOfGame activePlayerPosition . playerIndex
    lift $ do
        color panelBkColor $ rectangleSolid panelWidth height
        translate (V2 10 20) $ drawPosition state
        fps <- getFPS
        let fpsText = "FPS:" <> show fps
        translate (V2 (panelWidth - 60) 20) . color black $ text fnt 15 fpsText
        translate (V2 0 30) . drawPlayers fnt ownerPlayerInd $ g ^. players
        translate (shiftMiniMap height) $ drawMiniMap g
        translate (V2 110 (height - 10)) $ drawPaused state
        translate (V2 95 (height - 70)) $ drawGameSpeed state
-- | Text readout of the active player's selected cell, e.g. "Position: 3x4".
drawPosition :: ViewData -> Frame ()
drawPosition state
    = color black $ text (state ^. font) panelFontSize str
  where str = "Position: " <> show x <> "x" <> show y
        (x,y) = state ^. game . playerOfGame activePlayerIndex . selectedPos

-- | One info row per player; the StateT Int counter tracks the next row
-- index so dead (skipped) players leave no gaps.
drawPlayers :: Font -> Int -> Players -> Frame()
drawPlayers fnt ownerPlayerInd
    = (`evalStateT` 1) . sequence_ . mapP (drawPlayer fnt ownerPlayerInd)

-- | Draw a single player's stats row (nothing for dead players) and, when
-- the player owns the selected cell, an owner-flag marker.
drawPlayer :: Font -> Int -> (Int, Player) -> StateT Int Frame ()
drawPlayer fnt ownerPlayerInd (index, player)
    | not ( isAlive player)
    = return ()
    | otherwise
    = do
        ind <- get
        let shiftY = fromIntegral ind * playerInfoHeight
            clr = playerColor index
        translate (V2 (panelWidth/2) shiftY) . color clr
            . lift . drawPlayerInfoParts fnt $ player
        when (ownerPlayerInd == index)
            . translate (V2 0 shiftY) . color clr $ lift drawOwnerFlag
        put (ind+1)

-- | Small flag glyph marking the owner of the selected cell.
drawOwnerFlag :: Frame ()
drawOwnerFlag = line [V2 0 (-5), V2 10 (-5)] >> line [V2 5 0, V2 5 (-10)]

-- | Lay out the player's stat columns at fractional offsets of the row.
drawPlayerInfoParts :: Font -> Player -> Frame ()
drawPlayerInfoParts fnt player = mapM_ p playerInfoParts
  where p (shift, f) = translate (V2 (playerInfoWidth*shift) 0)
            . text fnt panelFontSize $ f player

-- | (horizontal offset, stat renderer) pairs making up one player row.
playerInfoParts :: [(Double, Player -> String)]
playerInfoParts = [(-0.50, show.view num)
                  ,(-0.22, show.view free)
                  --,(-0.01, remainText)
                  ,( 0.22, shieldText)
                  ,( 0.40, aggrText)
                  ]
--remainText :: Player -> String
--remainText player = show $ view remain player `div` remainDivMult

-- | Shield column: the raw strength while below 128, otherwise "+1"/"+0"
-- for an active/inactive shield.
shieldText :: Player -> String
shieldText player
    | strength < 128
    = show strength
    | active
    = "+1"
    | otherwise
    = "+0"
  where strength = player ^. shieldStrength
        active = player ^. shieldActive

-- | Aggression column; shown only for AI players.
aggrText :: Player -> String
aggrText player
    | isAI player
    = show $ player ^. aggr
    | otherwise
    = ""

-- | "PAUSED" label, drawn only while the game is paused.
drawPaused :: ViewData -> Frame ()
drawPaused state
    = when (state ^. game . paused)
    . color black $ text (state ^. font) panelFontSize "PAUSED"
-- | Draw the minimap by rendering every world cell, scaled so the whole
-- world fits the panel's map area.
drawMiniMap :: GameData -> Frame ()
drawMiniMap game' = draw $ sequence_ cells
  where cells :: (Applicative f, Monad f, Picture2D f, Local f) => [f ()]
        cells = mapW (drawMiniMapCell mapCellScale) w
        --swap for testing drawing speed degradation
        --cells = fmap (drawMiniMapCell mapCellScale) [((x,y), mkCell 1 1) | x<-[1..wSize], y<-[1..wSize]]
        w = game' ^. world
        wSize = getWorldSize w
        mapCellScale = mapSize / fromIntegral wSize

-- | One minimap cell: free cells in the empty-cell colour, owned cells in
-- their player's colour.
drawMiniMapCell :: (Applicative f, Monad f, Picture2D f, Local f)
                => Double -> (WorldPos, Cell) -> f ()
drawMiniMapCell mapCellScale (pos, cell)
    | isFree cell
    = translateCell pos $ color emptyCellColor rect
    | otherwise
    = translateCell pos $ color clr rect
  where -- World positions are 1-based; shift to 0-based before scaling.
        translateCell (x,y) pict =
            let xx = (fromIntegral x - 1) * mapCellScale
                yy = (fromIntegral y - 1) * mapCellScale
            in translate (V2 xx yy) pict
        rect = rectangleSolid mapCellScale mapCellScale
        clr = playerColor $ cell ^. playerIndex
-- | Game-speed gauge: a labelled horizontal scale with one tick per
-- 'GameSpeed' value and a marker at the current speed.
drawGameSpeed :: ViewData -> Frame ()
drawGameSpeed state = do
    let gs = state ^. game . gameSpeed
        gaudgeLeft = panelWidth * 0.065
        gaudgeWidth = panelWidth * 0.3
        -- Tick spacing: full width divided over the GameSpeed enum range.
        gaudgeStep = gaudgeWidth / fromIntegral (fromEnum (maxBound :: GameSpeed))
        gaudgeTop = panelFontSize * 1.5
        gaudgeHeight = panelFontSize
        gaudgePos sp = gaudgeLeft + gaudgeStep * fromIntegral (fromEnum sp)
    translate (V2 0 panelFontSize) . color black
        $ text (state ^. font) panelFontSize "Game speed"
    translate (V2 0 (panelFontSize * 2)) . color black
        $ line [V2 gaudgeLeft 0, V2 (gaudgeLeft + gaudgeWidth) 0]
    translate (V2 0 gaudgeTop) . color black $ sequence_
        [line [V2 x 0, V2 x gaudgeHeight]
        | sp <- enumFrom (minBound :: GameSpeed)
        , let x = gaudgePos sp]
    translate (V2 (gaudgePos gs) gaudgeTop) $ drawGameSpeedMarker gaudgeHeight

-- | Pentagon-shaped marker pointing at the current speed tick.
drawGameSpeedMarker :: Coord -> Frame ()
drawGameSpeedMarker gaudgeHeight
    = color gray $ polygon [ V2 0 0
                           , V2 hw hw
                           , V2 hw gaudgeHeight
                           , V2 (-hw) gaudgeHeight
                           , V2 (-hw) hw
                           ]
  where hw = 5
| EPashkin/gamenumber-freegame | src/View/Panel.hs | gpl-3.0 | 5,492 | 0 | 16 | 1,563 | 1,847 | 919 | 928 | -1 | -1 |
import Test.HUnit
import qualified Test.RoutingTest as Routing
import qualified Test.HelperTest as Helper
-- | All test groups, one entry per feature module.
allTests = TestList [
    Routing.tests
    ,Helper.tests
    ]

-- | Run every test and print the summary counts.
main :: IO ()
main = runTestTT allTests >>= print
| shinjiro-itagaki/shinjirecs | shinjirecs-api/test/Test.hs | gpl-3.0 | 216 | 0 | 7 | 37 | 62 | 36 | 26 | 8 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.DeleteSpotDatafeedSubscription
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the data feed for Spot instances.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DeleteSpotDatafeedSubscription.html AWS API Reference> for DeleteSpotDatafeedSubscription.
module Network.AWS.EC2.DeleteSpotDatafeedSubscription
(
-- * Creating a Request
deleteSpotDatafeedSubscription
, DeleteSpotDatafeedSubscription
-- * Request Lenses
, dsdssDryRun
-- * Destructuring the Response
, deleteSpotDatafeedSubscriptionResponse
, DeleteSpotDatafeedSubscriptionResponse
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Contains the parameters for DeleteSpotDatafeedSubscription.
--
-- /See:/ 'deleteSpotDatafeedSubscription' smart constructor.
-- | Request wrapper: the optional @DryRun@ flag is the only parameter
-- this API call takes.
newtype DeleteSpotDatafeedSubscription = DeleteSpotDatafeedSubscription'
    { _dsdssDryRun :: Maybe Bool
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteSpotDatafeedSubscription' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dsdssDryRun'
-- | Smart constructor: builds a request with no fields set ('_dsdssDryRun'
-- defaults to 'Nothing', i.e. a real, non-dry-run request).
deleteSpotDatafeedSubscription
    :: DeleteSpotDatafeedSubscription
deleteSpotDatafeedSubscription =
    DeleteSpotDatafeedSubscription' {_dsdssDryRun = Nothing}
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
-- | Lens onto the @DryRun@ flag of the request.
dsdssDryRun :: Lens' DeleteSpotDatafeedSubscription (Maybe Bool)
dsdssDryRun = lens _dsdssDryRun (\s a -> s {_dsdssDryRun = a});
-- | The request is sent as an EC2 query POST.  The service returns an
-- empty body on success, so the response is decoded with 'receiveNull'.
instance AWSRequest DeleteSpotDatafeedSubscription
         where
        type Rs DeleteSpotDatafeedSubscription =
             DeleteSpotDatafeedSubscriptionResponse
        request = postQuery eC2
        response
          = receiveNull DeleteSpotDatafeedSubscriptionResponse'
-- | No extra HTTP headers are needed for this call.
instance ToHeaders DeleteSpotDatafeedSubscription
         where
        toHeaders = const mempty
-- | All EC2 query actions are posted to the service root path.
instance ToPath DeleteSpotDatafeedSubscription where
        toPath = const "/"
-- | Serialise the request into the EC2 query-string wire format:
-- fixed @Action@/@Version@ parameters plus the optional @DryRun@ flag.
instance ToQuery DeleteSpotDatafeedSubscription where
        toQuery DeleteSpotDatafeedSubscription'{..}
          = mconcat
              ["Action" =:
                 ("DeleteSpotDatafeedSubscription" :: ByteString),
               "Version" =: ("2015-04-15" :: ByteString),
               "DryRun" =: _dsdssDryRun]
-- | /See:/ 'deleteSpotDatafeedSubscriptionResponse' smart constructor.
-- | Empty response type: the API returns no payload for this action.
data DeleteSpotDatafeedSubscriptionResponse =
  DeleteSpotDatafeedSubscriptionResponse'
  deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteSpotDatafeedSubscriptionResponse' with the minimum fields required to make a request.
--
-- | Smart constructor for the (field-less) response value.
deleteSpotDatafeedSubscriptionResponse
    :: DeleteSpotDatafeedSubscriptionResponse
deleteSpotDatafeedSubscriptionResponse =
    DeleteSpotDatafeedSubscriptionResponse'
| olorin/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DeleteSpotDatafeedSubscription.hs | mpl-2.0 | 3,801 | 0 | 9 | 688 | 369 | 227 | 142 | 54 | 1 |
module Matrix
( Matrix
, cols
, column
, flatten
, fromList
, fromString
, reshape
, row
, rows
, shape
, transpose
) where
import qualified Data.Vector as V
-- | A matrix stored row-major: the flat cell vector, then the row count,
-- then the column count.
data Matrix a = Matrix (V.Vector a) Int Int deriving (Eq, Show)
-- | Number of columns.
cols :: Matrix a -> Int
cols (Matrix _ _ c) = c
-- | The underlying row-major cell vector.
cells :: Matrix a -> V.Vector a
cells (Matrix v _ _) = v
-- | Number of rows.
rows :: Matrix a -> Int
rows (Matrix _ r _) = r
-- | Extract column @n@ as a vector, by keeping every cell whose flat
-- index falls in that column of the row-major layout.
column :: Int -> Matrix a -> V.Vector a
column n m = V.ifilter belongsToColumn (cells m)
  where
    width = cols m
    belongsToColumn i _ = i `mod` width == n
-- | Row-major view of all cells (synonym for the internal accessor).
flatten :: Matrix a -> V.Vector a
flatten = cells
-- | Build a matrix from a list of rows.  Rows are assumed to be of equal
-- length; the column count is taken from the first row (0 if there are
-- no rows).
--
-- The original bindings shadowed the top-level 'cells', 'rows' and
-- 'cols' accessors and used the partial 'head'; renamed and made total.
fromList :: [[a]] -> Matrix a
fromList xs = Matrix v r c
  where
    v = V.fromList (concat xs)
    r = length xs
    c = case xs of
          []      -> 0
          (h : _) -> length h
-- | Parse a matrix from text: one row per line, cells parsed greedily
-- with 'reads' until no further value can be read from the line.
-- NOTE(review): trailing unparseable text on a line is silently
-- discarded once 'reads' fails — confirm that is the intended behaviour.
fromString :: (Read a) => String -> Matrix a
fromString = fromList . map convert . lines
  where
    -- Repeatedly read one value and recurse on the rest of the line;
    -- 'head r' is guarded by the 'null r' check.
    convert line = if null r then [] else h : convert t
      where
        r = reads line
        (h, t) = head r
-- | Reinterpret the same cells with new dimensions.
-- NOTE(review): the cell count is not checked against @r * c@ — confirm
-- callers only reshape compatibly.
reshape :: (Int, Int) -> Matrix a -> Matrix a
reshape (r, c) m = Matrix (flatten m) r c
-- | Extract row @n@ by slicing @cols m@ cells starting at the row's
-- offset in the row-major vector.
row :: Int -> Matrix a -> V.Vector a
row n m =
  let width = cols m
  in V.take width (V.drop (n * width) (flatten m))
-- | (rows, columns) of the matrix.
shape :: Matrix a -> (Int, Int)
shape m = (rows m, cols m)
-- | Transpose: concatenate the columns of the input in order, which are
-- exactly the rows of the result; the dimensions swap accordingly.
transpose :: Matrix a -> Matrix a
transpose m = Matrix transposedCells (cols m) (rows m)
  where
    transposedCells = V.concat [column j m | j <- [0 .. cols m - 1]]
| daewon/til | exercism/haskell/matrix/src/Matrix.hs | mpl-2.0 | 1,426 | 0 | 11 | 392 | 693 | 360 | 333 | 49 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.FirebaseDynamicLinks.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.FirebaseDynamicLinks.Types
(
-- * Service Configuration
firebaseDynamicLinksService
-- * OAuth Scopes
, firebaseScope
-- * NavigationInfo
, NavigationInfo
, navigationInfo
, niEnableForcedRedirect
-- * DesktopInfo
, DesktopInfo
, desktopInfo
, diDesktopFallbackLink
-- * DynamicLinkEventStatPlatform
, DynamicLinkEventStatPlatform (..)
-- * Suffix
, Suffix
, suffix
, sCustomSuffix
, sOption
-- * DynamicLinkWarning
, DynamicLinkWarning
, dynamicLinkWarning
, dlwWarningCode
, dlwWarningDocumentLink
, dlwWarningMessage
-- * ManagedShortLink
, ManagedShortLink
, managedShortLink
, mslCreationTime
, mslLink
, mslVisibility
, mslLinkName
, mslFlaggedAttribute
, mslInfo
-- * CreateShortDynamicLinkRequest
, CreateShortDynamicLinkRequest
, createShortDynamicLinkRequest
, csdlrLongDynamicLink
, csdlrSuffix
, csdlrDynamicLinkInfo
, csdlrSdkVersion
-- * SocialMetaTagInfo
, SocialMetaTagInfo
, socialMetaTagInfo
, smtiSocialImageLink
, smtiSocialDescription
, smtiSocialTitle
-- * CreateShortDynamicLinkResponse
, CreateShortDynamicLinkResponse
, createShortDynamicLinkResponse
, csdlrPreviewLink
, csdlrWarning
, csdlrShortLink
-- * DynamicLinkEventStat
, DynamicLinkEventStat
, dynamicLinkEventStat
, dlesEvent
, dlesPlatform
, dlesCount
-- * IosInfo
, IosInfo
, iosInfo
, iiIosBundleId
, iiIosIPadBundleId
, iiIosAppStoreId
, iiIosMinimumVersion
, iiIosIPadFallbackLink
, iiIosCustomScheme
, iiIosFallbackLink
-- * DynamicLinkInfo
, DynamicLinkInfo
, dynamicLinkInfo
, dliNavigationInfo
, dliDesktopInfo
, dliSocialMetaTagInfo
, dliDynamicLinkDomain
, dliLink
, dliIosInfo
, dliDomainURIPrefix
, dliAndroidInfo
, dliAnalyticsInfo
-- * GetIosPostInstallAttributionRequestVisualStyle
, GetIosPostInstallAttributionRequestVisualStyle (..)
-- * DynamicLinkStats
, DynamicLinkStats
, dynamicLinkStats
, dlsLinkEventStats
-- * SuffixOption
, SuffixOption (..)
-- * ManagedShortLinkFlaggedAttributeItem
, ManagedShortLinkFlaggedAttributeItem (..)
-- * DynamicLinkEventStatEvent
, DynamicLinkEventStatEvent (..)
-- * CreateManagedShortLinkRequest
, CreateManagedShortLinkRequest
, createManagedShortLinkRequest
, cmslrLongDynamicLink
, cmslrSuffix
, cmslrDynamicLinkInfo
, cmslrSdkVersion
, cmslrName
-- * GetIosReopenAttributionResponse
, GetIosReopenAttributionResponse
, getIosReopenAttributionResponse
, girarIosMinAppVersion
, girarDeepLink
, girarUtmContent
, girarResolvedLink
, girarUtmMedium
, girarInvitationId
, girarUtmTerm
, girarUtmCampaign
, girarUtmSource
-- * GetIosPostInstallAttributionResponseRequestIPVersion
, GetIosPostInstallAttributionResponseRequestIPVersion (..)
-- * GetIosPostInstallAttributionRequest
, GetIosPostInstallAttributionRequest
, getIosPostInstallAttributionRequest
, gipiarIosVersion
, gipiarUniqueMatchLinkToCheck
, gipiarAppInstallationTime
, gipiarDevice
, gipiarSdkVersion
, gipiarBundleId
, gipiarRetrievalMethod
, gipiarVisualStyle
-- * Xgafv
, Xgafv (..)
-- * GetIosPostInstallAttributionResponseAttributionConfidence
, GetIosPostInstallAttributionResponseAttributionConfidence (..)
-- * AndroidInfo
, AndroidInfo
, androidInfo
, aiAndroidMinPackageVersionCode
, aiAndroidFallbackLink
, aiAndroidLink
, aiAndroidPackageName
-- * DynamicLinkWarningWarningCode
, DynamicLinkWarningWarningCode (..)
-- * AnalyticsInfo
, AnalyticsInfo
, analyticsInfo
, aiItunesConnectAnalytics
, aiGooglePlayAnalytics
-- * ITunesConnectAnalytics
, ITunesConnectAnalytics
, iTunesConnectAnalytics
, itcaAt
, itcaMt
, itcaPt
, itcaCt
-- * GetIosPostInstallAttributionResponse
, GetIosPostInstallAttributionResponse
, getIosPostInstallAttributionResponse
, gipiarDeepLink
, gipiarRequestIPVersion
, gipiarAppMinimumVersion
, gipiarAttributionConfidence
, gipiarExternalBrowserDestinationLink
, gipiarUtmContent
, gipiarResolvedLink
, gipiarRequestedLink
, gipiarUtmMedium
, gipiarFallbackLink
, gipiarInvitationId
, gipiarIsStrongMatchExecutable
, gipiarUtmTerm
, gipiarUtmCampaign
, gipiarMatchMessage
, gipiarUtmSource
-- * CreateManagedShortLinkResponse
, CreateManagedShortLinkResponse
, createManagedShortLinkResponse
, cmslrManagedShortLink
, cmslrPreviewLink
, cmslrWarning
-- * GetIosReopenAttributionRequest
, GetIosReopenAttributionRequest
, getIosReopenAttributionRequest
, girarRequestedLink
, girarSdkVersion
, girarBundleId
-- * GooglePlayAnalytics
, GooglePlayAnalytics
, googlePlayAnalytics
, gpaUtmContent
, gpaUtmMedium
, gpaUtmTerm
, gpaUtmCampaign
, gpaGclid
, gpaUtmSource
-- * GetIosPostInstallAttributionRequestRetrievalMethod
, GetIosPostInstallAttributionRequestRetrievalMethod (..)
-- * DeviceInfo
, DeviceInfo
, deviceInfo
, diLanguageCodeFromWebview
, diScreenResolutionWidth
, diLanguageCode
, diDeviceModelName
, diScreenResolutionHeight
, diLanguageCodeRaw
, diTimezone
-- * ManagedShortLinkVisibility
, ManagedShortLinkVisibility (..)
) where
import Network.Google.FirebaseDynamicLinks.Types.Product
import Network.Google.FirebaseDynamicLinks.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v1' of the Firebase Dynamic Links API. This contains the host and root path used as a starting point for constructing service requests.
-- | Service configuration: identifies API version @v1@ and the host all
-- request paths are resolved against.
firebaseDynamicLinksService :: ServiceConfig
firebaseDynamicLinksService
  = defaultService
      (ServiceId "firebasedynamiclinks:v1")
      "firebasedynamiclinks.googleapis.com"
-- | View and administer all your Firebase data and settings
-- | Type-level OAuth scope carried as a 'Proxy'; the scope URL lives
-- entirely in the type.
firebaseScope :: Proxy '["https://www.googleapis.com/auth/firebase"]
firebaseScope = Proxy
| brendanhay/gogol | gogol-firebase-dynamiclinks/gen/Network/Google/FirebaseDynamicLinks/Types.hs | mpl-2.0 | 6,855 | 0 | 7 | 1,502 | 674 | 467 | 207 | 187 | 1 |
{-# OPTIONS_GHC -F -pgmF ./dist/build/htfpp/htfpp #-}
module Foo.B (htf_thisModulesTests) where
import qualified Test.Framework as HTF
-- Trivial always-passing check; picked up by the HTF preprocessor via
-- the test_ naming convention and exported through htf_thisModulesTests.
test_b_OK = assertEqual 1 1
| ekarayel/HTF | tests/Foo/B.hs | lgpl-2.1 | 166 | 0 | 5 | 23 | 30 | 19 | 11 | 4 | 1 |
module GCrypt.AsymmetricCrypto.IO (
initReadableString,
initWritableString,
initReadableByteString,
ACIOPtr,
ACIO(..),
mkACIO,
) where
import Foreign.Ptr
import Foreign.C.Types
import Foreign.ForeignPtr
import Data.ByteString (ByteString)
import Data.ByteString.Unsafe
import GCrypt.Base
-- | the constructor should only ever be called from mkACIO.
-- | Opaque handle to a gcrypt AC IO object; owns a foreign pointer to
-- the underlying C structure.
newtype ACIO = ACIO { unACIO :: ForeignPtr ACIO }
    deriving (Show)
-- | Allocate an uninitialised AC IO object of the size gcrypt expects
-- ('sizeOfACIO') and wrap it.  Callers must initialise it (e.g. via
-- 'initReadableString') before use.
--
-- Rewritten from the @>>= return . ACIO@ anti-pattern to 'fmap'.
mkACIO :: IO ACIO
mkACIO = fmap ACIO (mallocForeignPtrBytes sizeOfACIO)
-- | Initialise an AC IO object to read from the given buffer/length.
-- Thin wrapper over the raw gcrypt binding.
initReadableString :: ACIOPtr -> Ptr CUChar -> CULong -> IO ()
initReadableString = gcry_ac_io_init_readable_string
-- | Build a readable AC IO object backed by the bytes of a 'ByteString'.
-- NOTE(review): 'unsafeUseAsCStringLen' exposes the ByteString's buffer
-- only for the duration of @f@, but the returned 'ACIO' retains a
-- pointer into it — confirm gcrypt copies the data during init, or that
-- the ByteString outlives the ACIO.
initReadableByteString :: ByteString -> IO ACIO
initReadableByteString bs = unsafeUseAsCStringLen bs f
    where
        f (s,l) = do
            a <- mkACIO
            -- Cast the Haskell-side pointers to the C types the binding wants.
            withForeignPtr (unACIO a) $ \a' ->
                initReadableString (ACIOPtr (castPtr a'))
                    (castPtr s)
                    (fromIntegral l)
            return a
-- | Initialise an AC IO object for writing.  Two fresh cells are
-- allocated — one for the output buffer pointer, one for its length —
-- handed to gcrypt, and returned so the caller can read the results
-- after the IO object has been used.
-- NOTE(review): the buffer gcrypt stores into the pointer cell appears
-- to be C-allocated; confirm who is responsible for freeing it.
initWritableString :: ACIOPtr -> IO (ForeignPtr (Ptr CUChar),ForeignPtr CULong)
initWritableString p = do
    s <- mallocForeignPtr :: IO (ForeignPtr (Ptr CUChar))
    l <- mallocForeignPtr :: IO (ForeignPtr CULong)
    withForeignPtr s $ \s' -> withForeignPtr l $ \l' ->
        gcry_ac_io_init_writable_string p s' l'
    return (s,l)
| sw17ch/hsgcrypt | src/GCrypt/AsymmetricCrypto/IO.hs | lgpl-3.0 | 1,366 | 0 | 15 | 348 | 381 | 200 | 181 | 35 | 1 |
module MaybeMonoid where
import Data.Monoid
import Test.QuickCheck
-- | Associativity law: combining left-nested and right-nested must agree.
monoidAssoc :: (Eq m, Monoid m) => m -> m -> m -> Bool
monoidAssoc a b c =
  let leftNested  = (a <> b) <> c
      rightNested = a <> (b <> c)
  in leftNested == rightNested
-- | Left identity law: 'mempty' combined on the left leaves a value unchanged.
monoidLeftIdentity :: (Eq m, Monoid m) => m -> Bool
monoidLeftIdentity m = mappend mempty m == m
-- | Right identity law: 'mempty' combined on the right leaves a value unchanged.
monoidRightIdentity :: (Eq m, Monoid m) => m -> Bool
monoidRightIdentity m = mappend m mempty == m
-- | A home-grown 'Maybe': either no value ('Nada') or one value ('Only').
data Optional a =
    Nada
  | Only a
  deriving (Eq, Show)
-- | Lift the element monoid pointwise: 'Nada' is the identity, and two
-- 'Only' values combine their payloads.
instance Monoid a => Monoid (Optional a) where
  mempty = Nada
  mappend (Only x) (Only y) = Only (x <> y)
  mappend Nada y = y
  mappend x Nada = x
-- | Newtype wrapper whose monoid keeps the first non-'Nada' value.
newtype First' a =
  First' {getFirst' :: Optional a }
  deriving (Eq, Show)
-- | Keep the leftmost present value.  The original three-clause match
-- collapses to two: a left 'Nada' yields the right operand, and any
-- other left operand wins outright.
instance Monoid (First' a) where
  mempty = First' Nada
  mappend (First' Nada) y = y
  mappend x _ = x
-- | Generate 'First'' values: 50/50 between an empty wrapper and one
-- holding a random payload (equal 'frequency' weights == 'oneof').
instance Arbitrary a => Arbitrary (First' a) where
  arbitrary = do
    x <- arbitrary
    oneof [return (First' Nada), return (First' (Only x))]
-- | Monomorphic alias for 'mappend' at 'First'', used by the properties below.
firstMappend :: First' a -> First' a -> First' a
firstMappend = mappend
-- | Shape of the associativity property specialised to @First' String@.
type FirstMappend =
     First' String
  -> First' String
  -> First' String
  -> Bool
-- | QuickCheck the three monoid laws for @First' String@.
main :: IO ()
main = do
  quickCheck (monoidAssoc :: FirstMappend)
  quickCheck (monoidLeftIdentity :: First' String -> Bool)
  quickCheck (monoidRightIdentity :: First' String -> Bool)
quickCheck (monoidRightIdentity :: First' String -> Bool) | thewoolleyman/haskellbook | 15/12/maor/MaybeMonoid.hs | unlicense | 1,474 | 0 | 15 | 394 | 621 | 321 | 300 | 42 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Model.JsonTypes.Invitation
( Invitation(..)
, jsonInvitation
)
where
import Data.Aeson (ToJSON)
import Data.Text (Text)
import Database.Persist.Sql (Entity(..))
import GHC.Generics (Generic)
import Prelude hiding (id)
import qualified Model.SqlTypes as SqlT
import Query.Util (integerKey)
-- | JSON-facing view of an invitation row.
data Invitation =
  Invitation { id :: Integer             -- database key, as a plain Integer
             , email :: Text             -- invitee's e-mail address
             , invitationCode :: Text    -- code the invitee redeems
             } deriving (Show, Generic)
-- Serialisation is derived generically from the record fields.
instance ToJSON Invitation
-- | Convert a persistent 'Entity' row into the JSON-facing 'Invitation',
-- copying the e-mail and code fields and flattening the key to an Integer.
jsonInvitation :: Entity SqlT.Invitation -> Invitation
jsonInvitation (Entity key value) =
  Invitation
    { id             = integerKey key
    , email          = SqlT.invitationEmail value
    , invitationCode = SqlT.invitationCode value
    }
| flatrapp/core | app/Model/JsonTypes/Invitation.hs | apache-2.0 | 988 | 0 | 8 | 367 | 200 | 119 | 81 | 22 | 1 |
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
-- TODO: Implement translation for more unsupported language parts
{-|
Provides translation from a subset of the dynamically typed Fortran syntax
("Language.Fortran.AST") to the strongly typed expression language
("Language.Fortran.Model").
-}
module Language.Fortran.Model.Translate
(
-- * Types
-- ** Fortran Expressions
FortranExpr
-- ** Existentials
, Some(..)
, SomeVar
, SomeExpr
, SomeType
-- ** Semantics
, KindSelector(..)
, FortranSemantics(..)
, defaultSemantics
-- * Translation Monad
-- ** Environment
, TranslateEnv(..)
, defaultTranslateEnv
-- ** Errors
, TranslateError(..)
-- ** Monad
, TranslateT(..)
, runTranslateT
-- * Translating Expressions
, translateExpression
, translateExpression'
, translateCoerceExpression
-- * Translating Types
-- ** 'TypeInfo'
, TypeInfo
, typeInfo
-- ** Translation
, translateTypeInfo
-- * Lenses
-- ** 'FortranSemantics'
, fsIntegerKinds
, fsRealKinds
, fsLogicalKinds
, fsCharacterKinds
, fsDoublePrecisionKinds
-- * 'TranslateEnv'
, teVarsInScope
, teImplicitVars
, teSemantics
-- ** 'TypeInfo'
, tiSrcSpan
, tiBaseType
, tiSelectorLength
, tiSelectorKind
, tiDeclaratorLength
, tiDimensionDeclarators
, tiAttributes
) where
import Prelude hiding (span)
import Control.Applicative ((<|>))
import Data.Char (toLower)
import Data.List (intersperse)
import Data.Maybe (catMaybes)
import Data.Typeable (Typeable)
import Text.Read (readMaybe)
import Control.Lens hiding (Const (..),
indices, op, rmap, (.>))
import Control.Monad.Except
import Control.Monad.Reader
import Data.Map (Map)
import Data.Singletons
import Data.Singletons.Prelude.List (Length)
import Data.Vinyl
import Data.Vinyl.Functor (Const (..))
import qualified Language.Fortran.Analysis as F
import qualified Language.Fortran.AST as F
import qualified Language.Fortran.Util.Position as F
import Language.Expression
import Language.Expression.Pretty
import Camfort.Analysis.Logger
import Camfort.Helpers.TypeLevel
import Language.Fortran.Model.Op.Core
import Language.Fortran.Model.Op.Meta
import Language.Fortran.Model.Op.Core.Match
import Language.Fortran.Model.Singletons
import Language.Fortran.Model.Types
import Language.Fortran.Model.Types.Match
import Language.Fortran.Model.Vars
--------------------------------------------------------------------------------
-- General types
--------------------------------------------------------------------------------
-- | The type of strongly-typed Fortran expressions: the free structure
-- over 'CoreOp' with 'FortranVar' at the leaves.
type FortranExpr = HFree CoreOp FortranVar

-- | A Fortran variable with an existential type.
type SomeVar = Some FortranVar

-- | A Fortran expression with an existential type, paired with the 'D'
-- witness for that type.
type SomeExpr = Some (PairOf D FortranExpr)

-- | An existential Fortran type.
type SomeType = Some D
--------------------------------------------------------------------------------
-- Semantics
--------------------------------------------------------------------------------
-- | A function mapping numeric kind annotations from Fortran programs to actual
-- precision, for a particular basic type `bt`.  'Nothing' means the kind is
-- invalid under the semantics in force.
newtype KindSelector = KindSelector { selectKind :: Integer -> Maybe Precision }
{-|
A (currently very incomplete) specification of the semantics of a particular
version of Fortran, needed when translating.
-}
data FortranSemantics =
  FortranSemantics
  { _fsIntegerKinds :: KindSelector
    -- ^ Kind -> precision for INTEGER
  , _fsRealKinds :: KindSelector
    -- ^ Kind -> precision for REAL
  , _fsCharacterKinds :: KindSelector
    -- ^ Kind -> precision for CHARACTER
  , _fsLogicalKinds :: KindSelector
    -- ^ Kind -> precision for LOGICAL
  , _fsDoublePrecisionKinds :: Maybe KindSelector
    -- ^ 'Nothing' when DOUBLE PRECISION is not a distinct type under
    -- these semantics (see its use in 'translateBaseType')
  }

makeLenses ''FortranSemantics
{-|
== /Kinds/
The default semantics has sensible defaults for kind 0 (unspecified). Otherwise,
the kind is the number of bytes used for the type's representation. Only
power-of-two values up to 8 are valid. Characters only allow single byte
precision. Reals only allow 4- or 8-byte precision.
-}
defaultSemantics :: FortranSemantics
defaultSemantics =
  FortranSemantics
  { -- Integers default to 64-bit when the kind is unspecified (0).
    _fsIntegerKinds = fromTable [(0, P64), (1, P8), (2, P16), (4, P32), (8, P64)]
    -- Reals default to single precision; only 4- and 8-byte kinds exist.
  , _fsRealKinds = fromTable [(0, P32), (4, P32), (8, P64)]
    -- Characters are single-byte only.
  , _fsCharacterKinds = fromTable [(0, P8)]
    -- Logicals default to one byte; power-of-two sizes up to 8 allowed.
  , _fsLogicalKinds = fromTable [(0, P8), (1, P8), (2, P16), (4, P32), (8, P64)]
    -- Double precision is not a distinct basic type by default.
  , _fsDoublePrecisionKinds = Nothing
  }
  where
    -- Build a 'KindSelector' from an explicit kind -> precision table;
    -- kinds outside the table are invalid ('Nothing').
    fromTable :: [(Integer, Precision)] -> KindSelector
    fromTable table = KindSelector (`lookup` table)
--------------------------------------------------------------------------------
-- Translate Monad
--------------------------------------------------------------------------------
-- | In order to translate Fortran expressions, we require some information
-- about the environment. That information is capture in this record.
data TranslateEnv =
  TranslateEnv
  { _teImplicitVars :: Bool
    -- ^ Are implicit variable types enabled? (TODO: this currently does
    -- nothing)
  , _teVarsInScope :: Map UniqueName SomeVar
    -- ^ A map of the variables in scope, including their types
  , _teSemantics :: FortranSemantics
    -- ^ The version of Fortran's semantics to use when translating code.
  }

-- | Sensible starting environment: implicit variables on, no variables
-- in scope, and 'defaultSemantics'.
defaultTranslateEnv :: TranslateEnv
defaultTranslateEnv =
  TranslateEnv
  { _teImplicitVars = True
  , _teVarsInScope = mempty
  , _teSemantics = defaultSemantics
  }

makeLenses ''TranslateEnv
-- | The translation monad: read access to a 'TranslateEnv' plus
-- short-circuiting 'TranslateError's, layered over an arbitrary base
-- monad @m@.  Instances are derived from the transformer stack.
newtype TranslateT m a =
  TranslateT
  { getTranslateT
    :: ReaderT TranslateEnv (ExceptT TranslateError m) a
  }
  deriving ( Functor, Applicative, Monad
           , MonadError TranslateError
           , MonadReader TranslateEnv
           , MonadLogger e w
           )
-- | Unwrap the transformer stack: feed in the environment, then expose
-- translation failure as an 'Either' in the base monad.
runTranslateT
  :: (Monad m)
  => TranslateT m a
  -> TranslateEnv
  -> m (Either TranslateError a)
runTranslateT (TranslateT action) env =
  runExceptT (runReaderT action env)
--------------------------------------------------------------------------------
-- Errors
--------------------------------------------------------------------------------
-- | Everything that can go wrong during translation; rendered for users
-- by the 'Describe' instance below.
data TranslateError
  = ErrUnsupportedItem Text
  -- ^ Tried to translate a part of the language that is not (yet) supported.

  | ErrBadLiteral
  -- ^ Found a literal value that we didn't know how to translate. May or may
  -- not be valid Fortran.

  | ErrUnexpectedType Text SomeType SomeType
  -- ^ @'ErrUnexpectedType' message expected actual@: tried to translate a
  -- Fortran language part into the wrong expression type, and it wasn't
  -- coercible to the correct type.

  | ErrInvalidOpApplication (Some (Rec D))
  -- ^ Tried to apply an operator to arguments with the wrong types.

  | ErrVarNotInScope F.Name
  -- ^ Reference to a variable that's not currently in scope

  | ErrInvalidKind Text Integer
  -- ^ @'ErrInvalidKind' baseTypeName givenKind@: tried to interpret a type with
  -- the given kind which is not valid under the semantics.
  deriving (Typeable)
-- | Human-readable rendering of each error constructor.
instance Describe TranslateError where
  describeBuilder = \case
    ErrUnsupportedItem message ->
      "unsupported " <> describeBuilder message
    ErrBadLiteral ->
      "encountered a literal value that couldn't be translated; " <>
      "it might be invalid Fortran or it might use unsupported language features"
    ErrUnexpectedType message expected actual ->
      "unexpected type in " <> describeBuilder message <>
      "; expected type was '" <> describeBuilder (show expected) <>
      "'; actual type was '" <> describeBuilder (show actual) <> "'"
    ErrInvalidOpApplication (Some argTypes) ->
      -- Pretty-print each argument type, quote it, and comma-separate.
      let descTypes
            = recordToList
            . rmap (Const . surround "'" . describeBuilder . pretty1)
            $ argTypes
          surround s x = s <> x <> s
      in "tried to apply operator to arguments of the wrong type; arguments had types " <>
         mconcat (intersperse ", " descTypes)
    ErrVarNotInScope nm ->
      "reference to variable '" <> describeBuilder nm <> "' which is not in scope"
    ErrInvalidKind bt k ->
      "type with base '" <> describeBuilder bt <> "' specified a kind '" <>
      describeBuilder (show k) <> "' which is not valid under the current semantics"
-- | Fail translation with an \"unsupported feature\" error.
unsupported :: (MonadError TranslateError m) => Text -> m a
unsupported msg = throwError (ErrUnsupportedItem msg)
--------------------------------------------------------------------------------
-- Translating Types
--------------------------------------------------------------------------------
{-|
The different ways of specifying Fortran types are complicated. This record
contains information about all the different things that might contribute to a
type.
-}
-- | Aggregates every syntactic source that may contribute to a variable's
-- type: the type spec itself, selector length/kind, and declaration-site
-- length, dimensions and attributes.
data TypeInfo ann =
  TypeInfo
  { _tiSrcSpan :: F.SrcSpan
  , _tiBaseType :: F.BaseType
  , _tiSelectorLength :: Maybe (F.Expression ann)
    -- ^ The length expression from a 'F.Selector' associated with a
    -- 'F.TypeSpec'.
  , _tiSelectorKind :: Maybe (F.Expression ann)
    -- ^ The kind expression from a 'F.Selector' associated with a 'F.TypeSpec'.
  , _tiDeclaratorLength :: Maybe (F.Expression ann)
    -- ^ The length expression from a 'F.Declarator' associated with an instance
    -- of 'F.StDeclaration'.
  , _tiDimensionDeclarators :: Maybe (F.AList F.DimensionDeclarator ann)
    -- ^ The list of dimension declarators from an instance of 'F.DeclArray'
    -- associated with an instance of 'F.StDeclaration'.
  , _tiAttributes :: Maybe (F.AList F.Attribute ann)
    -- ^ The list of attributes from an instance of 'F.StDeclaration'.
  }
  deriving (Functor, Show)

makeLenses ''TypeInfo
-- | Source-span access delegates to the '_tiSrcSpan' field.
instance F.Spanned (TypeInfo ann) where
  getSpan = view tiSrcSpan
  setSpan = set tiSrcSpan
-- | Create a simple 'TypeInfo' from an 'F.TypeSpec'. Many use cases will need
-- to add more information to fully specify the type.
-- | Create a simple 'TypeInfo' from an 'F.TypeSpec': span, base type and
-- the selector's length/kind expressions; declaration-site information
-- is left empty and may be filled in by the caller.
typeInfo :: F.TypeSpec ann -> TypeInfo ann
typeInfo ts@(F.TypeSpec _ _ bt mselector) =
  TypeInfo
  { _tiSrcSpan = F.getSpan ts
  , _tiBaseType = bt
  , _tiSelectorLength = mselector >>= \(F.Selector _ _ l _) -> l
  , _tiSelectorKind = mselector >>= \(F.Selector _ _ _ k) -> k
  , _tiDeclaratorLength = Nothing
  , _tiDimensionDeclarators = Nothing
  , _tiAttributes = Nothing
  }
-- | Convert a 'TypeInfo' to its corresponding strong type.
-- | Convert a 'TypeInfo' to its corresponding strong type.  A variable is
-- treated as an array iff a length expression can be found in any of the
-- four places checked below (selector, declarator, dimension declarators,
-- attributes), tried in that order.
translateTypeInfo
  :: (Monad m, Show ann)
  => TypeInfo ann
  -> TranslateT m SomeType
translateTypeInfo ti = do
  -- TODO: Derived data types
  SomePrimD basePrim <- translateBaseType (ti ^. tiBaseType) (ti ^. tiSelectorKind)
  let
    -- If an attribute corresponds to a dimension declaration which contains a
    -- simple length dimension, get the expression out.
    attrToLength (F.AttrDimension _ _ declarators) = dimensionDeclaratorsToLength declarators
    attrToLength _ = Nothing

    -- Only an unambiguous single length among the attributes counts.
    attrsToLength (F.AList _ _ attrs) =
      case catMaybes (attrToLength <$> attrs) of
        [e] -> Just e
        _ -> Nothing

    -- If a list of dimension declarators corresponds to a simple one
    -- dimensional length, get the expression out. We don't handle other cases
    -- yet.
    dimensionDeclaratorsToLength (F.AList _ _ [F.DimensionDeclarator _ _ e1 e2]) = e1 <|> e2
    dimensionDeclaratorsToLength _ = Nothing

    -- First match wins across the four possible sources of a length.
    mLengthExp =
      (ti ^. tiSelectorLength) <|>
      (ti ^. tiDeclaratorLength) <|>
      (ti ^. tiDimensionDeclarators >>= dimensionDeclaratorsToLength) <|>
      (ti ^. tiAttributes >>= attrsToLength)

  case mLengthExp of
    Just lengthExp -> do
      -- If a length expression could be found, this variable is an array
      -- TODO: If the length expression is malformed, throw an error.
      -- TODO: Use information about the length.
      -- maybe (unsupported "type spec") void (exprIntLit lengthExp)
      -- Arrays are currently always indexed by 64-bit integers.
      case basePrim of
        DPrim bp -> return (Some (DArray (Index PInt64) (ArrPrim bp)))
    Nothing ->
      return (Some basePrim)
-- | A 'D' witness known to be for a primitive (scalar) type, with the
-- element type hidden.
data SomePrimD where
  SomePrimD :: D (PrimS a) -> SomePrimD
-- | Resolve a Fortran base type plus optional kind expression into a
-- primitive 'D' witness, consulting the active 'FortranSemantics' for
-- which kinds are valid.  Only integer-literal kinds are supported; a
-- missing kind is treated as kind 0 (the semantics' default).
translateBaseType
  :: (Monad m)
  => F.BaseType
  -> Maybe (F.Expression ann) -- ^ Kind
  -> TranslateT m SomePrimD
translateBaseType bt mkind = do
  kindInt <- case mkind of
    Nothing -> return 0
    Just (F.ExpValue _ _ (F.ValInteger s)) ->
      case readLitInteger s of
        Just k -> return k
        Nothing -> throwError ErrBadLiteral
    _ -> unsupported "kind which isn't an integer literal"

  -- Look up the precision for 'kindInt' via the given semantics lens;
  -- fail with 'ErrInvalidKind' if the semantics reject it.
  let getKindPrec btName ksl = do
        mks <- preview (teSemantics . ksl)
        case mks >>= (`selectKind` kindInt) of
          Just p -> return p
          Nothing -> throwError $ ErrInvalidKind btName kindInt

  -- Get value-level representations of the type's basic type and precision
  (basicType, prec) <- case bt of
    F.TypeInteger -> (BTInt ,) <$> getKindPrec "integer" fsIntegerKinds
    F.TypeReal -> (BTReal ,) <$> getKindPrec "real" fsRealKinds
    F.TypeCharacter -> (BTChar ,) <$> getKindPrec "character" fsCharacterKinds
    F.TypeLogical -> (BTLogical ,) <$> getKindPrec "logical" fsLogicalKinds

    -- Double precision is special because it's not always supported as its own
    -- basic type, being subsumed by the `REAL` basic type.
    F.TypeDoublePrecision ->
      (BTReal,) <$> getKindPrec "double precision" (fsDoublePrecisionKinds . _Just)
    _ -> unsupported "type spec"

  -- Lift the value-level representations to the type level and get a primitive
  -- type with those properties.
  case (toSing basicType, toSing prec) of
    (SomeSing sbt, SomeSing sprec) -> case makePrim sprec sbt of
      Just (MakePrim prim) -> return (SomePrimD (DPrim prim))
      Nothing -> unsupported "type spec"
--------------------------------------------------------------------------------
-- Translating Expressions
--------------------------------------------------------------------------------
-- | Translate an expression with an unknown type. The return value
-- existentially captures the type of the result.
-- | Translate an expression with an unknown type. The return value
-- existentially captures the type of the result.  Unsupported syntactic
-- forms fail with 'ErrUnsupportedItem'.
translateExpression :: (Monad m) => F.Expression (F.Analysis ann) -> TranslateT m SomeExpr
translateExpression = \case
  -- Values need the whole expression node (for annotations), not just the value.
  e@F.ExpValue {} -> translateValue e
  F.ExpBinary _ _ bop e1 e2 -> translateOp2App e1 e2 bop
  F.ExpUnary _ _ uop operand -> translateOp1App operand uop
  F.ExpSubscript _ _ lhs (F.AList _ _ indices) -> translateSubscript lhs indices
  F.ExpDataRef {} -> unsupported "data reference"
  F.ExpFunctionCall {} -> unsupported "function call"
  F.ExpImpliedDo {} -> unsupported "implied do expression"
  F.ExpInitialisation {} -> unsupported "intitialization expression"
  F.ExpReturnSpec {} -> unsupported "return spec expression"
-- | Translate an expression with a known type. Fails if the actual type does
-- not match.
-- | Translate an expression with a known type. Fails with
-- 'ErrUnexpectedType' if the actual type does not match exactly
-- (no coercion is attempted; see 'translateCoerceExpression').
translateExpression'
  :: (Monad m) => D a -> F.Expression (F.Analysis ann)
  -> TranslateT m (FortranExpr a)
translateExpression' targetD ast = do
  SomePair sourceD expr <- translateExpression ast
  maybe
    (throwError (ErrUnexpectedType "expression" (Some sourceD) (Some targetD)))
    return
    (dcast sourceD targetD expr)
-- | Translate an expression and try to coerce it to a particular type. Fails if
-- the actual type cannot be coerced to the given type.
-- | Translate an expression and try to coerce it to a particular type. Fails if
-- the actual type cannot be coerced to the given type.  Coercion is only
-- attempted between primitive types, via 'MopCoercePrim'.
translateCoerceExpression
  :: (Monad m) => D a -> F.Expression (F.Analysis ann)
  -> TranslateT m (HFree MetaOp FortranExpr a)
translateCoerceExpression targetD ast = do
  SomePair sourceD expr <- translateExpression ast
  -- First check if it's already the right type
  case dcast sourceD targetD expr of
    Just y -> return (HPure y)
    Nothing -> case (matchPrimD sourceD, matchPrimD targetD) of
      -- Both sides primitive: wrap in an explicit coercion node.
      (Just (MatchPrimD _ sourcePrim), Just (MatchPrimD _ targetPrim)) ->
        return (HWrap (MopCoercePrim targetPrim (HPure expr)))
      _ -> throwError $ ErrUnexpectedType "expression" (Some sourceD) (Some targetD)
-- | Translate an array subscript.  Only a single plain index is
-- supported; ranges and multi-dimensional indexing are rejected.
translateSubscript
  :: (Monad m)
  => F.Expression (F.Analysis ann) -> [F.Index (F.Analysis ann)] -> TranslateT m SomeExpr
translateSubscript arrAst [F.IxSingle _ _ _ ixAst] = do
  SomePair arrD arrExp <- translateExpression arrAst
  SomePair ixD ixExp <- translateExpression ixAst
  -- Type-check the lookup: array type against index type.
  case matchOpSpec OpLookup (arrD :& ixD :& RNil) of
    Just (MatchOpSpec opResult resultD) ->
      return $ SomePair resultD $ HWrap $ CoreOp OpLookup opResult (arrExp :& ixExp :& RNil)
    Nothing ->
      case arrD of
        -- If the LHS is indeed an array, the index type must not have matched
        DArray (Index requiredIx) _ ->
          throwError $
          ErrUnexpectedType "array indexing"
          (Some (DPrim requiredIx)) (Some ixD)
        -- If the LHS is not an array, tell the user we expected some specific
        -- array type; in reality any array type would have done.
        _ -> throwError $
             ErrUnexpectedType "array indexing"
             (Some (DArray (Index PInt64) (ArrPrim PInt64)))
             (Some arrD)
translateSubscript lhs [F.IxRange {}] =
  unsupported "range indices"
translateSubscript _ _ =
  unsupported "multiple indices"
-- | Translate a source 'F.Value' to a strongly-typed expression. Accepts an
-- 'F.Expression' which is expected to be an 'F.ExpValue' because it needs
-- access to annotations to get unique names, and 'F.Value' doesn't have any
-- annotations of its own.
--
-- Do not call on an expression that you don't know to be an 'F.ExpValue'!
-- | Translate a literal or variable reference into a typed expression.
-- Integers become 64-bit, reals single-precision floats, logicals 8-bit
-- booleans; variables are resolved against the environment's scope map
-- by their unique name.
translateValue :: (Monad m) => F.Expression (F.Analysis ann) -> TranslateT m SomeExpr
translateValue e = case e of
  F.ExpValue _ _ v -> case v of
    F.ValInteger s -> translateLiteral v PInt64 (fmap fromIntegral . readLitInteger) s
    F.ValReal    s -> translateLiteral v PFloat (fmap realToFrac . readLitReal) s

    -- TODO: Auxiliary variables
    F.ValVariable nm -> do
      -- Look up by unique (analysis-assigned) name, not source name.
      let uniq = UniqueName (F.varName e)
      theVar <- view (teVarsInScope . at uniq)
      case theVar of
        Just (Some v'@(FortranVar d _)) -> return (SomePair d (HPure v'))
        _                               -> throwError $ ErrVarNotInScope nm

    F.ValLogical s ->
      -- Fortran logicals are modelled as 'Bool8': nonzero means true.
      let intoBool = fmap (\b -> if b then Bool8 1 else Bool8 0) . readLitBool
      in translateLiteral v PBool8 intoBool s

    F.ValComplex r c -> unsupported "complex literal"
    F.ValString s -> unsupported "string literal"
    F.ValHollerith s -> unsupported "hollerith literal"
    F.ValIntrinsic nm -> unsupported $ "intrinsic " <> describe nm
    F.ValOperator s -> unsupported "user-defined operator"
    F.ValAssignment -> unsupported "interface assignment"
    F.ValType s -> unsupported "type value"
    F.ValStar -> unsupported "star value"
  _ -> fail "impossible: translateValue called on a non-value"
-- | Parse a literal's source text with the given reader and wrap the result
-- as a typed 'OpLit' expression of the given primitive type.  A reader
-- failure becomes 'ErrBadLiteral'.
--
-- The 'F.Value' argument is currently unused (kept for interface stability
-- with the call sites in 'translateValue'), so it is bound as @_v@ to avoid
-- an unused-binding warning.
translateLiteral
  :: (Monad m)
  => F.Value ann
  -> Prim p k a -> (s -> Maybe a) -> s
  -> TranslateT m SomeExpr
translateLiteral _v pa readLit
  = maybe (throwError ErrBadLiteral) (return . SomePair (DPrim pa) . flit pa)
  . readLit
  where
    -- Build a nullary literal operator application of the parsed value.
    flit px x = HWrap (CoreOp OpLit (OSLit px x) RNil)
-- | Map a supported Fortran unary operator onto its typed core counterpart;
-- any other operator yields 'Nothing'.
translateOp1 :: F.UnaryOp -> Maybe (Some (Op 1))
translateOp1 unaryOp = case unaryOp of
  F.Minus -> Just (Some OpNeg)
  F.Plus -> Just (Some OpPos)
  F.Not -> Just (Some OpNot)
  _ -> Nothing
-- | Map a supported Fortran binary operator onto its typed core counterpart;
-- any other operator yields 'Nothing'.
translateOp2 :: F.BinaryOp -> Maybe (Some (Op 2))
translateOp2 binaryOp = case binaryOp of
  F.Addition -> Just (Some OpAdd)
  F.Subtraction -> Just (Some OpSub)
  F.Multiplication -> Just (Some OpMul)
  F.Division -> Just (Some OpDiv)
  F.LT -> Just (Some OpLT)
  F.GT -> Just (Some OpGT)
  F.LTE -> Just (Some OpLE)
  F.GTE -> Just (Some OpGE)
  F.EQ -> Just (Some OpEq)
  F.NE -> Just (Some OpNE)
  F.And -> Just (Some OpAnd)
  F.Or -> Just (Some OpOr)
  F.Equivalent -> Just (Some OpEquiv)
  F.NotEquivalent -> Just (Some OpNotEquiv)
  _ -> Nothing
-- | Evidence that the type-level list @as@ has length @n@.  Pattern matching
-- on 'HasLength' brings the @Length as ~ n@ equality constraint into scope.
data HasLength n as where
  HasLength :: Length as ~ n => HasLength n as
-- | Given a record of 'Some' functorial types, return 'Some' record over the
-- list of those types.
--
-- In the return value, @'Some' ('PairOf' ('HasLength' n) ('Rec' f))@ is a record over
-- an unknown list of types, with the constraint that the unknown list has
-- length @n@.
recSequenceSome :: Rec (Const (Some f)) xs -> Some (PairOf (HasLength (Length xs)) (Rec f))
recSequenceSome RNil = SomePair HasLength RNil
recSequenceSome (x :& xs) = case (x, recSequenceSome xs) of
  -- Matching on the tail's 'HasLength' lets GHC see that consing one more
  -- element preserves the length equality for the whole record.
  (Const (Some y), Some (PairOf HasLength ys)) -> SomePair HasLength (y :& ys)
-- This is way too general for its own good but it was fun to write.
--
-- Translate each argument AST, then match the resulting runtime types against
-- the operator's spec; a mismatch is reported as 'ErrInvalidOpApplication'.
translateOpApp
  :: (Monad m)
  => (Length xs ~ n)
  => Op n ok
  -> Rec (Const (F.Expression (F.Analysis ann))) xs -> TranslateT m SomeExpr
translateOpApp operator argAsts = do
  -- Translate every argument, collapsing the per-argument existentials into a
  -- single existential over the whole argument list (carrying a length proof).
  someArgs <- recSequenceSome <$> rtraverse (fmap Const . translateExpression . getConst) argAsts
  case someArgs of
    Some (PairOf HasLength argsTranslated) -> do
      -- Split the translated pairs into their type components and their
      -- expression components.
      let argsD = rmap (\(PairOf d _) -> d) argsTranslated
          argsExpr = rmap (\(PairOf _ e) -> e) argsTranslated
      MatchOpSpec opResult resultD <- case matchOpSpec operator argsD of
        Just x -> return x
        Nothing -> throwError $ ErrInvalidOpApplication (Some argsD)
      return $ SomePair resultD $ HWrap $ CoreOp operator opResult argsExpr
-- | Translate a binary operator application: look the operator up via
-- 'translateOp2' and defer to the generic 'translateOpApp'; unsupported
-- operators are reported via 'unsupported'.
translateOp2App
  :: (Monad m)
  => F.Expression (F.Analysis ann) -> F.Expression (F.Analysis ann) -> F.BinaryOp
  -> TranslateT m SomeExpr
translateOp2App e1 e2 bop =
  case translateOp2 bop of
    Nothing -> unsupported "binary operator"
    Just (Some coreOp) -> translateOpApp coreOp (Const e1 :& Const e2 :& RNil)
-- | Translate a unary operator application: look the operator up via
-- 'translateOp1' and defer to the generic 'translateOpApp'; unsupported
-- operators are reported via 'unsupported'.
translateOp1App
  :: (Monad m)
  => F.Expression (F.Analysis ann) -> F.UnaryOp
  -> TranslateT m SomeExpr
translateOp1App e uop =
  case translateOp1 uop of
    Nothing -> unsupported "unary operator"
    Just (Some coreOp) -> translateOpApp coreOp (Const e :& RNil)
--------------------------------------------------------------------------------
-- Readers for things that are strings in the AST
--------------------------------------------------------------------------------
-- | Parse the textual form of a Fortran integer literal.
readLitInteger :: String -> Maybe Integer
readLitInteger src = readMaybe src
-- | Parse the textual form of a Fortran real literal.
readLitReal :: String -> Maybe Double
readLitReal src = readMaybe src
-- | Parse a Fortran logical literal (@.true.@ / @.false.@),
-- case-insensitively.  Anything else yields 'Nothing'.
readLitBool :: String -> Maybe Bool
readLitBool lit
  | folded == ".true." = Just True
  | folded == ".false." = Just False
  | otherwise = Nothing
  where
    folded = map toLower lit
| dorchard/camfort | src/Language/Fortran/Model/Translate.hs | apache-2.0 | 23,809 | 0 | 21 | 5,628 | 5,120 | 2,653 | 2,467 | 422 | 15 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QGraphicsScene_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:26
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QGraphicsScene_h (
QhelpEvent_h(..)
) where
import Foreign.C.Types
import Qtc.Enums.Base
import Qtc.Enums.Core.Qt
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
-- Machine-generated qtHaskell glue: remove a previously installed Haskell
-- user-method override, identified by its integer event id.  The first CInt
-- passed to the C shim (0, 1 or 2) appears to select the override category,
-- matching the three classes below (plain / variant / variant-list) --
-- TODO(review): confirm against the generated C side.
instance QunSetUserMethod (QGraphicsScene ()) where
  unSetUserMethod qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QGraphicsScene_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)

foreign import ccall "qtc_QGraphicsScene_unSetUserMethod" qtc_QGraphicsScene_unSetUserMethod :: Ptr (TQGraphicsScene a) -> CInt -> CInt -> IO (CBool)

-- Same operation for Haskell-side subclass wrappers (the Sc type).
instance QunSetUserMethod (QGraphicsSceneSc a) where
  unSetUserMethod qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QGraphicsScene_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)

instance QunSetUserMethodVariant (QGraphicsScene ()) where
  unSetUserMethodVariant qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QGraphicsScene_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)

instance QunSetUserMethodVariant (QGraphicsSceneSc a) where
  unSetUserMethodVariant qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QGraphicsScene_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)

instance QunSetUserMethodVariantList (QGraphicsScene ()) where
  unSetUserMethodVariantList qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QGraphicsScene_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)

instance QunSetUserMethodVariantList (QGraphicsSceneSc a) where
  unSetUserMethodVariantList qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      qtc_QGraphicsScene_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
-- Install a Haskell callback (scene -> IO ()) as user method number _eid.
-- The callback is turned into a C function pointer; a StablePtr keeps the
-- closure alive, and a companion destructor callback frees the StablePtr and
-- both FunPtrs when the C++ side discards the override.
instance QsetUserMethod (QGraphicsScene ()) (QGraphicsScene x0 -> IO ()) where
  setUserMethod _eobj _eid _handler
    = do
      funptr <- wrapSetUserMethod_QGraphicsScene setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetUserMethod_QGraphicsScene_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        qtc_QGraphicsScene_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return ()
    where
      -- Invoked from C++: rebuild the Haskell object and run the handler,
      -- skipping the call when the receiver pointer is null.
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> IO ()
      setHandlerWrapper x0
        = do
          x0obj <- objectFromPtr_nf x0
          if (objectIsNull x0obj)
            then return ()
            else _handler x0obj
      -- Destructor: release the StablePtr and FunPtrs created above.
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QGraphicsScene_setUserMethod" qtc_QGraphicsScene_setUserMethod :: Ptr (TQGraphicsScene a) -> CInt -> Ptr (Ptr (TQGraphicsScene x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QGraphicsScene :: (Ptr (TQGraphicsScene x0) -> IO ()) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QGraphicsScene_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Identical installation logic for subclass wrappers.
instance QsetUserMethod (QGraphicsSceneSc a) (QGraphicsScene x0 -> IO ()) where
  setUserMethod _eobj _eid _handler
    = do
      funptr <- wrapSetUserMethod_QGraphicsScene setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetUserMethod_QGraphicsScene_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        qtc_QGraphicsScene_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return ()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> IO ()
      setHandlerWrapper x0
        = do
          x0obj <- objectFromPtr_nf x0
          if (objectIsNull x0obj)
            then return ()
            else _handler x0obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- Install a QVariant-returning Haskell user method.  Same lifecycle as the
-- IO () variant above, but the wrapper marshals a QVariant argument in and a
-- QVariant result back out to the C++ side.
instance QsetUserMethod (QGraphicsScene ()) (QGraphicsScene x0 -> QVariant () -> IO (QVariant ())) where
  setUserMethod _eobj _eid _handler
    = do
      funptr <- wrapSetUserMethodVariant_QGraphicsScene setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetUserMethodVariant_QGraphicsScene_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        qtc_QGraphicsScene_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return ()
    where
      -- On a null receiver the input variant pointer is cast and returned
      -- unchanged instead of invoking the handler.
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
      setHandlerWrapper x0 x1
        = do
          x0obj <- objectFromPtr_nf x0
          x1obj <- objectFromPtr_nf x1
          rv <- if (objectIsNull x0obj)
                  then return $ objectCast x0obj
                  else _handler x0obj x1obj
          withObjectPtr rv $ \cobj_rv -> return cobj_rv
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QGraphicsScene_setUserMethodVariant" qtc_QGraphicsScene_setUserMethodVariant :: Ptr (TQGraphicsScene a) -> CInt -> Ptr (Ptr (TQGraphicsScene x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QGraphicsScene :: (Ptr (TQGraphicsScene x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QGraphicsScene_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Subclass-wrapper copy of the variant installer.
instance QsetUserMethod (QGraphicsSceneSc a) (QGraphicsScene x0 -> QVariant () -> IO (QVariant ())) where
  setUserMethod _eobj _eid _handler
    = do
      funptr <- wrapSetUserMethodVariant_QGraphicsScene setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetUserMethodVariant_QGraphicsScene_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        qtc_QGraphicsScene_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return ()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
      setHandlerWrapper x0 x1
        = do
          x0obj <- objectFromPtr_nf x0
          x1obj <- objectFromPtr_nf x1
          rv <- if (objectIsNull x0obj)
                  then return $ objectCast x0obj
                  else _handler x0obj x1obj
          withObjectPtr rv $ \cobj_rv -> return cobj_rv
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- Remove a string-keyed event handler previously installed via 'setHandler'.
-- The handler is identified by its event-name string (marshalled as CWString).
instance QunSetHandler (QGraphicsScene ()) where
  unSetHandler qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      withCWString evid $ \cstr_evid ->
      qtc_QGraphicsScene_unSetHandler cobj_qobj cstr_evid

foreign import ccall "qtc_QGraphicsScene_unSetHandler" qtc_QGraphicsScene_unSetHandler :: Ptr (TQGraphicsScene a) -> CWString -> IO (CBool)

-- Subclass-wrapper copy.
instance QunSetHandler (QGraphicsSceneSc a) where
  unSetHandler qobj evid
    = withBoolResult $
      withObjectPtr qobj $ \cobj_qobj ->
      withCWString evid $ \cstr_evid ->
      qtc_QGraphicsScene_unSetHandler cobj_qobj cstr_evid
-- Install a string-keyed handler taking the scene and a QEvent.  The handler
-- is wrapped as a C function pointer and registered under the event name
-- _eid; the destructor callback releases the StablePtr and FunPtrs.
instance QsetHandler (QGraphicsScene ()) (QGraphicsScene x0 -> QEvent t1 -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QGraphicsScene1 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QGraphicsScene1_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
        qtc_QGraphicsScene_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      -- Invoked from C++: rebuild both Haskell objects and run the handler,
      -- skipping the call when the receiver is null.
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO ()
      setHandlerWrapper x0 x1
        = do x0obj <- qGraphicsSceneFromPtr x0
             x1obj <- objectFromPtr_nf x1
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QGraphicsScene_setHandler1" qtc_QGraphicsScene_setHandler1 :: Ptr (TQGraphicsScene a) -> CWString -> Ptr (Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene1 :: (Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO ()) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Subclass-wrapper copy.
instance QsetHandler (QGraphicsSceneSc a) (QGraphicsScene x0 -> QEvent t1 -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QGraphicsScene1 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QGraphicsScene1_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
        qtc_QGraphicsScene_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO ()
      setHandlerWrapper x0 x1
        = do x0obj <- qGraphicsSceneFromPtr x0
             x1obj <- objectFromPtr_nf x1
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- Direct _h wrappers: each forwards the corresponding event to the C++ base
-- implementation through the generated C shim (one instance for the plain
-- wrapper type, one for the subclass wrapper type).
instance QcontextMenuEvent_h (QGraphicsScene ()) ((QGraphicsSceneContextMenuEvent t1)) where
  contextMenuEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_contextMenuEvent cobj_x0 cobj_x1

foreign import ccall "qtc_QGraphicsScene_contextMenuEvent" qtc_QGraphicsScene_contextMenuEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneContextMenuEvent t1) -> IO ()

instance QcontextMenuEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneContextMenuEvent t1)) where
  contextMenuEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_contextMenuEvent cobj_x0 cobj_x1

instance QdragEnterEvent_h (QGraphicsScene ()) ((QGraphicsSceneDragDropEvent t1)) where
  dragEnterEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_dragEnterEvent cobj_x0 cobj_x1

foreign import ccall "qtc_QGraphicsScene_dragEnterEvent" qtc_QGraphicsScene_dragEnterEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneDragDropEvent t1) -> IO ()

instance QdragEnterEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneDragDropEvent t1)) where
  dragEnterEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_dragEnterEvent cobj_x0 cobj_x1

instance QdragLeaveEvent_h (QGraphicsScene ()) ((QGraphicsSceneDragDropEvent t1)) where
  dragLeaveEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_dragLeaveEvent cobj_x0 cobj_x1

foreign import ccall "qtc_QGraphicsScene_dragLeaveEvent" qtc_QGraphicsScene_dragLeaveEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneDragDropEvent t1) -> IO ()

instance QdragLeaveEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneDragDropEvent t1)) where
  dragLeaveEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_dragLeaveEvent cobj_x0 cobj_x1

instance QdragMoveEvent_h (QGraphicsScene ()) ((QGraphicsSceneDragDropEvent t1)) where
  dragMoveEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_dragMoveEvent cobj_x0 cobj_x1

foreign import ccall "qtc_QGraphicsScene_dragMoveEvent" qtc_QGraphicsScene_dragMoveEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneDragDropEvent t1) -> IO ()

instance QdragMoveEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneDragDropEvent t1)) where
  dragMoveEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_dragMoveEvent cobj_x0 cobj_x1
-- Install a string-keyed handler taking the scene, a QPainter, and a QRectF
-- (used for the drawBackground/drawForeground callbacks).
instance QsetHandler (QGraphicsScene ()) (QGraphicsScene x0 -> QPainter t1 -> QRectF t2 -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QGraphicsScene2 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QGraphicsScene2_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
        qtc_QGraphicsScene_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> Ptr (TQRectF t2) -> IO ()
      setHandlerWrapper x0 x1 x2
        = do x0obj <- qGraphicsSceneFromPtr x0
             x1obj <- objectFromPtr_nf x1
             x2obj <- objectFromPtr_nf x2
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1obj x2obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QGraphicsScene_setHandler2" qtc_QGraphicsScene_setHandler2 :: Ptr (TQGraphicsScene a) -> CWString -> Ptr (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> Ptr (TQRectF t2) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene2 :: (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> Ptr (TQRectF t2) -> IO ()) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> Ptr (TQRectF t2) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Subclass-wrapper copy.
instance QsetHandler (QGraphicsSceneSc a) (QGraphicsScene x0 -> QPainter t1 -> QRectF t2 -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QGraphicsScene2 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QGraphicsScene2_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
        qtc_QGraphicsScene_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> Ptr (TQRectF t2) -> IO ()
      setHandlerWrapper x0 x1 x2
        = do x0obj <- qGraphicsSceneFromPtr x0
             x1obj <- objectFromPtr_nf x1
             x2obj <- objectFromPtr_nf x2
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1obj x2obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- drawBackground/drawForeground base-class forwarders.  The Qq* variants take
-- a QRectF object; the plain variants take a Haskell-side 'RectF' value and
-- marshal its four components as CDoubles through the *_qth shim.
instance QqdrawBackground_h (QGraphicsScene ()) ((QPainter t1, QRectF t2)) where
  qdrawBackground_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withObjectPtr x2 $ \cobj_x2 ->
      qtc_QGraphicsScene_drawBackground cobj_x0 cobj_x1 cobj_x2

foreign import ccall "qtc_QGraphicsScene_drawBackground" qtc_QGraphicsScene_drawBackground :: Ptr (TQGraphicsScene a) -> Ptr (TQPainter t1) -> Ptr (TQRectF t2) -> IO ()

instance QqdrawBackground_h (QGraphicsSceneSc a) ((QPainter t1, QRectF t2)) where
  qdrawBackground_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withObjectPtr x2 $ \cobj_x2 ->
      qtc_QGraphicsScene_drawBackground cobj_x0 cobj_x1 cobj_x2

instance QdrawBackground_h (QGraphicsScene ()) ((QPainter t1, RectF)) where
  drawBackground_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withCRectF x2 $ \crectf_x2_x crectf_x2_y crectf_x2_w crectf_x2_h ->
      qtc_QGraphicsScene_drawBackground_qth cobj_x0 cobj_x1 crectf_x2_x crectf_x2_y crectf_x2_w crectf_x2_h

foreign import ccall "qtc_QGraphicsScene_drawBackground_qth" qtc_QGraphicsScene_drawBackground_qth :: Ptr (TQGraphicsScene a) -> Ptr (TQPainter t1) -> CDouble -> CDouble -> CDouble -> CDouble -> IO ()

instance QdrawBackground_h (QGraphicsSceneSc a) ((QPainter t1, RectF)) where
  drawBackground_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withCRectF x2 $ \crectf_x2_x crectf_x2_y crectf_x2_w crectf_x2_h ->
      qtc_QGraphicsScene_drawBackground_qth cobj_x0 cobj_x1 crectf_x2_x crectf_x2_y crectf_x2_w crectf_x2_h

instance QqdrawForeground_h (QGraphicsScene ()) ((QPainter t1, QRectF t2)) where
  qdrawForeground_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withObjectPtr x2 $ \cobj_x2 ->
      qtc_QGraphicsScene_drawForeground cobj_x0 cobj_x1 cobj_x2

foreign import ccall "qtc_QGraphicsScene_drawForeground" qtc_QGraphicsScene_drawForeground :: Ptr (TQGraphicsScene a) -> Ptr (TQPainter t1) -> Ptr (TQRectF t2) -> IO ()

instance QqdrawForeground_h (QGraphicsSceneSc a) ((QPainter t1, QRectF t2)) where
  qdrawForeground_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withObjectPtr x2 $ \cobj_x2 ->
      qtc_QGraphicsScene_drawForeground cobj_x0 cobj_x1 cobj_x2

instance QdrawForeground_h (QGraphicsScene ()) ((QPainter t1, RectF)) where
  drawForeground_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withCRectF x2 $ \crectf_x2_x crectf_x2_y crectf_x2_w crectf_x2_h ->
      qtc_QGraphicsScene_drawForeground_qth cobj_x0 cobj_x1 crectf_x2_x crectf_x2_y crectf_x2_w crectf_x2_h

foreign import ccall "qtc_QGraphicsScene_drawForeground_qth" qtc_QGraphicsScene_drawForeground_qth :: Ptr (TQGraphicsScene a) -> Ptr (TQPainter t1) -> CDouble -> CDouble -> CDouble -> CDouble -> IO ()

instance QdrawForeground_h (QGraphicsSceneSc a) ((QPainter t1, RectF)) where
  drawForeground_h x0 (x1, x2)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withCRectF x2 $ \crectf_x2_x crectf_x2_y crectf_x2_w crectf_x2_h ->
      qtc_QGraphicsScene_drawForeground_qth cobj_x0 cobj_x1 crectf_x2_x crectf_x2_y crectf_x2_w crectf_x2_h
-- Install a handler for the drawItems callback.  The C side passes two
-- length-prefixed arrays (items and their style options); each is read with
-- peekArray and converted to a Haskell list of wrapped objects.
instance QsetHandler (QGraphicsScene ()) (QGraphicsScene x0 -> QPainter t1 -> Int -> [QGraphicsItem t3] -> [QStyleOptionGraphicsItem t4] -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QGraphicsScene3 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QGraphicsScene3_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
        qtc_QGraphicsScene_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> IO ()
      setHandlerWrapper x0 x1 x2 _l3 x3 _l4 x4
        = do x0obj <- qGraphicsSceneFromPtr x0
             x1obj <- objectFromPtr_nf x1
             let x2int = fromCInt x2
             -- _l3/_l4 are the element counts for the two C arrays.
             let lx3 = fromCInt _l3
             xps3 <- peekArray lx3 x3
             x3obj <- objectListFromPtrList_nf xps3
             let lx4 = fromCInt _l4
             xps4 <- peekArray lx4 x4
             x4obj <- objectListFromPtrList_nf xps4
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1obj x2int x3obj x4obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QGraphicsScene_setHandler3" qtc_QGraphicsScene_setHandler3 :: Ptr (TQGraphicsScene a) -> CWString -> Ptr (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene3 :: (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> IO ()) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene3_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Subclass-wrapper copy.
instance QsetHandler (QGraphicsSceneSc a) (QGraphicsScene x0 -> QPainter t1 -> Int -> [QGraphicsItem t3] -> [QStyleOptionGraphicsItem t4] -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QGraphicsScene3 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QGraphicsScene3_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
        qtc_QGraphicsScene_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> IO ()
      setHandlerWrapper x0 x1 x2 _l3 x3 _l4 x4
        = do x0obj <- qGraphicsSceneFromPtr x0
             x1obj <- objectFromPtr_nf x1
             let x2int = fromCInt x2
             let lx3 = fromCInt _l3
             xps3 <- peekArray lx3 x3
             x3obj <- objectListFromPtrList_nf xps3
             let lx4 = fromCInt _l4
             xps4 <- peekArray lx4 x4
             x4obj <- objectListFromPtrList_nf xps4
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1obj x2int x3obj x4obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- Same as the drawItems handler above, with an extra trailing QObject
-- argument (the widget being painted on, per the Qt drawItems overload).
instance QsetHandler (QGraphicsScene ()) (QGraphicsScene x0 -> QPainter t1 -> Int -> [QGraphicsItem t3] -> [QStyleOptionGraphicsItem t4] -> QObject t5 -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QGraphicsScene4 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QGraphicsScene4_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
        qtc_QGraphicsScene_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> Ptr (TQObject t5) -> IO ()
      setHandlerWrapper x0 x1 x2 _l3 x3 _l4 x4 x5
        = do x0obj <- qGraphicsSceneFromPtr x0
             x1obj <- objectFromPtr_nf x1
             let x2int = fromCInt x2
             -- _l3/_l4 are the element counts for the two C arrays.
             let lx3 = fromCInt _l3
             xps3 <- peekArray lx3 x3
             x3obj <- objectListFromPtrList_nf xps3
             let lx4 = fromCInt _l4
             xps4 <- peekArray lx4 x4
             x4obj <- objectListFromPtrList_nf xps4
             x5obj <- qObjectFromPtr x5
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1obj x2int x3obj x4obj x5obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()

foreign import ccall "qtc_QGraphicsScene_setHandler4" qtc_QGraphicsScene_setHandler4 :: Ptr (TQGraphicsScene a) -> CWString -> Ptr (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> Ptr (TQObject t5) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene4 :: (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> Ptr (TQObject t5) -> IO ()) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> Ptr (TQObject t5) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene4_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))

-- Subclass-wrapper copy.
instance QsetHandler (QGraphicsSceneSc a) (QGraphicsScene x0 -> QPainter t1 -> Int -> [QGraphicsItem t3] -> [QStyleOptionGraphicsItem t4] -> QObject t5 -> IO ()) where
  setHandler _eobj _eid _handler
    = do
      funptr <- wrapSetHandler_QGraphicsScene4 setHandlerWrapper
      stptr <- newStablePtr (Wrap _handler)
      funptr_d <- wrapSetHandler_QGraphicsScene4_d setHandlerWrapper_d
      withObjectPtr _eobj $ \cobj_eobj ->
        withCWString _eid $ \cstr_eid ->
        qtc_QGraphicsScene_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
      return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> Ptr (TQObject t5) -> IO ()
      setHandlerWrapper x0 x1 x2 _l3 x3 _l4 x4 x5
        = do x0obj <- qGraphicsSceneFromPtr x0
             x1obj <- objectFromPtr_nf x1
             let x2int = fromCInt x2
             let lx3 = fromCInt _l3
             xps3 <- peekArray lx3 x3
             x3obj <- objectListFromPtrList_nf xps3
             let lx4 = fromCInt _l4
             xps4 <- peekArray lx4 x4
             x4obj <- objectListFromPtrList_nf xps4
             x5obj <- qObjectFromPtr x5
             if (objectIsNull x0obj)
               then return ()
               else _handler x0obj x1obj x2int x3obj x4obj x5obj
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do when (stptr/=ptrNull)
               (freeStablePtr (castPtrToStablePtr stptr))
             when (funptr/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr))
             when (funptr_d/=ptrNull)
               (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
             return ()
-- QGraphicsScene::drawItems overload without a widget argument; the
-- Haskell lists are marshalled to (length, pointer-array) pairs.
instance QdrawItems_h (QGraphicsScene ()) ((QPainter t1, Int, [QGraphicsItem t3], [QStyleOptionGraphicsItem t4])) where
  drawItems_h x0 (x1, x2, x3, x4)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withQListObject x3 $ \cqlistlen_x3 cqlistobj_x3 ->
      withQListObject x4 $ \cqlistlen_x4 cqlistobj_x4 ->
      qtc_QGraphicsScene_drawItems cobj_x0 cobj_x1 (toCInt x2) cqlistlen_x3 cqlistobj_x3 cqlistlen_x4 cqlistobj_x4
foreign import ccall "qtc_QGraphicsScene_drawItems" qtc_QGraphicsScene_drawItems :: Ptr (TQGraphicsScene a) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> IO ()
-- Same binding for subclasses.
instance QdrawItems_h (QGraphicsSceneSc a) ((QPainter t1, Int, [QGraphicsItem t3], [QStyleOptionGraphicsItem t4])) where
  drawItems_h x0 (x1, x2, x3, x4)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withQListObject x3 $ \cqlistlen_x3 cqlistobj_x3 ->
      withQListObject x4 $ \cqlistlen_x4 cqlistobj_x4 ->
      qtc_QGraphicsScene_drawItems cobj_x0 cobj_x1 (toCInt x2) cqlistlen_x3 cqlistobj_x3 cqlistlen_x4 cqlistobj_x4
-- drawItems overload that also passes the target QWidget.
instance QdrawItems_h (QGraphicsScene ()) ((QPainter t1, Int, [QGraphicsItem t3], [QStyleOptionGraphicsItem t4], QWidget t5)) where
  drawItems_h x0 (x1, x2, x3, x4, x5)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withQListObject x3 $ \cqlistlen_x3 cqlistobj_x3 ->
      withQListObject x4 $ \cqlistlen_x4 cqlistobj_x4 ->
      withObjectPtr x5 $ \cobj_x5 ->
      qtc_QGraphicsScene_drawItems1 cobj_x0 cobj_x1 (toCInt x2) cqlistlen_x3 cqlistobj_x3 cqlistlen_x4 cqlistobj_x4 cobj_x5
foreign import ccall "qtc_QGraphicsScene_drawItems1" qtc_QGraphicsScene_drawItems1 :: Ptr (TQGraphicsScene a) -> Ptr (TQPainter t1) -> CInt -> CInt -> Ptr (Ptr (TQGraphicsItem t3)) -> CInt -> Ptr (Ptr (TQStyleOptionGraphicsItem t4)) -> Ptr (TQWidget t5) -> IO ()
instance QdrawItems_h (QGraphicsSceneSc a) ((QPainter t1, Int, [QGraphicsItem t3], [QStyleOptionGraphicsItem t4], QWidget t5)) where
  drawItems_h x0 (x1, x2, x3, x4, x5)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withQListObject x3 $ \cqlistlen_x3 cqlistobj_x3 ->
      withQListObject x4 $ \cqlistlen_x4 cqlistobj_x4 ->
      withObjectPtr x5 $ \cobj_x5 ->
      qtc_QGraphicsScene_drawItems1 cobj_x0 cobj_x1 (toCInt x2) cqlistlen_x3 cqlistobj_x3 cqlistlen_x4 cqlistobj_x4 cobj_x5
-- QGraphicsScene::dropEvent(QGraphicsSceneDragDropEvent*) binding.
instance QdropEvent_h (QGraphicsScene ()) ((QGraphicsSceneDragDropEvent t1)) where
  dropEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_dropEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_dropEvent" qtc_QGraphicsScene_dropEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneDragDropEvent t1) -> IO ()
instance QdropEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneDragDropEvent t1)) where
  dropEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_dropEvent cobj_x0 cobj_x1
-- Handler registration for an event callback (QEvent -> Bool); the Bool
-- result is marshalled back to C as CBool.
instance QsetHandler (QGraphicsScene ()) (QGraphicsScene x0 -> QEvent t1 -> IO (Bool)) where
  setHandler _eobj _eid _handler
    = do
        funptr <- wrapSetHandler_QGraphicsScene5 setHandlerWrapper
        stptr <- newStablePtr (Wrap _handler)
        funptr_d <- wrapSetHandler_QGraphicsScene5_d setHandlerWrapper_d
        withObjectPtr _eobj $ \cobj_eobj ->
          withCWString _eid $ \cstr_eid ->
            qtc_QGraphicsScene_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
        return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO (CBool)
      setHandlerWrapper x0 x1
        = do
            x0obj <- qGraphicsSceneFromPtr x0
            x1obj <- objectFromPtr_nf x1
            -- Null receiver yields False rather than invoking the handler.
            let rv =
                  if (objectIsNull x0obj)
                    then return False
                    else _handler x0obj x1obj
            rvf <- rv
            return (toCBool rvf)
      -- Frees the StablePtr and both FunPtrs when C tears the handler down.
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do
            when (stptr/=ptrNull)
              (freeStablePtr (castPtrToStablePtr stptr))
            when (funptr/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr))
            when (funptr_d/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
            return ()
foreign import ccall "qtc_QGraphicsScene_setHandler5" qtc_QGraphicsScene_setHandler5 :: Ptr (TQGraphicsScene a) -> CWString -> Ptr (Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene5 :: (Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene5_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
-- Same registration for subclasses.
instance QsetHandler (QGraphicsSceneSc a) (QGraphicsScene x0 -> QEvent t1 -> IO (Bool)) where
  setHandler _eobj _eid _handler
    = do
        funptr <- wrapSetHandler_QGraphicsScene5 setHandlerWrapper
        stptr <- newStablePtr (Wrap _handler)
        funptr_d <- wrapSetHandler_QGraphicsScene5_d setHandlerWrapper_d
        withObjectPtr _eobj $ \cobj_eobj ->
          withCWString _eid $ \cstr_eid ->
            qtc_QGraphicsScene_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
        return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQEvent t1) -> IO (CBool)
      setHandlerWrapper x0 x1
        = do
            x0obj <- qGraphicsSceneFromPtr x0
            x1obj <- objectFromPtr_nf x1
            let rv =
                  if (objectIsNull x0obj)
                    then return False
                    else _handler x0obj x1obj
            rvf <- rv
            return (toCBool rvf)
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do
            when (stptr/=ptrNull)
              (freeStablePtr (castPtrToStablePtr stptr))
            when (funptr/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr))
            when (funptr_d/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
            return ()
-- QGraphicsScene::event(QEvent*) -> bool binding.
instance Qevent_h (QGraphicsScene ()) ((QEvent t1)) where
  event_h x0 (x1)
    = withBoolResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_event cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_event" qtc_QGraphicsScene_event :: Ptr (TQGraphicsScene a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent_h (QGraphicsSceneSc a) ((QEvent t1)) where
  event_h x0 (x1)
    = withBoolResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_event cobj_x0 cobj_x1
-- focusInEvent / focusOutEvent handlers (QFocusEvent*).
instance QfocusInEvent_h (QGraphicsScene ()) ((QFocusEvent t1)) where
  focusInEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_focusInEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_focusInEvent" qtc_QGraphicsScene_focusInEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusInEvent_h (QGraphicsSceneSc a) ((QFocusEvent t1)) where
  focusInEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_focusInEvent cobj_x0 cobj_x1
instance QfocusOutEvent_h (QGraphicsScene ()) ((QFocusEvent t1)) where
  focusOutEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_focusOutEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_focusOutEvent" qtc_QGraphicsScene_focusOutEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusOutEvent_h (QGraphicsSceneSc a) ((QFocusEvent t1)) where
  focusOutEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_focusOutEvent cobj_x0 cobj_x1
-- Class for the helpEvent virtual; defined here because QGraphicsScene is
-- the first class in the binding to expose it.
class QhelpEvent_h x0 x1 where
  helpEvent_h :: x0 -> x1 -> IO ()
instance QhelpEvent_h (QGraphicsScene ()) ((QGraphicsSceneHelpEvent t1)) where
  helpEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_helpEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_helpEvent" qtc_QGraphicsScene_helpEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneHelpEvent t1) -> IO ()
instance QhelpEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneHelpEvent t1)) where
  helpEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_helpEvent cobj_x0 cobj_x1
-- inputMethodEvent handler (QInputMethodEvent*).
instance QinputMethodEvent_h (QGraphicsScene ()) ((QInputMethodEvent t1)) where
  inputMethodEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_inputMethodEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_inputMethodEvent" qtc_QGraphicsScene_inputMethodEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQInputMethodEvent t1) -> IO ()
instance QinputMethodEvent_h (QGraphicsSceneSc a) ((QInputMethodEvent t1)) where
  inputMethodEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_inputMethodEvent cobj_x0 cobj_x1
-- Handler registration for inputMethodQuery-style callbacks
-- (InputMethodQuery enum -> QVariant); the enum travels over C as CLong.
instance QsetHandler (QGraphicsScene ()) (QGraphicsScene x0 -> InputMethodQuery -> IO (QVariant t0)) where
  setHandler _eobj _eid _handler
    = do
        funptr <- wrapSetHandler_QGraphicsScene6 setHandlerWrapper
        stptr <- newStablePtr (Wrap _handler)
        funptr_d <- wrapSetHandler_QGraphicsScene6_d setHandlerWrapper_d
        withObjectPtr _eobj $ \cobj_eobj ->
          withCWString _eid $ \cstr_eid ->
            qtc_QGraphicsScene_setHandler6 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
        return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> CLong -> IO (Ptr (TQVariant t0))
      setHandlerWrapper x0 x1
        = do
            x0obj <- qGraphicsSceneFromPtr x0
            let x1enum = qEnum_fromInt $ fromCLong x1
            -- NOTE(review): the null-receiver branch returns a cast of the
            -- (null) receiver itself as the result object; machine-generated
            -- pattern, kept as-is.
            let rv =
                  if (objectIsNull x0obj)
                    then return $ objectCast x0obj
                    else _handler x0obj x1enum
            rvf <- rv
            withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
      -- Frees the StablePtr and both FunPtrs on teardown.
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do
            when (stptr/=ptrNull)
              (freeStablePtr (castPtrToStablePtr stptr))
            when (funptr/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr))
            when (funptr_d/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
            return ()
foreign import ccall "qtc_QGraphicsScene_setHandler6" qtc_QGraphicsScene_setHandler6 :: Ptr (TQGraphicsScene a) -> CWString -> Ptr (Ptr (TQGraphicsScene x0) -> CLong -> IO (Ptr (TQVariant t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene6 :: (Ptr (TQGraphicsScene x0) -> CLong -> IO (Ptr (TQVariant t0))) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> CLong -> IO (Ptr (TQVariant t0))))
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene6_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
-- Same registration for subclasses.
instance QsetHandler (QGraphicsSceneSc a) (QGraphicsScene x0 -> InputMethodQuery -> IO (QVariant t0)) where
  setHandler _eobj _eid _handler
    = do
        funptr <- wrapSetHandler_QGraphicsScene6 setHandlerWrapper
        stptr <- newStablePtr (Wrap _handler)
        funptr_d <- wrapSetHandler_QGraphicsScene6_d setHandlerWrapper_d
        withObjectPtr _eobj $ \cobj_eobj ->
          withCWString _eid $ \cstr_eid ->
            qtc_QGraphicsScene_setHandler6 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
        return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> CLong -> IO (Ptr (TQVariant t0))
      setHandlerWrapper x0 x1
        = do
            x0obj <- qGraphicsSceneFromPtr x0
            let x1enum = qEnum_fromInt $ fromCLong x1
            let rv =
                  if (objectIsNull x0obj)
                    then return $ objectCast x0obj
                    else _handler x0obj x1enum
            rvf <- rv
            withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do
            when (stptr/=ptrNull)
              (freeStablePtr (castPtrToStablePtr stptr))
            when (funptr/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr))
            when (funptr_d/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
            return ()
-- inputMethodQuery: enum argument marshalled as CLong, QVariant result.
instance QinputMethodQuery_h (QGraphicsScene ()) ((InputMethodQuery)) where
  inputMethodQuery_h x0 (x1)
    = withQVariantResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QGraphicsScene_inputMethodQuery cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QGraphicsScene_inputMethodQuery" qtc_QGraphicsScene_inputMethodQuery :: Ptr (TQGraphicsScene a) -> CLong -> IO (Ptr (TQVariant ()))
instance QinputMethodQuery_h (QGraphicsSceneSc a) ((InputMethodQuery)) where
  inputMethodQuery_h x0 (x1)
    = withQVariantResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QGraphicsScene_inputMethodQuery cobj_x0 (toCLong $ qEnum_toInt x1)
-- Keyboard event handlers (QKeyEvent*).
instance QkeyPressEvent_h (QGraphicsScene ()) ((QKeyEvent t1)) where
  keyPressEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_keyPressEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_keyPressEvent" qtc_QGraphicsScene_keyPressEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyPressEvent_h (QGraphicsSceneSc a) ((QKeyEvent t1)) where
  keyPressEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_keyPressEvent cobj_x0 cobj_x1
instance QkeyReleaseEvent_h (QGraphicsScene ()) ((QKeyEvent t1)) where
  keyReleaseEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_keyReleaseEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_keyReleaseEvent" qtc_QGraphicsScene_keyReleaseEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyReleaseEvent_h (QGraphicsSceneSc a) ((QKeyEvent t1)) where
  keyReleaseEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_keyReleaseEvent cobj_x0 cobj_x1
-- Mouse event handlers (QGraphicsSceneMouseEvent*).
instance QmouseDoubleClickEvent_h (QGraphicsScene ()) ((QGraphicsSceneMouseEvent t1)) where
  mouseDoubleClickEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_mouseDoubleClickEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_mouseDoubleClickEvent" qtc_QGraphicsScene_mouseDoubleClickEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneMouseEvent t1) -> IO ()
instance QmouseDoubleClickEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneMouseEvent t1)) where
  mouseDoubleClickEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_mouseDoubleClickEvent cobj_x0 cobj_x1
instance QmouseMoveEvent_h (QGraphicsScene ()) ((QGraphicsSceneMouseEvent t1)) where
  mouseMoveEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_mouseMoveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_mouseMoveEvent" qtc_QGraphicsScene_mouseMoveEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneMouseEvent t1) -> IO ()
instance QmouseMoveEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneMouseEvent t1)) where
  mouseMoveEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_mouseMoveEvent cobj_x0 cobj_x1
instance QmousePressEvent_h (QGraphicsScene ()) ((QGraphicsSceneMouseEvent t1)) where
  mousePressEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_mousePressEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_mousePressEvent" qtc_QGraphicsScene_mousePressEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneMouseEvent t1) -> IO ()
instance QmousePressEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneMouseEvent t1)) where
  mousePressEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_mousePressEvent cobj_x0 cobj_x1
instance QmouseReleaseEvent_h (QGraphicsScene ()) ((QGraphicsSceneMouseEvent t1)) where
  mouseReleaseEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_mouseReleaseEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_mouseReleaseEvent" qtc_QGraphicsScene_mouseReleaseEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneMouseEvent t1) -> IO ()
instance QmouseReleaseEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneMouseEvent t1)) where
  mouseReleaseEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_mouseReleaseEvent cobj_x0 cobj_x1
-- Wheel event handler (QGraphicsSceneWheelEvent*).
instance QwheelEvent_h (QGraphicsScene ()) ((QGraphicsSceneWheelEvent t1)) where
  wheelEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_wheelEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QGraphicsScene_wheelEvent" qtc_QGraphicsScene_wheelEvent :: Ptr (TQGraphicsScene a) -> Ptr (TQGraphicsSceneWheelEvent t1) -> IO ()
instance QwheelEvent_h (QGraphicsSceneSc a) ((QGraphicsSceneWheelEvent t1)) where
  wheelEvent_h x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QGraphicsScene_wheelEvent cobj_x0 cobj_x1
-- Handler registration for eventFilter-style callbacks
-- (QObject -> QEvent -> Bool).
instance QsetHandler (QGraphicsScene ()) (QGraphicsScene x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
  setHandler _eobj _eid _handler
    = do
        funptr <- wrapSetHandler_QGraphicsScene7 setHandlerWrapper
        stptr <- newStablePtr (Wrap _handler)
        funptr_d <- wrapSetHandler_QGraphicsScene7_d setHandlerWrapper_d
        withObjectPtr _eobj $ \cobj_eobj ->
          withCWString _eid $ \cstr_eid ->
            qtc_QGraphicsScene_setHandler7 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
        return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
      setHandlerWrapper x0 x1 x2
        = do
            x0obj <- qGraphicsSceneFromPtr x0
            x1obj <- qObjectFromPtr x1
            x2obj <- objectFromPtr_nf x2
            -- Null receiver yields False rather than invoking the handler.
            let rv =
                  if (objectIsNull x0obj)
                    then return False
                    else _handler x0obj x1obj x2obj
            rvf <- rv
            return (toCBool rvf)
      -- Frees the StablePtr and both FunPtrs on teardown.
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do
            when (stptr/=ptrNull)
              (freeStablePtr (castPtrToStablePtr stptr))
            when (funptr/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr))
            when (funptr_d/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
            return ()
foreign import ccall "qtc_QGraphicsScene_setHandler7" qtc_QGraphicsScene_setHandler7 :: Ptr (TQGraphicsScene a) -> CWString -> Ptr (Ptr (TQGraphicsScene x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene7 :: (Ptr (TQGraphicsScene x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> IO (FunPtr (Ptr (TQGraphicsScene x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QGraphicsScene7_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
-- Same registration for subclasses.
instance QsetHandler (QGraphicsSceneSc a) (QGraphicsScene x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
  setHandler _eobj _eid _handler
    = do
        funptr <- wrapSetHandler_QGraphicsScene7 setHandlerWrapper
        stptr <- newStablePtr (Wrap _handler)
        funptr_d <- wrapSetHandler_QGraphicsScene7_d setHandlerWrapper_d
        withObjectPtr _eobj $ \cobj_eobj ->
          withCWString _eid $ \cstr_eid ->
            qtc_QGraphicsScene_setHandler7 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
        return()
    where
      setHandlerWrapper :: Ptr (TQGraphicsScene x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
      setHandlerWrapper x0 x1 x2
        = do
            x0obj <- qGraphicsSceneFromPtr x0
            x1obj <- qObjectFromPtr x1
            x2obj <- objectFromPtr_nf x2
            let rv =
                  if (objectIsNull x0obj)
                    then return False
                    else _handler x0obj x1obj x2obj
            rvf <- rv
            return (toCBool rvf)
      setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
      setHandlerWrapper_d funptr stptr funptr_d
        = do
            when (stptr/=ptrNull)
              (freeStablePtr (castPtrToStablePtr stptr))
            when (funptr/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr))
            when (funptr_d/=ptrNull)
              (freeHaskellFunPtr (castPtrToFunPtr funptr_d))
            return ()
-- QGraphicsScene::eventFilter(QObject*, QEvent*) -> bool binding.
instance QeventFilter_h (QGraphicsScene ()) ((QObject t1, QEvent t2)) where
  eventFilter_h x0 (x1, x2)
    = withBoolResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withObjectPtr x2 $ \cobj_x2 ->
      qtc_QGraphicsScene_eventFilter cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QGraphicsScene_eventFilter" qtc_QGraphicsScene_eventFilter :: Ptr (TQGraphicsScene a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter_h (QGraphicsSceneSc a) ((QObject t1, QEvent t2)) where
  eventFilter_h x0 (x1, x2)
    = withBoolResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      withObjectPtr x2 $ \cobj_x2 ->
      qtc_QGraphicsScene_eventFilter cobj_x0 cobj_x1 cobj_x2
| keera-studios/hsQt | Qtc/Gui/QGraphicsScene_h.hs | bsd-2-clause | 52,510 | 0 | 22 | 10,774 | 16,951 | 8,155 | 8,796 | -1 | -1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QColormap.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:25
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QColormap (
qColormap
,qColormap_nf
,qColormapCleanup
,qColormapInitialize
,QqColormapInstance(..)
,qColormap_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Gui.QColormap
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- | Copy constructor binding for QColormap; the result is wrapped with
-- the library's managed-object finalization.
qColormap :: (QColormap t1) -> IO (QColormap ())
qColormap (x1)
  = withQColormapResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColormap cobj_x1
foreign import ccall "qtc_QColormap" qtc_QColormap :: Ptr (TQColormap t1) -> IO (Ptr (TQColormap ()))
-- | Same copy constructor, but the result is not finalized ("_nf").
qColormap_nf :: (QColormap t1) -> IO (QColormap ())
qColormap_nf (x1)
  = withObjectRefResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QColormap cobj_x1
-- | Binding for the static QColormap::cleanup().
qColormapCleanup :: (()) -> IO ()
qColormapCleanup ()
  = qtc_QColormap_cleanup
foreign import ccall "qtc_QColormap_cleanup" qtc_QColormap_cleanup :: IO ()
-- | QColormap::depth(): color depth of the colormap, in bits.
instance Qdepth (QColormap a) (()) (IO (Int)) where
  depth x0 ()
    = withIntResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QColormap_depth cobj_x0
foreign import ccall "qtc_QColormap_depth" qtc_QColormap_depth :: Ptr (TQColormap a) -> IO CInt
-- | Binding for the static QColormap::initialize().
qColormapInitialize :: (()) -> IO ()
qColormapInitialize ()
  = qtc_QColormap_initialize
foreign import ccall "qtc_QColormap_initialize" qtc_QColormap_initialize :: IO ()
-- | Overload class for QColormap::instance(), with and without an
-- explicit screen number.
class QqColormapInstance x1 where
  qColormapInstance :: x1 -> IO (QColormap ())
instance QqColormapInstance (()) where
  qColormapInstance ()
    = withQColormapResult $
      qtc_QColormap_instance
foreign import ccall "qtc_QColormap_instance" qtc_QColormap_instance :: IO (Ptr (TQColormap ()))
instance QqColormapInstance ((Int)) where
  qColormapInstance (x1)
    = withQColormapResult $
      qtc_QColormap_instance1 (toCInt x1)
foreign import ccall "qtc_QColormap_instance1" qtc_QColormap_instance1 :: CInt -> IO (Ptr (TQColormap ()))
-- | QColormap::mode(); the enum is marshalled over C as CLong.
instance Qmode (QColormap a) (()) (IO (QColormapMode)) where
  mode x0 ()
    = withQEnumResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QColormap_mode cobj_x0
foreign import ccall "qtc_QColormap_mode" qtc_QColormap_mode :: Ptr (TQColormap a) -> IO CLong
-- | QColormap::pixel(const QColor&): device pixel value for a color.
instance Qpixel (QColormap a) ((QColor t1)) where
  pixel x0 (x1)
    = withUnsignedIntResult $
      withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
      qtc_QColormap_pixel cobj_x0 cobj_x1
foreign import ccall "qtc_QColormap_pixel" qtc_QColormap_pixel :: Ptr (TQColormap a) -> Ptr (TQColor t1) -> IO CUInt
-- | QColormap::size() (named qsize to avoid clashing with QSize).
instance Qqsize (QColormap a) (()) (IO (Int)) where
  qsize x0 ()
    = withIntResult $
      withObjectPtr x0 $ \cobj_x0 ->
      qtc_QColormap_size cobj_x0
foreign import ccall "qtc_QColormap_size" qtc_QColormap_size :: Ptr (TQColormap a) -> IO CInt
-- | Explicitly deletes the underlying C++ QColormap object.
qColormap_delete :: QColormap a -> IO ()
qColormap_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QColormap_delete cobj_x0
foreign import ccall "qtc_QColormap_delete" qtc_QColormap_delete :: Ptr (TQColormap a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QColormap.hs | bsd-2-clause | 3,432 | 0 | 12 | 556 | 959 | 503 | 456 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Propellor.Property.Postfix where
import Propellor.Base
import qualified Propellor.Property.Apt as Apt
import qualified Propellor.Property.File as File
import qualified Propellor.Property.Service as Service
import qualified Propellor.Property.User as User
import qualified Data.Map as M
import Data.List
import Data.Char
-- | Ensures the postfix package is installed and its service running.
installed :: Property NoInfo
installed = Apt.serviceInstalledRunning "postfix"
-- | Fully restarts the postfix service.
restarted :: Property NoInfo
restarted = Service.restarted "postfix"
-- | Reloads postfix configuration without a full restart.
reloaded :: Property NoInfo
reloaded = Service.reloaded "postfix"
-- | Configures postfix as a satellite system, which
-- relays all mail through a relay host, which defaults to smtp.domain,
-- but can be changed by @mainCf "relayhost"@.
--
-- The smarthost may refuse to relay mail on to other domains, without
-- further configuration/keys. But this should be enough to get cron job
-- mail flowing to a place where it will be seen.
satellite :: Property NoInfo
satellite = check (not <$> mainCfIsSet "relayhost") setup
  `requires` installed
  where
    -- Only runs when relayhost is not already configured, so an
    -- admin-chosen relayhost is never clobbered.
    setup = trivial $ property "postfix satellite system" $ do
      hn <- asks hostName
      -- The default relayhost is smtp.<domain part of the hostname>.
      let (_, domain) = separate (== '.') hn
      ensureProperties
        [ Apt.reConfigure "postfix"
          [ ("postfix/main_mailer_type", "select", "Satellite system")
          , ("postfix/root_address", "string", "root")
          , ("postfix/destinations", "string", "localhost")
          , ("postfix/mailname", "string", hn)
          ]
        , mainCf ("relayhost", "smtp." ++ domain)
          `onChange` reloaded
        ]
-- | Sets up a file by running a property (which the filename is passed
-- to). If the setup property makes a change, postmap will be run on the
-- file, and postfix will be reloaded.
--
-- NOTE(review): the code only runs postmap on change; no reload is
-- performed here despite the sentence above -- confirm whether callers
-- are expected to chain 'reloaded' themselves.
mappedFile
  :: Combines (Property x) (Property NoInfo)
  => FilePath
  -> (FilePath -> Property x)
  -> Property (CInfo x NoInfo)
mappedFile f setup = setup f
  `onChange` cmdProperty "postmap" [f]
-- | Run newaliases command, which should be done after changing
-- @/etc/aliases@.
newaliases :: Property NoInfo
newaliases = trivial $ cmdProperty "newaliases" []
-- | The main config file for postfix.
mainCfFile :: FilePath
mainCfFile = "/etc/postfix/main.cf"
-- | Sets a main.cf @name=value@ pair. Does not reload postfix immediately.
mainCf :: (String, String) -> Property NoInfo
mainCf (name, value) =
  check needschange apply `describe` ("postfix main.cf " ++ assignment)
  where
    assignment = name ++ "=" ++ value
    -- A change is needed unless postconf already reports exactly this value.
    needschange = do
      current <- getMainCf name
      return (current /= Just value)
    apply = cmdProperty "postconf" ["-e", assignment]
-- | Gets a main.cf setting, by querying postconf.
-- Returns Nothing when postconf produces no output for the name.
getMainCf :: String -> IO (Maybe String)
getMainCf name = extract . lines <$> readProcess "postconf" [name]
  where
    extract [] = Nothing
    -- postconf prints "name = value"; the value is everything after the
    -- first '=', minus a single leading space when one is present.
    extract (l:_) = Just $ case separate (== '=') l of
      (_, (' ':v)) -> v
      (_, v) -> v
-- | Checks if a main.cf field is set. A field that is set to
-- the empty string is considered not set.
mainCfIsSet :: String -> IO Bool
mainCfIsSet name = maybe False (not . null) <$> getMainCf name
-- | Parses main.cf, and removes any initial configuration lines that are
-- overridden to other values later in the file.
--
-- For example, to add some settings, removing any old settings:
--
-- > mainCf `File.containsLines`
-- > 	[ "# I like bars."
-- > 	, "foo = bar"
-- > 	] `onChange` dedupMainCf
--
-- Note that multiline configurations that continue onto the next line
-- are not currently supported.
dedupMainCf :: Property NoInfo
dedupMainCf = File.fileProperty "postfix main.cf dedupped" dedupCf mainCfFile
-- | Pure dedup pass over main.cf lines: keeps only the last assignment of
-- each key, re-emitting kept assignments in canonical @key =value@ form.
-- Comment lines and lines without an \"=\" pass through unchanged.
dedupCf :: [String] -> [String]
dedupCf ls = go [] (occurrences (rights classified)) classified
  where
    classified = map classify ls
    -- Left: passthrough line; Right: (key, value) assignment.
    classify l
      | "#" `isPrefixOf` l = Left l
      | "=" `isInfixOf` l =
        -- The guard guarantees break finds an '=' to split on.
        let (rawk, _eq:v) = break (== '=') l
        in Right (filter (not . isSpace) rawk, v)
      | otherwise = Left l
    render k v = k ++ " =" ++ v
    -- How many assignments each key has in the whole input.
    occurrences = M.fromListWith (+) . map (\(k, _v) -> (k, (1 :: Integer)))
    go acc _ [] = reverse acc
    go acc remaining (Left l : rest) = go (l:acc) remaining rest
    go acc remaining (Right (k, v) : rest) = case M.lookup k remaining of
      -- A later assignment of this key exists, so drop this one.
      Just n | n > 1 -> go acc (M.insert k (n - 1) remaining) rest
      _ -> go (render k v : acc) remaining rest
-- | Installs saslauthd and configures it for postfix, authenticating
-- against PAM.
--
-- Does not configure postfix to use it; eg @smtpd_sasl_auth_enable = yes@
-- needs to be set to enable use. See
-- <https://wiki.debian.org/PostfixAndSASL>.
saslAuthdInstalled :: Property NoInfo
saslAuthdInstalled = setupdaemon
  `requires` Service.running "saslauthd"
  `requires` postfixgroup
  `requires` dirperm
  `requires` Apt.installed ["sasl2-bin"]
  `requires` smtpdconf
  where
    -- Run saslauthd with its socket directory inside postfix's chroot,
    -- so the chrooted smtpd can reach it.
    setupdaemon = "/etc/default/saslauthd" `File.containsLines`
      [ "START=yes"
      , "OPTIONS=\"-c -m " ++ dir ++ "\""
      ]
      `onChange` Service.restarted "saslauthd"
    smtpdconf = "/etc/postfix/sasl/smtpd.conf" `File.containsLines`
      [ "pwcheck_method: saslauthd"
      , "mech_list: PLAIN LOGIN"
      ]
    -- dpkg-statoverride records the root:sasl 710 ownership persistently;
    -- only done when the directory does not exist yet.
    dirperm = check (not <$> doesDirectoryExist dir) $
      cmdProperty "dpkg-statoverride"
        [ "--add", "root", "sasl", "710", dir ]
    -- The postfix user needs sasl group membership to use the socket.
    postfixgroup = (User "postfix") `User.hasGroup` (Group "sasl")
      `onChange` restarted
    dir = "/var/spool/postfix/var/run/saslauthd"
| np/propellor | src/Propellor/Property/Postfix.hs | bsd-2-clause | 5,157 | 74 | 16 | 936 | 1,390 | 765 | 625 | 97 | 4 |
{-| Implementation of the Ganeti Query2 node group queries.
-}
{-
Copyright (C) 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Query.Network
( getGroupConnection
, getNetworkUuid
, instIsConnected
, Runtime
, fieldsMap
, collectLiveData
) where
-- FIXME: everything except Runtime(..) and fieldsMap
-- is only exported for testing.
import qualified Data.Map as Map
import Data.Maybe (fromMaybe, mapMaybe)
import Data.List (find, foldl', intercalate)
import Ganeti.JSON
import Ganeti.Network
import Ganeti.Objects
import Ganeti.Query.Language
import Ganeti.Query.Common
import Ganeti.Query.Types
import Ganeti.Types
import Ganeti.Utils (b64StringToBitString)
-- | There is no actual runtime: networks carry no live data, so this
-- unit-like placeholder merely satisfies the query framework's types.
data Runtime = Runtime
-- | All network query fields: static attributes, address-pool
-- statistics, and config-derived group\/instance associations, plus
-- the standard timestamp, UUID, serial and tag fields.
networkFields :: FieldList Network Runtime
networkFields =
  [ (FieldDefinition "name" "Network" QFTText "Name",
     FieldSimple (rsNormal . networkName), QffNormal)
  , (FieldDefinition "network" "Subnet" QFTText "IPv4 subnet",
     FieldSimple (rsNormal . networkNetwork), QffNormal)
  , (FieldDefinition "gateway" "Gateway" QFTOther "IPv4 gateway",
     FieldSimple (rsMaybeUnavail . networkGateway), QffNormal)
  , (FieldDefinition "network6" "IPv6Subnet" QFTOther "IPv6 subnet",
     FieldSimple (rsMaybeUnavail . networkNetwork6), QffNormal)
  , (FieldDefinition "gateway6" "IPv6Gateway" QFTOther "IPv6 gateway",
     FieldSimple (rsMaybeUnavail . networkGateway6), QffNormal)
  , (FieldDefinition "mac_prefix" "MacPrefix" QFTOther "MAC address prefix",
     FieldSimple (rsMaybeUnavail . networkMacPrefix), QffNormal)
  -- Pool-derived fields yield NoData when no address pool can be built.
  , (FieldDefinition "free_count" "FreeCount" QFTNumber "Number of available\
                     \ addresses",
     FieldSimple (rsMaybeNoData . fmap getFreeCount . createAddressPool),
     QffNormal)
  , (FieldDefinition "map" "Map" QFTText "Actual mapping",
     FieldSimple (rsMaybeNoData . fmap getMap . createAddressPool),
     QffNormal)
  , (FieldDefinition "reserved_count" "ReservedCount" QFTNumber
     "Number of reserved addresses",
     FieldSimple (rsMaybeNoData . fmap getReservedCount . createAddressPool),
     QffNormal)
  -- Config-dependent fields need the cluster configuration.
  , (FieldDefinition "group_list" "GroupList" QFTOther
     "List of nodegroups (group name, NIC mode, NIC link)",
     FieldConfig (\cfg -> rsNormal . getGroupConnections cfg . networkUuid),
     QffNormal)
  , (FieldDefinition "group_cnt" "NodeGroups" QFTNumber "Number of nodegroups",
     FieldConfig (\cfg -> rsNormal . length . getGroupConnections cfg
                          . networkUuid), QffNormal)
  , (FieldDefinition "inst_list" "InstanceList" QFTOther "List of instances",
     FieldConfig (\cfg -> rsNormal . getInstances cfg . networkUuid),
     QffNormal)
  , (FieldDefinition "inst_cnt" "Instances" QFTNumber "Number of instances",
     FieldConfig (\cfg -> rsNormal . length . getInstances cfg
                          . networkUuid), QffNormal)
  , (FieldDefinition "external_reservations" "ExternalReservations" QFTText
     "External reservations",
     FieldSimple getExtReservationsString, QffNormal)
  ] ++
  timeStampFields ++
  uuidFields "Network" ++
  serialFields "Network" ++
  tagsFields
-- | Field map for networks, keyed by each field definition's name.
fieldsMap :: FieldMap Network Runtime
fieldsMap =
  Map.fromList [ (fdefName f, entry) | entry@(f, _, _) <- networkFields ]
-- TODO: the following fields are not implemented yet: external_reservations
-- | Given a network's UUID, list every nodegroup connection to that
-- network as (group name, NIC mode, NIC link) triples.
getGroupConnections :: ConfigData -> String -> [(String, String, String)]
getGroupConnections cfg network_uuid =
  mapMaybe (getGroupConnection network_uuid) allGroups
  where
    allGroups = Map.elems . fromContainer $ configNodegroups cfg
-- | Given a network's UUID and a node group, assemble the group's
-- name, the NIC mode and the NIC link through which the network is
-- connected to that group.  'Nothing' if the network is not connected.
getGroupConnection :: String -> NodeGroup -> Maybe (String, String, String)
getGroupConnection network_uuid group =
  fmap describe (Map.lookup network_uuid networks)
  where
    networks = fromContainer (groupNetworks group)
    describe net = (groupName group, getNicMode net, getNicLink net)
-- | Human-readable NIC mode of the network connection; "-" when the
-- mode is not set.
getNicMode :: PartialNicParams -> String
getNicMode = maybe "-" nICModeToRaw . nicpModeP
-- | Retrieves the network's link and formats it human-readable, also in
-- case it is not available.
getNicLink :: PartialNicParams -> String
getNicLink nic_params = maybe "-" id (nicpLinkP nic_params)
-- | Names of all instances attached to the given network UUID.
getInstances :: ConfigData -> String -> [String]
getInstances cfg network_uuid =
  [ instName inst | inst <- allInstances
                  , instIsConnected network_uuid inst ]
  where
    allInstances = Map.elems . fromContainer $ configInstances cfg
-- | Whether any of the instance's NICs is attached to the given network.
instIsConnected :: String -> Instance -> Bool
instIsConnected network_uuid =
  any ((== Just network_uuid) . nicNetwork) . instNics
-- | Look up a network's UUID by its (non-empty) name, if it exists.
getNetworkUuid :: ConfigData -> String -> Maybe String
getNetworkUuid cfg name =
  fmap networkUuid (find matchesName allNetworks)
  where
    matchesName n = fromNonEmpty (networkName n) == name
    allNetworks = Map.elems . fromContainer $ configNetworks cfg
-- | Computes the reservations list for a network.
--
-- This doesn't use the netmask for validation of the length, instead
-- simply iterating over the reservations string: each character is one
-- bit, \'1\' marks a reserved address.
--
-- NOTE(review): a \'1\' bit records @addr@ (the address *before* the
-- increment), so the first bit maps to the network address itself —
-- confirm this offset is intended.
getReservations :: Ip4Network -> String -> [Ip4Address]
getReservations (Ip4Network net _) =
  reverse .
  fst .
  -- Walk the bit string left to right, advancing the address once per
  -- bit; the accumulator is built in reverse and flipped at the end.
  foldl' (\(accu, addr) c ->
            let addr' = nextIp4Address addr
                accu' = case c of
                          '1' -> addr:accu
                          '0' -> accu
                          _ -> -- FIXME: the reservations string
                               -- should be a proper type
                               accu
            in (accu', addr')) ([], net)
-- | Render a network's external reservations as a comma-separated
-- string of addresses.
getExtReservationsString :: Network -> ResultEntry
getExtReservationsString net =
  rsNormal (intercalate ", " (map show reservedAddrs))
  where
    bits = b64StringToBitString . fromMaybe "" $ networkExtReservations net
    reservedAddrs = getReservations (networkNetwork net) bits
-- | Dummy live-data collector: networks have no live data, so each one
-- is simply paired with the placeholder 'Runtime'.
collectLiveData :: Bool -> ConfigData -> [Network] -> IO [(Network, Runtime)]
collectLiveData _ _ networks = return [ (net, Runtime) | net <- networks ]
| apyrgio/snf-ganeti | src/Ganeti/Query/Network.hs | bsd-2-clause | 7,913 | 0 | 16 | 1,525 | 1,412 | 767 | 645 | 114 | 3 |
-- |
-- Module : Language.C.Quote.CUDA
-- Copyright : (c) 2006-2011 Harvard University
-- (c) 2011-2013 Geoffrey Mainland
-- : (c) 2013-2015 Drexel University
-- License : BSD-style
-- Maintainer : mainland@cs.drexel.edu
-- The quasiquoters exposed by this module support the CUDA extensions, including CUDA-specific declaration specifiers and @\<\<\<…>>>@ kernel invocation syntax.
--
-- It includes partial support for C++11 lambda expressions syntax.
--
-- Support for lambda-expressions has the following limitations:
--
-- * the capture list must either be empty or have only the default capture mode specifier;
--
-- * the return type cannot be explicitly specified;
--
-- * the package supports C language, not C++, therefore lambda parameter list and body must be in valid C syntax.
--
-- Examples of lambdas supported by the 'cexp' quasiquoter:
--
-- > [] (int i) mutable {}
--
-- > [&] { return 7; }
--
module Language.C.Quote.CUDA (
ToIdent(..),
ToConst(..),
ToExp(..),
cexp,
cedecl,
cdecl,
csdecl,
cenum,
ctyquals,
cty,
cparam,
cparams,
cinit,
cstm,
cstms,
citem,
citems,
cunit,
cfun
) where
import qualified Language.C.Parser as P
import qualified Language.C.Syntax as C
import Language.C.Quote.Base (ToIdent(..), ToConst(..), ToExp(..), quasiquote)
import Language.Haskell.TH.Quote (QuasiQuoter)
-- | Parser extensions enabled for every quasiquoter in this module.
exts :: [C.Extensions]
exts = [C.CUDA]
-- | Built-in CUDA type names treated as typedefs by the parsers: the
-- numbered vector variants (char1..char4, ..., double1..double4) plus
-- dim3.
typenames :: [String]
typenames = concatMap (typeN 4) scalarNames ++ ["dim3"]
  where
    scalarNames =
      [ "char", "uchar", "short", "ushort"
      , "int", "uint", "long", "ulong"
      , "longlong", "ulonglong"
      , "float", "double"
      ]
-- | @typeN k base@ yields @base1@ through @basek@.
typeN :: Int -> String -> [String]
typeN k typename = map (\n -> typename ++ show n) [1 .. k]
cdecl, cedecl, cenum, cexp, cfun, cinit, cparam, cparams, csdecl, cstm, cstms :: QuasiQuoter
citem, citems, ctyquals, cty, cunit :: QuasiQuoter
-- Each quasiquoter wires the CUDA extension set and the builtin CUDA
-- type names into the corresponding language-c-quote parser entry point.
cdecl = quasiquote exts typenames P.parseDecl
cedecl = quasiquote exts typenames P.parseEdecl
cenum = quasiquote exts typenames P.parseEnum
cexp = quasiquote exts typenames P.parseExp
cfun = quasiquote exts typenames P.parseFunc
cinit = quasiquote exts typenames P.parseInit
cparam = quasiquote exts typenames P.parseParam
cparams = quasiquote exts typenames P.parseParams
csdecl = quasiquote exts typenames P.parseStructDecl
cstm = quasiquote exts typenames P.parseStm
cstms = quasiquote exts typenames P.parseStms
citem = quasiquote exts typenames P.parseBlockItem
citems = quasiquote exts typenames P.parseBlockItems
ctyquals = quasiquote exts typenames P.parseTypeQuals
cty = quasiquote exts typenames P.parseType
cunit = quasiquote exts typenames P.parseUnit
| flowbox-public/language-c-quote | Language/C/Quote/CUDA.hs | bsd-3-clause | 2,779 | 0 | 8 | 606 | 565 | 349 | 216 | 53 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Checks where
import Check.Http
import Check.Snmp
import Check.Snmp.Snmp
import Data.Yaml
import System.Cron
import Types
import Data.Map.Strict (unions)
-- | Combined route-check table for all supported check types
-- (SNMP system checks first, then HTTP).
checkRoutes :: Rules -> RouteCheck
checkRoutes rs = unions (systemChecks ++ httpChecks)
  where
    httpChecks = map (routeCheck rs) [HttpSimple]
    systemChecks =
      map (routeCheck rs) [Snmp "system.disk", Snmp "network.interface"]
-- | Combined routing table for all supported check types
-- (SNMP routes first, then HTTP).
routes :: Rules -> Route
routes rs = unions (snmpRoutes ++ httpRoutes)
  where
    httpRoutes = map (route rs) [HttpSimple]
    snmpRoutes =
      map (route rs) [Snmp "system.disk", Snmp "network.interface"]
-- Sample check definitions used for manual testing.
testHttp, testHttp1, testShell:: Check
-- | Daily HTTP reachability check against ya.ru.
testHttp = Check (CheckName "web") (Hostname "ya.ru") (Cron daily) "http.simple" $ object [ ("url", (String "http://ya.ru")) ]
-- | Daily HTTP status check that follows up to 2 redirects.
testHttp1 = Check (CheckName "web") (Hostname "ubank.ru") (Cron daily) "http.status" $ object [("url", String "http://ubank.ru"), ("redirects", Number 2 )]
-- | Daily shell command check running @uptime@ locally.
testShell = Check (CheckName "shell") (Hostname "localhost") (Cron daily) "cmd.run" $ object [("abc", String "" ), ("command", String "uptime")]
-- testSnmp = Check (CheckName "net") (Hostname "salt") (Cron daily) "snmp.network.interface" $ object [ ("community", String "helloall" ), ("host", String "salt" ) ]
| chemist/fixmon | src/Checks.hs | bsd-3-clause | 1,440 | 0 | 11 | 329 | 402 | 216 | 186 | 25 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Yahoo.HistoricalData (
requestData,
) where
import Control.Lens
import Control.Monad (join, (>>=))
import Data.Aeson
import Data.Aeson.Lens (key, _Array)
import qualified Data.Aeson.Types as AT
import qualified Data.ByteString.Lazy as BS
import qualified Data.HashMap.Strict as HM
import Data.Text (unpack, toLower, replace)
import Data.Time
import qualified Data.Vector as V
import Network.Wreq (responseBody, Response)
import Text.Printf
import Data (DayQuote(..))
import Yahoo.Query
type Symbol = String
-- | Days arrive as JSON strings (e.g. "2015-01-30").  Parse them with
-- 'reads' so malformed text fails the aeson parser instead of crashing
-- at an unrelated call site via the partial 'read'.
instance FromJSON Day where
  parseJSON (AT.String t) =
    case reads (unpack t) of
      [(d, rest)] | all (== ' ') rest -> return d
      _                               -> AT.typeMismatch "Day" (AT.String t)
  parseJSON invalid = AT.typeMismatch "Day" invalid
-- | Decode a Yahoo historical-data row.  All numeric fields come back
-- as JSON strings, hence the 'read' calls.
--
-- NOTE(review): 'read' is partial — a malformed field aborts with an
-- imprecise exception rather than a parser failure; consider readMaybe.
-- The final record is assembled via RecordWildCards from the bindings
-- whose names match the DayQuote fields.
instance FromJSON DayQuote where
  parseJSON = withObject "DayQuote" $ \v -> do
    symbol <- v .: "Symbol"
    date <- fmap read $ v .: "Date"
    open <- fmap read $ v .: "Open"
    high <- fmap read $ v .: "High"
    low <- fmap read $ v .: "Low"
    close <- fmap read $ v .: "Close"
    volume <- fmap read $ v .: "Volume"
    adjclose <- fmap read $ v .: "Adj_Close"
    return DayQuote{..}
-- | Normalise the keys of a JSON object: lower-case them and strip
-- underscores.  Non-object values pass through unchanged.
jsonClean :: Value -> Value
jsonClean (Object o) =
  Object (HM.fromList (map normaliseKey (HM.toList o)))
  where
    normaliseKey (k, v) = (replace "_" "" (toLower k), v)
jsonClean x = x
-- | Fetch and decode all day quotes for a symbol in the (inclusive)
-- date range.  A response without a quote array now yields an 'Error'
-- result instead of the original 'undefined' crash.
requestData :: Symbol -> Day -> Day -> IO (Result [DayQuote])
requestData sym fromDate toDate = do
  body <- requestDataBody sym fromDate toDate
  return $ case requestGetValues body of
    Nothing -> Error "requestData: no quote array in Yahoo response"
    Just vs -> mapM fromJSON (V.toList vs)
-- | Run the YQL historical-data query for a symbol between two dates.
-- NOTE(review): the symbol and dates are spliced into the YQL string
-- with printf; a symbol containing a double quote would corrupt the
-- query — confirm inputs are trusted.
requestDataBody :: Symbol -> Day -> Day -> IO (Response BS.ByteString)
requestDataBody sym fromDate toDate = request $ printf "SELECT * FROM yahoo.finance.historicaldata WHERE symbol = \"%s\" AND startDate = \"%s\" AND endDate = \"%s\"" sym (show fromDate) (show toDate)
-- | Drill into the YQL response envelope: @query.results.quote@ should
-- hold the JSON array of day-quote objects; 'Nothing' if absent.
requestGetValues :: Response BS.ByteString -> Maybe (V.Vector Value)
requestGetValues body = body ^? responseBody . key "query" . key "results" . key "quote" . _Array
| tetigi/raider | src/Yahoo/HistoricalData.hs | bsd-3-clause | 2,117 | 0 | 13 | 477 | 685 | 356 | 329 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
import Language.Haskell.Liquid.Prelude (isEven)
-- LiquidHaskell refinement: every element of the result is provably
-- even (v mod 2 = 0).  Keep the recursion shape in sync with the spec.
[lq| takeEvens :: [Int] -> [{v: Int | v mod 2 = 0}] |]
takeEvens :: [Int] -> [Int]
takeEvens [] = []
takeEvens (x:xs) = if isEven x
                     then x : takeEvens xs
                     else takeEvens xs
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/modTest.hs | bsd-3-clause | 324 | 0 | 7 | 100 | 87 | 49 | 38 | 9 | 2 |
module Part1.Problem24 where
import Data.List (permutations)
--
-- Problem 24: Lexicographic permutations
--
-- A permutation is an ordered arrangement of objects. For example, 3124 is one
-- possible permutation of the digits 1, 2, 3 and 4. If all of the permutations
-- are listed numerically or alphabetically, we call it lexicographic order.
-- The lexicographic permutations of 0, 1 and 2 are:
--
-- 012 021 102 120 201 210
--
-- What is the millionth lexicographic permutation of the digits 0, 1, 2, 3, 4,
-- 5, 6, 7, 8 and 9?
-- | The millionth lexicographic permutation of the digits 0..9,
-- rendered as a digit string (index 999999, zero-based).
problem24 :: String
problem24 = concatMap show millionth
  where
    millionth = lexperms [0 .. 9] !! 999999
-- | Pair each element with the rest of the list, preserving order.
--
-- >>> extract [0,1,2]
-- [(0,[1,2]),(1,[0,2]),(2,[0,1])]
extract :: [a] -> [(a, [a])]
extract xs = map pick [0 .. length xs - 1]
  where
    pick i =
      let (before, y : after) = splitAt i xs
      in (y, before ++ after)
-- | All permutations in lexicographic order (for a sorted input):
-- for each position, fix that element first and permute the remainder.
-- Note @lexperms [] == []@, matching the original accumulator version.
lexperms :: [a] -> [[a]]
lexperms []  = []
lexperms [x] = [[x]]
lexperms xs  = concatMap expand [0 .. length xs - 1]
  where
    expand i =
      let (before, y : after) = splitAt i xs
      in map (y :) (lexperms (before ++ after))
| c0deaddict/project-euler | src/Part1/Problem24.hs | bsd-3-clause | 1,109 | 0 | 11 | 247 | 286 | 163 | 123 | 15 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
module Reactor.Moore
( Moore(..)
) where
import Control.Applicative
import Control.Comonad
import Data.Functor.Apply
import Data.Typeable
-- | A Moore machine: 'current' is the output in the present state,
-- 'step' advances the machine by one input.
data Moore i o = Moore { step :: i -> Moore i o, current :: o }
  deriving Typeable
-- | Map over the output at every state.
instance Functor (Moore i) where
  fmap g (Moore f o) = Moore (fmap g . f) (g o)
  -- Replacing every output by a constant is the constant machine.
  b <$ _ = pure b

instance Extend (Moore i) where
  -- Each state becomes the machine as seen from that state.
  duplicate m = Moore (duplicate . step m) m
  extend g m = Moore (extend g . step m) (g m)

-- | Extract the current output.
instance Comonad (Moore i) where
  extract (Moore _ o) = o

-- | Run two machines in lock-step on the same input stream.
instance Apply (Moore i) where
  Moore ff f <.> Moore fa a = Moore (\i -> ff i <.> fa i) (f a)
  a <. _ = a
  _ .> b = b

-- | 'pure' is the machine stuck on one output forever (a knot-tied
-- self-reference, so it is a single shared value).
instance Applicative (Moore i) where
  pure o = m where m = Moore (const m) o
  (<*>) = (<.>)
  (<* ) = (<. )
  ( *>) = ( .>)
| ekmett/reactor | Reactor/Moore.hs | bsd-3-clause | 800 | 0 | 10 | 200 | 393 | 207 | 186 | 26 | 0 |
module Commi.Plot where
import Haste.Graphics.Canvas
import Haste.Perch hiding (head)
import Haste.HPlay.View hiding (head)
import Prelude hiding (id, div)
import Text.Printf
import Data.Function hiding (id)
import Data.List
import Control.Monad.IO.Class (liftIO)
import Control.Arrow
import Control.Applicative
import Control.Monad
import Commi.Task
import Debug.Trace
-- | Render a plot into a freshly created canvas element.  The primary
-- curve comes from the plot state (drawn in red); 'secondaryPlots' are
-- extra curves with their own colours; 'dots' are marker points.
-- 'xstr'/'ystr' label the axes, '(xsize, ysize)' is the drawing area in
-- pixels, and 'xdigs'/'ydigs' control grid-label precision.
plotWidget :: PlotState -> [ ([(Double, Double)], Color) ] -> String -> String -> (Double, Double) -> Int -> Int -> [(Double, Double, Color)] -> Widget ()
plotWidget state secondaryPlots xstr ystr (xsize, ysize) xdigs ydigs dots = do
  canvasId <- fmap ("canvas" ++) getNextId
  resetEventData
  -- Emit the canvas element, enlarged by a margin on every side.
  wraw $ do
    canvas ! id canvasId
           ! style "border: 1px solid black;"
           ! atr "width" (show $ (1 + 2 * margin) * xsize)
           ! atr "height" (show $ (1 + 2 * margin) * ysize)
           $ noHtml
  -- Draw into the canvas once it exists; silently skip if lookup fails.
  wraw $ liftIO $ do
    wcan <- getCanvasById canvasId
    case wcan of
      Nothing -> return ()
      Just can -> render can $
        translate (margin*xsize, margin*ysize) $
        plot xstr ystr ((points, red):secondaryPlots) (xsize, ysize) xdigs ydigs dots
  where
    points = values state
    margin = 0.1
    red = RGB 200 0 0
-- | Draw the full picture: axes with arrowheads, a labelled grid, the
-- primary curve (head of @ptss@), all secondary curves, and marker
-- dots.  Scaling is derived from the primary curve's bounding box.
plot :: String -> String -> [ ([(Double, Double)], Color) ] -> (Double, Double) -> Int -> Int -> [(Double, Double, Color)] -> Picture ()
plot xstr ystr ptss (xs, ys) xdigs ydigs mdots = coords >> xlabel >> ylabel >> grid >> plotted >> dots
  where
    -- Head of ptss is the primary curve; the tail are secondary curves.
    pts = fst $ head ptss
    ptsCol = snd $ head ptss
    secPts = tail ptss
    coords = xcoord >> ycoord
    xcoord = stroke (line (0, ys) (xs, ys)) >> translate (xs, ys) xarrow
    ycoord = stroke (line (0, ys) (0, 0)) >> translate (0, 0) yarrow
    xarrow = stroke $ path [(-0.025*xs, 0.01*ys), (0, 0), (-0.025*xs, -0.01*ys)]
    yarrow = rotate (-pi/2) xarrow
    dots = mapM_ dot mdots
    dot (x, y, c) = color c $ fill $ circle (toLocal (x,y)) (0.005*xs)
    -- Data-space bounding box of the primary curve.
    xmin = fst $ minimumBy (compare `on` fst) pts
    xmax = fst $ maximumBy (compare `on` fst) pts
    ymin = snd $ minimumBy (compare `on` snd) pts
    ymax = snd $ maximumBy (compare `on` snd) pts
    xmargin = 0.1
    ymargin = 0.1
    -- Guard against a degenerate (near-constant) axis range.
    xrange = let v = xmax - xmin in if v < 0.001 then 1.0 else v
    yrange = let v = ymax - ymin in if v < 0.001 then 1.0 else v
    -- Data space <-> canvas space; y is flipped (canvas y grows down).
    toLocal (x, y) = ( xs * (xmargin + (1 - 2*xmargin) * (x - xmin) / xrange)
                     , ys * (1 - (ymargin + (1 - 2*ymargin) * (y - ymin) / yrange)))
    fromLocalX x = xmin + (x / xs - xmargin) * xrange / (1 - 2*xmargin)
    fromLocalY y = ymin + ((1 - y / ys) - ymargin) * yrange / (1 - 2*ymargin)
    localPts = toLocal <$> pts
    localSecPts = first (fmap toLocal) <$> secPts
    -- Consecutive point pairs to be joined by line segments.
    intervals ps
      | null ps = []
      | length ps == 1 = [(head ps, head ps)]
      | otherwise = ps `zip` tail ps
    plotted = do
      color ptsCol $ sequence_ $ stroke . uncurry line <$> intervals localPts
      forM localSecPts $ \ps ->
        color (snd ps) $ sequence_ $ stroke . uncurry line <$> intervals (fst ps)
    ltexscale = 2.0 * xs / 900
    xlabel = translate (0.8*xs, 0.95*ys) $ scale (ltexscale, ltexscale) $ text (0,0) xstr
    ylabel = translate (0.05*xs, 0) $ scale (ltexscale, ltexscale) $ text (0,0) ystr
    -- Grid anchors: endpoints plus at most 10 uniformly-sampled
    -- interior points of the primary curve.
    gridPts
      | length localPts <= 2 = localPts
      | otherwise = head localPts : (middle ++ [last localPts])
      where
        middlePts = tail $ init localPts
        middle = takeUniform (min 10 (length middlePts)) middlePts
    smalltext :: Int -> Double -> Picture ()
    smalltext n = scale (ltexscale, ltexscale) . text (0, 0) . truncText n
    -- Avoid printing "-0.00…" for tiny negative values.
    truncText :: Int -> Double -> String
    truncText n d = if v == "-0." ++ replicate n '0' then "0." ++ replicate n '0' else v
      where v = printf ("%."++show n++"f") d
    xgrid = sequence_ $ (\x -> stroke (line (x,0) (x,ys)) >> translate (x - (textOffsetX x)*0.005*xs, 1.05*ys) (smalltext xdigs $ fromLocalX x)) . fst <$> gridPts
    ygrid = sequence_ $ (\y -> stroke (line (0,y) (xs,y)) >> translate ( - (textOffsetY y)*0.022*xs, y+0.015*ys) (smalltext ydigs $ fromLocalY y)) . snd <$> gridPts
    -- Shift labels left proportionally to their rendered width.
    textOffsetX n = min 4 $ (fromIntegral $ length (truncText 0 $ fromLocalX n) - 1)
    textOffsetY n = min 4 $ (fromIntegral $ length (truncText 2 $ fromLocalY n) - 1)
    grid = color (RGB 125 125 125) $ xgrid >> ygrid
-- | Sample up to @n@ elements from the list at a uniform stride
-- (@round (length / n)@).  Errors if @n@ exceeds the list length.
-- NOTE: for some @n@ the rounded stride yields fewer than @n@
-- elements; that behaviour is preserved.
takeUniform :: Int -> [a] -> [a]
takeUniform n l
  | n > length l = error "n is larger than passed list!"
  | otherwise = take n (everyNth l)
  where
    stride = round $ (fromIntegral (length l) :: Double) / fromIntegral n
    everyNth ys =
      case drop (stride - 1) ys of
        (z:zs) -> z : everyNth zs
        []     -> []
-- | Sample @f@ at @i + 1@ evenly spaced points across [xmin, xmax],
-- endpoints included, yielding (x, f x) pairs.
sample :: Double -> Double -> Int -> (Double -> Double) -> [(Double, Double)]
sample xmin xmax i f =
  [ (x, f x)
  | j <- [0 .. i]
  , let x = xmin + (xmax - xmin) * fromIntegral j / fromIntegral i
  ]
{-# LANGUAGE CPP #-}
-- #define DEBUG
{-|
Module : AERN2.RealFun.UnaryBallFun.Integration
Description : unary function integration
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Unary function integration
-}
module AERN2.RealFun.UnaryBallFun.Integration
(
integralOnIntervalSubdivide
, integralOnIntervalIncreasePrecision
)
where
#ifdef DEBUG
import Debug.Trace (trace)
#define maybeTrace trace
#else
#define maybeTrace (flip const)
#endif
import MixedTypesNumPrelude
-- import qualified Prelude as P
-- import Text.Printf
-- import Control.Arrow
-- import Control.Applicative
-- import Control.Lens.Operators
-- import Control.Lens (_Just)
-- import AERN2.MP.Dyadic
import AERN2.MP
-- import qualified AERN2.MP.Ball as MPBall
-- import AERN2.QA
import AERN2.Real
import AERN2.Interval (DyadicInterval)
-- import AERN2.Interval (Interval(..), DyadicInterval, RealInterval)
import qualified AERN2.Interval as Interval
import AERN2.RealFun.Operations
import AERN2.RealFun.UnaryBallFun.Type
import AERN2.RealFun.UnaryBallFun.Evaluation ()
-- | Integrate a unary ball function over a dyadic interval by adaptive
-- subdivision, retrying each sub-interval at increasing precisions.
instance CanIntegrateOverDom UnaryBallFun DyadicInterval where
  type IntegralOverDomType UnaryBallFun DyadicInterval = CauchyRealCN
  integrateOverDom f =
    integralOnIntervalSubdivide (integralOnIntervalIncreasePrecision getArea)
      (\ (AccuracySG _ acG) -> standardPrecisions (ac2prec acG))
    -- integralOnIntervalSubdivide (\s di _ac -> (s, getArea di)) standardPrecisions
    where
    -- Rectangle estimate: enclosure of f over the interval times width.
    getArea di p =
      (apply f diB)*(Interval.width di)
      where
      diB = raisePrecisionIfBelow p $ mpBall di
-- | Estimate the integral over one interval, stepping through the
-- precision list until the target accuracy is met or raising the
-- precision stops improving the result.  Returns the (possibly
-- advanced) precision list together with the best area estimate.
integralOnIntervalIncreasePrecision ::
  (DyadicInterval -> Precision -> CN MPBall) ->
  [Precision] -> DyadicInterval -> Accuracy ->
  ([Precision], CN MPBall)
integralOnIntervalIncreasePrecision _getArea [] _di _ac =
  error "AERN2.RealFun.UnaryBallFun: internal error in integrateOverDom"
integralOnIntervalIncreasePrecision getArea ps@(p1_O:_) di ac =
  aux (getArea di p1_O) ps
  where
  aux diArea1 ps2@(p1:p2:p3rest) =
    -- Debug tracing only when compiled with -DDEBUG.
    maybeTrace
    (
      "integralOnIntervalIncreasePrecision: "
      ++ "\n di = " ++ show di
      ++ "\n ac = " ++ show ac
      ++ "\n p1 = " ++ show p1
      ++ "\n getAccuracy diArea1 = " ++ show (getAccuracy diArea1)
      ++ "\n p2 = " ++ show p2
      ++ "\n getAccuracy diArea2 = " ++ show (getAccuracy diArea2)
    )
    res
    where
    res
      -- Accurate enough at the current precision: keep the list as-is.
      | getAccuracy diArea1 >= ac
        = (ps2, diArea1)
      -- Higher precision improved the estimate: advance the list.
      | getAccuracy diArea1 < getAccuracy diArea2
        = (p2:p3rest, diArea2)
        -- aux diArea2 (p2:p3rest)
      -- No improvement: return the higher-precision value but do not
      -- advance (the caller will subdivide instead).
      | otherwise
        = (ps2, diArea2)
    diArea2 = getArea di p2
  -- Fewer than two precisions left: nothing more to try.
  aux diArea1 ps2 = (ps2, diArea1)
-- | Build a Cauchy real for the integral over an interval by recursive
-- bisection: if the single-interval estimate misses the target
-- accuracy, split the interval and require accuracy @ac+1@ on each
-- half so the halves' sum meets @ac@.
integralOnIntervalSubdivide ::
  (s -> DyadicInterval -> Accuracy -> (s, CN MPBall))
  ->
  (AccuracySG -> s) -> (DyadicInterval -> CauchyRealCN)
integralOnIntervalSubdivide integralOnInterval initS diO =
  newCRCN "integral" [] makeQ
  where
  makeQ _ ac =
    integr (initS ac) diO (_acStrict ac)
  integr s di ac
    | getAccuracy value >= ac =
      -- Debug tracing only when compiled with -DDEBUG.
      maybeTrace
      ("integrate by subdivide:"
        ++ "\n di = " ++ show di
        ++ "\n ac = " ++ show ac
        ++ "\n getAccuracy value = " ++ show (getAccuracy value)
        ++ "\n getPrecision value = " ++ show (fmap getPrecision value)
      )
      value
    | otherwise =
      maybeTrace
      ("integrate by subdivide:"
        ++ "\n di = " ++ show di
        ++ "\n ac = " ++ show ac
        ++ "\n getAccuracy value = " ++ show (getAccuracy value)
        ++ "\n getPrecision value = " ++ show (fmap getPrecision value)
      ) $
      (integr s' diL (ac+1))
      +
      (integr s' diR (ac+1))
    where
    (diL, diR) = Interval.split di
    -- The state s' threads precision bookkeeping into both halves.
    (s', value) = integralOnInterval s di ac
| michalkonecny/aern2 | aern2-fun-univariate/src/AERN2/RealFun/UnaryBallFun/Integration.hs | bsd-3-clause | 3,961 | 0 | 20 | 1,026 | 874 | 458 | 416 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
module Sound.Amplitude
(
-- ** Decibel amplitude
Amplitude
, db
, dbn
-- ** Limiting amplitude
, limit
, flimit
, clamp
, fclamp
-- ** Ramp streams
, lgeos
-- ** Contours/envelopes
, rlexp
)
where
import Sound.InfList
import Sound.Time
type Amplitude a = Nonnegative a
{-|
Convert a decibel value into the equivalent linear amplitude
multiplier.

@
db 0 = 1
db 20 = 10
db 40 = 100
@
-}
db :: (Floating a) => a -> Amplitude a
db !x = 10 ** (x / 20)
{-# INLINE db #-}
{- |
@
dbn = 'db' '.' 'negate'
@

Haskell parses @db -1@ as @db - 1@,
so without @dbn@ we would have to write @db (-1)@ or @db (negate 1)@.
-}
dbn :: (Floating a) => a -> Amplitude a
dbn !x = db (negate x)
{-# INLINE dbn #-}
{- |
@clamp mi ma x@ restricts @x@ to the closed interval from @mi@ to
@ma@; @mi@ must not exceed @ma@.
-}
clamp :: (Ord a) => Min a -> Max a -> a -> a
clamp !mi !ma !x
  | x < mi    = mi
  | x > ma    = ma
  | otherwise = x
{- |
Clamp every element inside a functor:

@
fclamp mi ma = 'fmap' ('clamp' mi ma)
@
-}
fclamp :: (Functor f, Ord a) => Min a -> Max a -> f a -> f a
fclamp !mi !ma = fmap (clamp mi ma)
{- |
Symmetric clamp to [-x, x]:

@
limit x = 'clamp' ('negate' x) x
@
-}
limit :: (Num a, Ord a) => Amplitude a -> a -> a
limit !t = clamp (negate t) t
{- |
Symmetric clamp of every element to [-x, x]:

@
flimit x = 'fclamp' ('negate' x) x
@
-}
flimit :: (Functor f, Num a, Ord a) => Amplitude a -> f a -> f a
flimit !t = fmap (limit t)
{- |
Exponential contour sampled at the given rate.  The second argument is
the exponent (@m@) in the following equation:

@
x = exp (m * t)
@

where @t@ is time in seconds.  Implemented as a stream that starts at 1
and multiplies by the constant per-sample factor @exp (m \/ rate)@.
-}
rlexp :: (Floating a) => Rate Int -> a -> RL a
rlexp rate_ expo_ =
  rated rate_ $ literate (f *) 1
  where
    f = exp (expo_ / fromIntegral (_unRate rate_))
| edom/sound | src/Sound/Amplitude.hs | bsd-3-clause | 1,837 | 0 | 12 | 554 | 523 | 265 | 258 | 43 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Heed.DbEnums
( ItemsDate(Missing, Present)
, PGItemsDate
) where
import qualified Data.ByteString.Char8 as B8
import Data.Monoid ((<>))
import qualified Data.Profunctor as Pro
import qualified Data.Profunctor.Product.Default as ProDef
import Data.Serialize (Serialize)
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Data.Typeable
import qualified Database.PostgreSQL.Simple.FromField as PG
import GHC.Generics
import qualified Opaleye as O
-- | Iso to Bool, only to avoid boolean blindness: records whether a
-- feed's items carry their own timestamps.
data ItemsDate
  = Missing -- ^ Feed has no items on dates and the current time will be used
  | Present -- ^ Feed has dates on items
  deriving (Typeable, Show, Generic)

-- Wire serialization via the Generic representation.
instance Serialize ItemsDate
-- | Phantom tag for the Postgres @itemsdate@ enum column type.
data PGItemsDate

-- Wire representations of the enum values; 'unexpected' prefixes the
-- conversion-failure message for anything else.
missing, present, unexpected :: Text.Text
missing = "missing"
present = "present"
unexpected = "Unexpected itemsdate:"
-- | Decode the Postgres enum value; any string other than "missing"
-- or "present" is a conversion failure.
instance PG.FromField ItemsDate where
  fromField f itemsDate = pgTextFromFieldNoTypeCheck f itemsDate >>= parseItemsDate
    where
      parseItemsDate itemsDateString
        | itemsDateString == missing = return Missing
        | itemsDateString == present = return Present
        | otherwise =
            PG.returnError PG.ConversionFailed f (Text.unpack $ unexpected <> itemsDateString)
-- | Let Opaleye queries read PGItemsDate columns via the FromField
-- instance above.
instance O.QueryRunnerColumnDefault PGItemsDate ItemsDate where
  queryRunnerColumnDefault = O.fieldQueryRunnerColumn
-- | Reuse an existing 'O.Constant' encoder for a different Haskell
-- type by pre-mapping with @f@, coercing the column type unsafely
-- (safe here because the enum is stored as text).
constantColumnUsing
  :: O.Constant haskell (O.Column pgType)
  -> (haskell' -> haskell)
  -> O.Constant haskell' (O.Column pgType')
constantColumnUsing oldConstant f = Pro.dimap f O.unsafeCoerceColumn oldConstant
-- | Encode an 'ItemsDate' for writing, going through the String
-- encoder.  NOTE(review): these literals duplicate the 'missing' and
-- 'present' Text constants above — keep them in sync.
instance ProDef.Default O.Constant ItemsDate (O.Column PGItemsDate) where
  def =
    constantColumnUsing (ProDef.def :: O.Constant String (O.Column O.PGText)) itemsDateToString
    where
      itemsDateToString :: ItemsDate -> String
      itemsDateToString Missing = "missing"
      itemsDateToString Present = "present"
-- | Fail with an UnexpectedNull error when the field's raw bytes are
-- absent; otherwise hand them on.
pgGuardNotNull :: PG.FieldParser B8.ByteString
pgGuardNotNull f mb =
  case mb of
    Nothing -> PG.returnError PG.UnexpectedNull f ""
    Just b -> return b
{-# INLINABLE pgGuardNotNull #-}
-- | Like the 'Pg.FromField' instance for 'Text.Text' but doesn't check
-- the 'Pg.Field' type.  ENUMS WON'T WORK OTHERWISE! thanks to k0001
-- for the tip.  Uses the non-throwing 'Text.decodeUtf8'' so invalid
-- UTF-8 becomes a conversion error rather than an exception.
pgTextFromFieldNoTypeCheck :: PG.FieldParser Text.Text
pgTextFromFieldNoTypeCheck f mb = do
  b <- pgGuardNotNull f mb
  case Text.decodeUtf8' b of
    Left e -> PG.conversionError e
    Right t -> return t
{-# INLINABLE pgTextFromFieldNoTypeCheck #-}
| Arguggi/heed | heed-lib/src/Heed/DbEnums.hs | bsd-3-clause | 2,810 | 0 | 13 | 536 | 590 | 324 | 266 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Monad.Trans.Reader
import Control.Monad.Trans.State
import Control.Monad.Trans.Class
import Control.Monad.Loops
import Control.Monad
import System.Random
import System.IO
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
-- | A player together with the parity assigned to them.
data PlayerParity = PlayerParity
  { parity :: Parity
  , player :: Player
  } deriving (Eq)
-- | Render as, e.g., "Computer is Even".
instance Show PlayerParity where
  show (PlayerParity pa pl) = show pl ++ " is " ++ show pa
-- | Game configuration fixed at startup by 'chooseParity'.
data Config = Config
  { asParityMap :: M.Map Parity Player -- ^ which parity belongs to whom
  , asComputerParity :: Parity         -- ^ the computer's assigned parity
  , asString :: String                 -- ^ human-readable description
  }

-- | Parity of a number or of the round's combined sum.
data Parity = Even | Odd deriving (Ord, Eq,Show)

-- | Who won (or can win) a round; 'Nobody' covers ties/defaults.
data Player = Computer | Human | Nobody deriving (Eq, Ord, Show)
-- | Outcome of one round.  NOTE(review): the field name 'copmuter' is
-- a typo of "computer"; renaming it would change the record interface,
-- so it is kept as-is.
data RoundResult = RoundResult
  { copmuter :: Parity
  , human :: Parity
  , winner :: Player
  } deriving (Show)

-- | Key: the human's two consecutive parities (older, newer).
type TrigramKey = (Parity, Parity)
-- | Value: the parity the human played right after that pair.
type TrigramVal = Parity
-- | Round history (newest first) plus the learned trigram table.
type StateType = ([RoundResult], M.Map TrigramKey TrigramVal)
-- | Randomly assign Even/Odd to the two players.  Total by
-- construction: the original matched only the literals 0 and 1, which
-- was a non-exhaustive case; an if-expression removes that hazard.
chooseParity :: IO Config
chooseParity = do
  choose <- randomRIO (0, 1 :: Int)
  return $
    if choose == 0
      then Config
             (M.fromList [(Even, Computer), (Odd, Human)])
             Even
             "Computer is Even, Human is Odd"
      else Config
             (M.fromList [(Odd, Computer), (Even, Human)])
             Odd
             "Computer is Odd, Human is Even"
-- | Uniformly random parity.
chooseZeroOne :: IO Parity
chooseZeroOne = do
  r <- randomRIO (0, 1) :: IO Int
  return (if even r then Even else Odd)
-- | Read the human player's number from stdin and reduce it to its
-- parity.  Malformed input no longer crashes the game (the original
-- used the partial 'read'); the player is re-prompted instead.
humanChoice :: IO Parity
humanChoice = do
  line <- getLine
  case reads line :: [(Int, String)] of
    [(i, rest)] | all (`elem` " \t") rest ->
      return (if even i then Even else Odd)
    _ -> do
      putStr "Please enter a number: "
      humanChoice
-- | Pick the computer's move.  If the human's two most recent parities
-- form a known trigram key, predict the human's next parity and play
-- the counter-move; otherwise choose randomly.
--
-- The original indexed @lastTwo !! 1@ unconditionally and only avoided
-- a crash on short histories because 'M.lookup' on an empty map never
-- forces its key; this version pattern-matches the history explicitly,
-- so the totality no longer depends on that laziness accident.
computerChoice :: Parity -> StateT StateType IO Parity
computerChoice computerParity = do
  (history, trigrams) <- get
  case history of
    (latest : previous : _)
      | Just predicted <- M.lookup (human previous, human latest) trigrams -> do
          lift $ putStrLn "( Com : choosing from Trigram... )"
          return (decide computerParity predicted)
    _ -> do
      lift $ putStrLn "( Com : choosing from Random... )"
      lift chooseZeroOne
  where
    -- Given our parity and the predicted human parity, pick the number
    -- parity that makes the round's sum land on our side.
    decide com man
      | com == Odd && man == Even = Odd
      | com == Odd && man == Odd  = Even
      | otherwise                 = man
-- | Play one round: read the human's number, reveal the computer's
-- pre-chosen parity, and decide the winner from the combined parity.
singleRoundMorra :: Parity -> ReaderT Config IO RoundResult
singleRoundMorra computerParity = do
  parityMap <- asks asParityMap
  humanParity <- lift $ putStr "Man : " >> humanChoice
  -- computerNumer <- lift chooseZeroOne
  lift $ putStrLn ("Com : " ++ show computerParity)
  let currentParity = determinParity computerParity humanParity
      -- 'Nobody' is a defensive default; the map built by
      -- 'chooseParity' covers both parities.
      winner' = (fromMaybe Nobody) $ M.lookup currentParity parityMap
  lift $ putStrLn (" - " ++ show winner' ++ " wins")
  return $ RoundResult computerParity humanParity winner'
-- | Parity of the sum of the two players' numbers (parity addition).
determinParity :: Parity -> Parity -> Parity
determinParity x y
  | x == Even = y
  | y == Even = Odd
  | otherwise = Even
-- | 1 if the given player won the round, 0 otherwise.
score :: RoundResult -> Player -> Int
score r p = fromEnum (winner r == p)
-- | One game round: pick the computer's move, play the round, push the
-- result onto the history, and — once three rounds exist — learn a
-- trigram from the human's last three parities.
loop :: ReaderT Config (StateT StateType IO) RoundResult
loop = do
  config <- ask
  mystate <- lift $ get
  let history = fst mystate
      trigrams = snd mystate
  computerNumber <- lift $ computerChoice (asComputerParity config)
  roundResult <- lift.lift $ runReaderT (singleRoundMorra computerNumber) config
  -- lift . lift $ print roundResult
  lift . lift $ putStrLn " "
  let newHistory = roundResult : history
      lastThree = human <$> take 3 newHistory
      -- NOTE(review): history is newest-first, so "first" here is the
      -- OLDEST of the three and "third" the most recent move.
      first = (lastThree !! 2)
      second = (lastThree !! 1)
      third = (lastThree !! 0)
  -- Learn: after playing (first, second), the human played `third`.
  lift $ if length newHistory >= 3
    then put (newHistory, M.insert (first,second) third trigrams)
    else put (newHistory, trigrams)
  return $ roundResult
-- | The match continues while neither side has reached three wins.
checkState :: (Monad m) => StateT StateType m Bool
checkState = do
  (history, _) <- get
  let scoreOf p = calcScoreForPlayer p history
  return (scoreOf Computer /= 3 && scoreOf Human /= 3)
-- | Overall winner by round-win count; 'Nobody' on a tie.
calcWinner :: [RoundResult] -> Player
calcWinner history =
  case compare (calcScoreForPlayer Computer history)
               (calcScoreForPlayer Human history) of
    GT -> Computer
    LT -> Human
    EQ -> Nobody
-- | Number of rounds won by the given player.
calcScoreForPlayer :: Player -> [RoundResult] -> Int
calcScoreForPlayer p = length . filter ((== p) . winner)
-- | Last @n@ elements of a list (the whole list when @n@ exceeds its
-- length).  Written with a two-pointer walk instead of the original
-- point-free fold-of-compositions.
takeR :: Int -> [a] -> [a]
takeR n xs = go (drop n xs) xs
  where
    -- Advance both lists together; when the "ruler" runs out, the
    -- second pointer sits exactly n elements from the end.
    go (_:ruler) (_:rest) = go ruler rest
    go []        rest     = rest
-- | Entry point: assign parities, announce the configuration, play
-- rounds until one side reaches three wins, then announce the winner.
main :: IO ()
main = do
  -- Unbuffered stdout so prompts appear before blocking on getLine.
  hSetBuffering stdout NoBuffering
  config <- chooseParity
  putStrLn " "
  putStrLn $ "========= Configuration ========="
  putStrLn " "
  putStrLn $ " " ++ asString config
  putStrLn " "
  putStrLn $ "================================="
  putStrLn " "
  -- Repeat rounds while checkState says neither side has three wins.
  history <- evalStateT (whileM (checkState) (runReaderT loop config)) ([], M.empty)
  -- forM_ history print
  let finalWinner = calcWinner history
  putStrLn " "
  putStrLn $ "========== The Winner! =========="
  putStrLn " "
  putStrLn $ " " ++ show finalWinner
  putStrLn " "
  putStrLn $ "================================="
| jo-taro/hpffp-ch26-monad-transformers | src/Morra.hs | bsd-3-clause | 5,427 | 0 | 15 | 1,455 | 1,706 | 869 | 837 | 140 | 2 |
module Control.ConstraintClasses.Lookup
(
-- * Constraint Lookup
CLookup (..)
, (!?)
) where
import Control.ConstraintClasses.Domain
import Control.ConstraintClasses.Key
import Control.ConstraintClasses.Functor
import Data.Key
-- base
import Data.Functor.Product
import Data.Functor.Sum
import Data.Functor.Compose
-- vector
import qualified Data.Vector as Vector
import qualified Data.Vector.Storable as VectorStorable
import qualified Data.Vector.Unboxed as VectorUnboxed
--------------------------------------------------------------------------------
-- CLASS
--------------------------------------------------------------------------------
-- | Containers that support safe, total lookup by key.
class CFunctor f => CLookup f where
  _lookup :: Dom f a => CKey f -> f a -> Maybe a

-- | Operator form of '_lookup' with the container first.
(!?) :: (CLookup f, Dom f a) => f a -> CKey f -> Maybe a
container !? key = _lookup key container
{-# INLINE [1] (!?) #-}
--------------------------------------------------------------------------------
-- INSTANCES
--------------------------------------------------------------------------------
-- base
-- | Lookup through a composition: resolve the outer key first, then the
-- inner key within the nested structure.
instance (CLookup f, CLookup g) => CLookup (Compose f g) where
  _lookup (outerKey, innerKey) (Compose nested) =
    _lookup outerKey nested >>= _lookup innerKey
  {-# INLINE [1] _lookup #-}

-- vector: all three flavours delegate to the vector's safe index operator.
instance CLookup Vector.Vector where
  _lookup i v = v Vector.!? i
  {-# INLINE [1] _lookup #-}

instance CLookup VectorStorable.Vector where
  _lookup i v = v VectorStorable.!? i
  {-# INLINE [1] _lookup #-}

instance CLookup VectorUnboxed.Vector where
  _lookup i v = v VectorUnboxed.!? i
  {-# INLINE [1] _lookup #-}
| guaraqe/constraint-classes | src/Control/ConstraintClasses/Lookup.hs | bsd-3-clause | 1,512 | 0 | 10 | 213 | 333 | 195 | 138 | 31 | 1 |
module Sexy.Instances.Apply.Function () where
import Sexy.Classes (Apply(..))
-- | Plain functions are applicable: the class's '$' is ordinary
-- function application.
instance Apply (a -> b) where
  type In  (a -> b) = a
  type Out (a -> b) = b
  ($) f x = f x
| DanBurton/sexy | src/Sexy/Instances/Apply/Function.hs | bsd-3-clause | 171 | 0 | 7 | 41 | 86 | 49 | 37 | -1 | -1 |
module Data.Array.Repa.Repr.Unboxed
( U, U.Unbox, Array (..)
, computeUnboxedS, computeUnboxedP
, fromListUnboxed
, fromUnboxed, toUnboxed
, zip, zip3, zip4, zip5, zip6
, unzip, unzip3, unzip4, unzip5, unzip6)
where
import Data.Array.Repa.Shape as R
import Data.Array.Repa.Base as R
import Data.Array.Repa.Eval as R
import Data.Array.Repa.Repr.Delayed as R
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
import Control.Monad
import Prelude hiding (zip, zip3, unzip, unzip3)
-- | Unboxed arrays are represented as unboxed vectors.
--
-- The implementation uses @Data.Vector.Unboxed@ which is based on type
-- families and picks an efficient, specialised representation for every
-- element type. In particular, unboxed vectors of pairs are represented
-- as pairs of unboxed vectors.
-- This is the most efficient representation for numerical data.
--
-- | Representation tag: arrays backed by unboxed vectors.
data U


-- | Read elements from an unboxed vector array.
instance U.Unbox a => Source U a where
  -- An extent paired with the flat element vector; both fields strict.
  data Array U sh a
    = AUnboxed !sh !(U.Vector a)

  -- Bounds-checked flat indexing.
  linearIndex (AUnboxed _ vec) ix
    = vec U.! ix
  {-# INLINE linearIndex #-}

  -- Unchecked flat indexing; the caller must guarantee ix is in range.
  unsafeLinearIndex (AUnboxed _ vec) ix
    = vec `U.unsafeIndex` ix
  {-# INLINE unsafeLinearIndex #-}

  extent (AUnboxed sh _)
    = sh
  {-# INLINE extent #-}

  -- Force the shape fully and the vector to WHNF before yielding x.
  deepSeqArray (AUnboxed sh vec) x
    = sh `deepSeq` vec `seq` x
  {-# INLINE deepSeqArray #-}


deriving instance (Show sh, Show e, U.Unbox e)
  => Show (Array U sh e)

deriving instance (Read sh, Read e, U.Unbox e)
  => Read (Array U sh e)
-- Fill -----------------------------------------------------------------------
-- | Filling of unboxed vector arrays.
-- Filling of unboxed vector arrays: the mutable counterpart of 'Source'.
instance U.Unbox e => Target U e where
  -- Mutable buffer in IO, wrapped so it can carry the U tag.
  data MVec U e
    = UMVec (UM.IOVector e)

  newMVec n
    = liftM UMVec (UM.new n)
  {-# INLINE newMVec #-}

  -- Unchecked write; the caller must guarantee ix is in range.
  unsafeWriteMVec (UMVec v) ix
    = UM.unsafeWrite v ix
  {-# INLINE unsafeWriteMVec #-}

  -- Freeze without copying, attaching the given extent.
  unsafeFreezeMVec sh (UMVec mvec)
    = do vec <- U.unsafeFreeze mvec
         return $ AUnboxed sh vec
  {-# INLINE unsafeFreezeMVec #-}

  deepSeqMVec (UMVec vec) x
    = vec `seq` x
  {-# INLINE deepSeqMVec #-}

  -- Unboxed vectors are not foreign memory; nothing to keep alive.
  touchMVec _
    = return ()
  {-# INLINE touchMVec #-}
-- Conversions ----------------------------------------------------------------
-- | Sequential computation of array elements..
--
-- * This is an alias for `computeS` with a more specific type.
--
computeUnboxedS
  :: ( Shape sh
     , Load r1 sh e, U.Unbox e)
  => Array r1 sh e -> Array U sh e
computeUnboxedS = computeS
{-# INLINE computeUnboxedS #-}


-- | Parallel computation of array elements.
--
-- * This is an alias for `computeP` with a more specific type.
--
computeUnboxedP
  :: ( Shape sh
     , Load r1 sh e, Monad m, U.Unbox e)
  => Array r1 sh e -> m (Array U sh e)
computeUnboxedP = computeP
{-# INLINE computeUnboxedP #-}


-- | O(n). Convert a list to an unboxed vector array.
--
-- * This is an alias for `fromList` with a more specific type.
--
fromListUnboxed
  :: (Shape sh, U.Unbox a)
  => sh -> [a] -> Array U sh a
fromListUnboxed = R.fromList
{-# INLINE fromListUnboxed #-}


-- | O(1). Wrap an unboxed vector as an array.
--
-- NOTE(review): the given extent is not checked against the vector
-- length here — callers must ensure they agree.
fromUnboxed
  :: (Shape sh, U.Unbox e)
  => sh -> U.Vector e -> Array U sh e
fromUnboxed sh vec
  = AUnboxed sh vec
{-# INLINE fromUnboxed #-}


-- | O(1). Unpack an unboxed vector from an array.
toUnboxed
  :: U.Unbox e
  => Array U sh e -> U.Vector e
toUnboxed (AUnboxed _ vec)
  = vec
{-# INLINE toUnboxed #-}
-- Zip ------------------------------------------------------------------------
-- | O(1). Zip some unboxed arrays.
-- The shapes must be identical else `error`.
zip :: (Shape sh, U.Unbox a, U.Unbox b)
    => Array U sh a -> Array U sh b
    -> Array U sh (a, b)
zip (AUnboxed shA va) (AUnboxed shB vb)
  | shA == shB = AUnboxed shA (U.zip va vb)
  | otherwise  = error "Repa: zip array shapes not identical"
{-# INLINE zip #-}


-- | O(1). Zip three unboxed arrays.
--   The shapes must be identical else `error`.
zip3 :: (Shape sh, U.Unbox a, U.Unbox b, U.Unbox c)
     => Array U sh a -> Array U sh b -> Array U sh c
     -> Array U sh (a, b, c)
zip3 (AUnboxed shA va) (AUnboxed shB vb) (AUnboxed shC vc)
  | shA == shB && shA == shC
  = AUnboxed shA (U.zip3 va vb vc)
  | otherwise
  = error "Repa: zip array shapes not identical"
{-# INLINE zip3 #-}


-- | O(1). Zip four unboxed arrays.
--   The shapes must be identical else `error`.
zip4 :: (Shape sh, U.Unbox a, U.Unbox b, U.Unbox c, U.Unbox d)
     => Array U sh a -> Array U sh b -> Array U sh c -> Array U sh d
     -> Array U sh (a, b, c, d)
zip4 (AUnboxed shA va) (AUnboxed shB vb) (AUnboxed shC vc) (AUnboxed shD vd)
  | shA == shB && shA == shC && shA == shD
  = AUnboxed shA (U.zip4 va vb vc vd)
  | otherwise
  = error "Repa: zip array shapes not identical"
{-# INLINE zip4 #-}


-- | O(1). Zip five unboxed arrays.
--   The shapes must be identical else `error`.
zip5 :: (Shape sh, U.Unbox a, U.Unbox b, U.Unbox c, U.Unbox d, U.Unbox e)
     => Array U sh a -> Array U sh b -> Array U sh c -> Array U sh d -> Array U sh e
     -> Array U sh (a, b, c, d, e)
zip5 (AUnboxed shA va) (AUnboxed shB vb) (AUnboxed shC vc) (AUnboxed shD vd) (AUnboxed shE ve)
  | shA == shB && shA == shC && shA == shD && shA == shE
  = AUnboxed shA (U.zip5 va vb vc vd ve)
  | otherwise
  = error "Repa: zip array shapes not identical"
{-# INLINE zip5 #-}


-- | O(1). Zip six unboxed arrays.
--   The shapes must be identical else `error`.
zip6 :: (Shape sh, U.Unbox a, U.Unbox b, U.Unbox c, U.Unbox d, U.Unbox e, U.Unbox f)
     => Array U sh a -> Array U sh b -> Array U sh c -> Array U sh d -> Array U sh e -> Array U sh f
     -> Array U sh (a, b, c, d, e, f)
zip6 (AUnboxed shA va) (AUnboxed shB vb) (AUnboxed shC vc) (AUnboxed shD vd) (AUnboxed shE ve) (AUnboxed shF vf)
  | shA == shB && shA == shC && shA == shD && shA == shE && shA == shF
  = AUnboxed shA (U.zip6 va vb vc vd ve vf)
  | otherwise
  = error "Repa: zip array shapes not identical"
{-# INLINE zip6 #-}
-- Unzip ----------------------------------------------------------------------
-- | O(1). Unzip an unboxed array.
unzip :: (U.Unbox a, U.Unbox b)
      => Array U sh (a, b)
      -> (Array U sh a, Array U sh b)
unzip (AUnboxed sh v) =
  case U.unzip v of
    (va, vb) -> (AUnboxed sh va, AUnboxed sh vb)
{-# INLINE unzip #-}


-- | O(1). Unzip an array of triples into three arrays sharing one extent.
unzip3 :: (U.Unbox a, U.Unbox b, U.Unbox c)
       => Array U sh (a, b, c)
       -> (Array U sh a, Array U sh b, Array U sh c)
unzip3 (AUnboxed sh v) =
  case U.unzip3 v of
    (va, vb, vc) -> (AUnboxed sh va, AUnboxed sh vb, AUnboxed sh vc)
{-# INLINE unzip3 #-}


-- | O(1). Unzip an array of 4-tuples into four arrays sharing one extent.
unzip4 :: (U.Unbox a, U.Unbox b, U.Unbox c, U.Unbox d)
       => Array U sh (a, b, c, d)
       -> (Array U sh a, Array U sh b, Array U sh c, Array U sh d)
unzip4 (AUnboxed sh v) =
  case U.unzip4 v of
    (va, vb, vc, vd) ->
      (AUnboxed sh va, AUnboxed sh vb, AUnboxed sh vc, AUnboxed sh vd)
{-# INLINE unzip4 #-}


-- | O(1). Unzip an array of 5-tuples into five arrays sharing one extent.
unzip5 :: (U.Unbox a, U.Unbox b, U.Unbox c, U.Unbox d, U.Unbox e)
       => Array U sh (a, b, c, d, e)
       -> (Array U sh a, Array U sh b, Array U sh c, Array U sh d, Array U sh e)
unzip5 (AUnboxed sh v) =
  case U.unzip5 v of
    (va, vb, vc, vd, ve) ->
      (AUnboxed sh va, AUnboxed sh vb, AUnboxed sh vc, AUnboxed sh vd, AUnboxed sh ve)
{-# INLINE unzip5 #-}


-- | O(1). Unzip an array of 6-tuples into six arrays sharing one extent.
unzip6 :: (U.Unbox a, U.Unbox b, U.Unbox c, U.Unbox d, U.Unbox e, U.Unbox f)
       => Array U sh (a, b, c, d, e, f)
       -> (Array U sh a, Array U sh b, Array U sh c, Array U sh d, Array U sh e, Array U sh f)
unzip6 (AUnboxed sh v) =
  case U.unzip6 v of
    (va, vb, vc, vd, ve, vf) ->
      (AUnboxed sh va, AUnboxed sh vb, AUnboxed sh vc, AUnboxed sh vd, AUnboxed sh ve, AUnboxed sh vf)
{-# INLINE unzip6 #-}
| kairne/repa-lts | Data/Array/Repa/Repr/Unboxed.hs | bsd-3-clause | 8,216 | 0 | 16 | 2,127 | 2,839 | 1,490 | 1,349 | -1 | -1 |
module Sets where
import Data.List hiding (union)
-- | A set represented as a strictly increasing (sorted, duplicate-free)
-- list.  The 'SetI' constructor is the canonical form all operations
-- maintain; list-level equality is therefore sound set equality.
newtype Set a = SetI [a]

-- | Map a function over a set, re-normalising the result (the image may
-- contain duplicates or be out of order).
mapSet :: Ord b => (a -> b) -> Set a -> Set b
mapSet f (SetI xs) = makeSet (map f xs)

-- | Build a set from an arbitrary list: sort, then drop adjacent duplicates.
makeSet :: Ord a => [a] -> Set a
makeSet = SetI . remDups . sort
  where
    remDups []  = []
    remDups [x] = [x]
    remDups (x:y:xs)
      | x < y     = x : remDups (y:xs)
      | otherwise = remDups (y:xs)

-- | Cardinality: number of elements.
card :: Set a -> Int
card (SetI xs) = length xs

-- | Set equality; sound because the representation is canonical.
eqSet :: Eq a => Set a -> Set a -> Bool
eqSet (SetI xs) (SetI ys) = xs == ys

-- | Render as @{e1, e2, ...}@.
showSet :: Show a => Set a -> String
showSet (SetI xs) = "{" ++ intercalate ", " (map show xs) ++ "}"

-- | Expose the underlying sorted element list.
flatten :: Set a -> [a]
flatten (SetI xs) = xs

-- | The empty set.
-- (Fixes the original signature's typo'd type variable @Set aempty@.)
empty :: Set a
empty = SetI []

isEmpty :: (Eq a) => Set a -> Bool
isEmpty = (== empty)

-- | Singleton set.
sing :: a -> Set a
sing x = SetI [x]

-- | Set union via a merge of the two sorted representations.
union :: Ord a => Set a -> Set a -> Set a
union (SetI xs) (SetI ys) = SetI (uni xs ys)

-- | Merge two sorted duplicate-free lists, keeping each element once.
uni :: Ord a => [a] -> [a] -> [a]
uni [] ys = ys
uni xs [] = xs
uni (x:xs) (y:ys)
  | x < y     = x : uni xs (y:ys)
  | x == y    = x : uni xs ys
  | otherwise = y : uni (x:xs) ys

-- | Set intersection via a merge of the two sorted representations.
inter :: Ord a => Set a -> Set a -> Set a
inter (SetI xs) (SetI ys) = SetI (int xs ys)

-- | Intersect two sorted duplicate-free lists.
int :: Ord a => [a] -> [a] -> [a]
int [] _  = []
int _  [] = []
int (x:xs) (y:ys)
  | x < y     = int xs (y:ys)
  | x == y    = x : int xs ys
  | otherwise = int (x:xs) ys

-- | Iterate @f@ from @s@ until a fixed point is reached.
-- Terminates only if the iteration actually converges.
setlimit :: Eq a => (Set a -> Set a) -> Set a -> Set a
setlimit f s
  | s == next = s
  | otherwise = setlimit f next
  where
    next = f s

instance Eq a => Eq (Set a) where
  (==) = eqSet

instance Ord a => Ord (Set a) where
  s1 <= s2 = flatten s1 <= flatten s2

instance Show a => Show (Set a) where
  show = showSet
| arthurmgo/regex-ftc | src/Sets.hs | bsd-3-clause | 1,770 | 0 | 11 | 596 | 1,094 | 542 | 552 | 58 | 3 |
module Gidl.Types.Base
( uint8_t
, uint16_t
, uint32_t
, uint64_t
, sint8_t
, sint16_t
, sint32_t
, sint64_t
, bool_t
, float_t
, double_t
, sequence_num_t
, baseTypeEnv
) where
import Gidl.Types.AST
-- Unsigned fixed-width integer atoms.
uint8_t :: Type
uint8_t = PrimType (AtomType (AtomWord Bits8))

uint16_t :: Type
uint16_t = PrimType (AtomType (AtomWord Bits16))

uint32_t :: Type
uint32_t = PrimType (AtomType (AtomWord Bits32))

uint64_t :: Type
uint64_t = PrimType (AtomType (AtomWord Bits64))

-- Signed fixed-width integer atoms.
sint8_t :: Type
sint8_t = PrimType (AtomType (AtomInt Bits8))

sint16_t :: Type
sint16_t = PrimType (AtomType (AtomInt Bits16))

sint32_t :: Type
sint32_t = PrimType (AtomType (AtomInt Bits32))

sint64_t :: Type
sint64_t = PrimType (AtomType (AtomInt Bits64))

-- Boolean as an 8-bit enum with explicit false=0 / true=1 encoding.
bool_t :: Type
bool_t = PrimType (EnumType "bool_t" Bits8 [("false", 0), ("true", 1)])

-- Floating-point atoms.
float_t :: Type
float_t = PrimType (AtomType AtomFloat)

double_t :: Type
double_t = PrimType (AtomType AtomDouble)

-- Sequence number: a newtype over a 32-bit unsigned word.
sequence_num_t :: Type
sequence_num_t = PrimType (Newtype "sequence_num_t" (AtomType (AtomWord Bits32)))

-- | Environment mapping each base type's surface name to its definition.
-- The names here must match the identifiers users write in IDL sources.
baseTypeEnv :: TypeEnv
baseTypeEnv = TypeEnv
  [ ( "uint8_t" , uint8_t)
  , ( "uint16_t", uint16_t)
  , ( "uint32_t", uint32_t)
  , ( "uint64_t", uint64_t)
  , ( "sint8_t" , sint8_t)
  , ( "sint16_t", sint16_t)
  , ( "sint32_t", sint32_t)
  , ( "sint64_t", sint64_t)
  , ( "bool_t"  , bool_t)
  , ( "float_t" , float_t)
  , ( "double_t", double_t)
  , ( "sequence_num_t", sequence_num_t)
  ]
| GaloisInc/gidl | src/Gidl/Types/Base.hs | bsd-3-clause | 1,463 | 0 | 11 | 276 | 487 | 280 | 207 | 53 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Install
-- Copyright : (c) 2005 David Himmelstrup
-- 2007 Bjorn Bringert
-- 2007-2010 Duncan Coutts
-- License : BSD-like
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- High level interface to package installation.
-----------------------------------------------------------------------------
module Distribution.Client.Install (
-- * High-level interface
install,
-- * Lower-level interface that allows to manipulate the install plan
makeInstallContext,
makeInstallPlan,
processInstallPlan,
InstallArgs,
InstallContext,
-- * Prune certain packages from the install plan
pruneInstallPlan
) where
import Data.Foldable
( traverse_ )
import Data.List
( isPrefixOf, unfoldr, nub, sort, (\\) )
import qualified Data.Set as S
import Data.Maybe
( catMaybes, isJust, isNothing, fromMaybe, mapMaybe )
import Control.Exception as Exception
( Exception(toException), bracket, catches
, Handler(Handler), handleJust, IOException, SomeException )
#ifndef mingw32_HOST_OS
import Control.Exception as Exception
( Exception(fromException) )
#endif
import System.Exit
( ExitCode(..) )
import Distribution.Compat.Exception
( catchIO, catchExit )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
( (<$>) )
import Data.Traversable
( traverse )
#endif
import Control.Monad
( filterM, forM_, when, unless )
import System.Directory
( getTemporaryDirectory, doesDirectoryExist, doesFileExist,
createDirectoryIfMissing, removeFile, renameDirectory )
import System.FilePath
( (</>), (<.>), equalFilePath, takeDirectory )
import System.IO
( openFile, IOMode(AppendMode), hClose )
import System.IO.Error
( isDoesNotExistError, ioeGetFileName )
import Distribution.Client.Targets
import Distribution.Client.Configure
( chooseCabalVersion, configureSetupScript )
import Distribution.Client.Dependency
import Distribution.Client.Dependency.Types
( Solver(..) )
import Distribution.Client.FetchUtils
import qualified Distribution.Client.Haddock as Haddock (regenerateHaddockIndex)
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages, getInstalledPackages )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Client.Setup
( GlobalFlags(..)
, ConfigFlags(..), configureCommand, filterConfigureFlags
, ConfigExFlags(..), InstallFlags(..) )
import Distribution.Client.Config
( defaultCabalDir, defaultUserInstall )
import Distribution.Client.Sandbox.Timestamp
( withUpdateTimestamps )
import Distribution.Client.Sandbox.Types
( SandboxPackageInfo(..), UseSandbox(..), isUseSandbox
, whenUsingSandbox )
import Distribution.Client.Tar (extractTarGzFile)
import Distribution.Client.Types as Source
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.SetupWrapper
( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions )
import Distribution.Client.PackageIndex
( PackageFixedDeps(..) )
import qualified Distribution.Client.BuildReports.Anonymous as BuildReports
import qualified Distribution.Client.BuildReports.Storage as BuildReports
( storeAnonymous, storeLocal, fromInstallPlan, fromPlanningFailure )
import qualified Distribution.Client.InstallSymlink as InstallSymlink
( symlinkBinaries )
import qualified Distribution.Client.PackageIndex as SourcePackageIndex
import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade
import qualified Distribution.Client.World as World
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.Client.Compat.ExecutablePath
import Distribution.Client.JobControl
import qualified Distribution.Client.ComponentDeps as CD
import Distribution.Utils.NubList
import Distribution.Simple.Compiler
( CompilerId(..), Compiler(compilerId), compilerFlavor
, CompilerInfo(..), compilerInfo, PackageDB(..), PackageDBStack )
import Distribution.Simple.Program (ProgramConfiguration,
defaultProgramConfiguration)
import qualified Distribution.Simple.InstallDirs as InstallDirs
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.Simple.Setup
( haddockCommand, HaddockFlags(..)
, buildCommand, BuildFlags(..), emptyBuildFlags
, toFlag, fromFlag, fromFlagOrDefault, flagToMaybe, defaultDistPref )
import qualified Distribution.Simple.Setup as Cabal
( Flag(..)
, copyCommand, CopyFlags(..), emptyCopyFlags
, registerCommand, RegisterFlags(..), emptyRegisterFlags
, testCommand, TestFlags(..), emptyTestFlags )
import Distribution.Simple.Utils
( createDirectoryIfMissingVerbose, rawSystemExit, comparing
, writeFileAtomic, withTempFile , withUTF8FileContents )
import Distribution.Simple.InstallDirs as InstallDirs
( PathTemplate, fromPathTemplate, toPathTemplate, substPathTemplate
, initialPathTemplateEnv, installDirsTemplateEnv )
import Distribution.Package
( PackageIdentifier(..), PackageId, packageName, packageVersion
, Package(..), PackageKey
, Dependency(..), thisPackageVersion, InstalledPackageId, installedPackageId )
import qualified Distribution.PackageDescription as PackageDescription
import Distribution.PackageDescription
( PackageDescription, GenericPackageDescription(..), Flag(..)
, FlagName(..), FlagAssignment )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription )
import Distribution.ParseUtils
( showPWarning )
import Distribution.Version
( Version, VersionRange, foldVersionRange )
import Distribution.Simple.Utils as Utils
( notice, info, warn, debug, debugNoWrap, die
, intercalate, withTempDirectory )
import Distribution.Client.Utils
( determineNumJobs, inDir, mergeBy, MergeResult(..)
, tryCanonicalizePath )
import Distribution.System
( Platform, OS(Windows), buildOS )
import Distribution.Text
( display )
import Distribution.Verbosity as Verbosity
( Verbosity, showForCabal, normal, verbose )
import Distribution.Simple.BuildPaths ( exeExtension )
--TODO:
-- * assign flags to packages individually
-- * complain about flags that do not apply to any package given as target
-- so flags do not apply to dependencies, only listed, can use flag
-- constraints for dependencies
-- * only record applicable flags in world file
-- * allow flag constraints
-- * allow installed constraints
-- * allow flag and installed preferences
-- * change world file to use cabal section syntax
-- * allow persistent configure flags for each package individually
-- ------------------------------------------------------------
-- * Top level user actions
-- ------------------------------------------------------------
-- | Installs the packages needed to satisfy a list of dependencies.
--
install
  :: Verbosity
  -> PackageDBStack
  -> [Repo]
  -> Compiler
  -> Platform
  -> ProgramConfiguration
  -> UseSandbox
  -> Maybe SandboxPackageInfo
  -> GlobalFlags
  -> ConfigFlags
  -> ConfigExFlags
  -> InstallFlags
  -> HaddockFlags
  -> [UserTarget]
  -> IO ()
install verbosity packageDBs repos comp platform conf useSandbox mSandboxPkgInfo
  globalFlags configFlags configExFlags installFlags haddockFlags
  userTargets0 = do
    -- Resolve targets and package databases, then plan.  Planning failures
    -- are reported (for build reports) before dying; a successful plan is
    -- handed to 'processInstallPlan' which performs the builds.
    installContext <- makeInstallContext verbosity args (Just userTargets0)
    planResult     <- foldProgress logMsg (return . Left) (return . Right) =<<
                      makeInstallPlan verbosity args installContext

    case planResult of
      Left message -> do
        reportPlanningFailure verbosity args installContext message
        die' message
      Right installPlan ->
        processInstallPlan verbosity args installContext installPlan
  where
    args :: InstallArgs
    args = (packageDBs, repos, comp, platform, conf, useSandbox, mSandboxPkgInfo,
            globalFlags, configFlags, configExFlags, installFlags,
            haddockFlags)

    -- In a sandbox, append a hint since inconsistent deps are a common cause.
    die' message = die (message ++ if isUseSandbox useSandbox
                                   then installFailedInSandbox else [])
    -- TODO: use a better error message, remove duplication.
    installFailedInSandbox =
      "\nNote: when using a sandbox, all packages are required to have "
      ++ "consistent dependencies. "
      ++ "Try reinstalling/unregistering the offending packages or "
      ++ "recreating the sandbox."
    logMsg message rest = debugNoWrap verbosity message >> rest
-- TODO: Make InstallContext a proper data type with documented fields.
-- | Common context for makeInstallPlan and processInstallPlan.
-- Components: installed-package index, source package DB, the user's
-- targets, and their resolved package specifiers.
type InstallContext = ( InstalledPackageIndex, SourcePackageDb
                      , [UserTarget], [PackageSpecifier SourcePackage] )

-- TODO: Make InstallArgs a proper data type with documented fields or just get
-- rid of it completely.
-- | Initial arguments given to 'install' or 'makeInstallContext'.
type InstallArgs = ( PackageDBStack
                   , [Repo]
                   , Compiler
                   , Platform
                   , ProgramConfiguration
                   , UseSandbox
                   , Maybe SandboxPackageInfo
                   , GlobalFlags
                   , ConfigFlags
                   , ConfigExFlags
                   , InstallFlags
                   , HaddockFlags )
-- | Make an install context given install arguments.
makeInstallContext :: Verbosity -> InstallArgs -> Maybe [UserTarget]
                   -> IO InstallContext
makeInstallContext verbosity
  (packageDBs, repos, comp, _, conf,_,_,
   globalFlags, _, _, _, _) mUserTargets = do

    installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
    sourcePkgDb       <- getSourcePackages verbosity repos

    (userTargets, pkgSpecifiers) <- case mUserTargets of
      Nothing ->
        -- We want to distinguish between the case where the user has given an
        -- empty list of targets on the command-line and the case where we
        -- specifically want to have an empty list of targets.
        return ([], [])
      Just userTargets0 -> do
        -- For install, if no target is given it means we use the current
        -- directory as the single target.
        let userTargets | null userTargets0 = [UserTargetLocalDir "."]
                        | otherwise         = userTargets0

        pkgSpecifiers <- resolveUserTargets verbosity
                           (fromFlag $ globalWorldFile globalFlags)
                           (packageIndex sourcePkgDb)
                           userTargets
        return (userTargets, pkgSpecifiers)

    return (installedPkgIndex, sourcePkgDb, userTargets, pkgSpecifiers)
-- | Make an install plan given install context and install arguments.
makeInstallPlan :: Verbosity -> InstallArgs -> InstallContext
                -> IO (Progress String String InstallPlan)
makeInstallPlan verbosity
  (_, _, comp, platform, _, _, mSandboxPkgInfo,
   _, configFlags, configExFlags, installFlags,
   _)
  (installedPkgIndex, sourcePkgDb,
   _, pkgSpecifiers) = do

    -- Pick the solver first (it may depend on the compiler version),
    -- then run the pure planning step as a 'Progress' computation.
    solver <- chooseSolver verbosity (fromFlag (configSolver configExFlags))
                (compilerInfo comp)
    notice verbosity "Resolving dependencies..."
    return $ planPackages comp platform mSandboxPkgInfo solver
               configFlags configExFlags installFlags
               installedPkgIndex sourcePkgDb pkgSpecifiers
-- | Given an install plan, perform the actual installations.
processInstallPlan :: Verbosity -> InstallArgs -> InstallContext
                   -> InstallPlan
                   -> IO ()
processInstallPlan verbosity
  args@(_,_, comp, _, _, _, _, _, _, _, installFlags, _)
  (installedPkgIndex, sourcePkgDb,
   userTargets, pkgSpecifiers) installPlan = do

    -- Sanity-check the plan (reinstalls, breakage, offline mode) and
    -- possibly print it; may die here before anything is built.
    checkPrintPlan verbosity comp installedPkgIndex installPlan sourcePkgDb
      installFlags pkgSpecifiers

    unless (dryRun || nothingToInstall) $ do
      installPlan' <- performInstallations verbosity
                        args installedPkgIndex installPlan
      postInstallActions verbosity args userTargets installPlan'
  where
    dryRun = fromFlag (installDryRun installFlags)
    nothingToInstall = null (InstallPlan.ready installPlan)
-- ------------------------------------------------------------
-- * Installation planning
-- ------------------------------------------------------------
-- | Pure dependency planning: assemble the solver parameters from all the
-- flag sets and run the resolver; with @--only-dependencies@ the targets
-- themselves are pruned from the resulting plan.
planPackages :: Compiler
             -> Platform
             -> Maybe SandboxPackageInfo
             -> Solver
             -> ConfigFlags
             -> ConfigExFlags
             -> InstallFlags
             -> InstalledPackageIndex
             -> SourcePackageDb
             -> [PackageSpecifier SourcePackage]
             -> Progress String String InstallPlan
planPackages comp platform mSandboxPkgInfo solver
             configFlags configExFlags installFlags
             installedPkgIndex sourcePkgDb pkgSpecifiers =

        resolveDependencies
          platform (compilerInfo comp)
          solver
          resolverParams

    >>= if onlyDeps then pruneInstallPlan pkgSpecifiers else return

  where
    -- Each combinator below layers one policy onto the standard install
    -- policy; order of composition mirrors the original.
    resolverParams =

        setMaxBackjumps (if maxBackjumps < 0 then Nothing
                                             else Just maxBackjumps)

      . setIndependentGoals independentGoals

      . setReorderGoals reorderGoals

      . setAvoidReinstalls avoidReinstalls

      . setShadowPkgs shadowPkgs

      . setStrongFlags strongFlags

      . setPreferenceDefault (if upgradeDeps then PreferAllLatest
                                             else PreferLatestForSelected)

      . removeUpperBounds allowNewer

      . addPreferences
          -- preferences from the config file or command line
          [ PackageVersionPreference name ver
          | Dependency name ver <- configPreferences configExFlags ]

      . addConstraints
          -- version constraints from the config file or command line
          (map userToPackageConstraint (configExConstraints configExFlags))

      . addConstraints
          --FIXME: this just applies all flags to all targets which
          -- is silly. We should check if the flags are appropriate
          [ PackageConstraintFlags (pkgSpecifierTarget pkgSpecifier) flags
          | let flags = configConfigurationsFlags configFlags
          , not (null flags)
          , pkgSpecifier <- pkgSpecifiers ]

      . addConstraints
          [ PackageConstraintStanzas (pkgSpecifierTarget pkgSpecifier) stanzas
          | pkgSpecifier <- pkgSpecifiers ]

      . maybe id applySandboxInstallPolicy mSandboxPkgInfo

      . (if reinstall then reinstallTargets else id)

      $ standardInstallPolicy
          installedPkgIndex sourcePkgDb pkgSpecifiers

    -- Test/benchmark stanzas are only enabled when asked for.
    stanzas = concat
        [ if testsEnabled then [TestStanzas] else []
        , if benchmarksEnabled then [BenchStanzas] else []
        ]
    testsEnabled = fromFlagOrDefault False $ configTests configFlags
    benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags

    reinstall        = fromFlag (installReinstall installFlags)
    reorderGoals     = fromFlag (installReorderGoals installFlags)
    independentGoals = fromFlag (installIndependentGoals installFlags)
    avoidReinstalls  = fromFlag (installAvoidReinstalls installFlags)
    shadowPkgs       = fromFlag (installShadowPkgs installFlags)
    strongFlags      = fromFlag (installStrongFlags installFlags)
    maxBackjumps     = fromFlag (installMaxBackjumps installFlags)
    upgradeDeps      = fromFlag (installUpgradeDeps installFlags)
    onlyDeps         = fromFlag (installOnlyDeps installFlags)
    allowNewer       = fromFlag (configAllowNewer configExFlags)
-- | Remove the provided targets from the install plan.
pruneInstallPlan :: Package pkg => [PackageSpecifier pkg] -> InstallPlan
                 -> Progress String String InstallPlan
pruneInstallPlan pkgSpecifiers =
  -- TODO: this is a general feature and should be moved to D.C.Dependency
  -- Also, the InstallPlan.remove should return info more precise to the
  -- problem, rather than the very general PlanProblem type.
  either (Fail . explain) Done
  . InstallPlan.remove (\pkg -> packageName pkg `elem` targetnames)
  where
    -- Removal fails when a pruned target is still needed as a dependency
    -- of another target; turn those problems into a readable message.
    explain :: [InstallPlan.PlanProblem] -> String
    explain problems =
      "Cannot select only the dependencies (as requested by the "
      ++ "'--only-dependencies' flag), "
      ++ (case pkgids of
            [pkgid] -> "the package " ++ display pkgid ++ " is "
            _       -> "the packages "
                       ++ intercalate ", " (map display pkgids) ++ " are ")
      ++ "required by a dependency of one of the other targets."
      where
        pkgids =
          nub [ depid
              | InstallPlan.PackageMissingDeps _ depids <- problems
              , depid <- depids
              , packageName depid `elem` targetnames ]

    targetnames = map pkgSpecifierTarget pkgSpecifiers
-- ------------------------------------------------------------
-- * Informational messages
-- ------------------------------------------------------------
-- | Perform post-solver checks of the install plan and print it if
-- either requested or needed.
checkPrintPlan :: Verbosity
               -> Compiler
               -> InstalledPackageIndex
               -> InstallPlan
               -> SourcePackageDb
               -> InstallFlags
               -> [PackageSpecifier SourcePackage]
               -> IO ()
checkPrintPlan verbosity comp installed installPlan sourcePkgDb
  installFlags pkgSpecifiers = do

  -- User targets that are already installed.
  let preExistingTargets =
        [ p | let tgts = map pkgSpecifierTarget pkgSpecifiers,
              InstallPlan.PreExisting p <- InstallPlan.toList installPlan,
              packageName p `elem` tgts ]

  -- If there's nothing to install, we print the already existing
  -- target packages as an explanation.
  when nothingToInstall $
    notice verbosity $ unlines $
         "All the requested packages are already installed:"
       : map (display . packageId) preExistingTargets
      ++ ["Use --reinstall if you want to reinstall anyway."]

  let lPlan = linearizeInstallPlan comp installed installPlan
  -- Are any packages classified as reinstalls?
  let reinstalledPkgs = concatMap (extractReinstalls . snd) lPlan
  -- Packages that are already broken.
  let oldBrokenPkgs =
          map Installed.installedPackageId
        . PackageIndex.reverseDependencyClosure installed
        . map (Installed.installedPackageId . fst)
        . PackageIndex.brokenPackages
        $ installed
  let excluded = reinstalledPkgs ++ oldBrokenPkgs
  -- Packages that are reverse dependencies of replaced packages are very
  -- likely to be broken. We exclude packages that are already broken.
  let newBrokenPkgs =
        filter (\ p -> not (Installed.installedPackageId p `elem` excluded))
               (PackageIndex.reverseDependencyClosure installed reinstalledPkgs)
  let containsReinstalls = not (null reinstalledPkgs)
  let breaksPkgs         = not (null newBrokenPkgs)

  -- Bump verbosity for risky plans so the detailed plan gets shown.
  let adaptedVerbosity
        | containsReinstalls && not overrideReinstall = verbosity `max` verbose
        | otherwise                                   = verbosity

  -- We print the install plan if we are in a dry-run or if we are confronted
  -- with a dangerous install plan.
  when (dryRun || containsReinstalls && not overrideReinstall) $
    printPlan (dryRun || breaksPkgs && not overrideReinstall)
      adaptedVerbosity lPlan sourcePkgDb

  -- If the install plan is dangerous, we print various warning messages. In
  -- particular, if we can see that packages are likely to be broken, we even
  -- bail out (unless installation has been forced with --force-reinstalls).
  when containsReinstalls $ do
    if breaksPkgs
      then do
        (if dryRun || overrideReinstall then warn verbosity else die) $ unlines $
            "The following packages are likely to be broken by the reinstalls:"
          : map (display . Installed.sourcePackageId) newBrokenPkgs
          ++ if overrideReinstall
               then if dryRun then [] else
                 ["Continuing even though the plan contains dangerous reinstalls."]
               else
                 ["Use --force-reinstalls if you want to install anyway."]
      else unless dryRun $ warn verbosity
             "Note that reinstalls are always dangerous. Continuing anyway..."

  -- If we are explicitly told to not download anything, check that all packages
  -- are already fetched.
  let offline = fromFlagOrDefault False (installOfflineMode installFlags)
  when offline $ do
    let pkgs = [ sourcePkg
               | InstallPlan.Configured (ConfiguredPackage sourcePkg _ _ _)
                   <- InstallPlan.toList installPlan ]
    notFetched <- fmap (map packageInfoId)
                  . filterM (fmap isNothing . checkFetched . packageSource)
                  $ pkgs
    unless (null notFetched) $
      die $ "Can't download packages in offline mode. "
         ++ "Must download the following packages to proceed:\n"
         ++ intercalate ", " (map display notFetched)
         ++ "\nTry using 'cabal fetch'."

  where
    nothingToInstall  = null (InstallPlan.ready installPlan)

    dryRun            = fromFlag (installDryRun installFlags)
    overrideReinstall = fromFlag (installOverrideReinstall installFlags)
-- | Flatten the plan into a dependency-respecting build order, pairing
-- each package with its install status relative to what is installed.
linearizeInstallPlan :: Compiler
                     -> InstalledPackageIndex
                     -> InstallPlan
                     -> [(ReadyPackage, PackageStatus)]
linearizeInstallPlan comp installedPkgIndex plan =
    unfoldr next plan
  where
    -- Repeatedly take one ready package and mark it completed so its
    -- dependents become ready on the following step.
    next plan' = case InstallPlan.ready plan' of
      []      -> Nothing
      (pkg:_) -> Just ((pkg, status), plan'')
        where
          pkgid  = installedPackageId pkg
          status = packageStatus comp installedPkgIndex pkg
          plan'' = InstallPlan.completed pkgid
                     (BuildOk DocsNotTried TestsNotTried
                        (Just $ Installed.emptyInstalledPackageInfo
                           { Installed.sourcePackageId = packageId pkg
                           , Installed.installedPackageId = pkgid }))
                     (InstallPlan.processing [pkg] plan')
          --FIXME: This is a bit of a hack,
          -- pretending that each package is installed
          -- It's doubly a hack because the installed package ID
          -- didn't get updated...
data PackageStatus = NewPackage
| NewVersion [Version]
| Reinstall [InstalledPackageId] [PackageChange]
-- | A single dependency difference between the installed and the newly
-- configured version of a package: removed, changed, or added (encoded via
-- 'MergeResult').
type PackageChange = MergeResult PackageIdentifier PackageIdentifier
-- | The installed package ids that a 'Reinstall' would replace; the empty
-- list for 'NewPackage' and 'NewVersion'.
extractReinstalls :: PackageStatus -> [InstalledPackageId]
extractReinstalls status =
  case status of
    Reinstall ipids _ -> ipids
    _                 -> []
-- | Classify a ready package against the installed package index: brand
-- new, a new version alongside existing ones, or a reinstall of an already
-- installed version (in which case we also report which dependencies
-- changed between the installed and configured versions).
packageStatus :: Compiler -> InstalledPackageIndex -> ReadyPackage -> PackageStatus
packageStatus _comp installedPkgIndex cpkg =
  case PackageIndex.lookupPackageName installedPkgIndex
                                      (packageName cpkg) of
    [] -> NewPackage
    ps -> case filter ((== packageId cpkg)
                       . Installed.sourcePackageId) (concatMap snd ps) of
      []           -> NewVersion (map fst ps)
      pkgs@(pkg:_) -> Reinstall (map Installed.installedPackageId pkgs)
                                (changes pkg cpkg)

  where

    changes :: Installed.InstalledPackageInfo
            -> ReadyPackage
            -> [MergeResult PackageIdentifier PackageIdentifier]
    -- Compare the dependency lists of the installed and configured
    -- versions, keeping only the entries that actually differ.
    changes pkg pkg' = filter changed $
      mergeBy (comparing packageName)
        (resolveInstalledIds $ Installed.depends pkg) -- deps of installed pkg
        (resolveInstalledIds $ CD.nonSetupDeps (depends pkg')) -- deps of configured pkg

    -- convert to source pkg ids via index
    resolveInstalledIds :: [InstalledPackageId] -> [PackageIdentifier]
    resolveInstalledIds =
        nub
      . sort
      . map Installed.sourcePackageId
      . catMaybes
      . map (PackageIndex.lookupInstalledPackageId installedPkgIndex)

    -- A dep present on both sides counts as changed only if the package ids
    -- differ; one-sided entries (added/removed deps) always count.
    changed (InBoth pkgid pkgid') = pkgid /= pkgid'
    changed _                     = True
-- | Print the linearized install plan, one line per package: a short form
-- by default, or a detailed form (non-default flags, stanzas, and reinstall
-- changes) at verbose level. Does nothing for an empty plan.
printPlan :: Bool -- is dry run
          -> Verbosity
          -> [(ReadyPackage, PackageStatus)]
          -> SourcePackageDb
          -> IO ()
printPlan dryRun verbosity plan sourcePkgDb = case plan of
  []   -> return ()
  pkgs
    | verbosity >= Verbosity.verbose -> notice verbosity $ unlines $
        ("In order, the following " ++ wouldWill ++ " be installed:")
      : map showPkgAndReason pkgs
    | otherwise -> notice verbosity $ unlines $
        ("In order, the following " ++ wouldWill
         ++ " be installed (use -v for more details):")
      : map showPkg pkgs
  where
    wouldWill | dryRun    = "would"
              | otherwise = "will"

    -- Short form: package id plus a "(latest: ...)" hint if outdated.
    showPkg (pkg, _) = display (packageId pkg) ++
                       showLatest (pkg)

    -- Verbose form: adds flags, stanzas and the reason for installation.
    showPkgAndReason (pkg', pr) = display (packageId pkg') ++
          showLatest pkg' ++
          showFlagAssignment (nonDefaultFlags pkg') ++
          showStanzas (stanzas pkg') ++ " " ++
          case pr of
            NewPackage     -> "(new package)"
            NewVersion _   -> "(new version)"
            Reinstall _ cs -> "(reinstall)" ++ case cs of
                []   -> ""
                diff -> " changes: " ++ intercalate ", " (map change diff)

    -- Look up the newest version available in the source package db and
    -- mention it when we are installing something older.
    showLatest :: ReadyPackage -> String
    showLatest pkg = case mLatestVersion of
        Just latestVersion ->
            if packageVersion pkg < latestVersion
            then (" (latest: " ++ display latestVersion ++ ")")
            else ""
        Nothing -> ""
      where
        mLatestVersion :: Maybe Version
        -- 'last' is safe here: the empty-list case is matched first, so the
        -- results are ordered and non-empty when we take 'last'.
        mLatestVersion = case SourcePackageIndex.lookupPackageName
                                (packageIndex sourcePkgDb)
                                (packageName pkg) of
            [] -> Nothing
            x  -> Just $ packageVersion $ last x

    toFlagAssignment :: [Flag] -> FlagAssignment
    toFlagAssignment = map (\ f -> (flagName f, flagDefault f))

    -- Only flags that differ from the package's declared defaults.
    nonDefaultFlags :: ReadyPackage -> FlagAssignment
    nonDefaultFlags (ReadyPackage spkg fa _ _) =
      let defaultAssignment =
            toFlagAssignment
              (genPackageFlags (Source.packageDescription spkg))
      in fa \\ defaultAssignment

    stanzas :: ReadyPackage -> [OptionalStanza]
    stanzas (ReadyPackage _ _ sts _) = sts

    showStanzas :: [OptionalStanza] -> String
    showStanzas = concatMap ((' ' :) . showStanza)
    showStanza TestStanzas  = "*test"
    showStanza BenchStanzas = "*bench"

    -- FIXME: this should be a proper function in a proper place
    showFlagAssignment :: FlagAssignment -> String
    showFlagAssignment = concatMap ((' ' :) . showFlagValue)
    showFlagValue (f, True)   = '+' : showFlagName f
    showFlagValue (f, False)  = '-' : showFlagName f
    showFlagName (FlagName f) = f

    change (OnlyInLeft pkgid)    = display pkgid ++ " removed"
    change (InBoth pkgid pkgid') = display pkgid ++ " -> "
                                   ++ display (packageVersion pkgid')
    change (OnlyInRight pkgid')  = display pkgid' ++ " added"
-- ------------------------------------------------------------
-- * Post installation stuff
-- ------------------------------------------------------------
-- | Report a solver failure. This works slightly differently to
-- 'postInstallActions', as (by definition) we don't have an install plan.
reportPlanningFailure :: Verbosity -> InstallArgs -> InstallContext -> String -> IO ()
reportPlanningFailure verbosity
  (_, _, comp, platform, _, _, _
  ,_, configFlags, _, installFlags, _)
  (_, sourcePkgDb, _, pkgSpecifiers)
  message = do

  -- Reporting is opt-in via --report-planning-failure.
  when reportFailure $ do

    -- Only create reports for explicitly named packages
    -- (and only those actually known to the source package index).
    let pkgids =
          filter (SourcePackageIndex.elemByPackageId (packageIndex sourcePkgDb)) $
          mapMaybe theSpecifiedPackage pkgSpecifiers

        buildReports = BuildReports.fromPlanningFailure platform (compilerId comp)
                         pkgids (configConfigurationsFlags configFlags)

    when (not (null buildReports)) $
      info verbosity $
        "Solver failure will be reported for "
        ++ intercalate "," (map display pkgids)

    -- Save reports
    BuildReports.storeLocal (compilerInfo comp)
      (fromNubList $ installSummaryFile installFlags) buildReports platform

    -- Save solver log
    case logFile of
      Nothing -> return ()
      Just template -> forM_ pkgids $ \pkgid ->
        let env = initialPathTemplateEnv pkgid dummyPackageKey
                    (compilerInfo comp) platform
            path = fromPathTemplate $ substPathTemplate env template
        in writeFile path message

  where
    reportFailure = fromFlag (installReportPlanningFailure installFlags)
    logFile = flagToMaybe (installLogFile installFlags)

    -- A PackageKey is calculated from the transitive closure of
    -- dependencies, but when the solver fails we don't have that.
    -- So we fail.
    -- NOTE(review): this error only fires if a log-file template actually
    -- uses the package key variable — TODO confirm against PathTemplate use.
    dummyPackageKey = error "reportPlanningFailure: package key not available"
-- | If a 'PackageSpecifier' refers to a single package, return Just that package.
theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId
theSpecifiedPackage pkgSpec =
  case pkgSpec of
    -- A named package pins a single package only when it carries exactly one
    -- constraint, that constraint is on the same name, and the version range
    -- denotes exactly one version.
    NamedPackage name [PackageConstraintVersion name' version]
      | name == name' -> PackageIdentifier name <$> trivialRange version
    NamedPackage _ _ -> Nothing
    SpecificSourcePackage pkg -> Just $ packageId pkg
  where
    -- | If a range includes only a single version, return Just that version.
    trivialRange :: VersionRange -> Maybe Version
    trivialRange = foldVersionRange
                     Nothing       -- "-any": any version, not trivial
                     Just          -- "== v"
                     (\_ -> Nothing)   -- "> v"
                     (\_ -> Nothing)   -- "< v"
                     (\_ _ -> Nothing) -- union of ranges
                     (\_ _ -> Nothing) -- intersection of ranges
-- | Various stuff we do after successful or unsuccessfully installing a bunch
-- of packages. This includes:
--
-- * build reporting, local and remote
-- * symlinking binaries
-- * updating indexes
-- * updating world file
-- * error reporting
--
postInstallActions :: Verbosity
                   -> InstallArgs
                   -> [UserTarget]
                   -> InstallPlan
                   -> IO ()
postInstallActions verbosity
  (packageDBs, _, comp, platform, conf, useSandbox, mSandboxPkgInfo
  ,globalFlags, configFlags, _, installFlags, _)
  targets installPlan = do

  -- Record the explicitly named targets in the world file (skipped with
  -- --one-shot).
  unless oneShot $
    World.insert verbosity worldFile
      --FIXME: does not handle flags
      [ World.WorldPkgInfo dep []
      | UserTargetNamed dep <- targets ]

  -- Build reporting: always store locally; upload anonymous or detailed
  -- reports depending on the configured reporting level.
  let buildReports = BuildReports.fromInstallPlan installPlan
  BuildReports.storeLocal (compilerInfo comp) (fromNubList $ installSummaryFile installFlags) buildReports
    (InstallPlan.planPlatform installPlan)
  when (reportingLevel >= AnonymousReports) $
    BuildReports.storeAnonymous buildReports
  when (reportingLevel == DetailedReports) $
    storeDetailedBuildReports verbosity logsDir buildReports

  regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox
    configFlags installFlags installPlan

  symlinkBinaries verbosity comp configFlags installFlags installPlan

  -- Print the per-package failure summary (and die) if anything failed.
  printBuildFailures installPlan

  updateSandboxTimestampsFile useSandbox mSandboxPkgInfo
    comp platform installPlan

  where
    reportingLevel = fromFlag (installBuildReports installFlags)
    logsDir        = fromFlag (globalLogsDir globalFlags)
    oneShot        = fromFlag (installOneShot installFlags)
    worldFile      = fromFlag $ globalWorldFile globalFlags
-- | Copy each build's log file into the per-remote-repo reports directory
-- under the user's cabal dir, alongside its serialized build report. Only
-- reports that came from a remote repo and whose outcome is likely to have
-- produced a log file are considered.
storeDetailedBuildReports :: Verbosity -> FilePath
                          -> [(BuildReports.BuildReport, Maybe Repo)] -> IO ()
storeDetailedBuildReports verbosity logsDir reports = sequence_
  [ do dotCabal <- defaultCabalDir
       let logFileName = display (BuildReports.package report) <.> "log"
           logFile     = logsDir </> logFileName
           reportsDir  = dotCabal </> "reports" </> remoteRepoName remoteRepo
           reportFile  = reportsDir </> logFileName

       handleMissingLogFile $ do
         buildLog <- readFile logFile
         createDirectoryIfMissing True reportsDir -- FIXME
         writeFile reportFile (show (BuildReports.show report, buildLog))

  | (report, Just Repo { repoKind = Left remoteRepo }) <- reports
  , isLikelyToHaveLogFile (BuildReports.installOutcome report) ]

  where
    isLikelyToHaveLogFile BuildReports.ConfigureFailed {} = True
    isLikelyToHaveLogFile BuildReports.BuildFailed     {} = True
    isLikelyToHaveLogFile BuildReports.InstallFailed   {} = True
    isLikelyToHaveLogFile BuildReports.InstallOk       {} = True
    isLikelyToHaveLogFile _                               = False

    -- A missing log file is non-fatal: warn and carry on with the rest.
    handleMissingLogFile = Exception.handleJust missingFile $ \ioe ->
      warn verbosity $ "Missing log file for build report: "
                       ++ fromMaybe "" (ioeGetFileName ioe)

    -- Select only does-not-exist IO errors for the handler above.
    missingFile ioe
      | isDoesNotExistError ioe = Just ioe
    missingFile _               = Nothing
-- | Regenerate the user's (or sandbox's) haddock documentation index, but
-- only when an index file was requested via the install flags AND the plan
-- actually installed some documentation into a per-user/per-sandbox db.
regenerateHaddockIndex :: Verbosity
                       -> [PackageDB]
                       -> Compiler
                       -> Platform
                       -> ProgramConfiguration
                       -> UseSandbox
                       -> ConfigFlags
                       -> InstallFlags
                       -> InstallPlan
                       -> IO ()
regenerateHaddockIndex verbosity packageDBs comp platform conf useSandbox
                       configFlags installFlags installPlan
  | haddockIndexFileIsRequested && shouldRegenerateHaddockIndex = do

  defaultDirs <- InstallDirs.defaultInstallDirs
                   (compilerFlavor comp)
                   (fromFlag (configUserInstall configFlags))
                   True
  let indexFileTemplate = fromFlag (installHaddockIndex installFlags)
      indexFile = substHaddockIndexFileName defaultDirs indexFileTemplate

  notice verbosity $
    "Updating documentation index " ++ indexFile

  --TODO: might be nice if the install plan gave us the new InstalledPackageInfo
  installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
  Haddock.regenerateHaddockIndex verbosity installedPkgIndex conf indexFile

  | otherwise = return ()
  where
    haddockIndexFileIsRequested =
         fromFlag (installDocumentation installFlags)
      && isJust (flagToMaybe (installHaddockIndex installFlags))

    -- We want to regenerate the index if some new documentation was actually
    -- installed. Since the index can be only per-user or per-sandbox (see
    -- #1337), we don't do it for global installs or special cases where we're
    -- installing into a specific db.
    shouldRegenerateHaddockIndex = (isUseSandbox useSandbox || normalUserInstall)
                                   && someDocsWereInstalled installPlan
      where
        someDocsWereInstalled = any installedDocs . InstallPlan.toList
        normalUserInstall     = (UserPackageDB `elem` packageDBs)
                                && all (not . isSpecificPackageDB) packageDBs

        installedDocs (InstallPlan.Installed _ (BuildOk DocsOk _ _)) = True
        installedDocs _                                              = False
        isSpecificPackageDB (SpecificPackageDB _) = True
        isSpecificPackageDB _                     = False

    -- Expand the index-file path template with compiler/platform and
    -- resolved install-dir variables.
    substHaddockIndexFileName defaultDirs = fromPathTemplate
                                          . substPathTemplate env
      where
        env  = env0 ++ installDirsTemplateEnv absoluteDirs
        env0 = InstallDirs.compilerTemplateEnv (compilerInfo comp)
            ++ InstallDirs.platformTemplateEnv platform
            ++ InstallDirs.abiTemplateEnv (compilerInfo comp) platform
        absoluteDirs = InstallDirs.substituteInstallDirTemplates
                         env0 templateDirs
        templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault
                         defaultDirs (configInstallDirs configFlags)
-- | Symlink the executables of the newly installed packages into the
-- configured bindir, then warn (without failing) about any that could not
-- be linked because an unmanaged file already exists there.
symlinkBinaries :: Verbosity
                -> Compiler
                -> ConfigFlags
                -> InstallFlags
                -> InstallPlan -> IO ()
symlinkBinaries verbosity comp configFlags installFlags plan = do
  failed <- InstallSymlink.symlinkBinaries comp configFlags installFlags plan
  case failed of
    [] -> return ()
    -- Singular wording for one failure, plural wording otherwise.
    [(_, exe, path)] ->
      warn verbosity $
           "could not create a symlink in " ++ bindir ++ " for "
        ++ exe ++ " because the file exists there already but is not "
        ++ "managed by cabal. You can create a symlink for this executable "
        ++ "manually if you wish. The executable file has been installed at "
        ++ path
    exes ->
      warn verbosity $
           "could not create symlinks in " ++ bindir ++ " for "
        ++ intercalate ", " [ exe | (_, exe, _) <- exes ]
        ++ " because the files exist there already and are not "
        ++ "managed by cabal. You can create symlinks for these executables "
        ++ "manually if you wish. The executable files have been installed at "
        ++ intercalate ", " [ path | (_, _, path) <- exes ]
  where
    bindir = fromFlag (installSymlinkBinDir installFlags)
-- | If any package in the plan failed, print one explanatory line per
-- failure and 'die'; otherwise do nothing.
printBuildFailures :: InstallPlan -> IO ()
printBuildFailures plan =
  case [ (pkg, reason)
       | InstallPlan.Failed pkg reason <- InstallPlan.toList plan ] of
    []     -> return ()
    failed -> die . unlines
            $ "Error: some packages failed to install:"
            : [ display (packageId pkg) ++ printFailureReason reason
              | (pkg, reason) <- failed ]
  where
    printFailureReason reason = case reason of
      DependentFailed pkgid -> " depends on " ++ display pkgid
                            ++ " which failed to install."
      DownloadFailed  e -> " failed while downloading the package."
                        ++ showException e
      UnpackFailed    e -> " failed while unpacking the package."
                        ++ showException e
      ConfigureFailed e -> " failed during the configure step."
                        ++ showException e
      BuildFailed     e -> " failed during the building phase."
                        ++ showException e
      TestsFailed     e -> " failed during the tests phase."
                        ++ showException e
      InstallFailed   e -> " failed during the final install step."
                        ++ showException e

      -- This will never happen, but we include it for completeness
      PlanningFailed -> " failed during the planning phase."

    showException e = " The exception was:\n " ++ show e ++ maybeOOM e
#ifdef mingw32_HOST_OS
    -- No OOM-killer hint on Windows.
    maybeOOM _ = ""
#else
    -- Exit status 9 / -9 typically means the process was killed by signal 9,
    -- commonly the kernel's out-of-memory killer — so add a hint.
    maybeOOM e = maybe "" onExitFailure (fromException e)
    onExitFailure (ExitFailure n)
      | n == 9 || n == -9 =
        "\nThis may be due to an out-of-memory condition."
    onExitFailure _ = ""
#endif
-- | If we're working inside a sandbox and some add-source deps were installed,
-- update the timestamps of those deps.
updateSandboxTimestampsFile :: UseSandbox -> Maybe SandboxPackageInfo
                            -> Compiler -> Platform -> InstallPlan
                            -> IO ()
updateSandboxTimestampsFile (UseSandbox sandboxDir)
                            (Just (SandboxPackageInfo _ _ _ allAddSourceDeps))
                            comp platform installPlan =
  withUpdateTimestamps sandboxDir (compilerId comp) platform $ \_ -> do
    -- Of everything that was installed, keep the locally-unpacked source
    -- paths and intersect (after canonicalization) with the known
    -- add-source dependencies of this sandbox.
    let allInstalled = [ pkg | InstallPlan.Installed pkg _
                           <- InstallPlan.toList installPlan ]
        allSrcPkgs   = [ pkg | ReadyPackage pkg _ _ _ <- allInstalled ]
        allPaths     = [ pth | LocalUnpackedPackage pth
                           <- map packageSource allSrcPkgs]
    allPathsCanonical <- mapM tryCanonicalizePath allPaths
    -- '$!' forces the filtered list before handing it to the callback.
    return $! filter (`S.member` allAddSourceDeps) allPathsCanonical

-- Not in a sandbox (or no sandbox package info): nothing to update.
updateSandboxTimestampsFile _ _ _ _ _ = return ()
-- ------------------------------------------------------------
-- * Actually do the installations
-- ------------------------------------------------------------
-- | Assorted installer options that do not fit into the main flag records.
data InstallMisc = InstallMisc {
    -- | External command (e.g. @sudo@) used to re-exec the install step;
    -- 'Nothing' means install in-process as the current user.
    rootCmd :: Maybe FilePath,
    -- | Exact Cabal library version to use for the setup script, if pinned.
    libVersion :: Maybe Version
  }
-- | If logging is enabled, contains the function that computes the log file
-- location for a given package (and its package key), together with the
-- verbosity level to use for logging. 'Nothing' means log to stdout.
type UseLogFile = Maybe (PackageIdentifier -> PackageKey -> FilePath, Verbosity)
-- | Run the actual installations for every package in the plan: set up job
-- control, job limits and locks, then drive 'executeInstallPlan' with the
-- fetch/unpack/build/install pipeline for each ready package.
performInstallations :: Verbosity
                     -> InstallArgs
                     -> InstalledPackageIndex
                     -> InstallPlan
                     -> IO InstallPlan
performInstallations verbosity
  (packageDBs, _, comp, _, conf, useSandbox, _,
   globalFlags, configFlags, configExFlags, installFlags, haddockFlags)
  installedPkgIndex installPlan = do

  -- With 'install -j' it can be a bit hard to tell whether a sandbox is used.
  whenUsingSandbox useSandbox $ \sandboxDir ->
    when parallelInstall $
      notice verbosity $ "Notice: installing into a sandbox located at "
                         ++ sandboxDir

  jobControl  <- if parallelInstall then newParallelJobControl
                                    else newSerialJobControl
  buildLimit  <- newJobLimit numJobs
  fetchLimit  <- newJobLimit (min numJobs numFetchJobs)
  installLock <- newLock -- serialise installation
  cacheLock   <- newLock -- serialise access to setup exe cache

  executeInstallPlan verbosity comp jobControl useLogFile installPlan $ \rpkg ->
    -- Calculate the package key (ToDo: Is this right for source install)
    let pkg_key = readyPackageKey comp rpkg in
    installReadyPackage platform cinfo configFlags
                        rpkg $ \configFlags' src pkg pkgoverride ->
      fetchSourcePackage verbosity fetchLimit src $ \src' ->
        installLocalPackage verbosity buildLimit
                            (packageId pkg) src' distPref $ \mpath ->
          installUnpackedPackage verbosity buildLimit installLock numJobs pkg_key
                                 (setupScriptOptions installedPkgIndex cacheLock rpkg)
                                 miscOptions configFlags' installFlags haddockFlags
                                 cinfo platform pkg pkgoverride mpath useLogFile

  where
    platform = InstallPlan.planPlatform installPlan
    cinfo    = InstallPlan.planCompiler installPlan

    numJobs         = determineNumJobs (installNumJobs installFlags)
    numFetchJobs    = 2  -- downloads are capped independently of -j
    parallelInstall = numJobs >= 2
    distPref        = fromFlagOrDefault (useDistPref defaultSetupScriptOptions)
                        (configDistPref configFlags)

    setupScriptOptions index lock rpkg =
      configureSetupScript
        packageDBs
        comp
        platform
        conf
        distPref
        (chooseCabalVersion configExFlags (libVersion miscOptions))
        (Just lock)
        parallelInstall
        index
        (Just rpkg)

    reportingLevel = fromFlag (installBuildReports installFlags)
    logsDir        = fromFlag (globalLogsDir globalFlags)

    -- Should the build output be written to a log file instead of stdout?
    useLogFile :: UseLogFile
    useLogFile = fmap ((\f -> (f, loggingVerbosity)) . substLogFileName)
                 logFileTemplate
      where
        installLogFile' = flagToMaybe $ installLogFile installFlags
        defaultTemplate = toPathTemplate $ logsDir </> "$pkgid" <.> "log"

        -- If the user has specified --remote-build-reporting=detailed, use the
        -- default log file location. If the --build-log option is set, use the
        -- provided location. Otherwise don't use logging, unless building in
        -- parallel (in which case the default location is used).
        logFileTemplate :: Maybe PathTemplate
        logFileTemplate
          | useDefaultTemplate = Just defaultTemplate
          | otherwise          = installLogFile'

        -- If the user has specified --remote-build-reporting=detailed or
        -- --build-log, use more verbose logging.
        loggingVerbosity :: Verbosity
        loggingVerbosity | overrideVerbosity = max Verbosity.verbose verbosity
                         | otherwise         = verbosity

        useDefaultTemplate :: Bool
        useDefaultTemplate
          | reportingLevel == DetailedReports = True
          | isJust installLogFile'            = False
          | parallelInstall                   = True
          | otherwise                         = False

        overrideVerbosity :: Bool
        overrideVerbosity
          | reportingLevel == DetailedReports = True
          | isJust installLogFile'            = True
          | parallelInstall                   = False
          | otherwise                         = False

    -- Expand the log-file template for a particular package/key.
    substLogFileName :: PathTemplate -> PackageIdentifier -> PackageKey -> FilePath
    substLogFileName template pkg pkg_key = fromPathTemplate
                                          . substPathTemplate env
                                          $ template
      where env = initialPathTemplateEnv (packageId pkg) pkg_key
                    (compilerInfo comp) platform

    miscOptions = InstallMisc {
      rootCmd = if fromFlag (configUserInstall configFlags)
                   || (isUseSandbox useSandbox)
                  then Nothing     -- ignore --root-cmd if --user
                                   -- or working inside a sandbox.
                  else flagToMaybe (installRootCmd installFlags),
      libVersion = flagToMaybe (configCabalVersion configExFlags)
    }
-- | Drive the install plan to completion: spawn one job per ready package
-- through the given 'JobControl', feed each finished job's result back into
-- the plan (which may unblock dependents), and repeat until nothing is
-- ready and no tasks are in flight.
executeInstallPlan :: Verbosity
                   -> Compiler
                   -> JobControl IO (PackageId, PackageKey, BuildResult)
                   -> UseLogFile
                   -> InstallPlan
                   -> (ReadyPackage -> IO BuildResult)
                   -> IO InstallPlan
executeInstallPlan verbosity comp jobCtl useLogFile plan0 installPkg =
    tryNewTasks 0 plan0
  where
    -- Spawn jobs for all currently-ready packages; with none ready, either
    -- finish (no tasks outstanding) or wait for one to complete.
    tryNewTasks taskCount plan = do
      case InstallPlan.ready plan of
        [] | taskCount == 0 -> return plan
           | otherwise      -> waitForTasks taskCount plan
        pkgs                -> do
          sequence_
            [ do info verbosity $ "Ready to install " ++ display pkgid
                 spawnJob jobCtl $ do
                   buildResult <- installPkg pkg
                   return (packageId pkg, pkg_key, buildResult)
            | pkg <- pkgs
            , let pkgid   = packageId pkg
                  pkg_key = readyPackageKey comp pkg ]

          let taskCount' = taskCount + length pkgs
              plan'      = InstallPlan.processing pkgs plan
          waitForTasks taskCount' plan'

    -- Block until one job finishes, record its outcome in the plan, then go
    -- back to scheduling anything it may have unblocked.
    waitForTasks taskCount plan = do
      info verbosity $ "Waiting for install task to finish..."
      (pkgid, pkg_key, buildResult) <- collectJob jobCtl
      printBuildResult pkgid pkg_key buildResult
      let taskCount' = taskCount-1
          plan'      = updatePlan pkgid buildResult plan
      tryNewTasks taskCount' plan'

    updatePlan :: PackageIdentifier -> BuildResult -> InstallPlan -> InstallPlan
    updatePlan pkgid (Right buildSuccess) =
      InstallPlan.completed (Source.fakeInstalledPackageId pkgid) buildSuccess

    updatePlan pkgid (Left buildFailure) =
      InstallPlan.failed (Source.fakeInstalledPackageId pkgid) buildFailure depsFailure
      where
        depsFailure = DependentFailed pkgid
        -- So this first pkgid failed for whatever reason (buildFailure).
        -- All the other packages that depended on this pkgid, which we
        -- now cannot build, we mark as failing due to 'DependentFailed'
        -- which kind of means it was not their fault.

    -- Print build log if something went wrong, and 'Installed $PKGID'
    -- otherwise.
    printBuildResult :: PackageId -> PackageKey -> BuildResult -> IO ()
    printBuildResult pkgid pkg_key buildResult = case buildResult of
        (Right _) -> notice verbosity $ "Installed " ++ display pkgid
        (Left _)  -> do
          notice verbosity $ "Failed to install " ++ display pkgid
          when (verbosity >= normal) $
            case useLogFile of
              Nothing                 -> return ()
              Just (mkLogFileName, _) -> do
                let logName = mkLogFileName pkgid pkg_key
                putStr $ "Build log ( " ++ logName ++ " ):\n"
                printFile logName

    printFile :: FilePath -> IO ()
    printFile path = readFile path >>= putStr
-- | Call an installer for an 'SourcePackage' but override the configure
-- flags with the ones given by the 'ReadyPackage'. In particular the
-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly
-- versioned package dependencies. So we ignore any previous partial flag
-- assignment or dependency constraints and use the new ones.
--
-- NB: when updating this function, don't forget to also update
-- 'configurePackage' in D.C.Configure.
installReadyPackage :: Platform -> CompilerInfo
                    -> ConfigFlags
                    -> ReadyPackage
                    -> (ConfigFlags -> PackageLocation (Maybe FilePath)
                        -> PackageDescription
                        -> PackageDescriptionOverride -> a)
                    -> a
installReadyPackage platform cinfo configFlags
                    (ReadyPackage (SourcePackage _ gpkg source pkgoverride)
                                  flags stanzas deps)
                    installPkg = installPkg configFlags {
  -- The solver has fully decided the flag assignment; pass it verbatim.
  configConfigurationsFlags = flags,
  -- We generate the legacy constraints as well as the new style precise deps.
  -- In the end only one set gets passed to Setup.hs configure, depending on
  -- the Cabal version we are talking to.
  configConstraints  = [ thisPackageVersion (packageId deppkg)
                       | deppkg <- CD.nonSetupDeps deps ],
  configDependencies = [ (packageName (Installed.sourcePackageId deppkg),
                          Installed.installedPackageId deppkg)
                       | deppkg <- CD.nonSetupDeps deps ],
  -- Use '--exact-configuration' if supported.
  configExactConfiguration = toFlag True,
  configBenchmarks         = toFlag False,
  configTests              = toFlag (TestStanzas `elem` stanzas)
} source pkg pkgoverride
  where
    -- Finalization cannot fail here since the solver already produced a
    -- consistent flag assignment — hence the 'error' on the Left branch is
    -- treated as unreachable.
    pkg = case finalizePackageDescription flags
                 (const True)
                 platform cinfo [] (enableStanzas stanzas gpkg) of
      Left _          -> error "finalizePackageDescription ReadyPackage failed"
      Right (desc, _) -> desc
-- | Ensure a package's source is available locally, downloading it under
-- the fetch job limit if necessary, and hand the concrete local location to
-- the continuation. Download problems are reported as 'DownloadFailed'.
fetchSourcePackage
  :: Verbosity
  -> JobLimit
  -> PackageLocation (Maybe FilePath)
  -> (PackageLocation FilePath -> IO BuildResult)
  -> IO BuildResult
fetchSourcePackage verbosity fetchLimit src installPkg =
    checkFetched src >>= maybe download installPkg
  where
    -- Not yet fetched: download it (bounded by the fetch limit) and then
    -- continue with the now-local location.
    download = onFailure DownloadFailed $ do
      localSrc <- withJobLimit fetchLimit $
                    fetchPackage verbosity src
      installPkg localSrc
-- | Install a package from a local source location. An already-unpacked
-- directory is handed straight to the installer callback; every tarball
-- variant (local, remote, or repo) goes through
-- 'installLocalTarballPackage' to be unpacked first.
installLocalPackage
  :: Verbosity
  -> JobLimit
  -> PackageIdentifier -> PackageLocation FilePath -> FilePath
  -> (Maybe FilePath -> IO BuildResult)
  -> IO BuildResult
installLocalPackage verbosity jobLimit pkgid location distPref installPkg =
  case location of
    LocalUnpackedPackage dir       -> installPkg (Just dir)
    LocalTarballPackage tarball    -> unpackFirst tarball
    RemoteTarballPackage _ tarball -> unpackFirst tarball
    RepoTarballPackage _ _ tarball -> unpackFirst tarball
  where
    -- All tarball variants share the same unpack-then-install path.
    unpackFirst tarball =
      installLocalTarballPackage verbosity jobLimit
        pkgid tarball distPref installPkg
-- | Unpack a source tarball into a fresh temporary directory, verify the
-- expected .cabal file is present, fix up the dist directory name if
-- needed, and then run the installer callback on the unpacked tree.
-- Unpacking problems are reported as 'UnpackFailed'.
installLocalTarballPackage
  :: Verbosity
  -> JobLimit
  -> PackageIdentifier -> FilePath -> FilePath
  -> (Maybe FilePath -> IO BuildResult)
  -> IO BuildResult
installLocalTarballPackage verbosity jobLimit pkgid
                           tarballPath distPref installPkg = do
  tmp <- getTemporaryDirectory
  withTempDirectory verbosity tmp "cabal-tmp" $ \tmpDirPath ->
    onFailure UnpackFailed $ do
      let relUnpackedPath = display pkgid
          absUnpackedPath = tmpDirPath </> relUnpackedPath
          descFilePath = absUnpackedPath
                     </> display (packageName pkgid) <.> "cabal"
      -- Extraction happens under the job limit; the install itself doesn't.
      withJobLimit jobLimit $ do
        info verbosity $ "Extracting " ++ tarballPath
                      ++ " to " ++ tmpDirPath ++ "..."
        extractTarGzFile tmpDirPath relUnpackedPath tarballPath
        exists <- doesFileExist descFilePath
        when (not exists) $
          die $ "Package .cabal file not found: " ++ show descFilePath
        maybeRenameDistDir absUnpackedPath
      installPkg (Just absUnpackedPath)

  where
    -- 'cabal sdist' puts pre-generated files in the 'dist'
    -- directory. This fails when a nonstandard build directory name
    -- is used (as is the case with sandboxes), so we need to rename
    -- the 'dist' dir here.
    --
    -- TODO: 'cabal get happy && cd sandbox && cabal install ../happy' still
    -- fails even with this workaround. We probably can live with that.
    maybeRenameDistDir :: FilePath -> IO ()
    maybeRenameDistDir absUnpackedPath = do
      let distDirPath    = absUnpackedPath </> defaultDistPref
          distDirPathTmp = absUnpackedPath </> (defaultDistPref ++ "-tmp")
          distDirPathNew = absUnpackedPath </> distPref
      distDirExists <- doesDirectoryExist distDirPath
      when (distDirExists
            && (not $ distDirPath `equalFilePath` distDirPathNew)) $ do
        -- NB: we need to handle the case when 'distDirPathNew' is a
        -- subdirectory of 'distDirPath' (e.g. the former is
        -- 'dist/dist-sandbox-3688fbc2' and the latter is 'dist').
        -- The rename goes via a "-tmp" name for exactly that reason.
        debug verbosity $ "Renaming '" ++ distDirPath ++ "' to '"
          ++ distDirPathTmp ++ "'."
        renameDirectory distDirPath distDirPathTmp
        when (distDirPath `isPrefixOf` distDirPathNew) $
          createDirectoryIfMissingVerbose verbosity False distDirPath
        debug verbosity $ "Renaming '" ++ distDirPathTmp ++ "' to '"
          ++ distDirPathNew ++ "'."
        renameDirectory distDirPathTmp distDirPathNew
-- | Run the full per-package pipeline on an already-unpacked source tree:
-- optionally apply a .cabal revision, then configure, build, generate docs,
-- test, and finally copy+register — each phase nested inside the previous
-- one's 'onFailure' so an earlier failure short-circuits the rest with the
-- appropriate 'BuildFailure' tag.
installUnpackedPackage
  :: Verbosity
  -> JobLimit
  -> Lock
  -> Int
  -> PackageKey
  -> SetupScriptOptions
  -> InstallMisc
  -> ConfigFlags
  -> InstallFlags
  -> HaddockFlags
  -> CompilerInfo
  -> Platform
  -> PackageDescription
  -> PackageDescriptionOverride
  -> Maybe FilePath -- ^ Directory to change to before starting the installation.
  -> UseLogFile -- ^ File to log output to (if any)
  -> IO BuildResult
installUnpackedPackage verbosity buildLimit installLock numJobs pkg_key
                       scriptOptions miscOptions
                       configFlags installFlags haddockFlags
                       cinfo platform pkg pkgoverride workingDir useLogFile = do

  -- Override the .cabal file if necessary
  case pkgoverride of
    Nothing -> return ()
    Just pkgtxt -> do
      let descFilePath = fromMaybe "." workingDir
                     </> display (packageName pkgid) <.> "cabal"
      info verbosity $
        "Updating " ++ display (packageName pkgid) <.> "cabal"
                    ++ " with the latest revision from the index."
      writeFileAtomic descFilePath pkgtxt

  -- Make sure that we pass --libsubdir etc to 'setup configure' (necessary if
  -- the setup script was compiled against an old version of the Cabal lib).
  configFlags' <- addDefaultInstallDirs configFlags
  -- Filter out flags not supported by the old versions of the Cabal lib.
  let configureFlags :: Version -> ConfigFlags
      configureFlags = filterConfigureFlags configFlags' {
        configVerbosity = toFlag verbosity'
      }

  -- Path to the optional log file.
  mLogPath <- maybeLogPath

  -- Configure phase
  onFailure ConfigureFailed $ withJobLimit buildLimit $ do
    when (numJobs > 1) $ notice verbosity $
      "Configuring " ++ display pkgid ++ "..."
    setup configureCommand configureFlags mLogPath

    -- Build phase
    onFailure BuildFailed $ do
      when (numJobs > 1) $ notice verbosity $
        "Building " ++ display pkgid ++ "..."
      setup buildCommand' buildFlags mLogPath

      -- Doc generation phase
      -- (haddock failures downgrade to DocsFailed rather than aborting)
      docsResult <- if shouldHaddock
        then (do setup haddockCommand haddockFlags' mLogPath
                 return DocsOk)
               `catchIO`   (\_ -> return DocsFailed)
               `catchExit` (\_ -> return DocsFailed)
        else return DocsNotTried

      -- Tests phase
      onFailure TestsFailed $ do
        when (testsEnabled && PackageDescription.hasTests pkg) $
          setup Cabal.testCommand testFlags mLogPath

        let testsResult | testsEnabled = TestsOk
                        | otherwise    = TestsNotTried

        -- Install phase
        -- (serialised through installLock so only one install runs at once)
        onFailure InstallFailed $ criticalSection installLock $ do
          -- Capture installed package configuration file
          maybePkgConf <- maybeGenPkgConf mLogPath

          -- Actual installation
          withWin32SelfUpgrade verbosity pkg_key configFlags cinfo platform pkg $ do
            case rootCmd miscOptions of
              (Just cmd) -> reexec cmd
              Nothing    -> do
                setup Cabal.copyCommand copyFlags mLogPath
                when shouldRegister $ do
                  setup Cabal.registerCommand registerFlags mLogPath
          return (Right (BuildOk docsResult testsResult maybePkgConf))

  where
    pkgid         = packageId pkg
    buildCommand' = buildCommand defaultProgramConfiguration
    buildFlags _  = emptyBuildFlags {
      buildDistPref  = configDistPref configFlags,
      buildVerbosity = toFlag verbosity'
    }
    shouldHaddock   = fromFlag (installDocumentation installFlags)
    haddockFlags' _ = haddockFlags {
      haddockVerbosity = toFlag verbosity',
      haddockDistPref  = configDistPref configFlags
    }
    testsEnabled = fromFlag (configTests configFlags)
                   && fromFlagOrDefault False (installRunTests installFlags)
    testFlags _ = Cabal.emptyTestFlags {
      Cabal.testDistPref = configDistPref configFlags
    }
    copyFlags _ = Cabal.emptyCopyFlags {
      Cabal.copyDistPref  = configDistPref configFlags,
      Cabal.copyDest      = toFlag InstallDirs.NoCopyDest,
      Cabal.copyVerbosity = toFlag verbosity'
    }
    shouldRegister  = PackageDescription.hasLibs pkg
    registerFlags _ = Cabal.emptyRegisterFlags {
      Cabal.regDistPref  = configDistPref configFlags,
      Cabal.regVerbosity = toFlag verbosity'
    }
    -- When logging to file, use the log file's verbosity for setup output.
    verbosity' = maybe verbosity snd useLogFile
    tempTemplate name = name ++ "-" ++ display pkgid

    -- Fill in any unset install-dir flags from the compiler's defaults so
    -- older setup scripts still get explicit --libsubdir etc.
    addDefaultInstallDirs :: ConfigFlags -> IO ConfigFlags
    addDefaultInstallDirs configFlags' = do
      defInstallDirs <- InstallDirs.defaultInstallDirs flavor userInstall False
      return $ configFlags' {
          configInstallDirs = fmap Cabal.Flag .
                              InstallDirs.substituteInstallDirTemplates env $
                                InstallDirs.combineInstallDirs fromFlagOrDefault
                                  defInstallDirs (configInstallDirs configFlags)
          }
      where
        CompilerId flavor _ = compilerInfoId cinfo
        env         = initialPathTemplateEnv pkgid pkg_key cinfo platform
        userInstall = fromFlagOrDefault defaultUserInstall
                        (configUserInstall configFlags')

    -- For packages with libraries: run 'setup register --gen-pkg-config'
    -- into a temp file and parse the resulting InstalledPackageInfo.
    maybeGenPkgConf :: Maybe FilePath
                    -> IO (Maybe Installed.InstalledPackageInfo)
    maybeGenPkgConf mLogPath =
      if shouldRegister then do
        tmp <- getTemporaryDirectory
        withTempFile tmp (tempTemplate "pkgConf") $ \pkgConfFile handle -> do
          hClose handle
          let registerFlags' version = (registerFlags version) {
                Cabal.regGenPkgConf = toFlag (Just pkgConfFile)
              }
          setup Cabal.registerCommand registerFlags' mLogPath
          withUTF8FileContents pkgConfFile $ \pkgConfText ->
            case Installed.parseInstalledPackageInfo pkgConfText of
              Installed.ParseFailed perror    -> pkgConfParseFailed perror
              Installed.ParseOk warns pkgConf -> do
                unless (null warns) $
                  warn verbosity $ unlines (map (showPWarning pkgConfFile) warns)
                return (Just pkgConf)
      else return Nothing

    pkgConfParseFailed :: Installed.PError -> IO a
    pkgConfParseFailed perror =
      die $ "Couldn't parse the output of 'setup register --gen-pkg-config':"
            ++ show perror

    -- Compute (and clear out) the per-package log file, creating its
    -- directory if needed.
    maybeLogPath :: IO (Maybe FilePath)
    maybeLogPath =
      case useLogFile of
        Nothing                 -> return Nothing
        Just (mkLogFileName, _) -> do
          let logFileName = mkLogFileName (packageId pkg) pkg_key
              logDir      = takeDirectory logFileName
          unless (null logDir) $ createDirectoryIfMissing True logDir
          logFileExists <- doesFileExist logFileName
          when logFileExists $ removeFile logFileName
          return (Just logFileName)

    -- Run one setup command, appending its output to the log file (if any);
    -- 'bracket' guarantees the log handle is closed.
    setup cmd flags mLogPath =
      Exception.bracket
      (traverse (\path -> openFile path AppendMode) mLogPath)
      (traverse_ hClose)
      (\logFileHandle ->
        setupWrapper verbosity
          scriptOptions { useLoggingHandle = logFileHandle
                        , useWorkingDir   = workingDir }
          (Just pkg)
          cmd flags [])

    reexec cmd = do
      -- look for our own executable file and re-exec ourselves using a helper
      -- program like sudo to elevate privileges:
      self <- getExecutablePath
      weExist <- doesFileExist self
      if weExist
        then inDir workingDir $
               rawSystemExit verbosity cmd
                 [self, "install", "--only"
                 ,"--verbose=" ++ showForCabal verbosity]
        else die $ "Unable to find cabal executable at: " ++ self
-- helper
-- | Run a build action, converting any synchronous 'IOException' or
-- 'ExitCode' exception into a failed 'BuildResult' (via the supplied
-- wrapper) instead of letting the exception propagate.
onFailure :: (SomeException -> BuildFailure) -> IO BuildResult -> IO BuildResult
onFailure wrap action = catches action [ioHandler, exitHandler]
  where
    ioHandler   = Handler $ \e -> failWith (e :: IOException)
    exitHandler = Handler $ \e -> failWith (e :: ExitCode)

    -- Wrap the concrete exception back up as a 'SomeException' and
    -- report it as the failure result.
    failWith :: Exception e => e -> IO BuildResult
    failWith = return . Left . wrap . toException
-- ------------------------------------------------------------
-- * Weird windows hacks
-- ------------------------------------------------------------
-- | Wrapper for an action that may replace installed executables.
-- On non-Windows systems it is a no-op; on Windows it hands the
-- package's executable install paths to
-- 'Win32SelfUpgrade.possibleSelfUpgrade' before running the action
-- (the exact self-upgrade workaround lives in that module).
withWin32SelfUpgrade :: Verbosity
-> PackageKey
-> ConfigFlags
-> CompilerInfo
-> Platform
-> PackageDescription
-> IO a -> IO a
-- Fast path: nothing to do unless we are actually running on Windows.
withWin32SelfUpgrade _ _ _ _ _ _ action | buildOS /= Windows = action
withWin32SelfUpgrade verbosity pkg_key configFlags cinfo platform pkg action = do
defaultDirs <- InstallDirs.defaultInstallDirs
compFlavor
(fromFlag (configUserInstall configFlags))
(PackageDescription.hasLibs pkg)
Win32SelfUpgrade.possibleSelfUpgrade verbosity
(exeInstallPaths defaultDirs) action
where
pkgid = packageId pkg
(CompilerId compFlavor _) = compilerInfoId cinfo
-- Absolute install paths of every buildable executable of the package,
-- with the configured program prefix/suffix applied to each name.
exeInstallPaths defaultDirs =
[ InstallDirs.bindir absoluteDirs </> exeName <.> exeExtension
| exe <- PackageDescription.executables pkg
, PackageDescription.buildable (PackageDescription.buildInfo exe)
, let exeName = prefix ++ PackageDescription.exeName exe ++ suffix
prefix = substTemplate prefixTemplate
suffix = substTemplate suffixTemplate ]
where
-- An unset prefix/suffix flag defaults to the empty template.
fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "")
prefixTemplate = fromFlagTemplate (configProgPrefix configFlags)
suffixTemplate = fromFlagTemplate (configProgSuffix configFlags)
templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault
defaultDirs (configInstallDirs configFlags)
absoluteDirs = InstallDirs.absoluteInstallDirs
pkgid pkg_key
cinfo InstallDirs.NoCopyDest
platform templateDirs
-- Expand path templates (e.g. @$prefix@) in the package's environment.
substTemplate = InstallDirs.fromPathTemplate
. InstallDirs.substPathTemplate env
where env = InstallDirs.initialPathTemplateEnv pkgid pkg_key cinfo platform
| ian-ross/cabal | cabal-install/Distribution/Client/Install.hs | bsd-3-clause | 65,207 | 0 | 31 | 18,175 | 12,402 | 6,429 | 5,973 | 1,152 | 12 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Shed.Types where
import Control.Monad.Base (MonadBase(..), liftBaseDefault)
import Control.Monad.Trans.Control
( MonadBaseControl(..)
, MonadTransControl(..)
, ComposeSt
, defaultLiftBaseWith
, defaultRestoreM
)
import Control.Monad.Except
import Control.Monad.Reader
import Database.Persist.Sql
import Servant
-- | Our application's monad transformer: a 'ReaderT' carrying the
-- database 'ConnectionPool', with 'ServantErr' short-circuiting via
-- 'ExceptT', layered over an arbitrary base monad @m@.
newtype AppT m a = App
{ runApp :: ReaderT ConnectionPool (ExceptT ServantErr m) a
} deriving
( Monad
, Functor
, Applicative
, MonadReader ConnectionPool
, MonadIO
, MonadError ServantErr
)
-- Lift through both inner transformer layers (ReaderT, then ExceptT).
instance MonadTrans AppT where
lift = App . lift . lift
instance MonadBase b m => MonadBase b (AppT m) where
liftBase = liftBaseDefault
-- Hand-written MonadTransControl: the monadic state of AppT is the
-- composition of the states of its two inner transformers.
instance MonadTransControl AppT where
type StT AppT a = StT (ExceptT ServantErr) (StT (ReaderT ConnectionPool) a)
liftWith f = App $ liftWith $ \run ->
liftWith $ \run' ->
f (run' . run . runApp)
restoreT = App . restoreT . restoreT
-- With MonadTransControl in place, MonadBaseControl is the standard
-- boilerplate via ComposeSt / defaultLiftBaseWith / defaultRestoreM.
instance MonadBaseControl b m => MonadBaseControl b (AppT m) where
type StM (AppT m) a = ComposeSt AppT m a
liftBaseWith = defaultLiftBaseWith
restoreM = defaultRestoreM
| wayofthepie/shed | src/Shed/Types.hs | bsd-3-clause | 1,377 | 0 | 13 | 280 | 355 | 200 | 155 | 40 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Cauterize.JavaScript.Files
( allFiles
) where
import qualified Data.ByteString as B
import Data.FileEmbed
-- | All support files embedded at compile time (via Template Haskell's
-- 'embedDir') from the package's @data/support@ directory, as pairs of
-- (path relative to that directory, raw file contents).
allFiles :: [(FilePath, B.ByteString)]
allFiles = $(embedDir "data/support")
| cauterize-tools/caut-javascript-ref | src/Cauterize/JavaScript/Files.hs | bsd-3-clause | 229 | 0 | 7 | 31 | 54 | 34 | 20 | 7 | 1 |
{-
Copyright (C) 2012-2017 Jimmy Liang, Kacper Bak, Michał Antkiewicz <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
module Language.Clafer.Intermediate.SimpleScopeAnalyzer (simpleScopeAnalysis) where
import Control.Applicative
import Control.Lens hiding (elements, assign)
import Data.Graph
import Data.List
import Data.Data.Lens (biplate)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Ord
import Data.Ratio
import Prelude hiding (exp)
import Language.Clafer.Common
import Language.Clafer.Intermediate.Intclafer
-- | Collects the global cardinality and hierarchy information into proper, not necessarily lower, bounds.
simpleScopeAnalysis :: IModule -> [(String, Integer)]
-- Returns (uid, scope) pairs, omitting the default scope of 1.
simpleScopeAnalysis iModule@IModule{_mDecls = decls'} =
[(a, b) | (a, b) <- finalAnalysis, b /= 1]
where
uidClaferMap' = createUidIClaferMap iModule
-- NOTE(review): fromJust — assumes every uid fed in comes from this
-- module's own maps and therefore resolves; confirm.
findClafer :: UID -> IClafer
findClafer uid' = fromJust $ findIClafer uidClaferMap' uid'
-- Fold the per-component analysis over the dependency SCCs, starting
-- from the supers+refs seed analysis.
finalAnalysis = Map.toList $ foldl analyzeComponent supersAndRefsAnalysis connectedComponents
upperCards u =
Map.findWithDefault (error $ "No upper cardinality for clafer named \"" ++ u ++ "\".") u upperCardsMap
-- NOTE(review): fromJust — assumes _card is set on every clafer here.
upperCardsMap = Map.fromList [(_uid c, snd $ fromJust $ _card c) | c <- clafers]
supersAnalysis = foldl (analyzeSupers uidClaferMap' clafers) Map.empty decls'
supersAndRefsAnalysis = foldl (analyzeRefs uidClaferMap' clafers) supersAnalysis decls'
constraintAnalysis = analyzeConstraints constraints upperCards
(subclaferMap, parentMap) = analyzeHierarchy uidClaferMap' clafers
connectedComponents = analyzeDependencies uidClaferMap' clafers
-- Every IClafer reachable anywhere inside the module (via biplate).
clafers :: [ IClafer ]
clafers = universeOn biplate iModule
constraints = concatMap findConstraints decls'
-- Effective per-parent multiplicity of a clafer: the largest of its
-- card bounds, constraint-implied lower bound, and the star cases.
lowerOrUpperFixedCard analysis' clafer =
maximum [cardLb, cardUb, lowFromConstraints, oneForStar, targetScopeForStar ]
where
Just (cardLb, cardUb) = _card clafer
-- A bare 0..* still needs at least one instance to be useful.
oneForStar = if (cardLb == 0 && cardUb == -1) then 1 else 0
-- A reference clafer with * upper bound is scoped by its target.
targetScopeForStar = if ((isJust $ _reference clafer) && cardUb == -1)
then case getReference clafer of
[ref'] -> Map.findWithDefault 1 (fromMaybe "unknown" $ _uid <$> findIClafer uidClaferMap' ref' ) analysis'
_ -> 0
else 0
lowFromConstraints = Map.findWithDefault 0 (_uid clafer) constraintAnalysis
analyzeComponent analysis' component =
case flattenSCC component of
[uid'] -> analyzeSingleton uid' analysis'
uids ->
foldr analyzeSingleton assume uids
where
-- assume that each of the scopes in the component is 1 while solving
assume = foldr (`Map.insert` 1) analysis' uids
where
analyzeSingleton uid' analysis'' = analyze analysis'' $ findClafer uid'
analyze :: Map String Integer -> IClafer -> Map String Integer
analyze analysis' clafer =
-- Take the max between the supers and references analysis and this analysis
Map.insertWith max (_uid clafer) scope analysis'
where
-- Abstract clafers need as many instances as all their subclafers
-- combined; concrete ones multiply by the parent's scope.
scope
| _isAbstract clafer = sum subclaferScopes
| otherwise = parentScope * (lowerOrUpperFixedCard analysis' clafer)
subclaferScopes = map (findOrError " subclafer scope not found" analysis') subclafers
parentScope =
case parentMaybe of
Just parent'' -> findOrError " parent scope not found" analysis' parent''
Nothing -> rootScope
subclafers = Map.findWithDefault [] (_uid clafer) subclaferMap
parentMaybe = Map.lookup (_uid clafer) parentMap
rootScope = 1
-- Lookup that fails loudly with the offending key in the message.
findOrError message m key = Map.findWithDefault (error $ key ++ message) key m
-- | Accumulate, for each super clafer, the SUM of the effective lower
-- bounds of its (non-reference) subclafers.  Recurses into nested
-- elements.  NOTE(review): near-duplicate of 'analyzeRefs' below —
-- the only differences are the reference test polarity and sum vs max;
-- consider unifying them behind a combining-function parameter.
analyzeSupers :: UIDIClaferMap -> [IClafer] -> Map String Integer -> IElement -> Map String Integer
analyzeSupers uidClaferMap' clafers analysis (IEClafer clafer) =
foldl (analyzeSupers uidClaferMap' clafers) analysis' (_elements clafer)
where
(Just (cardLb, cardUb)) = _card clafer
lowerOrFixedUpperBound = maximum [1, cardLb, cardUb ]
-- Reference clafers are handled by analyzeRefs, not here.
analysis' = if (isJust $ _reference clafer)
then analysis
else case (directSuper uidClaferMap' clafer) of
(Just c) -> Map.alter (incLB lowerOrFixedUpperBound) (_uid c) analysis
Nothing -> analysis
incLB lb' Nothing = Just lb'
incLB lb' (Just lb) = Just (lb + lb')
analyzeSupers _ _ analysis _ = analysis
-- | Like 'analyzeSupers', but for reference clafers: takes the MAX of
-- the effective lower bounds over all referrers of each target.
analyzeRefs :: UIDIClaferMap -> [IClafer] -> Map String Integer -> IElement -> Map String Integer
analyzeRefs uidClaferMap' clafers analysis (IEClafer clafer) =
foldl (analyzeRefs uidClaferMap' clafers) analysis' (_elements clafer)
where
(Just (cardLb, cardUb)) = _card clafer
lowerOrFixedUpperBound = maximum [1, cardLb, cardUb]
analysis' = if (isJust $ _reference clafer)
then case (directSuper uidClaferMap' clafer) of
(Just c) -> Map.alter (maxLB lowerOrFixedUpperBound) (_uid c) analysis
Nothing -> analysis
else analysis
maxLB lb' Nothing = Just lb'
maxLB lb' (Just lb) = Just (max lb lb')
analyzeRefs _ _ analysis _ = analysis
-- | Derive per-clafer lower bounds from the One/Some-quantified
-- constraints.  @upperCards@ caps each derived bound by the clafer's
-- declared upper cardinality (-1 meaning unbounded).
analyzeConstraints :: [PExp] -> (String -> Integer) -> Map String Integer
analyzeConstraints constraints upperCards =
foldr analyzeConstraint Map.empty $ filter isOneOrSomeConstraint constraints
where
isOneOrSomeConstraint PExp{_exp = IDeclPExp{_quant = quant'}} =
-- Only these two quantifiers require an increase in scope to satisfy.
case quant' of
IOne -> True
ISome -> True
_ -> False
isOneOrSomeConstraint _ = False
-- Only considers how quantifiers affect scope. Other types of constraints are not considered.
-- Constraints of the type [some path1.path2] or [no path1.path2], etc.
analyzeConstraint PExp{_exp = IDeclPExp{_oDecls = [], _bpexp = bpexp'}} analysis =
foldr atLeastOne analysis path'
where
path' = dropThisAndParent $ unfoldJoins bpexp'
atLeastOne = Map.insertWith max `flip` 1
-- Constraints of the type [all disj a : path1.path2] or [some b : path3.path4], etc.
analyzeConstraint PExp{_exp = IDeclPExp{_oDecls = decls'}} analysis =
foldr analyzeDecl analysis decls'
analyzeConstraint _ analysis = analysis
analyzeDecl IDecl{_isDisj = isDisj', _decls = decls', _body = body'} analysis =
foldr (uncurry insert') analysis $ zip path' scores
where
-- Take the first element in the path', and change its effective lower cardinality.
-- Can overestimate the scope.
path' = dropThisAndParent $ unfoldJoins body'
-- "disj a;b;c" implies at least 3 whereas "a;b;c" implies at least one.
minScope = if isDisj' then fromIntegral $ length decls' else 1
insert' = Map.insertWith max
scores = assign path' minScope
{-
- abstract Z
- C *
- D : integer *
-
- A : Z
- B : integer
- [some disj a;b;c;d : D | a = 1 && b = 2 && c = 3 && d = B]
-}
-- Need at least 4 D's per A.
-- Either
-- a) Make the effective lower cardinality of C=4 and D=1
-- b) Make the effective lower cardinality of C=1 and D=4
-- c) Some other combination.
-- Choose b, a greedy algorithm that starts from the lowest child progressing upwards.
{-
- abstract Z
- C *
- D : integer 3..*
-
- A : Z
- B : integer
- [some disj a;b;c;d : D | a = 1 && b = 2 && c = 3 && d = B]
-}
-- The algorithm we do is greedy so it will chose D=3.
-- However, it still needs more D's so it will choose C=2
-- C=2, D=3
-- This might not be optimum since now the scope allows for 6 D's.
-- A better solution might be C=2, D=2.
-- Well too bad, we are using the greedy algorithm.
assign [] _ = [1]
assign (p : ps) score =
pScore : ps'
where
--upper = upperCards p
ps' = assign ps score
-- Scope already provided by the descendants of p.
psScore = product $ ps'
-- Round up: we need at least `score` instances in total.
pDesireScore = ceiling (score % psScore)
pMaxScore = upperCards p
pScore = min' pDesireScore pMaxScore
-- -1 encodes "unbounded": never clamp by it.
min' a b = if b == -1 then a else min a b
-- Each child has at most one parent. No matter what the path in a quantifier
-- looks like, we ignore the this/parent prefix of the path.
dropThisAndParent = dropWhile (== "parent") . dropWhile (== "this")
-- | Strongly-connected components of the "analyze X before Y" relation
-- over clafer uids, in dependency order as produced by
-- 'stronglyConnComp' from Data.Graph.
analyzeDependencies :: UIDIClaferMap -> [IClafer] -> [SCC String]
analyzeDependencies uidClaferMap' clafers = connComponents
where
connComponents = stronglyConnComp [(key, key, depends) | (key, depends) <- dependencyGraph]
dependencies = concatMap (dependency uidClaferMap') clafers
-- Group edges by source node into adjacency lists.
dependencyGraph = Map.toList $ Map.fromListWith (++) [(a, [b]) | (a, b) <- dependencies]
-- | Edges (from, to) meaning "analyze `to` before `from`" for a single
-- clafer: itself, its direct super (if any), and its children.
dependency :: UIDIClaferMap -> IClafer -> [(String, String)]
dependency uidClaferMap' clafer =
selfDependency : (maybeToList superDependency ++ childDependencies)
where
-- This is to make the "stronglyConnComp" from Data.Graph play nice. Otherwise,
-- clafers with no dependencies will not appear in the result.
selfDependency = (_uid clafer, _uid clafer)
superDependency
| isNothing $ _super clafer = Nothing
| otherwise =
do
super' <- directSuper uidClaferMap' clafer
-- Need to analyze clafer before its super
return (_uid super', _uid clafer)
-- Need to analyze clafer before its children
childDependencies = [(_uid child, _uid clafer) | child <- childClafers clafer]
-- | Build two maps in one pass over all clafers:
-- (super uid -> uids of its direct subclafers, child uid -> parent uid).
analyzeHierarchy :: UIDIClaferMap -> [IClafer] -> (Map String [String], Map String String)
analyzeHierarchy uidClaferMap' clafers =
foldl hierarchy (Map.empty, Map.empty) clafers
where
hierarchy (subclaferMap, parentMap) clafer = (subclaferMap', parentMap')
where
subclaferMap' =
case super' of
Just super'' -> Map.insertWith (++) (_uid super'') [_uid clafer] subclaferMap
Nothing -> subclaferMap
super' = directSuper uidClaferMap' clafer
-- Every direct child maps to this clafer as its parent.
parentMap' = foldr (flip Map.insert $ _uid clafer) parentMap (map _uid $ childClafers clafer)
-- | The immediate super of a clafer: the second element of its
-- hierarchy chain (the first element is the clafer itself), or Nothing
-- for clafers at the top of a hierarchy.
directSuper :: UIDIClaferMap -> IClafer -> Maybe IClafer
directSuper uidClaferMap' clafer =
second $ findHierarchy getSuper uidClaferMap' clafer
where
second [] = Nothing
second [_] = Nothing
second (_:x:_) = Just x
-- Collect all constraint expressions, recursing into nested clafers.
findConstraints :: IElement -> [PExp]
findConstraints IEConstraint{_cpexp = c} = [c]
findConstraints (IEClafer clafer) = concatMap findConstraints (_elements clafer)
findConstraints _ = []
-- Finds all the direct children (one level down, not transitive).
childClafers :: IClafer -> [IClafer]
childClafers clafer = clafer ^.. elements.traversed.iClafer
-- | Unfold joins.
-- If the expression is a tree of only joins, flatten the joins into the
-- list of identifiers, left to right.
-- Otherwise, returns an empty list.
unfoldJoins :: PExp -> [String]
unfoldJoins pexp =
    fromMaybe [] (unfoldJoins' pexp)
  where
    unfoldJoins' :: PExp -> Maybe [String]
    -- A "." application: flatten all operands recursively.
    unfoldJoins' PExp{_exp = (IFunExp "." args)} =
        Just (concatMap unfoldJoins args)
    -- A plain identifier is a one-element path.
    unfoldJoins' PExp{_exp = IClaferId{_sident = sident'}} =
        Just [sident']
    -- Anything else is not a join.  (Previously `fail "not a join"`,
    -- which relied on Maybe's MonadFail instance; be explicit instead.)
    unfoldJoins' _ =
        Nothing
| juodaspaulius/clafer | src/Language/Clafer/Intermediate/SimpleScopeAnalyzer.hs | mit | 12,635 | 0 | 17 | 3,264 | 2,657 | 1,402 | 1,255 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RoleAnnotations #-}
{-# LANGUAGE DataKinds #-}
module System.HFind.Path where
import Data.Function
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.ByteString as B
import qualified System.Directory as D
import Unsafe.Coerce
-- Promoted kinds used as phantom indices on 'Path'.
data PathMode = Rel | Abs
data PathType = File | Dir
-- Shorthands for the promoted constructors.
type Rel = 'Rel
type Abs = 'Abs
type File = 'File
type Dir = 'Dir
-- A not-yet-classified path, straight from user input or the OS.
type RawPath = T.Text
-- Nominal roles prevent 'coerce' from silently changing the indices;
-- conversions must go through the checked functions below.
type role Path nominal nominal
-- | A path tagged at the type level with its mode (relative/absolute)
-- and what it points at (file/directory); just 'T.Text' at runtime.
newtype Path (t :: PathMode) (d :: PathType) = Path T.Text
deriving (Eq, Ord)
toText :: Path b t -> T.Text
toText (Path p) = p
{-# INLINE toText #-}
-- UTF-8 encodes the underlying text.
toByteString :: Path b t -> B.ByteString
toByteString = T.encodeUtf8 . toText
{-# INLINE toByteString #-}
toString :: Path b t -> FilePath
toString = T.unpack . toText
{-# INLINE toString #-}
-- | A symbolic link, wrapping the absolute file path of the link itself.
newtype Link = Link { getLinkPath :: Path Abs File }
instance Show (Path b t) where
show (Path p) = show p
instance Show Link where
show (Link p) = show p
-- | Reflect the promoted 'PathType' index back to a runtime value.
class IsPathType (t :: PathType) where
getPathType :: p t -> PathType
instance IsPathType 'File where
getPathType _ = File
instance IsPathType 'Dir where
getPathType _ = Dir
-- | Remove a single trailing @/@ if present.  The root @"/"@ is
-- rejected with 'error' (stripping it would change its meaning);
-- the empty path is returned unchanged.
dropTrailingSlash :: RawPath -> RawPath
dropTrailingSlash p
| p == "/" = -- WARNING: removing the slash here could cause a lot of pain
error "'/' passed to dropTrailingSlash"
| T.null p = p
| T.last p == '/' = T.init p
| otherwise = p
-- | Ensure the path ends with exactly one more @/@ than it needs to.
-- The empty path is rejected with 'error' (it would become @"/"@,
-- i.e. the root).
addTrailingSlash :: RawPath -> RawPath
addTrailingSlash p
| T.null p = -- WARNING: adding a slash here could cause a lot of pain
error "'' passed to addTrailingSlash"
| T.last p == '/' = p
| otherwise = T.snoc p '/'
-- | Reinterpret an absolute path as a file path (no trailing slash).
-- NOTE(review): the root @"/"@ maps to the empty path here — presumably
-- because the root has no file form; confirm callers expect this.
asFilePath :: Path Abs t -> Path Abs File
asFilePath (Path "") = Path ""
asFilePath (Path "/") = Path ""
asFilePath (Path p) = Path (dropTrailingSlash p)
-- | Reinterpret an absolute path as a directory path (with trailing
-- slash); the empty path and the root both become @"/"@.
asDirPath :: Path Abs t -> Path Abs Dir
asDirPath (Path "") = Path "/"
asDirPath (Path "/") = Path "/"
asDirPath (Path p) = Path (addTrailingSlash p)
-- | Convert between path types, normalising the trailing slash to match
-- the target type.  The 'unsafeCoerce' only changes the phantom index
-- of the 'Path' newtype after the text has been fixed up by
-- 'asFilePath'/'asDirPath', so no representation change occurs.
coercePath :: forall t t'. IsPathType t' => Path Abs t -> Path Abs t'
coercePath p =
case getPathType (undefined :: Path Abs t') of
File -> unsafeCoerce (asFilePath p)
Dir -> unsafeCoerce (asDirPath p)
-- | Append a relative path to a directory path.  Plain text
-- concatenation: correctness relies on the Dir operand carrying its
-- trailing slash (the invariant maintained by 'asDirPath').
(</>) :: Path b Dir -> Path Rel t -> Path b t
Path p </> Path p' = Path (p <> p')
{-# INLINE (</>) #-}
-- Unchecked constructors: the caller asserts the mode/type of the raw
-- text; no validation or normalisation is performed.
unsafeRelFile :: RawPath -> Path Rel File
unsafeRelFile = Path
unsafeRelDir :: RawPath -> Path Rel Dir
unsafeRelDir = Path
unsafeAbsFile :: RawPath -> Path Abs File
unsafeAbsFile = Path
unsafeAbsDir :: RawPath -> Path Abs Dir
unsafeAbsDir = Path
-- | Split an absolute path into (parent directory, last component).
-- The empty path and the root have no parent.  The returned directory
-- keeps its trailing slash ('T.breakOnEnd' splits after the last @/@).
unsnocPath :: Path Abs t -> Maybe (Path Abs Dir, T.Text)
unsnocPath (Path "") = Nothing
unsnocPath (Path "/") = Nothing
unsnocPath (Path fp) = Just (unsafeAbsDir dir, base)
where
(dir, base) = let noTrailing = T.dropWhileEnd (=='/') fp
in T.breakOnEnd "/" noTrailing
-- | The parent directory of a path, if it has one.
parent :: Path Abs t -> Maybe (Path Abs Dir)
parent = fmap fst . unsnocPath
-- | The last component of a path, if it has one.
filename :: Path Abs t -> Maybe T.Text
filename = fmap snd . unsnocPath
-- | Whether a raw path is absolute.  NOTE(review): the empty path
-- counts as absolute here — 'normalize' relies on this; confirm this
-- is intended for other callers.
isAbsolute :: RawPath -> Bool
isAbsolute p = T.null p || T.head p == '/'
-- | Normalise a raw path: collapse @//@, drop @.@ components, and
-- resolve @..@ against preceding components.  Returns 'Nothing' for
-- the empty path or when @..@ would escape past the start of the path.
normalize :: RawPath -> Maybe RawPath
normalize p = p
& T.splitOn "/"
& filter (not . T.null) -- remove //
& filter (/= ".") -- remove /./
& shortCircuit (0, []) -- remove /../
& \case
(0, p')
| T.null p -> Nothing
| isAbsolute p -> Just $ T.intercalate "/" ("":p') -- append root
| otherwise -> Just $ T.intercalate "/" p'
_ -> Nothing
where
-- Right fold carrying (pending ".." count, kept components): each
-- ".." increments the count; a real component is either kept (count
-- zero) or cancelled against one pending "..".  A nonzero final
-- count means ".." escaped the path.
shortCircuit :: (Int, [T.Text]) -> [T.Text] -> (Int, [T.Text])
shortCircuit = foldr $ \case
".." -> \(!i, p') -> (i+1, p')
d -> \(!i, p') -> if i == 0 then (0, d:p') else (i-1, p')
-- | Resolve a raw path against a parent directory and normalise it.
-- Absolute inputs ignore the parent; the empty path yields 'Nothing'.
canonicalizeUnder :: Path Abs Dir -> RawPath -> Maybe (Path Abs File)
canonicalizeUnder _ "" = Nothing
canonicalizeUnder parentPath p =
Path <$> normalize (absolute p)
where
absolute d
| isAbsolute d = d
| otherwise = toText (parentPath </> unsafeRelFile p)
-- | Resolve a raw path against the directory containing @sibling@.
-- Fails if @sibling@ has no parent (empty path or root).
canonicalizeBeside :: Path Abs t -> RawPath -> Maybe (Path Abs File)
canonicalizeBeside sibling p = do
parentPath <- parent sibling
canonicalizeUnder parentPath p
-- | Resolve a raw path against the current working directory.
canonicalizeFromHere :: RawPath -> IO (Maybe (Path Abs File))
canonicalizeFromHere p = do
here <- unsafeAbsDir . addTrailingSlash . T.pack <$> D.getCurrentDirectory
return (canonicalizeUnder here p)
| xcv-/hfind | src/System/HFind/Path.hs | mit | 4,732 | 0 | 14 | 1,146 | 1,661 | 855 | 806 | 128 | 4 |
{-# LANGUAGE TupleSections,FlexibleContexts #-}
module LabelConnections where
import Data.Functor
import qualified Data.Map as M
import Data.Map ((!))
--import Debug.Trace
import Data.Function
import Data.List (sortBy)
import Types
import Unification
import Propositions
import Scopes
import Unbound.LocallyNameless
-- | Label every connection of the proof as unconnected, solved (with
-- its instantiated proposition), undecided, or a type mismatch, by
-- unifying the propositions at both ends of each connection.
labelConnections :: Context -> Task -> Proof -> M.Map (Key Connection) ConnLabel
labelConnections ctxt task proof = labels
where
-- Strategy:
-- 1. For each block, look up the rule data and localize the variables
-- according to the blockNum.
-- 2. Calculate scopes, and give scoped bound variables as arguments to
-- free variables
-- 3. Look at each connection, turn this into equations
-- [(proposition, proposition)], and pass them to the unification module
-- 4. Get back a substiution, apply it to the data structure from step 2
-- 5. For each connection, calculate the ConnLabel
-- Per block: its localized free variables, and per port the localized
-- proposition together with the variables that port brings into scope.
renamedBlockData :: M.Map (Key Block) ([Var], M.Map (Key Port) (Term, [Var]))
renamedBlockData = M.map go (blocks proof)
where
go block = (f', ports')
where
rule = block2Rule ctxt block
l = localVars rule
num = fromIntegral $ blockNum block
-- Make a rule variable unique to this block instance by tagging
-- it with the block number.
localize :: Var -> Var
localize n = makeName (name2String n) num
s = [(n, V (localize n)) | n <- l ]
f' = map localize (freeVars rule)
ports' = M.map goPort (ports rule)
where
goPort p = (prop', scopes')
where
prop' = substs s (portProp p)
scopes' = map localize (portScopes p)
unificationVariables :: [Var]
unificationVariables = concat $ map fst $ M.elems renamedBlockData
scopedVariables :: [Var]
scopedVariables = concatMap (concatMap snd . M.elems . snd) $ M.elems renamedBlockData
scopes = calculateScopes ctxt task proof
scopeMap = M.fromListWith (++) [ (k, [pdom]) | (ks, pdom) <- scopes, k <- ks ]
renamedBlockProps :: M.Map (Key Block) (M.Map (Key Port) Term)
renamedBlockProps = M.mapWithKey prepareBlock renamedBlockData
prepareBlock blockKey (unv, ports) = M.map preparePort ports
where
-- All variables brought into scope at this block by enclosing ports.
-- NOTE(review): `ports M.!` assumes scopeMap only mentions ports
-- present in renamedBlockData; confirm calculateScopes guarantees it.
scopedVars = [ v
| BlockPort pdBlockKey pdPortKey <- M.findWithDefault [] blockKey scopeMap
, Just (_,ports) <- return $ M.lookup pdBlockKey renamedBlockData
, let (_,sv) = ports M.! pdPortKey
, v <- sv
]
-- Change free variables to variables, possibly depending on these arguments
s = [ (s, mkApps (V s) (map V scopedVars)) | s <- unv ] ++
[ (s, V s) | s <- scopedVariables ]
preparePort (prop, _) = (substs s prop)
-- The proposition attached to a port, if any.  NOTE(review): the
-- `!!`/`!` lookups assume port indices and keys are always valid in a
-- well-formed proof; out-of-range values would crash here.
propAt NoPort = Nothing
propAt (ConclusionPort n) = Just $ tConclusions task !! (n-1)
propAt (AssumptionPort n) = Just $ tAssumptions task !! (n-1)
propAt (BlockPort blockKey portKey) = Just $ renamedBlockProps ! blockKey ! portKey
equations =
[ (connKey, (prop1, prop2))
| (connKey, conn) <- sortBy (compare `on` snd) $ M.toList (connections proof)
, Just prop1 <- return $ propAt (connFrom conn)
, Just prop2 <- return $ propAt (connTo conn)
]
(final_bind, unificationResults) = unifyLiberally unificationVariables equations
resultsMap :: M.Map (Key Connection) UnificationResult
resultsMap = M.fromList unificationResults
labels = M.mapWithKey instantiate (connections proof)
instantiate connKey conn = case (propFromMB, propToMB) of
(Nothing, Nothing) -> Unconnected
(Just propFrom, Nothing) -> Ok propFrom
(Nothing, Just propTo) -> Ok propTo
(Just propFrom, Just propTo) -> case resultsMap M.! connKey of
Solved | propFrom == propTo -> Ok propFrom
| otherwise -> error "instantiate: not solved"
Dunno -> DunnoLabel propFrom propTo
Failed -> Mismatch propFrom propTo
where
propFromMB = applyBinding' highest final_bind <$> propAt (connFrom conn)
propToMB = applyBinding' highest final_bind <$> propAt (connTo conn)
-- It is far to costly to do that in every invocatio to applyBinding below
highest = firstFree (M.toList final_bind, map M.elems (M.elems renamedBlockProps))
| psibi/incredible | logic/LabelConnections.hs | mit | 4,433 | 0 | 17 | 1,267 | 1,258 | 661 | 597 | 70 | 9 |
{-| (re-exports)
re-export everything, for convenient importing, and for `ghci`:
@
:m +Commands.Plugins.Spiros
@
-}
module Commands.Plugins.Spiros
( module Commands.Plugins.Spiros.Extra
, module Commands.Plugins.Spiros.Types
, module Commands.Plugins.Spiros.TypeLevel
, module Commands.Plugins.Spiros.Main
, module Commands.Plugins.Spiros.Apply
--, module Commands.Plugins.Spiros.Finite
, module Commands.Plugins.Spiros.Digit.Grammar
-- , module Commands.Plugins.Spiros.Correct
-- , module Commands.Plugins.Spiros.Server
-- -- , module Commands.Plugins.Spiros.Server.Workflow
-- , module Commands.Plugins.Spiros.Shim
, module Commands.Plugins.Spiros.Template
-- grammars
, module Commands.Plugins.Spiros.Root
, module Commands.Plugins.Spiros.Macros
, module Commands.Plugins.Spiros.Shortcut
, module Commands.Plugins.Spiros.Shell
, module Commands.Plugins.Spiros.Keys
, module Commands.Plugins.Spiros.Number
, module Commands.Plugins.Spiros.Phrase
, module Commands.Plugins.Spiros.Edit
, module Commands.Plugins.Spiros.Emacs
, module Commands.Plugins.Spiros.Chrome
) where
import Commands.Plugins.Spiros.Extra
import Commands.Plugins.Spiros.Types
import Commands.Plugins.Spiros.TypeLevel
import Commands.Plugins.Spiros.Main
import Commands.Plugins.Spiros.Apply
-- import Commands.Plugins.Spiros.Finite
import Commands.Plugins.Spiros.Digit.Grammar
-- import Commands.Plugins.Spiros.Correct
-- import Commands.Plugins.Spiros.Server
-- import Commands.Plugins.Spiros.Server.Workflow
-- import Commands.Plugins.Spiros.Shim
import Commands.Plugins.Spiros.Template
import Commands.Plugins.Spiros.Root
import Commands.Plugins.Spiros.Macros
import Commands.Plugins.Spiros.Shortcut
import Commands.Plugins.Spiros.Shell
import Commands.Plugins.Spiros.Keys
import Commands.Plugins.Spiros.Number
import Commands.Plugins.Spiros.Phrase
import Commands.Plugins.Spiros.Edit
import Commands.Plugins.Spiros.Emacs
import Commands.Plugins.Spiros.Chrome
| sboosali/commands-spiros | config/Commands/Plugins/Spiros.hs | gpl-2.0 | 1,960 | 0 | 5 | 168 | 278 | 207 | 71 | 35 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.